chore: checkpoint accumulated dashboard-nanobot changes

dev
mula.liu 2026-03-23 21:28:39 +08:00
parent f77851d496
commit da018d515d
82 changed files with 11301 additions and 1066 deletions

View File

@ -3,12 +3,9 @@ DATA_ROOT=../data
BOTS_WORKSPACE_ROOT=../workspace/bots
# Database
# SQLite (recommended): leave DATABASE_URL unset; the backend will then use:
# sqlite:///{DATA_ROOT}/nanobot_dashboard.db
# DATABASE_URL=sqlite:///../data/nanobot_dashboard.db
# PostgreSQL example:
# DATABASE_URL=postgresql+psycopg://user:password@127.0.0.1:5432/nanobot_dashboard
# MySQL example:
# PostgreSQL:
DATABASE_URL=postgresql+psycopg://user:password@127.0.0.1:5432/nanobot_dashboard
# MySQL:
# DATABASE_URL=mysql+pymysql://user:password@127.0.0.1:3306/nanobot_dashboard
# Show SQL statements in backend logs (debug only).
DATABASE_ECHO=true
@ -50,6 +47,8 @@ STT_DEVICE=cpu
APP_HOST=0.0.0.0
APP_PORT=8000
APP_RELOAD=true
APP_LOG_LEVEL=warning
APP_ACCESS_LOG=false
# Optional overrides (fallback only; usually keep empty when using template files)
DEFAULT_AGENTS_MD=

View File

@ -1,12 +1,29 @@
from typing import Optional
import time
import shlex
from typing import Any, Dict, Optional
import logging
import httpx
from fastapi import APIRouter, Depends, HTTPException, Request
from sqlmodel import Session
from sqlmodel import Session, select
from clients.edge.errors import log_edge_failure, summarize_edge_exception
from clients.edge.http import HttpEdgeClient
from core.cache import cache
from core.database import get_session
from schemas.platform import PlatformSettingsPayload, SystemSettingPayload
from models.bot import BotInstance
from providers.target import ProviderTarget
from providers.selector import get_runtime_provider
from schemas.platform import (
ManagedNodeConnectivityResult,
ManagedNodeNativePreflightResult,
ManagedNodePayload,
PlatformSettingsPayload,
SystemSettingPayload,
)
from services.node_registry_service import ManagedNode
from services.platform_service import (
build_node_resource_overview,
build_platform_overview,
create_or_update_system_setting,
delete_system_setting,
@ -18,10 +35,284 @@ from services.platform_service import (
)
router = APIRouter()
logger = logging.getLogger(__name__)
PLATFORM_OVERVIEW_CACHE_KEY = "platform:overview"
PLATFORM_OVERVIEW_CACHE_TTL_SECONDS = 15
PLATFORM_NODES_CACHE_KEY = "platform:nodes:list"
PLATFORM_NODES_CACHE_TTL_SECONDS = 20
def _cached_platform_overview_payload() -> Optional[Dict[str, Any]]:
    """Return the cached platform-overview payload, or None when absent/malformed."""
    payload = cache.get_json(PLATFORM_OVERVIEW_CACHE_KEY)
    if isinstance(payload, dict):
        return payload
    return None
def _store_platform_overview_payload(payload: Dict[str, Any]) -> Dict[str, Any]:
    """Cache the overview payload for a short TTL and hand it back to the caller."""
    cache.set_json(
        PLATFORM_OVERVIEW_CACHE_KEY,
        payload,
        ttl=PLATFORM_OVERVIEW_CACHE_TTL_SECONDS,
    )
    return payload
def _invalidate_platform_overview_cache() -> None:
    """Drop the cached platform overview so the next read rebuilds it."""
    cache.delete(PLATFORM_OVERVIEW_CACHE_KEY)
def _cached_platform_nodes_payload() -> Optional[Dict[str, Any]]:
    """Return the cached node list as ``{"items": [...]}``, or None when invalid."""
    payload = cache.get_json(PLATFORM_NODES_CACHE_KEY)
    if isinstance(payload, dict) and isinstance(payload.get("items"), list):
        # Rebuild the envelope so stale extra keys never leak out of the cache.
        return {"items": payload["items"]}
    return None
def _store_platform_nodes_payload(items: list[Dict[str, Any]]) -> Dict[str, Any]:
    """Wrap serialized nodes in the list envelope, cache briefly, return it."""
    payload = {"items": items}
    cache.set_json(
        PLATFORM_NODES_CACHE_KEY,
        payload,
        ttl=PLATFORM_NODES_CACHE_TTL_SECONDS,
    )
    return payload
def _invalidate_platform_nodes_cache() -> None:
    """Drop the cached node list so the next request refreshes it."""
    cache.delete(PLATFORM_NODES_CACHE_KEY)
def _normalize_node_payload(payload: ManagedNodePayload) -> ManagedNodePayload:
    """Validate and canonicalize a managed-node payload.

    Lower-cases the node id and transport/runtime/adapter kinds, trims all
    string fields, and normalizes the native sandbox mode.

    Raises:
        HTTPException(400): when node_id is empty, the transport is not
            "edge", or base_url is missing.
    """
    normalized_node_id = str(payload.node_id or "").strip().lower()
    if not normalized_node_id:
        raise HTTPException(status_code=400, detail="node_id is required")
    transport_kind = str(payload.transport_kind or "edge").strip().lower() or "edge"
    if transport_kind != "edge":
        raise HTTPException(status_code=400, detail="Only edge transport is supported")
    runtime_kind = str(payload.runtime_kind or "docker").strip().lower() or "docker"
    core_adapter = str(payload.core_adapter or "nanobot").strip().lower() or "nanobot"
    native_sandbox_mode = _normalize_native_sandbox_mode(payload.native_sandbox_mode)
    base_url = str(payload.base_url or "").strip()
    # transport_kind is guaranteed to be "edge" at this point (anything else
    # raised above), so the former `transport_kind == "edge" and` guard was
    # redundant and has been removed.
    if not base_url:
        raise HTTPException(status_code=400, detail="base_url is required for edge nodes")
    return payload.model_copy(
        update={
            "node_id": normalized_node_id,
            "display_name": str(payload.display_name or normalized_node_id).strip() or normalized_node_id,
            "base_url": base_url,
            "auth_token": str(payload.auth_token or "").strip(),
            "transport_kind": transport_kind,
            "runtime_kind": runtime_kind,
            "core_adapter": core_adapter,
            "workspace_root": str(payload.workspace_root or "").strip(),
            "native_command": str(payload.native_command or "").strip(),
            "native_workdir": str(payload.native_workdir or "").strip(),
            "native_sandbox_mode": native_sandbox_mode,
        }
    )
def _normalize_native_sandbox_mode(raw_value: Any) -> str:
text = str(raw_value or "").strip().lower()
if text in {"workspace", "sandbox", "strict"}:
return "workspace"
if text in {"full_access", "full-access", "danger-full-access", "escape"}:
return "full_access"
return "inherit"
def _managed_node_from_payload(payload: ManagedNodePayload) -> ManagedNode:
    """Convert a validated API payload into a registry ManagedNode record."""
    normalized = _normalize_node_payload(payload)
    # Runtime/launcher configuration travels in the metadata mapping.
    node_metadata = {
        "transport_kind": normalized.transport_kind,
        "runtime_kind": normalized.runtime_kind,
        "core_adapter": normalized.core_adapter,
        "workspace_root": normalized.workspace_root,
        "native_command": normalized.native_command,
        "native_workdir": normalized.native_workdir,
        "native_sandbox_mode": normalized.native_sandbox_mode,
    }
    return ManagedNode(
        node_id=normalized.node_id,
        display_name=normalized.display_name,
        base_url=normalized.base_url,
        enabled=bool(normalized.enabled),
        auth_token=normalized.auth_token,
        metadata=node_metadata,
    )
def _node_status(node: ManagedNode, *, refresh_failed: bool = False) -> str:
    """Derive a coarse status string for a managed node.

    "disabled" beats everything; non-edge transports are opaque ("unknown");
    edge nodes are "offline" after a failed refresh, "online" once seen.
    """
    if not bool(node.enabled):
        return "disabled"
    metadata = node.metadata or {}
    transport = str(metadata.get("transport_kind") or "edge").strip().lower()
    if transport != "edge":
        # Only edge transports are actively probed.
        return "unknown"
    if refresh_failed:
        return "offline"
    return "online" if node.last_seen_at else "unknown"
def _serialize_node(node: ManagedNode, *, refresh_failed: bool = False) -> Dict[str, Any]:
    """Build the JSON-safe dict the node endpoints expose for one node."""
    metadata = dict(node.metadata or {})

    def _meta_str(key: str, default: str = "") -> str:
        # Metadata values may be missing or non-strings; coerce defensively.
        return str(metadata.get(key) or default)

    return {
        "node_id": node.node_id,
        "display_name": node.display_name,
        "base_url": node.base_url,
        "enabled": bool(node.enabled),
        "transport_kind": _meta_str("transport_kind"),
        "runtime_kind": _meta_str("runtime_kind"),
        "core_adapter": _meta_str("core_adapter"),
        "workspace_root": _meta_str("workspace_root"),
        "native_command": _meta_str("native_command"),
        "native_workdir": _meta_str("native_workdir"),
        "native_sandbox_mode": _meta_str("native_sandbox_mode", "inherit"),
        "metadata": metadata,
        "capabilities": dict(node.capabilities or {}),
        "resources": dict(getattr(node, "resources", {}) or {}),
        "last_seen_at": node.last_seen_at,
        "status": _node_status(node, refresh_failed=refresh_failed),
    }
def _test_edge_connectivity(resolve_edge_client, node: ManagedNode) -> ManagedNodeConnectivityResult:
    """Probe a node's dashboard-edge endpoint and report reachability + latency.

    `resolve_edge_client` maps a ProviderTarget to an edge client. Any failure
    (client construction, heartbeat, preflight) is folded into an offline
    result rather than raised.
    """
    started = time.perf_counter()
    try:
        client = resolve_edge_client(
            ProviderTarget(
                node_id=node.node_id,
                transport_kind="edge",
                runtime_kind=str((node.metadata or {}).get("runtime_kind") or "docker"),
                core_adapter=str((node.metadata or {}).get("core_adapter") or "nanobot"),
            )
        )
        # Heartbeat, optionally enriched with a native-launcher preflight.
        node_self = _edge_node_self_with_native_preflight(client=client, node=node)
        latency_ms = max(1, int((time.perf_counter() - started) * 1000))  # floor at 1 ms
        return ManagedNodeConnectivityResult(
            ok=True,
            status="online",
            latency_ms=latency_ms,
            detail="dashboard-edge reachable",
            node_self=node_self,
        )
    except Exception as exc:
        latency_ms = max(1, int((time.perf_counter() - started) * 1000))
        return ManagedNodeConnectivityResult(
            ok=False,
            status="offline",
            latency_ms=latency_ms,
            detail=summarize_edge_exception(exc),
            node_self=None,
        )
def _split_native_command(raw_command: Optional[str]) -> list[str]:
text = str(raw_command or "").strip()
if not text:
return []
try:
return [str(item or "").strip() for item in shlex.split(text) if str(item or "").strip()]
except Exception:
return [text]
def _runtime_native_supported(node_self: Dict[str, Any]) -> bool:
capabilities = dict(node_self.get("capabilities") or {})
runtime_caps = dict(capabilities.get("runtime") or {})
return bool(runtime_caps.get("native") is True)
def _test_edge_native_preflight(
    resolve_edge_client,
    node: ManagedNode,
    *,
    native_command: Optional[str] = None,
    native_workdir: Optional[str] = None,
) -> ManagedNodeNativePreflightResult:
    """Run the native-launcher preflight against a node and summarize it.

    Connectivity errors are folded into an offline result that echoes the
    command/workdir hints from the arguments instead of being raised.
    """
    started = time.perf_counter()
    command_hint = _split_native_command(native_command)
    workdir_hint = str(native_workdir or "").strip()
    try:
        client = resolve_edge_client(
            ProviderTarget(
                node_id=node.node_id,
                transport_kind="edge",
                runtime_kind=str((node.metadata or {}).get("runtime_kind") or "docker"),
                core_adapter=str((node.metadata or {}).get("core_adapter") or "nanobot"),
            )
        )
        node_self = dict(client.heartbeat_node() or {})
        preflight = dict(
            client.preflight_native(
                native_command=native_command,
                native_workdir=native_workdir,
            )
            or {}
        )
        latency_ms = max(1, int((time.perf_counter() - started) * 1000))  # floor at 1 ms
        command = [str(item or "").strip() for item in list(preflight.get("command") or []) if str(item or "").strip()]
        workdir = str(preflight.get("workdir") or "")
        detail = str(preflight.get("detail") or "")
        if not detail:
            detail = "native launcher ready" if bool(preflight.get("ok")) else "native launcher not ready"
        return ManagedNodeNativePreflightResult(
            ok=bool(preflight.get("ok")),
            status="online",
            latency_ms=latency_ms,
            detail=detail,
            command=command,
            workdir=workdir,
            command_available=bool(preflight.get("command_available")),
            workdir_exists=bool(preflight.get("workdir_exists")),
            runtime_native_supported=_runtime_native_supported(node_self),
            node_self=node_self,
        )
    except Exception as exc:
        latency_ms = max(1, int((time.perf_counter() - started) * 1000))
        return ManagedNodeNativePreflightResult(
            ok=False,
            status="offline",
            latency_ms=latency_ms,
            detail=summarize_edge_exception(exc),
            command=command_hint,
            workdir=workdir_hint,
            command_available=False,
            # With no workdir hint there is nothing to disprove, so report True.
            workdir_exists=False if workdir_hint else True,
            runtime_native_supported=False,
            node_self=None,
        )
def _edge_node_self_with_native_preflight(*, client: HttpEdgeClient, node: ManagedNode) -> Dict[str, Any]:
    """Fetch the node's heartbeat payload, optionally enriched with a native preflight.

    The preflight runs only when the node is configured for native execution
    (a native command/workdir is set, or runtime_kind == "native"). Preflight
    failures are logged (rate-limited) and the plain heartbeat is returned.
    """
    node_self = dict(client.heartbeat_node() or {})
    metadata = dict(node.metadata or {})
    native_command = str(metadata.get("native_command") or "").strip() or None
    native_workdir = str(metadata.get("native_workdir") or "").strip() or None
    runtime_kind = str(metadata.get("runtime_kind") or "docker").strip().lower()
    should_probe = bool(native_command or native_workdir or runtime_kind == "native")
    if not should_probe:
        return node_self
    try:
        preflight = dict(client.preflight_native(native_command=native_command, native_workdir=native_workdir) or {})
    except Exception as exc:
        log_edge_failure(
            logger,
            key=f"platform-node-native-preflight:{node.node_id}",
            exc=exc,
            message=f"Failed to run native preflight for node_id={node.node_id}",
        )
        return node_self
    # Fold the preflight verdict into capabilities.process for downstream consumers.
    caps = dict(node_self.get("capabilities") or {})
    process_caps = dict(caps.get("process") or {})
    if preflight.get("command"):
        process_caps["command"] = list(preflight.get("command") or [])
    process_caps["available"] = bool(preflight.get("ok"))
    process_caps["command_available"] = bool(preflight.get("command_available"))
    process_caps["workdir_exists"] = bool(preflight.get("workdir_exists"))
    process_caps["workdir"] = str(preflight.get("workdir") or "")
    process_caps["detail"] = str(preflight.get("detail") or "")
    caps["process"] = process_caps
    node_self["capabilities"] = caps
    node_self["native_preflight"] = preflight
    return node_self
def _apply_platform_runtime_changes(request: Request) -> None:
    """Flush caches and reset the speech runtime after platform settings change."""
    # Wipe the whole cache namespace, then the derived platform payloads.
    cache.delete_prefix("")
    _invalidate_platform_overview_cache()
    _invalidate_platform_nodes_cache()
    speech = getattr(request.app.state, "speech_service", None)
    if speech is not None and hasattr(speech, "reset_runtime"):
        speech.reset_runtime()
@ -29,8 +320,305 @@ def _apply_platform_runtime_changes(request: Request) -> None:
@router.get("/api/platform/overview")
def get_platform_overview(request: Request, session: Session = Depends(get_session)):
    """Return the platform overview, served from a short-TTL cache when possible.

    Fix: removed the stale pre-refactor lines (a `docker_manager` lookup and
    an unconditional `return build_platform_overview(session, docker_manager)`)
    that made the cache path and provider-based runtime reads unreachable.
    """
    cached_payload = _cached_platform_overview_payload()
    if cached_payload is not None:
        return cached_payload

    def _read_runtime(bot):
        # Resolve the runtime provider for this bot and read status + resources.
        provider = get_runtime_provider(request.app.state, bot)
        status = str(provider.get_runtime_status(bot_id=str(bot.id or "")) or "STOPPED").upper()
        runtime = dict(provider.get_resource_snapshot(bot_id=str(bot.id or "")) or {})
        runtime.setdefault("docker_status", status)
        return status, runtime

    payload = build_platform_overview(session, read_runtime=_read_runtime)
    return _store_platform_overview_payload(payload)
@router.get("/api/platform/nodes")
def list_platform_nodes(request: Request, session: Session = Depends(get_session)):
    """List managed nodes, refreshing enabled edge nodes on a cache miss."""
    cached_payload = _cached_platform_nodes_payload()
    if cached_payload is not None:
        return cached_payload
    node_registry = getattr(request.app.state, "node_registry_service", None)
    if node_registry is None or not hasattr(node_registry, "list_nodes"):
        # Registry not wired up (e.g. minimal deployments): report no nodes.
        return {"items": []}
    resolve_edge_client = getattr(request.app.state, "resolve_edge_client", None)
    refreshed_items = []
    for node in node_registry.list_nodes():
        metadata = dict(node.metadata or {})
        refresh_failed = False
        # Only probe enabled edge nodes, and only when a resolver is available.
        if (
            callable(resolve_edge_client)
            and str(metadata.get("transport_kind") or "").strip().lower() == "edge"
            and bool(node.enabled)
        ):
            try:
                client = resolve_edge_client(
                    ProviderTarget(
                        node_id=node.node_id,
                        transport_kind="edge",
                        runtime_kind=str(metadata.get("runtime_kind") or "docker"),
                        core_adapter=str(metadata.get("core_adapter") or "nanobot"),
                    )
                )
                node_self = _edge_node_self_with_native_preflight(client=client, node=node)
                # Persist freshly observed capabilities/resources and last-seen time.
                node = node_registry.mark_node_seen(
                    session,
                    node_id=node.node_id,
                    display_name=str(node.display_name or node_self.get("display_name") or node.node_id),
                    capabilities=dict(node_self.get("capabilities") or {}),
                    resources=dict(node_self.get("resources") or {}),
                )
            except Exception as exc:
                # Offline nodes are expected; log (rate-limited) and flag the failure.
                refresh_failed = True
                log_edge_failure(
                    logger,
                    key=f"platform-node-refresh:{node.node_id}",
                    exc=exc,
                    message=f"Failed to refresh edge node metadata for node_id={node.node_id}",
                )
        refreshed_items.append((node, refresh_failed))
    items = []
    for node, refresh_failed in refreshed_items:
        items.append(_serialize_node(node, refresh_failed=refresh_failed))
    return _store_platform_nodes_payload(items)
@router.get("/api/platform/nodes/{node_id}")
def get_platform_node(node_id: str, request: Request, session: Session = Depends(get_session)):
    """Return a single managed node by id (404 when unknown)."""
    normalized = str(node_id or "").strip().lower()
    registry = getattr(request.app.state, "node_registry_service", None)
    if registry is None or not hasattr(registry, "get_node"):
        raise HTTPException(status_code=500, detail="node registry is unavailable")
    node = registry.get_node(normalized)
    if node is None:
        raise HTTPException(status_code=404, detail=f"Managed node not found: {normalized}")
    return _serialize_node(node)
@router.post("/api/platform/nodes")
def create_platform_node(payload: ManagedNodePayload, request: Request, session: Session = Depends(get_session)):
    """Register a new managed node; duplicates are rejected with 409."""
    registry = getattr(request.app.state, "node_registry_service", None)
    if registry is None or not hasattr(registry, "get_node"):
        raise HTTPException(status_code=500, detail="node registry is unavailable")
    normalized = _normalize_node_payload(payload)
    if registry.get_node(normalized.node_id) is not None:
        raise HTTPException(status_code=409, detail=f"Node already exists: {normalized.node_id}")
    node = registry.upsert_node(session, _managed_node_from_payload(normalized))
    # The node set changed: derived payloads must be rebuilt.
    _invalidate_platform_overview_cache()
    _invalidate_platform_nodes_cache()
    return _serialize_node(node)
@router.put("/api/platform/nodes/{node_id}")
def update_platform_node(node_id: str, payload: ManagedNodePayload, request: Request, session: Session = Depends(get_session)):
    """Replace a managed node's definition; the node_id itself is immutable."""
    normalized_node_id = str(node_id or "").strip().lower()
    node_registry = getattr(request.app.state, "node_registry_service", None)
    if node_registry is None or not hasattr(node_registry, "get_node"):
        raise HTTPException(status_code=500, detail="node registry is unavailable")
    existing = node_registry.get_node(normalized_node_id)
    if existing is None:
        raise HTTPException(status_code=404, detail=f"Managed node not found: {normalized_node_id}")
    normalized = _normalize_node_payload(payload)
    if normalized.node_id != normalized_node_id:
        raise HTTPException(status_code=400, detail="node_id cannot be changed")
    node = node_registry.upsert_node(
        session,
        ManagedNode(
            node_id=normalized_node_id,
            display_name=normalized.display_name,
            base_url=normalized.base_url,
            enabled=bool(normalized.enabled),
            # A blank token in the payload keeps the previously stored token.
            auth_token=normalized.auth_token or existing.auth_token,
            metadata={
                "transport_kind": normalized.transport_kind,
                "runtime_kind": normalized.runtime_kind,
                "core_adapter": normalized.core_adapter,
                "workspace_root": normalized.workspace_root,
                "native_command": normalized.native_command,
                "native_workdir": normalized.native_workdir,
                "native_sandbox_mode": normalized.native_sandbox_mode,
            },
            # Preserve observed state; an update must not erase capability history.
            capabilities=dict(existing.capabilities or {}),
            resources=dict(existing.resources or {}),
            last_seen_at=existing.last_seen_at,
        ),
    )
    _invalidate_platform_overview_cache()
    _invalidate_platform_nodes_cache()
    return _serialize_node(node)
@router.delete("/api/platform/nodes/{node_id}")
def delete_platform_node(node_id: str, request: Request, session: Session = Depends(get_session)):
    """Delete a managed node unless it is 'local' or still has bots assigned."""
    normalized_node_id = str(node_id or "").strip().lower()
    if normalized_node_id == "local":
        raise HTTPException(status_code=400, detail="Local node cannot be deleted")
    node_registry = getattr(request.app.state, "node_registry_service", None)
    if node_registry is None or not hasattr(node_registry, "get_node"):
        raise HTTPException(status_code=500, detail="node registry is unavailable")
    if node_registry.get_node(normalized_node_id) is None:
        raise HTTPException(status_code=404, detail=f"Managed node not found: {normalized_node_id}")
    # Refuse deletion while bots still reference this node (show up to 5 ids).
    attached_bot_ids = session.exec(select(BotInstance.id).where(BotInstance.node_id == normalized_node_id)).all()
    if attached_bot_ids:
        raise HTTPException(
            status_code=400,
            detail=f"Node {normalized_node_id} still has bots assigned: {', '.join(str(item) for item in attached_bot_ids[:5])}",
        )
    node_registry.delete_node(session, normalized_node_id)
    _invalidate_platform_overview_cache()
    _invalidate_platform_nodes_cache()
    return {"status": "deleted", "node_id": normalized_node_id}
@router.post("/api/platform/nodes/test")
def test_platform_node(payload: ManagedNodePayload, request: Request):
    """Probe connectivity for an unsaved (ad-hoc) node definition."""
    temp_node = _managed_node_from_payload(_normalize_node_payload(payload))

    def _make_client(_target):
        # Short-timeout client; proxies from the environment are ignored.
        return HttpEdgeClient(
            node=temp_node,
            http_client_factory=lambda: httpx.Client(timeout=10.0, trust_env=False),
            async_http_client_factory=lambda: httpx.AsyncClient(timeout=10.0, trust_env=False),
        )

    return _test_edge_connectivity(_make_client, temp_node).model_dump()
@router.post("/api/platform/nodes/native/preflight")
def test_platform_node_native_preflight(payload: ManagedNodePayload, request: Request):
    """Run the native-launcher preflight against an unsaved node definition."""
    normalized = _normalize_node_payload(payload)
    temp_node = _managed_node_from_payload(normalized)

    def _make_client(_target):
        # Short-timeout client; proxies from the environment are ignored.
        return HttpEdgeClient(
            node=temp_node,
            http_client_factory=lambda: httpx.Client(timeout=10.0, trust_env=False),
            async_http_client_factory=lambda: httpx.AsyncClient(timeout=10.0, trust_env=False),
        )

    result = _test_edge_native_preflight(
        _make_client,
        temp_node,
        native_command=str(normalized.native_command or "").strip() or None,
        native_workdir=str(normalized.native_workdir or "").strip() or None,
    )
    return result.model_dump()
@router.post("/api/platform/nodes/{node_id}/test")
def test_saved_platform_node(node_id: str, request: Request, session: Session = Depends(get_session)):
    """Connectivity-test a saved node and record a successful heartbeat."""
    normalized_node_id = str(node_id or "").strip().lower()
    node_registry = getattr(request.app.state, "node_registry_service", None)
    if node_registry is None or not hasattr(node_registry, "get_node"):
        raise HTTPException(status_code=500, detail="node registry is unavailable")
    node = node_registry.get_node(normalized_node_id)
    if node is None:
        raise HTTPException(status_code=404, detail=f"Managed node not found: {normalized_node_id}")
    transport_kind = str((node.metadata or {}).get("transport_kind") or "edge").strip().lower()
    if transport_kind != "edge":
        _invalidate_platform_nodes_cache()
        raise HTTPException(status_code=400, detail="Only edge transport is supported")
    # Dedicated short-timeout client; the resolver's target argument is ignored.
    result = _test_edge_connectivity(
        lambda target: HttpEdgeClient(
            node=node,
            http_client_factory=lambda: httpx.Client(timeout=10.0, trust_env=False),
            async_http_client_factory=lambda: httpx.AsyncClient(timeout=10.0, trust_env=False),
        ),
        node,
    )
    if result.ok:
        # On success, persist the observed display name, capabilities and resources.
        node_registry.mark_node_seen(
            session,
            node_id=node.node_id,
            display_name=str(node.display_name or result.node_self.get("display_name") or node.node_id) if result.node_self else node.display_name,
            capabilities=dict(result.node_self.get("capabilities") or {}) if result.node_self else dict(node.capabilities or {}),
            resources=dict(result.node_self.get("resources") or {}) if result.node_self else dict(getattr(node, "resources", {}) or {}),
        )
    _invalidate_platform_nodes_cache()
    return result.model_dump()
@router.post("/api/platform/nodes/{node_id}/native/preflight")
def test_saved_platform_node_native_preflight(node_id: str, request: Request, session: Session = Depends(get_session)):
    """Run the native-launcher preflight for a saved node and record the heartbeat."""
    normalized_node_id = str(node_id or "").strip().lower()
    node_registry = getattr(request.app.state, "node_registry_service", None)
    if node_registry is None or not hasattr(node_registry, "get_node"):
        raise HTTPException(status_code=500, detail="node registry is unavailable")
    node = node_registry.get_node(normalized_node_id)
    if node is None:
        raise HTTPException(status_code=404, detail=f"Managed node not found: {normalized_node_id}")
    transport_kind = str((node.metadata or {}).get("transport_kind") or "edge").strip().lower()
    if transport_kind != "edge":
        _invalidate_platform_nodes_cache()
        raise HTTPException(status_code=400, detail="Only edge transport is supported")
    metadata = dict(node.metadata or {})
    # Dedicated short-timeout client; the resolver's target argument is ignored.
    result = _test_edge_native_preflight(
        lambda _target: HttpEdgeClient(
            node=node,
            http_client_factory=lambda: httpx.Client(timeout=10.0, trust_env=False),
            async_http_client_factory=lambda: httpx.AsyncClient(timeout=10.0, trust_env=False),
        ),
        node,
        native_command=str(metadata.get("native_command") or "").strip() or None,
        native_workdir=str(metadata.get("native_workdir") or "").strip() or None,
    )
    if result.status == "online" and result.node_self:
        # The node answered (even if the preflight itself failed): mark it seen.
        node_registry.mark_node_seen(
            session,
            node_id=node.node_id,
            display_name=str(node.display_name or result.node_self.get("display_name") or node.node_id),
            capabilities=dict(result.node_self.get("capabilities") or {}),
            resources=dict(result.node_self.get("resources") or {}),
        )
    _invalidate_platform_nodes_cache()
    return result.model_dump()
@router.get("/api/platform/nodes/{node_id}/resources")
def get_platform_node_resources(node_id: str, request: Request, session: Session = Depends(get_session)):
    """Resource overview for one node; edge nodes are enriched with a live report.

    Improvements: removed the duplicate function-local
    `from providers.target import ProviderTarget` (already imported at module
    top) and flattened the four-level nesting into a single guard.
    """
    normalized_node_id = str(node_id or "").strip().lower()
    node_registry = getattr(request.app.state, "node_registry_service", None)
    node = None
    if node_registry is not None and hasattr(node_registry, "get_node"):
        node = node_registry.get_node(normalized_node_id)
    metadata = dict(getattr(node, "metadata", {}) or {}) if node is not None else {}
    resolve_edge_client = getattr(request.app.state, "resolve_edge_client", None)
    if (
        node is not None
        and str(metadata.get("transport_kind") or "").strip().lower() == "edge"
        and callable(resolve_edge_client)
    ):
        # Edge node: start from the DB-backed overview, then merge the live report.
        base = build_node_resource_overview(session, node_id=normalized_node_id, read_runtime=None)
        client = resolve_edge_client(
            ProviderTarget(
                node_id=normalized_node_id,
                transport_kind="edge",
                runtime_kind=str(metadata.get("runtime_kind") or "docker"),
                core_adapter=str(metadata.get("core_adapter") or "nanobot"),
            )
        )
        try:
            resource_report = dict(client.get_node_resources() or {})
        except Exception as exc:
            # Edge unreachable: fall back to the DB-only overview.
            log_edge_failure(
                logger,
                key=f"platform-node-resources:{normalized_node_id}",
                exc=exc,
                message=f"Failed to load edge node resources for node_id={normalized_node_id}",
            )
            return base
        base["resources"] = dict(resource_report.get("resources") or resource_report)
        if resource_report:
            base["node_report"] = resource_report
        return base

    def _read_runtime(bot):
        # Local/unknown nodes: read runtime status via the per-bot provider.
        provider = get_runtime_provider(request.app.state, bot)
        status = str(provider.get_runtime_status(bot_id=str(bot.id or "")) or "STOPPED").upper()
        runtime = dict(provider.get_resource_snapshot(bot_id=str(bot.id or "")) or {})
        runtime.setdefault("docker_status", status)
        return status, runtime

    return build_node_resource_overview(session, node_id=normalized_node_id, read_runtime=_read_runtime)
@router.get("/api/platform/settings")
@ -47,7 +635,8 @@ def update_platform_settings_api(payload: PlatformSettingsPayload, request: Requ
@router.post("/api/platform/cache/clear")
def clear_platform_cache():
    """Flush every cached payload, including the derived platform keys."""
    # delete_prefix("") wipes the whole namespace; the explicit invalidations
    # below mirror the other mutation endpoints for consistency.
    cache.delete_prefix("")
    _invalidate_platform_overview_cache()
    _invalidate_platform_nodes_cache()
    return {"status": "cleared"}

View File

@ -0,0 +1 @@
# Client package for dashboard-edge integrations.

View File

@ -0,0 +1,147 @@
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional
from fastapi import Request, UploadFile
from fastapi.responses import Response
from models.bot import BotInstance
class EdgeClient(ABC):
    """Abstract transport for driving bots hosted on a remote dashboard-edge node.

    Concrete implementations carry out bot lifecycle, log/monitor access,
    key-value state, and workspace-file operations against that node.
    """

    @abstractmethod
    async def start_bot(self, *, bot: BotInstance, start_payload: Dict[str, Any]) -> Dict[str, Any]:
        """Start *bot* on the node using *start_payload*; return a status dict."""
        raise NotImplementedError

    @abstractmethod
    def stop_bot(self, *, bot: BotInstance) -> Dict[str, Any]:
        """Stop *bot* on the node; return a status dict."""
        raise NotImplementedError

    @abstractmethod
    def deliver_command(self, *, bot_id: str, command: str, media: Optional[List[str]] = None) -> Optional[str]:
        """Send a command (with optional media references) to the running bot."""
        raise NotImplementedError

    @abstractmethod
    def get_recent_logs(self, *, bot_id: str, tail: int = 300) -> List[str]:
        """Return up to *tail* recent log lines for the bot."""
        raise NotImplementedError

    @abstractmethod
    def ensure_monitor(self, *, bot_id: str) -> bool:
        """Ensure the node-side monitor for the bot is running; True on success."""
        raise NotImplementedError

    @abstractmethod
    def get_monitor_packets(self, *, bot_id: str, after_seq: int = 0, limit: int = 200) -> List[Dict[str, Any]]:
        """Return monitor packets with sequence numbers greater than *after_seq*."""
        raise NotImplementedError

    @abstractmethod
    def get_runtime_status(self, *, bot_id: str) -> str:
        """Return the bot's runtime status string (e.g. running/stopped)."""
        raise NotImplementedError

    @abstractmethod
    def get_resource_snapshot(self, *, bot_id: str) -> Dict[str, Any]:
        """Return a point-in-time resource usage snapshot for the bot."""
        raise NotImplementedError

    @abstractmethod
    def get_node_resources(self) -> Dict[str, Any]:
        """Return a node-level resource report."""
        raise NotImplementedError

    @abstractmethod
    def get_node_self(self) -> Dict[str, Any]:
        """Return the node's self-description (identity, capabilities, resources)."""
        raise NotImplementedError

    @abstractmethod
    def preflight_native(self, *, native_command: Optional[str] = None, native_workdir: Optional[str] = None) -> Dict[str, Any]:
        """Check whether the node can launch natively with the given command/workdir."""
        raise NotImplementedError

    @abstractmethod
    def read_state(
        self,
        *,
        bot_id: str,
        state_key: str,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Read a keyed state document for the bot."""
        raise NotImplementedError

    @abstractmethod
    def write_state(
        self,
        *,
        bot_id: str,
        state_key: str,
        data: Dict[str, Any],
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Write a keyed state document for the bot."""
        raise NotImplementedError

    @abstractmethod
    def sync_bot_workspace(
        self,
        *,
        bot_id: str,
        channels_override: Optional[List[Dict[str, Any]]] = None,
        global_delivery_override: Optional[Dict[str, Any]] = None,
        runtime_overrides: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Synchronize the bot's workspace configuration, applying any overrides."""
        raise NotImplementedError

    @abstractmethod
    def purge_workspace(self, *, bot_id: str, workspace_root: Optional[str] = None) -> Dict[str, Any]:
        """Remove the bot's workspace contents on the node."""
        raise NotImplementedError

    @abstractmethod
    def list_tree(
        self,
        *,
        bot_id: str,
        path: Optional[str] = None,
        recursive: bool = False,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """List the workspace directory tree at *path* (optionally recursive)."""
        raise NotImplementedError

    @abstractmethod
    def read_file(
        self,
        *,
        bot_id: str,
        path: str,
        max_bytes: int = 200000,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Read a workspace file, capped at *max_bytes*."""
        raise NotImplementedError

    @abstractmethod
    def write_markdown(
        self,
        *,
        bot_id: str,
        path: str,
        content: str,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Write markdown *content* to a workspace file."""
        raise NotImplementedError

    @abstractmethod
    async def upload_files(
        self,
        *,
        bot_id: str,
        files: List[UploadFile],
        path: Optional[str] = None,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Upload one or more files into the bot's workspace."""
        raise NotImplementedError

    @abstractmethod
    def serve_file(
        self,
        *,
        bot_id: str,
        path: str,
        download: bool,
        request: Request,
        public: bool = False,
        redirect_html_to_raw: bool = False,
        workspace_root: Optional[str] = None,
    ) -> Response:
        """Serve a workspace file as an HTTP response (inline or download)."""
        raise NotImplementedError

View File

@ -0,0 +1,84 @@
import logging
import threading
import time
from typing import Any
import httpx
from fastapi import HTTPException
_OFFLINE_LOG_LOCK = threading.Lock()
_OFFLINE_LOGGED_AT: dict[str, float] = {}
_DEFAULT_LOG_COOLDOWN_SECONDS = 60.0
def describe_edge_node(node: Any) -> str:
    """Return a human-readable label for an edge node.

    Shows "display (id)" when both are set and differ; otherwise whichever
    is available, falling back to "unknown edge node".
    """
    name = str(getattr(node, "display_name", "") or "").strip()
    ident = str(getattr(node, "node_id", "") or "").strip()
    if name and ident and name != ident:
        return f"{name} ({ident})"
    return name or ident or "unknown edge node"
def summarize_edge_exception(exc: Exception) -> str:
    """Compact description of an edge failure, capped at 400 characters.

    Prefers an HTTPException-style ``detail`` attribute when present; an
    empty message falls back to the exception class name.
    """
    detail = getattr(exc, "detail", None)
    source = exc if detail is None else detail
    text = str(source).strip()
    return text[:400] if text else exc.__class__.__name__
def edge_transport_http_exception(exc: httpx.RequestError, *, node: Any) -> HTTPException:
    """Translate a transport-level httpx error into a 502 HTTPException.

    Timeouts get a dedicated message; any other request error is reported as
    unreachable with the underlying reason. Detail is capped at 400 chars.
    """
    node_label = describe_edge_node(node)
    if isinstance(exc, httpx.TimeoutException):
        detail = f"dashboard-edge timed out for node {node_label}"
    else:
        reason = str(exc).strip() or exc.__class__.__name__
        detail = f"dashboard-edge is unreachable for node {node_label}: {reason}"
    return HTTPException(status_code=502, detail=detail[:400])
def is_expected_edge_offline_error(exc: Exception) -> bool:
    """True for failures that simply mean the edge node is offline/unreachable.

    Any httpx transport error qualifies; HTTPExceptions qualify only with a
    gateway status (502/503/504) and an offline-looking detail message.
    """
    if isinstance(exc, httpx.RequestError):
        return True
    if not isinstance(exc, HTTPException):
        return False
    status_code = int(getattr(exc, "status_code", 0) or 0)
    if status_code not in {502, 503, 504}:
        return False
    detail = summarize_edge_exception(exc).lower()
    offline_markers = (
        "dashboard-edge is unreachable",
        "dashboard-edge timed out",
        "connection refused",
        "request failed before receiving a response",
        "name or service not known",
        "nodename nor servname provided",
        "temporary failure in name resolution",
    )
    return any(marker in detail for marker in offline_markers)
def log_edge_failure(
    logger: logging.Logger,
    *,
    key: str,
    exc: Exception,
    message: str,
    cooldown_seconds: float = _DEFAULT_LOG_COOLDOWN_SECONDS,
) -> None:
    """Log an edge failure appropriately.

    Expected offline errors are logged at INFO, rate-limited per *key*;
    anything else gets a full traceback via logger.exception.
    """
    detail = summarize_edge_exception(exc)
    if not is_expected_edge_offline_error(exc):
        logger.exception("%s detail=%s", message, detail)
        return
    if _should_emit_offline_log(key=key, cooldown_seconds=cooldown_seconds):
        logger.info("%s detail=%s", message, detail)
def _should_emit_offline_log(*, key: str, cooldown_seconds: float) -> bool:
    """Rate-limit offline log lines per key; True when the cooldown has elapsed.

    Thread-safe via the module lock; the cooldown is floored at 1 second and
    falls back to the module default when zero/None.
    """
    now = time.monotonic()
    normalized_key = str(key or "edge-offline").strip() or "edge-offline"
    cooldown = max(1.0, float(cooldown_seconds or _DEFAULT_LOG_COOLDOWN_SECONDS))
    with _OFFLINE_LOG_LOCK:
        last_logged_at = _OFFLINE_LOGGED_AT.get(normalized_key, 0.0)
        if now - last_logged_at < cooldown:
            return False
        _OFFLINE_LOGGED_AT[normalized_key] = now
        return True

View File

@ -0,0 +1,455 @@
import mimetypes
from typing import Any, Callable, Dict, List, Optional
from urllib.parse import quote
import httpx
from fastapi import HTTPException, Request, UploadFile
from fastapi.responses import RedirectResponse, Response
from clients.edge.base import EdgeClient
from clients.edge.errors import edge_transport_http_exception
from models.bot import BotInstance
from schemas.edge import (
EdgeCommandRequest,
EdgeLogsResponse,
EdgeNativePreflightRequest,
EdgeNativePreflightResponse,
EdgeNodeHeartbeatResponse,
EdgeMonitorPacketsResponse,
EdgeMarkdownWriteRequest,
EdgeMonitorEnsureResponse,
EdgeNodeResourcesResponse,
EdgeNodeSelfResponse,
EdgeStateResponse,
EdgeStateWriteRequest,
EdgeStartBotRequest,
EdgeStatusResponse,
EdgeWorkspaceSyncRequest,
)
from services.node_registry_service import ManagedNode
EDGE_AUTH_HEADER = "x-dashboard-edge-token"
class HttpEdgeClient(EdgeClient):
    """HTTP implementation of ``EdgeClient``.

    Proxies bot lifecycle, monitoring, state, and workspace operations to a
    remote dashboard-edge service described by a ``ManagedNode`` record.
    Transport-level failures (``httpx.RequestError``) are translated into
    HTTP exceptions via ``edge_transport_http_exception``; non-2xx responses
    surface as 502 errors with a truncated detail string.
    """

    def __init__(
        self,
        *,
        node: ManagedNode,
        http_client_factory: Optional[Callable[[], httpx.Client]] = None,
        async_http_client_factory: Optional[Callable[[], httpx.AsyncClient]] = None,
    ) -> None:
        # Client factories are injectable (e.g. for tests); the defaults use a
        # 15s timeout and ignore environment proxy settings (trust_env=False).
        self._node = node
        self._http_client_factory = http_client_factory or (lambda: httpx.Client(timeout=15.0, trust_env=False))
        self._async_http_client_factory = async_http_client_factory or (
            lambda: httpx.AsyncClient(timeout=15.0, trust_env=False)
        )

    async def start_bot(self, *, bot: BotInstance, start_payload: Dict[str, Any]) -> Dict[str, Any]:
        """Start *bot* on the edge node and return the validated status dict."""
        # NOTE(review): bot.id is interpolated unquoted here, unlike the
        # quote(...) used for workspace URLs below — confirm ids are URL-safe.
        payload = await self._async_request_json(
            "POST",
            f"/api/edge/bots/{bot.id}/start",
            json=EdgeStartBotRequest.model_validate(start_payload).model_dump(),
        )
        return EdgeStatusResponse.model_validate(payload).model_dump()

    def stop_bot(self, *, bot: BotInstance) -> Dict[str, Any]:
        """Stop *bot* on the edge node and return the validated status dict."""
        payload = self._request_json("POST", f"/api/edge/bots/{bot.id}/stop")
        return EdgeStatusResponse.model_validate(payload).model_dump()

    def deliver_command(self, *, bot_id: str, command: str, media: Optional[List[str]] = None) -> Optional[str]:
        """Send *command* (with optional media paths) to the bot; always returns None."""
        self._request_json(
            "POST",
            f"/api/edge/bots/{bot_id}/command",
            json=EdgeCommandRequest(command=command, media=list(media or [])).model_dump(),
        )
        return None

    def get_recent_logs(self, *, bot_id: str, tail: int = 300) -> List[str]:
        """Fetch the last *tail* log lines (at least 1) for the bot."""
        payload = self._request_json(
            "GET",
            f"/api/edge/bots/{bot_id}/logs",
            params={"tail": max(1, int(tail or 300))},
        )
        return EdgeLogsResponse.model_validate(payload).logs

    def ensure_monitor(self, *, bot_id: str) -> bool:
        """Ask the edge node to ensure a monitor exists; True when ensured."""
        payload = self._request_json("POST", f"/api/edge/bots/{bot_id}/monitor/ensure")
        return bool(EdgeMonitorEnsureResponse.model_validate(payload).ensured)

    def get_monitor_packets(self, *, bot_id: str, after_seq: int = 0, limit: int = 200) -> List[Dict[str, Any]]:
        """Fetch monitor packets after sequence *after_seq*, as plain dicts."""
        payload = self._request_json(
            "GET",
            f"/api/edge/bots/{bot_id}/monitor/packets",
            params={"after_seq": max(0, int(after_seq or 0)), "limit": max(1, int(limit or 200))},
        )
        parsed = EdgeMonitorPacketsResponse.model_validate(payload)
        rows: List[Dict[str, Any]] = []
        for item in parsed.packets or []:
            rows.append(item.model_dump())
        return rows

    def get_runtime_status(self, *, bot_id: str) -> str:
        """Return the bot's runtime status uppercased; 'STOPPED' when absent."""
        payload = self._request_json("GET", f"/api/edge/bots/{bot_id}/runtime/status")
        return str(payload.get("status") or "STOPPED").upper()

    def get_resource_snapshot(self, *, bot_id: str) -> Dict[str, Any]:
        """Return the raw per-bot resource snapshot payload from the edge node."""
        return self._request_json("GET", f"/api/edge/bots/{bot_id}/resources")

    def get_node_resources(self) -> Dict[str, Any]:
        """Return validated node-level resource usage."""
        payload = self._request_json("GET", "/api/edge/node/resources")
        return EdgeNodeResourcesResponse.model_validate(payload).model_dump()

    def get_node_self(self) -> Dict[str, Any]:
        """Return the edge node's self-description payload."""
        payload = self._request_json("GET", "/api/edge/node/self")
        return EdgeNodeSelfResponse.model_validate(payload).model_dump()

    def heartbeat_node(self) -> Dict[str, Any]:
        """POST a heartbeat to the edge node and return the validated response."""
        payload = self._request_json("POST", "/api/edge/node/heartbeat")
        return EdgeNodeHeartbeatResponse.model_validate(payload).model_dump()

    def preflight_native(self, *, native_command: Optional[str] = None, native_workdir: Optional[str] = None) -> Dict[str, Any]:
        """Run a native-runtime preflight check; blank command/workdir become None."""
        payload = self._request_json(
            "POST",
            "/api/edge/runtime/native/preflight",
            json=EdgeNativePreflightRequest(
                native_command=str(native_command or "").strip() or None,
                native_workdir=str(native_workdir or "").strip() or None,
            ).model_dump(),
        )
        return EdgeNativePreflightResponse.model_validate(payload).model_dump()

    def read_state(
        self,
        *,
        bot_id: str,
        state_key: str,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Read the named state document for a bot, optionally from an explicit workspace root."""
        params: Dict[str, Any] = {}
        if workspace_root:
            params["workspace_root"] = str(workspace_root).strip()
        payload = self._request_json(
            "GET",
            f"/api/edge/bots/{bot_id}/state/{state_key}",
            params=params or None,
        )
        return EdgeStateResponse.model_validate(payload).model_dump()

    def write_state(
        self,
        *,
        bot_id: str,
        state_key: str,
        data: Dict[str, Any],
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Replace the named state document with *data* and return the stored state."""
        payload = self._request_json(
            "PUT",
            f"/api/edge/bots/{bot_id}/state/{state_key}",
            json=EdgeStateWriteRequest(
                data=dict(data or {}),
                workspace_root=str(workspace_root or "").strip() or None,
            ).model_dump(),
        )
        return EdgeStateResponse.model_validate(payload).model_dump()

    def sync_bot_workspace(
        self,
        *,
        bot_id: str,
        channels_override: Optional[List[Dict[str, Any]]] = None,
        global_delivery_override: Optional[Dict[str, Any]] = None,
        runtime_overrides: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Trigger a workspace sync on the edge node with optional config overrides."""
        self._request_json(
            "POST",
            f"/api/edge/bots/{bot_id}/workspace/sync",
            json=EdgeWorkspaceSyncRequest(
                channels_override=channels_override,
                global_delivery_override=global_delivery_override,
                runtime_overrides=runtime_overrides,
            ).model_dump(),
        )

    def purge_workspace(self, *, bot_id: str, workspace_root: Optional[str] = None) -> Dict[str, Any]:
        """Purge the bot's workspace on the edge node; returns the status payload."""
        params: Dict[str, Any] = {}
        if workspace_root:
            params["workspace_root"] = str(workspace_root).strip()
        payload = self._request_json(
            "POST",
            f"/api/edge/bots/{bot_id}/workspace/purge",
            params=params or None,
        )
        return EdgeStatusResponse.model_validate(payload).model_dump()

    def list_tree(
        self,
        *,
        bot_id: str,
        path: Optional[str] = None,
        recursive: bool = False,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """List workspace files under *path* (optionally recursive); raw payload."""
        params: Dict[str, Any] = {"recursive": bool(recursive)}
        if path:
            params["path"] = path
        if workspace_root:
            params["workspace_root"] = str(workspace_root).strip()
        return self._request_json("GET", f"/api/edge/bots/{bot_id}/workspace/tree", params=params)

    def read_file(
        self,
        *,
        bot_id: str,
        path: str,
        max_bytes: int = 200000,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Read a workspace file; max_bytes is clamped to at least 4096."""
        params: Dict[str, Any] = {"path": path, "max_bytes": max(4096, int(max_bytes or 200000))}
        if workspace_root:
            params["workspace_root"] = str(workspace_root).strip()
        return self._request_json(
            "GET",
            f"/api/edge/bots/{bot_id}/workspace/file",
            params=params,
        )

    def write_markdown(
        self,
        *,
        bot_id: str,
        path: str,
        content: str,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Write markdown *content* to a workspace file; returns the raw payload."""
        params: Dict[str, Any] = {"path": path}
        if workspace_root:
            params["workspace_root"] = str(workspace_root).strip()
        return self._request_json(
            "PUT",
            f"/api/edge/bots/{bot_id}/workspace/file/markdown",
            params=params,
            json=EdgeMarkdownWriteRequest(content=str(content or "")).model_dump(),
        )

    async def upload_files(
        self,
        *,
        bot_id: str,
        files: List[UploadFile],
        path: Optional[str] = None,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Stream multipart *files* to the edge node's workspace upload endpoint.

        Each upload is rewound to offset 0 before sending and always closed
        afterwards, even on transport failure.
        """
        base_url = self._require_base_url()
        multipart_files = []
        response: httpx.Response | None = None
        try:
            async with self._async_http_client_factory() as client:
                for upload in files:
                    # Rewind in case the file was already read by a validator.
                    await upload.seek(0)
                    multipart_files.append(
                        (
                            "files",
                            (
                                upload.filename or "upload.bin",
                                upload.file,
                                upload.content_type or "application/octet-stream",
                            ),
                        )
                    )
                response = await client.request(
                    method="POST",
                    url=f"{base_url}/api/edge/bots/{quote(bot_id, safe='')}/workspace/upload",
                    headers=self._headers(),
                    params=self._workspace_upload_params(path=path, workspace_root=workspace_root),
                    files=multipart_files,
                )
        except httpx.RequestError as exc:
            raise edge_transport_http_exception(exc, node=self._node) from exc
        finally:
            for upload in files:
                await upload.close()
        if response is None:
            raise HTTPException(status_code=502, detail="dashboard-edge upload request failed before receiving a response")
        return self._parse_json_response(response)

    def serve_file(
        self,
        *,
        bot_id: str,
        path: str,
        download: bool,
        request: Request,
        public: bool = False,
        redirect_html_to_raw: bool = False,
        workspace_root: Optional[str] = None,
    ) -> Response:
        """Proxy a workspace file from the edge node back to the caller.

        HTML views may be redirected to the dashboard's raw URL instead of
        proxied; Range headers are forwarded for non-download requests.
        """
        media_type, _ = mimetypes.guess_type(path)
        if redirect_html_to_raw and not download and str(media_type or "").startswith("text/html"):
            raw_url = self._build_dashboard_raw_url(bot_id=bot_id, path=path, public=public)
            if raw_url:
                return RedirectResponse(url=raw_url, status_code=307)
        base_url = self._require_base_url()
        url = self._build_edge_file_url(
            bot_id=bot_id,
            path=path,
            download=download,
            raw=not redirect_html_to_raw,
            workspace_root=workspace_root,
        )
        headers = self._headers()
        # Forward Range only for inline viewing; downloads fetch the whole file.
        range_header = request.headers.get("range", "").strip()
        if range_header and not download:
            headers["range"] = range_header
        try:
            with self._http_client_factory() as client:
                response = client.request(
                    method="GET",
                    url=f"{base_url}{url}",
                    headers=headers,
                )
        except httpx.RequestError as exc:
            raise edge_transport_http_exception(exc, node=self._node) from exc
        self._raise_for_status(response)
        return Response(
            content=response.content,
            status_code=response.status_code,
            media_type=response.headers.get("content-type") or "application/octet-stream",
            headers=self._response_proxy_headers(response),
        )

    def _request_json(
        self,
        method: str,
        path: str,
        *,
        params: Optional[Dict[str, Any]] = None,
        json: Optional[Dict[str, Any]] = None,
    ) -> Dict[str, Any]:
        """Synchronous JSON round-trip against the edge node (fresh client per call)."""
        base_url = self._require_base_url()
        try:
            with self._http_client_factory() as client:
                response = client.request(
                    method=method.upper(),
                    url=f"{base_url}{path}",
                    headers=self._headers(),
                    params=params,
                    json=json,
                )
        except httpx.RequestError as exc:
            raise edge_transport_http_exception(exc, node=self._node) from exc
        return self._parse_json_response(response)

    async def _async_request_json(
        self,
        method: str,
        path: str,
        *,
        params: Optional[Dict[str, Any]] = None,
        json: Optional[Dict[str, Any]] = None,
    ) -> Dict[str, Any]:
        """Async counterpart of ``_request_json``."""
        base_url = self._require_base_url()
        try:
            async with self._async_http_client_factory() as client:
                response = await client.request(
                    method=method.upper(),
                    url=f"{base_url}{path}",
                    headers=self._headers(),
                    params=params,
                    json=json,
                )
        except httpx.RequestError as exc:
            raise edge_transport_http_exception(exc, node=self._node) from exc
        return self._parse_json_response(response)

    def _headers(self) -> Dict[str, str]:
        """Build request headers, adding the edge auth token when configured."""
        headers = {"accept": "application/json"}
        token = str(self._node.auth_token or "").strip()
        if token:
            headers[EDGE_AUTH_HEADER] = token
        return headers

    def _require_base_url(self) -> str:
        """Return the node's base URL without a trailing slash; raise 501 when unset."""
        base_url = str(self._node.base_url or "").strip().rstrip("/")
        if not base_url:
            raise self._not_implemented("connect to node")
        return base_url

    @staticmethod
    def _raise_for_status(response: httpx.Response) -> None:
        """Convert a non-2xx edge response into a 502 with truncated detail."""
        try:
            response.raise_for_status()
        except httpx.HTTPStatusError as exc:
            detail = exc.response.text.strip() or str(exc)
            raise HTTPException(status_code=502, detail=f"dashboard-edge request failed: {detail[:400]}") from exc

    @classmethod
    def _parse_json_response(cls, response: httpx.Response) -> Dict[str, Any]:
        """Validate status, then parse the body; 502 on invalid or non-dict JSON."""
        cls._raise_for_status(response)
        try:
            payload = response.json()
        except Exception as exc:
            raise HTTPException(status_code=502, detail="dashboard-edge returned invalid JSON") from exc
        if not isinstance(payload, dict):
            raise HTTPException(status_code=502, detail="dashboard-edge returned unexpected payload")
        return payload

    @staticmethod
    def _build_dashboard_raw_url(bot_id: str, path: str, public: bool) -> str:
        """Build the dashboard-side raw-file URL for *path*; '' when path is empty."""
        normalized = "/".join(part for part in str(path or "").strip().split("/") if part)
        if not normalized:
            return ""
        prefix = "/public" if public else "/api"
        return f"{prefix}/bots/{quote(bot_id, safe='')}/workspace/raw/{quote(normalized, safe='/')}"

    @staticmethod
    def _build_edge_file_url(
        *,
        bot_id: str,
        path: str,
        download: bool,
        raw: bool,
        workspace_root: Optional[str] = None,
    ) -> str:
        """Build the edge-side file URL (raw vs download variants), URL-encoding parts."""
        workspace_root_qs = ""
        normalized_workspace_root = str(workspace_root or "").strip()
        if normalized_workspace_root:
            workspace_root_qs = f"&workspace_root={quote(normalized_workspace_root, safe='/')}"
        if raw:
            normalized = "/".join(part for part in str(path or "").strip().split("/") if part)
            if not normalized:
                raise HTTPException(status_code=400, detail="invalid workspace path")
            return (
                f"/api/edge/bots/{quote(bot_id, safe='')}/workspace/raw/"
                f"{quote(normalized, safe='/')}?download={'true' if download else 'false'}{workspace_root_qs}"
            )
        return (
            f"/api/edge/bots/{quote(bot_id, safe='')}/workspace/download"
            f"?path={quote(str(path or ''), safe='/')}&download={'true' if download else 'false'}{workspace_root_qs}"
        )

    @staticmethod
    def _workspace_upload_params(*, path: Optional[str], workspace_root: Optional[str]) -> Optional[Dict[str, Any]]:
        """Assemble query params for uploads; None when both inputs are empty."""
        params: Dict[str, Any] = {}
        if path:
            params["path"] = path
        normalized_workspace_root = str(workspace_root or "").strip()
        if normalized_workspace_root:
            params["workspace_root"] = normalized_workspace_root
        return params or None

    @staticmethod
    def _response_proxy_headers(response: httpx.Response) -> Dict[str, str]:
        """Keep only the safe-to-forward headers from the edge response."""
        kept: Dict[str, str] = {}
        for name in ("accept-ranges", "content-disposition", "content-length", "content-range", "cache-control"):
            value = response.headers.get(name)
            if value:
                kept[name] = value
        return kept

    def _not_implemented(self, capability: str) -> HTTPException:
        """Build a 501 for a capability this node cannot serve yet."""
        node_label = self._node.display_name or self._node.node_id
        return HTTPException(status_code=501, detail=f"dashboard-edge {capability} is not implemented yet for node {node_label}")

View File

@ -37,6 +37,7 @@ class BotConfigManager:
"qwen": "dashscope",
"aliyun-qwen": "dashscope",
"moonshot": "kimi",
"vllm": "openai",
# Xunfei Spark provides OpenAI-compatible endpoint.
"xunfei": "openai",
"iflytek": "openai",
@ -71,6 +72,11 @@ class BotConfigManager:
existing_tools = existing_config.get("tools")
tools_cfg: Dict[str, Any] = dict(existing_tools) if isinstance(existing_tools, dict) else {}
native_sandbox_mode = self._normalize_native_sandbox_mode(bot_data.get("native_sandbox_mode"))
if native_sandbox_mode == "workspace":
tools_cfg["restrictToWorkspace"] = True
elif native_sandbox_mode == "full_access":
tools_cfg["restrictToWorkspace"] = False
if "mcp_servers" in bot_data:
mcp_servers = bot_data.get("mcp_servers")
if isinstance(mcp_servers, dict):
@ -249,3 +255,12 @@ class BotConfigManager:
if not rows:
return ["*"]
return rows
@staticmethod
def _normalize_native_sandbox_mode(raw_value: Any) -> str:
text = str(raw_value or "").strip().lower()
if text in {"workspace", "sandbox", "strict"}:
return "workspace"
if text in {"full_access", "full-access", "danger-full-access", "escape"}:
return "full_access"
return "inherit"

View File

@ -20,18 +20,15 @@ from models import topic as _topic_models # noqa: F401
_engine_kwargs = {
"echo": DATABASE_ECHO,
}
if DATABASE_ENGINE == "sqlite":
_engine_kwargs["connect_args"] = {"check_same_thread": False}
else:
_engine_kwargs.update(
{
"pool_pre_ping": True,
"pool_size": DATABASE_POOL_SIZE,
"max_overflow": DATABASE_MAX_OVERFLOW,
"pool_timeout": DATABASE_POOL_TIMEOUT,
"pool_recycle": DATABASE_POOL_RECYCLE,
}
)
_engine_kwargs.update(
{
"pool_pre_ping": True,
"pool_size": DATABASE_POOL_SIZE,
"max_overflow": DATABASE_MAX_OVERFLOW,
"pool_timeout": DATABASE_POOL_TIMEOUT,
"pool_recycle": DATABASE_POOL_RECYCLE,
}
)
engine = create_engine(DATABASE_URL, **_engine_kwargs)
@ -41,6 +38,7 @@ BOT_IMAGE_TABLE = "bot_image"
BOT_REQUEST_USAGE_TABLE = "bot_request_usage"
BOT_ACTIVITY_EVENT_TABLE = "bot_activity_event"
SYS_SETTING_TABLE = "sys_setting"
MANAGED_NODE_TABLE = "managed_node"
POSTGRES_MIGRATION_LOCK_KEY = 2026031801
MYSQL_MIGRATION_LOCK_NAME = "dashboard_nanobot_schema_migration"
LEGACY_TABLE_PAIRS = [
@ -266,30 +264,41 @@ def _ensure_botinstance_columns() -> None:
dialect = engine.dialect.name
required_columns = {
"current_state": {
"sqlite": "TEXT DEFAULT 'IDLE'",
"postgresql": "TEXT DEFAULT 'IDLE'",
"mysql": "VARCHAR(64) DEFAULT 'IDLE'",
},
"last_action": {
"sqlite": "TEXT",
"postgresql": "TEXT",
"mysql": "LONGTEXT",
},
"image_tag": {
"sqlite": "TEXT DEFAULT 'nanobot-base:v0.1.4'",
"postgresql": "TEXT DEFAULT 'nanobot-base:v0.1.4'",
"mysql": "VARCHAR(255) DEFAULT 'nanobot-base:v0.1.4'",
},
"access_password": {
"sqlite": "TEXT DEFAULT ''",
"postgresql": "TEXT DEFAULT ''",
"mysql": "VARCHAR(255) DEFAULT ''",
},
"enabled": {
"sqlite": "INTEGER NOT NULL DEFAULT 1",
"postgresql": "BOOLEAN NOT NULL DEFAULT TRUE",
"mysql": "BOOLEAN NOT NULL DEFAULT TRUE",
},
"node_id": {
"postgresql": "TEXT NOT NULL DEFAULT 'local'",
"mysql": "VARCHAR(120) NOT NULL DEFAULT 'local'",
},
"transport_kind": {
"postgresql": "TEXT NOT NULL DEFAULT 'direct'",
"mysql": "VARCHAR(32) NOT NULL DEFAULT 'direct'",
},
"runtime_kind": {
"postgresql": "TEXT NOT NULL DEFAULT 'docker'",
"mysql": "VARCHAR(32) NOT NULL DEFAULT 'docker'",
},
"core_adapter": {
"postgresql": "TEXT NOT NULL DEFAULT 'nanobot'",
"mysql": "VARCHAR(64) NOT NULL DEFAULT 'nanobot'",
},
}
inspector = inspect(engine)
@ -304,13 +313,36 @@ def _ensure_botinstance_columns() -> None:
for col, ddl_map in required_columns.items():
if col in existing:
continue
ddl = ddl_map.get(dialect) or ddl_map.get("sqlite")
ddl = ddl_map.get(dialect) or ddl_map.get("postgresql")
conn.execute(text(f"ALTER TABLE {BOT_INSTANCE_TABLE} ADD COLUMN {col} {ddl}"))
if "enabled" in existing:
if dialect == "sqlite":
conn.execute(text(f"UPDATE {BOT_INSTANCE_TABLE} SET enabled = 1 WHERE enabled IS NULL"))
else:
conn.execute(text(f"UPDATE {BOT_INSTANCE_TABLE} SET enabled = TRUE WHERE enabled IS NULL"))
conn.execute(text(f"UPDATE {BOT_INSTANCE_TABLE} SET enabled = TRUE WHERE enabled IS NULL"))
conn.commit()
def _ensure_botinstance_indexes() -> None:
    """Create any missing secondary indexes on the bot_instance table.

    Idempotent: existing index names are read via the inspector and only the
    absent ones are created.
    """
    required_indexes = [
        ("idx_bot_instance_enabled", BOT_INSTANCE_TABLE, ["enabled"]),
        ("idx_bot_instance_docker_status", BOT_INSTANCE_TABLE, ["docker_status"]),
        ("idx_bot_instance_node_id", BOT_INSTANCE_TABLE, ["node_id"]),
        ("idx_bot_instance_transport_kind", BOT_INSTANCE_TABLE, ["transport_kind"]),
        ("idx_bot_instance_runtime_kind", BOT_INSTANCE_TABLE, ["runtime_kind"]),
        ("idx_bot_instance_core_adapter", BOT_INSTANCE_TABLE, ["core_adapter"]),
        ("idx_bot_instance_node_transport_runtime", BOT_INSTANCE_TABLE, ["node_id", "transport_kind", "runtime_kind"]),
    ]
    inspector = inspect(engine)
    with engine.connect() as conn:
        if not inspector.has_table(BOT_INSTANCE_TABLE):
            return
        existing_names = set()
        for index_info in inspector.get_indexes(BOT_INSTANCE_TABLE):
            index_name = index_info.get("name")
            if index_name:
                existing_names.add(str(index_name))
        for index_name, table_name, columns in required_indexes:
            if index_name in existing_names:
                continue
            column_list = ", ".join(columns)
            conn.execute(text(f"CREATE INDEX {index_name} ON {table_name} ({column_list})"))
        conn.commit()
@ -350,8 +382,6 @@ def _drop_legacy_botinstance_columns() -> None:
try:
if engine.dialect.name == "mysql":
conn.execute(text(f"ALTER TABLE {BOT_INSTANCE_TABLE} DROP COLUMN `{col}`"))
elif engine.dialect.name == "sqlite":
conn.execute(text(f'ALTER TABLE {BOT_INSTANCE_TABLE} DROP COLUMN "{col}"'))
else:
conn.execute(text(f'ALTER TABLE {BOT_INSTANCE_TABLE} DROP COLUMN IF EXISTS "{col}"'))
except Exception:
@ -360,24 +390,6 @@ def _drop_legacy_botinstance_columns() -> None:
conn.commit()
def _ensure_botmessage_columns() -> None:
if engine.dialect.name != "sqlite":
return
required_columns = {
"media_json": "TEXT",
"feedback": "TEXT",
"feedback_at": "DATETIME",
}
with engine.connect() as conn:
existing_rows = conn.execute(text(f"PRAGMA table_info({BOT_MESSAGE_TABLE})")).fetchall()
existing = {str(row[1]) for row in existing_rows}
for col, ddl in required_columns.items():
if col in existing:
continue
conn.execute(text(f"ALTER TABLE {BOT_MESSAGE_TABLE} ADD COLUMN {col} {ddl}"))
conn.commit()
def _drop_legacy_skill_tables() -> None:
"""Drop deprecated skill registry tables (moved to workspace filesystem mode)."""
with engine.connect() as conn:
@ -390,32 +402,26 @@ def _ensure_sys_setting_columns() -> None:
dialect = engine.dialect.name
required_columns = {
"name": {
"sqlite": "TEXT NOT NULL DEFAULT ''",
"postgresql": "TEXT NOT NULL DEFAULT ''",
"mysql": "VARCHAR(200) NOT NULL DEFAULT ''",
},
"category": {
"sqlite": "TEXT NOT NULL DEFAULT 'general'",
"postgresql": "TEXT NOT NULL DEFAULT 'general'",
"mysql": "VARCHAR(64) NOT NULL DEFAULT 'general'",
},
"description": {
"sqlite": "TEXT NOT NULL DEFAULT ''",
"postgresql": "TEXT NOT NULL DEFAULT ''",
"mysql": "LONGTEXT",
},
"value_type": {
"sqlite": "TEXT NOT NULL DEFAULT 'json'",
"postgresql": "TEXT NOT NULL DEFAULT 'json'",
"mysql": "VARCHAR(32) NOT NULL DEFAULT 'json'",
},
"is_public": {
"sqlite": "INTEGER NOT NULL DEFAULT 0",
"postgresql": "BOOLEAN NOT NULL DEFAULT FALSE",
"mysql": "BOOLEAN NOT NULL DEFAULT FALSE",
},
"sort_order": {
"sqlite": "INTEGER NOT NULL DEFAULT 100",
"postgresql": "INTEGER NOT NULL DEFAULT 100",
"mysql": "INTEGER NOT NULL DEFAULT 100",
},
@ -432,7 +438,7 @@ def _ensure_sys_setting_columns() -> None:
for col, ddl_map in required_columns.items():
if col in existing:
continue
ddl = ddl_map.get(dialect) or ddl_map.get("sqlite")
ddl = ddl_map.get(dialect) or ddl_map.get("postgresql")
conn.execute(text(f"ALTER TABLE {SYS_SETTING_TABLE} ADD COLUMN {col} {ddl}"))
conn.commit()
@ -441,17 +447,14 @@ def _ensure_bot_request_usage_columns() -> None:
dialect = engine.dialect.name
required_columns = {
"message_id": {
"sqlite": "INTEGER",
"postgresql": "INTEGER",
"mysql": "INTEGER",
},
"provider": {
"sqlite": "TEXT",
"postgresql": "TEXT",
"mysql": "VARCHAR(120)",
},
"model": {
"sqlite": "TEXT",
"postgresql": "TEXT",
"mysql": "VARCHAR(255)",
},
@ -468,69 +471,105 @@ def _ensure_bot_request_usage_columns() -> None:
for col, ddl_map in required_columns.items():
if col in existing:
continue
ddl = ddl_map.get(dialect) or ddl_map.get("sqlite")
ddl = ddl_map.get(dialect) or ddl_map.get("postgresql")
conn.execute(text(f"ALTER TABLE {BOT_REQUEST_USAGE_TABLE} ADD COLUMN {col} {ddl}"))
conn.commit()
def _ensure_topic_tables_sqlite() -> None:
if engine.dialect.name != "sqlite":
def _ensure_managed_node_columns() -> None:
dialect = engine.dialect.name
required_columns = {
"display_name": {
"postgresql": "TEXT NOT NULL DEFAULT ''",
"mysql": "VARCHAR(200) NOT NULL DEFAULT ''",
},
"base_url": {
"postgresql": "TEXT NOT NULL DEFAULT ''",
"mysql": "TEXT NOT NULL",
},
"enabled": {
"postgresql": "BOOLEAN NOT NULL DEFAULT TRUE",
"mysql": "BOOLEAN NOT NULL DEFAULT TRUE",
},
"auth_token": {
"postgresql": "TEXT NOT NULL DEFAULT ''",
"mysql": "TEXT NOT NULL",
},
"transport_kind": {
"postgresql": "TEXT NOT NULL DEFAULT 'direct'",
"mysql": "VARCHAR(32) NOT NULL DEFAULT 'direct'",
},
"runtime_kind": {
"postgresql": "TEXT NOT NULL DEFAULT 'docker'",
"mysql": "VARCHAR(32) NOT NULL DEFAULT 'docker'",
},
"core_adapter": {
"postgresql": "TEXT NOT NULL DEFAULT 'nanobot'",
"mysql": "VARCHAR(64) NOT NULL DEFAULT 'nanobot'",
},
"metadata_json": {
"postgresql": "TEXT NOT NULL DEFAULT '{}'",
"mysql": "LONGTEXT",
},
"capabilities_json": {
"postgresql": "TEXT NOT NULL DEFAULT '{}'",
"mysql": "LONGTEXT",
},
"resources_json": {
"postgresql": "TEXT NOT NULL DEFAULT '{}'",
"mysql": "LONGTEXT",
},
"last_seen_at": {
"postgresql": "TIMESTAMP",
"mysql": "DATETIME",
},
"created_at": {
"postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
"mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
},
"updated_at": {
"postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
"mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
},
}
inspector = inspect(engine)
if not inspector.has_table(MANAGED_NODE_TABLE):
return
with engine.connect() as conn:
conn.execute(
text(
"""
CREATE TABLE IF NOT EXISTS topic_topic (
id INTEGER PRIMARY KEY AUTOINCREMENT,
bot_id TEXT NOT NULL,
topic_key TEXT NOT NULL,
name TEXT NOT NULL DEFAULT '',
description TEXT NOT NULL DEFAULT '',
is_active INTEGER NOT NULL DEFAULT 1,
is_default_fallback INTEGER NOT NULL DEFAULT 0,
routing_json TEXT NOT NULL DEFAULT '{}',
view_schema_json TEXT NOT NULL DEFAULT '{}',
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY(bot_id) REFERENCES bot_instance(id)
)
"""
)
)
conn.execute(
text(
"""
CREATE TABLE IF NOT EXISTS topic_item (
id INTEGER PRIMARY KEY AUTOINCREMENT,
bot_id TEXT NOT NULL,
topic_key TEXT NOT NULL,
title TEXT NOT NULL DEFAULT '',
content TEXT NOT NULL DEFAULT '',
level TEXT NOT NULL DEFAULT 'info',
tags_json TEXT,
view_json TEXT,
source TEXT NOT NULL DEFAULT 'mcp',
dedupe_key TEXT,
is_read INTEGER NOT NULL DEFAULT 0,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY(bot_id) REFERENCES bot_instance(id)
)
"""
)
)
existing = {
str(row.get("name"))
for row in inspect(conn).get_columns(MANAGED_NODE_TABLE)
if row.get("name")
}
for col, ddl_map in required_columns.items():
if col in existing:
continue
ddl = ddl_map.get(dialect) or ddl_map.get("postgresql")
conn.execute(text(f"ALTER TABLE {MANAGED_NODE_TABLE} ADD COLUMN {col} {ddl}"))
conn.commit()
conn.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS uq_topic_topic_bot_topic_key ON topic_topic(bot_id, topic_key)"))
conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_topic_bot_id ON topic_topic(bot_id)"))
conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_topic_topic_key ON topic_topic(topic_key)"))
conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_topic_bot_fallback ON topic_topic(bot_id, is_default_fallback)"))
conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_bot_id ON topic_item(bot_id)"))
conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_topic_key ON topic_item(topic_key)"))
conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_level ON topic_item(level)"))
conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_source ON topic_item(source)"))
conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_is_read ON topic_item(is_read)"))
conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_created_at ON topic_item(created_at)"))
conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_bot_topic_created_at ON topic_item(bot_id, topic_key, created_at)"))
conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_bot_dedupe ON topic_item(bot_id, dedupe_key)"))
def _ensure_managed_node_indexes() -> None:
    """Create any missing secondary indexes on the managed_node table.

    Idempotent: the inspector is consulted for existing index names and only
    absent indexes are created.
    """
    required_indexes = [
        ("idx_managed_node_enabled", MANAGED_NODE_TABLE, ["enabled"]),
        ("idx_managed_node_transport_kind", MANAGED_NODE_TABLE, ["transport_kind"]),
        ("idx_managed_node_runtime_kind", MANAGED_NODE_TABLE, ["runtime_kind"]),
        ("idx_managed_node_core_adapter", MANAGED_NODE_TABLE, ["core_adapter"]),
        ("idx_managed_node_last_seen_at", MANAGED_NODE_TABLE, ["last_seen_at"]),
    ]
    inspector = inspect(engine)
    with engine.connect() as conn:
        if not inspector.has_table(MANAGED_NODE_TABLE):
            return
        existing_names = set()
        for index_info in inspector.get_indexes(MANAGED_NODE_TABLE):
            index_name = index_info.get("name")
            if index_name:
                existing_names.add(str(index_name))
        for index_name, table_name, columns in required_indexes:
            if index_name in existing_names:
                continue
            column_list = ", ".join(columns)
            conn.execute(text(f"CREATE INDEX {index_name} ON {table_name} ({column_list})"))
        conn.commit()
@ -539,84 +578,68 @@ def _ensure_topic_columns() -> None:
required_columns = {
"topic_topic": {
"name": {
"sqlite": "TEXT NOT NULL DEFAULT ''",
"postgresql": "TEXT NOT NULL DEFAULT ''",
"mysql": "VARCHAR(255) NOT NULL DEFAULT ''",
},
"description": {
"sqlite": "TEXT NOT NULL DEFAULT ''",
"postgresql": "TEXT NOT NULL DEFAULT ''",
"mysql": "LONGTEXT",
},
"is_active": {
"sqlite": "INTEGER NOT NULL DEFAULT 1",
"postgresql": "BOOLEAN NOT NULL DEFAULT TRUE",
"mysql": "BOOLEAN NOT NULL DEFAULT TRUE",
},
"is_default_fallback": {
"sqlite": "INTEGER NOT NULL DEFAULT 0",
"postgresql": "BOOLEAN NOT NULL DEFAULT FALSE",
"mysql": "BOOLEAN NOT NULL DEFAULT FALSE",
},
"routing_json": {
"sqlite": "TEXT NOT NULL DEFAULT '{}'",
"postgresql": "TEXT NOT NULL DEFAULT '{}'",
"mysql": "LONGTEXT",
},
"view_schema_json": {
"sqlite": "TEXT NOT NULL DEFAULT '{}'",
"postgresql": "TEXT NOT NULL DEFAULT '{}'",
"mysql": "LONGTEXT",
},
"created_at": {
"sqlite": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
"postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
"mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
},
"updated_at": {
"sqlite": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
"postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
"mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
},
},
"topic_item": {
"title": {
"sqlite": "TEXT NOT NULL DEFAULT ''",
"postgresql": "TEXT NOT NULL DEFAULT ''",
"mysql": "VARCHAR(2000) NOT NULL DEFAULT ''",
},
"level": {
"sqlite": "TEXT NOT NULL DEFAULT 'info'",
"postgresql": "TEXT NOT NULL DEFAULT 'info'",
"mysql": "VARCHAR(32) NOT NULL DEFAULT 'info'",
},
"tags_json": {
"sqlite": "TEXT",
"postgresql": "TEXT",
"mysql": "LONGTEXT",
},
"view_json": {
"sqlite": "TEXT",
"postgresql": "TEXT",
"mysql": "LONGTEXT",
},
"source": {
"sqlite": "TEXT NOT NULL DEFAULT 'mcp'",
"postgresql": "TEXT NOT NULL DEFAULT 'mcp'",
"mysql": "VARCHAR(64) NOT NULL DEFAULT 'mcp'",
},
"dedupe_key": {
"sqlite": "TEXT",
"postgresql": "TEXT",
"mysql": "VARCHAR(200)",
},
"is_read": {
"sqlite": "INTEGER NOT NULL DEFAULT 0",
"postgresql": "BOOLEAN NOT NULL DEFAULT FALSE",
"mysql": "BOOLEAN NOT NULL DEFAULT FALSE",
},
"created_at": {
"sqlite": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
"postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
"mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
},
@ -636,7 +659,7 @@ def _ensure_topic_columns() -> None:
for col, ddl_map in cols.items():
if col in existing:
continue
ddl = ddl_map.get(dialect) or ddl_map.get("sqlite")
ddl = ddl_map.get(dialect) or ddl_map.get("postgresql")
conn.execute(text(f"ALTER TABLE {table_name} ADD COLUMN {col} {ddl}"))
conn.commit()
@ -783,10 +806,11 @@ def init_database() -> None:
_drop_legacy_skill_tables()
_ensure_sys_setting_columns()
_ensure_bot_request_usage_columns()
_ensure_managed_node_columns()
_ensure_botinstance_columns()
_ensure_botinstance_indexes()
_ensure_managed_node_indexes()
_drop_legacy_botinstance_columns()
_ensure_botmessage_columns()
_ensure_topic_tables_sqlite()
_repair_postgres_topic_foreign_keys()
_ensure_topic_columns()
_ensure_topic_indexes()

View File

@ -1,6 +1,7 @@
import json
import os
import re
import shutil
from pathlib import Path
from typing import Final
from urllib.parse import urlsplit, urlunsplit
@ -119,21 +120,11 @@ BOTS_WORKSPACE_ROOT: Final[str] = _normalize_dir_path(
def _normalize_database_url(url: str) -> str:
raw = str(url or "").strip()
prefix = "sqlite:///"
if not raw.startswith(prefix):
return raw
path_part = raw[len(prefix) :]
if not path_part or path_part.startswith("/"):
return raw
abs_path = (BACKEND_ROOT / path_part).resolve()
return f"{prefix}{abs_path.as_posix()}"
return str(url or "").strip()
def _database_engine(url: str) -> str:
raw = str(url or "").strip().lower()
if raw.startswith("sqlite"):
return "sqlite"
if raw.startswith("postgresql"):
return "postgresql"
if raw.startswith("mysql"):
@ -147,7 +138,7 @@ def _database_engine(url: str) -> str:
def _mask_database_url(url: str) -> str:
raw = str(url or "").strip()
if not raw or raw.startswith("sqlite"):
if not raw:
return raw
try:
parsed = urlsplit(raw)
@ -168,10 +159,12 @@ def _mask_database_url(url: str) -> str:
_db_env = str(os.getenv("DATABASE_URL") or "").strip()
DATABASE_URL: Final[str] = _normalize_database_url(
_db_env if _db_env else f"sqlite:///{Path(DATA_ROOT) / 'nanobot_dashboard.db'}"
)
if not _db_env:
raise RuntimeError("DATABASE_URL is required")
DATABASE_URL: Final[str] = _normalize_database_url(_db_env)
DATABASE_ENGINE: Final[str] = _database_engine(DATABASE_URL)
if DATABASE_ENGINE not in {"postgresql", "mysql"}:
raise RuntimeError(f"Unsupported DATABASE_URL engine: {DATABASE_ENGINE}")
DATABASE_URL_DISPLAY: Final[str] = _mask_database_url(DATABASE_URL)
DATABASE_ECHO: Final[bool] = _env_bool("DATABASE_ECHO", True)
DATABASE_POOL_SIZE: Final[int] = _env_int("DATABASE_POOL_SIZE", 20, 1, 200)
@ -223,9 +216,27 @@ REDIS_PREFIX: Final[str] = str(os.getenv("REDIS_PREFIX") or "dashboard_nanobot")
REDIS_DEFAULT_TTL: Final[int] = _env_int("REDIS_DEFAULT_TTL", 60, 1, 86400)
PANEL_ACCESS_PASSWORD: Final[str] = str(os.getenv("PANEL_ACCESS_PASSWORD") or "").strip()
TEMPLATE_ROOT: Final[Path] = (BACKEND_ROOT / "templates").resolve()
AGENT_MD_TEMPLATES_FILE: Final[Path] = TEMPLATE_ROOT / "agent_md_templates.json"
TOPIC_PRESETS_TEMPLATES_FILE: Final[Path] = TEMPLATE_ROOT / "topic_presets.json"
LEGACY_TEMPLATE_ROOT: Final[Path] = (BACKEND_ROOT / "templates").resolve()
TEMPLATE_ROOT: Final[Path] = (Path(DATA_ROOT) / "templates").resolve()
TEMPLATE_ROOT.mkdir(parents=True, exist_ok=True)
def _resolve_template_file(filename: str) -> Path:
    """Locate *filename* under the data-dir TEMPLATE_ROOT, migrating a legacy copy.

    The data-directory template wins when it exists. When only the legacy
    backend/templates copy is present it is copied into the data directory
    once; if that copy fails (permissions, read-only FS, ...) the legacy path
    is served directly. When neither file exists the data-dir path is still
    returned so callers create the file in the right place.
    """
    preferred = (TEMPLATE_ROOT / filename).resolve()
    if preferred.exists():
        return preferred
    fallback = (LEGACY_TEMPLATE_ROOT / filename).resolve()
    if not fallback.exists():
        return preferred
    try:
        # One-time migration of the legacy template into the data directory.
        shutil.copy2(fallback, preferred)
    except Exception:
        # Best-effort: keep serving the legacy file rather than failing startup.
        return fallback
    return preferred
AGENT_MD_TEMPLATES_FILE: Final[Path] = _resolve_template_file("agent_md_templates.json")
TOPIC_PRESETS_TEMPLATES_FILE: Final[Path] = _resolve_template_file("topic_presets.json")
_agent_md_templates_raw = _load_json_object(AGENT_MD_TEMPLATES_FILE)
DEFAULT_AGENTS_MD: Final[str] = _env_text(

0
backend/data.db 100644
View File

File diff suppressed because it is too large Load Diff

View File

@ -14,6 +14,10 @@ class BotInstance(SQLModel, table=True):
current_state: Optional[str] = Field(default="IDLE")
last_action: Optional[str] = Field(default=None)
image_tag: str = Field(default="nanobot-base:v0.1.4") # 记录该机器人使用的镜像版本
node_id: str = Field(default="local", index=True)
transport_kind: str = Field(default="direct", index=True)
runtime_kind: str = Field(default="docker", index=True)
core_adapter: str = Field(default="nanobot", index=True)
created_at: datetime = Field(default_factory=datetime.utcnow)
updated_at: datetime = Field(default_factory=datetime.utcnow)

View File

@ -19,6 +19,25 @@ class PlatformSetting(SQLModel, table=True):
updated_at: datetime = Field(default_factory=datetime.utcnow, index=True)
class ManagedNodeRecord(SQLModel, table=True):
    """Persisted registration of a remote node managed by the dashboard."""

    __tablename__ = "managed_node"

    # Operator-chosen stable identifier; doubles as the primary key.
    node_id: str = Field(primary_key=True, max_length=120)
    display_name: str = Field(default="", max_length=200)
    # Base URL of the node's edge service endpoint.
    base_url: str = Field(default="")
    enabled: bool = Field(default=True, index=True)
    # Credential sent when calling the node — presumably a bearer token;
    # NOTE(review): confirm against the edge client implementation.
    auth_token: str = Field(default="")
    transport_kind: str = Field(default="direct", max_length=32, index=True)
    runtime_kind: str = Field(default="docker", max_length=32, index=True)
    core_adapter: str = Field(default="nanobot", max_length=64, index=True)
    # Flexible JSON-encoded blobs stored as text so new keys need no migration.
    metadata_json: str = Field(default="{}")
    capabilities_json: str = Field(default="{}")
    resources_json: str = Field(default="{}")
    # Last successful contact with the node; None until first heartbeat.
    last_seen_at: Optional[datetime] = Field(default=None, index=True)
    created_at: datetime = Field(default_factory=datetime.utcnow)
    updated_at: datetime = Field(default_factory=datetime.utcnow, index=True)
class BotRequestUsage(SQLModel, table=True):
__tablename__ = "bot_request_usage"

View File

@ -0,0 +1 @@
# Provider package for runtime/workspace/provision abstractions.

View File

@ -0,0 +1,4 @@
from providers.provision.base import ProvisionProvider
from providers.provision.local import LocalProvisionProvider
__all__ = ["ProvisionProvider", "LocalProvisionProvider"]

View File

@ -0,0 +1,18 @@
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional
from sqlmodel import Session
class ProvisionProvider(ABC):
    """Abstract interface for materializing a bot's workspace before it runs."""

    @abstractmethod
    def sync_bot_workspace(
        self,
        *,
        session: Session,
        bot_id: str,
        channels_override: Optional[List[Dict[str, Any]]] = None,
        global_delivery_override: Optional[Dict[str, Any]] = None,
        runtime_overrides: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Synchronize the workspace for *bot_id*.

        Args:
            session: Open database session used to load the bot record.
            bot_id: Identifier of the bot whose workspace is synced.
            channels_override: Replacement channel list; None keeps the bot's
                stored channels.
            global_delivery_override: Optional delivery-settings override.
            runtime_overrides: Optional runtime parameter overrides.
        """
        raise NotImplementedError

View File

@ -0,0 +1,105 @@
from typing import Any, Callable, Dict, List, Optional
from fastapi import HTTPException
from sqlmodel import Session
from clients.edge.base import EdgeClient
from models.bot import BotInstance
from providers.provision.base import ProvisionProvider
from providers.target import ProviderTarget
class EdgeProvisionProvider(ProvisionProvider):
    """Provision provider that syncs a bot's workspace on a remote edge node.

    All node interaction goes through an EdgeClient resolved from the bot's
    ProviderTarget; node-level metadata (workspace root, native runner
    settings) is folded into the runtime overrides before the sync call.
    """

    def __init__(
        self,
        *,
        read_provider_target: Callable[[str], ProviderTarget],
        resolve_edge_client: Callable[[ProviderTarget], EdgeClient],
        read_runtime_snapshot: Callable[[BotInstance], Dict[str, Any]],
        read_bot_channels: Callable[[BotInstance], List[Dict[str, Any]]],
        read_node_metadata: Callable[[str], Dict[str, Any]],
    ) -> None:
        # Collaborators are injected as callables so this class carries no
        # direct dependency on the service wiring that implements them.
        self._read_provider_target = read_provider_target
        self._resolve_edge_client = resolve_edge_client
        self._read_runtime_snapshot = read_runtime_snapshot
        self._read_bot_channels = read_bot_channels
        self._read_node_metadata = read_node_metadata

    def sync_bot_workspace(
        self,
        *,
        session: Session,
        bot_id: str,
        channels_override: Optional[List[Dict[str, Any]]] = None,
        global_delivery_override: Optional[Dict[str, Any]] = None,
        runtime_overrides: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Push the bot's merged workspace definition to its edge node.

        Raises:
            HTTPException: 404 when the bot row does not exist; 400 when the
                bot's target transport is not ``edge``.
        """
        bot = session.get(BotInstance, bot_id)
        if bot is None:
            raise HTTPException(status_code=404, detail="Bot not found")
        snapshot = dict(self._read_runtime_snapshot(bot))
        merged_runtime = dict(snapshot)
        if isinstance(runtime_overrides, dict):
            for key, value in runtime_overrides.items():
                # Credential/model fields: blank overrides are ignored so an
                # empty form value cannot wipe the stored setting.
                if key in {"api_key", "llm_provider", "llm_model"}:
                    text = str(value or "").strip()
                    if not text:
                        continue
                    merged_runtime[key] = text
                    continue
                # api_base may legitimately be cleared to an empty string.
                if key == "api_base":
                    merged_runtime[key] = str(value or "").strip()
                    continue
                merged_runtime[key] = value
        target = self._read_provider_target(bot_id)
        # Node-level settings win over bot-level runtime values.
        merged_runtime.update(self._node_runtime_overrides(target.node_id, target.runtime_kind))
        resolved_delivery = dict(global_delivery_override or {})
        # Backfill delivery flags from the merged runtime when not overridden.
        if "sendProgress" not in resolved_delivery:
            resolved_delivery["sendProgress"] = bool(merged_runtime.get("send_progress", False))
        if "sendToolHints" not in resolved_delivery:
            resolved_delivery["sendToolHints"] = bool(merged_runtime.get("send_tool_hints", False))
        self._client_for_target(target).sync_bot_workspace(
            bot_id=bot_id,
            channels_override=channels_override if channels_override is not None else self._read_bot_channels(bot),
            global_delivery_override=resolved_delivery,
            runtime_overrides=merged_runtime,
        )

    def _client_for_bot(self, bot_id: str) -> EdgeClient:
        # Convenience lookup: bot id -> provider target -> edge client.
        target = self._read_provider_target(bot_id)
        return self._client_for_target(target)

    def _client_for_target(self, target: ProviderTarget) -> EdgeClient:
        # Guard: this provider is only valid for edge-transport targets.
        if target.transport_kind != "edge":
            raise HTTPException(status_code=400, detail=f"edge provision provider requires edge transport, got {target.transport_kind}")
        return self._resolve_edge_client(target)

    def _node_runtime_overrides(self, node_id: str, runtime_kind: str) -> Dict[str, str]:
        """Collect node-scoped runtime overrides from the node's metadata.

        workspace_root is always forwarded when set; native_* settings are
        only relevant for the ``native`` runtime kind.
        """
        metadata = dict(self._read_node_metadata(str(node_id or "").strip().lower()) or {})
        payload: Dict[str, str] = {}
        workspace_root = str(metadata.get("workspace_root") or "").strip()
        if workspace_root:
            payload["workspace_root"] = workspace_root
        if str(runtime_kind or "").strip().lower() != "native":
            return payload
        native_sandbox_mode = self._normalize_native_sandbox_mode(metadata.get("native_sandbox_mode"))
        if native_sandbox_mode != "inherit":
            payload["native_sandbox_mode"] = native_sandbox_mode
        native_command = str(metadata.get("native_command") or "").strip()
        native_workdir = str(metadata.get("native_workdir") or "").strip()
        if native_command:
            payload["native_command"] = native_command
        if native_workdir:
            payload["native_workdir"] = native_workdir
        return payload

    @staticmethod
    def _normalize_native_sandbox_mode(raw_value: Any) -> str:
        """Map assorted spellings onto 'workspace' / 'full_access' / 'inherit'."""
        text = str(raw_value or "").strip().lower()
        if text in {"workspace", "sandbox", "strict"}:
            return "workspace"
        if text in {"full_access", "full-access", "danger-full-access", "escape"}:
            return "full_access"
        return "inherit"

View File

@ -0,0 +1,34 @@
from typing import Any, Callable, Dict, List, Optional
from sqlmodel import Session
from providers.provision.base import ProvisionProvider
class LocalProvisionProvider(ProvisionProvider):
    """Provision provider that delegates to an in-process sync function."""

    def __init__(
        self,
        *,
        sync_workspace_func: Callable[
            [Session, str, Optional[List[Dict[str, Any]]], Optional[Dict[str, Any]], Optional[Dict[str, Any]]],
            None,
        ],
    ) -> None:
        # The injected callable performs the actual local workspace sync.
        self._delegate = sync_workspace_func

    def sync_bot_workspace(
        self,
        *,
        session: Session,
        bot_id: str,
        channels_override: Optional[List[Dict[str, Any]]] = None,
        global_delivery_override: Optional[Dict[str, Any]] = None,
        runtime_overrides: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Forward all arguments, positionally and in order, to the delegate."""
        self._delegate(session, bot_id, channels_override, global_delivery_override, runtime_overrides)

View File

@ -0,0 +1,47 @@
from dataclasses import dataclass, field
from typing import Dict, Optional
from providers.target import ProviderTarget
from providers.provision.base import ProvisionProvider
from providers.runtime.base import RuntimeProvider
from providers.workspace.base import WorkspaceProvider
@dataclass
class ProviderRegistry:
    """Holds runtime/workspace/provision providers keyed by bundle key."""

    runtime: Dict[str, RuntimeProvider] = field(default_factory=dict)
    workspace: Dict[str, WorkspaceProvider] = field(default_factory=dict)
    provision: Dict[str, ProvisionProvider] = field(default_factory=dict)

    def register_bundle(
        self,
        *,
        key: str,
        runtime_provider: RuntimeProvider,
        workspace_provider: WorkspaceProvider,
        provision_provider: ProvisionProvider,
    ) -> None:
        """Register all three providers for one bundle key."""
        self.runtime[key] = runtime_provider
        self.workspace[key] = workspace_provider
        self.provision[key] = provision_provider

    def resolve_bundle_key(self, target: ProviderTarget) -> Optional[str]:
        """Return the registered bundle key matching *target*, or None.

        An exact key match wins. Otherwise the first complete bundle whose
        transport/runtime/core-adapter parts equal the target's (node id is
        ignored, comparison case-insensitive) is returned.
        """
        if self._has_bundle(target.key):
            return target.key
        wanted = (
            str(target.transport_kind or "").strip().lower(),
            str(target.runtime_kind or "").strip().lower(),
            str(target.core_adapter or "").strip().lower(),
        )
        for candidate in self.runtime:
            if not self._has_bundle(candidate):
                continue
            pieces = str(candidate or "").split(":")
            # Keys are node:transport:runtime:core — core may itself contain ':'.
            if len(pieces) < 4:
                continue
            found = (
                str(pieces[1] or "").strip().lower(),
                str(pieces[2] or "").strip().lower(),
                str(":".join(pieces[3:]) or "").strip().lower(),
            )
            if found == wanted:
                return candidate
        return None

    def _has_bundle(self, key: str) -> bool:
        # A bundle is usable only when all three maps contain the key.
        return key in self.runtime and key in self.workspace and key in self.provision

View File

@ -0,0 +1,4 @@
from providers.runtime.base import RuntimeProvider
from providers.runtime.local import LocalRuntimeProvider
__all__ = ["RuntimeProvider", "LocalRuntimeProvider"]

View File

@ -0,0 +1,40 @@
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional
from sqlmodel import Session
from models.bot import BotInstance
class RuntimeProvider(ABC):
    """Abstract runtime backend: start/stop bots and read their live state."""

    @abstractmethod
    async def start_bot(self, *, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Start *bot*'s runtime; returns a backend-specific result payload."""
        raise NotImplementedError

    @abstractmethod
    def stop_bot(self, *, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Stop *bot*'s runtime; returns a backend-specific result payload."""
        raise NotImplementedError

    @abstractmethod
    def deliver_command(self, *, bot_id: str, command: str, media: Optional[List[str]] = None) -> Optional[str]:
        """Deliver a command (with optional media) to the running bot.

        Returns None on success, or an error description string on failure.
        """
        raise NotImplementedError

    @abstractmethod
    def get_recent_logs(self, *, bot_id: str, tail: int = 300) -> List[str]:
        """Return up to *tail* recent log lines for the bot."""
        raise NotImplementedError

    @abstractmethod
    def ensure_monitor(self, *, bot_id: str) -> bool:
        """Ensure a runtime monitor is attached; True when it is in place."""
        raise NotImplementedError

    @abstractmethod
    def get_monitor_packets(self, *, bot_id: str, after_seq: int = 0, limit: int = 200) -> List[Dict[str, Any]]:
        """Return monitor packets with sequence numbers greater than *after_seq*."""
        raise NotImplementedError

    @abstractmethod
    def get_runtime_status(self, *, bot_id: str) -> str:
        """Return the bot's runtime status string (e.g. RUNNING/STOPPED)."""
        raise NotImplementedError

    @abstractmethod
    def get_resource_snapshot(self, *, bot_id: str) -> Dict[str, Any]:
        """Return a point-in-time resource usage snapshot for the bot."""
        raise NotImplementedError

View File

@ -0,0 +1,136 @@
from typing import Any, Callable, Dict, List, Optional
from fastapi import HTTPException
from sqlmodel import Session
from clients.edge.base import EdgeClient
from models.bot import BotInstance
from providers.runtime.base import RuntimeProvider
from providers.target import ProviderTarget, provider_target_to_dict
class EdgeRuntimeProvider(RuntimeProvider):
    """Runtime provider that proxies bot lifecycle calls to a remote edge node."""

    def __init__(
        self,
        *,
        read_provider_target: Callable[[str], ProviderTarget],
        resolve_edge_client: Callable[[ProviderTarget], EdgeClient],
        read_runtime_snapshot: Callable[[BotInstance], Dict[str, Any]],
        resolve_env_params: Callable[[str], Dict[str, str]],
        read_bot_channels: Callable[[BotInstance], List[Dict[str, Any]]],
        read_node_metadata: Callable[[str], Dict[str, Any]],
    ) -> None:
        # Collaborators are injected as callables to decouple this provider
        # from the service layer that wires it up.
        self._read_provider_target = read_provider_target
        self._resolve_edge_client = resolve_edge_client
        self._read_runtime_snapshot = read_runtime_snapshot
        self._resolve_env_params = resolve_env_params
        self._read_bot_channels = read_bot_channels
        self._read_node_metadata = read_node_metadata

    async def start_bot(self, *, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Sync the workspace to the node, then ask the node to start the bot.

        The bot row is marked RUNNING and committed after the node accepts the
        start request (optimistic status write).

        Raises:
            HTTPException: 400 on missing id or non-edge transport; 403 when
                the bot is disabled.
        """
        bot_id = str(bot.id or "").strip()
        if not bot_id:
            raise HTTPException(status_code=400, detail="Bot id is required")
        if not bool(getattr(bot, "enabled", True)):
            raise HTTPException(status_code=403, detail="Bot is disabled. Enable it first.")
        runtime_snapshot = self._read_runtime_snapshot(bot)
        target = self._read_provider_target(bot_id)
        client = self._client_for_target(target)
        node_runtime_overrides = self._node_runtime_overrides(target.node_id, target.runtime_kind)
        # Later dicts win: node-level overrides trump target info and snapshot.
        workspace_runtime = {
            **dict(runtime_snapshot),
            **provider_target_to_dict(target),
            **node_runtime_overrides,
        }
        client.sync_bot_workspace(
            bot_id=bot_id,
            channels_override=self._read_bot_channels(bot),
            global_delivery_override={
                "sendProgress": bool(runtime_snapshot.get("send_progress")),
                "sendToolHints": bool(runtime_snapshot.get("send_tool_hints")),
            },
            runtime_overrides=workspace_runtime,
        )
        result = await client.start_bot(
            bot=bot,
            start_payload={
                "image_tag": bot.image_tag,
                "runtime_kind": target.runtime_kind,
                "env_vars": self._resolve_env_params(bot_id),
                "cpu_cores": runtime_snapshot.get("cpu_cores"),
                "memory_mb": runtime_snapshot.get("memory_mb"),
                "storage_gb": runtime_snapshot.get("storage_gb"),
                **node_runtime_overrides,
            },
        )
        bot.docker_status = "RUNNING"
        session.add(bot)
        session.commit()
        return result

    def stop_bot(self, *, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Ask the node to stop the bot, then persist STOPPED on the bot row.

        Raises:
            HTTPException: 400 on missing id; 403 when the bot is disabled.
        """
        bot_id = str(bot.id or "").strip()
        if not bot_id:
            raise HTTPException(status_code=400, detail="Bot id is required")
        if not bool(getattr(bot, "enabled", True)):
            raise HTTPException(status_code=403, detail="Bot is disabled. Enable it first.")
        result = self._client_for_bot(bot_id).stop_bot(bot=bot)
        bot.docker_status = "STOPPED"
        session.add(bot)
        session.commit()
        return result

    def deliver_command(self, *, bot_id: str, command: str, media: Optional[List[str]] = None) -> Optional[str]:
        """Forward a command to the node; None on success, else an error string."""
        return self._client_for_bot(bot_id).deliver_command(bot_id=bot_id, command=command, media=media)

    def get_recent_logs(self, *, bot_id: str, tail: int = 300) -> List[str]:
        """Fetch up to *tail* recent log lines from the node."""
        return self._client_for_bot(bot_id).get_recent_logs(bot_id=bot_id, tail=tail)

    def ensure_monitor(self, *, bot_id: str) -> bool:
        """Ask the node to ensure a monitor is attached for the bot."""
        return bool(self._client_for_bot(bot_id).ensure_monitor(bot_id=bot_id))

    def get_monitor_packets(self, *, bot_id: str, after_seq: int = 0, limit: int = 200) -> List[Dict[str, Any]]:
        """Fetch monitor packets after *after_seq*; never returns None."""
        return list(self._client_for_bot(bot_id).get_monitor_packets(bot_id=bot_id, after_seq=after_seq, limit=limit) or [])

    def get_runtime_status(self, *, bot_id: str) -> str:
        """Return the node-reported status, upper-cased; STOPPED when unknown."""
        return str(self._client_for_bot(bot_id).get_runtime_status(bot_id=bot_id) or "STOPPED").upper()

    def get_resource_snapshot(self, *, bot_id: str) -> Dict[str, Any]:
        """Return the node-reported resource snapshot; empty dict when unknown."""
        return dict(self._client_for_bot(bot_id).get_resource_snapshot(bot_id=bot_id) or {})

    def _client_for_bot(self, bot_id: str) -> EdgeClient:
        # Convenience lookup: bot id -> provider target -> edge client.
        target = self._read_provider_target(bot_id)
        return self._client_for_target(target)

    def _client_for_target(self, target: ProviderTarget) -> EdgeClient:
        # Guard: this provider is only valid for edge-transport targets.
        if target.transport_kind != "edge":
            raise HTTPException(status_code=400, detail=f"edge runtime provider requires edge transport, got {target.transport_kind}")
        return self._resolve_edge_client(target)

    def _node_runtime_overrides(self, node_id: str, runtime_kind: str) -> Dict[str, str]:
        """Collect node-scoped runtime overrides from the node's metadata.

        workspace_root is always forwarded when set; native_* settings are
        only relevant for the ``native`` runtime kind.
        """
        metadata = dict(self._read_node_metadata(str(node_id or "").strip().lower()) or {})
        payload: Dict[str, str] = {}
        workspace_root = str(metadata.get("workspace_root") or "").strip()
        if workspace_root:
            payload["workspace_root"] = workspace_root
        if str(runtime_kind or "").strip().lower() != "native":
            return payload
        native_sandbox_mode = self._normalize_native_sandbox_mode(metadata.get("native_sandbox_mode"))
        if native_sandbox_mode != "inherit":
            payload["native_sandbox_mode"] = native_sandbox_mode
        native_command = str(metadata.get("native_command") or "").strip()
        native_workdir = str(metadata.get("native_workdir") or "").strip()
        if native_command:
            payload["native_command"] = native_command
        if native_workdir:
            payload["native_workdir"] = native_workdir
        return payload

    @staticmethod
    def _normalize_native_sandbox_mode(raw_value: Any) -> str:
        """Map assorted spellings onto 'workspace' / 'full_access' / 'inherit'."""
        text = str(raw_value or "").strip().lower()
        if text in {"workspace", "sandbox", "strict"}:
            return "workspace"
        if text in {"full_access", "full-access", "danger-full-access", "escape"}:
            return "full_access"
        return "inherit"

View File

@ -0,0 +1,117 @@
import asyncio
from typing import Any, Awaitable, Callable, Dict, List, Optional
from fastapi import HTTPException
from sqlmodel import Session
from models.bot import BotInstance
from providers.provision.base import ProvisionProvider
from providers.runtime.base import RuntimeProvider
from services.platform_service import record_activity_event
class LocalRuntimeProvider(RuntimeProvider):
    """Runtime provider that runs bots locally through the injected docker manager."""

    def __init__(
        self,
        *,
        docker_manager: Any,
        on_state_change: Callable[[str, dict], None],
        provision_provider: ProvisionProvider,
        read_runtime_snapshot: Callable[[BotInstance], Dict[str, Any]],
        resolve_env_params: Callable[[str], Dict[str, str]],
        write_env_store: Callable[[str, Dict[str, str]], None],
        invalidate_bot_cache: Callable[[str], None],
        record_agent_loop_ready_warning: Callable[[str], Awaitable[None]],
        safe_float: Callable[[Any, float], float],
        safe_int: Callable[[Any, int], int],
    ) -> None:
        # Dependencies are injected to keep this provider testable and free of
        # direct imports from the service wiring.
        self._docker_manager = docker_manager
        self._on_state_change = on_state_change
        self._provision_provider = provision_provider
        self._read_runtime_snapshot = read_runtime_snapshot
        self._resolve_env_params = resolve_env_params
        self._write_env_store = write_env_store
        self._invalidate_bot_cache = invalidate_bot_cache
        self._record_agent_loop_ready_warning = record_agent_loop_ready_warning
        self._safe_float = safe_float
        self._safe_int = safe_int

    async def start_bot(self, *, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Provision the workspace, start the container, and verify it stays up.

        Raises:
            HTTPException: 400 on missing id; 403 when the bot is disabled;
                500 when the container fails to start or dies right after
                startup.
        """
        bot_id = str(bot.id or "").strip()
        if not bot_id:
            raise HTTPException(status_code=400, detail="Bot id is required")
        if not bool(getattr(bot, "enabled", True)):
            raise HTTPException(status_code=403, detail="Bot is disabled. Enable it first.")
        self._provision_provider.sync_bot_workspace(session=session, bot_id=bot_id)
        runtime_snapshot = self._read_runtime_snapshot(bot)
        env_params = self._resolve_env_params(bot_id)
        # Persist resolved env vars before launch so the container sees them.
        self._write_env_store(bot_id, env_params)
        success = self._docker_manager.start_bot(
            bot_id,
            image_tag=bot.image_tag,
            on_state_change=self._on_state_change,
            env_vars=env_params,
            cpu_cores=self._safe_float(runtime_snapshot.get("cpu_cores"), 1.0),
            memory_mb=self._safe_int(runtime_snapshot.get("memory_mb"), 1024),
            storage_gb=self._safe_int(runtime_snapshot.get("storage_gb"), 10),
        )
        if not success:
            bot.docker_status = "STOPPED"
            session.add(bot)
            session.commit()
            raise HTTPException(status_code=500, detail=f"Failed to start container with image {bot.image_tag}")
        # Re-check actual container state: it may have crashed immediately.
        actual_status = self._docker_manager.get_bot_status(bot_id)
        bot.docker_status = actual_status
        if actual_status != "RUNNING":
            session.add(bot)
            session.commit()
            self._invalidate_bot_cache(bot_id)
            raise HTTPException(
                status_code=500,
                detail="Bot container failed shortly after startup. Check bot logs/config.",
            )
        # Fire-and-forget: warn later if the agent loop never reports ready.
        asyncio.create_task(self._record_agent_loop_ready_warning(bot_id))
        session.add(bot)
        record_activity_event(session, bot_id, "bot_started", channel="system", detail=f"Container started for {bot_id}")
        session.commit()
        self._invalidate_bot_cache(bot_id)
        return {"status": "started"}

    def stop_bot(self, *, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Stop the container and persist STOPPED plus an activity event.

        Raises:
            HTTPException: 400 on missing id; 403 when the bot is disabled.
        """
        bot_id = str(bot.id or "").strip()
        if not bot_id:
            raise HTTPException(status_code=400, detail="Bot id is required")
        if not bool(getattr(bot, "enabled", True)):
            raise HTTPException(status_code=403, detail="Bot is disabled. Enable it first.")
        self._docker_manager.stop_bot(bot_id)
        bot.docker_status = "STOPPED"
        session.add(bot)
        record_activity_event(session, bot_id, "bot_stopped", channel="system", detail=f"Container stopped for {bot_id}")
        session.commit()
        self._invalidate_bot_cache(bot_id)
        return {"status": "stopped"}

    def deliver_command(self, *, bot_id: str, command: str, media: Optional[List[str]] = None) -> Optional[str]:
        """Send a command into the container; None on success, else the error."""
        success = self._docker_manager.send_command(bot_id, command, media=media)
        if success:
            return None
        return self._docker_manager.get_last_delivery_error(bot_id) or "command delivery failed"

    def get_recent_logs(self, *, bot_id: str, tail: int = 300) -> List[str]:
        """Return up to *tail* recent container log lines; never None."""
        return list(self._docker_manager.get_recent_logs(bot_id, tail=tail) or [])

    def ensure_monitor(self, *, bot_id: str) -> bool:
        """Ensure the docker manager has a monitor attached for the bot."""
        return bool(self._docker_manager.ensure_monitor(bot_id, self._on_state_change))

    def get_monitor_packets(self, *, bot_id: str, after_seq: int = 0, limit: int = 200) -> List[Dict[str, Any]]:
        # Local runtime streams packets via the monitor callback instead of
        # polling, so there is never a backlog to return here.
        return []

    def get_runtime_status(self, *, bot_id: str) -> str:
        """Return the container status, upper-cased; STOPPED when unknown."""
        return str(self._docker_manager.get_bot_status(bot_id) or "STOPPED").upper()

    def get_resource_snapshot(self, *, bot_id: str) -> Dict[str, Any]:
        """Return the container resource snapshot; empty dict when unknown."""
        return dict(self._docker_manager.get_bot_resource_snapshot(bot_id) or {})

View File

@ -0,0 +1,59 @@
from typing import Any
from models.bot import BotInstance
from providers.registry import ProviderRegistry
from providers.provision.base import ProvisionProvider
from providers.runtime.base import RuntimeProvider
from providers.target import resolve_provider_target
from providers.workspace.base import WorkspaceProvider
def _require_provider(value: Any, label: str):
if value is None:
raise RuntimeError(f"{label} is not configured")
return value
def _get_registry(app_state: Any) -> ProviderRegistry | None:
    """Fetch the app-level provider registry, validating its type.

    Returns None when no registry is configured; raises RuntimeError when the
    attribute exists but is not a ProviderRegistry.
    """
    candidate = getattr(app_state, "provider_registry", None)
    if candidate is None:
        return None
    if isinstance(candidate, ProviderRegistry):
        return candidate
    raise RuntimeError("provider registry is misconfigured")
def _provider_from_registry(app_state: Any, bot: BotInstance, providers_attr: str, label: str):
    """Shared registry lookup used by the three public getters below.

    Resolves *bot*'s provider target against the registry map named
    *providers_attr* ("runtime" / "workspace" / "provision"). Returns None
    when no registry is configured (caller falls back to legacy app-state
    attributes); raises RuntimeError when a registry exists but holds no
    matching bundle for the target.
    """
    registry = _get_registry(app_state)
    if registry is None:
        return None
    target = resolve_provider_target(app_state, bot)
    bundle_key = registry.resolve_bundle_key(target)
    providers = getattr(registry, providers_attr)
    provider = providers.get(bundle_key) if bundle_key else None
    if provider is None:
        raise RuntimeError(f"{label} is not configured for target {target.key}")
    return provider


def get_runtime_provider(app_state: Any, bot: BotInstance) -> RuntimeProvider:
    """Resolve the runtime provider for *bot* (registry first, legacy fallback)."""
    provider = _provider_from_registry(app_state, bot, "runtime", "runtime provider")
    if provider is not None:
        return provider
    return _require_provider(getattr(app_state, "runtime_provider", None), "runtime provider")


def get_workspace_provider(app_state: Any, bot: BotInstance) -> WorkspaceProvider:
    """Resolve the workspace provider for *bot* (registry first, legacy fallback)."""
    provider = _provider_from_registry(app_state, bot, "workspace", "workspace provider")
    if provider is not None:
        return provider
    return _require_provider(getattr(app_state, "workspace_provider", None), "workspace provider")


def get_provision_provider(app_state: Any, bot: BotInstance) -> ProvisionProvider:
    """Resolve the provision provider for *bot* (registry first, legacy fallback)."""
    provider = _provider_from_registry(app_state, bot, "provision", "provision provider")
    if provider is not None:
        return provider
    return _require_provider(getattr(app_state, "provision_provider", None), "provision provider")

View File

@ -0,0 +1,118 @@
from dataclasses import dataclass
from typing import Any
from models.bot import BotInstance
DEFAULT_NODE_ID = "local"
DEFAULT_TRANSPORT_KIND = "direct"
DEFAULT_RUNTIME_KIND = "docker"
DEFAULT_CORE_ADAPTER = "nanobot"
TARGET_CONFIG_KEY = "dashboardRuntime"
SUPPORTED_TRANSPORT_KINDS = {"direct", "edge"}
SUPPORTED_RUNTIME_KINDS = {"docker", "native"}
def _normalize_target_part(value: Any, fallback: str) -> str:
text = str(value or "").strip().lower()
return text or fallback
@dataclass(frozen=True)
class ProviderTarget:
node_id: str = DEFAULT_NODE_ID
transport_kind: str = DEFAULT_TRANSPORT_KIND
runtime_kind: str = DEFAULT_RUNTIME_KIND
core_adapter: str = DEFAULT_CORE_ADAPTER
@property
def key(self) -> str:
return ":".join([self.node_id, self.transport_kind, self.runtime_kind, self.core_adapter])
def normalize_provider_target(value: Any, fallback: ProviderTarget | None = None) -> ProviderTarget:
    """Coerce *value* (ProviderTarget, dict, or anything else) into a valid target.

    Dicts may use snake_case or camelCase keys. Transport and runtime kinds
    outside the supported sets fall back to the base target's values; any
    other value yields the base target unchanged.
    """
    base = fallback or ProviderTarget()
    if isinstance(value, ProviderTarget):
        raw = {
            "node_id": value.node_id,
            "transport_kind": value.transport_kind,
            "runtime_kind": value.runtime_kind,
            "core_adapter": value.core_adapter,
        }
    elif isinstance(value, dict):
        raw = {
            "node_id": value.get("node_id", value.get("nodeId")),
            "transport_kind": value.get("transport_kind", value.get("transportKind")),
            "runtime_kind": value.get("runtime_kind", value.get("runtimeKind")),
            "core_adapter": value.get("core_adapter", value.get("coreAdapter")),
        }
    else:
        raw = {"node_id": None, "transport_kind": None, "runtime_kind": None, "core_adapter": None}
    transport = _normalize_target_part(raw["transport_kind"], base.transport_kind)
    if transport not in SUPPORTED_TRANSPORT_KINDS:
        transport = base.transport_kind
    runtime = _normalize_target_part(raw["runtime_kind"], base.runtime_kind)
    if runtime not in SUPPORTED_RUNTIME_KINDS:
        runtime = base.runtime_kind
    return ProviderTarget(
        node_id=_normalize_target_part(raw["node_id"], base.node_id),
        transport_kind=transport,
        runtime_kind=runtime,
        core_adapter=_normalize_target_part(raw["core_adapter"], base.core_adapter),
    )
def provider_target_to_dict(target: ProviderTarget) -> dict[str, str]:
    """Serialize a ProviderTarget into a snake_case dict (normalize's inverse)."""
    field_names = ("node_id", "transport_kind", "runtime_kind", "core_adapter")
    return {name: getattr(target, name) for name in field_names}
def provider_target_from_config(config_data: Any, fallback: ProviderTarget | None = None) -> ProviderTarget:
    """Read and normalize the persisted target section from a bot config blob."""
    section = config_data.get(TARGET_CONFIG_KEY) if isinstance(config_data, dict) else None
    return normalize_provider_target(section, fallback=fallback)
def write_provider_target_config(config_data: dict[str, Any], target: ProviderTarget) -> dict[str, Any]:
    """Persist *target* into *config_data* under TARGET_CONFIG_KEY (camelCase keys).

    Mutates and returns *config_data* for call-chaining convenience.
    """
    serialized = {
        "nodeId": target.node_id,
        "transportKind": target.transport_kind,
        "runtimeKind": target.runtime_kind,
        "coreAdapter": target.core_adapter,
    }
    config_data[TARGET_CONFIG_KEY] = serialized
    return config_data
def resolve_provider_target(app_state: Any, bot: BotInstance) -> ProviderTarget:
    """Determine *bot*'s provider target, preferring an app-level resolver hook.

    App-state ``provider_default_*`` attributes seed the fallback target.
    When the app exposes a callable ``resolve_bot_provider_target`` its result
    is normalized against that fallback; otherwise the bot row's own columns
    are used.
    """
    defaults = (
        ("node_id", "provider_default_node_id", DEFAULT_NODE_ID),
        ("transport_kind", "provider_default_transport_kind", DEFAULT_TRANSPORT_KIND),
        ("runtime_kind", "provider_default_runtime_kind", DEFAULT_RUNTIME_KIND),
        ("core_adapter", "provider_default_core_adapter", DEFAULT_CORE_ADAPTER),
    )
    fallback = ProviderTarget(**{
        field_name: _normalize_target_part(getattr(app_state, attr, None), default)
        for field_name, attr, default in defaults
    })
    resolver = getattr(app_state, "resolve_bot_provider_target", None)
    if callable(resolver):
        return normalize_provider_target(resolver(bot), fallback=fallback)
    bot_columns = {
        "node_id": getattr(bot, "node_id", None),
        "transport_kind": getattr(bot, "transport_kind", None),
        "runtime_kind": getattr(bot, "runtime_kind", None),
        "core_adapter": getattr(bot, "core_adapter", None),
    }
    return normalize_provider_target(bot_columns, fallback=fallback)

View File

@ -0,0 +1,128 @@
from typing import Any, Dict, List, Optional
from pydantic import BaseModel, Field
# Version tag embedded in every edge payload so both sides can detect drift.
NODE_PROTOCOL_VERSION = "1"


class EdgeNodeIdentityBase(BaseModel):
    """Common identity fields reported by an edge node."""

    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str
    display_name: str
    service: str = "dashboard-edge"
    transport_kind: str = "edge"
    runtime_kind: str = "docker"
    core_adapter: str = "nanobot"


class EdgeStatusResponse(BaseModel):
    """Generic status-string acknowledgement."""

    status: str


class EdgeStateWriteRequest(BaseModel):
    """Request body for writing a bot state blob on the node."""

    data: Dict[str, Any] = Field(default_factory=dict)
    workspace_root: Optional[str] = None


class EdgeStateResponse(BaseModel):
    """One stored state entry for a bot."""

    bot_id: str
    state_key: str
    data: Dict[str, Any] = Field(default_factory=dict)


class EdgeNativePreflightRequest(BaseModel):
    """Optional overrides for a native-runtime preflight check."""

    native_command: Optional[str] = None
    native_workdir: Optional[str] = None


class EdgeNativePreflightResponse(BaseModel):
    """Outcome of checking whether the node can run a native bot."""

    ok: bool = False
    command: List[str] = Field(default_factory=list)
    workdir: str = ""
    command_available: bool = False
    workdir_exists: bool = False
    detail: str = ""


class EdgeStartBotRequest(BaseModel):
    """Parameters the control plane sends when starting a bot on the node."""

    image_tag: str
    runtime_kind: str = "docker"
    env_vars: Dict[str, str] = Field(default_factory=dict)
    workspace_root: Optional[str] = None
    native_command: Optional[str] = None
    native_workdir: Optional[str] = None
    # Resource limits for the bot runtime.
    cpu_cores: float = 1.0
    memory_mb: int = 1024
    storage_gb: int = 10


class EdgeCommandRequest(BaseModel):
    """A command (with optional media attachments) to deliver to a running bot."""

    command: str
    media: List[str] = Field(default_factory=list)


class EdgeLogsResponse(BaseModel):
    """Recent log lines for one bot."""

    bot_id: str
    logs: List[str] = Field(default_factory=list)


class EdgeMonitorEnsureResponse(BaseModel):
    """Acknowledgement that a runtime monitor is (or is not) attached."""

    ensured: bool = False


class EdgeMonitorPacket(BaseModel):
    """A single sequenced monitor packet captured on the node."""

    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str = ""
    bot_id: str = ""
    # Monotonic sequence number used for incremental polling.
    seq: int = 0
    captured_at: str = ""
    packet: Dict[str, Any] = Field(default_factory=dict)


class EdgeMonitorPacketsResponse(BaseModel):
    """A batch of monitor packets plus the highest sequence number available."""

    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str = ""
    bot_id: str
    latest_seq: int = 0
    packets: List[EdgeMonitorPacket] = Field(default_factory=list)


class EdgeWorkspaceSyncRequest(BaseModel):
    """Overrides applied when syncing a bot workspace onto the node."""

    channels_override: Optional[List[Dict[str, Any]]] = None
    global_delivery_override: Optional[Dict[str, Any]] = None
    runtime_overrides: Optional[Dict[str, Any]] = None


class EdgeMarkdownWriteRequest(BaseModel):
    """Raw markdown content to write into a bot workspace file."""

    content: str = ""


class EdgeNodeResourcesResponse(BaseModel):
    """Node identity plus its current resource report."""

    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str
    display_name: str = ""
    service: str = "dashboard-edge"
    transport_kind: str = "edge"
    runtime_kind: str = "docker"
    core_adapter: str = "nanobot"
    resources: Dict[str, Any] = Field(default_factory=dict)
    reported_at: str = ""


class EdgeNodeSelfResponse(BaseModel):
    """Full self-description of a node: identity, capabilities, resources."""

    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str
    display_name: str
    service: str = "dashboard-edge"
    transport_kind: str = "edge"
    runtime_kind: str = "docker"
    core_adapter: str = "nanobot"
    capabilities: Dict[str, Any] = Field(default_factory=dict)
    resources: Dict[str, Any] = Field(default_factory=dict)
    reported_at: str = ""


class EdgeNodeHeartbeatResponse(EdgeNodeIdentityBase):
    """Heartbeat payload: identity base plus capabilities and resources."""

    capabilities: Dict[str, Any] = Field(default_factory=dict)
    resources: Dict[str, Any] = Field(default_factory=dict)
    reported_at: str = ""
View File

@ -97,3 +97,39 @@ class SystemSettingItem(BaseModel):
sort_order: int = 100
created_at: str
updated_at: str
class ManagedNodePayload(BaseModel):
    """Create/update payload for a managed edge node."""

    node_id: str
    display_name: str = ""
    base_url: str = ""
    enabled: bool = True
    auth_token: str = ""
    transport_kind: str = "edge"
    runtime_kind: str = "docker"
    core_adapter: str = "nanobot"
    # Node-level runtime overrides (empty string means "no override").
    workspace_root: str = ""
    native_command: str = ""
    native_workdir: str = ""
    native_sandbox_mode: str = "inherit"


class ManagedNodeConnectivityResult(BaseModel):
    """Result of probing a managed node's reachability."""

    ok: bool
    status: str
    latency_ms: int = 0
    detail: str = ""
    # Node's self-description payload when the probe succeeded.
    node_self: Optional[Dict[str, Any]] = None


class ManagedNodeNativePreflightResult(BaseModel):
    """Result of checking whether a node can run bots with the native runtime."""

    ok: bool
    status: str
    latency_ms: int = 0
    detail: str = ""
    command: List[str] = Field(default_factory=list)
    workdir: str = ""
    command_available: bool = False
    workdir_exists: bool = False
    runtime_native_supported: bool = False
    node_self: Optional[Dict[str, Any]] = None

View File

@ -0,0 +1,333 @@
import asyncio
import os
import threading
import time
from datetime import datetime, timezone
from typing import Any, Callable, Dict, List, Optional
from fastapi import HTTPException
from sqlmodel import Session
from models.bot import BotInstance
from providers.runtime.base import RuntimeProvider
class BotCommandService:
    """Delivers dashboard commands to a bot runtime and mirrors the results.

    All storage / accounting / broadcast concerns are injected as callables
    so the service stays decoupled from the web framework and persistence
    layers. For edge-transport runtimes, background threads poll monitor
    packets and rebroadcast them to dashboard listeners.
    """

    def __init__(
        self,
        *,
        read_runtime_snapshot: Callable[[BotInstance], Dict[str, Any]],
        normalize_media_list: Callable[[Any, str], List[str]],
        resolve_workspace_path: Callable[[str, Optional[str]], tuple[str, str]],
        is_visual_attachment_path: Callable[[str], bool],
        is_video_attachment_path: Callable[[str], bool],
        create_usage_request: Callable[..., str],
        record_activity_event: Callable[..., None],
        fail_latest_usage: Callable[[Session, str, str], None],
        persist_runtime_packet: Callable[[str, Dict[str, Any]], Optional[int]],
        get_main_loop: Callable[[Any], Any],
        broadcast_packet: Callable[[str, Dict[str, Any], Any], None],
    ) -> None:
        # Injected collaborators, all supplied by the application layer.
        self._read_runtime_snapshot = read_runtime_snapshot
        self._normalize_media_list = normalize_media_list
        self._resolve_workspace_path = resolve_workspace_path
        self._is_visual_attachment_path = is_visual_attachment_path
        self._is_video_attachment_path = is_video_attachment_path
        self._create_usage_request = create_usage_request
        self._record_activity_event = record_activity_event
        self._fail_latest_usage = fail_latest_usage
        self._persist_runtime_packet = persist_runtime_packet
        self._get_main_loop = get_main_loop
        self._broadcast_packet = broadcast_packet
        # One polling thread per (bot_id, request_key); guarded by the lock.
        self._monitor_sync_threads: Dict[tuple[str, str], threading.Thread] = {}
        self._monitor_sync_lock = threading.Lock()
        # Highest monitor seq already forwarded per bot, for de-duplication.
        self._monitor_sync_seq_lock = threading.Lock()
        self._monitor_sync_last_seq: Dict[str, int] = {}

    def execute(
        self,
        *,
        session: Session,
        bot_id: str,
        bot: BotInstance,
        payload: Any,
        runtime_provider: RuntimeProvider,
        app_state: Any,
    ) -> Dict[str, Any]:
        """Validate, record, and deliver one dashboard command to the bot.

        Raises HTTPException(400) on empty input or a missing local
        attachment, and HTTPException(502) when delivery to the runtime
        fails (the failure is persisted and broadcast first).
        """
        runtime_snapshot = self._read_runtime_snapshot(bot)
        attachments = self._normalize_media_list(getattr(payload, "attachments", None), bot_id)
        command = str(getattr(payload, "command", None) or "").strip()
        if not command and not attachments:
            raise HTTPException(status_code=400, detail="Command or attachments is required")
        checked_attachments: List[str] = []
        transport_kind = str(getattr(bot, "transport_kind", "") or "").strip().lower()
        for rel in attachments:
            # Local existence can only be verified for non-edge transports;
            # edge-hosted workspaces are not visible from this process.
            if transport_kind != "edge":
                _, target = self._resolve_workspace_path(bot_id, rel)
                if not os.path.isfile(target):
                    raise HTTPException(status_code=400, detail=f"attachment not found: {rel}")
            checked_attachments.append(rel)
        # Paths as seen from inside the bot runtime container.
        delivery_media = [f"/root/.nanobot/workspace/{p.lstrip('/')}" for p in checked_attachments]
        display_command = command if command else "[attachment message]"
        delivery_command = self._build_delivery_command(command=command, checked_attachments=checked_attachments)
        # Record the usage request before attempting delivery so failures
        # can be attributed to it.
        request_id = self._create_usage_request(
            session,
            bot_id,
            display_command,
            attachments=checked_attachments,
            channel="dashboard",
            metadata={"attachment_count": len(checked_attachments)},
            provider=str(runtime_snapshot.get("llm_provider") or "").strip() or None,
            model=str(runtime_snapshot.get("llm_model") or "").strip() or None,
        )
        self._record_activity_event(
            session,
            bot_id,
            "command_submitted",
            request_id=request_id,
            channel="dashboard",
            detail="command submitted",
            metadata={
                "attachment_count": len(checked_attachments),
                "has_text": bool(command),
            },
        )
        session.commit()
        # Echo the user's command to dashboard listeners.
        outbound_user_packet: Optional[Dict[str, Any]] = None
        if display_command or checked_attachments:
            outbound_user_packet = {
                "type": "USER_COMMAND",
                "channel": "dashboard",
                "text": display_command,
                "media": checked_attachments,
                "request_id": request_id,
            }
            self._persist_runtime_packet(bot_id, outbound_user_packet)
        loop = self._get_main_loop(app_state)
        if loop and loop.is_running() and outbound_user_packet:
            self._broadcast_packet(bot_id, outbound_user_packet, loop)
        # deliver_command returns None on success, an error detail otherwise.
        detail = runtime_provider.deliver_command(bot_id=bot_id, command=delivery_command, media=delivery_media)
        if detail is not None:
            self._fail_latest_usage(session, bot_id, detail or "command delivery failed")
            self._record_activity_event(
                session,
                bot_id,
                "command_failed",
                request_id=request_id,
                channel="dashboard",
                detail=(detail or "command delivery failed")[:400],
            )
            session.commit()
            if loop and loop.is_running():
                self._broadcast_packet(
                    bot_id,
                    {
                        "type": "AGENT_STATE",
                        "channel": "dashboard",
                        "payload": {
                            "state": "ERROR",
                            "action_msg": detail or "command delivery failed",
                        },
                    },
                    loop,
                )
            raise HTTPException(
                status_code=502,
                detail=f"Failed to deliver command to bot dashboard channel{': ' + detail if detail else ''}",
            )
        # Start (or reuse) a background sync of monitor packets for this
        # request; only effective for edge runtime providers.
        self._maybe_sync_edge_monitor_packets(
            runtime_provider=runtime_provider,
            bot_id=bot_id,
            request_id=request_id,
            after_seq=self._resolve_monitor_baseline_seq(runtime_provider, bot_id),
            app_state=app_state,
        )
        return {"success": True}

    def _maybe_sync_edge_monitor_packets(
        self,
        *,
        runtime_provider: RuntimeProvider,
        bot_id: str,
        request_id: str,
        after_seq: int,
        app_state: Any,
    ) -> None:
        """Spawn a daemon polling thread for edge runtimes, at most one per
        (bot, request) key."""
        # NOTE(review): detection is by provider class name — presumably
        # EdgeRuntimeProvider; confirm this stays in sync with the provider.
        provider_name = runtime_provider.__class__.__name__.strip().lower()
        if provider_name != "edgeruntimeprovider":
            return
        bot_key = str(bot_id or "").strip()
        if not bot_key:
            return
        request_key = str(request_id or "").strip() or f"seq:{int(after_seq or 0)}"
        thread_key = (bot_key, request_key)
        with self._monitor_sync_lock:
            existing = self._monitor_sync_threads.get(thread_key)
            if existing and existing.is_alive():
                # A sync for this request is already in flight.
                return
            thread = threading.Thread(
                target=self._sync_edge_monitor_packets,
                args=(runtime_provider, bot_key, request_id, after_seq, app_state),
                daemon=True,
            )
            self._monitor_sync_threads[thread_key] = thread
            thread.start()

    def sync_edge_monitor_packets(
        self,
        *,
        runtime_provider: RuntimeProvider,
        bot_id: str,
        request_id: str,
        app_state: Any,
    ) -> None:
        """Public entry point: sync monitor packets from the very start
        (after_seq=0) for the given request."""
        self._maybe_sync_edge_monitor_packets(
            runtime_provider=runtime_provider,
            bot_id=bot_id,
            request_id=request_id,
            after_seq=0,
            app_state=app_state,
        )

    def _sync_edge_monitor_packets(
        self,
        runtime_provider: RuntimeProvider,
        bot_id: str,
        request_id: str,
        after_seq: int,
        app_state: Any,
    ) -> None:
        """Thread body: poll monitor packets for ~18s, persisting and
        broadcasting each new packet exactly once."""
        loop = self._get_main_loop(app_state)
        last_seq = max(0, int(after_seq or 0))
        # Hard time budget for one sync pass.
        deadline = time.monotonic() + 18.0
        request_id_norm = str(request_id or "").strip()
        try:
            while time.monotonic() < deadline:
                try:
                    rows = runtime_provider.get_monitor_packets(bot_id=bot_id, after_seq=last_seq, limit=200)
                except Exception:
                    # Transient fetch failure: back off and retry.
                    time.sleep(0.5)
                    continue
                for row in rows or []:
                    try:
                        seq = int(row.get("seq") or 0)
                    except Exception:
                        seq = 0
                    packet = dict(row.get("packet") or {})
                    if not packet:
                        continue
                    packet_type = str(packet.get("type") or "").strip().upper()
                    packet_request_id = str(packet.get("request_id") or "").strip()
                    # USER_COMMAND echoes were already emitted by execute().
                    if packet_type == "USER_COMMAND":
                        continue
                    # Only forward reply packets belonging to this request.
                    if packet_type in {"ASSISTANT_MESSAGE", "BUS_EVENT"} and request_id_norm and packet_request_id and packet_request_id != request_id_norm:
                        continue
                    # Skip packets another sync thread already forwarded.
                    if not self._mark_monitor_seq(bot_id, seq):
                        continue
                    last_seq = max(last_seq, seq)
                    self._persist_runtime_packet(bot_id, packet)
                    if loop and loop.is_running():
                        self._broadcast_packet(bot_id, packet, loop)
                time.sleep(0.5)
        finally:
            # Deregister this thread, but only if it is still the one
            # recorded for the key (a newer thread may have replaced it).
            with self._monitor_sync_lock:
                request_key = request_id_norm or f"seq:{int(after_seq or 0)}"
                existing = self._monitor_sync_threads.get((bot_id, request_key))
                if existing is threading.current_thread():
                    self._monitor_sync_threads.pop((bot_id, request_key), None)

    def _resolve_monitor_baseline_seq(self, runtime_provider: RuntimeProvider, bot_id: str) -> int:
        """Return the seq to resume from: the max of the provider's current
        latest seq and the locally recorded high-water mark."""
        try:
            rows = runtime_provider.get_monitor_packets(bot_id=bot_id, after_seq=0, limit=1000)
        except Exception:
            return self._get_monitor_seq(bot_id)
        latest_seq = 0
        for row in rows or []:
            try:
                seq = int(row.get("seq") or 0)
            except Exception:
                seq = 0
            latest_seq = max(latest_seq, seq)
        return max(latest_seq, self._get_monitor_seq(bot_id))

    def _mark_monitor_seq(self, bot_id: str, seq: int) -> bool:
        """Atomically advance the per-bot high-water mark.

        Returns True only when seq is new (strictly greater than the
        recorded mark), making packet forwarding idempotent across threads.
        """
        if seq <= 0:
            return False
        bot_key = str(bot_id or "").strip()
        with self._monitor_sync_seq_lock:
            current = int(self._monitor_sync_last_seq.get(bot_key, 0) or 0)
            if seq <= current:
                return False
            self._monitor_sync_last_seq[bot_key] = seq
            return True

    def _get_monitor_seq(self, bot_id: str) -> int:
        """Read the per-bot monitor seq high-water mark (0 when unknown)."""
        bot_key = str(bot_id or "").strip()
        with self._monitor_sync_seq_lock:
            return int(self._monitor_sync_last_seq.get(bot_key, 0) or 0)

    def _build_delivery_command(self, *, command: str, checked_attachments: List[str]) -> str:
        """Compose the text actually delivered to the bot.

        When attachments are present, appends an attachment list plus
        handling instructions; visual-only attachments get multimodal
        guidance (Chinese prompt text is part of the bot protocol — do not
        alter it).
        """
        display_command = command if command else "[attachment message]"
        delivery_command = display_command
        if not checked_attachments:
            return delivery_command
        attachment_block = "\n".join(f"- {p}" for p in checked_attachments)
        all_visual = all(self._is_visual_attachment_path(p) for p in checked_attachments)
        if all_visual:
            has_video = any(self._is_video_attachment_path(p) for p in checked_attachments)
            media_label = "图片/视频" if has_video else "图片"
            capability_hint = (
                "1) 附件已随请求附带;图片在可用时可直接作为多模态输入理解,视频请按附件路径处理。\n"
                if has_video
                else "1) 附件中的图片已作为多模态输入提供,优先直接理解并回答。\n"
            )
            if command:
                return (
                    f"{command}\n\n"
                    "[Attached files]\n"
                    f"{attachment_block}\n\n"
                    "【附件处理要求】\n"
                    f"{capability_hint}"
                    "2) 若当前模型或接口不支持直接理解该附件,请明确说明后再调用工具解析。\n"
                    "3) 除非用户明确要求,不要先调用工具读取附件文件。\n"
                    "4) 回复语言必须遵循 USER.md若未指定则与用户当前输入语言保持一致。\n"
                    "5) 仅基于可见内容回答;看不清或无法确认的部分请明确说明,不要猜测。"
                )
            return (
                "请先处理已附带的附件列表:\n"
                f"{attachment_block}\n\n"
                f"请直接分析已附带的{media_label}并总结关键信息。\n"
                f"{'图片在可用时可直接作为多模态输入理解,视频请按附件路径处理。' if has_video else ''}\n"
                "若当前模型或接口不支持直接理解该附件,请明确说明后再调用工具解析。\n"
                "回复语言必须遵循 USER.md若未指定则与用户当前输入语言保持一致。\n"
                "仅基于可见内容回答;看不清或无法确认的部分请明确说明,不要猜测。"
            )
        # Mixed/non-visual attachments: only append the list when the user's
        # text does not already mention every attachment path.
        command_has_paths = all(p in command for p in checked_attachments) if command else False
        if command and not command_has_paths:
            return (
                f"{command}\n\n"
                "[Attached files]\n"
                f"{attachment_block}\n\n"
                "Please process the attached file(s) listed above when answering this request.\n"
                "Reply language must follow USER.md. If not specified, use the same language as the user input."
            )
        if not command:
            return (
                "Please process the uploaded file(s) listed below:\n"
                f"{attachment_block}\n\n"
                "Reply language must follow USER.md. If not specified, use the same language as the user input."
            )
        return delivery_command

View File

@ -0,0 +1,181 @@
import json
from dataclasses import dataclass, field
from datetime import datetime
from typing import Any, Dict, List, Optional
from sqlmodel import Session, select
from models.platform import ManagedNodeRecord
@dataclass(frozen=True)
class ManagedNode:
    """Immutable in-memory view of a managed execution node."""

    node_id: str
    display_name: str
    base_url: str = ""
    enabled: bool = True
    auth_token: str = ""
    # Free-form settings (transport_kind / runtime_kind / core_adapter, …).
    metadata: Dict[str, Any] = field(default_factory=dict)
    capabilities: Dict[str, Any] = field(default_factory=dict)
    resources: Dict[str, Any] = field(default_factory=dict)
    # ISO-8601 timestamp of the last successful contact, or None.
    last_seen_at: Optional[str] = None
class NodeRegistryService:
    """In-memory registry of managed nodes, backed by ManagedNodeRecord rows.

    Node ids are normalized to lowercase and used as both the cache key and
    the database primary key. Mutating methods write through to the session
    and then refresh the cache entry.
    """

    def __init__(self) -> None:
        # Cache keyed by normalized (lowercased, stripped) node_id.
        self._nodes: Dict[str, ManagedNode] = {}

    def register_node(self, node: ManagedNode) -> None:
        """Insert or replace a node in the in-memory cache (no DB write)."""
        self._nodes[str(node.node_id or "").strip().lower()] = self._normalize_node(node)

    def list_nodes(self) -> List[ManagedNode]:
        """Return all cached nodes, ordered by node_id."""
        return [self._nodes[key] for key in sorted(self._nodes.keys())]

    def get_node(self, node_id: str) -> Optional[ManagedNode]:
        """Look up a cached node; None when unknown or node_id is empty."""
        key = str(node_id or "").strip().lower()
        if not key:
            return None
        return self._nodes.get(key)

    def require_node(self, node_id: str) -> ManagedNode:
        """Return an enabled node or raise ValueError (missing/disabled)."""
        node = self.get_node(node_id)
        if node is None:
            raise ValueError(f"Managed node not found: {node_id}")
        if not node.enabled:
            raise ValueError(f"Managed node is disabled: {node_id}")
        return node

    def load_from_session(self, session: Session) -> List[ManagedNode]:
        """Rebuild the cache from all persisted rows and return the list."""
        rows = session.exec(select(ManagedNodeRecord)).all()
        self._nodes = {}
        for row in rows:
            self.register_node(self._row_to_node(row))
        return self.list_nodes()

    def upsert_node(self, session: Session, node: ManagedNode) -> ManagedNode:
        """Create or update the persisted row for a node, then re-cache it.

        Raises ValueError (via require_node) if the stored node ends up
        disabled.
        """
        normalized = self._normalize_node(node)
        row = session.get(ManagedNodeRecord, normalized.node_id)
        if row is None:
            row = ManagedNodeRecord(node_id=normalized.node_id)
        metadata = dict(normalized.metadata or {})
        row.display_name = normalized.display_name or normalized.node_id
        row.base_url = normalized.base_url or ""
        row.enabled = bool(normalized.enabled)
        row.auth_token = normalized.auth_token or ""
        # Denormalized copies of the topology hints for queryability.
        row.transport_kind = str(metadata.get("transport_kind") or "edge").strip().lower() or "edge"
        row.runtime_kind = str(metadata.get("runtime_kind") or "docker").strip().lower() or "docker"
        row.core_adapter = str(metadata.get("core_adapter") or "nanobot").strip().lower() or "nanobot"
        # sort_keys keeps the JSON stable so diffs/comparisons are meaningful.
        row.metadata_json = json.dumps(metadata, ensure_ascii=False, sort_keys=True)
        row.capabilities_json = json.dumps(dict(normalized.capabilities or {}), ensure_ascii=False, sort_keys=True)
        row.resources_json = json.dumps(dict(normalized.resources or {}), ensure_ascii=False, sort_keys=True)
        # Keep the previous last_seen_at when the payload has none/unparsable.
        row.last_seen_at = self._parse_datetime(normalized.last_seen_at) or row.last_seen_at
        # NOTE(review): datetime.utcnow() is naive and deprecated in 3.12;
        # switching to aware datetimes would change stored values — confirm
        # before migrating.
        row.updated_at = datetime.utcnow()
        if row.created_at is None:
            row.created_at = datetime.utcnow()
        session.add(row)
        session.commit()
        session.refresh(row)
        self.register_node(self._row_to_node(row))
        return self.require_node(normalized.node_id)

    def mark_node_seen(
        self,
        session: Session,
        *,
        node_id: str,
        display_name: Optional[str] = None,
        capabilities: Optional[Dict[str, Any]] = None,
        resources: Optional[Dict[str, Any]] = None,
    ) -> ManagedNode:
        """Record a heartbeat: update last_seen_at and optional fields.

        Raises ValueError when the node row does not exist.
        """
        row = session.get(ManagedNodeRecord, str(node_id or "").strip().lower())
        if row is None:
            raise ValueError(f"Managed node not found: {node_id}")
        if str(display_name or "").strip():
            row.display_name = str(display_name).strip()
        if capabilities is not None:
            row.capabilities_json = json.dumps(dict(capabilities or {}), ensure_ascii=False, sort_keys=True)
        if resources is not None:
            row.resources_json = json.dumps(dict(resources or {}), ensure_ascii=False, sort_keys=True)
        row.last_seen_at = datetime.utcnow()
        row.updated_at = datetime.utcnow()
        session.add(row)
        session.commit()
        session.refresh(row)
        self.register_node(self._row_to_node(row))
        return self.require_node(str(node_id or "").strip().lower())

    def delete_node(self, session: Session, node_id: str) -> None:
        """Remove a node from the database and the cache.

        Raises ValueError when node_id is empty or unknown.
        """
        key = str(node_id or "").strip().lower()
        if not key:
            raise ValueError("node_id is required")
        row = session.get(ManagedNodeRecord, key)
        if row is None:
            raise ValueError(f"Managed node not found: {node_id}")
        session.delete(row)
        session.commit()
        self._nodes.pop(key, None)

    @staticmethod
    def _normalize_node(node: ManagedNode) -> ManagedNode:
        """Return a copy with trimmed/lowercased id and defensive dict copies."""
        metadata = dict(node.metadata or {})
        normalized = ManagedNode(
            node_id=str(node.node_id or "").strip().lower(),
            display_name=str(node.display_name or node.node_id or "").strip() or str(node.node_id or "").strip().lower(),
            base_url=str(node.base_url or "").strip(),
            enabled=bool(node.enabled),
            auth_token=str(node.auth_token or "").strip(),
            metadata=metadata,
            capabilities=dict(node.capabilities or {}),
            resources=dict(node.resources or {}),
            last_seen_at=str(node.last_seen_at or "").strip() or None,
        )
        return normalized

    @staticmethod
    def _row_to_node(row: ManagedNodeRecord) -> ManagedNode:
        """Convert a DB row to a ManagedNode, tolerating malformed JSON."""
        metadata: Dict[str, Any] = {}
        capabilities: Dict[str, Any] = {}
        try:
            loaded = json.loads(str(row.metadata_json or "{}"))
            if isinstance(loaded, dict):
                metadata = loaded
        except Exception:
            metadata = {}
        try:
            loaded_capabilities = json.loads(str(row.capabilities_json or "{}"))
            if isinstance(loaded_capabilities, dict):
                capabilities = loaded_capabilities
        except Exception:
            capabilities = {}
        resources: Dict[str, Any] = {}
        try:
            loaded_resources = json.loads(str(row.resources_json or "{}"))
            if isinstance(loaded_resources, dict):
                resources = loaded_resources
        except Exception:
            resources = {}
        # Column values win as defaults when the JSON blob lacks the keys.
        metadata.setdefault("transport_kind", str(row.transport_kind or "").strip().lower() or "edge")
        metadata.setdefault("runtime_kind", str(row.runtime_kind or "").strip().lower() or "docker")
        metadata.setdefault("core_adapter", str(row.core_adapter or "").strip().lower() or "nanobot")
        return ManagedNode(
            node_id=str(row.node_id or "").strip().lower(),
            display_name=str(row.display_name or row.node_id or "").strip(),
            base_url=str(row.base_url or "").strip(),
            enabled=bool(row.enabled),
            auth_token=str(row.auth_token or "").strip(),
            metadata=metadata,
            capabilities=capabilities,
            resources=resources,
            # Stored datetimes are naive UTC here; "Z" marks that convention.
            last_seen_at=(row.last_seen_at.isoformat() + "Z") if row.last_seen_at else None,
        )

    @staticmethod
    def _parse_datetime(value: Optional[str]) -> Optional[datetime]:
        """Parse an ISO-8601 string (optionally Z-suffixed); None on failure."""
        raw = str(value or "").strip()
        if not raw:
            return None
        normalized = raw[:-1] if raw.endswith("Z") else raw
        try:
            return datetime.fromisoformat(normalized)
        except Exception:
            return None

View File

@ -1,14 +1,16 @@
import json
import logging
import math
import os
import re
import uuid
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional
from typing import Any, Callable, Dict, List, Optional, Tuple
from sqlalchemy import delete as sql_delete, func
from sqlmodel import Session, select
from clients.edge.errors import log_edge_failure
from core.database import engine
from core.settings import (
BOTS_WORKSPACE_ROOT,
@ -46,6 +48,7 @@ ACTIVITY_EVENT_RETENTION_SETTING_KEY = "activity_event_retention_days"
ACTIVITY_EVENT_PRUNE_INTERVAL = timedelta(minutes=10)
OPERATIONAL_ACTIVITY_EVENT_TYPES = {
"bot_created",
"bot_deployed",
"bot_started",
"bot_stopped",
"bot_warning",
@ -151,6 +154,7 @@ SYSTEM_SETTING_DEFINITIONS: Dict[str, Dict[str, Any]] = {
}
_last_activity_event_prune_at: Optional[datetime] = None
logger = logging.getLogger(__name__)
def _utcnow() -> datetime:
@ -272,6 +276,20 @@ def _calc_dir_size_bytes(path: str) -> int:
return total
def _workspace_usage_bytes(runtime: Dict[str, Any], bot_id: str) -> int:
usage = dict(runtime.get("usage") or {})
value = usage.get("workspace_used_bytes")
if value in {None, 0, "0", ""}:
value = usage.get("container_rw_bytes")
try:
normalized = int(value or 0)
except Exception:
normalized = 0
if normalized > 0:
return normalized
return _calc_dir_size_bytes(_bot_workspace_root(bot_id))
def _read_bot_resources(bot_id: str) -> Dict[str, Any]:
path = os.path.join(_bot_data_root(bot_id), "resources.json")
raw: Dict[str, Any] = {}
@ -959,7 +977,10 @@ def list_activity_events(
return items
def build_platform_overview(session: Session, docker_manager: Any) -> Dict[str, Any]:
def build_platform_overview(
session: Session,
read_runtime: Optional[Callable[[BotInstance], Tuple[str, Dict[str, Any]]]] = None,
) -> Dict[str, Any]:
deleted = prune_expired_activity_events(session, force=False)
if deleted > 0:
session.commit()
@ -978,15 +999,37 @@ def build_platform_overview(session: Session, docker_manager: Any) -> Dict[str,
live_cpu_percent_total = 0.0
live_memory_used_total = 0
live_memory_limit_total = 0
dirty = False
bot_rows: List[Dict[str, Any]] = []
for bot in bots:
enabled = bool(getattr(bot, "enabled", True))
runtime_status = docker_manager.get_bot_status(bot.id) if docker_manager else str(bot.docker_status or "STOPPED")
resources = _read_bot_resources(bot.id)
runtime = docker_manager.get_bot_resource_snapshot(bot.id) if docker_manager else {"usage": {}, "limits": {}, "docker_status": runtime_status}
workspace_root = _bot_workspace_root(bot.id)
workspace_used = _calc_dir_size_bytes(workspace_root)
runtime_status = str(bot.docker_status or "STOPPED").upper()
runtime: Dict[str, Any] = {"usage": {}, "limits": {}, "docker_status": runtime_status}
if callable(read_runtime):
try:
runtime_status, runtime = read_runtime(bot)
except Exception as exc:
log_edge_failure(
logger,
key=f"platform-overview-runtime:{bot.id}",
exc=exc,
message=f"Failed to read platform runtime snapshot for bot_id={bot.id}",
)
runtime_status = str(runtime_status or runtime.get("docker_status") or "STOPPED").upper()
runtime["docker_status"] = runtime_status
if str(bot.docker_status or "").upper() != runtime_status:
bot.docker_status = runtime_status
session.add(bot)
dirty = True
if runtime_status != "RUNNING" and str(bot.current_state or "").upper() not in {"ERROR"}:
next_state = "IDLE"
if str(bot.current_state or "") != next_state:
bot.current_state = next_state
session.add(bot)
dirty = True
workspace_used = _workspace_usage_bytes(runtime, bot.id)
workspace_limit = int(resources["storage_gb"] or 0) * 1024 * 1024 * 1024
configured_cpu_total += float(resources["cpu_cores"] or 0)
@ -1022,6 +1065,9 @@ def build_platform_overview(session: Session, docker_manager: Any) -> Dict[str,
}
)
if dirty:
session.commit()
usage = list_usage(session, limit=20)
events = list_activity_events(session, limit=20)
@ -1064,3 +1110,90 @@ def build_platform_overview(session: Session, docker_manager: Any) -> Dict[str,
"usage": usage,
"events": events,
}
def build_node_resource_overview(
    session: Session,
    *,
    node_id: str,
    read_runtime: Optional[Callable[[BotInstance], Tuple[str, Dict[str, Any]]]] = None,
) -> Dict[str, Any]:
    """Aggregate bot counts and resource usage for one managed node.

    Args:
        session: open DB session; may be committed when bot statuses drift.
        node_id: node identifier (normalized to lowercase for matching).
        read_runtime: optional callable returning (status, runtime snapshot)
            for a bot; failures are logged and the stored status is used.

    Returns:
        Dict with ``node_id``, per-state bot counts, and summed configured
        vs. live resource figures.
    """
    normalized_node_id = str(node_id or "").strip().lower()
    bots = session.exec(select(BotInstance).where(BotInstance.node_id == normalized_node_id)).all()
    running = 0
    stopped = 0
    disabled = 0
    configured_cpu_total = 0.0
    configured_memory_total = 0
    configured_storage_total = 0
    workspace_used_total = 0
    workspace_limit_total = 0
    live_cpu_percent_total = 0.0
    live_memory_used_total = 0
    live_memory_limit_total = 0
    dirty = False
    for bot in bots:
        enabled = bool(getattr(bot, "enabled", True))
        resources = _read_bot_resources(bot.id)
        # Start from the persisted status; prefer the live runtime snapshot.
        runtime_status = str(bot.docker_status or "STOPPED").upper()
        runtime: Dict[str, Any] = {"usage": {}, "limits": {}, "docker_status": runtime_status}
        if callable(read_runtime):
            try:
                runtime_status, runtime = read_runtime(bot)
            except Exception as exc:
                # Rate-limited logging; keep the stored status on failure.
                log_edge_failure(
                    logger,
                    key=f"platform-node-runtime:{normalized_node_id}:{bot.id}",
                    exc=exc,
                    message=f"Failed to read node runtime snapshot for bot_id={bot.id}",
                )
        runtime_status = str(runtime_status or runtime.get("docker_status") or "STOPPED").upper()
        runtime["docker_status"] = runtime_status
        # Write back status drift so the stored row matches reality.
        if str(bot.docker_status or "").upper() != runtime_status:
            bot.docker_status = runtime_status
            session.add(bot)
            dirty = True
        workspace_used = _workspace_usage_bytes(runtime, bot.id)
        workspace_limit = int(resources["storage_gb"] or 0) * 1024 * 1024 * 1024
        configured_cpu_total += float(resources["cpu_cores"] or 0)
        configured_memory_total += int(resources["memory_mb"] or 0) * 1024 * 1024
        configured_storage_total += workspace_limit
        workspace_used_total += workspace_used
        workspace_limit_total += workspace_limit
        live_cpu_percent_total += float((runtime.get("usage") or {}).get("cpu_percent") or 0.0)
        live_memory_used_total += int((runtime.get("usage") or {}).get("memory_bytes") or 0)
        live_memory_limit_total += int((runtime.get("usage") or {}).get("memory_limit_bytes") or 0)
        # Disabled takes precedence over runtime state in the counts.
        if not enabled:
            disabled += 1
        elif runtime_status == "RUNNING":
            running += 1
        else:
            stopped += 1
    if dirty:
        session.commit()
    return {
        "node_id": normalized_node_id,
        "bots": {
            "total": len(bots),
            "running": running,
            "stopped": stopped,
            "disabled": disabled,
        },
        "resources": {
            "configured_cpu_cores": round(configured_cpu_total, 2),
            "configured_memory_bytes": configured_memory_total,
            "configured_storage_bytes": configured_storage_total,
            "live_cpu_percent": round(live_cpu_percent_total, 2),
            "live_memory_used_bytes": live_memory_used_total,
            "live_memory_limit_bytes": live_memory_limit_total,
            "workspace_used_bytes": workspace_used_total,
            "workspace_limit_bytes": workspace_limit_total,
        },
    }

View File

@ -0,0 +1,133 @@
from datetime import datetime
from typing import Any, Callable, Dict
from sqlmodel import Session, select
from models.bot import BotInstance, BotMessage
from providers.runtime.base import RuntimeProvider
from services.bot_command_service import BotCommandService
class RuntimeService:
    """Facade over runtime-provider operations for a bot.

    Wraps the per-bot runtime provider (docker or edge) with usage
    accounting, activity events, and cache invalidation, all injected as
    callables so this class stays storage- and framework-agnostic.
    """

    def __init__(
        self,
        *,
        command_service: BotCommandService,
        resolve_runtime_provider: Callable[[Any, BotInstance], RuntimeProvider],
        clear_bot_sessions: Callable[[str], int],
        clear_dashboard_direct_session_file: Callable[[str], Dict[str, Any]],
        invalidate_bot_detail_cache: Callable[[str], None],
        invalidate_bot_messages_cache: Callable[[str], None],
        record_activity_event: Callable[..., None],
    ) -> None:
        # Injected collaborators from the application layer.
        self._command_service = command_service
        self._resolve_runtime_provider = resolve_runtime_provider
        self._clear_bot_sessions = clear_bot_sessions
        self._clear_dashboard_direct_session_file = clear_dashboard_direct_session_file
        self._invalidate_bot_detail_cache = invalidate_bot_detail_cache
        self._invalidate_bot_messages_cache = invalidate_bot_messages_cache
        self._record_activity_event = record_activity_event

    async def start_bot(self, *, app_state: Any, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Start the bot runtime and invalidate its detail cache."""
        result = await self._resolve_runtime_provider(app_state, bot).start_bot(session=session, bot=bot)
        self._invalidate_bot_detail_cache(str(bot.id or ""))
        return result

    def stop_bot(self, *, app_state: Any, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Stop the bot runtime and invalidate its detail cache."""
        result = self._resolve_runtime_provider(app_state, bot).stop_bot(session=session, bot=bot)
        self._invalidate_bot_detail_cache(str(bot.id or ""))
        return result

    def send_command(
        self,
        *,
        app_state: Any,
        session: Session,
        bot_id: str,
        bot: BotInstance,
        payload: Any,
    ) -> Dict[str, Any]:
        """Delegate command delivery to the BotCommandService."""
        return self._command_service.execute(
            session=session,
            bot_id=bot_id,
            bot=bot,
            payload=payload,
            runtime_provider=self._resolve_runtime_provider(app_state, bot),
            app_state=app_state,
        )

    def get_logs(self, *, app_state: Any, bot: BotInstance, tail: int = 300) -> Dict[str, Any]:
        """Return the last `tail` log lines from the bot runtime."""
        return {
            "bot_id": bot.id,
            "logs": self._resolve_runtime_provider(app_state, bot).get_recent_logs(bot_id=bot.id, tail=tail),
        }

    def ensure_monitor(self, *, app_state: Any, bot: BotInstance) -> bool:
        """Ensure the runtime's monitor channel is attached; True on success."""
        return bool(self._resolve_runtime_provider(app_state, bot).ensure_monitor(bot_id=bot.id))

    def sync_edge_monitor_packets(self, *, app_state: Any, bot: BotInstance, request_id: str) -> None:
        """Kick off a background monitor-packet sync for an edge runtime."""
        runtime_provider = self._resolve_runtime_provider(app_state, bot)
        self._command_service.sync_edge_monitor_packets(
            runtime_provider=runtime_provider,
            bot_id=str(bot.id or "").strip(),
            request_id=str(request_id or "").strip(),
            app_state=app_state,
        )

    def clear_messages(self, *, app_state: Any, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Delete all stored messages and sessions for the bot.

        Also resets a running runtime's conversation (best-effort "/new")
        and returns deletion counts.
        """
        bot_id = str(bot.id or "").strip()
        rows = session.exec(select(BotMessage).where(BotMessage.bot_id == bot_id)).all()
        deleted = 0
        for row in rows:
            session.delete(row)
            deleted += 1
        cleared_sessions = self._clear_bot_sessions(bot_id)
        self._reset_running_runtime_session(app_state=app_state, bot=bot)
        bot.last_action = ""
        bot.current_state = "IDLE"
        bot.updated_at = datetime.utcnow()
        session.add(bot)
        self._record_activity_event(
            session,
            bot_id,
            "history_cleared",
            channel="system",
            detail=f"Cleared {deleted} stored messages",
            metadata={"deleted_messages": deleted, "cleared_sessions": cleared_sessions},
        )
        session.commit()
        # Invalidate caches only after the commit so readers see fresh data.
        self._invalidate_bot_detail_cache(bot_id)
        self._invalidate_bot_messages_cache(bot_id)
        return {"bot_id": bot_id, "deleted": deleted, "cleared_sessions": cleared_sessions}

    def clear_dashboard_direct_session(self, *, app_state: Any, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Remove the dashboard_direct session file and reset a running runtime."""
        bot_id = str(bot.id or "").strip()
        result = self._clear_dashboard_direct_session_file(bot_id)
        self._reset_running_runtime_session(app_state=app_state, bot=bot)
        bot.updated_at = datetime.utcnow()
        session.add(bot)
        self._record_activity_event(
            session,
            bot_id,
            "dashboard_session_cleared",
            channel="dashboard",
            detail="Cleared dashboard_direct session file",
            metadata={"session_file": result["path"], "previously_existed": result["existed"]},
        )
        session.commit()
        self._invalidate_bot_detail_cache(bot_id)
        return {"bot_id": bot_id, "cleared": True, "session_file": result["path"], "previously_existed": result["existed"]}

    def _reset_running_runtime_session(self, *, app_state: Any, bot: BotInstance) -> None:
        """Best-effort: send "/new" to a RUNNING runtime to reset its session.

        Delivery failures are deliberately swallowed — clearing stored
        history must succeed even when the runtime is unreachable.
        """
        if not self._is_runtime_running(bot):
            return
        try:
            self._resolve_runtime_provider(app_state, bot).deliver_command(bot_id=str(bot.id), command="/new")
        except Exception:
            pass

    @staticmethod
    def _is_runtime_running(bot: BotInstance) -> bool:
        """True when the bot's runtime (or docker) status reads RUNNING."""
        runtime_status = str(getattr(bot, "runtime_status", None) or getattr(bot, "docker_status", None) or "").upper()
        return runtime_status == "RUNNING"

View File

@ -0,0 +1,68 @@
from typing import Any, Dict, List, Optional
from fastapi import Request, UploadFile
from models.bot import BotInstance
from providers.selector import get_workspace_provider
class WorkspaceService:
    """Thin facade over the per-bot workspace provider.

    Every method resolves the provider for the given bot via
    ``get_workspace_provider`` and forwards the call unchanged.
    """

    def list_tree(
        self,
        *,
        app_state: Any,
        bot: BotInstance,
        path: Optional[str] = None,
        recursive: bool = False,
    ) -> Dict[str, Any]:
        """List the workspace directory tree for a bot."""
        provider = get_workspace_provider(app_state, bot)
        return provider.list_tree(bot_id=bot.id, path=path, recursive=recursive)

    def read_file(
        self,
        *,
        app_state: Any,
        bot: BotInstance,
        path: str,
        max_bytes: int = 200000,
    ) -> Dict[str, Any]:
        """Read a workspace file, capped at ``max_bytes``."""
        provider = get_workspace_provider(app_state, bot)
        return provider.read_file(bot_id=bot.id, path=path, max_bytes=max_bytes)

    def write_markdown(
        self,
        *,
        app_state: Any,
        bot: BotInstance,
        path: str,
        content: str,
    ) -> Dict[str, Any]:
        """Write markdown content to a workspace file."""
        provider = get_workspace_provider(app_state, bot)
        return provider.write_markdown(bot_id=bot.id, path=path, content=content)

    async def upload_files(
        self,
        *,
        app_state: Any,
        bot: BotInstance,
        files: List[UploadFile],
        path: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Upload one or more files into the bot workspace."""
        provider = get_workspace_provider(app_state, bot)
        return await provider.upload_files(bot_id=bot.id, files=files, path=path)

    def serve_file(
        self,
        *,
        app_state: Any,
        bot: BotInstance,
        path: str,
        download: bool,
        request: Request,
        public: bool = False,
        redirect_html_to_raw: bool = False,
    ):
        """Serve a workspace file as an HTTP response (inline or download)."""
        provider = get_workspace_provider(app_state, bot)
        return provider.serve_file(
            bot_id=bot.id,
            path=path,
            download=download,
            request=request,
            public=public,
            redirect_html_to_raw=redirect_html_to_raw,
        )

View File

@ -0,0 +1,12 @@
# Container image for the dashboard-edge service.
FROM python:3.12-slim

WORKDIR /app

# Install dependencies first so this layer is cached across source changes.
COPY requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir -r /app/requirements.txt

COPY . /app

# Default edge service port (EDGE_PORT).
EXPOSE 8010
CMD ["python", "main.py"]

View File

@ -0,0 +1,59 @@
# dashboard-edge
`dashboard-edge` is the execution-plane service for Dashboard Nanobot.
It is designed to run on every managed node and bridge Dashboard control requests to local Docker or native Bot runtimes.
## Local development
```bash
cd dashboard-edge
python -m venv .venv
source .venv/bin/activate
pip install -r requirements.txt
python main.py
```
If your configured pip mirror is unavailable, install with the official index:
```bash
pip install -r requirements.txt -i https://pypi.org/simple
```
Default server:
- Host: `0.0.0.0`
- Port: `8010`
- Dev reload: enabled by default in `./scripts/dev-edge.sh`
Native local development:
```bash
cd /path/to/dashboard-nanobot
bash scripts/dev-edge-native.sh
```
By default, the native launcher will auto-detect:
- `engines/nanobot-v0.1.4-post5/.venv/bin/python -m nanobot.cli.commands gateway`
when that virtualenv exists. You can still override it with `EDGE_NATIVE_COMMAND`.
Environment variables:
- `EDGE_HOST`
- `EDGE_PORT`
- `EDGE_RELOAD`
- `EDGE_AUTH_TOKEN`
- `EDGE_NODE_ID`
- `EDGE_NODE_NAME`
- `EDGE_BOTS_WORKSPACE_ROOT`
- `EDGE_BASE_IMAGE`
- `EDGE_RUNTIME_KIND`
- `EDGE_NATIVE_COMMAND`
- `EDGE_NATIVE_DASHBOARD_URL`
- `EDGE_NATIVE_DASHBOARD_HOST`
- `EDGE_NATIVE_DASHBOARD_PORT`
- `EDGE_NATIVE_WORKDIR`
- `EDGE_UPLOAD_MAX_MB`
- `EDGE_ALLOWED_ATTACHMENT_EXTENSIONS`

View File

@ -0,0 +1 @@
# dashboard-edge application package.

View File

@ -0,0 +1 @@
# API package for dashboard-edge.

View File

@ -0,0 +1,220 @@
from typing import List, Optional
from fastapi import APIRouter, Depends, File, HTTPException, Query, Request, UploadFile
from app.dependencies.auth import require_edge_auth
from app.schemas.edge import (
EdgeCommandRequest,
EdgeLogsResponse,
EdgeNativePreflightRequest,
EdgeNativePreflightResponse,
EdgeNodeHeartbeatResponse,
EdgeMonitorPacketsResponse,
EdgeMarkdownWriteRequest,
EdgeMonitorEnsureResponse,
EdgeNodeResourcesResponse,
EdgeNodeSelfResponse,
EdgeStateResponse,
EdgeStateWriteRequest,
EdgeStatusResponse,
EdgeWorkspaceSyncRequest,
)
from app.schemas.runtime import EdgeStartBotRequest
from app.services import provision_service as provision_service_module
from app.services import state_store_service as state_store_service_module
from app.services.runtime_service import edge_runtime_service
from app.services import workspace_service as workspace_service_module
# All edge endpoints require the shared edge auth token.
router = APIRouter(dependencies=[Depends(require_edge_auth)])
@router.get("/api/edge/node/self", response_model=EdgeNodeSelfResponse)
def get_edge_node_self():
    """Describe this edge node (identity and topology hints)."""
    identity = edge_runtime_service.get_node_identity()
    return identity
@router.get("/api/edge/node/resources", response_model=EdgeNodeResourcesResponse)
def get_edge_node_resources():
    """Report this node's resource summary."""
    summary = edge_runtime_service.get_node_resource_summary()
    return summary
@router.post("/api/edge/node/heartbeat", response_model=EdgeNodeHeartbeatResponse)
def heartbeat_edge_node():
    """Record a heartbeat for this node and return the heartbeat response."""
    beat = edge_runtime_service.heartbeat()
    return beat
@router.post("/api/edge/runtime/native/preflight", response_model=EdgeNativePreflightResponse)
def native_preflight(payload: EdgeNativePreflightRequest):
    """Validate a native-runtime launch command/workdir before starting a bot."""
    # Blank strings are treated the same as "not provided".
    command = str(payload.native_command or "").strip() or None
    workdir = str(payload.native_workdir or "").strip() or None
    return edge_runtime_service.native_preflight(native_command=command, native_workdir=workdir)
@router.post("/api/edge/bots/{bot_id}/start", response_model=EdgeStatusResponse)
async def start_bot(bot_id: str, payload: EdgeStartBotRequest):
    """Start (or reuse) the runtime for *bot_id* using the given launch payload."""
    status = await edge_runtime_service.start_bot(bot_id=bot_id, payload=payload)
    return status
@router.post("/api/edge/bots/{bot_id}/stop", response_model=EdgeStatusResponse)
def stop_bot(bot_id: str):
    """Stop the runtime for *bot_id*."""
    status = edge_runtime_service.stop_bot(bot_id=bot_id)
    return status
@router.post("/api/edge/bots/{bot_id}/command", response_model=EdgeStatusResponse)
def send_command(bot_id: str, payload: EdgeCommandRequest):
    """Forward a chat/command payload to the bot's runtime."""
    status = edge_runtime_service.send_command(bot_id=bot_id, payload=payload)
    return status
@router.post("/api/edge/bots/{bot_id}/monitor/ensure", response_model=EdgeMonitorEnsureResponse)
def ensure_monitor(bot_id: str):
    """Make sure a log-monitor is attached to the bot's runtime."""
    result = edge_runtime_service.ensure_monitor(bot_id=bot_id)
    return result
@router.get("/api/edge/bots/{bot_id}/monitor/packets", response_model=EdgeMonitorPacketsResponse)
def get_monitor_packets(bot_id: str, after_seq: int = 0, limit: int = 200):
    """Page through monitor packets emitted after sequence number *after_seq*."""
    packets = edge_runtime_service.get_monitor_packets(bot_id=bot_id, after_seq=after_seq, limit=limit)
    return packets
@router.get("/api/edge/bots/{bot_id}/logs", response_model=EdgeLogsResponse)
def get_logs(bot_id: str, tail: int = Query(300, ge=1, le=2000)):
    """Return up to *tail* recent log lines for the bot (bounded 1..2000)."""
    logs = edge_runtime_service.get_recent_logs(bot_id=bot_id, tail=tail)
    return logs
@router.get("/api/edge/bots/{bot_id}/runtime/status", response_model=EdgeStatusResponse)
def get_runtime_status(bot_id: str):
    """Return the bot runtime's current status."""
    status = edge_runtime_service.get_runtime_status(bot_id=bot_id)
    return status
@router.get("/api/edge/bots/{bot_id}/resources")
def get_resource_snapshot(bot_id: str):
    """Return the bot's resource limits/usage snapshot (free-form dict)."""
    snapshot = edge_runtime_service.get_resource_snapshot(bot_id=bot_id)
    return snapshot
@router.post("/api/edge/bots/{bot_id}/workspace/sync", response_model=EdgeStatusResponse)
def sync_workspace(bot_id: str, payload: EdgeWorkspaceSyncRequest):
    """Synchronize the bot's workspace content via the provision service."""
    service = provision_service_module.edge_provision_service
    return service.sync_bot_workspace(bot_id=bot_id, payload=payload)
@router.get("/api/edge/bots/{bot_id}/state/{state_key}", response_model=EdgeStateResponse)
def read_bot_state(bot_id: str, state_key: str, workspace_root: str | None = None):
    """Read one persisted state document for the bot, keyed by *state_key*."""
    store = state_store_service_module.edge_state_store_service
    return store.read_state(bot_id=bot_id, state_key=state_key, workspace_root=workspace_root)
@router.put("/api/edge/bots/{bot_id}/state/{state_key}", response_model=EdgeStateResponse)
def write_bot_state(bot_id: str, state_key: str, payload: EdgeStateWriteRequest):
    """Persist one state document for the bot, keyed by *state_key*."""
    store = state_store_service_module.edge_state_store_service
    # Blank workspace_root falls back to the service's configured root.
    root_override = str(payload.workspace_root or "").strip() or None
    return store.write_state(
        bot_id=bot_id,
        state_key=state_key,
        data=dict(payload.data or {}),
        workspace_root=root_override,
    )
@router.get("/api/edge/bots/{bot_id}/workspace/tree")
def list_workspace_tree(
    bot_id: str,
    path: str | None = None,
    recursive: bool = False,
    workspace_root: str | None = None,
):
    """List files/directories under *path* in the bot workspace."""
    service = workspace_service_module.edge_workspace_service
    return service.list_tree(
        bot_id=bot_id,
        path=path,
        recursive=recursive,
        workspace_root=workspace_root,
    )
@router.get("/api/edge/bots/{bot_id}/workspace/file")
def read_workspace_file(
    bot_id: str,
    path: str = Query(...),
    max_bytes: int = Query(200000, ge=4096, le=1000000),
    workspace_root: str | None = None,
):
    """Read a workspace file's content, capped at *max_bytes*."""
    service = workspace_service_module.edge_workspace_service
    return service.read_file(
        bot_id=bot_id,
        path=path,
        max_bytes=max_bytes,
        workspace_root=workspace_root,
    )
@router.put("/api/edge/bots/{bot_id}/workspace/file/markdown")
def write_workspace_markdown(
    bot_id: str,
    path: str = Query(...),
    # Explicit Optional: the bare `EdgeMarkdownWriteRequest = None` implicit-Optional
    # form is invalid under modern typing/Pydantic v2. The body stays optional at the
    # schema level so a missing payload is reported as 400 (below) rather than 422.
    payload: Optional[EdgeMarkdownWriteRequest] = None,
    workspace_root: str | None = None,
):
    """Overwrite a markdown file at *path* inside the bot workspace.

    Raises HTTP 400 when no request body is supplied.
    """
    if payload is None:
        raise HTTPException(status_code=400, detail="markdown payload is required")
    return workspace_service_module.edge_workspace_service.write_markdown(
        bot_id=bot_id,
        path=path,
        content=payload.content,
        workspace_root=workspace_root,
    )
@router.post("/api/edge/bots/{bot_id}/workspace/upload")
async def upload_workspace_files(
    bot_id: str,
    files: List[UploadFile] = File(...),
    path: Optional[str] = None,
    workspace_root: str | None = None,
):
    """Upload one or more files into the bot workspace under *path*."""
    service = workspace_service_module.edge_workspace_service
    result = await service.upload_files(
        bot_id=bot_id,
        files=files,
        path=path,
        workspace_root=workspace_root,
    )
    return result
@router.get("/api/edge/bots/{bot_id}/workspace/download")
def download_workspace_file(
    bot_id: str,
    path: str = Query(...),
    download: bool = False,
    request: Request = None,
    workspace_root: str | None = None,
):
    """Serve a workspace file inline, or as an attachment when *download* is set."""
    service = workspace_service_module.edge_workspace_service
    return service.serve_file(
        bot_id=bot_id,
        path=path,
        download=download,
        request=request,
        workspace_root=workspace_root,
    )
@router.get("/api/edge/bots/{bot_id}/workspace/raw/{path:path}")
def raw_workspace_file(
    bot_id: str,
    path: str,
    download: bool = False,
    request: Request = None,
    workspace_root: str | None = None,
):
    """Path-style variant of the download endpoint; same serving behavior."""
    service = workspace_service_module.edge_workspace_service
    return service.serve_file(
        bot_id=bot_id,
        path=path,
        download=download,
        request=request,
        workspace_root=workspace_root,
    )
@router.post("/api/edge/bots/{bot_id}/workspace/purge", response_model=EdgeStatusResponse)
def purge_workspace(bot_id: str, workspace_root: str | None = None):
    """Delete the bot's workspace; report whether anything was actually removed."""
    outcome = workspace_service_module.edge_workspace_service.purge_bot_workspace(
        bot_id=bot_id,
        workspace_root=workspace_root,
    )
    deleted = bool(outcome.get("deleted"))
    return EdgeStatusResponse(status="deleted" if deleted else "not_found")

View File

@ -0,0 +1 @@
# Core package for dashboard-edge.

View File

@ -0,0 +1,86 @@
import os
import re
from pathlib import Path
from dotenv import load_dotenv
load_dotenv()
# Path layout: this file lives at <edge-root>/app/core/settings.py, so parents[2]
# is the dashboard-edge root and its parent the surrounding project checkout.
EDGE_ROOT = Path(__file__).resolve().parents[2]
PROJECT_ROOT = EDGE_ROOT.parent
# Bind address for the edge API server.
EDGE_HOST = str(os.getenv("EDGE_HOST", "0.0.0.0") or "0.0.0.0").strip() or "0.0.0.0"
try:
    EDGE_PORT = int(os.getenv("EDGE_PORT", "8010"))
except Exception:
    EDGE_PORT = 8010
EDGE_PORT = max(1, min(EDGE_PORT, 65535))  # clamp into the valid TCP port range
EDGE_RELOAD = str(os.getenv("EDGE_RELOAD", "true")).strip().lower() in {"1", "true", "yes", "on"}
# Shared secret; an empty value disables auth (see app.dependencies.auth).
EDGE_AUTH_TOKEN = str(os.getenv("EDGE_AUTH_TOKEN", "") or "").strip()
# Node identity reported to the dashboard; id is normalized to lowercase.
EDGE_NODE_ID = str(os.getenv("EDGE_NODE_ID", "local") or "local").strip().lower() or "local"
EDGE_NODE_NAME = str(os.getenv("EDGE_NODE_NAME", "Local Node") or "Local Node").strip() or "Local Node"
# Default docker image for bot containers.
EDGE_BASE_IMAGE = str(os.getenv("EDGE_BASE_IMAGE", "nanobot-base:v0.1.4") or "nanobot-base:v0.1.4").strip()
EDGE_LOG_LEVEL = str(os.getenv("EDGE_LOG_LEVEL", "warning") or "warning").strip().lower() or "warning"
EDGE_ACCESS_LOG = str(os.getenv("EDGE_ACCESS_LOG", "false")).strip().lower() in {"1", "true", "yes", "on"}
def _default_native_command() -> str:
    """Resolve the native gateway launch command.

    Order of preference: explicit EDGE_NATIVE_COMMAND env var, then the bundled
    engine virtualenv's python (when present and executable), then a plain
    `nanobot gateway` resolved from PATH.
    """
    configured = str(os.getenv("EDGE_NATIVE_COMMAND", "") or "").strip()
    if configured:
        return configured
    bundled_python = PROJECT_ROOT / "engines" / "nanobot-v0.1.4-post5" / ".venv" / "bin" / "python"
    if bundled_python.is_file() and os.access(bundled_python, os.X_OK):
        return f"{bundled_python} -m nanobot.cli.commands gateway"
    return "nanobot gateway"
EDGE_NATIVE_COMMAND = _default_native_command()
# Default chat endpoint for the native gateway's dashboard channel.
# NOTE(review): assumed to match the native runtime's listen address — confirm
# against the native runtime manager.
EDGE_NATIVE_DASHBOARD_URL = str(
    os.getenv("EDGE_NATIVE_DASHBOARD_URL", "http://127.0.0.1:9000/chat") or "http://127.0.0.1:9000/chat"
).strip() or "http://127.0.0.1:9000/chat"
EDGE_NATIVE_WORKDIR = str(os.getenv("EDGE_NATIVE_WORKDIR", "") or "").strip()
# Root directory holding per-bot workspaces; normalized to an absolute path.
EDGE_BOTS_WORKSPACE_ROOT = str(
    Path(os.getenv("EDGE_BOTS_WORKSPACE_ROOT", str(PROJECT_ROOT / "workspace" / "bots"))).expanduser().resolve()
)
def _env_int(name: str, default: int, min_value: int, max_value: int) -> int:
    """Read an integer env var, clamped to [min_value, max_value].

    A missing variable yields *default* unclamped; an unparsable value falls
    back to *default* and is then clamped like any parsed value.
    """
    raw = os.getenv(name)
    if raw is None:
        return default
    try:
        parsed = int(str(raw).strip())
    except Exception:
        parsed = default
    return max(min_value, min(max_value, parsed))
def _normalize_extension(raw: str) -> str:
    """Canonicalize one extension token to lowercase ".ext" form.

    Accepts "py", ".py" or "*.py"; returns "" for anything that does not match
    the allowed extension shape (dot + alnum + up to 31 trailing chars).
    """
    token = str(raw or "").strip().lower()
    if not token:
        return ""
    if token.startswith("*."):
        token = token[1:]
    if not token.startswith("."):
        token = f".{token}"
    return token if re.fullmatch(r"\.[a-z0-9][a-z0-9._+-]{0,31}", token) else ""
def _env_extensions(name: str) -> tuple[str, ...]:
    """Parse an env var into a deduplicated tuple of normalized extensions.

    Tokens may be separated by commas, semicolons or whitespace; invalid tokens
    are dropped. A missing variable yields the empty tuple.
    """
    raw = os.getenv(name)
    if raw is None:
        return ()
    collected: list[str] = []
    for token in re.split(r"[,;\s]+", str(raw)):
        normalized = _normalize_extension(token)
        if normalized and normalized not in collected:
            collected.append(normalized)
    return tuple(collected)
# Upload size cap in megabytes (clamped to 1..2048) and an optional
# allow-list of attachment extensions (empty tuple means "no restriction"
# at this layer; enforcement happens in the workspace service).
EDGE_UPLOAD_MAX_MB = _env_int("EDGE_UPLOAD_MAX_MB", 100, 1, 2048)
EDGE_ALLOWED_ATTACHMENT_EXTENSIONS = _env_extensions("EDGE_ALLOWED_ATTACHMENT_EXTENSIONS")

View File

@ -0,0 +1 @@
# Dependency package for dashboard-edge.

View File

@ -0,0 +1,14 @@
from fastapi import Header, HTTPException
from app.core.settings import EDGE_AUTH_TOKEN
# HTTP header carrying the shared edge token (matches the Header parameter name below).
EDGE_AUTH_HEADER = "x-dashboard-edge-token"
def require_edge_auth(x_dashboard_edge_token: str | None = Header(default=None)) -> None:
    """FastAPI dependency rejecting requests without the configured edge token.

    When no token is configured, auth is disabled and every request passes.
    Raises HTTP 401 on mismatch.
    """
    expected = str(EDGE_AUTH_TOKEN or "").strip()
    if not expected:
        return
    provided = str(x_dashboard_edge_token or "").strip()
    if provided != expected:
        raise HTTPException(status_code=401, detail="Invalid dashboard-edge token")

View File

@ -0,0 +1,30 @@
from fastapi import FastAPI
from app.api.router import router as edge_router
from app.core.settings import EDGE_BOTS_WORKSPACE_ROOT, EDGE_NODE_ID, EDGE_NODE_NAME
from app.services.provision_service import EdgeProvisionService
from app.services.runtime_service import edge_runtime_service
from app.services.state_store_service import EdgeStateStoreService
from app.services.workspace_service import EdgeWorkspaceService
app = FastAPI(title="Dashboard Edge API")
app.include_router(edge_router)
# Expose the runtime service on app.state so other layers can reach it via the app.
app.state.edge_runtime_service = edge_runtime_service
# Import the service modules themselves (not their attributes): their module-level
# singletons are rebound below, and consumers look them up through the module.
from app.services import provision_service as provision_service_module
from app.services import state_store_service as state_store_service_module
from app.services import workspace_service as workspace_service_module
# Rebind each singleton with the configured workspace root at startup.
provision_service_module.edge_provision_service = EdgeProvisionService(host_data_root=EDGE_BOTS_WORKSPACE_ROOT)
state_store_service_module.edge_state_store_service = EdgeStateStoreService(host_data_root=EDGE_BOTS_WORKSPACE_ROOT)
workspace_service_module.edge_workspace_service = EdgeWorkspaceService(host_data_root=EDGE_BOTS_WORKSPACE_ROOT)
@app.get("/api/edge/health")
def healthcheck():
    """Liveness probe reporting this edge node's identity."""
    payload = {"status": "ok", "service": "dashboard-edge"}
    payload["node_id"] = EDGE_NODE_ID
    payload["node_name"] = EDGE_NODE_NAME
    return payload

View File

@ -0,0 +1 @@
# Runtime package for dashboard-edge.

View File

@ -0,0 +1,62 @@
from abc import ABC, abstractmethod
from typing import Any, Callable, Dict, List, Optional
class EdgeRuntimeBackend(ABC):
    """Abstract contract for an edge runtime backend (e.g. docker, native)."""
    # Discriminator reported to callers; subclasses override with their kind.
    runtime_kind: str = "docker"
    @abstractmethod
    def capabilities(self) -> Dict[str, Any]:
        """Describe supported features (runtime kinds, workspace ops, monitoring)."""
        raise NotImplementedError
    @abstractmethod
    def has_image(self, tag: str) -> bool:
        """Return True when the backend can launch from image *tag*."""
        raise NotImplementedError
    @abstractmethod
    def start_bot(
        self,
        bot_id: str,
        image_tag: Optional[str] = None,
        env_vars: Optional[Dict[str, str]] = None,
        workspace_root: Optional[str] = None,
        native_command: Optional[str] = None,
        native_workdir: Optional[str] = None,
        cpu_cores: Optional[float] = None,
        memory_mb: Optional[int] = None,
        storage_gb: Optional[int] = None,
        on_state_change: Optional[Callable[[str, dict], None]] = None,
    ) -> bool:
        """Start the bot's runtime; return True on success.

        *on_state_change*, when given, receives (bot_id, packet) callbacks from
        the backend's log monitor.
        """
        raise NotImplementedError
    @abstractmethod
    def ensure_monitor(self, bot_id: str, on_state_change: Callable[[str, dict], None]) -> bool:
        """Attach a log monitor to an already-running bot; True when monitoring."""
        raise NotImplementedError
    @abstractmethod
    def stop_bot(self, bot_id: str) -> bool:
        """Stop the bot's runtime; return True when something was stopped."""
        raise NotImplementedError
    @abstractmethod
    def get_bot_status(self, bot_id: str) -> str:
        """Return a coarse status string (implementations use RUNNING/STOPPED)."""
        raise NotImplementedError
    @abstractmethod
    def get_bot_resource_snapshot(self, bot_id: str) -> Dict[str, Any]:
        """Return the bot's resource limits and current usage."""
        raise NotImplementedError
    @abstractmethod
    def get_recent_logs(self, bot_id: str, tail: int = 300) -> List[str]:
        """Return up to *tail* recent log lines."""
        raise NotImplementedError
    @abstractmethod
    def send_command(self, bot_id: str, command: str, media: Optional[List[str]] = None) -> bool:
        """Deliver a chat command (plus optional media paths) to the bot."""
        raise NotImplementedError
    @abstractmethod
    def get_last_delivery_error(self, bot_id: str) -> str:
        """Return the last send_command failure reason, or "" when none."""
        raise NotImplementedError
    @abstractmethod
    def parse_monitor_packet(self, line: str) -> Optional[Dict[str, Any]]:
        """Translate one raw log line into a structured monitor packet, or None."""
        raise NotImplementedError

View File

@ -0,0 +1,710 @@
import base64
import codecs
import json
import os
import re
import threading
import time
from typing import Any, Callable, Dict, List, Optional, Tuple
import docker
import httpx
from app.runtime.base import EdgeRuntimeBackend
class EdgeDockerManager(EdgeRuntimeBackend):
runtime_kind = "docker"
def __init__(self, host_data_root: str, base_image: str = "nanobot-base:v0.1.4") -> None:
try:
self.client = docker.from_env(timeout=6)
self.client.version()
print("✅ Edge Docker engine connected")
except Exception as exc:
self.client = None
print(f"⚠️ Edge Docker engine unavailable: {exc}")
self.host_data_root = host_data_root
self.base_image = base_image
self.active_monitors: Dict[str, threading.Thread] = {}
self._last_delivery_error: Dict[str, str] = {}
def capabilities(self) -> Dict[str, Any]:
return {
"protocol": {"version": "1"},
"runtime": {"docker": bool(self.client is not None), "native": False},
"workspace": {
"tree": True,
"read_file": True,
"write_markdown": True,
"upload_files": True,
"serve_file": True,
},
"monitor": {"logs": True, "ensure": True},
}
@staticmethod
def _normalize_resource_limits(
cpu_cores: Optional[float],
memory_mb: Optional[int],
storage_gb: Optional[int],
) -> Tuple[float, int, int]:
try:
cpu = float(cpu_cores) if cpu_cores is not None else 1.0
except Exception:
cpu = 1.0
try:
memory = int(memory_mb) if memory_mb is not None else 1024
except Exception:
memory = 1024
try:
storage = int(storage_gb) if storage_gb is not None else 10
except Exception:
storage = 10
if cpu < 0:
cpu = 1.0
if memory < 0:
memory = 1024
if storage < 0:
storage = 10
cpu = 0.0 if cpu == 0 else min(16.0, max(0.1, cpu))
memory = 0 if memory == 0 else min(65536, max(256, memory))
storage = 0 if storage == 0 else min(1024, max(1, storage))
return cpu, memory, storage
def has_image(self, tag: str) -> bool:
if not self.client:
return False
try:
self.client.images.get(tag)
return True
except Exception:
return False
    def start_bot(
        self,
        bot_id: str,
        image_tag: Optional[str] = None,
        env_vars: Optional[Dict[str, str]] = None,
        workspace_root: Optional[str] = None,
        native_command: Optional[str] = None,
        native_workdir: Optional[str] = None,
        cpu_cores: Optional[float] = None,
        memory_mb: Optional[int] = None,
        storage_gb: Optional[int] = None,
        on_state_change: Optional[Callable[[str, dict], None]] = None,
    ) -> bool:
        """Create (or reuse) the bot's worker container and start monitoring it.

        Returns True on success. `native_command`/`native_workdir` are accepted
        for interface parity with the native backend but unused here. A running
        `worker_<bot_id>` container is reused; a stopped one is removed and
        recreated. When `on_state_change` is given, a daemon thread streams the
        container logs into it.
        """
        if not self.client:
            return False
        image = image_tag or self.base_image
        if not self.has_image(image):
            return False
        state_nanobot_dir = self._state_nanobot_dir(bot_id=bot_id, workspace_root=workspace_root)
        workspace_dir = self._workspace_dir(bot_id=bot_id, workspace_root=workspace_root)
        default_workspace_dir = os.path.join(state_nanobot_dir, "workspace")
        container_name = f"worker_{bot_id}"
        os.makedirs(state_nanobot_dir, exist_ok=True)
        os.makedirs(workspace_dir, exist_ok=True)
        cpu, memory, storage = self._normalize_resource_limits(cpu_cores, memory_mb, storage_gb)
        volumes = {
            state_nanobot_dir: {"bind": "/root/.nanobot", "mode": "rw"},
        }
        # Only add a second mount when the workspace lives outside the state dir;
        # otherwise the state mount already exposes it at /root/.nanobot/workspace.
        if os.path.abspath(workspace_dir) != os.path.abspath(default_workspace_dir):
            volumes[workspace_dir] = {"bind": "/root/.nanobot/workspace", "mode": "rw"}
        base_kwargs = {
            "image": image,
            "name": container_name,
            "detach": True,
            "stdin_open": True,
            "tty": True,
            "environment": env_vars or {},
            "volumes": volumes,
            "network_mode": "bridge",
        }
        # 0 means "unlimited": omit the corresponding docker limit entirely.
        if memory > 0:
            base_kwargs["mem_limit"] = f"{memory}m"
        if cpu > 0:
            base_kwargs["nano_cpus"] = int(cpu * 1_000_000_000)
        try:
            try:
                container = self.client.containers.get(container_name)
                container.reload()
                if container.status == "running":
                    # Already up: just (re)attach the log monitor and report success.
                    if on_state_change:
                        self.ensure_monitor(bot_id, on_state_change)
                    return True
                container.remove(force=True)
            except docker.errors.NotFound:
                pass
            if storage > 0:
                # storage_opt requires specific storage drivers; fall back to an
                # unbounded container when the daemon rejects it.
                try:
                    container = self.client.containers.run(
                        storage_opt={"size": f"{storage}G"},
                        **base_kwargs,
                    )
                except Exception:
                    container = self.client.containers.run(**base_kwargs)
            else:
                container = self.client.containers.run(**base_kwargs)
            if on_state_change:
                monitor_thread = threading.Thread(
                    target=self._monitor_container_logs,
                    args=(bot_id, container, on_state_change),
                    daemon=True,
                )
                monitor_thread.start()
                self.active_monitors[bot_id] = monitor_thread
            return True
        except Exception:
            return False
    def ensure_monitor(self, bot_id: str, on_state_change: Callable[[str, dict], None]) -> bool:
        """Attach a log-monitor thread to a running container, reusing a live one.

        Returns True when a monitor is (already) running; False when the Docker
        client is unavailable, the container is missing/stopped, or attach fails.
        """
        if not self.client:
            return False
        existing = self.active_monitors.get(bot_id)
        if existing and existing.is_alive():
            return True
        try:
            container = self.client.containers.get(f"worker_{bot_id}")
            container.reload()
            if container.status != "running":
                return False
            monitor_thread = threading.Thread(
                target=self._monitor_container_logs,
                args=(bot_id, container, on_state_change),
                daemon=True,
            )
            monitor_thread.start()
            self.active_monitors[bot_id] = monitor_thread
            return True
        except Exception:
            return False
def stop_bot(self, bot_id: str) -> bool:
if not self.client:
return False
try:
container = self.client.containers.get(f"worker_{bot_id}")
container.stop(timeout=5)
container.remove()
self.active_monitors.pop(bot_id, None)
return True
except docker.errors.NotFound:
self.active_monitors.pop(bot_id, None)
return False
except Exception:
return False
def get_bot_status(self, bot_id: str) -> str:
if not self.client:
return "STOPPED"
try:
container = self.client.containers.get(f"worker_{bot_id}")
container.reload()
raw = str(container.status or "").strip().lower()
if raw in {"running", "restarting"}:
return "RUNNING"
return "STOPPED"
except Exception:
return "STOPPED"
@staticmethod
def _parse_size_to_bytes(raw: Any) -> Optional[int]:
if raw is None:
return None
text = str(raw).strip()
if not text:
return None
try:
return int(float(text))
except Exception:
pass
match = re.fullmatch(r"([0-9]+(?:\.[0-9]+)?)\s*([kmgtp]?)(i?b)?", text.lower())
if not match:
return None
number = float(match.group(1))
unit = (match.group(2) or "").lower()
scale = {
"": 1,
"k": 1024,
"m": 1024 ** 2,
"g": 1024 ** 3,
"t": 1024 ** 4,
"p": 1024 ** 5,
}.get(unit, 1)
return int(number * scale)
@staticmethod
def _calc_cpu_percent(stats: Dict[str, Any]) -> float:
try:
cpu_stats = stats.get("cpu_stats") or {}
precpu_stats = stats.get("precpu_stats") or {}
cpu_total = float((cpu_stats.get("cpu_usage") or {}).get("total_usage") or 0)
prev_cpu_total = float((precpu_stats.get("cpu_usage") or {}).get("total_usage") or 0)
cpu_delta = cpu_total - prev_cpu_total
system_total = float(cpu_stats.get("system_cpu_usage") or 0)
prev_system_total = float(precpu_stats.get("system_cpu_usage") or 0)
system_delta = system_total - prev_system_total
online_cpus = int(
cpu_stats.get("online_cpus")
or len((cpu_stats.get("cpu_usage") or {}).get("percpu_usage") or [])
or 1
)
if cpu_delta <= 0 or system_delta <= 0:
return 0.0
return max(0.0, (cpu_delta / system_delta) * online_cpus * 100.0)
except Exception:
return 0.0
    def get_bot_resource_snapshot(self, bot_id: str) -> Dict[str, Any]:
        """Collect the bot container's configured limits and live usage.

        Always returns the full snapshot shape below; fields keep their zeroed
        defaults when the Docker client is unavailable, the container is
        missing, or any docker call fails. Usage stats are only sampled while
        the container is RUNNING.
        """
        snapshot: Dict[str, Any] = {
            "docker_status": "STOPPED",
            "limits": {
                "cpu_cores": None,
                "memory_bytes": None,
                "storage_bytes": None,
                "nano_cpus": 0,
                "storage_opt_raw": "",
            },
            "usage": {
                "cpu_percent": 0.0,
                "memory_bytes": 0,
                "memory_limit_bytes": 0,
                "memory_percent": 0.0,
                "network_rx_bytes": 0,
                "network_tx_bytes": 0,
                "blk_read_bytes": 0,
                "blk_write_bytes": 0,
                "pids": 0,
                "container_rw_bytes": 0,
            },
        }
        if not self.client:
            return snapshot
        try:
            container = self.client.containers.get(f"worker_{bot_id}")
            container.reload()
            status_raw = str(container.status or "").strip().lower()
            snapshot["docker_status"] = "RUNNING" if status_raw in {"running", "restarting"} else "STOPPED"
            inspect: Dict[str, Any]
            # size=True adds SizeRw (container writable-layer bytes); older SDKs
            # don't accept the kwarg, so fall back to a plain inspect.
            try:
                inspect = self.client.api.inspect_container(container.id, size=True)
            except TypeError:
                inspect = self.client.api.inspect_container(container.id)
            except Exception as e:
                if "unexpected keyword argument 'size'" in str(e):
                    inspect = self.client.api.inspect_container(container.id)
                else:
                    raise
            host_cfg = inspect.get("HostConfig") or {}
            nano_cpus = int(host_cfg.get("NanoCpus") or 0)
            cpu_quota = int(host_cfg.get("CpuQuota") or 0)
            cpu_period = int(host_cfg.get("CpuPeriod") or 0)
            memory_bytes = int(host_cfg.get("Memory") or 0)
            storage_opt = host_cfg.get("StorageOpt") or {}
            storage_raw = storage_opt.get("size")
            storage_bytes = self._parse_size_to_bytes(storage_raw)
            # CPU limit may be expressed either as NanoCpus or quota/period.
            if nano_cpus > 0:
                cpu_cores = nano_cpus / 1_000_000_000
            elif cpu_quota > 0 and cpu_period > 0:
                cpu_cores = cpu_quota / cpu_period
            else:
                cpu_cores = None
            snapshot["limits"] = {
                "cpu_cores": cpu_cores,
                "memory_bytes": memory_bytes if memory_bytes > 0 else None,
                "storage_bytes": storage_bytes,
                "nano_cpus": nano_cpus,
                "storage_opt_raw": str(storage_raw or ""),
            }
            snapshot["usage"]["container_rw_bytes"] = int(inspect.get("SizeRw") or 0)
            if snapshot["docker_status"] == "RUNNING":
                stats = container.stats(stream=False) or {}
                memory_stats = stats.get("memory_stats") or {}
                memory_usage = int(memory_stats.get("usage") or 0)
                memory_limit = int(memory_stats.get("limit") or 0)
                if memory_usage > 0:
                    # Subtract reclaimable page cache (inactive_file), mirroring
                    # how `docker stats` reports memory on cgroup v2.
                    cache = int((memory_stats.get("stats") or {}).get("inactive_file") or 0)
                    memory_usage = max(0, memory_usage - cache)
                networks = stats.get("networks") or {}
                rx_total = 0
                tx_total = 0
                for _, row in networks.items():
                    if isinstance(row, dict):
                        rx_total += int(row.get("rx_bytes") or 0)
                        tx_total += int(row.get("tx_bytes") or 0)
                blk_stats = stats.get("blkio_stats") or {}
                io_rows = blk_stats.get("io_service_bytes_recursive") or []
                blk_read = 0
                blk_write = 0
                for row in io_rows:
                    if not isinstance(row, dict):
                        continue
                    op = str(row.get("op") or "").upper()
                    value = int(row.get("value") or 0)
                    if op == "READ":
                        blk_read += value
                    elif op == "WRITE":
                        blk_write += value
                pids_current = int((stats.get("pids_stats") or {}).get("current") or 0)
                cpu_percent = self._calc_cpu_percent(stats)
                memory_percent = 0.0
                if memory_limit > 0:
                    memory_percent = (memory_usage / memory_limit) * 100.0
                # Fallback for daemons that expose RW size via stats instead of inspect.
                if snapshot["usage"]["container_rw_bytes"] <= 0:
                    storage_stats = stats.get("storage_stats") or {}
                    rw_size = int(storage_stats.get("size_rw") or storage_stats.get("rw_size") or 0)
                    snapshot["usage"]["container_rw_bytes"] = max(0, rw_size)
                snapshot["usage"].update(
                    {
                        "cpu_percent": cpu_percent,
                        "memory_bytes": memory_usage,
                        "memory_limit_bytes": memory_limit,
                        "memory_percent": max(0.0, memory_percent),
                        "network_rx_bytes": rx_total,
                        "network_tx_bytes": tx_total,
                        "blk_read_bytes": blk_read,
                        "blk_write_bytes": blk_write,
                        "pids": pids_current,
                    }
                )
        except docker.errors.NotFound:
            return snapshot
        except Exception:
            return snapshot
        return snapshot
def send_command(self, bot_id: str, command: str, media: Optional[List[str]] = None) -> bool:
if not self.client:
self._last_delivery_error[bot_id] = "Docker client is not available"
return False
media_paths = [str(v).strip().replace("\\", "/") for v in (media or []) if str(v).strip()]
self._last_delivery_error.pop(bot_id, None)
for attempt in range(3):
if self._send_command_via_exec(bot_id, command, media_paths):
self._last_delivery_error.pop(bot_id, None)
return True
time.sleep(0.25 * (attempt + 1))
if self._send_command_via_host_http(bot_id, command, media_paths):
self._last_delivery_error.pop(bot_id, None)
return True
if bot_id not in self._last_delivery_error:
self._last_delivery_error[bot_id] = "Unknown delivery failure"
return False
def get_last_delivery_error(self, bot_id: str) -> str:
return str(self._last_delivery_error.get(bot_id, "") or "").strip()
def get_recent_logs(self, bot_id: str, tail: int = 300) -> List[str]:
if not self.client:
return []
try:
container = self.client.containers.get(f"worker_{bot_id}")
raw = container.logs(tail=max(1, int(tail)))
text = raw.decode("utf-8", errors="ignore")
return [line for line in text.splitlines() if line.strip()]
except Exception:
return []
def parse_monitor_packet(self, line: str) -> Optional[Dict[str, Any]]:
return self._parse_log_line(str(line or "").strip())
def _workspace_dir(self, *, bot_id: str, workspace_root: Optional[str]) -> str:
return os.path.abspath(os.path.join(self._state_nanobot_dir(bot_id=bot_id, workspace_root=workspace_root), "workspace"))
def _state_nanobot_dir(self, *, bot_id: str, workspace_root: Optional[str]) -> str:
configured_root = str(workspace_root or "").strip()
if not configured_root:
return os.path.abspath(os.path.join(self.host_data_root, bot_id, ".nanobot"))
normalized_root = os.path.abspath(os.path.expanduser(configured_root))
return os.path.abspath(os.path.join(normalized_root, bot_id, ".nanobot"))
    def _monitor_container_logs(self, bot_id: str, container: Any, callback: Callable[[str, dict], None]) -> None:
        """Stream the container's logs line-by-line into *callback*.

        Runs on a daemon thread. Each complete line is parsed into a structured
        packet (delivered first when recognized) and always forwarded as a
        RAW_LOG packet. Any exception ends the monitor silently.
        """
        try:
            buffer = ""
            # Incremental decoder: log chunks may split multi-byte UTF-8 sequences.
            decoder = codecs.getincrementaldecoder("utf-8")("replace")
            # Only follow logs emitted from now on; history is served elsewhere.
            since_ts = int(time.time())
            for chunk in container.logs(stream=True, follow=True, since=since_ts):
                text = decoder.decode(chunk) if isinstance(chunk, bytes) else str(chunk)
                if not text:
                    continue
                buffer += text
                while "\n" in buffer:
                    line, buffer = buffer.split("\n", 1)
                    normalized = line.strip("\r").strip()
                    if normalized:
                        state_packet = self._parse_log_line(normalized)
                        if state_packet:
                            callback(bot_id, state_packet)
                        callback(bot_id, {"type": "RAW_LOG", "text": normalized})
            # Stream ended: flush the decoder and emit any trailing partial line.
            rest = decoder.decode(b"", final=True)
            if rest:
                buffer += rest
            tail = buffer.strip()
            if tail:
                state_packet = self._parse_log_line(tail)
                if state_packet:
                    callback(bot_id, state_packet)
                callback(bot_id, {"type": "RAW_LOG", "text": tail})
        except Exception:
            return
    def _parse_monitor_packet_json(self, line: str) -> Optional[Dict[str, Any]]:
        """Decode a `__DASHBOARD_DATA_START__…__DASHBOARD_DATA_END__` JSON marker.

        Maps the embedded event to one of three packet types:
        AGENT_STATE, ASSISTANT_MESSAGE (requires content or media), or
        BUS_EVENT (explicit type or any progress event). Untyped events with
        content fall back to ASSISTANT_MESSAGE. Returns None for lines without
        markers or with undecodable payloads.
        """
        if "__DASHBOARD_DATA_START__" not in line or "__DASHBOARD_DATA_END__" not in line:
            return None
        try:
            raw_json = line.split("__DASHBOARD_DATA_START__", 1)[1].split("__DASHBOARD_DATA_END__", 1)[0].strip()
            data = json.loads(raw_json)
            event_type = str(data.get("type", "")).upper()
            content = str(data.get("content") or data.get("text") or "").strip()
            # Normalize media paths to forward slashes for cross-platform display.
            media = [str(v).strip().replace("\\", "/") for v in (data.get("media") or []) if str(v).strip()]
            is_progress = bool(data.get("is_progress", False))
            is_tool = bool(data.get("is_tool", False))
            usage = data.get("usage") if isinstance(data.get("usage"), dict) else None
            request_id = str(data.get("request_id") or "").strip() or None
            provider = str(data.get("provider") or "").strip() or None
            model = str(data.get("model") or "").strip() or None
            if event_type == "AGENT_STATE":
                payload = data.get("payload") or {}
                state = str(payload.get("state") or data.get("state") or ("TOOL_CALL" if is_tool else "THINKING"))
                action_msg = str(payload.get("action_msg") or payload.get("msg") or content)
                return {
                    "type": "AGENT_STATE",
                    "channel": "dashboard",
                    "payload": {"state": state, "action_msg": action_msg},
                    "request_id": request_id,
                }
            if event_type == "ASSISTANT_MESSAGE":
                if content or media:
                    return {
                        "type": "ASSISTANT_MESSAGE",
                        "channel": "dashboard",
                        "text": content,
                        "media": media,
                        "usage": usage,
                        "request_id": request_id,
                        "provider": provider,
                        "model": model,
                    }
                # An empty assistant message carries no information: drop it.
                return None
            if event_type == "BUS_EVENT" or is_progress:
                return {
                    "type": "BUS_EVENT",
                    "channel": "dashboard",
                    "content": content,
                    "media": media,
                    "is_progress": is_progress,
                    "is_tool": is_tool,
                    "usage": usage,
                    "request_id": request_id,
                    "provider": provider,
                    "model": model,
                }
            # Unknown/absent type: treat anything with content as a message.
            if content or media:
                return {
                    "type": "ASSISTANT_MESSAGE",
                    "channel": "dashboard",
                    "text": content,
                    "media": media,
                    "usage": usage,
                    "request_id": request_id,
                    "provider": provider,
                    "model": model,
                }
        except Exception:
            return None
        return None
def _parse_log_line(self, line: str) -> Optional[Dict[str, Any]]:
if "__DASHBOARD_DATA_START__" in line:
packet = self._parse_monitor_packet_json(line)
if packet:
return packet
process_match = re.search(r"Processing message from ([\w\-]+):[^:]+:\s*(.+)$", line)
if process_match:
channel = process_match.group(1).strip().lower()
action_msg = process_match.group(2).strip()
return {
"type": "AGENT_STATE",
"channel": channel,
"payload": {
"state": "THINKING",
"action_msg": action_msg[:4000],
},
}
response_match = re.search(r"Response to ([\w\-]+):[^:]+:\s*(.+)$", line)
if response_match:
channel = response_match.group(1).strip().lower()
action_msg = response_match.group(2).strip()
return {
"type": "AGENT_STATE",
"channel": channel,
"payload": {
"state": "SUCCESS",
"action_msg": action_msg[:4000],
},
}
lower = line.lower()
tool_call_match = re.search(r"tool call:\s*(.+)$", line, re.IGNORECASE)
if tool_call_match:
return {
"type": "AGENT_STATE",
"payload": {
"state": "TOOL_CALL",
"action_msg": tool_call_match.group(1).strip()[:4000],
},
}
if "error" in lower or "traceback" in lower:
return {
"type": "AGENT_STATE",
"payload": {"state": "ERROR", "action_msg": "执行异常,请检查日志"},
}
return None
    def _send_command_via_exec(self, bot_id: str, command: str, media: Optional[List[str]] = None) -> bool:
        """Deliver a command by exec'ing an HTTP POST inside the container.

        Tries `curl` against the container's local dashboard port first; when
        curl is missing or fails, falls back to an in-container python
        urllib script (payload passed base64-encoded through the environment
        to avoid shell quoting issues). Records a failure reason on any error.
        """
        try:
            container = self.client.containers.get(f"worker_{bot_id}")
            container.reload()
            if container.status != "running":
                self._last_delivery_error[bot_id] = f"Container status is {container.status}"
                return False
            dashboard_port = self._resolve_dashboard_port(container=container, bot_id=bot_id)
            dashboard_url = f"http://127.0.0.1:{dashboard_port}/chat"
            payload_json = json.dumps({"message": command, "media": media or []}, ensure_ascii=False)
            result = container.exec_run(
                [
                    "curl",
                    "-sS",
                    "--fail",
                    "--max-time",
                    "6",
                    "-X",
                    "POST",
                    "-H",
                    "Content-Type: application/json",
                    "-d",
                    payload_json,
                    dashboard_url,
                ]
            )
            output = result.output.decode("utf-8", errors="ignore") if isinstance(result.output, (bytes, bytearray)) else str(result.output)
            if result.exit_code != 0:
                payload_b64 = base64.b64encode(payload_json.encode("utf-8")).decode("ascii")
                py_script = (
                    "import base64,json,os,urllib.request\n"
                    "payload=json.loads(base64.b64decode(os.environ['DASHBOARD_PAYLOAD_B64']).decode('utf-8'))\n"
                    "req=urllib.request.Request(os.environ.get('DASHBOARD_CHAT_URL', 'http://127.0.0.1:9000/chat'),"
                    "data=json.dumps(payload,ensure_ascii=False).encode('utf-8'),"
                    "headers={'Content-Type':'application/json'})\n"
                    "with urllib.request.urlopen(req, timeout=8) as resp:\n"
                    "    print(resp.read().decode('utf-8','ignore'))\n"
                )
                for py_bin in ["python3", "python"]:
                    py_result = container.exec_run(
                        [py_bin, "-c", py_script],
                        environment={
                            "DASHBOARD_PAYLOAD_B64": payload_b64,
                            "DASHBOARD_CHAT_URL": dashboard_url,
                        },
                    )
                    py_output = py_result.output.decode("utf-8", errors="ignore") if isinstance(py_result.output, (bytes, bytearray)) else str(py_result.output)
                    if py_result.exit_code == 0:
                        return True
                    self._last_delivery_error[bot_id] = f"exec fallback failed: {py_output[:300]}"
                # NOTE(review): this overwrites the fallback error recorded in the
                # loop above, so callers only ever see the curl failure — confirm
                # whether losing the python-fallback detail is intentional.
                self._last_delivery_error[bot_id] = f"exec curl failed: {output[:300]}"
                return False
            return True
        except Exception as exc:
            self._last_delivery_error[bot_id] = f"exec curl exception: {exc}"
            return False
def _send_command_via_host_http(self, bot_id: str, command: str, media: Optional[List[str]] = None) -> bool:
try:
container = self.client.containers.get(f"worker_{bot_id}")
container.reload()
ip_address = self._resolve_container_ip(container)
if not ip_address:
self._last_delivery_error[bot_id] = "host HTTP failed: container has no reachable IP address"
return False
dashboard_port = self._resolve_dashboard_port(container=container, bot_id=bot_id)
target_url = f"http://{ip_address}:{dashboard_port}/chat"
with httpx.Client(timeout=4.0) as client:
resp = client.post(target_url, json={"message": command, "media": media or []})
if resp.status_code == 200:
return True
self._last_delivery_error[bot_id] = f"host HTTP failed: {resp.status_code} - {resp.text[:300]}"
return False
except Exception as exc:
self._last_delivery_error[bot_id] = f"host HTTP exception: {exc}"
return False
def _resolve_dashboard_port(self, *, container: Any, bot_id: str) -> int:
    """Return the bot's dashboard channel port, defaulting to 9000.

    The port comes from the mounted config.json (channels.dashboard.port)
    when present; any parse error or out-of-range value falls back to the
    default.
    """
    fallback = 9000
    config_path = self._resolve_mounted_config_path(container=container, bot_id=bot_id)
    if not config_path or not os.path.isfile(config_path):
        return fallback
    try:
        with open(config_path, "r", encoding="utf-8") as handle:
            data = json.load(handle)
        if not isinstance(data, dict):
            return fallback
        channels_cfg = data.get("channels")
        if not isinstance(channels_cfg, dict):
            return fallback
        dashboard_cfg = channels_cfg.get("dashboard")
        if not isinstance(dashboard_cfg, dict):
            return fallback
        candidate = int(dashboard_cfg.get("port") or fallback)
        return candidate if 1 <= candidate <= 65535 else fallback
    except Exception:
        return fallback
def _resolve_mounted_config_path(self, *, container: Any, bot_id: str) -> str:
mounts = list((container.attrs or {}).get("Mounts") or [])
for row in mounts:
if not isinstance(row, dict):
continue
destination = str(row.get("Destination") or "").strip()
if destination != "/root/.nanobot":
continue
source = str(row.get("Source") or "").strip()
if source:
return os.path.join(source, "config.json")
return os.path.join(self.host_data_root, bot_id, ".nanobot", "config.json")
@staticmethod
def _resolve_container_ip(container: Any) -> str:
attrs = dict(getattr(container, "attrs", {}) or {})
network = dict(attrs.get("NetworkSettings") or {})
primary = str(network.get("IPAddress") or "").strip()
if primary:
return primary
networks = dict(network.get("Networks") or {})
for _, row in networks.items():
if not isinstance(row, dict):
continue
ip = str(row.get("IPAddress") or "").strip()
if ip:
return ip
return ""

View File

@ -0,0 +1,31 @@
import os
from typing import Dict
from app.core.settings import EDGE_BOTS_WORKSPACE_ROOT, EDGE_BASE_IMAGE
from app.runtime.docker_manager import EdgeDockerManager
from app.runtime.native_manager import EdgeNativeRuntimeBackend
def edge_runtime_mode() -> str:
    """Read EDGE_RUNTIME_KIND from the environment.

    Returns 'docker' or 'native' when explicitly configured, otherwise
    'all' (which enables every backend).
    """
    configured = str(os.getenv("EDGE_RUNTIME_KIND", "all") or "all").strip().lower()
    return configured if configured in {"docker", "native"} else "all"
def build_edge_runtime_backends() -> Dict[str, object]:
    """Instantiate the runtime backends selected by EDGE_RUNTIME_KIND.

    'all' yields both backends; 'docker'/'native' yields just that one.
    """
    selected = edge_runtime_mode()
    registry: Dict[str, object] = {}
    if selected in {"all", "docker"}:
        registry["docker"] = EdgeDockerManager(host_data_root=EDGE_BOTS_WORKSPACE_ROOT, base_image=EDGE_BASE_IMAGE)
    if selected in {"all", "native"}:
        registry["native"] = EdgeNativeRuntimeBackend()
    return registry
def preferred_edge_runtime_kind(backends: Dict[str, object]) -> str:
    """Pick the runtime kind to favour: docker first, then native.

    An empty registry also answers 'docker' (the historical default).
    """
    for kind in ("docker", "native"):
        if kind in backends:
            return kind
    return "docker"

View File

@ -0,0 +1,776 @@
import codecs
import csv
import hashlib
import json
import signal
import socket
import os
import re
import shlex
import shutil
import subprocess
import threading
import time
from dataclasses import dataclass, field
from datetime import datetime, timezone
from typing import Any, Callable, Dict, List, Optional
import httpx
import psutil
from app.core.settings import (
EDGE_BOTS_WORKSPACE_ROOT,
EDGE_NATIVE_COMMAND,
EDGE_NATIVE_DASHBOARD_URL,
EDGE_NATIVE_WORKDIR,
)
from app.runtime.base import EdgeRuntimeBackend
@dataclass
class _NativeProcessRecord:
    # Bookkeeping for one natively launched bot process (no container).
    process: subprocess.Popen[str]  # running gateway subprocess (text mode)
    command: List[str]  # argv the process was launched with
    cwd: str  # working directory of the process
    log_path: str  # path of the appended native.log
    log_handle: Any  # open write handle onto log_path
    dashboard_url: str  # full /chat endpoint used for command delivery
    dashboard_host: str
    dashboard_port: int
    cpu_cores: Optional[float]  # requested limits, informational only (not enforced on a plain process)
    memory_mb: Optional[int]
    storage_gb: Optional[int]
    stop_event: threading.Event = field(default_factory=threading.Event)  # signals the stdout drain thread to stop
    stdout_thread: Optional[threading.Thread] = None  # thread pumping stdout into the log
    last_error: str = ""  # most recent launch/delivery error for this bot
class EdgeNativeRuntimeBackend(EdgeRuntimeBackend):
runtime_kind = "native"
def __init__(self) -> None:
    # Default launch argv comes from settings; availability is probed once
    # so capabilities() can report it without re-checking PATH.
    self._command = shlex.split(EDGE_NATIVE_COMMAND)
    self._native_available = bool(self._command and shutil.which(self._command[0]))
    self._last_errors: Dict[str, str] = {}  # bot_id -> last error text
    self._records: Dict[str, _NativeProcessRecord] = {}  # bot_id -> live process record
    self._lock = threading.RLock()  # guards start/stop mutations of _records
def capabilities(self) -> Dict[str, Any]:
    """Describe what this native backend offers to the edge API."""
    ready = bool(self._native_available)
    workspace_features = {
        "tree": True,
        "read_file": True,
        "write_markdown": True,
        "upload_files": True,
        "serve_file": True,
    }
    return {
        "protocol": {"version": "1"},
        "runtime": {"docker": False, "native": ready},
        "workspace": workspace_features,
        "monitor": {"logs": ready, "ensure": ready},
        "process": {"command": list(self._command), "available": ready},
    }
def has_image(self, tag: str) -> bool:
    """Native processes are not image-based; always report the image as absent."""
    return False
def start_bot(
    self,
    bot_id: str,
    image_tag: Optional[str] = None,
    env_vars: Optional[Dict[str, str]] = None,
    workspace_root: Optional[str] = None,
    native_command: Optional[str] = None,
    native_workdir: Optional[str] = None,
    cpu_cores: Optional[float] = None,
    memory_mb: Optional[int] = None,
    storage_gb: Optional[int] = None,
    on_state_change: Optional[Callable[[str, dict], None]] = None,
) -> bool:
    """Launch the bot's gateway as a local subprocess.

    ``image_tag`` is accepted for interface parity with the docker
    backend but is not used here.  ``cpu_cores``/``memory_mb``/
    ``storage_gb`` are recorded for reporting only — a plain process has
    no enforced limits.  Returns True once the process is up and its
    dashboard port accepts a TCP connection; on any failure the reason
    is stored via _set_last_error and False is returned.  Idempotent: a
    bot whose process is still alive only gets its monitor re-ensured.
    """
    bot_id = str(bot_id or "").strip()
    if not bot_id:
        return False
    effective_env = dict(env_vars or {})
    launch_command = self._resolve_launch_command(native_command=native_command, env_vars=effective_env)
    if not self._is_launch_command_available(launch_command):
        self._set_last_error(bot_id, f"native command not available: {self._render_command(launch_command) or 'nanobot gateway'}")
        return False
    with self._lock:
        existing = self._records.get(bot_id)
        if existing and existing.process.poll() is None:
            # Already running: just make sure someone is draining stdout.
            if on_state_change:
                self.ensure_monitor(bot_id, on_state_change)
            return True
        if existing:
            self._cleanup_record(bot_id, existing)
        state_root = self._bot_root(bot_id)
        workspace_dir = self._workspace_dir(bot_id=bot_id, workspace_root=workspace_root)
        config_path = self._config_path(bot_id, workspace_root=workspace_root)
        runtime_dir = os.path.join(os.path.dirname(config_path), "runtime")
        os.makedirs(runtime_dir, exist_ok=True)
        os.makedirs(workspace_dir, exist_ok=True)
        log_path = os.path.join(runtime_dir, "native.log")
        cwd = self._resolve_workdir(state_root, native_workdir=native_workdir, env_vars=effective_env)
        dashboard_host, dashboard_port, dashboard_url = self._resolve_dashboard_endpoint(bot_id, effective_env)
        # Child environment: caller-provided vars win; setdefault fills the rest.
        env = os.environ.copy()
        env.update({str(k): str(v) for k, v in effective_env.items() if str(k).strip()})
        env.setdefault("PYTHONUNBUFFERED", "1")
        env.setdefault("EDGE_RUNTIME_KIND", "native")
        env.setdefault("EDGE_NODE_MODE", "native")
        env.setdefault("NANOBOT_BOT_ID", bot_id)
        env.setdefault("DASHBOARD_HOST", dashboard_host)
        env.setdefault("DASHBOARD_PORT", str(dashboard_port))
        env.setdefault("DASHBOARD_URL", dashboard_url)
        env.setdefault("NANOBOT_CONFIG", config_path)
        env.setdefault("NANOBOT_WORKSPACE", workspace_dir)
        if not os.path.isfile(config_path):
            self._set_last_error(bot_id, f"native config not found: {config_path}")
            return False
        # Kill any stray gateway bound to the same config before relaunching.
        self._terminate_orphan_processes(bot_id=bot_id, config_path=config_path)
        log_handle = open(log_path, "a", encoding="utf-8")
        command = self._build_launch_command(base_command=launch_command, config_path=config_path, workspace_dir=workspace_dir)
        log_handle.write(
            f"[{self._now()}] native bootstrap command={shlex.join(command)} cwd={cwd} config={config_path} workspace={workspace_dir} dashboard={dashboard_url}\n"
        )
        log_handle.flush()
        try:
            process = subprocess.Popen(
                command,
                cwd=cwd,
                env=env,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                text=True,
                bufsize=1,
                # Own session, so signals sent to the edge process don't hit the bot.
                start_new_session=True,
            )
        except FileNotFoundError as exc:
            log_handle.write(f"[{self._now()}] native bootstrap failed: {exc}\n")
            log_handle.flush()
            log_handle.close()
            self._set_last_error(bot_id, f"native command not found: {exc}")
            return False
        except Exception as exc:
            log_handle.write(f"[{self._now()}] native bootstrap failed: {exc}\n")
            log_handle.flush()
            log_handle.close()
            self._set_last_error(bot_id, f"native start failed: {exc}")
            return False
        record = _NativeProcessRecord(
            process=process,
            command=command,
            cwd=cwd,
            log_path=log_path,
            log_handle=log_handle,
            dashboard_url=dashboard_url,
            dashboard_host=dashboard_host,
            dashboard_port=dashboard_port,
            cpu_cores=cpu_cores,
            memory_mb=memory_mb,
            storage_gb=storage_gb,
        )
        self._records[bot_id] = record
        record.stdout_thread = threading.Thread(
            target=self._drain_stdout,
            args=(bot_id, record, on_state_change),
            daemon=True,
        )
        record.stdout_thread.start()
        if not self._wait_for_dashboard_ready(record):
            # Process started but never opened its port: tear it back down.
            self._set_last_error(bot_id, f"native dashboard did not become ready: {dashboard_url}")
            try:
                if process.poll() is None:
                    process.terminate()
                    process.wait(timeout=5)
            except Exception:
                pass
            self._cleanup_record(bot_id, record)
            self._records.pop(bot_id, None)
            return False
        self._set_last_error(bot_id, "")
        return True
def ensure_monitor(self, bot_id: str, on_state_change: Callable[[str, dict], None]) -> bool:
    """(Re)attach the stdout drain thread for a live process.

    Returns False when the bot is untracked or its process has exited;
    True when a drain thread is already alive or a new one was started.
    """
    rec = self._records.get(bot_id)
    if rec is None or rec.process.poll() is not None:
        return False
    current = rec.stdout_thread
    if current is not None and current.is_alive():
        return True
    worker = threading.Thread(
        target=self._drain_stdout,
        args=(bot_id, rec, on_state_change),
        daemon=True,
    )
    rec.stdout_thread = worker
    worker.start()
    return True
def stop_bot(self, bot_id: str) -> bool:
    """Stop the bot's native process (terminate, then kill on timeout) and
    reap any orphaned gateway processes that share its config path.

    Returns True when either the tracked process or at least one orphan
    was stopped.  NOTE(review): the lock is held for the whole shutdown,
    including the waits — the RLock keeps re-entrant calls safe, but
    confirm the original lock scope if contention matters.
    """
    bot_id = str(bot_id or "").strip()
    with self._lock:
        record = self._records.pop(bot_id, None)
        stopped = False
        if record is not None:
            try:
                if record.process.poll() is None:
                    record.stop_event.set()
                    record.process.terminate()
                    try:
                        record.process.wait(timeout=8)
                    except Exception:
                        # Graceful stop timed out — escalate.
                        record.process.kill()
                        record.process.wait(timeout=5)
                self._cleanup_record(bot_id, record)
                stopped = True
            except Exception as exc:
                self._set_last_error(bot_id, f"native stop failed: {exc}")
                self._cleanup_record(bot_id, record)
                return False
        orphan_stopped = self._terminate_orphan_processes(bot_id=bot_id, config_path=self._config_path(bot_id))
        return bool(stopped or orphan_stopped)
def get_bot_status(self, bot_id: str) -> str:
    """Report 'RUNNING' or 'STOPPED'.

    An untracked bot still counts as running when an orphan gateway
    process matching its config path exists.
    """
    key = str(bot_id or "").strip()
    rec = self._records.get(key)
    if rec is None:
        return "RUNNING" if self._has_orphan_process(key) else "STOPPED"
    try:
        alive = rec.process.poll() is None
    except Exception:
        return "STOPPED"
    return "RUNNING" if alive else "STOPPED"
def get_bot_resource_snapshot(self, bot_id: str) -> Dict[str, Any]:
    """Return a docker-stats-shaped snapshot for a native process.

    Limits echo the values requested at start (normalized to bytes);
    network and blkio counters are not available for plain processes and
    are reported as zero.  ``container_rw_bytes`` is repurposed as the
    bot workspace's on-disk size.
    """
    bot_id = str(bot_id or "").strip()
    record = self._records.get(bot_id)
    snapshot: Dict[str, Any] = {
        "docker_status": self.get_bot_status(bot_id),
        "limits": {
            "cpu_cores": self._normalize_cpu_limit(record.cpu_cores if record else None),
            "memory_bytes": self._normalize_memory_limit(record.memory_mb if record else None),
            "storage_bytes": self._normalize_storage_limit(record.storage_gb if record else None),
            "nano_cpus": 0,
            "storage_opt_raw": "",
        },
        "usage": {
            "cpu_percent": 0.0,
            "memory_bytes": 0,
            "memory_limit_bytes": 0,
            "memory_percent": 0.0,
            "network_rx_bytes": 0,
            "network_tx_bytes": 0,
            "blk_read_bytes": 0,
            "blk_write_bytes": 0,
            "pids": 0,
            "container_rw_bytes": 0,
        },
    }
    if record is None or record.process.poll() is not None:
        return snapshot
    try:
        proc = psutil.Process(record.process.pid)
        # interval=None is non-blocking; the first call in a process returns 0.0.
        cpu_percent = float(proc.cpu_percent(interval=None) or 0.0)
        memory_info = proc.memory_info()
        memory_bytes = int(getattr(memory_info, "rss", 0) or 0)
        # NOTE(review): the reported "limit" is total host RAM, not a cgroup cap.
        memory_limit = int(psutil.virtual_memory().total or 0)
        memory_percent = float(proc.memory_percent() or 0.0)
        children = proc.children(recursive=True)
        workspace_used = self._calc_workspace_used_bytes(bot_id)
        snapshot["usage"].update(
            {
                "cpu_percent": round(cpu_percent, 2),
                "memory_bytes": memory_bytes,
                "memory_limit_bytes": memory_limit,
                "memory_percent": round(memory_percent, 2),
                "network_rx_bytes": 0,
                "network_tx_bytes": 0,
                "blk_read_bytes": 0,
                "blk_write_bytes": 0,
                "pids": 1 + len(children),
                "container_rw_bytes": workspace_used,
            }
        )
    except Exception:
        # Process vanished mid-sample: still report workspace usage.
        workspace_used = self._calc_workspace_used_bytes(bot_id)
        snapshot["usage"]["container_rw_bytes"] = workspace_used
    return snapshot
def get_recent_logs(self, bot_id: str, tail: int = 300) -> List[str]:
    """Return the last ``tail`` non-blank lines of the bot's native log.

    A non-positive ``tail`` returns every line; a missing or unreadable
    log yields an empty list.
    """
    log_path = self._log_path(str(bot_id or "").strip())
    if not os.path.isfile(log_path):
        return []
    try:
        with open(log_path, "r", encoding="utf-8", errors="ignore") as fh:
            # Stream the file instead of readlines(): avoids materializing
            # a second full copy of a potentially large log.
            rows = [line.rstrip("\n") for line in fh if line.strip()]
        if tail > 0:
            return rows[-int(tail):]
        return rows
    except Exception:
        return []
def send_command(self, bot_id: str, command: str, media: Optional[List[str]] = None) -> bool:
    """POST a chat command to the bot's local dashboard endpoint.

    Clears the bot's last-error slot on success; records the failure
    reason and returns False otherwise.
    """
    key = str(bot_id or "").strip()
    rec = self._records.get(key)
    if rec is None or rec.process.poll() is not None:
        self._set_last_error(key, "native process is not running")
        return False
    try:
        body = {"message": command, "media": list(media or [])}
        with httpx.Client(timeout=5.0, trust_env=False) as http_client:
            reply = http_client.post(rec.dashboard_url, json=body)
        if reply.status_code == 200:
            self._set_last_error(key, "")
            return True
        self._set_last_error(key, f"native dashboard returned {reply.status_code}: {reply.text[:300]}")
        return False
    except Exception as exc:
        self._set_last_error(key, f"native dashboard request failed: {exc}")
        return False
def get_last_delivery_error(self, bot_id: str) -> str:
    """Return the most recent error recorded for this bot ('' when none)."""
    key = str(bot_id or "").strip()
    rec = self._records.get(key)
    if rec is None:
        return str(self._last_errors.get(key) or "").strip()
    # Prefer the live record's error, falling back to the global map.
    return str(rec.last_error or self._last_errors.get(key) or "").strip()
def parse_monitor_packet(self, line: str) -> Optional[Dict[str, Any]]:
    """Public wrapper over the log-line parser used by monitor polling."""
    normalized = str(line or "").strip()
    return self._parse_log_line(normalized)
def _drain_stdout(
    self,
    bot_id: str,
    record: _NativeProcessRecord,
    callback: Optional[Callable[[str, dict], None]] = None,
) -> None:
    """Pump the child's stdout into native.log until EOF or stop_event.

    Every non-blank line is appended to the log.  When a callback is
    supplied it first receives any parsed monitor packet for the line,
    then always a RAW_LOG event for the same line.
    """
    stream = record.process.stdout
    if stream is None:
        return
    try:
        for raw_line in iter(stream.readline, ""):
            if record.stop_event.is_set():
                break
            line = str(raw_line or "").rstrip("\r\n")
            if not line:
                continue
            try:
                record.log_handle.write(f"{line}\n")
                record.log_handle.flush()
            except Exception:
                # Logging must never kill the drain loop.
                pass
            if callback:
                parsed = self._parse_log_line(line)
                if parsed:
                    callback(bot_id, parsed)
                callback(bot_id, {"type": "RAW_LOG", "text": line})
    finally:
        try:
            stream.close()
        except Exception:
            pass
        try:
            record.log_handle.flush()
        except Exception:
            pass
def _cleanup_record(self, bot_id: str, record: _NativeProcessRecord) -> None:
    """Best-effort teardown: signal the drain thread and close the log handle."""
    try:
        record.stop_event.set()
    except Exception:
        pass
    try:
        handle = record.log_handle
        if handle and not handle.closed:
            handle.flush()
            handle.close()
    except Exception:
        pass
def _set_last_error(self, bot_id: str, message: str) -> None:
    """Record the latest error text for a bot, mirroring it onto any live record."""
    key = str(bot_id or "").strip()
    text = str(message or "").strip()
    self._last_errors[key] = text
    rec = self._records.get(key)
    if rec is not None:
        rec.last_error = text
def _resolve_workdir(
    self,
    bot_root: str,
    *,
    native_workdir: Optional[str] = None,
    env_vars: Optional[Dict[str, str]] = None,
) -> str:
    """Choose the process working directory.

    Precedence: explicit argument, EDGE_NATIVE_WORKDIR from env_vars,
    the global setting, then the bot's own root directory.
    """
    overrides = env_vars or {}
    chosen = str(native_workdir or overrides.get("EDGE_NATIVE_WORKDIR") or EDGE_NATIVE_WORKDIR or "").strip()
    return os.path.abspath(chosen) if chosen else os.path.abspath(bot_root)
def _resolve_dashboard_endpoint(self, bot_id: str, env_vars: Dict[str, str]) -> tuple[str, int, str]:
    """Work out (host, port, url) for the bot's dashboard channel.

    Per-bot env vars win over EDGE_NATIVE_* process environment; the
    port defaults to a hash-derived per-bot value and is clamped to
    1..65535.
    """
    host = str(env_vars.get("DASHBOARD_HOST") or os.getenv("EDGE_NATIVE_DASHBOARD_HOST") or "127.0.0.1").strip() or "127.0.0.1"
    port_text = str(env_vars.get("DASHBOARD_PORT") or os.getenv("EDGE_NATIVE_DASHBOARD_PORT") or "").strip()
    try:
        port = int(port_text) if port_text else self._default_dashboard_port(bot_id)
    except Exception:
        port = self._default_dashboard_port(bot_id)
    port = min(max(port, 1), 65535)
    url = str(env_vars.get("DASHBOARD_URL") or os.getenv("EDGE_NATIVE_DASHBOARD_URL") or f"http://{host}:{port}/chat").strip()
    return host, port, url or f"http://{host}:{port}/chat"
def _build_launch_command(self, *, base_command: List[str], config_path: str, workspace_dir: str) -> List[str]:
    """Append --config/--workspace to the base argv unless already present."""
    argv = list(base_command)
    config_present = any(token in {"--config", "-c"} for token in argv)
    workspace_present = any(token in {"--workspace", "-w"} for token in argv)
    if not config_present:
        argv += ["--config", config_path]
    if not workspace_present:
        argv += ["--workspace", workspace_dir]
    return argv
def _resolve_launch_command(self, *, native_command: Optional[str], env_vars: Dict[str, str]) -> List[str]:
    """Pick the launch argv: explicit request, env override, else the backend default."""
    requested = str(native_command or "").strip()
    if requested:
        return self._parse_launcher_command(requested)
    from_env = str(env_vars.get("EDGE_NATIVE_COMMAND") or "").strip()
    if from_env:
        parsed = self._parse_launcher_command(from_env)
        if parsed:
            return parsed
    return list(self._command)
@staticmethod
def _parse_launcher_command(raw_command: str) -> List[str]:
    """Parse a configured launch command string into argv.

    Tries, in order: a JSON array ('["nanobot", "gateway"]'), a quoted
    CSV row, then shell-style splitting.  Returns [] when nothing usable
    can be extracted.
    """
    text = str(raw_command or "").strip()
    if not text:
        return []
    # JSON-list form.
    if text.startswith("[") and text.endswith("]"):
        try:
            payload = json.loads(text)
            if isinstance(payload, list):
                rows = [str(item or "").strip() for item in payload if str(item or "").strip()]
                if rows:
                    return rows
        except Exception:
            pass
    # CSV form is only attempted when quotes are present, so plain commands
    # that merely contain commas are not split apart.
    if "," in text and any(mark in text for mark in ['"', "'"]):
        try:
            rows = [str(item or "").strip() for item in next(csv.reader([text], skipinitialspace=True)) if str(item or "").strip()]
            if rows:
                return rows
        except Exception:
            pass
    # Fall back to shell-style splitting.
    try:
        return [str(item or "").strip() for item in shlex.split(text) if str(item or "").strip()]
    except Exception:
        return []
@staticmethod
def _is_launch_command_available(command: List[str]) -> bool:
if not command:
return False
return bool(shutil.which(command[0]))
@staticmethod
def _render_command(command: List[str]) -> str:
return " ".join(str(part or "").strip() for part in command if str(part or "").strip())
def _log_path(self, bot_id: str) -> str:
    """Path of the native.log that lives beside the bot's config.json."""
    runtime_dir = os.path.join(os.path.dirname(self._config_path(bot_id)), "runtime")
    return os.path.join(runtime_dir, "native.log")
def _config_path(self, bot_id: str, workspace_root: Optional[str] = None) -> str:
    """Resolve the bot's config.json path.

    Resolution order:
      1. <workspace_root>/<bot_id>/.nanobot/config.json when a root was
         passed and that file exists;
      2. the root recorded in the bot's runtime-target.json marker, when
         a config exists there;
      3. the default layout under EDGE_BOTS_WORKSPACE_ROOT (returned even
         if the file does not exist yet).
    """
    configured_root = str(workspace_root or "").strip()
    if configured_root:
        external_config = os.path.abspath(
            os.path.join(
                os.path.abspath(os.path.expanduser(configured_root)),
                bot_id,
                ".nanobot",
                "config.json",
            )
        )
        if os.path.isfile(external_config):
            return external_config
    inferred_root = self._workspace_root_from_runtime_target(bot_id)
    if inferred_root:
        inferred_config = os.path.abspath(os.path.join(inferred_root, bot_id, ".nanobot", "config.json"))
        if os.path.isfile(inferred_config):
            return inferred_config
    return os.path.join(self._bot_root(bot_id), ".nanobot", "config.json")
def _bot_root(self, bot_id: str) -> str:
    """Absolute per-bot state directory under the edge workspace root."""
    return os.path.abspath(os.path.join(EDGE_BOTS_WORKSPACE_ROOT, bot_id))
def _workspace_dir(self, *, bot_id: str, workspace_root: Optional[str] = None) -> str:
    """Resolve the bot's workspace directory.

    Precedence: an explicit root argument, the path declared inside
    config.json, then the default layout under the bot root.
    """
    explicit_root = str(workspace_root or "").strip()
    if explicit_root:
        base = os.path.abspath(os.path.expanduser(explicit_root))
        return os.path.abspath(os.path.join(base, bot_id, ".nanobot", "workspace"))
    from_config = self._workspace_dir_from_config(bot_id)
    if from_config:
        return from_config
    return os.path.abspath(os.path.join(self._bot_root(bot_id), ".nanobot", "workspace"))
def _workspace_dir_from_config(self, bot_id: str) -> Optional[str]:
    """Read agents.defaults.workspace out of the bot's config.json, if set."""
    config_path = self._config_path(bot_id)
    if not os.path.isfile(config_path):
        return None
    try:
        with open(config_path, "r", encoding="utf-8") as fh:
            data = json.load(fh)
        if not isinstance(data, dict):
            return None
        agents_cfg = data.get("agents")
        if not isinstance(agents_cfg, dict):
            return None
        defaults_cfg = agents_cfg.get("defaults")
        if not isinstance(defaults_cfg, dict):
            return None
        workspace = str(defaults_cfg.get("workspace") or "").strip()
        if not workspace:
            return None
        return os.path.abspath(os.path.expanduser(workspace))
    except Exception:
        return None
def _workspace_root_from_runtime_target(self, bot_id: str) -> str:
    """Read workspace_root from the bot's runtime-target.json marker, or ''."""
    marker = os.path.join(self._bot_root(bot_id), ".nanobot", "runtime-target.json")
    if not os.path.isfile(marker):
        return ""
    try:
        with open(marker, "r", encoding="utf-8") as fh:
            data = json.load(fh)
        if not isinstance(data, dict):
            return ""
        configured = str(data.get("workspace_root") or "").strip()
        if not configured:
            return ""
        return os.path.abspath(os.path.expanduser(configured))
    except Exception:
        return ""
def _has_orphan_process(self, bot_id: str) -> bool:
    """True when an untracked gateway process matching this bot's config exists."""
    orphans = self._find_orphan_processes(bot_id=bot_id, config_path=self._config_path(bot_id))
    return len(orphans) > 0
def _find_orphan_processes(self, *, bot_id: str, config_path: str) -> List[psutil.Process]:
    """Find gateway processes for this config that this backend is not tracking.

    Matching is purely by command line: it must mention
    'nanobot.cli.commands', the ' gateway' subcommand, and the absolute
    config path.  ``bot_id`` is accepted for signature symmetry but not
    used for matching.
    """
    matches: List[psutil.Process] = []
    normalized_config_path = os.path.abspath(config_path)
    for proc in psutil.process_iter(["pid", "cmdline"]):
        try:
            cmdline = [str(part or "") for part in (proc.info.get("cmdline") or [])]
            if not cmdline:
                continue
            joined = " ".join(cmdline)
            if "nanobot.cli.commands" not in joined or " gateway" not in joined:
                continue
            if normalized_config_path not in joined:
                continue
            matches.append(proc)
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            # Process vanished mid-scan or is not ours to inspect.
            continue
        except Exception:
            continue
    return matches
def _terminate_orphan_processes(self, *, bot_id: str, config_path: str) -> int:
    """SIGTERM (then SIGKILL) orphan gateway processes for this config.

    Returns the number of processes that were actually stopped; failures
    are recorded via _set_last_error but do not abort the sweep.
    """
    stopped = 0
    for proc in self._find_orphan_processes(bot_id=bot_id, config_path=config_path):
        try:
            os.kill(int(proc.pid), signal.SIGTERM)
            try:
                proc.wait(timeout=5)
            except psutil.TimeoutExpired:
                # Escalate when the process ignores SIGTERM.
                os.kill(int(proc.pid), signal.SIGKILL)
                proc.wait(timeout=3)
            stopped += 1
        except (psutil.NoSuchProcess, ProcessLookupError):
            continue
        except Exception as exc:
            self._set_last_error(bot_id, f"failed to cleanup orphan native process: {exc}")
    return stopped
@staticmethod
def _wait_for_dashboard_ready(record: _NativeProcessRecord, timeout_seconds: float = 8.0) -> bool:
    """Poll the dashboard TCP port until it accepts a connection.

    Returns False when the process dies first or the deadline (at least
    one second) passes without a successful connect.
    """
    budget = max(1.0, float(timeout_seconds or 8.0))
    deadline = time.monotonic() + budget
    while time.monotonic() < deadline:
        if record.process.poll() is not None:
            return False
        try:
            probe = socket.create_connection((record.dashboard_host, record.dashboard_port), timeout=0.5)
        except OSError:
            time.sleep(0.2)
            continue
        probe.close()
        return True
    return False
@staticmethod
def _default_dashboard_port(bot_id: str) -> int:
digest = hashlib.sha1(str(bot_id or "").strip().encode("utf-8")).hexdigest()
return 19000 + (int(digest[:6], 16) % 2000)
@staticmethod
def _normalize_cpu_limit(value: Optional[float]) -> Optional[float]:
if value is None:
return None
try:
return round(float(value), 2)
except Exception:
return None
@staticmethod
def _normalize_memory_limit(value: Optional[int]) -> Optional[int]:
if value is None:
return None
try:
return max(0, int(value)) * 1024 * 1024
except Exception:
return None
@staticmethod
def _normalize_storage_limit(value: Optional[int]) -> Optional[int]:
if value is None:
return None
try:
return max(0, int(value)) * 1024 * 1024 * 1024
except Exception:
return None
def _calc_workspace_used_bytes(self, bot_id: str) -> int:
    """Sum the sizes of all regular files under the bot's workspace tree."""
    used = 0
    for dirpath, _dirnames, filenames in os.walk(self._workspace_dir(bot_id=bot_id)):
        for name in filenames:
            try:
                used += int(os.path.getsize(os.path.join(dirpath, name)))
            except Exception:
                # Files can disappear mid-walk; skip them.
                continue
    return used
@staticmethod
def _now() -> str:
return datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
@staticmethod
def _parse_monitor_packet_json(line: str) -> Optional[Dict[str, Any]]:
    """Decode a __DASHBOARD_DATA_START__…__DASHBOARD_DATA_END__ packet.

    Returns a normalized monitor event dict (AGENT_STATE,
    ASSISTANT_MESSAGE, or BUS_EVENT) or None when the line carries no
    packet, the packet is empty, or parsing fails.
    """
    if "__DASHBOARD_DATA_START__" not in line or "__DASHBOARD_DATA_END__" not in line:
        return None
    try:
        raw_json = line.split("__DASHBOARD_DATA_START__", 1)[1].split("__DASHBOARD_DATA_END__", 1)[0].strip()
        data = json.loads(raw_json)
        event_type = str(data.get("type", "")).upper()
        content = str(data.get("content") or data.get("text") or "").strip()
        # Media paths are normalized to forward slashes.
        media = [str(v).strip().replace("\\", "/") for v in (data.get("media") or []) if str(v).strip()]
        is_progress = bool(data.get("is_progress", False))
        is_tool = bool(data.get("is_tool", False))
        usage = data.get("usage") if isinstance(data.get("usage"), dict) else None
        request_id = str(data.get("request_id") or "").strip() or None
        provider = str(data.get("provider") or "").strip() or None
        model = str(data.get("model") or "").strip() or None
        if event_type == "AGENT_STATE":
            payload = data.get("payload") or {}
            state = str(payload.get("state") or data.get("state") or ("TOOL_CALL" if is_tool else "THINKING"))
            action_msg = str(payload.get("action_msg") or payload.get("msg") or content)
            return {
                "type": "AGENT_STATE",
                "channel": "dashboard",
                "payload": {"state": state, "action_msg": action_msg},
                "request_id": request_id,
            }
        if event_type == "ASSISTANT_MESSAGE":
            # Assistant messages without text or media are dropped.
            if content or media:
                return {
                    "type": "ASSISTANT_MESSAGE",
                    "channel": "dashboard",
                    "text": content,
                    "media": media,
                    "usage": usage,
                    "request_id": request_id,
                    "provider": provider,
                    "model": model,
                }
            return None
        if event_type == "BUS_EVENT" or is_progress:
            return {
                "type": "BUS_EVENT",
                "channel": "dashboard",
                "content": content,
                "media": media,
                "is_progress": is_progress,
                "is_tool": is_tool,
                "usage": usage,
                "request_id": request_id,
                "provider": provider,
                "model": model,
            }
        # Unknown event types that still carry content degrade to assistant messages.
        if content or media:
            return {
                "type": "ASSISTANT_MESSAGE",
                "channel": "dashboard",
                "text": content,
                "media": media,
                "usage": usage,
                "request_id": request_id,
                "provider": provider,
                "model": model,
            }
    except Exception:
        return None
    return None
@classmethod
def _parse_log_line(cls, line: str) -> Optional[Dict[str, Any]]:
    """Turn one stdout/log line into a monitor event dict, or None.

    Priority: explicit dashboard JSON packets, then heuristic regexes
    over the gateway's human-readable log lines, then a generic ERROR
    state for anything mentioning error/traceback.
    """
    if "__DASHBOARD_DATA_START__" in line:
        packet = cls._parse_monitor_packet_json(line)
        if packet:
            return packet
    # "Processing message from <channel>:<sender>: <text>" -> THINKING
    process_match = re.search(r"Processing message from ([\w\-]+):[^:]+:\s*(.+)$", line)
    if process_match:
        channel = process_match.group(1).strip().lower()
        action_msg = process_match.group(2).strip()
        return {
            "type": "AGENT_STATE",
            "channel": channel,
            "payload": {
                "state": "THINKING",
                "action_msg": action_msg[:4000],
            },
        }
    # "Response to <channel>:<recipient>: <text>" -> SUCCESS
    response_match = re.search(r"Response to ([\w\-]+):[^:]+:\s*(.+)$", line)
    if response_match:
        channel = response_match.group(1).strip().lower()
        action_msg = response_match.group(2).strip()
        return {
            "type": "AGENT_STATE",
            "channel": channel,
            "payload": {
                "state": "SUCCESS",
                "action_msg": action_msg[:4000],
            },
        }
    tool_call_match = re.search(r"tool call:\s*(.+)$", line, re.IGNORECASE)
    if tool_call_match:
        return {
            "type": "AGENT_STATE",
            "payload": {
                "state": "TOOL_CALL",
                "action_msg": tool_call_match.group(1).strip()[:4000],
            },
        }
    lower = line.lower()
    if "error" in lower or "traceback" in lower:
        return {
            "type": "AGENT_STATE",
            "payload": {"state": "ERROR", "action_msg": "执行异常,请检查日志"},
        }
    return None

View File

@ -0,0 +1 @@
# Schema package for dashboard-edge.

View File

@ -0,0 +1,116 @@
from typing import Any, Dict, List, Optional
from pydantic import BaseModel, Field
NODE_PROTOCOL_VERSION = "1"
class EdgeNodeIdentityBase(BaseModel):
    """Identity fields shared by every edge-node report."""
    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str
    display_name: str
    service: str = "dashboard-edge"
    transport_kind: str = "edge"
    runtime_kind: str = "docker"
    core_adapter: str = "nanobot"


class EdgeStatusResponse(BaseModel):
    """Minimal status-string envelope."""
    status: str


class EdgeStateWriteRequest(BaseModel):
    """Payload for writing a bot state blob on the edge node."""
    data: Dict[str, Any] = Field(default_factory=dict)
    workspace_root: Optional[str] = None


class EdgeStateResponse(BaseModel):
    """A stored state blob keyed by bot and state key."""
    bot_id: str
    state_key: str
    data: Dict[str, Any] = Field(default_factory=dict)
class EdgeNativePreflightRequest(BaseModel):
    """Optional command/workdir overrides to validate before a native start."""
    native_command: Optional[str] = None
    native_workdir: Optional[str] = None


class EdgeNativePreflightResponse(BaseModel):
    """Outcome of a native-runtime preflight check."""
    ok: bool = False
    command: List[str] = Field(default_factory=list)  # resolved launch argv
    workdir: str = ""
    command_available: bool = False  # executable resolvable on PATH
    workdir_exists: bool = False
    detail: str = ""


class EdgeCommandRequest(BaseModel):
    """A chat command (plus optional media paths) to deliver to a bot."""
    command: str
    media: List[str] = Field(default_factory=list)


class EdgeLogsResponse(BaseModel):
    """Recent log lines for one bot."""
    bot_id: str
    logs: List[str] = Field(default_factory=list)


class EdgeMonitorEnsureResponse(BaseModel):
    """Whether the monitor thread could be (re)attached."""
    ensured: bool = False
class EdgeMonitorPacket(BaseModel):
    """One captured monitor event, ordered by a per-bot sequence number."""
    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str = ""
    bot_id: str = ""
    seq: int = 0
    captured_at: str = ""
    packet: Dict[str, Any] = Field(default_factory=dict)


class EdgeMonitorPacketsResponse(BaseModel):
    """A batch of monitor packets plus the newest sequence number seen."""
    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str = ""
    bot_id: str
    latest_seq: int = 0
    packets: List[EdgeMonitorPacket] = Field(default_factory=list)


class EdgeWorkspaceSyncRequest(BaseModel):
    """Overrides applied when syncing a bot workspace onto the node."""
    channels_override: Optional[List[Dict[str, Any]]] = None
    global_delivery_override: Optional[Dict[str, Any]] = None
    runtime_overrides: Optional[Dict[str, Any]] = None


class EdgeMarkdownWriteRequest(BaseModel):
    """Markdown content to write into a bot workspace file."""
    content: str = ""
class EdgeNodeSelfResponse(BaseModel):
    """Full self-description of an edge node: identity, capabilities, resources."""
    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str
    display_name: str
    service: str = "dashboard-edge"
    transport_kind: str = "edge"
    runtime_kind: str = "docker"
    core_adapter: str = "nanobot"
    capabilities: Dict[str, Any] = Field(default_factory=dict)
    resources: Dict[str, Any] = Field(default_factory=dict)
    reported_at: str = ""


class EdgeNodeResourcesResponse(BaseModel):
    """Resource report for an edge node."""
    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str
    display_name: str = ""
    service: str = "dashboard-edge"
    transport_kind: str = "edge"
    runtime_kind: str = "docker"
    core_adapter: str = "nanobot"
    resources: Dict[str, Any] = Field(default_factory=dict)
    reported_at: str = ""


class EdgeNodeHeartbeatResponse(EdgeNodeIdentityBase):
    """Heartbeat payload: identity plus current capabilities and resources."""
    capabilities: Dict[str, Any] = Field(default_factory=dict)
    resources: Dict[str, Any] = Field(default_factory=dict)
    reported_at: str = ""

View File

@ -0,0 +1,15 @@
from typing import Dict, Optional
from pydantic import BaseModel, Field
class EdgeStartBotRequest(BaseModel):
    """Parameters for starting a bot on an edge node.

    native_command/native_workdir apply only to the native runtime;
    presumably image_tag applies only to the docker runtime — confirm
    against the runtime backends.
    """
    image_tag: str
    runtime_kind: str = "docker"  # "docker" or "native"
    env_vars: Dict[str, str] = Field(default_factory=dict)
    workspace_root: Optional[str] = None
    native_command: Optional[str] = None
    native_workdir: Optional[str] = None
    cpu_cores: float = 1.0
    memory_mb: int = 1024
    storage_gb: int = 10

View File

@ -0,0 +1 @@
# Service package for dashboard-edge.

View File

@ -0,0 +1,279 @@
import json
import os
import hashlib
from typing import Any, Dict, List, Optional
# Default markdown templates for the per-bot workspace files.
DEFAULT_SOUL_MD = "# Soul\n"
DEFAULT_AGENTS_MD = "# Agent Instructions\n"
DEFAULT_USER_MD = "# User Preferences\n"
DEFAULT_TOOLS_MD = "# Tools\n"
DEFAULT_IDENTITY_MD = "# Identity\n"
class EdgeProvisionService:
def __init__(self, *, host_data_root: str) -> None:
    """Remember the host directory that holds every bot's on-disk state."""
    self._host_data_root = host_data_root
def sync_bot_workspace(
self,
*,
bot_id: str,
payload: Any,
) -> Dict[str, Any]:
runtime = dict(getattr(payload, "runtime_overrides", None) or {})
workspace_root_override = self._workspace_root_override(runtime)
workspace_bot_dir = self._bot_workspace_dir(bot_id, workspace_root_override)
state_nanobot_dir = os.path.join(workspace_bot_dir, ".nanobot")
workspace_dir = os.path.join(workspace_bot_dir, ".nanobot", "workspace")
memory_dir = os.path.join(workspace_dir, "memory")
skills_dir = os.path.join(workspace_dir, "skills")
for path in [state_nanobot_dir, workspace_dir, memory_dir, skills_dir]:
os.makedirs(path, exist_ok=True)
channels_override = list(getattr(payload, "channels_override", None) or [])
global_delivery_override = dict(getattr(payload, "global_delivery_override", None) or {})
raw_provider_name = str(runtime.get("llm_provider") or "openrouter").strip().lower()
provider_name = {
"aliyun": "dashscope",
"qwen": "dashscope",
"aliyun-qwen": "dashscope",
"moonshot": "kimi",
"vllm": "openai",
"xunfei": "openai",
"iflytek": "openai",
"xfyun": "openai",
}.get(raw_provider_name, raw_provider_name)
model_name = str(runtime.get("llm_model") or "openai/gpt-4o-mini").strip()
if provider_name == "openai" and raw_provider_name in {"xunfei", "iflytek", "xfyun"} and model_name and "/" not in model_name:
model_name = f"openai/{model_name}"
provider_cfg: Dict[str, Any] = {"apiKey": str(runtime.get("api_key") or "").strip()}
api_base = str(runtime.get("api_base") or "").strip()
if api_base:
provider_cfg["apiBase"] = api_base
channels_cfg: Dict[str, Any] = {
"sendProgress": bool(global_delivery_override.get("sendProgress", runtime.get("send_progress", False))),
"sendToolHints": bool(global_delivery_override.get("sendToolHints", runtime.get("send_tool_hints", False))),
}
existing_config: Dict[str, Any] = {}
config_path = os.path.join(state_nanobot_dir, "config.json")
if os.path.isfile(config_path):
try:
with open(config_path, "r", encoding="utf-8") as fh:
loaded = json.load(fh)
if isinstance(loaded, dict):
existing_config = loaded
except Exception:
existing_config = {}
existing_tools = existing_config.get("tools")
tools_cfg: Dict[str, Any] = dict(existing_tools) if isinstance(existing_tools, dict) else {}
native_sandbox_mode = self._normalize_native_sandbox_mode(runtime.get("native_sandbox_mode"))
if native_sandbox_mode == "workspace":
tools_cfg["restrictToWorkspace"] = True
elif native_sandbox_mode == "full_access":
tools_cfg["restrictToWorkspace"] = False
existing_channels = existing_config.get("channels")
existing_dashboard_cfg = (
existing_channels.get("dashboard")
if isinstance(existing_channels, dict) and isinstance(existing_channels.get("dashboard"), dict)
else {}
)
dashboard_cfg: Dict[str, Any] = {
"enabled": True,
"host": "0.0.0.0",
"port": self._dashboard_port_for_bot(bot_id),
"allowFrom": ["*"],
}
for key in ("host", "port", "allowFrom"):
if key in existing_dashboard_cfg:
dashboard_cfg[key] = existing_dashboard_cfg[key]
dashboard_cfg["port"] = self._dashboard_port_for_bot(bot_id)
channels_cfg["dashboard"] = dashboard_cfg
for channel in channels_override:
channel_type = str(channel.get("channel_type") or "").strip().lower()
if not channel_type or channel_type == "dashboard":
continue
extra = channel.get("extra_config") if isinstance(channel.get("extra_config"), dict) else {}
enabled = bool(channel.get("is_active", True))
external = str(channel.get("external_app_id") or "")
secret = str(channel.get("app_secret") or "")
if channel_type == "telegram":
channels_cfg["telegram"] = {
"enabled": enabled,
"token": secret,
"proxy": extra.get("proxy", ""),
"replyToMessage": bool(extra.get("replyToMessage", False)),
"allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
}
continue
if channel_type == "feishu":
channels_cfg["feishu"] = {
"enabled": enabled,
"appId": external,
"appSecret": secret,
"encryptKey": extra.get("encryptKey", ""),
"verificationToken": extra.get("verificationToken", ""),
"allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
}
continue
if channel_type == "dingtalk":
channels_cfg["dingtalk"] = {
"enabled": enabled,
"clientId": external,
"clientSecret": secret,
"allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
}
continue
if channel_type == "slack":
channels_cfg["slack"] = {
"enabled": enabled,
"mode": extra.get("mode", "socket"),
"botToken": external,
"appToken": secret,
"replyInThread": bool(extra.get("replyInThread", True)),
"groupPolicy": extra.get("groupPolicy", "mention"),
"groupAllowFrom": extra.get("groupAllowFrom", []),
"reactEmoji": extra.get("reactEmoji", "eyes"),
}
continue
if channel_type == "qq":
channels_cfg["qq"] = {
"enabled": enabled,
"appId": external,
"secret": secret,
"allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
}
continue
if channel_type == "email":
channels_cfg["email"] = {
"enabled": enabled,
"consentGranted": bool(extra.get("consentGranted", False)),
"imapHost": extra.get("imapHost", ""),
"imapPort": max(1, min(int(extra.get("imapPort", 993) or 993), 65535)),
"imapUsername": extra.get("imapUsername", ""),
"imapPassword": extra.get("imapPassword", ""),
"imapMailbox": extra.get("imapMailbox", "INBOX"),
"imapUseSsl": bool(extra.get("imapUseSsl", True)),
"smtpHost": extra.get("smtpHost", ""),
"smtpPort": max(1, min(int(extra.get("smtpPort", 587) or 587), 65535)),
"smtpUsername": extra.get("smtpUsername", ""),
"smtpPassword": extra.get("smtpPassword", ""),
"smtpUseTls": bool(extra.get("smtpUseTls", True)),
"smtpUseSsl": bool(extra.get("smtpUseSsl", False)),
"fromAddress": extra.get("fromAddress", ""),
"autoReplyEnabled": bool(extra.get("autoReplyEnabled", True)),
"pollIntervalSeconds": max(5, int(extra.get("pollIntervalSeconds", 30) or 30)),
"markSeen": bool(extra.get("markSeen", True)),
"maxBodyChars": max(1, int(extra.get("maxBodyChars", 12000) or 12000)),
"subjectPrefix": extra.get("subjectPrefix", "Re: "),
"allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
}
continue
channels_cfg[channel_type] = {
"enabled": enabled,
"appId": external,
"appSecret": secret,
**extra,
}
config_data: Dict[str, Any] = {
"agents": {
"defaults": {
"workspace": workspace_dir,
"model": model_name,
"temperature": float(runtime.get("temperature") or 0.2),
"topP": float(runtime.get("top_p") or 1.0),
"maxTokens": int(runtime.get("max_tokens") or 8192),
}
},
"providers": {provider_name: provider_cfg},
"channels": channels_cfg,
}
if tools_cfg:
config_data["tools"] = tools_cfg
self._write_json(config_path, config_data)
runtime_target = {
"runtime_kind": str(runtime.get("runtime_kind") or "").strip().lower(),
"transport_kind": str(runtime.get("transport_kind") or "").strip().lower(),
"core_adapter": str(runtime.get("core_adapter") or "").strip().lower(),
}
if native_sandbox_mode != "inherit":
runtime_target["native_sandbox_mode"] = native_sandbox_mode
if workspace_root_override:
runtime_target["workspace_root"] = workspace_root_override
if any(runtime_target.values()):
runtime_target_path = os.path.join(state_nanobot_dir, "runtime-target.json")
self._write_json(runtime_target_path, runtime_target)
bootstrap_files = {
"AGENTS.md": str(runtime.get("agents_md") or DEFAULT_AGENTS_MD).strip() + "\n",
"SOUL.md": str(runtime.get("soul_md") or runtime.get("system_prompt") or DEFAULT_SOUL_MD).strip() + "\n",
"USER.md": str(runtime.get("user_md") or DEFAULT_USER_MD).strip() + "\n",
"TOOLS.md": str(runtime.get("tools_md") or DEFAULT_TOOLS_MD).strip() + "\n",
"IDENTITY.md": str(runtime.get("identity_md") or DEFAULT_IDENTITY_MD).strip() + "\n",
}
for filename, content in bootstrap_files.items():
file_path = os.path.join(workspace_dir, filename)
with open(file_path, "w", encoding="utf-8") as fh:
fh.write(content)
return {"status": "ok"}
@staticmethod
def _normalize_allow_from(raw: Any) -> List[str]:
rows: List[str] = []
if isinstance(raw, list):
for item in raw:
text = str(item or "").strip()
if text and text not in rows:
rows.append(text)
if not rows:
return ["*"]
return rows
@staticmethod
def _dashboard_port_for_bot(bot_id: str) -> int:
digest = hashlib.sha1(str(bot_id or "").strip().encode("utf-8")).hexdigest()
return 19000 + (int(digest[:6], 16) % 2000)
@staticmethod
def _workspace_root_override(runtime_overrides: Dict[str, Any]) -> str:
raw = str(runtime_overrides.get("workspace_root") or "").strip()
if not raw:
return ""
return os.path.abspath(os.path.expanduser(raw))
@staticmethod
def _normalize_native_sandbox_mode(raw_value: Any) -> str:
text = str(raw_value or "").strip().lower()
if text in {"workspace", "sandbox", "strict"}:
return "workspace"
if text in {"full_access", "full-access", "danger-full-access", "escape"}:
return "full_access"
return "inherit"
def _bot_workspace_dir(self, bot_id: str, workspace_root_override: str) -> str:
if not workspace_root_override:
return os.path.abspath(os.path.join(self._host_data_root, str(bot_id or "").strip()))
return os.path.abspath(os.path.join(workspace_root_override, str(bot_id or "").strip()))
@staticmethod
def _write_json(path: str, payload: Dict[str, Any]) -> None:
os.makedirs(os.path.dirname(path), exist_ok=True)
with open(path, "w", encoding="utf-8") as fh:
json.dump(payload, fh, ensure_ascii=False, indent=2)
edge_provision_service: EdgeProvisionService | None = None

View File

@ -0,0 +1,511 @@
import json
import os
import shlex
import shutil
import csv
from datetime import datetime, timezone
import psutil
from fastapi import HTTPException
from app.core.settings import EDGE_BOTS_WORKSPACE_ROOT, EDGE_NODE_ID, EDGE_NODE_NAME
from app.runtime.base import EdgeRuntimeBackend
from app.runtime.factory import build_edge_runtime_backends, preferred_edge_runtime_kind
from app.schemas.edge import (
EdgeCommandRequest,
EdgeLogsResponse,
EdgeMonitorEnsureResponse,
EdgeMonitorPacket,
EdgeMonitorPacketsResponse,
EdgeNodeHeartbeatResponse,
EdgeNodeResourcesResponse,
EdgeNodeSelfResponse,
EdgeStatusResponse,
NODE_PROTOCOL_VERSION,
)
from app.schemas.runtime import EdgeStartBotRequest
class EdgeRuntimeService:
    """Facade over the configured runtime backends (e.g. docker/native).

    Routes per-bot lifecycle operations (start/stop/command/logs/monitor) to
    the backend that owns the bot, persists the chosen runtime kind in a
    per-bot ``runtime-target.json``, and aggregates node-level capability and
    resource reports for the dashboard.
    """
    def __init__(self) -> None:
        # Backends keyed by lowercase runtime kind; non-string keys dropped.
        self._runtime_backends: dict[str, EdgeRuntimeBackend] = {
            str(kind).strip().lower(): backend
            for kind, backend in build_edge_runtime_backends().items()
            if isinstance(kind, str)
        }
        # Per-bot ring buffer of the most recent monitor packets (max 200).
        self._recent_packets: dict[str, list[dict]] = {}
        # Per-bot monotonically increasing packet sequence counters.
        self._packet_counters: dict[str, int] = {}
        # Bots whose historical logs were already replayed into the buffer.
        self._backfilled_bots: set[str] = set()
    def _runtime_kind(self) -> str:
        """Return the node's preferred runtime kind among available backends."""
        return preferred_edge_runtime_kind(self._runtime_backends)
    def capabilities(self) -> dict:
        """Merge capability reports from all backends into a single dict.

        Boolean flags in the runtime/workspace/monitor groups are OR-ed across
        backends; ``process`` entries are copied as-is (later backends win).
        Groups with no entries are omitted from the result.
        """
        caps: dict = {"protocol": {"version": NODE_PROTOCOL_VERSION}}
        runtime_caps: dict[str, bool] = {}
        workspace_caps: dict[str, bool] = {}
        monitor_caps: dict[str, bool] = {}
        process_caps: dict[str, object] = {}
        for backend in self._runtime_backends.values():
            current = dict(backend.capabilities() if hasattr(backend, "capabilities") else {})
            for key, value in dict(current.get("runtime") or {}).items():
                normalized = str(key or "").strip().lower()
                if not normalized:
                    continue
                runtime_caps[normalized] = bool(runtime_caps.get(normalized) or value is True)
            for key, value in dict(current.get("workspace") or {}).items():
                normalized = str(key or "").strip()
                if not normalized:
                    continue
                workspace_caps[normalized] = bool(workspace_caps.get(normalized) or value is True)
            for key, value in dict(current.get("monitor") or {}).items():
                normalized = str(key or "").strip()
                if not normalized:
                    continue
                monitor_caps[normalized] = bool(monitor_caps.get(normalized) or value is True)
            for key, value in dict(current.get("process") or {}).items():
                normalized = str(key or "").strip()
                if normalized:
                    process_caps[normalized] = value
        if runtime_caps:
            caps["runtime"] = runtime_caps
        if workspace_caps:
            caps["workspace"] = workspace_caps
        if monitor_caps:
            caps["monitor"] = monitor_caps
        if process_caps:
            caps["process"] = process_caps
        return caps
    async def start_bot(self, *, bot_id: str, payload: EdgeStartBotRequest) -> EdgeStatusResponse:
        """Start *bot_id* on the resolved backend.

        Persists the runtime target before launching so later operations
        resolve to the same backend. Raises HTTP 500 with the backend's last
        delivery error when the start fails.
        """
        runtime_kind = self._resolve_runtime_kind(bot_id, preferred=payload.runtime_kind)
        backend = self._backend_for_bot(bot_id, preferred=runtime_kind)
        self._write_runtime_target(
            bot_id=bot_id,
            runtime_kind=runtime_kind,
            workspace_root=str(payload.workspace_root or "").strip() or None,
        )
        success = backend.start_bot(
            bot_id=bot_id,
            image_tag=str(payload.image_tag or "").strip(),
            env_vars=dict(payload.env_vars or {}),
            workspace_root=str(payload.workspace_root or "").strip() or None,
            native_command=str(payload.native_command or "").strip() or None,
            native_workdir=str(payload.native_workdir or "").strip() or None,
            cpu_cores=float(payload.cpu_cores),
            memory_mb=int(payload.memory_mb),
            storage_gb=int(payload.storage_gb),
            on_state_change=self._record_monitor_packet,
        )
        if not success:
            detail = backend.get_last_delivery_error(bot_id) or f"Failed to start bot {bot_id} on dashboard-edge"
            raise HTTPException(status_code=500, detail=detail)
        return EdgeStatusResponse(status="started")
    def stop_bot(self, *, bot_id: str) -> EdgeStatusResponse:
        """Best-effort stop of *bot_id* on every backend, resolved kind first.

        Backend exceptions are swallowed so each backend gets a chance to
        stop its copy; always reports "stopped".
        """
        resolved_kind = self._resolve_runtime_kind(bot_id)
        ordered_kinds: list[str] = []
        if resolved_kind:
            ordered_kinds.append(resolved_kind)
        for kind in self._runtime_backends.keys():
            if kind not in ordered_kinds:
                ordered_kinds.append(kind)
        for kind in ordered_kinds:
            backend = self._runtime_backends.get(kind)
            if backend is None:
                continue
            try:
                backend.stop_bot(bot_id)
            except Exception:
                continue
        return EdgeStatusResponse(status="stopped")
    def send_command(self, *, bot_id: str, payload: EdgeCommandRequest) -> EdgeStatusResponse:
        """Deliver a command (plus media) to the bot; HTTP 502 on failure."""
        backend = self._backend_for_bot(bot_id)
        ok = backend.send_command(bot_id, payload.command, media=list(payload.media or []))
        if not ok:
            detail = backend.get_last_delivery_error(bot_id) or "command delivery failed"
            raise HTTPException(status_code=502, detail=detail)
        return EdgeStatusResponse(status="ok")
    def ensure_monitor(self, *, bot_id: str) -> EdgeMonitorEnsureResponse:
        """Ask the backend to attach the monitor callback for *bot_id*."""
        backend = self._backend_for_bot(bot_id)
        ensured = backend.ensure_monitor(bot_id, self._record_monitor_packet)
        return EdgeMonitorEnsureResponse(ensured=bool(ensured))
    def get_recent_logs(self, *, bot_id: str, tail: int) -> EdgeLogsResponse:
        """Return the last *tail* log lines for *bot_id*."""
        backend = self._backend_for_bot(bot_id)
        return EdgeLogsResponse(bot_id=bot_id, logs=backend.get_recent_logs(bot_id, tail=tail))
    def get_monitor_packets(self, *, bot_id: str, after_seq: int = 0, limit: int = 200) -> EdgeMonitorPacketsResponse:
        """Return buffered monitor packets with seq > *after_seq*, oldest first.

        Triggers a one-time backfill from historical logs on first access.
        """
        self._backfill_monitor_packets(bot_id=bot_id)
        rows = [
            dict(row)
            for row in self._recent_packets.get(bot_id, [])
            if int(row.get("seq") or 0) > max(0, int(after_seq or 0))
        ]
        rows.sort(key=lambda row: int(row.get("seq") or 0))
        if limit > 0:
            rows = rows[: int(limit)]
        latest_seq = int(self._packet_counters.get(bot_id, 0) or 0)
        return EdgeMonitorPacketsResponse(
            protocol_version=NODE_PROTOCOL_VERSION,
            node_id=EDGE_NODE_ID,
            bot_id=bot_id,
            latest_seq=latest_seq,
            packets=[
                EdgeMonitorPacket.model_validate(
                    {
                        "protocol_version": NODE_PROTOCOL_VERSION,
                        "node_id": EDGE_NODE_ID,
                        "bot_id": bot_id,
                        **row,
                    }
                )
                for row in rows
            ],
        )
    def get_runtime_status(self, *, bot_id: str) -> EdgeStatusResponse:
        """Return the backend-reported status string for *bot_id*."""
        backend = self._backend_for_bot(bot_id)
        return EdgeStatusResponse(status=backend.get_bot_status(bot_id))
    def get_resource_snapshot(self, *, bot_id: str) -> dict:
        """Return the backend's resource snapshot, tagged with runtime kind."""
        backend = self._backend_for_bot(bot_id)
        snapshot = dict(backend.get_bot_resource_snapshot(bot_id) or {})
        snapshot.setdefault("runtime_kind", self._resolve_runtime_kind(bot_id))
        return snapshot
    def get_node_identity(self) -> EdgeNodeSelfResponse:
        """Return the node's full identity document (capabilities + resources)."""
        resources = self.get_node_resource_summary()
        return EdgeNodeSelfResponse(
            protocol_version=resources.protocol_version,
            node_id=EDGE_NODE_ID,
            display_name=EDGE_NODE_NAME,
            service="dashboard-edge",
            transport_kind="edge",
            runtime_kind=self._runtime_kind(),
            core_adapter="nanobot",
            capabilities=self.capabilities(),
            resources=dict(resources.resources or {}),
            reported_at=resources.reported_at,
        )
    def get_node_resource_summary(self) -> EdgeNodeResourcesResponse:
        """Sample host CPU/memory/disk via psutil; failures degrade to zeros.

        Disk figures are taken for the bots workspace root; when disk_usage
        fails, workspace usage is recomputed by walking the tree.
        """
        cpu_percent = 0.0
        try:
            cpu_percent = float(psutil.cpu_percent(interval=None) or 0.0)
        except Exception:
            cpu_percent = 0.0
        memory_total = 0
        memory_used = 0
        try:
            memory = psutil.virtual_memory()
            memory_total = int(getattr(memory, "total", 0) or 0)
            memory_used = int(getattr(memory, "used", 0) or 0)
        except Exception:
            memory_total = 0
            memory_used = 0
        workspace_limit = 0
        workspace_used = 0
        try:
            disk = psutil.disk_usage(EDGE_BOTS_WORKSPACE_ROOT)
            workspace_limit = int(getattr(disk, "total", 0) or 0)
            workspace_used = int(getattr(disk, "used", 0) or 0)
        except Exception:
            workspace_limit = 0
            workspace_used = self._calc_workspace_used_bytes()
        cpu_cores = 0.0
        try:
            cpu_cores = float(psutil.cpu_count(logical=True) or 0)
        except Exception:
            cpu_cores = 0.0
        return EdgeNodeResourcesResponse(
            protocol_version=NODE_PROTOCOL_VERSION,
            node_id=EDGE_NODE_ID,
            display_name=EDGE_NODE_NAME,
            transport_kind="edge",
            runtime_kind=self._runtime_kind(),
            core_adapter="nanobot",
            resources={
                "configured_cpu_cores": round(cpu_cores, 2),
                "configured_memory_bytes": memory_total,
                "configured_storage_bytes": workspace_limit,
                "live_cpu_percent": round(cpu_percent, 2),
                "live_memory_used_bytes": memory_used,
                "live_memory_limit_bytes": memory_total,
                "workspace_used_bytes": workspace_used,
                "workspace_limit_bytes": workspace_limit,
            },
            reported_at=datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
        )
    def heartbeat(self) -> EdgeNodeHeartbeatResponse:
        """Return a heartbeat payload: identity, capabilities and resources."""
        node_resources = self.get_node_resource_summary()
        return EdgeNodeHeartbeatResponse(
            protocol_version=NODE_PROTOCOL_VERSION,
            node_id=EDGE_NODE_ID,
            display_name=EDGE_NODE_NAME,
            service="dashboard-edge",
            transport_kind="edge",
            runtime_kind=self._runtime_kind(),
            core_adapter="nanobot",
            capabilities=self.capabilities(),
            resources=dict(node_resources.resources or {}),
            reported_at=datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
        )
    def native_preflight(self, *, native_command: str | None = None, native_workdir: str | None = None) -> dict:
        """Validate that a native launch would work without actually launching.

        Checks that the (parsed or backend-default) command resolves on PATH
        and that the configured workdir, if any, exists. Returns a diagnostic
        dict; never raises.
        """
        raw_command = str(native_command or "").strip()
        command_parts: list[str] = []
        parse_error = ""
        if raw_command:
            command_parts, parse_error = self._parse_native_command(raw_command)
        # No explicit command: fall back to the native backend's default.
        if not raw_command and not command_parts:
            backend = self._runtime_backends.get("native")
            process_caps = {}
            if backend is not None:
                process_caps = dict((backend.capabilities() or {}).get("process") or {})
            command_parts = [str(item or "").strip() for item in list(process_caps.get("command") or []) if str(item or "").strip()]
        command_available = bool(command_parts and shutil.which(command_parts[0]))
        configured_workdir = str(native_workdir or "").strip()
        if configured_workdir:
            workdir = os.path.abspath(configured_workdir)
            workdir_exists = os.path.isdir(workdir)
        else:
            # An unset workdir is treated as fine (backend picks its own).
            workdir = ""
            workdir_exists = True
        ok = bool(command_available and workdir_exists)
        detail_parts: list[str] = []
        if not command_available:
            detail_parts.append("native command not available")
        if not workdir_exists:
            detail_parts.append("native workdir does not exist")
        if parse_error:
            detail_parts.append(parse_error)
        if not detail_parts:
            detail_parts.append("native launcher ready")
        return {
            "ok": ok,
            "command": command_parts,
            "workdir": workdir,
            "command_available": command_available,
            "workdir_exists": workdir_exists,
            "detail": "; ".join(detail_parts),
        }
    @staticmethod
    def _parse_native_command(raw_command: str) -> tuple[list[str], str]:
        """Parse a command string into argv parts.

        Accepts a JSON list, a quoted comma-separated list, or shell-style
        text (in that order of preference). Returns (parts, error_message);
        an empty error means success.
        """
        text = str(raw_command or "").strip()
        if not text:
            return [], ""
        if text.startswith("[") and text.endswith("]"):
            try:
                payload = json.loads(text)
                if isinstance(payload, list):
                    rows = [str(item or "").strip() for item in payload if str(item or "").strip()]
                    if rows:
                        return rows, ""
                    return [], "native command JSON list is empty"
            except Exception:
                return [], "native command JSON is invalid"
        # Quoted CSV form, e.g. "python","-m","bot" — only when quotes appear.
        if "," in text and any(mark in text for mark in ['"', "'"]):
            try:
                rows = [str(item or "").strip() for item in next(csv.reader([text], skipinitialspace=True)) if str(item or "").strip()]
                if rows:
                    return rows, ""
            except Exception:
                pass
        try:
            rows = [str(item or "").strip() for item in shlex.split(text) if str(item or "").strip()]
            if rows:
                return rows, ""
            return [], "native command is empty"
        except Exception:
            return [], "native command format is invalid"
    def _record_monitor_packet(self, bot_id: str, packet: dict) -> None:
        """Append *packet* to the bot's buffer with the next sequence number.

        The buffer is trimmed to the newest 200 entries.
        """
        rows = self._recent_packets.setdefault(bot_id, [])
        next_seq = int(self._packet_counters.get(bot_id, 0) or 0) + 1
        self._packet_counters[bot_id] = next_seq
        captured_at = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
        rows.append(
            {
                "protocol_version": NODE_PROTOCOL_VERSION,
                "node_id": EDGE_NODE_ID,
                "bot_id": bot_id,
                "seq": next_seq,
                "captured_at": captured_at,
                "packet": dict(packet or {}),
            }
        )
        if len(rows) > 200:
            del rows[:-200]
    def _backfill_monitor_packets(self, bot_id: str) -> None:
        """One-time replay of parseable packets from the bot's recent logs."""
        if bot_id in self._backfilled_bots:
            return
        self._backfilled_bots.add(bot_id)
        backend = self._backend_for_bot(bot_id)
        for line in backend.get_recent_logs(bot_id, tail=500):
            packet = backend.parse_monitor_packet(line)
            if packet:
                self._record_monitor_packet(bot_id, packet)
    def _backend_for_bot(self, bot_id: str, preferred: str | None = None) -> EdgeRuntimeBackend:
        """Return the backend owning *bot_id*; HTTP 501 when unavailable."""
        runtime_kind = self._resolve_runtime_kind(bot_id, preferred=preferred)
        backend = self._runtime_backends.get(runtime_kind)
        if backend is None:
            raise HTTPException(status_code=501, detail=f"dashboard-edge runtime is not available: {runtime_kind}")
        return backend
    def _resolve_runtime_kind(self, bot_id: str, preferred: str | None = None) -> str:
        """Pick the runtime kind for *bot_id*.

        Priority: explicit *preferred* → persisted runtime-target → whichever
        backend currently reports the bot RUNNING → node default.
        """
        normalized_preferred = self._normalize_runtime_kind(preferred, allow_empty=True)
        if normalized_preferred and normalized_preferred in self._runtime_backends:
            return normalized_preferred
        persisted = self._normalize_runtime_kind(self._read_runtime_target(bot_id), allow_empty=True)
        if persisted and persisted in self._runtime_backends:
            return persisted
        for runtime_kind, backend in self._runtime_backends.items():
            try:
                if str(backend.get_bot_status(bot_id) or "").strip().upper() == "RUNNING":
                    return runtime_kind
            except Exception:
                continue
        return self._runtime_kind()
    @staticmethod
    def _normalize_runtime_kind(value: str | None, *, allow_empty: bool = False) -> str:
        """Normalize to "docker"/"native"; unknown values fall back to docker."""
        text = str(value or "").strip().lower()
        if allow_empty and not text:
            return ""
        return text if text in {"docker", "native"} else "docker"
    @staticmethod
    def _runtime_target_path(bot_id: str) -> str:
        """Primary location of the bot's runtime-target.json."""
        return os.path.join(EDGE_BOTS_WORKSPACE_ROOT, str(bot_id or "").strip(), ".nanobot", "runtime-target.json")
    @staticmethod
    def _config_path(bot_id: str) -> str:
        """Primary location of the bot's nanobot config.json."""
        return os.path.join(EDGE_BOTS_WORKSPACE_ROOT, str(bot_id or "").strip(), ".nanobot", "config.json")
    def _read_runtime_target(self, bot_id: str) -> str:
        """Return the persisted runtime kind, or "" when unknown."""
        payload = self._read_runtime_target_payload(bot_id)
        if isinstance(payload, dict):
            return str(payload.get("runtime_kind") or "").strip().lower()
        return ""
    def _read_runtime_target_payload(self, bot_id: str) -> dict:
        """Read the first parseable runtime-target.json among candidate paths."""
        for path in self._runtime_target_paths_for_read(bot_id):
            if not os.path.isfile(path):
                continue
            try:
                with open(path, "r", encoding="utf-8") as fh:
                    payload = json.load(fh)
                if isinstance(payload, dict):
                    return payload
            except Exception:
                continue
        return {}
    def _write_runtime_target(self, *, bot_id: str, runtime_kind: str, workspace_root: str | None = None) -> None:
        """Merge runtime_kind/workspace_root into runtime-target.json.

        When an external workspace root is set, the file moves there and the
        stale copy under the default root is deleted (best effort).
        """
        payload = dict(self._read_runtime_target_payload(bot_id))
        payload["runtime_kind"] = self._normalize_runtime_kind(runtime_kind)
        if workspace_root is not None:
            normalized_root = str(workspace_root or "").strip()
            if normalized_root:
                payload["workspace_root"] = os.path.abspath(os.path.expanduser(normalized_root))
            else:
                payload.pop("workspace_root", None)
        paths = self._runtime_target_paths(bot_id=bot_id, payload=payload)
        for path in paths:
            os.makedirs(os.path.dirname(path), exist_ok=True)
            with open(path, "w", encoding="utf-8") as fh:
                json.dump(payload, fh, ensure_ascii=False, indent=2)
        primary = self._runtime_target_path(bot_id)
        if primary not in paths and os.path.isfile(primary):
            try:
                os.remove(primary)
            except Exception:
                pass
    def _runtime_target_paths(self, *, bot_id: str, payload: dict) -> list[str]:
        """Write locations for runtime-target.json (external root wins)."""
        primary = self._runtime_target_path(bot_id)
        workspace_root = str(payload.get("workspace_root") or "").strip()
        if workspace_root:
            external = os.path.join(
                os.path.abspath(os.path.expanduser(workspace_root)),
                str(bot_id or "").strip(),
                ".nanobot",
                "runtime-target.json",
            )
            if os.path.abspath(external) != os.path.abspath(primary):
                return [external]
        return [primary]
    def _runtime_target_paths_for_read(self, bot_id: str) -> list[str]:
        """Read candidates: external root (from config) first, then primary."""
        primary = self._runtime_target_path(bot_id)
        rows: list[str] = [primary]
        workspace_root = self._workspace_root_from_config(bot_id)
        if workspace_root:
            external = os.path.join(
                workspace_root,
                str(bot_id or "").strip(),
                ".nanobot",
                "runtime-target.json",
            )
            if os.path.abspath(external) != os.path.abspath(primary):
                rows.insert(0, external)
        return rows
    def _workspace_root_from_config(self, bot_id: str) -> str:
        """Infer the external workspace root from config.json, "" if none.

        Strips the trailing `/<bot_id>/.nanobot/workspace` marker from the
        agents.defaults.workspace path to recover the root.
        """
        path = self._config_path(bot_id)
        if not os.path.isfile(path):
            return ""
        try:
            with open(path, "r", encoding="utf-8") as fh:
                payload = json.load(fh)
            if not isinstance(payload, dict):
                return ""
            agents = payload.get("agents")
            if not isinstance(agents, dict):
                return ""
            defaults = agents.get("defaults")
            if not isinstance(defaults, dict):
                return ""
            workspace = str(defaults.get("workspace") or "").strip()
            if not workspace:
                return ""
            normalized_workspace = os.path.abspath(os.path.expanduser(workspace))
            marker = f"{os.sep}{str(bot_id or '').strip()}{os.sep}.nanobot{os.sep}workspace"
            if marker in normalized_workspace:
                return normalized_workspace.rsplit(marker, 1)[0]
        except Exception:
            return ""
        return ""
    @staticmethod
    def _calc_workspace_used_bytes() -> int:
        """Sum file sizes under the bots workspace root (unreadable files skipped)."""
        total = 0
        for root, _, files in os.walk(EDGE_BOTS_WORKSPACE_ROOT):
            for filename in files:
                path = os.path.join(root, filename)
                try:
                    total += int(os.path.getsize(path))
                except Exception:
                    continue
        return total


# Module-level singleton used by the edge API routes.
edge_runtime_service = EdgeRuntimeService()

View File

@ -0,0 +1,133 @@
import json
import os
from typing import Any, Dict, Optional
from fastapi import HTTPException
class EdgeStateStoreService:
_STATE_FILE_MAP = {
"config": ("config.json",),
"env": ("env.json",),
"resources": ("resources.json",),
"cron": ("cron", "jobs.json"),
}
def __init__(self, *, host_data_root: str) -> None:
self._host_data_root = os.path.abspath(os.path.expanduser(str(host_data_root or "").strip()))
def read_state(self, *, bot_id: str, state_key: str, workspace_root: Optional[str] = None) -> Dict[str, Any]:
normalized_bot_id = self._normalize_bot_id(bot_id)
normalized_key = self._normalize_state_key(state_key)
path = self._state_file_path(normalized_bot_id, normalized_key, workspace_root=workspace_root)
payload = self._default_payload(normalized_key)
if os.path.isfile(path):
loaded = self._read_json(path)
if isinstance(loaded, dict):
payload = self._normalize_state_payload(normalized_key, loaded)
return {"bot_id": normalized_bot_id, "state_key": normalized_key, "data": payload}
def write_state(
self,
*,
bot_id: str,
state_key: str,
data: Dict[str, Any],
workspace_root: Optional[str] = None,
) -> Dict[str, Any]:
normalized_bot_id = self._normalize_bot_id(bot_id)
normalized_key = self._normalize_state_key(state_key)
payload = self._normalize_state_payload(normalized_key, data if isinstance(data, dict) else {})
path = self._state_file_path(normalized_bot_id, normalized_key, workspace_root=workspace_root)
self._write_json_atomic(path, payload)
return {"bot_id": normalized_bot_id, "state_key": normalized_key, "data": payload}
def _state_file_path(self, bot_id: str, state_key: str, *, workspace_root: Optional[str] = None) -> str:
nanobot_root = self._nanobot_root(bot_id, workspace_root=workspace_root)
relative = self._STATE_FILE_MAP[state_key]
return os.path.join(nanobot_root, *relative)
def _nanobot_root(self, bot_id: str, *, workspace_root: Optional[str] = None) -> str:
configured_workspace_root = str(workspace_root or "").strip()
if configured_workspace_root:
normalized_root = os.path.abspath(os.path.expanduser(configured_workspace_root))
return os.path.abspath(os.path.join(normalized_root, bot_id, ".nanobot"))
primary = os.path.abspath(os.path.join(self._host_data_root, bot_id, ".nanobot"))
inferred_workspace_root = self._workspace_root_from_runtime_target(primary)
if inferred_workspace_root:
return os.path.abspath(os.path.join(inferred_workspace_root, bot_id, ".nanobot"))
return primary
@staticmethod
def _workspace_root_from_runtime_target(primary_nanobot_root: str) -> str:
path = os.path.join(primary_nanobot_root, "runtime-target.json")
if not os.path.isfile(path):
return ""
try:
with open(path, "r", encoding="utf-8") as fh:
payload = json.load(fh)
if not isinstance(payload, dict):
return ""
raw_root = str(payload.get("workspace_root") or "").strip()
if not raw_root:
return ""
return os.path.abspath(os.path.expanduser(raw_root))
except Exception:
return ""
@classmethod
def _normalize_state_key(cls, state_key: str) -> str:
normalized = str(state_key or "").strip().lower()
if normalized not in cls._STATE_FILE_MAP:
raise HTTPException(status_code=400, detail=f"unsupported state key: {state_key}")
return normalized
@staticmethod
def _normalize_bot_id(bot_id: str) -> str:
normalized = str(bot_id or "").strip()
if not normalized:
raise HTTPException(status_code=400, detail="bot_id is required")
return normalized
@staticmethod
def _default_payload(state_key: str) -> Dict[str, Any]:
if state_key == "cron":
return {"version": 1, "jobs": []}
return {}
@classmethod
def _normalize_state_payload(cls, state_key: str, payload: Dict[str, Any]) -> Dict[str, Any]:
if state_key == "cron":
normalized = dict(payload if isinstance(payload, dict) else {})
jobs = normalized.get("jobs")
if not isinstance(jobs, list):
jobs = []
try:
version = int(normalized.get("version", 1) or 1)
except Exception:
version = 1
return {"version": max(1, version), "jobs": jobs}
return dict(payload if isinstance(payload, dict) else {})
@staticmethod
def _read_json(path: str) -> Dict[str, Any]:
try:
with open(path, "r", encoding="utf-8") as fh:
payload = json.load(fh)
if isinstance(payload, dict):
return payload
except Exception:
return {}
return {}
@staticmethod
def _write_json_atomic(path: str, payload: Dict[str, Any]) -> None:
os.makedirs(os.path.dirname(path), exist_ok=True)
tmp = f"{path}.tmp"
with open(tmp, "w", encoding="utf-8") as fh:
json.dump(payload, fh, ensure_ascii=False, indent=2)
os.replace(tmp, path)
edge_state_store_service: EdgeStateStoreService | None = None

View File

@ -0,0 +1,441 @@
import mimetypes
import json
import os
import re
from datetime import datetime
from typing import Any, Dict, Generator, List, Optional
from fastapi import HTTPException, Request, UploadFile
from fastapi.responses import FileResponse, Response, StreamingResponse
from app.core.settings import EDGE_ALLOWED_ATTACHMENT_EXTENSIONS, EDGE_UPLOAD_MAX_MB
class EdgeWorkspaceService:
    """Filesystem operations for bot workspaces hosted on this edge node.

    Every public method resolves caller-supplied paths beneath the bot's
    workspace root (``<bot_root>/.nanobot/workspace``) and rejects attempts
    to escape it with HTTP 400. Response shapes intentionally mirror the
    central dashboard's local workspace API so the frontend needs no
    edge-specific handling.
    """

    def __init__(self, *, host_data_root: str) -> None:
        # Fallback base directory that holds per-bot state when no explicit
        # workspace_root override (or runtime-target/config hint) is present.
        self._host_data_root = host_data_root

    def list_tree(
        self,
        *,
        bot_id: str,
        path: Optional[str] = None,
        recursive: bool = False,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Return a directory listing for *path* inside the bot workspace.

        Returns an empty listing when the workspace root does not exist yet;
        raises HTTP 400 when the resolved target is not a directory.
        """
        root = self._workspace_root(bot_id, workspace_root=workspace_root)
        if not os.path.isdir(root):
            return {"bot_id": bot_id, "root": root, "cwd": "", "parent": None, "entries": []}
        _, target = self._resolve_workspace_path(bot_id, path, workspace_root=workspace_root)
        if not os.path.isdir(target):
            raise HTTPException(status_code=400, detail="workspace path is not a directory")
        # Normalise cwd to forward slashes; empty string means the root itself.
        cwd = os.path.relpath(target, root).replace("\\", "/")
        if cwd == ".":
            cwd = ""
        parent = None
        if cwd:
            parent = os.path.dirname(cwd).replace("\\", "/")
            if parent == ".":
                parent = ""
        return {
            "bot_id": bot_id,
            "root": root,
            "cwd": cwd,
            "parent": parent,
            "entries": self._list_workspace_dir_recursive(target, root) if recursive else self._list_workspace_dir(target, root),
        }

    def read_file(
        self,
        *,
        bot_id: str,
        path: str,
        max_bytes: int = 200000,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Return a UTF-8 text preview of a workspace file.

        ``max_bytes`` is clamped to [4096, 1000000]. Files containing NUL
        bytes are treated as binary and rejected with HTTP 400.
        """
        root, target = self._resolve_workspace_path(bot_id, path, workspace_root=workspace_root)
        if not os.path.isfile(target):
            raise HTTPException(status_code=404, detail="workspace file not found")
        safe_max = max(4096, min(int(max_bytes), 1000000))
        # Read one byte past the limit so truncation can be detected.
        with open(target, "rb") as fh:
            raw = fh.read(safe_max + 1)
        if b"\x00" in raw:
            raise HTTPException(status_code=400, detail="binary file is not previewable")
        truncated = len(raw) > safe_max
        body = raw[:safe_max] if truncated else raw
        rel_path = os.path.relpath(target, root).replace("\\", "/")
        ext = os.path.splitext(target)[1].lower()
        return {
            "bot_id": bot_id,
            "path": rel_path,
            "size": os.path.getsize(target),
            "is_markdown": ext in {".md", ".markdown"},
            "truncated": truncated,
            "content": body.decode("utf-8", errors="replace"),
        }

    def write_markdown(
        self,
        *,
        bot_id: str,
        path: str,
        content: str,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Overwrite an existing markdown file with *content* (atomic write).

        Only ``.md``/``.markdown`` files may be edited; the target must
        already exist. Raises HTTP 413 above ~2 MB and HTTP 400 for NUL bytes.
        """
        root, target = self._resolve_workspace_path(bot_id, path, workspace_root=workspace_root)
        if not os.path.isfile(target):
            raise HTTPException(status_code=404, detail="workspace file not found")
        ext = os.path.splitext(target)[1].lower()
        if ext not in {".md", ".markdown"}:
            raise HTTPException(status_code=400, detail=f"editing is only supported for markdown files: {ext or '(none)'}")
        encoded = str(content or "").encode("utf-8")
        if len(encoded) > 2_000_000:
            raise HTTPException(status_code=413, detail="markdown file too large to save")
        if "\x00" in str(content or ""):
            raise HTTPException(status_code=400, detail="markdown content contains invalid null bytes")
        self._write_text_atomic(target, str(content or ""))
        rel_path = os.path.relpath(target, root).replace("\\", "/")
        return {
            "bot_id": bot_id,
            "path": rel_path,
            "size": os.path.getsize(target),
            "is_markdown": True,
            "truncated": False,
            "content": str(content or ""),
        }

    async def upload_files(
        self,
        *,
        bot_id: str,
        files: List[UploadFile],
        path: Optional[str] = None,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Stream uploaded files into the workspace (default subdir: uploads).

        Filenames are sanitised, extension-filtered against the configured
        allow-list, deduplicated with a timestamp suffix, and size-capped at
        EDGE_UPLOAD_MAX_MB. Partially written files are removed on failure.
        """
        if not files:
            raise HTTPException(status_code=400, detail="no files uploaded")
        max_bytes = EDGE_UPLOAD_MAX_MB * 1024 * 1024
        allowed_extensions = set(EDGE_ALLOWED_ATTACHMENT_EXTENSIONS)
        root, upload_dir = self._resolve_workspace_path(bot_id, path or "uploads", workspace_root=workspace_root)
        os.makedirs(upload_dir, exist_ok=True)
        # Re-check containment after makedirs as defence in depth.
        safe_dir_real = os.path.abspath(upload_dir)
        if os.path.commonpath([root, safe_dir_real]) != root:
            raise HTTPException(status_code=400, detail="invalid upload target path")
        rows: List[Dict[str, Any]] = []
        for upload in files:
            original = (upload.filename or "upload.bin").strip() or "upload.bin"
            # Strip any client-supplied directory components, then restrict
            # the name to a conservative character set.
            name = os.path.basename(original).replace("\\", "_").replace("/", "_")
            name = re.sub(r"[^\w.\-()+@ ]+", "_", name)
            if not name:
                name = "upload.bin"
            ext = str(os.path.splitext(name)[1] or "").strip().lower()
            if allowed_extensions and ext not in allowed_extensions:
                raise HTTPException(
                    status_code=400,
                    detail=f"File '{name}' extension is not allowed. Allowed: {', '.join(sorted(allowed_extensions))}",
                )
            abs_path = os.path.join(safe_dir_real, name)
            if os.path.exists(abs_path):
                # Avoid clobbering: append an epoch-seconds suffix.
                base, file_ext = os.path.splitext(name)
                name = f"{base}-{int(datetime.utcnow().timestamp())}{file_ext}"
                abs_path = os.path.join(safe_dir_real, name)
            total_size = 0
            try:
                with open(abs_path, "wb") as fh:
                    # Stream in 1 MiB chunks so large uploads never sit in memory.
                    while True:
                        chunk = await upload.read(1024 * 1024)
                        if not chunk:
                            break
                        total_size += len(chunk)
                        if total_size > max_bytes:
                            raise HTTPException(
                                status_code=413,
                                detail=f"File '{name}' too large (max {EDGE_UPLOAD_MAX_MB}MB)",
                            )
                        fh.write(chunk)
            except HTTPException:
                # Remove the partial file, keep the original HTTP error.
                if os.path.exists(abs_path):
                    os.remove(abs_path)
                raise
            except OSError as exc:
                if os.path.exists(abs_path):
                    os.remove(abs_path)
                raise HTTPException(
                    status_code=500,
                    detail=f"Failed to write file '{name}': {exc.strerror or str(exc)}",
                )
            except Exception:
                if os.path.exists(abs_path):
                    os.remove(abs_path)
                raise HTTPException(status_code=500, detail=f"Failed to upload file '{name}'")
            finally:
                await upload.close()
            rel = os.path.relpath(abs_path, root).replace("\\", "/")
            rows.append({"name": name, "path": rel, "size": total_size})
        return {"bot_id": bot_id, "files": rows}

    def serve_file(
        self,
        *,
        bot_id: str,
        path: str,
        download: bool,
        request: Request,
        workspace_root: Optional[str] = None,
    ) -> Response:
        """Serve a workspace file inline or as a download.

        Honours HTTP Range requests for inline access (media scrubbing);
        ``download=True`` forces a full-body attachment response.
        """
        _root, target = self._resolve_workspace_path(bot_id, path, workspace_root=workspace_root)
        if not os.path.isfile(target):
            raise HTTPException(status_code=404, detail="File not found")
        media_type, _ = mimetypes.guess_type(target)
        range_header = request.headers.get("range", "")
        if range_header and not download:
            return self._build_ranged_workspace_response(target, media_type or "application/octet-stream", range_header)
        common_headers = {"Accept-Ranges": "bytes"}
        if download:
            return FileResponse(
                target,
                media_type=media_type or "application/octet-stream",
                filename=os.path.basename(target),
                headers=common_headers,
            )
        return FileResponse(target, media_type=media_type or "application/octet-stream", headers=common_headers)

    def purge_bot_workspace(self, *, bot_id: str, workspace_root: Optional[str] = None) -> Dict[str, Any]:
        """Delete the bot's state root and (if distinct) its workspace root.

        Guards against obviously dangerous targets ("/" or "."); deletion is
        best-effort (``ignore_errors=True``).
        """
        deleted = False
        state_root = self._state_bot_root(bot_id, workspace_root=workspace_root)
        workspace_bot_root = self._workspace_bot_root(bot_id, workspace_root=workspace_root)
        targets = [state_root]
        # The workspace may live elsewhere (runtime-target/config override);
        # only add it when it is actually a different directory.
        if os.path.abspath(workspace_bot_root) != os.path.abspath(state_root):
            targets.append(workspace_bot_root)
        import shutil
        for target in targets:
            if not target or target in {"/", "."}:
                raise HTTPException(status_code=400, detail="invalid bot workspace root")
            if os.path.isdir(target):
                shutil.rmtree(target, ignore_errors=True)
                deleted = True
        return {"bot_id": str(bot_id or "").strip(), "deleted": deleted}

    def _workspace_root(self, bot_id: str, workspace_root: Optional[str] = None) -> str:
        # The user-visible workspace lives under <bot_root>/.nanobot/workspace.
        return os.path.abspath(os.path.join(self._workspace_bot_root(bot_id, workspace_root=workspace_root), ".nanobot", "workspace"))

    def _state_bot_root(self, bot_id: str, workspace_root: Optional[str] = None) -> str:
        """Return the per-bot state directory (explicit override wins)."""
        configured_workspace_root = str(workspace_root or "").strip()
        if configured_workspace_root:
            normalized_root = os.path.abspath(os.path.expanduser(configured_workspace_root))
            return os.path.abspath(os.path.join(normalized_root, str(bot_id or "").strip()))
        return os.path.abspath(os.path.join(self._host_data_root, str(bot_id or "").strip()))

    def _runtime_target_path(self, bot_id: str) -> str:
        # Deployment metadata dropped next to the bot state by the deploy flow.
        return os.path.join(self._state_bot_root(bot_id), ".nanobot", "runtime-target.json")

    def _runtime_target_payload(self, bot_id: str) -> Dict[str, Any]:
        """Best-effort read of runtime-target.json; {} when absent/invalid."""
        path = self._runtime_target_path(bot_id)
        if not os.path.isfile(path):
            return {}
        try:
            with open(path, "r", encoding="utf-8") as fh:
                payload = json.load(fh)
            if isinstance(payload, dict):
                return payload
        except Exception:
            return {}
        return {}

    def _workspace_bot_root(self, bot_id: str, workspace_root: Optional[str] = None) -> str:
        """Resolve the bot's home directory.

        Precedence: explicit *workspace_root* argument, then the
        runtime-target.json hint, then the workspace recorded in the bot's
        nanobot config, finally the state root under host_data_root.
        """
        configured_workspace_root = str(workspace_root or "").strip()
        if configured_workspace_root:
            normalized_root = os.path.abspath(os.path.expanduser(configured_workspace_root))
            return os.path.abspath(os.path.join(normalized_root, str(bot_id or "").strip()))
        payload = self._runtime_target_payload(bot_id)
        # NOTE: intentionally rebinds the (empty) parameter with the stored hint.
        workspace_root = str(payload.get("workspace_root") or "").strip()
        if workspace_root:
            normalized_root = os.path.abspath(os.path.expanduser(workspace_root))
            return os.path.abspath(os.path.join(normalized_root, str(bot_id or "").strip()))
        from_config = self._workspace_bot_root_from_config(bot_id)
        if from_config:
            return from_config
        return self._state_bot_root(bot_id)

    def _workspace_bot_root_from_config(self, bot_id: str) -> str:
        """Derive the bot home from agents.defaults.workspace in config.json.

        The configured workspace is expected to end in ``.nanobot/workspace``;
        the bot home is the directory two levels above that marker. Returns
        "" whenever the config is missing, malformed, or lacks the marker.
        """
        config_path = os.path.join(self._state_bot_root(bot_id), ".nanobot", "config.json")
        if not os.path.isfile(config_path):
            return ""
        try:
            with open(config_path, "r", encoding="utf-8") as fh:
                payload = json.load(fh)
            if not isinstance(payload, dict):
                return ""
            agents = payload.get("agents")
            if not isinstance(agents, dict):
                return ""
            defaults = agents.get("defaults")
            if not isinstance(defaults, dict):
                return ""
            workspace = str(defaults.get("workspace") or "").strip()
            if not workspace:
                return ""
            normalized_workspace = os.path.abspath(os.path.expanduser(workspace))
            if normalized_workspace.endswith("/.nanobot/workspace"):
                return os.path.abspath(os.path.dirname(os.path.dirname(normalized_workspace)))
            # Fallback for platform-specific separators mid-path.
            marker = f"{os.sep}.nanobot{os.sep}workspace"
            if marker in normalized_workspace:
                return os.path.abspath(normalized_workspace.split(marker, 1)[0])
        except Exception:
            return ""
        return ""

    def _resolve_workspace_path(
        self,
        bot_id: str,
        rel_path: Optional[str] = None,
        workspace_root: Optional[str] = None,
    ) -> tuple[str, str]:
        """Resolve *rel_path* under the workspace root; reject traversal.

        Returns ``(root, absolute_target)``. Raises HTTP 400 when the
        normalised target escapes the root (e.g. via ``..`` segments).
        """
        root = self._workspace_root(bot_id, workspace_root=workspace_root)
        rel = (rel_path or "").strip().replace("\\", "/")
        target = os.path.abspath(os.path.join(root, rel))
        # commonpath on normalised absolute paths blocks ../ escapes.
        # NOTE(review): symlinks inside the workspace are not resolved here
        # (abspath, not realpath) — confirm whether that is acceptable.
        if os.path.commonpath([root, target]) != root:
            raise HTTPException(status_code=400, detail="invalid workspace path")
        return root, target

    @staticmethod
    def _ctime_iso(stat: os.stat_result) -> str:
        """Best-available creation time as ISO-8601 UTC ('Z' suffix).

        Prefers st_birthtime (macOS/BSD), then st_ctime; falls back to
        st_mtime when neither converts cleanly (float(None) raises).
        """
        ts = getattr(stat, "st_birthtime", None)
        if ts is None:
            ts = getattr(stat, "st_ctime", None)
        try:
            return datetime.utcfromtimestamp(float(ts)).isoformat() + "Z"
        except Exception:
            return datetime.utcfromtimestamp(stat.st_mtime).isoformat() + "Z"

    @staticmethod
    def _write_text_atomic(target: str, content: str) -> None:
        # Write-to-temp + rename so readers never see a half-written file.
        os.makedirs(os.path.dirname(target), exist_ok=True)
        tmp = f"{target}.tmp"
        with open(tmp, "w", encoding="utf-8") as fh:
            fh.write(content)
        os.replace(tmp, target)

    @staticmethod
    def _stream_file_range(target: str, start: int, end: int, chunk_size: int = 1024 * 1024) -> Generator[bytes, None, None]:
        """Yield bytes [start, end] (inclusive) of *target* in chunks."""
        with open(target, "rb") as fh:
            fh.seek(start)
            remaining = end - start + 1
            while remaining > 0:
                chunk = fh.read(min(chunk_size, remaining))
                if not chunk:
                    # File shrank underneath us; stop rather than spin.
                    break
                remaining -= len(chunk)
                yield chunk

    def _build_ranged_workspace_response(self, target: str, media_type: str, range_header: str) -> Response:
        """Build a single-range HTTP 206 response for *target*.

        Supports ``bytes=start-end``, open-ended ``bytes=start-`` and suffix
        ``bytes=-N`` forms; anything else (or an unsatisfiable range) yields
        HTTP 416. Multi-range requests are not supported.
        """
        file_size = os.path.getsize(target)
        range_match = re.match(r"bytes=(\d*)-(\d*)", range_header.strip())
        if not range_match:
            raise HTTPException(status_code=416, detail="Invalid range")
        start_raw, end_raw = range_match.groups()
        if start_raw == "" and end_raw == "":
            raise HTTPException(status_code=416, detail="Invalid range")
        if start_raw == "":
            # Suffix form: the last N bytes of the file.
            length = int(end_raw)
            if length <= 0:
                raise HTTPException(status_code=416, detail="Invalid range")
            start = max(file_size - length, 0)
            end = file_size - 1
        else:
            start = int(start_raw)
            end = int(end_raw) if end_raw else file_size - 1
        if start >= file_size or start < 0:
            raise HTTPException(status_code=416, detail="Requested range not satisfiable")
        # Clamp a client-requested end that overshoots the file.
        end = min(end, file_size - 1)
        if end < start:
            raise HTTPException(status_code=416, detail="Requested range not satisfiable")
        content_length = end - start + 1
        headers = {
            "Accept-Ranges": "bytes",
            "Content-Range": f"bytes {start}-{end}/{file_size}",
            "Content-Length": str(content_length),
        }
        return StreamingResponse(
            self._stream_file_range(target, start, end),
            status_code=206,
            media_type=media_type or "application/octet-stream",
            headers=headers,
        )

    def _list_workspace_dir(self, path: str, root: str) -> List[Dict[str, Any]]:
        """List immediate children of *path*: directories first, then files,
        each group case-insensitively sorted; .DS_Store entries are hidden."""
        rows: List[Dict[str, Any]] = []
        names = sorted(os.listdir(path), key=lambda v: (not os.path.isdir(os.path.join(path, v)), v.lower()))
        for name in names:
            if name in {".DS_Store"}:
                continue
            abs_path = os.path.join(path, name)
            stat = os.stat(abs_path)
            rows.append(
                {
                    "name": name,
                    "path": os.path.relpath(abs_path, root).replace("\\", "/"),
                    "type": "dir" if os.path.isdir(abs_path) else "file",
                    "size": stat.st_size if os.path.isfile(abs_path) else None,
                    "ext": os.path.splitext(name)[1].lower() if os.path.isfile(abs_path) else "",
                    "ctime": self._ctime_iso(stat),
                    "mtime": datetime.utcfromtimestamp(stat.st_mtime).isoformat() + "Z",
                }
            )
        return rows

    def _list_workspace_dir_recursive(self, path: str, root: str) -> List[Dict[str, Any]]:
        """Walk *path* depth-first, emitting dir rows then file rows per level
        (both case-insensitively sorted); .DS_Store entries are hidden."""
        rows: List[Dict[str, Any]] = []
        for walk_root, dirnames, filenames in os.walk(path):
            # In-place sort steers os.walk's traversal order deterministically.
            dirnames.sort(key=lambda v: v.lower())
            filenames.sort(key=lambda v: v.lower())
            for name in dirnames:
                if name in {".DS_Store"}:
                    continue
                abs_path = os.path.join(walk_root, name)
                stat = os.stat(abs_path)
                rows.append(
                    {
                        "name": name,
                        "path": os.path.relpath(abs_path, root).replace("\\", "/"),
                        "type": "dir",
                        "size": None,
                        "ext": "",
                        "ctime": self._ctime_iso(stat),
                        "mtime": datetime.utcfromtimestamp(stat.st_mtime).isoformat() + "Z",
                    }
                )
            for name in filenames:
                if name in {".DS_Store"}:
                    continue
                abs_path = os.path.join(walk_root, name)
                stat = os.stat(abs_path)
                rows.append(
                    {
                        "name": name,
                        "path": os.path.relpath(abs_path, root).replace("\\", "/"),
                        "type": "file",
                        "size": stat.st_size,
                        "ext": os.path.splitext(name)[1].lower(),
                        "ctime": self._ctime_iso(stat),
                        "mtime": datetime.utcfromtimestamp(stat.st_mtime).isoformat() + "Z",
                    }
                )
        return rows
# Module-level singleton slot; wired up during application startup and
# remains None until then.
edge_workspace_service: EdgeWorkspaceService | None = None

View File

@ -0,0 +1,25 @@
from pathlib import Path
from app.main import app
if __name__ == "__main__":
    import logging
    import uvicorn
    from app.core.settings import EDGE_ACCESS_LOG, EDGE_HOST, EDGE_LOG_LEVEL, EDGE_PORT, EDGE_RELOAD

    # Quiet chatty third-party loggers so edge output stays readable.
    _NOISY_LOGGERS = ("httpx", "httpcore", "uvicorn.access", "watchfiles.main", "watchfiles.watcher")
    for logger_name in _NOISY_LOGGERS:
        logging.getLogger(logger_name).setLevel(logging.WARNING)

    # Watch the service root and its app package only when hot-reload is on.
    service_root = Path(__file__).resolve().parent
    watch_dirs = None
    if EDGE_RELOAD:
        watch_dirs = [str(service_root), str(service_root / "app")]

    uvicorn.run(
        "app.main:app",
        host=EDGE_HOST,
        port=EDGE_PORT,
        reload=EDGE_RELOAD,
        reload_dirs=watch_dirs,
        log_level=EDGE_LOG_LEVEL,
        access_log=EDGE_ACCESS_LOG,
    )

View File

@ -0,0 +1,9 @@
fastapi==0.110.0
uvicorn==0.27.1
pydantic==2.6.3
python-dotenv==1.0.1
httpx==0.27.0
python-multipart==0.0.9
docker==7.0.0
requests==2.31.0
psutil==5.9.8

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.7 MiB

View File

@ -0,0 +1,97 @@
# Edge Phase Report
Date: 2026-03-20
This phase focused on turning `dashboard-edge` from a thin remote stub into a real execution-plane service with a stable protocol and a native runtime skeleton.
## Completed
### 1. Execution-plane service extracted
- `dashboard-edge` now exists as a standalone deployable service.
- It owns node identity, heartbeat, node resources, command delivery, logs, workspace, and monitor packet handling.
### 2. Protocol stabilized
- Heartbeat, node self, node resources, and monitor packets now share a versioned node protocol.
- Node identity fields are consistent across backend and edge.
- Central backend stores node `capabilities`, `resources`, and `last_seen_at`.
### 3. Monitor packets are first-class
- Edge can expose stored monitor packets.
- Backend can poll and persist those packets after command delivery.
- Startup recovery can backfill pending edge conversations from recent logs.
### 4. Runtime abstraction introduced
- Edge execution now routes through `EdgeRuntimeBackend`.
- Docker execution is behind the runtime backend abstraction.
- Native execution has a real skeleton instead of a pure stub.
### 5. Native executor skeleton implemented
- Native runtime can launch a local child process.
- It manages stdout capture, log persistence, process status, command delivery, and resource snapshot reporting.
- Dashboard chat URL and native launch command are configurable through environment variables.
### 6. Node management is operational
- Nodes can be registered, tested, refreshed, and displayed with protocol/resource data.
- The `local-edge` node is now a usable test target.
- A separate `local-edge-native` node is now running against a real native launcher on port `8011`.
### 7. Deploy / migrate flow is now explicit
- A dedicated `POST /api/bots/{bot_id}/deploy` flow now handles cross-node migration and redeploy.
- Basic bot editing no longer accepts execution-target or image switching fields.
- The node home page now exposes a real deploy / migrate modal instead of a design-only placeholder.
### 8. Native launcher defaults are wired
- `dashboard-edge` now auto-detects the local `nanobot-v0.1.4-post5` virtualenv when `EDGE_NATIVE_COMMAND` is not explicitly configured.
- Native bot startup now passes per-bot `--config` and `--workspace`, so instances no longer fall back to the global `~/.nanobot/config.json`.
- A dedicated `scripts/dev-edge-native.sh` helper starts a native edge node as `local-edge-native`.
## Still To Do
### 1. Real native bot contract
- Validate a real Bot lifecycle on `local-edge-native` without disrupting the existing `local-edge` Docker regression target.
- Verify the dashboard channel injection path during a full assistant turn, not only node health and capability probing.
- Continue validating per-bot dashboard port or URL selection when multiple native bots run on one node.
### 2. Deploy flow hardening
- Add richer preflight validation and clearer failure surfacing for edge reachability before switching a Bot.
- Consider a deploy preview / diff step for workspace sync and image changes.
- Extend the flow when a single node eventually supports multiple runtime profiles.
### 3. Node-level resource telemetry
- Improve native runtime resource accounting with more precise metrics.
- Consider node heartbeat frequency and health thresholds.
### 4. Reverse-connect transport
- The design still prefers node-initiated connection to the central backend for remote machines.
- That transport is not fully implemented yet.
### 5. Full remote native rollout
- The native skeleton is in place, but real remote native nodes still need end-to-end validation.
- Existing test bot `unis_bot_a01` can be used as a regression target when those pieces are connected.
## Verification
- Backend and edge Python modules compile cleanly.
- `dashboard-edge` can report protocol-versioned node identity and resources.
- `local-edge` is already visible and testable from the central dashboard.
- `local-edge-native` is registered, online, and reports `runtime.native=true` with the real launcher command.
- Frontend build passes with the deploy / migrate modal wired into the active node home page.
## Notes
- `direct` remains the local fallback path.
- `edge` is now the intended execution-plane abstraction for both Docker and future Native nodes.
- The long-term goal remains: central dashboard as control plane, `dashboard-edge` as execution plane.

View File

@ -0,0 +1,722 @@
# Dashboard Nanobot 模块化重构与远端桥接演进方案
本文档用于指导当前项目的结构性重构,并为后续“支持同机/远端龙虾 + Docker/Native 双运行模式”升级提前抽离边界。
目标不是一次性大改所有代码,而是先把未来 2 个核心问题理顺:
- 当前前端/后端过于集中,后续功能迭代成本越来越高
- 执行层默认绑定“本机 Docker + 本机文件系统”,无法自然扩展到远端与 Native 模式
## 1. 结论先行
### 1.1 前端必须拆,但优先做“页面内模块化”,不是重写 UI
当前前端最大的问题不是样式,而是页面级组件承担了太多职责:
- 页面状态
- API 调用
- 业务编排
- 视图渲染
- 弹窗
- 文件预览
- 表单保存
- WebSocket/轮询
因此,前端应优先按“页面 -> 业务区块 -> 通用能力”三层拆分。
### 1.2 后端必须拆代码结构,但不建议现在把中央 backend 再拆成多个部署服务
当前 `backend/main.py` 已经承担:
- API 路由
- Bot 生命周期
- Workspace 文件操作
- Runtime 状态判断
- 日志/监控
- Provider 测试
- 平台默认值拼装
这会让未来远端执行支持非常困难。
建议:
- 现在:先把 `backend` 按领域和 provider 分层拆开,保持 **一个 central backend 服务**
- 后续:新增一个 **独立部署的桥接服务 `dashboard-edge`**
也就是说:
- 中央控制面 backend保留继续作为统一 API、数据库、权限、审计入口
- 执行桥接层:新增独立服务,不再内嵌在 central backend 中
### 1.3 为了支持远端龙虾,必须引入桥接服务,但不需要修改 nanobot 核心
推荐结构:
- `dashboard-backend`
- 控制平面
- 统一前端 API
- DB / 权限 / 审计 / 聚合视图
- `dashboard-edge`
- 执行平面
- 部署在本机或远端节点
- 管理该节点上的 Docker / Native Bot
- 与 nanobot 的 `dashboard.py` channel 通信
这是未来最稳的演进方向。
### 1.4 `dashboard-edge` 应作为执行节点的标准组件
长期目标不是“只有远端机器需要 edge”而是
- 任何实际运行 Bot 的节点,都部署一个 `dashboard-edge`
- 同机部署场景下central backend 与 edge 可以位于同一台机器
- 远端部署场景下,在远端 Bot 所在机器部署 edge
也就是说:
- `dashboard-backend` 负责控制平面
- `dashboard-edge` 负责执行平面
- 一个节点一个 edge,不是一个 Bot 一个 edge
这样本机与远端最终会走同一条执行链路,避免长期维护两套调用路径。
### 1.5 `dashboard-edge` 的长期职责不应只绑定 nanobot
当前第一阶段仍以 nanobot 为主,因为现有接入基础已经存在:
- workspace 生成
- `dashboard.py` channel
- 运行态日志解析
但从架构角度,`dashboard-edge` 应定位为“Bot Core Adapter Host”
- edge 负责对接不同 Bot 核心
- 每类核心通过 adapter/executor 做本地适配
- 对上统一输出 dashboard 可理解的生命周期、命令、工作区、日志与状态协议
因此,未来可以扩展出:
- `NanobotCoreAdapter`
- `LobsterCoreAdapter`
- 其它兼容 Bot Core Adapter
控制面不需要理解不同核心的内部细节,只需要依赖统一 provider 契约。
## 2. 当前项目的主要结构问题
### 2.1 Frontend 问题
- `frontend/src/modules/dashboard/BotDashboardModule.tsx` 体量过大,已经承担多个页面级职责
- `BotWizardModule.tsx` 同时负责创建流程、provider 测试、五个 MD 文件编辑
- `PlatformDashboardPage.tsx` 混合平台设置、资源视图、Bot 管理视图
- 页面内部状态与 API 编排严重耦合,局部重构成本很高
- 公共能力分散:
- Markdown 编辑器
- workspace markdown 渲染
- timezone 选项
- page size 缓存
- bot 访问工具
### 2.2 Backend 问题
- `backend/main.py` 是单体入口,所有领域逻辑都堆在同一文件
- 运行时逻辑默认直接依赖本机资源:
- Docker
- 本地工作区
- 本地 dashboard channel
- 路由层和业务层耦合严重
- 未来引入 remote/native 时,如果继续在 `main.py` 里堆条件分支,会快速失控
### 2.3 未来升级的真正分界线
本质上项目需要明确区分:
- 控制平面
- 执行平面
- nanobot 接入层
目前这三者是混在一起的。
## 3. 目标架构
### 3.1 总体分层
推荐把系统拆成 4 层:
1. `frontend`
- 页面、组件、交互、状态编排
2. `dashboard-backend`
- 统一 API、鉴权、DB、Provider 路由、审计
3. `dashboard-edge`
- 本机或远端节点执行代理
4. `nanobot + dashboard.py`
- Bot 本体,不修改核心
### 3.2 控制平面 vs 执行平面
#### 控制平面
职责:
- 统一 UI
- 统一 API
- 节点注册与节点状态
- Bot 元数据存储
- 权限与访问控制
- 操作审计
- 运行状态聚合
#### 执行平面
职责:
- 启停 Bot
- 管理 Docker / Host 进程
- 读写本机工作区
- 注入 dashboard channel 配置
- 与 Bot 的本地 dashboard channel 通信
- 收集运行日志与事件
补充原则:
- 执行平面的核心宿主是 `dashboard-edge`
- edge 内部可以再通过 core adapter 对不同 Bot 核心做协议适配
- central backend 不直接理解某个具体核心的启动脚本、端口、日志细节
### 3.3 支持矩阵
最终要支持 4 种执行模式:
- Local + Docker
- Local + Native
- Remote + Docker
- Remote + Native
这 4 种模式在控制面前端和 API 层应该尽量表现一致,只在 Provider/Executor 层分流。
## 4. Backend 是否需要拆分
### 4.1 代码结构上:必须拆
必须从当前单文件/弱分层状态拆成:
- API 路由层
- 领域服务层
- Provider 层
- 基础设施层
否则未来远端支持几乎只能靠 `if local else remote` 的条件分支硬堆。
### 4.2 部署形态上:中央 backend 不建议继续拆成多个控制面服务
不建议现在把 central backend 再拆成:
- bot-api
- workspace-api
- monitor-api
原因:
- 当前规模还不值得引入多控制面服务复杂度
- 主要痛点不是“服务数量不足”,而是“执行逻辑没抽象”
- 真正需要新增的,是独立的执行代理服务 `dashboard-edge`
### 4.3 最终建议
- 保留一个 central backend
- 新增一个 edge service
- central backend 内部彻底模块化
## 5. Backend 目标目录建议
建议逐步演进为:
```text
backend/
app/
main.py
dependencies.py
lifespan.py
api/
bots.py
workspace.py
monitor.py
images.py
platform.py
topics.py
nodes.py
domain/
bots/
service.py
runtime_service.py
workspace_service.py
schemas.py
platform/
service.py
topics/
service.py
nodes/
service.py
providers/
runtime/
base.py
local.py
remote.py
workspace/
base.py
local.py
remote.py
provision/
base.py
local.py
remote.py
infra/
docker/
manager.py
workspace/
files.py
nanobot/
config_manager.py
dashboard_channel_client.py
persistence/
database.py
repositories/
settings/
config.py
models/
schemas/
services/
```
### 5.1 关键原则
- `api/` 只做 HTTP 协议转换,不做业务编排
- `domain/` 承担业务规则
- `providers/` 负责本机/远端、多运行模式适配
- `infra/` 负责 Docker、文件系统、dashboard channel、数据库等底层细节
## 6. Edge 服务目标目录建议
新增独立目录:
```text
edge/
main.py
api/
bots.py
workspace.py
monitor.py
health.py
executors/
base.py
docker_executor.py
native_executor.py
adapters/
dashboard_channel.py
log_parser.py
workspace/
service.py
runtime/
service.py
settings.py
```
### 6.1 Edge 的职责边界
只做“本节点执行代理”,不做:
- 全局权限模型
- 多节点聚合
- 平台配置中心
- 中央数据库业务
并且建议把“核心适配”显式纳入 edge 边界:
- edge 内部管理 `CoreAdapter`
- 负责把不同核心的运行方式转换成统一执行接口
- central backend 不直接依赖某个核心的私有实现
## 7. Frontend 目标结构建议
前端建议按“页面/区块/共享能力”拆,而不是继续让超大页面组件承担一切。
推荐结构:
```text
frontend/src/
app/
routes/
providers/
pages/
dashboard/
onboarding/
platform/
image-factory/
widgets/
bot-list/
bot-chat/
workspace-panel/
bot-settings/
topic-feed/
platform-overview/
features/
bot-control/
bot-editor/
workspace-preview/
skill-install/
mcp-config/
topic-config/
cron-config/
entities/
bot/
workspace/
topic/
platform/
node/
shared/
api/
ui/
hooks/
lib/
markdown/
i18n/
```
### 7.1 优先拆分的页面
#### `BotDashboardModule`
优先拆成:
- `BotHeader`
- `BotControlBar`
- `BotConversationPanel`
- `BotWorkspacePanel`
- `BotRuntimePanel`
- `BotSettingsModals`
- `WorkspacePreviewModal`
- `BotSkillSection`
- `TopicFeedSection`
并把状态和副作用抽入 hooks
- `useBotRuntime`
- `useWorkspaceBrowser`
- `useBotEditor`
- `useBotConversation`
- `useBotMonitorStream`
#### `BotWizardModule`
优先拆成:
- `WizardBaseStep`
- `WizardModelStep`
- `WizardAgentFilesStep`
- `WizardSummaryStep`
- `useBotWizardForm`
- `useProviderTest`
#### `PlatformDashboardPage`
优先拆成:
- `PlatformOverviewPanel`
- `PlatformBotsPanel`
- `PlatformSettingsPanel`
- `PlatformUsagePanel`
## 8. Provider 抽象建议
### 8.1 RuntimeProvider
统一接口:
- `start_bot`
- `stop_bot`
- `restart_bot`
- `send_command`
- `get_status`
- `get_recent_logs`
- `stream_monitor`
- `get_resource_snapshot`
### 8.2 WorkspaceProvider
统一接口:
- `list_tree`
- `read_file`
- `write_markdown`
- `upload_files`
- `download_file`
### 8.3 ProvisionProvider
统一接口:
- `create_bot`
- `delete_bot`
- `upgrade_bot`
- `sync_config`
### 8.4 实现矩阵
- `LocalDockerRuntimeProvider`
- `LocalNativeRuntimeProvider`
- `RemoteDockerRuntimeProvider`
- `RemoteNativeRuntimeProvider`
但对上层 API 来说,尽量只感知:
- `LocalRuntimeProvider`
- `RemoteRuntimeProvider`
具体 Docker / Native 再由 provider 内部选择 executor。
## 9. 数据模型建议
当前 `BotInstance` 不足以表达远端执行目标,需要新增节点维度。
### 9.1 建议新增 Node 实体
建议字段:
- `id`
- `name`
- `endpoint`
- `auth_type`
- `auth_secret_ref`
- `enabled`
- `status`
- `capabilities_json`
- `last_heartbeat_at`
### 9.2 BotInstance 建议新增字段
- `node_id`
- `runtime_kind`
- `docker`
- `native`
- `location_kind`
- `local`
- `remote`
或者简化为:
- `node_id`
- `runtime_kind`
其中:
- `node_id = local` 表示同服务器
- 其它 `node_id` 表示远端节点
### 9.3 迁移策略
现有数据默认迁移为:
- `node_id = local`
- `runtime_kind = docker`
保证老实例零感知升级。
## 10. 与 nanobot 的关系
原则保持不变:
- 不修改 nanobot 核心
- 继续复用 `dashboard.py`
- 由 dashboard 侧负责桥接与配置注入
具体职责:
- central backend 不直接操作 nanobot
- edge 负责:
- 生成/更新 `.nanobot/config.json`
- 确保 `channels.dashboard` 注入正确
- 调用 dashboard channel
## 11. 分阶段实施路线
### Phase 1:前端拆大文件,不改 API
目标:
- 仅拆页面组件、hooks、services
- API 不变
- 行为不变
优先级:
- `BotDashboardModule`
- `BotWizardModule`
- `PlatformDashboardPage`
### Phase 2:后端拆主文件,不改功能
目标:
- 从 `backend/main.py` 中拆出:
- bot lifecycle 路由
- workspace 路由
- bot service
- workspace service
- runtime service
- 行为保持不变
### Phase 3引入 Provider 抽象,保留本机 Docker
目标:
- 在不改变当前功能的前提下,把执行逻辑走 provider
- 当前 provider 先只实现 local docker
这是最关键的“抽骨架”阶段。
### Phase 4:补 Local Native 模式
目标:
- 支持同机宿主机直装
- 通过 `NativeExecutor` 管理进程/端口/workspace
### Phase 5:新增 `dashboard-edge`
目标:
- 远端节点部署 edge 服务
- central backend 引入 remote provider
- 不改前端 API只增加节点/运行模式字段
### Phase 6:前端补节点视图
目标:
- 节点管理页
- Bot 运行位置与运行模式选择
- 节点资源与健康状态展示
### Phase 7:本机执行路径统一切到 edge
目标:
- 逐步让同机 Docker / Native 也通过本机 edge 执行
- central backend 不再保留长期的“本机直连执行层”
- 最终形成“所有执行节点统一经由 edge”的稳定结构
## 12. 本轮重构建议的落地顺序
如果从今天开始进入真正改造,建议按下面顺序推进:
1. 先拆前端大页面
2. 再拆 backend main.py
3. 然后引入 provider interface
4. 再实现 local native
5. 最后接入 remote edge
原因:
- 页面和 `main.py` 的可维护性问题已经是当前痛点
- provider 抽象是远端能力的必要前提
- native 支持可以提前验证抽象是否合理
- remote edge 是最后一公里,不应该在结构没理顺前硬接
## 13. 建议的下一步动作
下一步不要直接开始“远端龙虾支持”的业务开发,而应先做一轮基础重构:
### 13.1 前端第一刀
`BotDashboardModule` 开始,拆成:
- workspace
- conversation
- runtime
- settings modal
- skills/topic
### 13.2 后端第一刀
`backend/main.py` 中先抽 3 个领域:
- bots lifecycle
- workspace
- bot runtime snapshot / config sync
### 13.3 provider 第一版
先定义接口,不着急做 remote
- `RuntimeProvider`
- `WorkspaceProvider`
- `ProvisionProvider`
并用当前本机逻辑实现 `Local*Provider`
---
本方案的核心思想只有一句话:
**中央 backend 保持控制平面,新增 edge 作为执行平面;先做代码结构分层,再做远端与 native 支持。**
## 14. 为什么不是一开始就直接拆成 backend + edge
这个问题的关键不在于“是否值得上 edge”而在于“当前代码是否已经具备承接 edge 的稳定边界”。
答案是:**方向上应该从一开始就以 backend + edge 为目标,但工程实施上不应该在现有单体逻辑不拆的情况下直接硬接 edge。**
原因如下。
### 14.1 当前 central backend 还没有稳定的 provider 边界
如果现在直接上 edge但 central backend 仍然保持现状,那么会出现:
- 本机逻辑继续直连 Docker / 文件系统
- 远端逻辑走 edge
- API 路由层同时理解本机细节和远端细节
结果会形成两套执行路径:
- local direct
- remote edge
这会让后续统一 Native、统一工作区、统一日志协议变得更贵。
### 14.2 先拆结构不是绕路,而是在降低总成本
先做结构重构的价值是:
- 把当前隐式耦合改成显式边界
- 先定义 provider / executor / adapter 契约
- 为 edge 预留稳定接入点
这样后续接 edge 是“接到标准接口上”,不是“插进一团已有业务里”。
### 14.3 如果现在直接硬上 edge代价反而更大
直接开做 edge 的风险:
- central backend 仍然需要保留大量本机直连逻辑
- 远端功能会复制一套相似业务编排
- 前端状态和 API 语义可能被迫提前扭曲
- 最后仍然要回头拆 `main.py` 和超大页面组件
也就是说,不是“现在拆结构,后面代价更大”,而是:
- **如果现在不先拆结构,后面引入 edge 的总代价更大**
### 14.4 推荐理解方式
正确顺序不是:
- 先重构
- 再考虑 edge
而是:
- 先以 edge 为最终目标设计边界
- 再做最小必要的结构重构
- 然后把 edge 接入这些边界
因此,这两件事不是对立关系,而是前后依赖关系。

View File

@ -64,6 +64,14 @@ flowchart LR
- 不扩展 nanobot 去理解“远端管理”。
- 不要求面板直接连接远端 Bot 的文件系统或 Docker。
- 所有远端执行动作都先到节点代理,再由节点代理在本机执行。
- `dashboard-edge` 应视为执行节点标准组件:
- 同机 Bot 节点可部署本机 edge
- 远端 Bot 节点部署远端 edge
- 一个节点一个 edge,不是一个 Bot 一个 edge
- `dashboard-edge` 的长期定位应是“核心适配宿主”:
- 第一阶段适配 nanobot
- 后续可扩展适配其它 Bot Core
- 对上统一提供 dashboard 可消费的执行协议
## 3. 为什么选择方案 A
@ -211,6 +219,36 @@ nanobot 核心本身不需要感知节点、中心控制面或跨机调度。
### 8.1 职责
- 对中心端暴露统一的 Edge API
- 将中心端的 Bot 运行控制转换为本机 Docker / Host 操作
- 代理本机 workspace 访问
- 维护本机 Bot 的日志与 monitor 流
### 8.2 第一版最小 HTTP 契约
建议 `dashboard-edge` 第一版至少提供以下接口,全部由中心端 `HttpEdgeClient` 对接:
- `POST /api/edge/bots/{bot_id}/start`
- `POST /api/edge/bots/{bot_id}/stop`
- `POST /api/edge/bots/{bot_id}/command`
- `POST /api/edge/bots/{bot_id}/monitor/ensure`
- `GET /api/edge/bots/{bot_id}/logs`
- `POST /api/edge/bots/{bot_id}/workspace/sync`
- `GET /api/edge/bots/{bot_id}/workspace/tree`
- `GET /api/edge/bots/{bot_id}/workspace/file`
- `PUT /api/edge/bots/{bot_id}/workspace/file/markdown`
请求头建议:
- `x-dashboard-edge-token`
最小返回约定:
- 控制类接口返回 `{ "status": "started|stopped|ok" }`
- 日志接口返回 `{ "bot_id": "...", "logs": [] }`
- monitor 保活返回 `{ "ensured": true }`
- workspace 接口尽量复用当前 dashboard 本地接口的返回结构,降低前端差异
远端节点代理 `dashboard-edge` 负责:
- 本机 Bot 注册和枚举
@ -222,6 +260,12 @@ nanobot 核心本身不需要感知节点、中心控制面或跨机调度。
- Dashboard Channel 配置自动注入
- 向中心汇报节点状态和 Bot 状态
补充说明:
- 长期看,`dashboard-edge` 不只是“远端代理”
- 对于同机部署的执行节点,也建议通过本机 edge 统一执行路径
- 这样 central backend 不需要长期维护一套本机直连分支和一套远端代理分支
### 8.2 统一执行接口
节点代理内部也需要抽象执行器:
@ -240,6 +284,22 @@ nanobot 核心本身不需要感知节点、中心控制面或跨机调度。
- `write_workspace_file`
- `get_resource_snapshot`
节点代理内部建议再区分两层:
- `CoreAdapter`
- 负责适配不同 Bot 核心
- `Executor`
- 负责 Docker / Host 执行
例如:
- `NanobotCoreAdapter + DockerExecutor`
- `NanobotCoreAdapter + HostProcessExecutor`
- `LobsterCoreAdapter + DockerExecutor`
- `LobsterCoreAdapter + HostProcessExecutor`
这样 edge 才能成为真正可扩展的“桥接层”,而不是只绑定当前 nanobot 实现。
### 8.3 HostProcessExecutor 推荐实现
首版建议使用“节点代理托管子进程”的方式,而不是一开始就深度接入系统服务管理器。
@ -493,6 +553,28 @@ sequenceDiagram
## 15. 分阶段实施计划
### 14.1 迁移 / 部署入口
Bot 的跨节点迁移、重建到新节点、以及运行时或镜像切换,不应放在 Bot 的基础信息编辑里,而应作为单独的高风险运维动作处理。
建议在平台管理区预留独立入口,进入后再由迁移向导逐步完成:
- 选择源 Bot
- 选择目标节点
- 校验目标节点能力
- 同步 workspace、配置和运行元数据
- 停止源节点运行实例
- 在目标节点重建并启动
- 回写中心端节点与 Bot 关联
设计原则:
- 迁移动作必须显式确认
- 迁移和部署入口应与日常基础编辑分离
- 普通 Bot 编辑不允许修改节点、运行模式和基础镜像
- 创建时可以指定目标节点和镜像,但创建后不再在基础信息里暴露这些字段
- 远端 `dashboard-edge` 与中心端之间只通过迁移/部署协议协作,不通过临时手工字段拼接
### 15.1 第一阶段:跑通远端控制链路
目标:

View File

@ -1875,6 +1875,15 @@ body {
border-color: rgba(97, 174, 255, 0.34);
}
.platform-entry-card.is-static {
cursor: default;
}
.platform-entry-card.is-static:hover {
transform: none;
border-color: rgba(97, 174, 255, 0.18);
}
.platform-entry-card strong {
color: var(--title);
}
@ -1884,6 +1893,338 @@ body {
color: var(--muted);
}
/* Node list: two-column grid of node cards. */
.platform-node-grid {
display: grid;
grid-template-columns: repeat(2, minmax(0, 1fr));
gap: 12px;
}
/* Inline action row above the node grid; wraps on narrow widths. */
.platform-node-toolbar {
display: inline-flex;
align-items: center;
gap: 8px;
flex-wrap: wrap;
}
/* A single managed-node card: dark gradient panel with subtle
   hover lift (transform/border/shadow all transition together). */
.platform-node-card {
display: flex;
flex-direction: column;
gap: 10px;
align-items: stretch;
padding: 18px;
border-radius: 18px;
border: 1px solid rgba(97, 174, 255, 0.2);
background: linear-gradient(180deg, rgba(14, 22, 38, 0.84), rgba(8, 12, 21, 0.92));
color: var(--text);
text-align: left;
transition: transform 0.18s ease, border-color 0.18s ease, box-shadow 0.18s ease;
}
.platform-node-card:hover {
transform: translateY(-1px);
border-color: rgba(97, 174, 255, 0.38);
box-shadow: 0 16px 32px rgba(8, 25, 60, 0.12);
}
/* Disabled nodes stay visible but dimmed. */
.platform-node-card.is-disabled {
opacity: 0.8;
}
/* Card header: node name on the left, action buttons pushed right. */
.platform-node-card-head {
display: flex;
align-items: flex-start;
justify-content: space-between;
gap: 12px;
}
.platform-node-card-head-actions {
display: inline-flex;
align-items: center;
gap: 6px;
flex-wrap: wrap;
justify-content: flex-end;
}
.platform-node-card-head strong {
display: block;
font-size: 17px;
font-weight: 800;
color: var(--title);
}
/* Small muted node-id line under the title. */
.platform-node-card-id {
margin-top: 4px;
font-size: 11px;
color: var(--muted);
}
/* Secondary node-card text (meta, URL, stats, hints): small + muted.
   URLs break anywhere so long endpoints never overflow the card. */
.platform-node-card-meta {
font-size: 12px;
color: var(--muted);
}
.platform-node-card-url {
font-size: 11px;
color: var(--muted);
word-break: break-all;
}
.platform-node-card-url-muted {
color: rgba(68, 87, 145, 0.72);
}
.platform-node-card-stats {
display: flex;
flex-wrap: wrap;
gap: 10px;
font-size: 12px;
color: var(--muted);
}
/* Capability badges wrap into rows. */
.platform-node-card-capabilities {
display: flex;
flex-wrap: wrap;
gap: 8px;
}
.platform-node-card-hint {
font-size: 12px;
color: var(--muted);
}
.platform-node-card-last-seen {
font-size: 12px;
color: var(--muted);
}
/* Card footer: status text vs. navigation link, spread apart. */
.platform-node-card-foot {
display: flex;
align-items: center;
justify-content: space-between;
gap: 12px;
padding-top: 6px;
color: var(--title);
}
.platform-node-card-link {
font-size: 13px;
font-weight: 700;
}
/* Node editor dialog: capped to viewport, scrolls internally. */
.platform-node-editor {
width: min(760px, 94vw);
max-height: min(88vh, 920px);
overflow-y: auto;
}
/* Two-column form grid; *-span-2 fields take the full row. */
.platform-node-editor-grid {
display: grid;
grid-template-columns: repeat(2, minmax(0, 1fr));
gap: 12px;
}
.platform-node-editor-span-2 {
grid-column: span 2;
}
/* min-width: 0 lets fields shrink inside the grid track. */
.platform-node-editor .field {
min-width: 0;
}
.platform-node-editor .field-checkbox {
justify-content: flex-end;
}
/* Sub-panel inside the node editor for native-runtime settings. */
.platform-node-native-panel {
margin-top: 12px;
padding: 12px;
border-radius: 14px;
border: 1px solid var(--line);
background: color-mix(in oklab, var(--panel-soft) 86%, var(--panel) 14%);
display: grid;
gap: 10px;
}
.platform-node-native-panel-title {
font-size: 13px;
font-weight: 700;
color: var(--title);
}
.platform-node-native-panel-grid {
display: grid;
gap: 10px;
}
/* Generic checkbox + label row. */
.check-row {
display: inline-flex;
align-items: center;
gap: 8px;
color: var(--text);
}
/* Connectivity/preflight test output box; scrolls past 240px. */
.platform-node-test-result {
padding: 14px 16px;
border-radius: 16px;
border: 1px solid var(--line);
background: color-mix(in oklab, var(--panel) 74%, transparent);
display: flex;
flex-direction: column;
gap: 6px;
font-size: 13px;
max-height: 240px;
overflow: auto;
}
/* Success/failure tints blend the --ok / --err accents into the panel. */
.platform-node-test-result.is-ok {
border-color: color-mix(in oklab, var(--ok) 42%, var(--line) 58%);
background: color-mix(in oklab, var(--ok) 10%, var(--panel) 90%);
}
.platform-node-test-result.is-error {
border-color: color-mix(in oklab, var(--err) 42%, var(--line) 58%);
background: color-mix(in oklab, var(--err) 8%, var(--panel) 92%);
}
/* Shared inline-row layout for result header/meta and editor actions. */
.platform-node-test-result-head,
.platform-node-test-result-meta,
.platform-node-editor-actions {
display: inline-flex;
align-items: center;
gap: 10px;
flex-wrap: wrap;
}
.platform-node-test-kv {
display: grid;
gap: 6px;
}
/* One-line code snippet in test output: horizontal scroll, no wrap. */
.platform-node-test-code {
display: block;
max-width: 100%;
overflow-x: auto;
white-space: nowrap;
padding: 6px 8px;
border-radius: 10px;
border: 1px solid var(--line);
background: color-mix(in oklab, var(--panel) 90%, transparent);
}
/* Explanatory note shown for direct (non-edge) connections. */
.platform-node-direct-note {
padding: 12px 14px;
border-radius: 14px;
border: 1px solid var(--line);
background: color-mix(in oklab, var(--panel) 82%, transparent);
color: var(--muted);
font-size: 13px;
line-height: 1.6;
}
/* Dashed brand-tinted chip showing the currently selected target. */
.platform-node-current-target {
padding: 10px 12px;
border-radius: 14px;
border: 1px dashed color-mix(in oklab, var(--brand) 34%, var(--line) 66%);
background: color-mix(in oklab, var(--brand) 8%, var(--panel) 92%);
color: var(--title);
font-size: 12px;
}
/* Node workspace page: full-height column; min-height: 0 on flex
   children allows inner panes to own their scrolling. */
.node-workspace-page {
display: flex;
flex-direction: column;
min-height: 0;
height: 100%;
}
.node-workspace-shell {
display: flex;
flex-direction: column;
gap: 18px;
flex: 0 0 auto;
min-height: 0;
}
/* Summary row: name / status / wide resource card (3 uneven tracks). */
.node-workspace-summary-grid {
grid-template-columns: minmax(260px, 0.96fr) minmax(220px, 0.82fr) minmax(0, 2.22fr);
}
/* Resource card flows into its own track here (no explicit span). */
.node-workspace-summary-grid .platform-resource-card {
grid-column: auto;
}
.node-workspace-summary-card {
min-width: 0;
}
.node-workspace-summary-value {
font-size: 22px;
font-weight: 800;
color: var(--title);
line-height: 1.2;
word-break: break-word;
}
.node-workspace-summary-id {
margin-top: -2px;
font-size: 12px;
color: var(--muted);
}
.node-workspace-chip-row {
display: flex;
flex-wrap: wrap;
gap: 8px;
margin-top: 2px;
}
.node-workspace-resource-card {
min-width: 0;
}
/* Main content area fills the remaining height; its single child
   stretches so embedded pages manage their own scroll regions. */
.node-workspace-content-shell {
display: flex;
flex: 1 1 auto;
min-height: 0;
margin-top: 24px;
}
.node-workspace-content-shell > * {
flex: 1 1 auto;
min-height: 0;
}
/* Platform home: three summary cards, then a wide node column next
   to a narrower management column. */
.platform-home-shell {
display: flex;
flex-direction: column;
gap: 18px;
min-height: 0;
}
.platform-home-summary-grid {
grid-template-columns: repeat(3, minmax(0, 1fr));
}
.platform-home-body {
display: grid;
grid-template-columns: minmax(0, 1.35fr) minmax(360px, 0.65fr);
gap: 18px;
align-items: start;
}
.platform-home-node-section,
.platform-home-management-section {
min-width: 0;
}
/* Both home sections lay out their cards two-up. */
.platform-home-node-section .platform-node-grid {
grid-template-columns: repeat(2, minmax(0, 1fr));
}
.platform-home-management-section .platform-entry-grid {
grid-template-columns: repeat(2, minmax(0, 1fr));
}
.platform-settings-shell {
max-width: min(1400px, 96vw);
}
@ -2468,6 +2809,34 @@ body {
color: #173057;
}
/* Light-theme overrides for node cards: pale blue gradient and
   fixed dark-navy text colors instead of the dark-theme variables. */
.app-shell[data-theme='light'] .platform-node-card {
border-color: #b7c7e6;
background: linear-gradient(180deg, #f7fbff 0%, #edf4ff 100%);
color: #173057;
}
.app-shell[data-theme='light'] .platform-node-card:hover {
border-color: #8fb4ef;
box-shadow: 0 14px 28px rgba(86, 118, 176, 0.12);
}
/* Primary text (titles, footers, links, summary values): dark navy. */
.app-shell[data-theme='light'] .platform-node-card-head strong,
.app-shell[data-theme='light'] .platform-node-card-foot,
.app-shell[data-theme='light'] .platform-node-card-link,
.app-shell[data-theme='light'] .node-workspace-summary-value {
color: #173057;
}
/* Secondary text (ids, meta, urls, stats, hints): softer slate blue. */
.app-shell[data-theme='light'] .platform-node-card-id,
.app-shell[data-theme='light'] .platform-node-card-meta,
.app-shell[data-theme='light'] .platform-node-card-url,
.app-shell[data-theme='light'] .platform-node-card-stats,
.app-shell[data-theme='light'] .platform-node-card-last-seen,
.app-shell[data-theme='light'] .platform-node-card-hint,
.app-shell[data-theme='light'] .node-workspace-summary-id {
color: #49648f;
}
.app-shell[data-theme='light'] .platform-entry-card strong {
color: #173057;
}
@ -2609,6 +2978,15 @@ body {
grid-template-columns: repeat(3, minmax(0, 1fr));
}
.platform-home-summary-grid {
grid-template-columns: repeat(2, minmax(0, 1fr));
}
.node-workspace-summary-grid,
.platform-home-body {
grid-template-columns: minmax(0, 1fr);
}
.platform-resource-card {
grid-column: span 3;
}
@ -2626,6 +3004,7 @@ body {
}
.platform-grid,
.platform-home-body,
.platform-main-grid,
.platform-monitor-grid,
.platform-entry-grid,
@ -2633,6 +3012,11 @@ body {
grid-template-columns: 1fr;
}
.platform-home-node-section .platform-node-grid,
.platform-home-management-section .platform-entry-grid {
grid-template-columns: 1fr;
}
.platform-resource-card {
grid-column: auto;
}
@ -2687,6 +3071,14 @@ body {
.wizard-shell {
min-height: 640px;
}
.platform-node-editor-grid {
grid-template-columns: 1fr;
}
.platform-node-editor-span-2 {
grid-column: span 1;
}
}
@media (max-width: 980px) {

View File

@ -10,9 +10,15 @@ import { appEn } from './i18n/app.en';
import { LucentTooltip } from './components/lucent/LucentTooltip';
import { PasswordInput } from './components/PasswordInput';
import { clearBotAccessPassword, getBotAccessPassword, setBotAccessPassword } from './utils/botAccess';
import { clearPanelAccessPassword, getPanelAccessPassword, setPanelAccessPassword } from './utils/panelAccess';
import {
PANEL_AUTH_REQUIRED_EVENT,
clearPanelAccessPassword,
getPanelAccessPassword,
setPanelAccessPassword,
} from './utils/panelAccess';
import { BotHomePage } from './modules/bot-home/BotHomePage';
import { PlatformDashboardPage } from './modules/platform/PlatformDashboardPage';
import { NodeHomePage } from './modules/platform/NodeHomePage';
import { NodeWorkspacePage } from './modules/platform/NodeWorkspacePage';
import { SkillMarketManagerPage } from './modules/platform/components/SkillMarketManagerModal';
import { readCompactModeFromUrl, useAppRoute } from './utils/appRoute';
import './App.css';
@ -41,6 +47,7 @@ function AuthenticatedApp() {
: { show: 'Show password', hide: 'Hide password' };
const forcedBotId = route.kind === 'bot' ? route.botId : '';
const forcedNodeId = route.kind === 'dashboard-node' ? route.nodeId : '';
useBotsSync(forcedBotId || undefined);
useEffect(() => {
@ -66,8 +73,10 @@ function AuthenticatedApp() {
const headerTitle =
route.kind === 'bot'
? (forcedBot?.name || defaultLoadingPage.title)
: route.kind === 'dashboard-node'
? `${t.nodeWorkspace} · ${forcedNodeId || 'local'}`
: route.kind === 'dashboard-skills'
? (locale === 'zh' ? '技能市场管理' : 'Skill Marketplace')
? t.skillMarketplace
: t.title;
useEffect(() => {
@ -76,12 +85,16 @@ function AuthenticatedApp() {
return;
}
if (route.kind === 'dashboard-skills') {
document.title = `${t.title} - ${locale === 'zh' ? '技能市场' : 'Skill Marketplace'}`;
document.title = `${t.title} - ${t.skillMarketplace}`;
return;
}
if (route.kind === 'dashboard-node') {
document.title = `${t.title} - ${t.nodeWorkspace} - ${forcedNodeId || 'local'}`;
return;
}
const botName = String(forcedBot?.name || '').trim();
document.title = botName ? `${t.title} - ${botName}` : `${t.title} - ${forcedBotId}`;
}, [forcedBot?.name, forcedBotId, locale, route.kind, t.title]);
}, [forcedBot?.name, forcedBotId, forcedNodeId, route.kind, t.nodeWorkspace, t.skillMarketplace, t.title]);
useEffect(() => {
setSingleBotUnlocked(false);
@ -158,13 +171,17 @@ function AuthenticatedApp() {
<h1>{headerTitle}</h1>
{route.kind === 'dashboard-skills' ? (
<button type="button" className="app-route-subtitle app-route-crumb" onClick={navigateToDashboard}>
{locale === 'zh' ? '平台总览' : 'Platform Overview'}
{t.platformHome}
</button>
) : route.kind === 'dashboard-node' ? (
<button type="button" className="app-route-subtitle app-route-crumb" onClick={navigateToDashboard}>
{t.platformHome}
</button>
) : (
<div className="app-route-subtitle">
{route.kind === 'dashboard'
? (locale === 'zh' ? '平台总览' : 'Platform overview')
: (locale === 'zh' ? 'Bot 首页' : 'Bot Home')}
? t.platformHome
: t.botHome}
</div>
)}
{isCompactShell ? (
@ -221,7 +238,9 @@ function AuthenticatedApp() {
<main className="main-stage">
{route.kind === 'dashboard' ? (
<PlatformDashboardPage compactMode={compactMode} />
<NodeHomePage compactMode={compactMode} />
) : route.kind === 'dashboard-node' ? (
<NodeWorkspacePage nodeId={forcedNodeId} compactMode={compactMode} />
) : route.kind === 'dashboard-skills' ? (
<SkillMarketManagerPage isZh={locale === 'zh'} />
) : (
@ -316,8 +335,13 @@ function PanelLoginGate({ children }: { children: ReactElement }) {
}
} catch {
if (!alive) return;
setRequired(false);
setAuthenticated(true);
setRequired(true);
setAuthenticated(false);
setError(
locale === 'zh'
? '无法确认面板访问状态,请重新输入面板密码。若仍失败,请检查 Dashboard Backend 是否已重启并应用最新配置。'
: 'Unable to verify panel access. Enter the panel password again. If it still fails, restart the Dashboard backend and ensure the latest config is loaded.',
);
setChecking(false);
}
};
@ -327,6 +351,23 @@ function PanelLoginGate({ children }: { children: ReactElement }) {
};
}, [bypassPanelGate, locale]);
useEffect(() => {
if (typeof window === 'undefined' || bypassPanelGate) return;
const onPanelAuthRequired = (event: Event) => {
const detail = String((event as CustomEvent<string>)?.detail || '').trim();
setRequired(true);
setAuthenticated(false);
setChecking(false);
setSubmitting(false);
setPassword('');
setError(
detail || (locale === 'zh' ? '面板访问密码已失效,请重新输入。' : 'Panel access password expired. Please sign in again.'),
);
};
window.addEventListener(PANEL_AUTH_REQUIRED_EVENT, onPanelAuthRequired as EventListener);
return () => window.removeEventListener(PANEL_AUTH_REQUIRED_EVENT, onPanelAuthRequired as EventListener);
}, [bypassPanelGate, locale]);
const onSubmit = async () => {
const next = String(password || '').trim();
if (!next) {

View File

@ -1,5 +1,9 @@
export const appEn = {
title: 'Nanobot Control Plane',
platformHome: 'Node Home',
skillMarketplace: 'Skill Marketplace',
nodeWorkspace: 'Node Workspace',
botHome: 'Bot Home',
theme: 'Theme',
language: 'Language',
dark: 'Dark',

View File

@ -1,5 +1,9 @@
export const appZhCn = {
title: 'Nanobot 管理面板',
platformHome: '节点首页',
skillMarketplace: '技能市场管理',
nodeWorkspace: '节点工作台',
botHome: 'Bot 首页',
theme: '主题',
language: '语言',
dark: '深色',

View File

@ -66,6 +66,9 @@ export const dashboardEn = {
deleteBotDone: 'Bot and workspace deleted.',
deleteFail: 'Delete failed.',
titleBots: 'Bots',
nodeLabel: 'Node',
nodeUnknown: 'Unnamed node',
nodeGroupCount: (count: number) => `${count} bot${count === 1 ? '' : 's'}`,
botSearchPlaceholder: 'Search by bot name or ID',
botSearchNoResult: 'No matching bots.',
workspaceSearchPlaceholder: 'Search by file name or path',

View File

@ -66,6 +66,9 @@ export const dashboardZhCn = {
deleteBotDone: 'Bot 与 workspace 已删除。',
deleteFail: '删除失败。',
titleBots: 'Bot 列表',
nodeLabel: '节点',
nodeUnknown: '未命名节点',
nodeGroupCount: (count: number) => `${count} 个 Bot`,
botSearchPlaceholder: '按 Bot 名称或 ID 搜索',
botSearchNoResult: '没有匹配的 Bot。',
workspaceSearchPlaceholder: '按文件名或路径搜索',

View File

@ -205,6 +205,55 @@
font-weight: 700;
}
/* Per-node grouping wrapper in the bot list; groups are stacked
   with spacing except after the last one. */
.ops-bot-group {
margin-bottom: 14px;
}
.ops-bot-group:last-child {
margin-bottom: 0;
}
/* Group header above the bot cards of one node. */
.ops-bot-group-head {
margin: 4px 2px 10px;
padding: 0 2px;
}
.ops-bot-group-title-wrap {
display: grid;
gap: 4px;
}
.ops-bot-group-title {
font-size: 13px;
font-weight: 900;
letter-spacing: 0.02em;
color: var(--title);
}
/* Subtitle row: node chip, node id, bot count, mode list; wraps. */
.ops-bot-group-subtitle {
display: flex;
flex-wrap: wrap;
align-items: center;
gap: 6px;
font-size: 11px;
color: var(--subtitle);
font-weight: 700;
}
/* Pill-shaped "node" label chip, brand-tinted. */
.ops-bot-group-chip {
display: inline-flex;
align-items: center;
padding: 2px 8px;
border-radius: 999px;
border: 1px solid color-mix(in oklab, var(--brand) 32%, var(--line) 68%);
background: color-mix(in oklab, var(--brand-soft) 24%, var(--panel-soft) 76%);
color: var(--title);
}
/* Dimmed separator glyph between subtitle segments. */
.ops-bot-group-sep {
opacity: 0.55;
}
.ops-bot-card {
position: relative;
border: 1px solid var(--line);
@ -381,6 +430,31 @@
gap: 8px;
}
/* Pill badge showing a bot's runtime kind (DOCKER / NATIVE);
   fixed min-width keeps badges aligned across cards. */
.ops-runtime-kind-badge {
display: inline-flex;
align-items: center;
justify-content: center;
min-width: 64px;
padding: 2px 8px;
border-radius: 999px;
border: 1px solid color-mix(in oklab, var(--line) 84%, transparent);
font-size: 11px;
font-weight: 800;
letter-spacing: 0.03em;
}
/* Docker runtime: blue tint blended with the panel color. */
.ops-runtime-kind-badge.is-docker {
color: color-mix(in oklab, #0f4e8c 76%, var(--text) 24%);
background: color-mix(in oklab, #9ed4ff 34%, var(--panel) 66%);
border-color: color-mix(in oklab, #64a8e8 42%, var(--line) 58%);
}
/* Native runtime: green tint blended with the panel color. */
.ops-runtime-kind-badge.is-native {
color: color-mix(in oklab, #21643a 78%, var(--text) 22%);
background: color-mix(in oklab, #bfe8b6 34%, var(--panel) 66%);
border-color: color-mix(in oklab, #86cd95 42%, var(--line) 58%);
}
.ops-bot-name-row {
display: inline-flex;
align-items: center;

View File

@ -7,7 +7,7 @@ import rehypeRaw from 'rehype-raw';
import rehypeSanitize, { defaultSchema } from 'rehype-sanitize';
import { APP_ENDPOINTS } from '../../config/env';
import { useAppStore } from '../../store/appStore';
import type { ChatMessage } from '../../types/bot';
import type { BotState, ChatMessage } from '../../types/bot';
import { normalizeAssistantMessageText, normalizeUserMessageText, summarizeProgressText } from './messageParser';
import nanobotLogo from '../../assets/nanobot-logo.png';
import './BotDashboardModule.css';
@ -35,9 +35,17 @@ interface BotDashboardModuleProps {
onOpenCreateWizard?: () => void;
onOpenImageFactory?: () => void;
forcedBotId?: string;
forcedNodeId?: string;
compactMode?: boolean;
}
interface NodeBotGroup {
key: string;
label: string;
nodeId: string;
bots: BotState[];
}
type AgentTab = 'AGENTS' | 'SOUL' | 'USER' | 'TOOLS' | 'IDENTITY';
type WorkspaceNodeType = 'dir' | 'file';
type ChannelType = 'dashboard' | 'feishu' | 'qq' | 'dingtalk' | 'telegram' | 'slack' | 'email';
@ -212,17 +220,6 @@ interface TopicFeedStatsResponse {
latest_item_id?: number | null;
}
interface NanobotImage {
tag: string;
status: string;
}
interface BaseImageOption {
tag: string;
label: string;
disabled: boolean;
}
interface WorkspaceSkillOption {
id: string;
name: string;
@ -414,6 +411,13 @@ const providerPresets: Record<string, { model: string; apiBase?: string; note: {
en: 'OpenAI native endpoint',
},
},
vllm: {
model: 'Qwen/Qwen2.5-7B-Instruct',
note: {
'zh-cn': 'vLLM(OpenAI 兼容)接口,请填写 API Base,例如 http://127.0.0.1:8000/v1。',
en: 'vLLM (OpenAI-compatible). Please set API Base, e.g. http://127.0.0.1:8000/v1.',
},
},
deepseek: {
model: 'deepseek-chat',
note: {
@ -1060,6 +1064,7 @@ export function BotDashboardModule({
onOpenCreateWizard,
onOpenImageFactory,
forcedBotId,
forcedNodeId,
compactMode = false,
}: BotDashboardModuleProps) {
const {
@ -1197,7 +1202,6 @@ export function BotDashboardModule({
const [isSavingMcp, setIsSavingMcp] = useState(false);
const [isSavingGlobalDelivery, setIsSavingGlobalDelivery] = useState(false);
const [isBatchOperating, setIsBatchOperating] = useState(false);
const [availableImages, setAvailableImages] = useState<NanobotImage[]>([]);
const [controlCommandByBot, setControlCommandByBot] = useState<Record<string, string>>({});
const [globalDelivery, setGlobalDelivery] = useState<{ sendProgress: boolean; sendToolHints: boolean }>({
sendProgress: false,
@ -1242,12 +1246,6 @@ export function BotDashboardModule({
const [expandedUserByKey, setExpandedUserByKey] = useState<Record<string, boolean>>({});
const [feedbackSavingByMessageId, setFeedbackSavingByMessageId] = useState<Record<number, boolean>>({});
const [showRuntimeActionModal, setShowRuntimeActionModal] = useState(false);
const [showTemplateModal, setShowTemplateModal] = useState(false);
const [templateTab, setTemplateTab] = useState<'agent' | 'topic'>('agent');
const [isLoadingTemplates, setIsLoadingTemplates] = useState(false);
const [isSavingTemplates, setIsSavingTemplates] = useState(false);
const [templateAgentText, setTemplateAgentText] = useState('');
const [templateTopicText, setTemplateTopicText] = useState('');
const [controlCommandPanelOpen, setControlCommandPanelOpen] = useState(false);
const [workspaceHoverCard, setWorkspaceHoverCard] = useState<WorkspaceHoverCardState | null>(null);
const botSearchInputName = useMemo(
@ -1624,7 +1622,13 @@ export function BotDashboardModule({
const bots = useMemo(
() =>
Object.values(activeBots).sort((a, b) => {
Object.values(activeBots)
.filter((bot) => {
const expectedNodeId = String(forcedNodeId || '').trim().toLowerCase();
if (!expectedNodeId) return true;
return String(bot.node_id || 'local').trim().toLowerCase() === expectedNodeId;
})
.sort((a, b) => {
const aId = String(a.id || '').trim();
const bId = String(b.id || '').trim();
const aOrder = botOrderRef.current[aId] ?? Number.MAX_SAFE_INTEGER;
@ -1632,7 +1636,7 @@ export function BotDashboardModule({
if (aOrder !== bOrder) return aOrder - bOrder;
return aId.localeCompare(bId);
}),
[activeBots],
[activeBots, forcedNodeId],
);
const hasForcedBot = Boolean(String(forcedBotId || '').trim());
const compactListFirstMode = compactMode && !hasForcedBot;
@ -1645,7 +1649,14 @@ export function BotDashboardModule({
return bots.filter((bot) => {
const id = String(bot.id || '').toLowerCase();
const name = String(bot.name || '').toLowerCase();
return id.includes(normalizedBotListQuery) || name.includes(normalizedBotListQuery);
const nodeId = String(bot.node_id || '').toLowerCase();
const nodeName = String(bot.node_display_name || '').toLowerCase();
return (
id.includes(normalizedBotListQuery) ||
name.includes(normalizedBotListQuery) ||
nodeId.includes(normalizedBotListQuery) ||
nodeName.includes(normalizedBotListQuery)
);
});
}, [bots, normalizedBotListQuery]);
const botListTotalPages = Math.max(1, Math.ceil(filteredBots.length / botListPageSize));
@ -1654,6 +1665,32 @@ export function BotDashboardModule({
const start = (page - 1) * botListPageSize;
return filteredBots.slice(start, start + botListPageSize);
}, [filteredBots, botListPage, botListTotalPages, botListPageSize]);
const pagedBotGroups = useMemo<NodeBotGroup[]>(() => {
const unknownNodeLabel = locale === 'zh' ? '未命名节点' : 'Unnamed node';
const groups = new Map<string, NodeBotGroup>();
pagedBots.forEach((bot) => {
const nodeId = String(bot.node_id || 'local').trim() || 'local';
const label = String(bot.node_display_name || '').trim() || nodeId || unknownNodeLabel;
const key = nodeId.toLowerCase();
const existing = groups.get(key);
if (existing) {
existing.bots.push(bot);
return;
}
groups.set(key, {
key,
label,
nodeId,
bots: [bot],
});
});
return Array.from(groups.values()).sort((a, b) => {
if (a.key === b.key) return 0;
if (a.key === 'local') return -1;
if (b.key === 'local') return 1;
return a.label.localeCompare(b.label, undefined, { sensitivity: 'base' });
});
}, [locale, pagedBots]);
const selectedBot = selectedBotId ? activeBots[selectedBotId] : undefined;
const forcedBotMissing = Boolean(forcedBotId && bots.length > 0 && !activeBots[String(forcedBotId).trim()]);
const messages = selectedBot?.messages || [];
@ -1682,33 +1719,6 @@ export function BotDashboardModule({
return 'ready';
}, [activeTopicOptions, topics]);
const lc = isZh ? channelsZhCn : channelsEn;
const baseImageOptions = useMemo<BaseImageOption[]>(() => {
const imagesByTag = new Map<string, NanobotImage>();
availableImages.forEach((img) => {
const tag = String(img.tag || '').trim();
if (!tag || imagesByTag.has(tag)) return;
imagesByTag.set(tag, img);
});
const options = Array.from(imagesByTag.entries())
.sort((a, b) => a[0].localeCompare(b[0]))
.map(([tag, img]) => {
const status = String(img.status || '').toUpperCase() || 'UNKNOWN';
return {
tag,
label: `${tag} · ${status}`,
disabled: status !== 'READY',
};
});
const currentTag = String(editForm.image_tag || '').trim();
if (currentTag && !options.some((opt) => opt.tag === currentTag)) {
options.unshift({
tag: currentTag,
label: isZh ? `${currentTag} · 未登记(只读)` : `${currentTag} · unregistered (read-only)`,
disabled: true,
});
}
return options;
}, [availableImages, editForm.image_tag, isZh]);
const runtimeMoreLabel = isZh ? '更多' : 'More';
const effectiveTopicPresetTemplates = useMemo(
() => (topicPresetTemplates.length > 0 ? topicPresetTemplates : DEFAULT_TOPIC_PRESET_TEMPLATES),
@ -1720,28 +1730,6 @@ export function BotDashboardModule({
if (!source) return t.topicPresetBlank;
return resolvePresetText(source.name, isZh ? 'zh-cn' : 'en') || source.topic_key || source.id;
}, [effectiveTopicPresetTemplates, isZh, newTopicSource, t.topicPresetBlank]);
const templateAgentCount = useMemo(() => {
try {
const parsed = JSON.parse(templateAgentText || "{}");
if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) return 5;
const row = parsed as Record<string, unknown>;
return ["agents_md", "soul_md", "user_md", "tools_md", "identity_md"].filter((k) =>
Object.prototype.hasOwnProperty.call(row, k),
).length || 5;
} catch {
return 5;
}
}, [templateAgentText]);
const templateTopicCount = useMemo(() => {
try {
const parsed = JSON.parse(templateTopicText || '{"presets":[]}') as Record<string, unknown>;
const rows = parsed?.presets;
if (Array.isArray(rows)) return rows.length;
return effectiveTopicPresetTemplates.length;
} catch {
return effectiveTopicPresetTemplates.length;
}
}, [templateTopicText, effectiveTopicPresetTemplates.length]);
const selectedBotControlState = selectedBot ? controlStateByBot[selectedBot.id] : undefined;
const selectedBotEnabled = Boolean(selectedBot && selectedBot.enabled !== false);
const selectedBotSendingCount = selectedBot ? Number(sendingByBot[selectedBot.id] || 0) : 0;
@ -2385,28 +2373,17 @@ export function BotDashboardModule({
});
}, [selectedBotId, selectedBot?.send_progress, selectedBot?.send_tool_hints]);
const loadImageOptions = async () => {
const [imagesRes] = await Promise.allSettled([axios.get<NanobotImage[]>(`${APP_ENDPOINTS.apiBase}/images`)]);
if (imagesRes.status === 'fulfilled') {
setAvailableImages(Array.isArray(imagesRes.value.data) ? imagesRes.value.data : []);
} else {
setAvailableImages([]);
}
};
const refresh = async () => {
const forced = String(forcedBotId || '').trim();
if (forced) {
const targetId = String(selectedBotId || forced).trim() || forced;
const botRes = await axios.get(`${APP_ENDPOINTS.apiBase}/bots/${encodeURIComponent(targetId)}`);
setBots(botRes.data ? [botRes.data] : []);
await loadImageOptions();
return;
}
const botsRes = await axios.get(`${APP_ENDPOINTS.apiBase}/bots`);
setBots(botsRes.data);
await loadImageOptions();
};
const ensureSelectedBotDetail = useCallback(async () => {
@ -2441,17 +2418,6 @@ export function BotDashboardModule({
void loadResourceSnapshot(botId);
};
useEffect(() => {
void loadImageOptions();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
useEffect(() => {
if (!showBaseModal) return;
void loadImageOptions();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [showBaseModal]);
useEffect(() => {
if (!showResourceModal || !resourceBotId) return;
let stopped = false;
@ -3811,71 +3777,6 @@ export function BotDashboardModule({
return null;
};
const openTemplateManager = async () => {
setBotListMenuOpen(false);
setIsLoadingTemplates(true);
try {
const res = await axios.get(`${APP_ENDPOINTS.apiBase}/system/templates`);
const agentRaw = res.data?.agent_md_templates;
const topicRaw = res.data?.topic_presets;
setTemplateAgentText(JSON.stringify(agentRaw && typeof agentRaw === 'object' ? agentRaw : {}, null, 2));
setTemplateTopicText(JSON.stringify(topicRaw && typeof topicRaw === 'object' ? topicRaw : { presets: [] }, null, 2));
setTemplateTab('agent');
setShowTemplateModal(true);
} catch {
notify(t.templateLoadFail, { tone: 'error' });
} finally {
setIsLoadingTemplates(false);
}
};
const saveTemplateManager = async (scope: 'agent' | 'topic') => {
let payload: Record<string, unknown>;
try {
if (scope === 'agent') {
const parsedAgent = JSON.parse(templateAgentText || '{}');
if (!parsedAgent || typeof parsedAgent !== 'object' || Array.isArray(parsedAgent)) {
throw new Error(t.templateAgentInvalid);
}
const agentObject = parsedAgent as Record<string, unknown>;
payload = {
agent_md_templates: {
agents_md: String(agentObject.agents_md || ''),
soul_md: String(agentObject.soul_md || ''),
user_md: String(agentObject.user_md || ''),
tools_md: String(agentObject.tools_md || ''),
identity_md: String(agentObject.identity_md || ''),
},
};
} else {
const parsedTopic = JSON.parse(templateTopicText || '{"presets":[]}');
if (!parsedTopic || typeof parsedTopic !== 'object' || Array.isArray(parsedTopic)) {
throw new Error(t.templateTopicInvalid);
}
payload = {
topic_presets: parsedTopic,
};
}
} catch (error: any) {
notify(error?.message || t.templateParseFail, { tone: 'error' });
return;
}
setIsSavingTemplates(true);
try {
await axios.put(`${APP_ENDPOINTS.apiBase}/system/templates`, payload);
notify(t.templateSaved, { tone: 'success' });
if (scope === 'topic') {
const defaults = await axios.get<SystemDefaultsResponse>(`${APP_ENDPOINTS.apiBase}/system/defaults`);
setTopicPresetTemplates(parseTopicPresets(defaults.data?.topic_presets));
}
} catch {
notify(t.templateSaveFail, { tone: 'error' });
} finally {
setIsSavingTemplates(false);
}
};
const batchStartBots = async () => {
if (isBatchOperating) return;
const candidates = bots.filter((bot) => bot.enabled !== false && String(bot.docker_status || '').toUpperCase() !== 'RUNNING');
@ -5017,12 +4918,7 @@ export function BotDashboardModule({
if (mode === 'base') {
payload.name = editForm.name;
payload.access_password = editForm.access_password;
payload.image_tag = editForm.image_tag;
payload.system_timezone = editForm.system_timezone.trim() || defaultSystemTimezone;
const selectedImageOption = baseImageOptions.find((opt) => opt.tag === editForm.image_tag);
if (selectedImageOption?.disabled) {
throw new Error(isZh ? '当前镜像不可用,请选择可用镜像。' : 'Selected image is unavailable.');
}
const normalizedCpuCores = clampCpuCores(Number(paramDraft.cpu_cores));
const normalizedMemoryMb = clampMemoryMb(Number(paramDraft.memory_mb));
const normalizedStorageGb = clampStorageGb(Number(paramDraft.storage_gb));
@ -5235,6 +5131,16 @@ export function BotDashboardModule({
: `${t.titleBots} (${bots.length})`}
</h2>
<div className="ops-list-actions" ref={botListMenuRef}>
{onOpenImageFactory ? (
<LucentIconButton
className="btn btn-secondary btn-sm icon-btn"
onClick={onOpenImageFactory}
tooltip={t.manageImages}
aria-label={t.manageImages}
>
<Boxes size={14} />
</LucentIconButton>
) : null}
<LucentIconButton
className="btn btn-primary btn-sm icon-btn"
onClick={onOpenCreateWizard}
@ -5255,29 +5161,6 @@ export function BotDashboardModule({
</LucentIconButton>
{botListMenuOpen ? (
<div className="ops-more-menu" role="menu" aria-label={t.extensions}>
<button
className="ops-more-item"
role="menuitem"
disabled={!onOpenImageFactory}
onClick={() => {
setBotListMenuOpen(false);
onOpenImageFactory?.();
}}
>
<Boxes size={14} />
<span>{t.manageImages}</span>
</button>
<button
className="ops-more-item"
role="menuitem"
disabled={isLoadingTemplates}
onClick={() => {
void openTemplateManager();
}}
>
<FileText size={14} />
<span>{t.templateManager}</span>
</button>
<button
className="ops-more-item"
role="menuitem"
@ -5350,7 +5233,36 @@ export function BotDashboardModule({
<div className="ops-bot-list-empty">{t.syncingPageSize}</div>
) : null}
{botListPageSizeReady
? pagedBots.map((bot) => {
? pagedBotGroups.map((group) => {
const modeSet = Array.from(
new Set(
group.bots.map((bot) => {
const transport = String(bot.transport_kind || '').trim() || 'direct';
const runtime = String(bot.runtime_kind || '').trim() || 'docker';
return `${transport}/${runtime}`;
}),
),
);
return (
<div key={group.key} className="ops-bot-group">
<div className="ops-bot-group-head">
<div className="ops-bot-group-title-wrap">
<div className="ops-bot-group-title">{group.label}</div>
<div className="ops-bot-group-subtitle">
<span className="ops-bot-group-chip">{t.nodeLabel}</span>
<span className="mono">{group.nodeId}</span>
<span className="ops-bot-group-sep"></span>
<span>{t.nodeGroupCount(group.bots.length)}</span>
{modeSet.length > 0 ? (
<>
<span className="ops-bot-group-sep"></span>
<span className="mono">{modeSet.join(', ')}</span>
</>
) : null}
</div>
</div>
</div>
{group.bots.map((bot) => {
const selected = selectedBotId === bot.id;
const controlState = controlStateByBot[bot.id];
const isOperating = operatingBotId === bot.id;
@ -5360,6 +5272,7 @@ export function BotDashboardModule({
const isEnabling = controlState === 'enabling';
const isDisabling = controlState === 'disabling';
const isRunning = String(bot.docker_status || '').toUpperCase() === 'RUNNING';
const runtimeKind = String(bot.runtime_kind || '').trim().toLowerCase() || 'docker';
return (
<div
key={bot.id}
@ -5381,16 +5294,16 @@ export function BotDashboardModule({
<div className="ops-bot-name">{bot.name}</div>
<LucentIconButton
className="ops-bot-open-inline"
onClick={(e) => {
e.stopPropagation();
const target = `${window.location.origin}/bot/${encodeURIComponent(bot.id)}`;
window.open(target, '_blank', 'noopener,noreferrer');
}}
tooltip={isZh ? '新页面打开' : 'Open in new page'}
aria-label={isZh ? '新页面打开' : 'Open in new page'}
>
<ExternalLink size={11} />
</LucentIconButton>
onClick={(e) => {
e.stopPropagation();
const target = `${window.location.origin}/bot/${encodeURIComponent(bot.id)}`;
window.open(target, '_blank', 'noopener,noreferrer');
}}
tooltip={isZh ? '新页面打开' : 'Open in new page'}
aria-label={isZh ? '新页面打开' : 'Open in new page'}
>
<ExternalLink size={11} />
</LucentIconButton>
</div>
<div className="mono ops-bot-id">{bot.id}</div>
</div>
@ -5398,10 +5311,20 @@ export function BotDashboardModule({
{!isEnabled ? (
<span className="badge badge-err">{t.disabled}</span>
) : null}
<span className={`ops-runtime-kind-badge ${runtimeKind === 'native' ? 'is-native' : 'is-docker'}`}>
{runtimeKind === 'native' ? 'NATIVE' : 'DOCKER'}
</span>
<span className={bot.docker_status === 'RUNNING' ? 'badge badge-ok' : 'badge badge-unknown'}>{bot.docker_status}</span>
</div>
</div>
<div className="ops-bot-meta">{t.image}: <span className="mono">{bot.image_tag || '-'}</span></div>
<div className="ops-bot-meta">
<span className="mono">
{(String(bot.transport_kind || '').trim() || 'direct')}/{(String(bot.runtime_kind || '').trim() || 'docker')}
</span>
<span> · </span>
<span className="mono">{String(bot.core_adapter || '').trim() || 'nanobot'}</span>
</div>
<div className="ops-bot-actions">
<label
className="ops-bot-enable-switch"
@ -5467,6 +5390,9 @@ export function BotDashboardModule({
</div>
</div>
</div>
);
})}
</div>
);
})
: null}
@ -6330,18 +6256,6 @@ export function BotDashboardModule({
toggleLabels={passwordToggleLabels}
/>
<label className="field-label">{t.baseImageReadonly}</label>
<LucentSelect
value={editForm.image_tag}
onChange={(e) => setEditForm((p) => ({ ...p, image_tag: e.target.value }))}
>
{baseImageOptions.map((img) => (
<option key={img.tag} value={img.tag} disabled={img.disabled}>
{img.label}
</option>
))}
</LucentSelect>
<label className="field-label">{isZh ? '系统时区' : 'System Timezone'}</label>
<LucentSelect
value={editForm.system_timezone || defaultSystemTimezone}
@ -6417,6 +6331,7 @@ export function BotDashboardModule({
<option value="openrouter">openrouter</option>
<option value="dashscope">dashscope (aliyun qwen)</option>
<option value="openai">openai</option>
<option value="vllm">vllm (openai-compatible)</option>
<option value="deepseek">deepseek</option>
<option value="kimi">kimi (moonshot)</option>
<option value="minimax">minimax</option>
@ -7667,73 +7582,6 @@ export function BotDashboardModule({
</div>
)}
{showTemplateModal && (
<div className="modal-mask" onClick={() => setShowTemplateModal(false)}>
<div className="modal-card modal-wide" onClick={(e) => e.stopPropagation()}>
<div className="modal-title-row modal-title-with-close">
<div className="modal-title-main">
<h3>{t.templateManagerTitle}</h3>
</div>
<div className="modal-title-actions">
<LucentIconButton className="btn btn-secondary btn-sm icon-btn" onClick={() => setShowTemplateModal(false)} tooltip={t.close} aria-label={t.close}>
<X size={14} />
</LucentIconButton>
</div>
</div>
<div className="ops-template-tabs" role="tablist" aria-label={t.templateManagerTitle}>
<button
className={`ops-template-tab ${templateTab === 'agent' ? 'is-active' : ''}`}
onClick={() => setTemplateTab('agent')}
role="tab"
aria-selected={templateTab === 'agent'}
>
<span className="ops-template-tab-label">{`${t.templateTabAgent} (${templateAgentCount})`}</span>
</button>
<button
className={`ops-template-tab ${templateTab === 'topic' ? 'is-active' : ''}`}
onClick={() => setTemplateTab('topic')}
role="tab"
aria-selected={templateTab === 'topic'}
>
<span className="ops-template-tab-label">{`${t.templateTabTopic} (${templateTopicCount})`}</span>
</button>
</div>
<div className="ops-config-grid" style={{ gridTemplateColumns: '1fr' }}>
{templateTab === 'agent' ? (
<div className="ops-config-field">
<textarea
className="textarea md-area mono"
rows={16}
value={templateAgentText}
onChange={(e) => setTemplateAgentText(e.target.value)}
placeholder='{"agents_md":"..."}'
/>
</div>
) : (
<div className="ops-config-field">
<textarea
className="textarea md-area mono"
rows={16}
value={templateTopicText}
onChange={(e) => setTemplateTopicText(e.target.value)}
placeholder='{"presets":[...]}'
/>
</div>
)}
</div>
<div className="row-between">
<button className="btn btn-secondary" onClick={() => setShowTemplateModal(false)}>{t.cancel}</button>
<button className="btn btn-primary" disabled={isSavingTemplates} onClick={() => void saveTemplateManager(templateTab)}>
{isSavingTemplates ? t.processing : t.save}
</button>
</div>
</div>
</div>
)}
{showAgentModal && (
<div className="modal-mask" onClick={() => setShowAgentModal(false)}>
<div className="modal-card modal-wide" onClick={(e) => e.stopPropagation()}>

View File

@ -128,6 +128,7 @@ export function CreateBotModal({ isOpen, onClose, onSuccess }: CreateBotModalPro
onChange={(e) => setFormData({ ...formData, llm_provider: e.target.value })}
>
<option value="openai">OpenAI</option>
<option value="vllm">vLLM (OpenAI-compatible)</option>
<option value="deepseek">DeepSeek</option>
<option value="kimi">Kimi (Moonshot)</option>
<option value="minimax">MiniMax</option>

View File

@ -17,6 +17,7 @@ import { getSystemTimezoneOptions } from '../../utils/systemTimezones';
// Tab identifiers for the agent editor views.
type AgentTab = 'AGENTS' | 'SOUL' | 'USER' | 'TOOLS' | 'IDENTITY';
// Chat channel integrations a bot can be configured with.
type ChannelType = 'feishu' | 'qq' | 'dingtalk' | 'telegram' | 'slack';
// How a bot process is hosted: inside a Docker container or as a native OS process.
type RuntimeKind = 'docker' | 'native';
// Sentinel option value meaning "no channel selected" in channel pickers.
const EMPTY_CHANNEL_PICKER = '__none__';
interface WizardChannelConfig {
@ -70,6 +71,13 @@ const providerPresets: Record<string, { model: string; note: { 'zh-cn': string;
en: 'OpenAI native models.',
},
},
vllm: {
model: 'Qwen/Qwen2.5-7B-Instruct',
note: {
'zh-cn': 'vLLMOpenAI 兼容)接口,请填写 API Base例如 http://127.0.0.1:8000/v1。',
en: 'vLLM (OpenAI-compatible). Please set API Base, e.g. http://127.0.0.1:8000/v1.',
},
},
deepseek: {
model: 'deepseek-chat',
note: {
@ -136,9 +144,55 @@ const optionalChannelTypes: ChannelType[] = ['feishu', 'qq', 'dingtalk', 'telegr
// Props for the bot-creation wizard. All `initial*` fields are optional and,
// when present, pre-bind the wizard to a specific managed node (id, display
// name, transport/runtime, node capabilities/metadata, and core adapter).
interface BotWizardModuleProps {
  // Invoked after a bot is created successfully.
  onCreated?: () => void;
  // Invoked to navigate back to the dashboard view.
  onGoDashboard?: () => void;
  initialNodeId?: string;
  initialNodeDisplayName?: string;
  initialTransportKind?: string;
  initialRuntimeKind?: string;
  // Runtime kinds the target node offers (e.g. ['docker', 'native']).
  initialRuntimeOptions?: string[];
  // Capability report from the node (runtime / process sections are read).
  initialNodeCapabilities?: Record<string, unknown>;
  // Node metadata blob (native_command / native_workdir are read).
  initialNodeMetadata?: Record<string, unknown>;
  initialCoreAdapter?: string;
}
export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModuleProps) {
// Coerce an arbitrary runtime string into a RuntimeKind. Anything other than
// (case-insensitive, trimmed) 'native' resolves to the supplied fallback.
function normalizeRuntimeKind(value: string | undefined, fallback: RuntimeKind = 'docker'): RuntimeKind {
  const candidate = String(value || '').trim().toLowerCase();
  if (candidate === 'native') {
    return 'native';
  }
  return fallback;
}
/**
 * Normalize a raw runtime-option list into a deduplicated RuntimeKind list.
 *
 * Each entry is normalized via normalizeRuntimeKind, duplicates are dropped,
 * and the normalized fallback (defaulting to 'docker') is appended when not
 * already present — so the result is never empty.
 */
function normalizeRuntimeOptions(options?: string[], fallback?: string): RuntimeKind[] {
  const rows: RuntimeKind[] = [];
  for (const option of options || []) {
    const normalized = normalizeRuntimeKind(option);
    if (!rows.includes(normalized)) rows.push(normalized);
  }
  const fallbackRuntime = normalizeRuntimeKind(fallback, 'docker');
  if (!rows.includes(fallbackRuntime)) rows.push(fallbackRuntime);
  // The fallback push above guarantees `rows` is non-empty, so the original
  // `if (rows.length === 0) rows.push('docker')` guard was unreachable and
  // has been removed.
  return rows;
}
// Human-readable label for a runtime kind, localized for zh/en.
function runtimeLabel(runtimeKind: RuntimeKind, isZh: boolean) {
  if (runtimeKind === 'native') {
    return isZh ? 'Native 进程' : 'Native Process';
  }
  return 'Docker';
}
// Render a launcher command for display: an array of argv parts becomes a
// space-joined string (empty parts dropped); anything else is stringified
// and trimmed, with null/undefined collapsing to ''.
function formatLauncherCommand(value: unknown) {
  if (!Array.isArray(value)) {
    return String(value || '').trim();
  }
  const parts: string[] = [];
  for (const item of value) {
    const text = String(item || '');
    if (text) {
      parts.push(text);
    }
  }
  return parts.join(' ');
}
export function BotWizardModule({
onCreated,
onGoDashboard,
initialNodeId,
initialNodeDisplayName,
initialTransportKind,
initialRuntimeKind,
initialRuntimeOptions,
initialNodeCapabilities,
initialNodeMetadata,
initialCoreAdapter,
}: BotWizardModuleProps) {
const { locale } = useAppStore();
const { notify } = useLucentPrompt();
const [step, setStep] = useState(1);
@ -163,9 +217,34 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
const [botIdStatus, setBotIdStatus] = useState<'idle' | 'checking' | 'available' | 'exists' | 'invalid'>('idle');
const [botIdStatusText, setBotIdStatusText] = useState('');
const [defaultSystemTimezone, setDefaultSystemTimezone] = useState('Asia/Shanghai');
const runtimeOptions = useMemo(
() => normalizeRuntimeOptions(initialRuntimeOptions, initialRuntimeKind),
[initialRuntimeKind, initialRuntimeOptions],
);
const defaultRuntimeKind = useMemo<RuntimeKind>(
() => normalizeRuntimeKind(initialRuntimeKind, runtimeOptions[0] || 'docker'),
[initialRuntimeKind, runtimeOptions],
);
const [selectedRuntimeKind, setSelectedRuntimeKind] = useState<RuntimeKind>(defaultRuntimeKind);
const readyImages = useMemo(() => images.filter((img) => img.status === 'READY'), [images]);
const isZh = locale === 'zh';
const isDockerRuntime = selectedRuntimeKind === 'docker';
const runtimeCaps =
initialNodeCapabilities && typeof initialNodeCapabilities.runtime === 'object'
? (initialNodeCapabilities.runtime as Record<string, unknown>)
: null;
const processCaps =
initialNodeCapabilities && typeof initialNodeCapabilities.process === 'object'
? (initialNodeCapabilities.process as Record<string, unknown>)
: null;
const nativeRuntimeSupported = runtimeCaps?.native === true;
const nativeLauncherAvailable = processCaps?.available === true;
const nativeLauncherCommand = formatLauncherCommand(processCaps?.command);
const configuredNativeCommand = String(initialNodeMetadata?.native_command || '').trim();
const configuredNativeWorkdir = String(initialNodeMetadata?.native_workdir || '').trim();
const displayedLauncherCommand = configuredNativeCommand || nativeLauncherCommand;
const nativePreflightReady = !initialNodeId || (nativeRuntimeSupported && nativeLauncherAvailable);
const ui = pickLocale(locale, { 'zh-cn': wizardZhCn, en: wizardEn });
const lc = isZh ? channelsZhCn : channelsEn;
const passwordToggleLabels = isZh
@ -189,6 +268,10 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
[form.env_params],
);
useEffect(() => {
setSelectedRuntimeKind(defaultRuntimeKind);
}, [defaultRuntimeKind]);
useEffect(() => {
const loadSystemDefaults = async () => {
try {
@ -255,6 +338,7 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
);
const loadImages = async () => {
if (!isDockerRuntime) return [];
setIsLoadingImages(true);
try {
const res = await axios.get<NanobotImage[]>(`${APP_ENDPOINTS.apiBase}/images`);
@ -271,9 +355,19 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
const next = async () => {
if (step === 1) {
const ready = await loadImages();
if (ready.length === 0) {
notify(ui.noReadyImage, { tone: 'warning' });
if (isDockerRuntime) {
const ready = await loadImages();
if (ready.length === 0) {
notify(ui.noReadyImage, { tone: 'warning' });
return;
}
} else if (!nativePreflightReady) {
notify(
isZh
? '当前节点未报告可用的 native launcher请先在目标机器安装并配置 native engine。'
: 'The selected node does not report an available native launcher. Install and configure the native engine on the target machine first.',
{ tone: 'warning' },
);
return;
}
}
@ -299,7 +393,7 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
notify(ui.botIdChecking, { tone: 'warning' });
return;
}
if (!form.name || !form.api_key || !form.image_tag || !form.llm_model) {
if (!form.name || !form.api_key || !form.llm_model || (isDockerRuntime && !form.image_tag)) {
notify(ui.requiredBase, { tone: 'warning' });
return;
}
@ -346,11 +440,15 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
await axios.post(`${APP_ENDPOINTS.apiBase}/bots`, {
id: form.id,
name: form.name,
node_id: initialNodeId || undefined,
transport_kind: initialTransportKind || undefined,
runtime_kind: selectedRuntimeKind,
core_adapter: initialCoreAdapter || undefined,
llm_provider: form.llm_provider,
llm_model: form.llm_model,
api_key: form.api_key,
api_base: form.api_base || undefined,
image_tag: form.image_tag,
image_tag: isDockerRuntime ? form.image_tag : undefined,
system_prompt: form.soul_md,
temperature: clampTemperature(Number(form.temperature)),
top_p: Number(form.top_p),
@ -388,6 +486,7 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
setMemoryMbDraft(String(initialForm.memory_mb));
setStorageGbDraft(String(initialForm.storage_gb));
setStep(1);
setSelectedRuntimeKind(defaultRuntimeKind);
setTestResult('');
setBotIdStatus('idle');
setBotIdStatusText('');
@ -642,22 +741,91 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
{step === 1 && (
<div className="stack">
<button className="btn btn-secondary" onClick={() => void loadImages()}>{isLoadingImages ? ui.loading : ui.loadImages}</button>
<div className="list-scroll wizard-image-list" style={{ maxHeight: '52vh' }}>
{readyImages.map((img) => (
<label key={img.tag} className="card selectable" style={{ display: 'block', cursor: 'pointer' }}>
<input
type="radio"
checked={form.image_tag === img.tag}
onChange={() => setForm((prev) => ({ ...prev, image_tag: img.tag }))}
style={{ marginRight: 8 }}
/>
<span className="mono">{img.tag}</span>
<span style={{ marginLeft: 10 }} className="badge badge-ok">READY</span>
</label>
))}
{readyImages.length === 0 && <div style={{ color: 'var(--muted)' }}>{ui.noReady}</div>}
<div className="card wizard-note-card stack" style={{ gap: 10 }}>
<div className="section-mini-title">{isZh ? '运行时选择' : 'Target Runtime'}</div>
{initialNodeId ? (
<div className="field-label">
{isZh ? '目标节点' : 'Target Node'}:{' '}
<span className="mono">
{initialNodeDisplayName || initialNodeId}
{initialNodeDisplayName && initialNodeDisplayName !== initialNodeId ? ` (${initialNodeId})` : ''}
</span>
</div>
) : null}
<LucentSelect value={selectedRuntimeKind} onChange={(event) => setSelectedRuntimeKind(normalizeRuntimeKind(event.target.value))}>
{runtimeOptions.map((runtimeKind) => (
<option key={runtimeKind} value={runtimeKind}>
{runtimeLabel(runtimeKind, isZh)}
</option>
))}
</LucentSelect>
<div className="field-label">
{isDockerRuntime
? (isZh ? 'Docker 模式需要选择 READY 镜像。' : 'Docker mode requires a READY image.')
: (isZh
? 'Native 模式不需要 Docker 镜像,但目标节点必须已经安装并配置 native engine / launcher。'
: 'Native mode does not require a Docker image, but the target node must already have the native engine / launcher installed and configured.')}
</div>
</div>
{isDockerRuntime ? (
<>
<button className="btn btn-secondary" onClick={() => void loadImages()}>{isLoadingImages ? ui.loading : ui.loadImages}</button>
<div className="list-scroll wizard-image-list" style={{ maxHeight: '52vh' }}>
{readyImages.map((img) => (
<label key={img.tag} className="card selectable" style={{ display: 'block', cursor: 'pointer' }}>
<input
type="radio"
checked={form.image_tag === img.tag}
onChange={() => setForm((prev) => ({ ...prev, image_tag: img.tag }))}
style={{ marginRight: 8 }}
/>
<span className="mono">{img.tag}</span>
<span style={{ marginLeft: 10 }} className="badge badge-ok">READY</span>
</label>
))}
{readyImages.length === 0 && <div style={{ color: 'var(--muted)' }}>{ui.noReady}</div>}
</div>
</>
) : (
<div className="card wizard-note-card stack" style={{ gap: 10 }}>
<div className="section-mini-title">{isZh ? 'Native Preflight' : 'Native Preflight'}</div>
<div>
{isZh ? '运行时能力' : 'Runtime Capability'}:{' '}
<span className={`badge ${nativeRuntimeSupported ? 'badge-ok' : 'badge-err'}`}>
{nativeRuntimeSupported ? (isZh ? '已支持' : 'Supported') : (isZh ? '未上报' : 'Missing')}
</span>
</div>
<div>
{isZh ? 'Launcher 可用性' : 'Launcher Availability'}:{' '}
<span className={`badge ${nativeLauncherAvailable ? 'badge-ok' : 'badge-err'}`}>
{nativeLauncherAvailable ? (isZh ? '可用' : 'Available') : (isZh ? '不可用' : 'Unavailable')}
</span>
</div>
<div>
{isZh ? 'Launcher 命令' : 'Launcher Command'}:{' '}
<span className="mono">{displayedLauncherCommand || (isZh ? '未上报' : 'Not reported')}</span>
</div>
{configuredNativeWorkdir ? (
<div>
{isZh ? 'Launcher 工作目录' : 'Launcher Workdir'}:{' '}
<span className="mono">{configuredNativeWorkdir}</span>
</div>
) : null}
<div className="field-label">
{isZh
? 'Dashboard 会继续由 edge 统一管理这个 Bot 的 .nanobot 工作区和配置;这里校验的是节点能否真正启动 native 进程。'
: 'Dashboard will still let edge manage this bot workspace and config. This preflight only checks whether the node can actually launch a native process.'}
</div>
{!nativePreflightReady ? (
<div className="field-label" style={{ color: 'var(--err)' }}>
{isZh
? '当前节点还不能启动 native Bot。请先在目标机器安装 native engine并确保 EDGE_NATIVE_COMMAND 可执行。'
: 'This node cannot launch native bots yet. Install the native engine on the target machine and make sure EDGE_NATIVE_COMMAND is executable.'}
</div>
) : null}
</div>
)}
</div>
)}
@ -752,6 +920,7 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
<option value="openrouter">openrouter</option>
<option value="dashscope">dashscope (aliyun qwen)</option>
<option value="openai">openai</option>
<option value="vllm">vllm (openai-compatible)</option>
<option value="deepseek">deepseek</option>
<option value="kimi">kimi (moonshot)</option>
<option value="minimax">minimax</option>
@ -878,7 +1047,23 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
{step === 4 && (
<div className="stack">
<div className="card summary-grid">
<div>{ui.image}: <span className="mono">{form.image_tag}</span></div>
<div>{isZh ? '运行时' : 'Runtime'}: <span className="mono">{selectedRuntimeKind}</span></div>
{isDockerRuntime ? <div>{ui.image}: <span className="mono">{form.image_tag}</span></div> : null}
{initialNodeId ? (
<div>
{isZh ? '目标节点' : 'Target Node'}:{' '}
<span className="mono">
{initialNodeDisplayName || initialNodeId}
{initialNodeDisplayName && initialNodeDisplayName !== initialNodeId ? ` (${initialNodeId})` : ''}
{' / '}
{String(initialTransportKind || 'edge').trim() || 'edge'}
/
{selectedRuntimeKind}
{' · '}
{String(initialCoreAdapter || 'nanobot').trim() || 'nanobot'}
</span>
</div>
) : null}
<div>Bot ID: <span className="mono">{form.id}</span></div>
<div>{ui.name}: {form.name}</div>
<div>Provider: {form.llm_provider}</div>

View File

@ -0,0 +1,825 @@
import { useEffect, useMemo, useState } from 'react';
import axios from 'axios';
import { ArrowRightLeft, Bot, ChevronRight, FileText, Hammer, Pencil, Plus, RefreshCw, ServerCog, Settings2, Sparkles, Trash2, Waypoints, Wifi, X } from 'lucide-react';
import { APP_ENDPOINTS } from '../../config/env';
import { useAppStore } from '../../store/appStore';
import type { BotState } from '../../types/bot';
import type {
ManagedNodeConnectivityResult,
ManagedNodeDraft,
ManagedNodeItem,
ManagedNodeNativePreflightResult,
PlatformOverviewResponse,
} from './types';
import { useLucentPrompt } from '../../components/lucent/LucentPromptProvider';
import { LucentIconButton } from '../../components/lucent/LucentIconButton';
import { TemplateManagerModal } from './components/TemplateManagerModal';
import { PlatformSettingsModal } from './components/PlatformSettingsModal';
import { BotDeployModal } from './components/BotDeployModal';
import { nodeRuntimeLabel, nodeSupportsRuntime } from './runtimeSupport';
import {
normalizePlatformPageSize,
writeCachedPlatformPageSize,
} from '../../utils/platformPageSize';
// SPA navigation without a full reload: push the new URL onto history, then
// synthesize a popstate event so route listeners re-evaluate the location.
// No-op outside a browser (SSR / tests where `window` is undefined).
function navigatePlatform(path: string) {
  if (typeof window !== 'undefined') {
    window.history.pushState({}, '', path);
    window.dispatchEvent(new PopStateEvent('popstate'));
  }
}
// Build the dashboard route for a node, trimming and URL-encoding the id.
function nodeHref(nodeId: string) {
  const trimmedId = String(nodeId || '').trim();
  return `/dashboard/nodes/${encodeURIComponent(trimmedId)}`;
}
// Props for the node home page.
interface NodeHomePageProps {
  // When true, the page renders with the compact layout shell class.
  compactMode: boolean;
}
// Blank form state used when the node editor opens in "create" mode.
// Defaults: enabled, edge transport, docker runtime, nanobot core adapter,
// inherited native sandbox mode; all path/command fields empty.
const emptyNodeDraft: ManagedNodeDraft = {
  node_id: '',
  display_name: '',
  base_url: '',
  enabled: true,
  auth_token: '',
  transport_kind: 'edge',
  runtime_kind: 'docker',
  core_adapter: 'nanobot',
  workspace_root: '',
  native_command: '',
  native_workdir: '',
  native_sandbox_mode: 'inherit',
};
/**
 * Aggregate bot counts for a single managed node.
 *
 * A bot belongs to the node when its (trimmed, lowercased) node_id matches
 * the given nodeId; bots with no node_id are treated as belonging to 'local'.
 * Returns { total, running, stopped, disabled } where `running` counts bots
 * whose docker_status is 'RUNNING' (case-insensitive), `disabled` counts
 * bots with enabled === false, and `stopped` is the non-negative remainder.
 */
function summarizeNodeBots(nodeId: string, bots: BotState[]) {
  // Hoisted out of the filter: the original recomputed trim().toLowerCase()
  // on the same nodeId for every bot in the list.
  const target = String(nodeId || '').trim().toLowerCase();
  const rows = bots.filter((bot) => String(bot.node_id || 'local').trim().toLowerCase() === target);
  const total = rows.length;
  const running = rows.filter((bot) => String(bot.docker_status || '').toUpperCase() === 'RUNNING').length;
  const disabled = rows.filter((bot) => bot.enabled === false).length;
  return {
    total,
    running,
    stopped: Math.max(0, total - running - disabled),
    disabled,
  };
}
// Convert a stored managed-node record into the editable editor draft.
// Native/workspace fields prefer the top-level column and fall back to the
// node's metadata blob; the auth token is always left blank in the draft.
function nodeToDraft(node: ManagedNodeItem): ManagedNodeDraft {
  const metadata = (node.metadata || {}) as Record<string, unknown>;
  const pick = (primary: unknown, metaKey: string, fallback: string) =>
    String(primary || metadata[metaKey] || fallback);
  return {
    node_id: String(node.node_id || '').trim().toLowerCase(),
    display_name: String(node.display_name || ''),
    base_url: String(node.base_url || ''),
    enabled: node.enabled !== false,
    // Always blank here — the stored token is never pre-filled into the form.
    auth_token: '',
    // NOTE(review): transport_kind is reset to 'edge' instead of being read
    // from the node record — confirm this is intentional.
    transport_kind: 'edge',
    runtime_kind: String(node.runtime_kind || 'docker'),
    core_adapter: String(node.core_adapter || 'nanobot'),
    workspace_root: pick(node.workspace_root, 'workspace_root', ''),
    native_command: pick(node.native_command, 'native_command', ''),
    native_workdir: pick(node.native_workdir, 'native_workdir', ''),
    native_sandbox_mode: pick(node.native_sandbox_mode, 'native_sandbox_mode', 'inherit'),
  };
}
// Map a node record to its status badge (CSS class + localized label).
// An explicitly disabled node wins over any reported status; otherwise the
// lowercased status selects offline/online, and anything else is "unknown".
function nodeStatusMeta(node: ManagedNodeItem, isZh: boolean) {
  if (node.enabled === false) {
    return { className: 'badge badge-err', label: isZh ? '已停用' : 'Disabled' };
  }
  switch (String(node.status || '').toLowerCase()) {
    case 'offline':
      return { className: 'badge badge-err', label: isZh ? '离线' : 'Offline' };
    case 'online':
      return { className: 'badge badge-ok', label: isZh ? '在线' : 'Online' };
    default:
      return { className: 'badge badge-unknown', label: isZh ? '未知' : 'Unknown' };
  }
}
// Collapse a node capability report into a short list of display tags:
// every runtime entry whose value is exactly true, plus 'workspace' and
// 'monitor' when those sections contain at least one true flag. The result
// is deduplicated and capped at five tags.
function summarizeCapabilities(capabilities?: Record<string, unknown>) {
  const tags: string[] = [];
  const runtimeSection = capabilities?.runtime;
  if (runtimeSection && typeof runtimeSection === 'object') {
    for (const [name, flag] of Object.entries(runtimeSection as Record<string, unknown>)) {
      if (flag === true) {
        tags.push(name);
      }
    }
  }
  const hasTrueFlag = (section: unknown) =>
    Boolean(section) && typeof section === 'object' &&
    Object.values(section as Record<string, unknown>).includes(true);
  if (hasTrueFlag(capabilities?.workspace)) {
    tags.push('workspace');
  }
  if (hasTrueFlag(capabilities?.monitor)) {
    tags.push('monitor');
  }
  return Array.from(new Set(tags)).slice(0, 5);
}
// Format a node's last-seen timestamp for display: '' for missing or
// unparseable values, otherwise a 24-hour MM/DD HH:mm string localized to
// zh-CN when the app locale is 'zh', en-US otherwise.
function formatNodeLastSeen(value: string | null | undefined, locale: string) {
  if (!value) {
    return '';
  }
  const timestamp = new Date(value);
  if (Number.isNaN(timestamp.getTime())) {
    return '';
  }
  const displayLocale = locale === 'zh' ? 'zh-CN' : 'en-US';
  return timestamp.toLocaleString(displayLocale, {
    hour12: false,
    month: '2-digit',
    day: '2-digit',
    hour: '2-digit',
    minute: '2-digit',
  });
}
export function NodeHomePage({ compactMode }: NodeHomePageProps) {
const { activeBots, setBots, locale } = useAppStore();
const { notify, confirm } = useLucentPrompt();
const isZh = locale === 'zh';
const [overview, setOverview] = useState<PlatformOverviewResponse | null>(null);
const [nodes, setNodes] = useState<ManagedNodeItem[]>([]);
const [loading, setLoading] = useState(false);
const [showTemplateManager, setShowTemplateManager] = useState(false);
const [showPlatformSettings, setShowPlatformSettings] = useState(false);
const [showDeployModal, setShowDeployModal] = useState(false);
const [showNodeEditor, setShowNodeEditor] = useState(false);
const [nodeEditorMode, setNodeEditorMode] = useState<'create' | 'edit'>('create');
const [editingNodeId, setEditingNodeId] = useState('');
const [nodeDraft, setNodeDraft] = useState<ManagedNodeDraft>(emptyNodeDraft);
const [savingNode, setSavingNode] = useState(false);
const [testingNode, setTestingNode] = useState(false);
const [nodeTestResult, setNodeTestResult] = useState<ManagedNodeConnectivityResult | null>(null);
const [testingNative, setTestingNative] = useState(false);
const [nativeTestResult, setNativeTestResult] = useState<ManagedNodeNativePreflightResult | null>(null);
const bots = useMemo(() => Object.values(activeBots), [activeBots]);
const nodeCards = useMemo(
() =>
nodes.map((node) => ({
...node,
summary: summarizeNodeBots(node.node_id, bots),
})),
[bots, nodes],
);
const loadAll = async () => {
setLoading(true);
try {
const [nodesRes, botsRes, overviewRes] = await Promise.all([
axios.get<{ items?: ManagedNodeItem[] }>(`${APP_ENDPOINTS.apiBase}/platform/nodes`),
axios.get<BotState[]>(`${APP_ENDPOINTS.apiBase}/bots`),
axios.get<PlatformOverviewResponse>(`${APP_ENDPOINTS.apiBase}/platform/overview`),
]);
setNodes(Array.isArray(nodesRes.data?.items) ? nodesRes.data.items : []);
setBots(Array.isArray(botsRes.data) ? botsRes.data : []);
setOverview(overviewRes.data || null);
const normalizedPageSize = normalizePlatformPageSize(overviewRes.data?.settings?.page_size, 10);
writeCachedPlatformPageSize(normalizedPageSize);
} catch (error: any) {
notify(error?.response?.data?.detail || (isZh ? '读取节点首页失败。' : 'Failed to load node home.'), { tone: 'error' });
} finally {
setLoading(false);
}
};
useEffect(() => {
void loadAll();
const timer = window.setInterval(() => {
void loadAll();
}, 30000);
return () => {
window.clearInterval(timer);
};
}, []);
const summaryBots = overview?.summary.bots;
const summaryUsage = overview?.usage.summary;
const enabledNodes = nodeCards.filter((node) => node.enabled).length;
const disabledNodes = Math.max(0, nodeCards.length - enabledNodes);
const dockerNodes = nodeCards.filter((node) => nodeSupportsRuntime(node, 'docker')).length;
const nativeNodes = nodeCards.filter((node) => nodeSupportsRuntime(node, 'native')).length;
const onlineNodes = nodeCards.filter((node) => String(node.status || '').toLowerCase() === 'online').length;
const offlineNodes = nodeCards.filter((node) => String(node.status || '').toLowerCase() === 'offline').length;
const openCreateNode = () => {
setNodeEditorMode('create');
setEditingNodeId('');
setNodeDraft(emptyNodeDraft);
setNodeTestResult(null);
setNativeTestResult(null);
setShowNodeEditor(true);
};
const openEditNode = (node: ManagedNodeItem) => {
setNodeEditorMode('edit');
setEditingNodeId(String(node.node_id || '').trim().toLowerCase());
setNodeDraft(nodeToDraft(node));
setNodeTestResult(null);
setNativeTestResult(null);
setShowNodeEditor(true);
};
const handleTestNode = async (draft: ManagedNodeDraft) => {
setTestingNode(true);
try {
const res = await axios.post<ManagedNodeConnectivityResult>(`${APP_ENDPOINTS.apiBase}/platform/nodes/test`, draft);
setNodeTestResult(res.data);
notify(
res.data.ok
? (isZh ? '节点连通性测试成功。' : 'Node connectivity test succeeded.')
: (res.data.detail || (isZh ? '节点连通性测试失败。' : 'Node connectivity test failed.')),
{ tone: res.data.ok ? 'success' : 'error' },
);
return res.data;
} catch (error: any) {
const detail = error?.response?.data?.detail || (isZh ? '节点连通性测试失败。' : 'Node connectivity test failed.');
notify(detail, { tone: 'error' });
const fallback: ManagedNodeConnectivityResult = {
ok: false,
status: 'offline',
latency_ms: 0,
detail,
node_self: null,
};
setNodeTestResult(fallback);
return fallback;
} finally {
setTestingNode(false);
}
};
const handleTestNodeNative = async (draft: ManagedNodeDraft) => {
setTestingNative(true);
try {
const res = await axios.post<ManagedNodeNativePreflightResult>(`${APP_ENDPOINTS.apiBase}/platform/nodes/native/preflight`, draft);
setNativeTestResult(res.data);
notify(
res.data.ok
? (isZh ? 'Native Launcher 校验成功。' : 'Native launcher preflight succeeded.')
: (res.data.detail || (isZh ? 'Native Launcher 校验失败。' : 'Native launcher preflight failed.')),
{ tone: res.data.ok ? 'success' : 'error' },
);
return res.data;
} catch (error: any) {
const detail = error?.response?.data?.detail || (isZh ? 'Native Launcher 校验失败。' : 'Native launcher preflight failed.');
notify(detail, { tone: 'error' });
const fallback: ManagedNodeNativePreflightResult = {
ok: false,
status: 'offline',
latency_ms: 0,
detail,
command: String(draft.native_command || '').trim() ? [String(draft.native_command || '').trim()] : [],
workdir: String(draft.native_workdir || '').trim(),
command_available: false,
workdir_exists: !String(draft.native_workdir || '').trim(),
runtime_native_supported: false,
node_self: null,
};
setNativeTestResult(fallback);
return fallback;
} finally {
setTestingNative(false);
}
};
const handleSaveNode = async () => {
const normalizedDraft: ManagedNodeDraft = {
...nodeDraft,
node_id: String(nodeDraft.node_id || '').trim().toLowerCase(),
display_name: String(nodeDraft.display_name || '').trim(),
base_url: String(nodeDraft.base_url || '').trim(),
auth_token: String(nodeDraft.auth_token || '').trim(),
transport_kind: String(nodeDraft.transport_kind || 'edge').trim().toLowerCase() || 'edge',
runtime_kind: String(nodeDraft.runtime_kind || 'docker').trim().toLowerCase() || 'docker',
core_adapter: String(nodeDraft.core_adapter || 'nanobot').trim().toLowerCase() || 'nanobot',
workspace_root: String(nodeDraft.workspace_root || '').trim(),
native_command: String(nodeDraft.native_command || '').trim(),
native_workdir: String(nodeDraft.native_workdir || '').trim(),
native_sandbox_mode: String(nodeDraft.native_sandbox_mode || 'inherit').trim().toLowerCase() || 'inherit',
};
if (!normalizedDraft.node_id) {
notify(isZh ? '请填写节点 ID。' : 'Node ID is required.', { tone: 'warning' });
return;
}
if (!normalizedDraft.base_url) {
notify(isZh ? '请填写 dashboard-edge 地址。' : 'dashboard-edge Base URL is required.', { tone: 'warning' });
return;
}
const isEditing = nodeEditorMode === 'edit';
const targetNodeId = String(editingNodeId || normalizedDraft.node_id || '').trim().toLowerCase();
if (isEditing && !targetNodeId) {
notify(isZh ? '未识别到要更新的节点,请重新打开编辑器。' : 'Failed to resolve target node for update. Please reopen editor.', { tone: 'error' });
return;
}
setNodeDraft(normalizedDraft);
setSavingNode(true);
try {
if (isEditing) {
await axios.put(`${APP_ENDPOINTS.apiBase}/platform/nodes/${encodeURIComponent(targetNodeId)}`, normalizedDraft);
} else {
await axios.post(`${APP_ENDPOINTS.apiBase}/platform/nodes`, normalizedDraft);
}
notify(isEditing ? (isZh ? '节点已更新。' : 'Node updated.') : (isZh ? '节点已创建。' : 'Node created.'), { tone: 'success' });
setNodeEditorMode('create');
setShowNodeEditor(false);
setEditingNodeId('');
await loadAll();
} catch (error: any) {
notify(error?.response?.data?.detail || (isZh ? '保存节点失败。' : 'Failed to save node.'), { tone: 'error' });
} finally {
setSavingNode(false);
}
};
const handleDeleteNode = async (node: ManagedNodeItem) => {
const ok = await confirm({
title: isZh ? '删除节点' : 'Delete Node',
message: isZh ? `确认删除节点 ${node.display_name || node.node_id}` : `Delete node ${node.display_name || node.node_id}?`,
tone: 'warning',
});
if (!ok) return;
try {
await axios.delete(`${APP_ENDPOINTS.apiBase}/platform/nodes/${encodeURIComponent(node.node_id)}`);
notify(isZh ? '节点已删除。' : 'Node deleted.', { tone: 'success' });
await loadAll();
} catch (error: any) {
notify(error?.response?.data?.detail || (isZh ? '删除节点失败。' : 'Failed to delete node.'), { tone: 'error' });
}
};
const handleTestSavedNode = async (node: ManagedNodeItem) => {
try {
const res = await axios.post<ManagedNodeConnectivityResult>(`${APP_ENDPOINTS.apiBase}/platform/nodes/${encodeURIComponent(node.node_id)}/test`);
notify(
res.data.ok
? (isZh ? '节点连通性测试成功。' : 'Node connectivity test succeeded.')
: (res.data.detail || (isZh ? '节点连通性测试失败。' : 'Node connectivity test failed.')),
{ tone: res.data.ok ? 'success' : 'error' },
);
await loadAll();
} catch (error: any) {
notify(error?.response?.data?.detail || (isZh ? '节点连通性测试失败。' : 'Node connectivity test failed.'), { tone: 'error' });
}
};
return (
<>
<div className={`platform-home-shell ${compactMode ? 'is-compact' : ''}`}>
<section className="platform-main">
<div className="platform-summary-grid platform-home-summary-grid">
<div className="panel platform-summary-card">
<div className="platform-summary-icon icon-bot"><Waypoints size={18} /></div>
<div className="platform-summary-label">{isZh ? '节点总览' : 'Nodes'}</div>
<div className="platform-summary-value">{nodeCards.length}</div>
<div className="platform-summary-meta">
{isZh
? `启用 ${enabledNodes} / 停用 ${disabledNodes} / 在线 ${onlineNodes} / 离线 ${offlineNodes}`
: `Enabled ${enabledNodes} / Disabled ${disabledNodes} / Online ${onlineNodes} / Offline ${offlineNodes}`}
</div>
</div>
<div className="panel platform-summary-card">
<div className="platform-summary-icon icon-bot"><Bot size={18} /></div>
<div className="platform-summary-label">{isZh ? 'Bot 概览' : 'Bots'}</div>
<div className="platform-summary-value">{summaryBots?.total || bots.length}</div>
<div className="platform-summary-meta">
{isZh
? `运行 ${summaryBots?.running || 0} / 停止 ${summaryBots?.stopped || 0} / 停用 ${summaryBots?.disabled || 0}`
: `Running ${summaryBots?.running || 0} / Stopped ${summaryBots?.stopped || 0} / Disabled ${summaryBots?.disabled || 0}`}
</div>
</div>
<div className="panel platform-summary-card">
<div className="platform-summary-icon icon-token"><Sparkles size={18} /></div>
<div className="platform-summary-label">{isZh ? '最近 24h Tokens' : '24h Tokens'}</div>
<div className="platform-summary-value">{summaryUsage?.total_tokens || 0}</div>
<div className="platform-summary-meta">
{isZh
? `输入 ${summaryUsage?.input_tokens || 0} / 输出 ${summaryUsage?.output_tokens || 0} / Docker ${dockerNodes} / Native ${nativeNodes}`
: `In ${summaryUsage?.input_tokens || 0} / Out ${summaryUsage?.output_tokens || 0} / Docker ${dockerNodes} / Native ${nativeNodes}`}
</div>
</div>
</div>
<div className="platform-home-body">
<section className="panel stack platform-home-node-section">
<div className="row-between">
<div>
<h2>{isZh ? '节点列表' : 'Nodes'}</h2>
</div>
<div className="platform-node-toolbar">
<button className="btn btn-primary btn-sm" type="button" onClick={openCreateNode}>
<Plus size={14} />
<span style={{ marginLeft: 6 }}>{isZh ? '新增节点' : 'Add Node'}</span>
</button>
<LucentIconButton
className="btn btn-secondary btn-sm icon-btn"
type="button"
onClick={() => void loadAll()}
disabled={loading}
tooltip={loading ? (isZh ? '刷新中...' : 'Refreshing...') : (isZh ? '刷新节点首页' : 'Refresh node home')}
aria-label={loading ? (isZh ? '刷新中...' : 'Refreshing...') : (isZh ? '刷新节点首页' : 'Refresh node home')}
>
<RefreshCw size={14} className={loading ? 'animate-spin' : undefined} />
</LucentIconButton>
</div>
</div>
<div className="platform-node-grid">
{nodeCards.map((node) => (
<button
key={node.node_id}
className={`platform-node-card ${node.enabled ? '' : 'is-disabled'}`}
type="button"
onClick={() => navigatePlatform(nodeHref(node.node_id))}
>
<div className="platform-node-card-head">
<div>
<strong>{node.display_name || node.node_id}</strong>
<div className="mono platform-node-card-id">{node.node_id}</div>
</div>
<div className="platform-node-card-head-actions">
<span className={nodeStatusMeta(node, isZh).className}>{nodeStatusMeta(node, isZh).label}</span>
<LucentIconButton
className="btn btn-secondary btn-sm icon-btn"
onClick={(event) => {
event.stopPropagation();
void handleTestSavedNode(node);
}}
tooltip={isZh ? '测试连通性' : 'Test connectivity'}
aria-label={isZh ? '测试连通性' : 'Test connectivity'}
>
<Wifi size={14} />
</LucentIconButton>
<LucentIconButton
className="btn btn-secondary btn-sm icon-btn"
onClick={(event) => {
event.stopPropagation();
openEditNode(node);
}}
tooltip={isZh ? '编辑节点' : 'Edit node'}
aria-label={isZh ? '编辑节点' : 'Edit node'}
>
<Pencil size={14} />
</LucentIconButton>
{String(node.node_id || '').toLowerCase() !== 'local' ? (
<LucentIconButton
className="btn btn-danger btn-sm icon-btn"
onClick={(event) => {
event.stopPropagation();
void handleDeleteNode(node);
}}
tooltip={isZh ? '删除节点' : 'Delete node'}
aria-label={isZh ? '删除节点' : 'Delete node'}
>
<Trash2 size={14} />
</LucentIconButton>
) : null}
</div>
</div>
<div className="platform-node-card-meta">
<span className="mono">{String(node.transport_kind || 'edge')}/{nodeRuntimeLabel(node)}</span>
<span> · </span>
<span className="mono">{String(node.core_adapter || 'nanobot')}</span>
</div>
{node.base_url ? (
<div className="platform-node-card-url mono">{node.base_url}</div>
) : null}
{String(node.workspace_root || (node.metadata as Record<string, unknown> | undefined)?.workspace_root || '').trim() ? (
<div className="platform-node-card-url mono">
{isZh ? 'workspace: ' : 'workspace: '}
{String(node.workspace_root || (node.metadata as Record<string, unknown> | undefined)?.workspace_root || '').trim()}
</div>
) : null}
<div className="platform-node-card-stats">
<span>{isZh ? `Bot ${node.summary.total}` : `${node.summary.total} bots`}</span>
<span>{isZh ? `运行 ${node.summary.running}` : `${node.summary.running} running`}</span>
<span>{isZh ? `停用 ${node.summary.disabled}` : `${node.summary.disabled} disabled`}</span>
</div>
<div className="platform-node-card-capabilities">
{summarizeCapabilities(node.capabilities).map((tag) => (
<span key={`${node.node_id}-${tag}`} className="badge badge-unknown">{tag}</span>
))}
{summarizeCapabilities(node.capabilities).length === 0 ? (
<span className="platform-node-card-hint">{isZh ? '等待节点能力上报' : 'Waiting for capability report'}</span>
) : null}
</div>
<div className="platform-node-card-last-seen">
{node.last_seen_at
? (isZh ? `最近心跳 ${formatNodeLastSeen(node.last_seen_at, locale)}` : `Last seen ${formatNodeLastSeen(node.last_seen_at, locale)}`)
: (isZh ? '尚未收到节点心跳' : 'No heartbeat received yet.')}
</div>
<div className="platform-node-card-foot">
<span className="platform-node-card-link">{isZh ? '进入节点工作台' : 'Open node workspace'}</span>
<ChevronRight size={16} />
</div>
</button>
))}
{!loading && nodeCards.length === 0 ? (
<div className="ops-bot-list-empty">{isZh ? '暂无已登记节点。' : 'No managed nodes yet.'}</div>
) : null}
</div>
</section>
<section className="panel stack platform-home-management-section">
<div>
<h2>{isZh ? '平台管理' : 'Platform Management'}</h2>
</div>
<div className="platform-entry-grid">
<button className="platform-entry-card" type="button" onClick={() => setShowTemplateManager(true)}>
<FileText size={18} />
<strong>{isZh ? '模版管理' : 'Template Management'}</strong>
<span>{isZh ? '统一维护默认提示词模版和 Topic 预设。' : 'Manage default prompts and topic presets.'}</span>
</button>
<button className="platform-entry-card" type="button" onClick={() => setShowPlatformSettings(true)}>
<Settings2 size={18} />
<strong>{isZh ? '平台参数' : 'Platform Settings'}</strong>
<span>{isZh ? '统一管理平台级参数。' : 'Manage platform-level settings.'}</span>
</button>
<button className="platform-entry-card" type="button" onClick={() => navigatePlatform('/dashboard/skills')}>
<Hammer size={18} />
<strong>{isZh ? '技能市场' : 'Skill Marketplace'}</strong>
<span>{isZh ? '管理技能包元数据,并为节点下 Bot 提供统一安装源。' : 'Manage marketplace metadata and provide a unified install source for bots.'}</span>
</button>
<button className="platform-entry-card" type="button" onClick={() => setShowDeployModal(true)}>
<ArrowRightLeft size={18} />
<strong>{isZh ? '迁移 / 部署' : 'Deploy / Migrate'}</strong>
<span>{isZh ? '对 Bot 执行跨节点迁移、镜像切换和显式 redeploy。' : 'Run explicit cross-node migration, image switching, and redeploy actions for bots.'}</span>
</button>
</div>
</section>
</div>
</section>
</div>
<TemplateManagerModal isZh={isZh} open={showTemplateManager} onClose={() => setShowTemplateManager(false)} />
<PlatformSettingsModal
isZh={isZh}
open={showPlatformSettings}
onClose={() => setShowPlatformSettings(false)}
onSaved={(settings) => {
setOverview((prev) => (prev ? { ...prev, settings } : prev));
const normalizedPageSize = normalizePlatformPageSize(settings.page_size, 10);
writeCachedPlatformPageSize(normalizedPageSize);
}}
/>
<BotDeployModal
open={showDeployModal}
isZh={isZh}
bots={bots}
nodes={nodes}
onClose={() => setShowDeployModal(false)}
onApplied={loadAll}
/>
{showNodeEditor ? (
<div className="modal-mask" onClick={() => setShowNodeEditor(false)}>
<div className="modal-card app-modal-card platform-node-editor" onClick={(event) => event.stopPropagation()}>
<div className="modal-title-row modal-title-with-close">
<div className="modal-title-main">
<h3>{nodeEditorMode === 'edit' ? (isZh ? '编辑节点' : 'Edit Node') : (isZh ? '新增节点' : 'Add Node')}</h3>
</div>
<div className="modal-title-actions">
<LucentIconButton className="btn btn-secondary btn-sm icon-btn" onClick={() => setShowNodeEditor(false)} tooltip={isZh ? '关闭' : 'Close'} aria-label={isZh ? '关闭' : 'Close'}>
<X size={14} />
</LucentIconButton>
</div>
</div>
<div className="platform-node-editor-grid">
<label className="field">
<span className="field-label">{isZh ? '节点 ID' : 'Node ID'}</span>
<input
className="input mono"
value={nodeDraft.node_id}
disabled={nodeEditorMode === 'edit'}
onChange={(event) => setNodeDraft((prev) => ({ ...prev, node_id: event.target.value.trim().toLowerCase() }))}
placeholder={isZh ? '例如 remote-sh-01' : 'For example: remote-sh-01'}
/>
</label>
<label className="field">
<span className="field-label">{isZh ? '显示名称' : 'Display Name'}</span>
<input
className="input"
value={nodeDraft.display_name}
onChange={(event) => setNodeDraft((prev) => ({ ...prev, display_name: event.target.value }))}
placeholder={isZh ? '例如 上海生产节点' : 'For example: Shanghai production node'}
/>
</label>
<label className="field">
<span className="field-label">{isZh ? '传输模式' : 'Transport'}</span>
<select className="input" value={nodeDraft.transport_kind} disabled>
<option value="edge">edge</option>
</select>
<div className="field-label">
{isZh ? '当前版本仅支持 edge 节点。' : 'Only edge nodes are supported in this version.'}
</div>
</label>
<label className="field">
<span className="field-label">{isZh ? '默认运行时' : 'Default Runtime'}</span>
<select className="input" value={nodeDraft.runtime_kind} onChange={(event) => setNodeDraft((prev) => ({ ...prev, runtime_kind: event.target.value }))}>
<option value="docker">docker</option>
<option value="native">native</option>
</select>
<div className="field-label">
{isZh
? '说明:节点实际支持的运行时以 edge 上报能力为准,这里只作为默认目标/兼容回退值。'
: 'Note: the node runtime reported by edge is authoritative. This field is only used as the default target and fallback value.'}
</div>
</label>
<label className="field">
<span className="field-label">{isZh ? '核心适配器' : 'Core Adapter'}</span>
<input
className="input mono"
value={nodeDraft.core_adapter}
onChange={(event) => setNodeDraft((prev) => ({ ...prev, core_adapter: event.target.value.trim().toLowerCase() }))}
placeholder="nanobot"
/>
</label>
<label className="field platform-node-editor-span-2">
<span className="field-label">{isZh ? 'Bot 工作区根目录' : 'Bot Workspace Root'}</span>
<input
className="input mono"
value={nodeDraft.workspace_root}
onChange={(event) => setNodeDraft((prev) => ({ ...prev, workspace_root: event.target.value }))}
placeholder={isZh ? '例如: /data/nanobot/workspace' : 'For example: /data/nanobot/workspace'}
/>
<div className="field-label">
{isZh
? '该目录将同时用于 docker/native Bot 工作区。留空时由 edge 使用节点默认工作区。'
: 'This root is used by both docker and native bot workspaces. Leave empty to use edge default workspace root.'}
</div>
</label>
<label className="field field-checkbox">
<span className="field-label">{isZh ? '节点启用' : 'Enabled'}</span>
<label className="check-row">
<input
type="checkbox"
checked={nodeDraft.enabled}
onChange={(event) => setNodeDraft((prev) => ({ ...prev, enabled: event.target.checked }))}
/>
<span>{nodeDraft.enabled ? (isZh ? '已启用' : 'Enabled') : (isZh ? '已停用' : 'Disabled')}</span>
</label>
</label>
</div>
<section className="platform-node-native-panel">
<div className="platform-node-native-panel-title">{isZh ? '节点连接配置' : 'Node Connectivity'}</div>
<div className="platform-node-native-panel-grid">
<label className="field">
<span className="field-label">{isZh ? 'dashboard-edge 地址' : 'dashboard-edge Base URL'}</span>
<input
className="input mono"
value={nodeDraft.base_url}
onChange={(event) => {
setNodeTestResult(null);
setNativeTestResult(null);
setNodeDraft((prev) => ({ ...prev, base_url: event.target.value.trim() }));
}}
placeholder="http://127.0.0.1:8010"
/>
</label>
<label className="field">
<span className="field-label">{isZh ? '访问 Token' : 'Auth Token'}</span>
<input
className="input mono"
type="password"
value={nodeDraft.auth_token}
onChange={(event) => {
setNodeTestResult(null);
setNativeTestResult(null);
setNodeDraft((prev) => ({ ...prev, auth_token: event.target.value }));
}}
placeholder={isZh ? '可选,和 edge 端保持一致' : 'Optional, must match dashboard-edge'}
/>
<div className="field-label">
{isZh
? '用于请求头 x-dashboard-edge-token需与 edge 端配置保持一致;未启用鉴权可留空。'
: 'Used as x-dashboard-edge-token request header. Keep it consistent with edge auth; leave empty when auth is disabled.'}
</div>
</label>
</div>
<div className="platform-node-editor-actions">
<button
className="btn btn-secondary"
type="button"
onClick={() => void handleTestNode(nodeDraft)}
disabled={testingNode || savingNode || testingNative}
>
<Wifi size={14} />
<span style={{ marginLeft: 6 }}>{testingNode ? (isZh ? '测试中...' : 'Testing...') : (isZh ? '测试连通性' : 'Test Connectivity')}</span>
</button>
</div>
{nodeTestResult ? (
<div className={`platform-node-test-result ${nodeTestResult.ok ? 'is-ok' : 'is-error'}`}>
<div className="platform-node-test-result-head">
<strong>{nodeTestResult.ok ? (isZh ? '节点探活成功' : 'Node reachable') : (isZh ? '节点探活失败' : 'Node unreachable')}</strong>
<span>{nodeTestResult.latency_ms} ms</span>
</div>
<div>{nodeTestResult.detail}</div>
{nodeTestResult.node_self?.display_name ? (
<div className="platform-node-test-result-meta">
<span className="mono">{nodeTestResult.node_self.display_name}</span>
<span> · </span>
<span className="mono">{nodeTestResult.node_self.service || 'dashboard-edge'}</span>
</div>
) : null}
</div>
) : null}
</section>
<section className="platform-node-native-panel">
<div className="platform-node-native-panel-title">{isZh ? 'Native Launcher 配置' : 'Native Launcher Configuration'}</div>
<div className="platform-node-native-panel-grid">
<label className="field">
<span className="field-label">{isZh ? 'Native Launcher 命令' : 'Native Launcher Command'}</span>
<input
className="input mono"
value={nodeDraft.native_command}
onChange={(event) => {
setNativeTestResult(null);
setNodeDraft((prev) => ({ ...prev, native_command: event.target.value }));
}}
placeholder={isZh ? '例如: /path/to/python -m nanobot.cli.commands gateway' : 'For example: /path/to/python -m nanobot.cli.commands gateway'}
/>
</label>
<label className="field">
<span className="field-label">{isZh ? 'Native Launcher 工作目录' : 'Native Launcher Workdir'}</span>
<input
className="input mono"
value={nodeDraft.native_workdir}
onChange={(event) => {
setNativeTestResult(null);
setNodeDraft((prev) => ({ ...prev, native_workdir: event.target.value }));
}}
placeholder={isZh ? '例如: /data/nanobot/workspace' : 'For example: /data/nanobot/workspace'}
/>
</label>
<label className="field">
<span className="field-label">{isZh ? 'Native 执行沙箱' : 'Native Sandbox Mode'}</span>
<select
className="input"
value={nodeDraft.native_sandbox_mode}
onChange={(event) => {
setNativeTestResult(null);
setNodeDraft((prev) => ({ ...prev, native_sandbox_mode: event.target.value }));
}}
>
<option value="inherit">{isZh ? '继承当前 Bot 配置' : 'Inherit bot config'}</option>
<option value="workspace">{isZh ? '工作区沙箱restrictToWorkspace=true' : 'Workspace sandbox (restrictToWorkspace=true)'}</option>
<option value="full_access">{isZh ? '全权限(突破沙箱)' : 'Full access (escape sandbox)'}</option>
</select>
<div className="field-label">
{isZh
? '该项会在 Bot 同步时写入 nanobot tools.restrictToWorkspace。full_access = falseworkspace = true。'
: 'This is written to nanobot tools.restrictToWorkspace during bot sync. full_access = false, workspace = true.'}
</div>
</label>
</div>
<div className="platform-node-editor-actions">
<button
className="btn btn-secondary"
type="button"
onClick={() => void handleTestNodeNative(nodeDraft)}
disabled={testingNative || savingNode || testingNode}
>
<ServerCog size={14} />
<span style={{ marginLeft: 6 }}>{testingNative ? (isZh ? '校验中...' : 'Checking...') : (isZh ? '测试 Native Launcher' : 'Test Native Launcher')}</span>
</button>
</div>
{nativeTestResult ? (
<div className={`platform-node-test-result ${nativeTestResult.ok ? 'is-ok' : 'is-error'}`}>
<div className="platform-node-test-result-head">
<strong>{nativeTestResult.ok ? (isZh ? 'Native Launcher 可用' : 'Native launcher ready') : (isZh ? 'Native Launcher 校验失败' : 'Native launcher check failed')}</strong>
<span>{nativeTestResult.latency_ms} ms</span>
</div>
<div>{nativeTestResult.detail}</div>
<div className="platform-node-test-result-meta">
<span>{isZh ? 'Runtime(native)' : 'Runtime(native)'}</span>
<span className="mono">{nativeTestResult.runtime_native_supported ? (isZh ? '支持' : 'supported') : (isZh ? '未上报' : 'not reported')}</span>
<span> · </span>
<span>{isZh ? '命令可执行' : 'Command executable'}</span>
<span className="mono">{nativeTestResult.command_available ? (isZh ? '是' : 'yes') : (isZh ? '否' : 'no')}</span>
<span> · </span>
<span>{isZh ? '工作目录存在' : 'Workdir exists'}</span>
<span className="mono">{nativeTestResult.workdir_exists ? (isZh ? '是' : 'yes') : (isZh ? '否' : 'no')}</span>
</div>
<div className="platform-node-test-kv">
<span>{isZh ? '命令' : 'Command'}</span>
<code className="platform-node-test-code mono">{nativeTestResult.command.length > 0 ? nativeTestResult.command.join(' ') : '-'}</code>
</div>
<div className="platform-node-test-kv">
<span>{isZh ? '工作目录' : 'Workdir'}</span>
<code className="platform-node-test-code mono">{nativeTestResult.workdir || '-'}</code>
</div>
</div>
) : null}
</section>
<div className="row-between">
<div />
<div className="platform-node-editor-actions">
<button className="btn btn-primary" type="button" onClick={() => void handleSaveNode()} disabled={savingNode || testingNode || testingNative}>
<ServerCog size={14} />
<span style={{ marginLeft: 6 }}>{savingNode ? (isZh ? '保存中...' : 'Saving...') : (nodeEditorMode === 'edit' ? (isZh ? '保存节点' : 'Save Node') : (isZh ? '创建节点' : 'Create Node'))}</span>
</button>
</div>
</div>
</div>
</div>
) : null}
</>
);
}

View File

@ -0,0 +1,266 @@
import { useEffect, useMemo, useState } from 'react';
import axios from 'axios';
import { Bot, Boxes, Cpu, Gauge, HardDrive, ServerCog, X } from 'lucide-react';
import { APP_ENDPOINTS } from '../../config/env';
import { useAppStore } from '../../store/appStore';
import type { BotState } from '../../types/bot';
import type { ManagedNodeItem, NodeResourcesResponse } from './types';
import { BotDashboardModule } from '../dashboard/BotDashboardModule';
import { ImageFactoryModule } from '../images/ImageFactoryModule';
import { BotWizardModule } from '../onboarding/BotWizardModule';
import { LucentIconButton } from '../../components/lucent/LucentIconButton';
import { nodeRuntimeLabel, nodeSupportsRuntime, supportedNodeRuntimeKinds } from './runtimeSupport';
/** Props for the per-node workspace page. */
interface NodeWorkspacePageProps {
  /** Identifier of the managed node to display (matched case-insensitively against the node list). */
  nodeId: string;
  /** When true, render the page with compact spacing (`is-compact` modifier class). */
  compactMode: boolean;
}
/**
 * Format a byte count as a human-readable string, e.g. "512 B", "1.50 KB", "12.3 MB".
 *
 * Precision scales with magnitude: >= 100 units -> 0 decimals, >= 10 -> 1 decimal,
 * otherwise 2 decimals. Non-finite or non-positive inputs render as "0 B".
 */
function formatBytes(bytes: number) {
  const value = Number(bytes || 0);
  if (!Number.isFinite(value) || value <= 0) return '0 B';
  const units = ['B', 'KB', 'MB', 'GB', 'TB'];
  // Clamp the unit index to [0, units.length - 1]: values in (0, 1) would otherwise
  // yield a negative log-based index and units[index] === undefined.
  const index = Math.min(units.length - 1, Math.max(0, Math.floor(Math.log(value) / Math.log(1024))));
  const sized = value / Math.pow(1024, index);
  return `${sized >= 100 ? sized.toFixed(0) : sized >= 10 ? sized.toFixed(1) : sized.toFixed(2)} ${units[index]}`;
}
/** Render a numeric value as a percentage string with one decimal place, e.g. "12.3%". */
function formatPercent(value: number) {
  const numeric = Number(value || 0);
  return `${numeric.toFixed(1)}%`;
}
/** Clamp a numeric value into the [0, 100] range for use as a meter width. */
function clampPercent(value: number) {
  const numeric = Number(value || 0);
  if (numeric < 0) return 0;
  if (numeric > 100) return 100;
  return numeric;
}
/**
 * Aggregate run-state counts for the bots assigned to the given node.
 * Bots without an explicit node_id are treated as belonging to "local";
 * matching is case-insensitive and whitespace-trimmed on both sides.
 * NOTE(review): a bot that is both RUNNING and disabled is subtracted twice
 * from `stopped`; the Math.max(0, ...) guard absorbs that — confirm intended.
 */
function summarizeNodeBots(nodeId: string, bots: BotState[]) {
  const targetId = String(nodeId || '').trim().toLowerCase();
  const matched = bots.filter((bot) => String(bot.node_id || 'local').trim().toLowerCase() === targetId);
  let running = 0;
  let disabled = 0;
  for (const bot of matched) {
    if (String(bot.docker_status || '').toUpperCase() === 'RUNNING') running += 1;
    if (bot.enabled === false) disabled += 1;
  }
  const total = matched.length;
  return {
    total,
    running,
    stopped: Math.max(0, total - running - disabled),
    disabled,
  };
}
/**
 * Workspace page for a single managed node.
 *
 * Renders three summary cards (node identity, bot run-state counts, and — when
 * the backend reports them — node resource meters), then embeds the bot
 * dashboard scoped to this node. Also hosts the image-factory modal (docker
 * nodes only) and the create-bot wizard modal pre-filled with this node.
 */
export function NodeWorkspacePage({ nodeId, compactMode }: NodeWorkspacePageProps) {
  const locale = useAppStore((state) => state.locale);
  const activeBots = useAppStore((state) => state.activeBots);
  const isZh = locale === 'zh';
  // Full managed-node list; used only to resolve the record addressed by nodeId.
  const [nodes, setNodes] = useState<ManagedNodeItem[]>([]);
  const [showImageFactory, setShowImageFactory] = useState(false);
  const [showCreateWizard, setShowCreateWizard] = useState(false);
  // Node-level bot/resource sampling from the backend; null when unavailable.
  const [nodeResources, setNodeResources] = useState<NodeResourcesResponse | null>(null);
  // Poll node list and node resources every 30s while mounted. The `alive` flag
  // prevents setState after unmount or after nodeId changes mid-request.
  useEffect(() => {
    let alive = true;
    const loadNodes = async () => {
      try {
        const [nodesRes, resourcesRes] = await Promise.all([
          axios.get<{ items?: ManagedNodeItem[] }>(`${APP_ENDPOINTS.apiBase}/platform/nodes`),
          axios.get<NodeResourcesResponse>(`${APP_ENDPOINTS.apiBase}/platform/nodes/${encodeURIComponent(nodeId)}/resources`),
        ]);
        if (!alive) return;
        setNodes(Array.isArray(nodesRes.data?.items) ? nodesRes.data.items : []);
        setNodeResources(resourcesRes.data || null);
      } catch {
        // Best-effort refresh: on failure, clear state instead of surfacing an error.
        if (!alive) return;
        setNodes([]);
        setNodeResources(null);
      }
    };
    void loadNodes();
    const timer = window.setInterval(() => {
      void loadNodes();
    }, 30000);
    return () => {
      alive = false;
      window.clearInterval(timer);
    };
  }, [nodeId]);
  // Resolve the current node record by case-insensitive, trimmed id match.
  const selectedNode = useMemo(
    () => nodes.find((node) => String(node.node_id || '').trim().toLowerCase() === String(nodeId || '').trim().toLowerCase()),
    [nodeId, nodes],
  );
  const nodeBots = useMemo(() => Object.values(activeBots), [activeBots]);
  const isDockerNode = nodeSupportsRuntime(selectedNode, 'docker');
  // Prefer live store data for bot counts; fall back to the backend resource
  // snapshot when the store has no bots for this node yet.
  const nodeBotSummary = useMemo(() => {
    const fromStore = summarizeNodeBots(nodeId, nodeBots);
    if (fromStore.total > 0 || !nodeResources?.bots) return fromStore;
    return {
      total: Number(nodeResources.bots.total || 0),
      running: Number(nodeResources.bots.running || 0),
      stopped: Number(nodeResources.bots.stopped || 0),
      disabled: Number(nodeResources.bots.disabled || 0),
    };
  }, [nodeBots, nodeId, nodeResources]);
  const nodeResourceSummary = nodeResources?.resources;
  // Percent meters are only meaningful when a limit is reported; otherwise 0.
  const memoryPercent = nodeResourceSummary?.live_memory_limit_bytes
    ? clampPercent((nodeResourceSummary.live_memory_used_bytes / nodeResourceSummary.live_memory_limit_bytes) * 100)
    : 0;
  const storagePercent = nodeResourceSummary?.workspace_limit_bytes
    ? clampPercent((nodeResourceSummary.workspace_used_bytes / nodeResourceSummary.workspace_limit_bytes) * 100)
    : 0;
  return (
    <div className={`node-workspace-page ${compactMode ? 'is-compact' : ''}`}>
      <div className="node-workspace-shell">
        <section className="platform-main">
          <div className="platform-summary-grid node-workspace-summary-grid">
            {/* Card 1: node identity, enabled state, transport/runtime/adapter */}
            <section className="panel platform-summary-card node-workspace-summary-card">
              <div className="platform-summary-icon icon-bot"><ServerCog size={18} /></div>
              <div className="platform-summary-label">{isZh ? '当前节点' : 'Current Node'}</div>
              <div className="node-workspace-summary-value">{selectedNode?.display_name || nodeId}</div>
              <div className="mono node-workspace-summary-id">{selectedNode?.node_id || nodeId}</div>
              <div className="node-workspace-chip-row">
                <span className={`badge ${selectedNode?.enabled === false ? 'badge-err' : 'badge-ok'}`}>
                  {selectedNode?.enabled === false ? (isZh ? '已停用' : 'Disabled') : (isZh ? '已启用' : 'Enabled')}
                </span>
                <span className="badge badge-unknown mono">
                  {String(selectedNode?.transport_kind || 'edge')}/{nodeRuntimeLabel(selectedNode)} · {String(selectedNode?.core_adapter || 'nanobot')}
                </span>
              </div>
            </section>
            {/* Card 2: bot run-state counts for this node */}
            <section className="panel platform-summary-card node-workspace-summary-card">
              <div className="platform-summary-icon icon-bot"><Bot size={18} /></div>
              <div className="platform-summary-label">{isZh ? 'Bot 运行概览' : 'Bot Status'}</div>
              <div className="platform-summary-value">{nodeBotSummary.total}</div>
              <div className="platform-summary-meta">
                {isZh
                  ? `活动 ${nodeBotSummary.running} / 停止 ${nodeBotSummary.stopped} / 停用 ${nodeBotSummary.disabled}`
                  : `Running ${nodeBotSummary.running} / Stopped ${nodeBotSummary.stopped} / Disabled ${nodeBotSummary.disabled}`}
              </div>
            </section>
            {/* Card 3: CPU / memory / storage meters, shown only when sampling is available */}
            <section className="panel platform-summary-card platform-resource-card node-workspace-resource-card">
              <div className="platform-resource-head">
                <div className="platform-summary-icon icon-resource"><Boxes size={18} /></div>
                <div>
                  <div className="platform-summary-label">{isZh ? '服务器资源' : 'Server Resources'}</div>
                  <div className="platform-resource-subtitle">
                    {nodeResourceSummary
                      ? (isZh
                        ? `CPU 配额 ${nodeResourceSummary.configured_cpu_cores || 0} · 内存 ${formatBytes(nodeResourceSummary.live_memory_limit_bytes || 0)}`
                        : `CPU quota ${nodeResourceSummary.configured_cpu_cores || 0} · Memory ${formatBytes(nodeResourceSummary.live_memory_limit_bytes || 0)}`)
                      : (isZh ? '节点级资源采样待接入' : 'Node-level resource metrics pending')}
                  </div>
                </div>
              </div>
              {nodeResourceSummary ? (
                <>
                  <div className="platform-resource-meters">
                    <div className="platform-resource-meter">
                      <div className="platform-resource-meter-label">
                        <Cpu size={16} />
                      </div>
                      <div className="platform-resource-meter-track">
                        <div className="platform-resource-meter-fill" style={{ width: `${clampPercent(nodeResourceSummary.live_cpu_percent)}%` }} />
                      </div>
                      <div className="platform-resource-meter-value">{formatPercent(nodeResourceSummary.live_cpu_percent)}</div>
                    </div>
                    <div className="platform-resource-meter">
                      <div className="platform-resource-meter-label">
                        <Gauge size={16} />
                      </div>
                      <div className="platform-resource-meter-track">
                        <div className="platform-resource-meter-fill is-memory" style={{ width: `${memoryPercent}%` }} />
                      </div>
                      <div className="platform-resource-meter-value">{formatPercent(memoryPercent)}</div>
                    </div>
                    <div className="platform-resource-meter">
                      <div className="platform-resource-meter-label">
                        <HardDrive size={16} />
                      </div>
                      <div className="platform-resource-meter-track">
                        <div className="platform-resource-meter-fill is-storage" style={{ width: `${storagePercent}%` }} />
                      </div>
                      <div className="platform-resource-meter-value">{formatPercent(storagePercent)}</div>
                    </div>
                  </div>
                  <div className="platform-resource-footnote">
                    {isZh
                      ? `内存 ${formatBytes(nodeResourceSummary.live_memory_used_bytes || 0)} / ${formatBytes(nodeResourceSummary.live_memory_limit_bytes || 0)} · 存储 ${formatBytes(nodeResourceSummary.workspace_used_bytes || 0)} / ${formatBytes(nodeResourceSummary.workspace_limit_bytes || 0)}`
                      : `Memory ${formatBytes(nodeResourceSummary.live_memory_used_bytes || 0)} / ${formatBytes(nodeResourceSummary.live_memory_limit_bytes || 0)} · Storage ${formatBytes(nodeResourceSummary.workspace_used_bytes || 0)} / ${formatBytes(nodeResourceSummary.workspace_limit_bytes || 0)}`}
                  </div>
                </>
              ) : (
                <div className="platform-resource-footnote">
                  {isZh
                    ? '当前节点的资源采样能力暂未接入。后续会通过对应 dashboard-edge 返回节点级资源视图。'
                    : 'Node-level resource sampling is not available yet for this node.'}
                </div>
              )}
            </section>
          </div>
        </section>
      </div>
      {/* Bot dashboard hard-scoped to this node; image factory only for docker nodes */}
      <div className="node-workspace-content-shell">
        <BotDashboardModule
          forcedNodeId={nodeId}
          compactMode={compactMode}
          onOpenCreateWizard={() => setShowCreateWizard(true)}
          onOpenImageFactory={isDockerNode ? (() => setShowImageFactory(true)) : undefined}
        />
      </div>
      {showImageFactory && isDockerNode ? (
        <div className="modal-mask app-modal-mask" onClick={() => setShowImageFactory(false)}>
          <div className="modal-card app-modal-card" onClick={(event) => event.stopPropagation()}>
            <div className="modal-title-row modal-title-with-close">
              <div className="modal-title-main">
                <h3>{isZh ? '节点镜像管理' : 'Node Image Management'}</h3>
                <span className="modal-sub">{selectedNode?.display_name || nodeId}</span>
              </div>
              <div className="modal-title-actions">
                <LucentIconButton className="btn btn-secondary btn-sm icon-btn" onClick={() => setShowImageFactory(false)} tooltip={isZh ? '关闭' : 'Close'} aria-label={isZh ? '关闭' : 'Close'}>
                  <X size={14} />
                </LucentIconButton>
              </div>
            </div>
            <div className="app-modal-body">
              <ImageFactoryModule />
            </div>
          </div>
        </div>
      ) : null}
      {showCreateWizard ? (
        <div className="modal-mask app-modal-mask" onClick={() => setShowCreateWizard(false)}>
          <div className="modal-card app-modal-card" onClick={(event) => event.stopPropagation()}>
            <div className="modal-title-row modal-title-with-close">
              <div className="modal-title-main">
                <h3>{isZh ? '在当前节点创建 Bot' : 'Create Bot In Current Node'}</h3>
                <span className="modal-sub">{selectedNode?.display_name || nodeId}</span>
              </div>
              <div className="modal-title-actions">
                <LucentIconButton className="btn btn-secondary btn-sm icon-btn" onClick={() => setShowCreateWizard(false)} tooltip={isZh ? '关闭' : 'Close'} aria-label={isZh ? '关闭' : 'Close'}>
                  <X size={14} />
                </LucentIconButton>
              </div>
            </div>
            <div className="app-modal-body">
              {/* Wizard is pre-seeded with this node's identity, runtime options, and capabilities */}
              <BotWizardModule
                initialNodeId={selectedNode?.node_id || nodeId}
                initialNodeDisplayName={selectedNode?.display_name || nodeId}
                initialTransportKind={selectedNode?.transport_kind || 'edge'}
                initialRuntimeKind={selectedNode?.runtime_kind || 'docker'}
                initialRuntimeOptions={supportedNodeRuntimeKinds(selectedNode)}
                initialNodeCapabilities={selectedNode?.capabilities}
                initialNodeMetadata={selectedNode?.metadata}
                initialCoreAdapter={selectedNode?.core_adapter || 'nanobot'}
                onCreated={() => setShowCreateWizard(false)}
                onGoDashboard={() => setShowCreateWizard(false)}
              />
            </div>
          </div>
        </div>
      ) : null}
    </div>
  );
}

View File

@ -1,6 +1,7 @@
import { useEffect, useMemo, useRef, useState } from 'react';
import axios from 'axios';
import {
ArrowRightLeft,
Bot,
Boxes,
ChevronLeft,
@ -143,6 +144,7 @@ export function PlatformDashboardPage({ compactMode }: PlatformDashboardPageProp
const [showCreateWizard, setShowCreateWizard] = useState(false);
const [showTemplateManager, setShowTemplateManager] = useState(false);
const [showPlatformSettings, setShowPlatformSettings] = useState(false);
const [showMigrationDeployGuide, setShowMigrationDeployGuide] = useState(false);
const [showBotLastActionModal, setShowBotLastActionModal] = useState(false);
const [showResourceModal, setShowResourceModal] = useState(false);
const [selectedBotDetail, setSelectedBotDetail] = useState<BotState | null>(null);
@ -637,6 +639,11 @@ export function PlatformDashboardPage({ compactMode }: PlatformDashboardPageProp
<strong>{isZh ? '技能市场' : 'Skill Marketplace'}</strong>
<span>{isZh ? '管理技能包元数据,并给 Bot 技能面板提供一键安装源。' : 'Manage marketplace metadata and provide one-click installs to bot skill panels.'}</span>
</button>
<button className="platform-entry-card" type="button" onClick={() => setShowMigrationDeployGuide(true)}>
<ArrowRightLeft size={18} />
<strong>{isZh ? '迁移 / 部署' : 'Deploy / Migrate'}</strong>
<span>{isZh ? 'Bot 跨节点迁移、重建和部署的专用入口(设计中)。' : 'Dedicated entry for cross-node migration, rebuild, and deployment (design stage).'}</span>
</button>
</div>
</section>
);
@ -1017,6 +1024,43 @@ export function PlatformDashboardPage({ compactMode }: PlatformDashboardPageProp
setBotListPageSize(normalizedPageSize);
}}
/>
{showMigrationDeployGuide ? (
<div className="modal-mask app-modal-mask" onClick={() => setShowMigrationDeployGuide(false)}>
<div className="modal-card app-modal-card" onClick={(event) => event.stopPropagation()}>
<div className="modal-title-row modal-title-with-close">
<div className="modal-title-main">
<h3>{isZh ? '迁移 / 部署' : 'Deploy / Migrate'}</h3>
</div>
<div className="modal-title-actions">
<LucentIconButton className="btn btn-secondary btn-sm icon-btn" onClick={() => setShowMigrationDeployGuide(false)} tooltip={isZh ? '关闭' : 'Close'} aria-label={isZh ? '关闭' : 'Close'}>
<X size={14} />
</LucentIconButton>
</div>
</div>
<div className="card stack">
<div className="section-mini-title">{isZh ? '设计边界' : 'Design Scope'}</div>
<div className="field-label">
{isZh
? '这里会承载 Bot 的跨节点迁移、重建到新节点、镜像/运行时切换等高风险动作。'
: 'This entry will host high-risk operations such as cross-node migration, rebuild to another node, and runtime/image switching.'}
</div>
<div className="field-label">
{isZh
? '这些动作不会放在 Bot 的基础信息里,而会放在独立的部署/迁移流程中,并要求明确确认。'
: 'These actions will not live in Bot basic info; they will be handled in a dedicated deploy/migrate flow with explicit confirmation.'}
</div>
<div className="field-label">
{isZh
? '后续实现会结合 dashboard-edge、workspace 同步和节点心跳一起完成。'
: 'Future implementation will combine dashboard-edge, workspace sync, and node heartbeat together.'}
</div>
</div>
<div className="row-between" style={{ marginTop: 12 }}>
<button className="btn btn-secondary" onClick={() => setShowMigrationDeployGuide(false)}>{isZh ? '知道了' : 'Got it'}</button>
</div>
</div>
</div>
) : null}
{showBotLastActionModal && selectedBotInfo ? (
<div className="modal-mask" onClick={() => setShowBotLastActionModal(false)}>
<div className="modal-card platform-last-action-modal" onClick={(event) => event.stopPropagation()}>

View File

@ -0,0 +1,419 @@
import { useEffect, useMemo, useState } from 'react';
import axios from 'axios';
import { ArrowRightLeft, X } from 'lucide-react';
import { APP_ENDPOINTS } from '../../../config/env';
import type { BotState } from '../../../types/bot';
import type { ManagedNodeItem } from '../types';
import { useLucentPrompt } from '../../../components/lucent/LucentPromptProvider';
import { LucentIconButton } from '../../../components/lucent/LucentIconButton';
import { LucentSelect } from '../../../components/lucent/LucentSelect';
import { nodeRuntimeLabel, nodeSupportsRuntime, supportedNodeRuntimeKinds } from '../runtimeSupport';
// Props for the deploy/migrate modal. `bots` and `nodes` are supplied by the
// caller from already-loaded dashboard state; `initialBotId` preselects a bot.
interface BotDeployModalProps {
  open: boolean; // whether the modal is rendered at all
  isZh: boolean; // UI language flag: true = Chinese, false = English
  bots: BotState[];
  nodes: ManagedNodeItem[];
  initialBotId?: string;
  onClose: () => void;
  onApplied?: () => Promise<void> | void; // awaited after a successful deploy, before the modal closes
}
// One image row as returned by GET {apiBase}/images.
interface NanobotImageItem {
  tag: string; // image tag offered as a deploy target
  version: string;
  status: string; // only rows with status 'READY' are selectable in this modal
}
// Response body of POST {apiBase}/bots/{id}/deploy.
interface BotDeployResponse {
  status: string;
  started?: boolean; // true when the backend auto-started the bot after deployment
  next_target?: {
    node_id?: string;
    node_display_name?: string; // used for the success notification when present
    transport_kind?: string;
    runtime_kind?: string;
    core_adapter?: string;
  };
  image_tag?: string;
}
// Canonical comparison form of a node id: trimmed and lower-cased, with an
// optional fallback used when the value itself is empty/null/undefined.
function normalizeId(value: string | null | undefined, fallback = '') {
  const raw = value || fallback;
  return String(raw).trim().toLowerCase();
}
// Return a new array of bots ordered case-insensitively by "<name> <id>".
// The input array is never mutated.
function sortBots(bots: BotState[]) {
  const sortKey = (bot: BotState) =>
    `${String(bot.name || '').trim()} ${String(bot.id || '').trim()}`.trim().toLowerCase();
  return bots.slice().sort((a, b) => sortKey(a).localeCompare(sortKey(b)));
}
// Return a new array of nodes ordered case-insensitively by
// "<display name or id> <id>". The input array is never mutated.
function sortNodes(nodes: ManagedNodeItem[]) {
  const sortKey = (node: ManagedNodeItem) =>
    `${String(node.display_name || node.node_id || '').trim()} ${String(node.node_id || '').trim()}`.trim().toLowerCase();
  return nodes.slice().sort((a, b) => sortKey(a).localeCompare(sortKey(b)));
}
// One-line node summary: "<display name> · <transport>/<runtime label> · <core adapter>".
// Returns '-' when no node is given.
function targetSummary(node?: ManagedNodeItem | null) {
  if (!node) return '-';
  const title = String(node.display_name || node.node_id || '').trim() || '-';
  const transport = String(node.transport_kind || 'direct');
  const adapter = String(node.core_adapter || 'nanobot');
  return `${title} · ${transport}/${nodeRuntimeLabel(node)} · ${adapter}`;
}
// Modal dialog for the high-risk deploy/migrate flow: move a bot to another
// managed node, switch its runtime kind (docker/native), and/or change its
// base image. Requires the bot to be stopped and asks for explicit
// confirmation before POSTing /bots/{id}/deploy.
export function BotDeployModal({
  open,
  isZh,
  bots,
  nodes,
  initialBotId,
  onClose,
  onApplied,
}: BotDeployModalProps) {
  const { notify, confirm } = useLucentPrompt();
  // Form state for the selected bot and the deploy target.
  const [selectedBotId, setSelectedBotId] = useState('');
  const [targetNodeId, setTargetNodeId] = useState('');
  const [targetRuntimeKind, setTargetRuntimeKind] = useState('docker');
  const [imageTag, setImageTag] = useState('');
  const [autoStart, setAutoStart] = useState(false);
  // Image list fetched from the backend while the modal is open.
  const [images, setImages] = useState<NanobotImageItem[]>([]);
  const [imageLoadFailed, setImageLoadFailed] = useState(false);
  const [loadingImages, setLoadingImages] = useState(false);
  const [submitting, setSubmitting] = useState(false);
  const sortedBots = useMemo(() => sortBots(bots), [bots]);
  const sortedNodes = useMemo(() => sortNodes(nodes), [nodes]);
  const selectedBot = useMemo(
    () => sortedBots.find((bot) => String(bot.id || '').trim() === String(selectedBotId || '').trim()),
    [selectedBotId, sortedBots],
  );
  // Node the bot currently lives on; bots without a node_id default to 'local'.
  const currentNode = useMemo(
    () => sortedNodes.find((node) => normalizeId(node.node_id) === normalizeId(selectedBot?.node_id, 'local')),
    [selectedBot?.node_id, sortedNodes],
  );
  const targetNode = useMemo(
    () => sortedNodes.find((node) => normalizeId(node.node_id) === normalizeId(targetNodeId)),
    [sortedNodes, targetNodeId],
  );
  // READY images only; the bot's current tag stays selectable even when the
  // factory list no longer contains it.
  const readyImages = useMemo(() => {
    const rows = images.filter((item) => String(item.status || '').trim().toUpperCase() === 'READY');
    const currentTag = String(selectedBot?.image_tag || '').trim();
    if (currentTag && !rows.some((item) => item.tag === currentTag)) {
      return [{ tag: currentTag, version: 'current', status: 'READY' }, ...rows];
    }
    return rows;
  }, [images, selectedBot?.image_tag]);
  // When the bot's node is not in the managed list, synthesize a summary from
  // the bot's own deployment fields.
  const currentNodeLabel = targetSummary(currentNode || {
    node_id: String(selectedBot?.node_id || 'local'),
    display_name: String(selectedBot?.node_display_name || selectedBot?.node_id || 'local'),
    transport_kind: String(selectedBot?.transport_kind || 'direct'),
    runtime_kind: String(selectedBot?.runtime_kind || 'docker'),
    core_adapter: String(selectedBot?.core_adapter || 'nanobot'),
    enabled: true,
  });
  const targetNodeLabel = targetNode
    ? `${String(targetNode.display_name || targetNode.node_id || '').trim() || '-'} · ${String(targetNode.transport_kind || 'direct')}/${String(targetRuntimeKind || 'docker')} · ${String(targetNode.core_adapter || 'nanobot')}`
    : '-';
  // Derived submit gating: docker targets must pick an image; native targets
  // simply carry the current image record over.
  const isDockerTarget = targetRuntimeKind === 'docker';
  const currentImageTag = String(selectedBot?.image_tag || '').trim();
  const nextImageTag = isDockerTarget ? String(imageTag || '').trim() : currentImageTag;
  const nodeChanged = normalizeId(targetNodeId) !== normalizeId(selectedBot?.node_id, 'local');
  const runtimeChanged = String(targetRuntimeKind || 'docker').trim().toLowerCase() !== String(selectedBot?.runtime_kind || 'docker').trim().toLowerCase();
  const imageChanged = isDockerTarget && nextImageTag !== currentImageTag;
  const hasDeployChange = Boolean(selectedBot) && (nodeChanged || runtimeChanged || imageChanged);
  const botRunning = String(selectedBot?.docker_status || '').trim().toUpperCase() === 'RUNNING';
  const canSubmit = Boolean(selectedBot) && Boolean(targetNode) && hasDeployChange && !botRunning && (!isDockerTarget || Boolean(nextImageTag)) && !submitting;
  // On open: pick the requested bot (or the first one) and reset auto-start.
  useEffect(() => {
    if (!open) return;
    const nextBotId = String(initialBotId || sortedBots[0]?.id || '').trim();
    setSelectedBotId(nextBotId);
    setAutoStart(false);
  }, [initialBotId, open, sortedBots]);
  // When the selected bot changes, seed the form from its current deployment.
  useEffect(() => {
    if (!open || !selectedBot) return;
    setTargetNodeId(String(selectedBot.node_id || 'local').trim().toLowerCase() || 'local');
    setTargetRuntimeKind(String(selectedBot.runtime_kind || 'docker').trim().toLowerCase() || 'docker');
    setImageTag(String(selectedBot.image_tag || '').trim());
    setAutoStart(false);
  }, [open, selectedBot]);
  // Keep the runtime choice valid for the chosen target node.
  useEffect(() => {
    if (!targetNode) return;
    if (nodeSupportsRuntime(targetNode, targetRuntimeKind)) return;
    setTargetRuntimeKind(supportedNodeRuntimeKinds(targetNode)[0] || 'docker');
  }, [targetNode, targetRuntimeKind]);
  // Fetch the image list each time the modal opens; `alive` guards against
  // state updates after unmount/close.
  useEffect(() => {
    if (!open) return;
    let alive = true;
    setLoadingImages(true);
    setImageLoadFailed(false);
    void axios
      .get<NanobotImageItem[]>(`${APP_ENDPOINTS.apiBase}/images`)
      .then((res) => {
        if (!alive) return;
        setImages(Array.isArray(res.data) ? res.data : []);
      })
      .catch(() => {
        if (!alive) return;
        setImages([]);
        setImageLoadFailed(true);
      })
      .finally(() => {
        if (!alive) return;
        setLoadingImages(false);
      });
    return () => {
      alive = false;
    };
  }, [open]);
  // Default the image selection: current tag first, else first READY image.
  useEffect(() => {
    if (!open || !isDockerTarget) return;
    if (String(imageTag || '').trim()) return;
    if (currentImageTag) {
      setImageTag(currentImageTag);
      return;
    }
    if (readyImages[0]?.tag) {
      setImageTag(String(readyImages[0].tag || '').trim());
    }
  }, [currentImageTag, imageTag, isDockerTarget, open, readyImages]);
  // Validate, confirm with the operator, then POST the deploy request.
  const handleSubmit = async () => {
    if (!selectedBot || !targetNode) return;
    if (botRunning) {
      notify(isZh ? '请先停止 Bot再执行迁移或重部署。' : 'Stop the bot before deploy or migrate.', { tone: 'warning' });
      return;
    }
    if (!hasDeployChange) {
      notify(isZh ? '当前没有需要应用的部署变更。' : 'No deploy changes to apply.', { tone: 'warning' });
      return;
    }
    if (isDockerTarget && !nextImageTag) {
      notify(isZh ? 'Docker 目标节点必须选择一个 READY 镜像。' : 'Choose a READY image for a Docker target node.', { tone: 'warning' });
      return;
    }
    const ok = await confirm({
      title: isZh ? '确认迁移 / 部署' : 'Confirm Deploy / Migrate',
      message: isZh
        ? [
          `${selectedBot.name || selectedBot.id} (${selectedBot.id})`,
          `当前: ${currentNodeLabel}`,
          `目标: ${targetNodeLabel}`,
          isDockerTarget ? `镜像: ${nextImageTag}` : '目标运行时为 Native本次不要求 Docker 镜像。',
          autoStart ? '完成后自动启动。' : '完成后保持停止状态。',
        ].join('\n')
        : [
          `${selectedBot.name || selectedBot.id} (${selectedBot.id})`,
          `Current: ${currentNodeLabel}`,
          `Target: ${targetNodeLabel}`,
          isDockerTarget ? `Image: ${nextImageTag}` : 'Target runtime is Native, so no Docker image is required.',
          autoStart ? 'Start automatically after deployment.' : 'Keep the bot stopped after deployment.',
        ].join('\n'),
      tone: 'warning',
      confirmText: isZh ? '执行部署' : 'Deploy',
    });
    if (!ok) return;
    setSubmitting(true);
    try {
      const res = await axios.post<BotDeployResponse>(`${APP_ENDPOINTS.apiBase}/bots/${encodeURIComponent(selectedBot.id)}/deploy`, {
        node_id: String(targetNode.node_id || '').trim().toLowerCase(),
        runtime_kind: targetRuntimeKind,
        image_tag: isDockerTarget ? nextImageTag : undefined,
        auto_start: autoStart,
      });
      // Refresh the caller's state before closing, then surface the result.
      await onApplied?.();
      onClose();
      const displayName = res.data?.next_target?.node_display_name || targetNode.display_name || targetNode.node_id;
      notify(
        isZh
          ? `${selectedBot.id} 已部署到 ${displayName}${res.data?.started ? ',并已自动启动。' : '。'}`
          : `${selectedBot.id} deployed to ${displayName}${res.data?.started ? ' and started automatically.' : '.'}`,
        { tone: 'success' },
      );
    } catch (error: any) {
      notify(error?.response?.data?.detail || (isZh ? '执行部署失败。' : 'Failed to deploy bot.'), { tone: 'error' });
    } finally {
      setSubmitting(false);
    }
  };
  if (!open) return null;
  return (
    <div className="modal-mask app-modal-mask" onClick={onClose}>
      <div className="modal-card app-modal-card platform-node-editor" onClick={(event) => event.stopPropagation()}>
        <div className="modal-title-row modal-title-with-close">
          <div className="modal-title-main">
            <h3>{isZh ? '迁移 / 部署' : 'Deploy / Migrate'}</h3>
            <span className="modal-sub">
              {selectedBot ? `${selectedBot.name || selectedBot.id} · ${selectedBot.id}` : (isZh ? '选择一个 Bot 开始操作' : 'Choose a bot to continue')}
            </span>
          </div>
          <div className="modal-title-actions">
            <LucentIconButton className="btn btn-secondary btn-sm icon-btn" onClick={onClose} tooltip={isZh ? '关闭' : 'Close'} aria-label={isZh ? '关闭' : 'Close'}>
              <X size={14} />
            </LucentIconButton>
          </div>
        </div>
        {sortedBots.length === 0 ? (
          <div className="card stack">
            <div className="section-mini-title">{isZh ? '暂无 Bot' : 'No Bots Yet'}</div>
            <div className="field-label">
              {isZh ? '当前还没有可迁移的 Bot。请先在任意节点创建一个 Bot。' : 'There are no bots available for deploy or migrate yet. Create a bot first.'}
            </div>
          </div>
        ) : (
          <>
            <div className="platform-node-editor-grid">
              <label className="field platform-node-editor-span-2">
                <span className="field-label">{isZh ? '目标 Bot' : 'Target Bot'}</span>
                <LucentSelect value={selectedBotId} onChange={(event) => setSelectedBotId(event.target.value)}>
                  {sortedBots.map((bot) => (
                    <option key={bot.id} value={bot.id}>
                      {`${bot.name || bot.id} (${bot.id})`}
                    </option>
                  ))}
                </LucentSelect>
              </label>
              <div className="card stack">
                <div className="section-mini-title">{isZh ? '当前部署' : 'Current Deployment'}</div>
                <div className="platform-node-current-target">{currentNodeLabel}</div>
                <div className="field-label">
                  {isZh ? '镜像' : 'Image'}: <span className="mono">{currentImageTag || '-'}</span>
                </div>
                <div className="field-label">
                  {isZh ? '状态' : 'Status'}: <span className="mono">{selectedBot?.docker_status || 'STOPPED'}</span>
                </div>
              </div>
              <div className="card stack">
                <div className="section-mini-title">{isZh ? '目标部署' : 'Target Deployment'}</div>
                <div className="platform-node-current-target">{targetNodeLabel}</div>
                <div className="field-label">
                  {isZh ? 'Transport 和 core adapter 跟随节点runtime 可按 Bot 单独选择。' : 'Transport and core adapter follow the node, while runtime can be selected per bot.'}
                </div>
                <div className="field-label">
                  {isZh ? '工作区会从中心控制面重新同步到目标节点。' : 'Workspace files will be re-synced from the central control plane to the target node.'}
                </div>
              </div>
              <label className="field">
                <span className="field-label">{isZh ? '目标节点' : 'Target Node'}</span>
                <LucentSelect value={targetNodeId} onChange={(event) => setTargetNodeId(event.target.value)}>
                  {sortedNodes.map((node) => (
                    <option key={node.node_id} value={String(node.node_id || '').trim().toLowerCase()} disabled={node.enabled === false}>
                      {`${node.display_name || node.node_id} (${node.node_id})`}
                    </option>
                  ))}
                </LucentSelect>
              </label>
              <label className="field">
                <span className="field-label">{isZh ? '执行模式' : 'Execution Mode'}</span>
                <input
                  className="input mono"
                  value={targetNode ? `${String(targetNode.transport_kind || 'direct')}/${nodeRuntimeLabel(targetNode)} · ${String(targetNode.core_adapter || 'nanobot')}` : ''}
                  disabled
                />
              </label>
              <label className="field">
                <span className="field-label">{isZh ? '目标运行时' : 'Target Runtime'}</span>
                <LucentSelect value={targetRuntimeKind} onChange={(event) => setTargetRuntimeKind(String(event.target.value || 'docker').trim().toLowerCase() || 'docker')} disabled={!targetNode}>
                  {(targetNode ? supportedNodeRuntimeKinds(targetNode) : ['docker']).map((runtimeKind) => (
                    <option key={runtimeKind} value={runtimeKind}>
                      {runtimeKind}
                    </option>
                  ))}
                </LucentSelect>
              </label>
              <label className="field platform-node-editor-span-2">
                <span className="field-label">{isZh ? '基础镜像' : 'Base Image'}</span>
                {isDockerTarget ? (
                  <LucentSelect value={imageTag} onChange={(event) => setImageTag(event.target.value)} disabled={loadingImages}>
                    {readyImages.map((item) => (
                      <option key={item.tag} value={item.tag}>
                        {`${item.tag}${item.version ? ` · ${item.version}` : ''}`}
                      </option>
                    ))}
                  </LucentSelect>
                ) : (
                  <div className="platform-node-direct-note">
                    {isZh
                      ? `目标节点为 Native 运行时,本次沿用当前镜像记录 ${currentImageTag || '-'}`
                      : `The target node uses Native runtime, so the current image record ${currentImageTag || '-'} will be kept.`}
                  </div>
                )}
                {isDockerTarget ? (
                  <div className="field-label">
                    {loadingImages
                      ? (isZh ? '正在读取 READY 镜像列表...' : 'Loading READY images...')
                      : imageLoadFailed
                        ? (isZh ? '镜像列表读取失败,建议先检查镜像工厂。' : 'Failed to load image list. Check the image factory first.')
                        : (isZh ? '这里只显示 READY 镜像,基础镜像切换不再放在 Bot 基础编辑里。' : 'Only READY images are shown here. Base image changes are handled in this dedicated flow now.')}
                  </div>
                ) : null}
              </label>
              <div className="field platform-node-editor-span-2">
                <span className="field-label">{isZh ? '启动策略' : 'Start Policy'}</span>
                <label className="check-row">
                  <input type="checkbox" checked={autoStart} onChange={(event) => setAutoStart(event.target.checked)} />
                  <span>{isZh ? '部署完成后自动启动' : 'Start automatically after deployment'}</span>
                </label>
              </div>
            </div>
            {botRunning ? (
              <div className="platform-node-test-result is-error" style={{ marginTop: 12 }}>
                <div className="platform-node-test-result-head">
                  <ArrowRightLeft size={16} />
                  <strong>{isZh ? '当前 Bot 正在运行' : 'Bot Is Currently Running'}</strong>
                </div>
                <div>{isZh ? '为了避免源节点和目标节点状态漂移,请先停止该 Bot再执行迁移或重部署。' : 'Stop this bot first so source and target runtimes do not drift during migration.'}</div>
              </div>
            ) : null}
            {!hasDeployChange ? (
              <div className="platform-node-direct-note" style={{ marginTop: 12 }}>
                {isZh ? '当前目标节点和镜像都没有变化。若只是想启动 Bot请直接使用启动操作。' : 'The target node and image are unchanged. If you only want to start the bot, use the regular start action.'}
              </div>
            ) : null}
            <div className="row-between" style={{ marginTop: 12 }}>
              <div className="field-label">
                {isZh
                  ? '高风险动作会要求 Bot 先停止,并通过中心工作区重新下发到目标节点。'
                  : 'High-risk actions require the bot to be stopped and re-sync the workspace from the central control plane.'}
              </div>
              <div className="platform-node-editor-actions">
                <button className="btn btn-secondary" type="button" onClick={onClose}>{isZh ? '取消' : 'Cancel'}</button>
                <button className="btn btn-primary" type="button" disabled={!canSubmit} onClick={() => void handleSubmit()}>
                  {submitting ? (isZh ? '部署中...' : 'Deploying...') : (isZh ? '执行部署' : 'Deploy')}
                </button>
              </div>
            </div>
          </>
        )}
      </div>
    </div>
  );
}

View File

@ -0,0 +1,27 @@
import type { ManagedNodeItem } from './types';
// Runtime kinds ('docker' / 'native') a managed node can host.
// Explicit capability flags win; otherwise fall back to the node's declared
// runtime_kind. A missing node defaults to ['docker'].
export function supportedNodeRuntimeKinds(node?: ManagedNodeItem | null): string[] {
  if (!node) return ['docker'];
  const capabilityRuntime = node.capabilities?.runtime;
  const detected: string[] = [];
  if (capabilityRuntime && typeof capabilityRuntime === 'object') {
    Object.entries(capabilityRuntime as Record<string, unknown>).forEach(([name, enabled]) => {
      const kind = String(name || '').trim().toLowerCase();
      const isKnownKind = kind === 'docker' || kind === 'native';
      if (enabled === true && isKnownKind && detected.indexOf(kind) === -1) {
        detected.push(kind);
      }
    });
  }
  if (detected.length > 0) return detected;
  // No usable capability info: trust the single declared runtime kind.
  const declared = String(node.runtime_kind || 'docker').trim().toLowerCase();
  return declared === 'native' ? ['native'] : ['docker'];
}
// True when `runtimeKind` (compared case-insensitively) is one of the node's
// supported runtime kinds.
export function nodeSupportsRuntime(node: ManagedNodeItem | null | undefined, runtimeKind: string): boolean {
  const wanted = String(runtimeKind || '').trim().toLowerCase();
  return supportedNodeRuntimeKinds(node).indexOf(wanted) !== -1;
}
// Human-readable runtime label, e.g. 'docker', 'native', or 'docker+native'.
export function nodeRuntimeLabel(node?: ManagedNodeItem | null): string {
  const kinds = supportedNodeRuntimeKinds(node);
  return kinds.join('+');
}

View File

@ -157,3 +157,87 @@ export interface PlatformOverviewResponse {
created_at: string;
}>;
}
// A managed runtime node as returned by the platform nodes API.
export interface ManagedNodeItem {
  node_id: string; // canonical node identifier
  display_name: string;
  base_url?: string;
  enabled: boolean; // disabled nodes are not selectable as deploy targets
  transport_kind?: string;
  runtime_kind?: string; // declared runtime; capabilities.runtime may refine it
  core_adapter?: string;
  workspace_root?: string;
  native_command?: string; // launch command for native runtime — exact semantics defined by the backend
  native_workdir?: string;
  native_sandbox_mode?: string;
  metadata?: Record<string, unknown>;
  capabilities?: Record<string, unknown>; // e.g. capabilities.runtime: { docker: true, native: true }
  last_seen_at?: string | null;
  status?: string;
}
// Editable draft of a managed node; all values kept as strings/booleans for
// straightforward form binding (presumably backing the node editor UI).
export interface ManagedNodeDraft {
  node_id: string;
  display_name: string;
  base_url: string;
  enabled: boolean;
  auth_token: string; // secret; only sent, never echoed back in ManagedNodeItem
  transport_kind: string;
  runtime_kind: string;
  core_adapter: string;
  workspace_root: string;
  native_command: string;
  native_workdir: string;
  native_sandbox_mode: string;
}
// Result of a connectivity probe against a managed node.
export interface ManagedNodeConnectivityResult {
  ok: boolean;
  status: string;
  latency_ms: number; // round-trip time of the probe
  detail: string; // human-readable diagnostic text
  node_self?: {
    node_id?: string;
    display_name?: string;
    service?: string;
    capabilities?: Record<string, unknown>;
  } | null; // identity the node reports about itself, when reachable
}
// Result of a native-runtime preflight check on a managed node: verifies the
// configured command and workdir before allowing native deployments.
export interface ManagedNodeNativePreflightResult {
  ok: boolean;
  status: string;
  latency_ms: number;
  detail: string;
  command: string[]; // resolved native launch command (argv form)
  workdir: string;
  command_available: boolean; // whether the command executable was found on the node
  workdir_exists: boolean;
  runtime_native_supported: boolean;
  node_self?: {
    node_id?: string;
    display_name?: string;
    service?: string;
    capabilities?: Record<string, unknown>;
  } | null;
}
// Per-node resource overview: bot counts plus configured limits and live usage.
export interface NodeResourcesResponse {
  node_id: string;
  bots: {
    total: number;
    running: number;
    stopped: number;
    disabled: number;
  };
  resources: {
    configured_cpu_cores: number; // configured_* are provisioned limits
    configured_memory_bytes: number;
    configured_storage_bytes: number;
    live_cpu_percent: number; // live_* are current measurements
    live_memory_used_bytes: number;
    live_memory_limit_bytes: number;
    workspace_used_bytes: number;
    workspace_limit_bytes: number;
  };
}

View File

@ -26,6 +26,11 @@ export interface BotState {
avatar_skin?: string;
docker_status: string;
image_tag?: string;
node_id?: string;
node_display_name?: string;
transport_kind?: string;
runtime_kind?: string;
core_adapter?: string;
llm_provider?: string;
llm_model?: string;
system_prompt?: string;

View File

@ -2,11 +2,20 @@ import { useEffect, useState } from 'react';
// Discriminated union of client-side routes, produced by parsing the pathname.
export type AppRoute =
  | { kind: 'dashboard' }
  | { kind: 'dashboard-node'; nodeId: string } // /dashboard/nodes/:nodeId
  | { kind: 'dashboard-skills' } // /dashboard/skills
  | { kind: 'bot'; botId: string };
function parsePathname(pathname: string): AppRoute {
const raw = String(pathname || '/').trim() || '/';
const nodeMatch = raw.match(/^\/dashboard\/nodes\/([^/?#]+)/i);
if (nodeMatch?.[1]) {
try {
return { kind: 'dashboard-node', nodeId: decodeURIComponent(nodeMatch[1]).trim() };
} catch {
return { kind: 'dashboard-node', nodeId: String(nodeMatch[1]).trim() };
}
}
if (/^\/dashboard\/skills\/?$/i.test(raw)) {
return { kind: 'dashboard-skills' };
}

View File

@ -2,6 +2,7 @@ import axios from 'axios';
const PANEL_PASSWORD_HEADER = 'X-Panel-Password';
const PANEL_STORAGE_KEY = 'nanobot-panel-access-password';
export const PANEL_AUTH_REQUIRED_EVENT = 'nanobot:panel-auth-required';
let initialized = false;
let memoryPassword = '';
@ -75,4 +76,26 @@ export function setupPanelAccessAuth(): void {
}
return config;
});
axios.interceptors.response.use(
(response) => response,
(error) => {
try {
const status = Number(error?.response?.status || 0);
const detail = String(error?.response?.data?.detail || '').trim();
const requestUrl = String(error?.config?.url || '');
const isPanelAuthApi = requestUrl.includes('/api/panel/auth/');
const isPanelAuthError =
status === 401 && /panel access password/i.test(detail);
if (!isPanelAuthApi && isPanelAuthError && typeof window !== 'undefined') {
clearPanelAccessPassword();
window.dispatchEvent(new CustomEvent(PANEL_AUTH_REQUIRED_EVENT, { detail }));
}
} catch {
// Ignore interceptor side effects and preserve original axios error.
}
return Promise.reject(error);
},
);
}

View File

@ -0,0 +1,30 @@
#!/usr/bin/env bash
# Launch dashboard-edge locally with a native-runtime profile.
# Every EDGE_* variable below is a default and can be overridden by the caller.
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
EDGE_DIR="$ROOT_DIR/dashboard-edge"

if [ ! -d "$EDGE_DIR" ]; then
  echo "dashboard-edge directory not found: $EDGE_DIR" >&2
  exit 1
fi

export EDGE_RUNTIME_KIND="${EDGE_RUNTIME_KIND:-native}"
export EDGE_NODE_ID="${EDGE_NODE_ID:-local-edge-native}"
export EDGE_NODE_NAME="${EDGE_NODE_NAME:-Local Edge Native}"
export EDGE_PORT="${EDGE_PORT:-8011}"
export EDGE_RELOAD="${EDGE_RELOAD:-true}"
export EDGE_LOG_LEVEL="${EDGE_LOG_LEVEL:-warning}"
export EDGE_ACCESS_LOG="${EDGE_ACCESS_LOG:-false}"

cd "$EDGE_DIR"

# Prefer a project virtualenv interpreter when one exists; otherwise use python3.
for interpreter in venv/bin/python .venv/bin/python; do
  if [ -x "$interpreter" ]; then
    exec "$interpreter" main.py
  fi
done
exec python3 main.py

View File

@ -0,0 +1,26 @@
#!/usr/bin/env bash
# Launch dashboard-edge locally with reload enabled and quiet logging.
# EDGE_* variables are defaults only; callers may override them.
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
EDGE_DIR="$ROOT_DIR/dashboard-edge"

if [ ! -d "$EDGE_DIR" ]; then
  echo "dashboard-edge directory not found: $EDGE_DIR" >&2
  exit 1
fi

cd "$EDGE_DIR"

export EDGE_RELOAD="${EDGE_RELOAD:-true}"
export EDGE_LOG_LEVEL="${EDGE_LOG_LEVEL:-warning}"
export EDGE_ACCESS_LOG="${EDGE_ACCESS_LOG:-false}"

# Prefer a project virtualenv interpreter when one exists; otherwise use python3.
for interpreter in venv/bin/python .venv/bin/python; do
  if [ -x "$interpreter" ]; then
    exec "$interpreter" main.py
  fi
done
exec python3 main.py