main
mula.liu 2026-03-03 14:09:11 +08:00
parent 5060c250c0
commit 413a7d6efb
16 changed files with 1488 additions and 199 deletions

View File

@ -29,3 +29,6 @@ NPM_REGISTRY=https://registry.npmmirror.com
# DATABASE_URL=postgresql+psycopg://user:password@127.0.0.1:5432/nanobot_dashboard # DATABASE_URL=postgresql+psycopg://user:password@127.0.0.1:5432/nanobot_dashboard
# DATABASE_URL=mysql+pymysql://user:password@127.0.0.1:3306/nanobot_dashboard # DATABASE_URL=mysql+pymysql://user:password@127.0.0.1:3306/nanobot_dashboard
DATABASE_URL= DATABASE_URL=
# Max upload size for backend validation (MB)
UPLOAD_MAX_MB=100

View File

@ -13,6 +13,9 @@ BOTS_WORKSPACE_ROOT=../workspace/bots
# Show SQL statements in backend logs (debug only). # Show SQL statements in backend logs (debug only).
DATABASE_ECHO=true DATABASE_ECHO=true
# Max upload size for backend validation (MB)
UPLOAD_MAX_MB=100
# Local backend server options (for `python3 main.py`) # Local backend server options (for `python3 main.py`)
APP_HOST=0.0.0.0 APP_HOST=0.0.0.0
APP_PORT=8000 APP_PORT=8000

View File

@ -3,7 +3,8 @@ import re
import threading import threading
import time import time
import codecs import codecs
from typing import Callable, Dict, List, Optional import base64
from typing import Any, Callable, Dict, List, Optional, Tuple
import json import json
import docker import docker
@ -22,6 +23,36 @@ class BotDockerManager:
self.host_data_root = host_data_root self.host_data_root = host_data_root
self.base_image = base_image self.base_image = base_image
self.active_monitors = {} self.active_monitors = {}
self._last_delivery_error: Dict[str, str] = {}
@staticmethod
def _normalize_resource_limits(
cpu_cores: Optional[float],
memory_mb: Optional[int],
storage_gb: Optional[int],
) -> Tuple[float, int, int]:
try:
cpu = float(cpu_cores) if cpu_cores is not None else 1.0
except Exception:
cpu = 1.0
try:
memory = int(memory_mb) if memory_mb is not None else 1024
except Exception:
memory = 1024
try:
storage = int(storage_gb) if storage_gb is not None else 10
except Exception:
storage = 10
if cpu < 0:
cpu = 1.0
if memory < 0:
memory = 1024
if storage < 0:
storage = 10
cpu = 0.0 if cpu == 0 else min(16.0, max(0.1, cpu))
memory = 0 if memory == 0 else min(65536, max(256, memory))
storage = 0 if storage == 0 else min(1024, max(1, storage))
return cpu, memory, storage
def has_image(self, tag: str) -> bool: def has_image(self, tag: str) -> bool:
if not self.client: if not self.client:
@ -62,6 +93,9 @@ class BotDockerManager:
bot_id: str, bot_id: str,
image_tag: Optional[str] = None, image_tag: Optional[str] = None,
env_vars: Optional[Dict[str, str]] = None, env_vars: Optional[Dict[str, str]] = None,
cpu_cores: Optional[float] = None,
memory_mb: Optional[int] = None,
storage_gb: Optional[int] = None,
on_state_change: Optional[Callable[[str, dict], None]] = None, on_state_change: Optional[Callable[[str, dict], None]] = None,
) -> bool: ) -> bool:
if not self.client: if not self.client:
@ -76,6 +110,23 @@ class BotDockerManager:
bot_workspace = os.path.join(self.host_data_root, bot_id, ".nanobot") bot_workspace = os.path.join(self.host_data_root, bot_id, ".nanobot")
container_name = f"worker_{bot_id}" container_name = f"worker_{bot_id}"
os.makedirs(bot_workspace, exist_ok=True) os.makedirs(bot_workspace, exist_ok=True)
cpu, memory, storage = self._normalize_resource_limits(cpu_cores, memory_mb, storage_gb)
base_kwargs = {
"image": image,
"name": container_name,
"detach": True,
"stdin_open": True,
"tty": True,
"environment": env_vars or {},
"volumes": {
bot_workspace: {"bind": "/root/.nanobot", "mode": "rw"},
},
"network_mode": "bridge",
}
if memory > 0:
base_kwargs["mem_limit"] = f"{memory}m"
if cpu > 0:
base_kwargs["nano_cpus"] = int(cpu * 1_000_000_000)
try: try:
try: try:
@ -89,20 +140,19 @@ class BotDockerManager:
except docker.errors.NotFound: except docker.errors.NotFound:
pass pass
container = self.client.containers.run( container = None
image=image, if storage > 0:
name=container_name, try:
detach=True, container = self.client.containers.run(
stdin_open=True, storage_opt={"size": f"{storage}G"},
tty=True, **base_kwargs,
environment=env_vars or {}, )
volumes={ except Exception as e:
bot_workspace: {"bind": "/root/.nanobot", "mode": "rw"}, # Some Docker engines (e.g. Desktop/overlay2) may not support size storage option.
}, print(f"[DockerManager] storage limit not applied for {bot_id}: {e}")
mem_limit="1g", container = self.client.containers.run(**base_kwargs)
cpu_quota=100000, else:
network_mode="bridge", container = self.client.containers.run(**base_kwargs)
)
if on_state_change: if on_state_change:
monitor_thread = threading.Thread( monitor_thread = threading.Thread(
@ -164,44 +214,304 @@ class BotDockerManager:
def send_command(self, bot_id: str, command: str, media: Optional[List[str]] = None) -> bool: def send_command(self, bot_id: str, command: str, media: Optional[List[str]] = None) -> bool:
"""Send a command to dashboard channel with robust container-local delivery.""" """Send a command to dashboard channel with robust container-local delivery."""
if not self.client: if not self.client:
self._last_delivery_error[bot_id] = "Docker client is not available"
return False return False
media_paths = [str(v).strip().replace("\\", "/") for v in (media or []) if str(v).strip()] media_paths = [str(v).strip().replace("\\", "/") for v in (media or []) if str(v).strip()]
self._last_delivery_error.pop(bot_id, None)
# Primary path on Docker Desktop/Mac: execute curl inside container namespace. # Primary path on Docker Desktop/Mac: execute curl inside container namespace.
for attempt in range(3): for attempt in range(3):
if self._send_command_via_exec(bot_id, command, media_paths): if self._send_command_via_exec(bot_id, command, media_paths):
self._last_delivery_error.pop(bot_id, None)
return True return True
time.sleep(0.25 * (attempt + 1)) time.sleep(0.25 * (attempt + 1))
# Secondary path for environments where host can reach container IP. # Secondary path for environments where host can reach container IP.
return self._send_command_via_host_http(bot_id, command, media_paths) if self._send_command_via_host_http(bot_id, command, media_paths):
self._last_delivery_error.pop(bot_id, None)
return True
if bot_id not in self._last_delivery_error:
self._last_delivery_error[bot_id] = "Unknown delivery failure"
return False
def get_last_delivery_error(self, bot_id: str) -> str:
return str(self._last_delivery_error.get(bot_id, "") or "").strip()
def get_bot_status(self, bot_id: str) -> str:
    """Return normalized runtime status from Docker: RUNNING or STOPPED.

    Any failure (no client, container missing, API error) is reported as
    STOPPED rather than raised.
    """
    if not self.client:
        return "STOPPED"
    try:
        container = self.client.containers.get(f"worker_{bot_id}")
        container.reload()
        state = str(container.status or "").strip().lower()
    except docker.errors.NotFound:
        return "STOPPED"
    except Exception:
        return "STOPPED"
    # "restarting" still counts as RUNNING for dashboard purposes.
    return "RUNNING" if state in ("running", "restarting") else "STOPPED"
@staticmethod
def _parse_size_to_bytes(raw: Any) -> Optional[int]:
if raw is None:
return None
text = str(raw).strip()
if not text:
return None
try:
return int(float(text))
except Exception:
pass
match = re.fullmatch(r"([0-9]+(?:\.[0-9]+)?)\s*([kmgtp]?)(i?b)?", text.lower())
if not match:
return None
number = float(match.group(1))
unit = (match.group(2) or "").lower()
scale = {
"": 1,
"k": 1024,
"m": 1024 ** 2,
"g": 1024 ** 3,
"t": 1024 ** 4,
"p": 1024 ** 5,
}.get(unit, 1)
return int(number * scale)
@staticmethod
def _calc_cpu_percent(stats: Dict[str, Any]) -> float:
try:
cpu_stats = stats.get("cpu_stats") or {}
precpu_stats = stats.get("precpu_stats") or {}
cpu_total = float((cpu_stats.get("cpu_usage") or {}).get("total_usage") or 0)
prev_cpu_total = float((precpu_stats.get("cpu_usage") or {}).get("total_usage") or 0)
cpu_delta = cpu_total - prev_cpu_total
system_total = float(cpu_stats.get("system_cpu_usage") or 0)
prev_system_total = float(precpu_stats.get("system_cpu_usage") or 0)
system_delta = system_total - prev_system_total
online_cpus = int(
cpu_stats.get("online_cpus")
or len((cpu_stats.get("cpu_usage") or {}).get("percpu_usage") or [])
or 1
)
if cpu_delta <= 0 or system_delta <= 0:
return 0.0
return max(0.0, (cpu_delta / system_delta) * online_cpus * 100.0)
except Exception:
return 0.0
def get_bot_resource_snapshot(self, bot_id: str) -> Dict[str, Any]:
    """Collect configured limits and live usage for a bot's worker container.

    Returns a dict with keys `docker_status`, `limits` and `usage`. On any
    failure (no Docker client, container missing, stats/inspect error) the
    zeroed defaults below are returned instead of raising.
    """
    # Baseline answer used when the container is absent or Docker is unreachable.
    snapshot: Dict[str, Any] = {
        "docker_status": "STOPPED",
        "limits": {
            "cpu_cores": None,
            "memory_bytes": None,
            "storage_bytes": None,
            "nano_cpus": 0,
            "storage_opt_raw": "",
        },
        "usage": {
            "cpu_percent": 0.0,
            "memory_bytes": 0,
            "memory_limit_bytes": 0,
            "memory_percent": 0.0,
            "network_rx_bytes": 0,
            "network_tx_bytes": 0,
            "blk_read_bytes": 0,
            "blk_write_bytes": 0,
            "pids": 0,
            "container_rw_bytes": 0,
        },
    }
    if not self.client:
        return snapshot
    try:
        container = self.client.containers.get(f"worker_{bot_id}")
        container.reload()
        status_raw = str(container.status or "").strip().lower()
        # "restarting" still counts as RUNNING for dashboard purposes.
        snapshot["docker_status"] = "RUNNING" if status_raw in {"running", "restarting"} else "STOPPED"
        inspect: Dict[str, Any]
        try:
            # size=True asks the engine to include SizeRw (writable-layer bytes).
            inspect = self.client.api.inspect_container(container.id, size=True)
        except TypeError:
            # Older docker SDK versions do not support `size` kwarg.
            inspect = self.client.api.inspect_container(container.id)
        except Exception as e:
            # Some SDKs surface the unsupported kwarg as a generic error message.
            if "unexpected keyword argument 'size'" in str(e):
                inspect = self.client.api.inspect_container(container.id)
            else:
                raise
        host_cfg = inspect.get("HostConfig") or {}
        nano_cpus = int(host_cfg.get("NanoCpus") or 0)
        cpu_quota = int(host_cfg.get("CpuQuota") or 0)
        cpu_period = int(host_cfg.get("CpuPeriod") or 0)
        memory_bytes = int(host_cfg.get("Memory") or 0)
        storage_opt = host_cfg.get("StorageOpt") or {}
        storage_raw = storage_opt.get("size")
        storage_bytes = self._parse_size_to_bytes(storage_raw)
        # Derive the CPU limit from NanoCpus, or from the quota/period pair.
        if nano_cpus > 0:
            cpu_cores = nano_cpus / 1_000_000_000
        elif cpu_quota > 0 and cpu_period > 0:
            cpu_cores = cpu_quota / cpu_period
        else:
            cpu_cores = None
        snapshot["limits"] = {
            "cpu_cores": cpu_cores,
            "memory_bytes": memory_bytes if memory_bytes > 0 else None,
            "storage_bytes": storage_bytes,
            "nano_cpus": nano_cpus,
            "storage_opt_raw": str(storage_raw or ""),
        }
        snapshot["usage"]["container_rw_bytes"] = int(inspect.get("SizeRw") or 0)
        if snapshot["docker_status"] == "RUNNING":
            # One-shot (non-streaming) stats sample.
            stats = container.stats(stream=False) or {}
            memory_stats = stats.get("memory_stats") or {}
            memory_usage = int(memory_stats.get("usage") or 0)
            memory_limit = int(memory_stats.get("limit") or 0)
            if memory_usage > 0:
                # Exclude the inactive file cache from the reported usage.
                cache = int((memory_stats.get("stats") or {}).get("inactive_file") or 0)
                memory_usage = max(0, memory_usage - cache)
            # Sum traffic over every attached network interface.
            networks = stats.get("networks") or {}
            rx_total = 0
            tx_total = 0
            for _, row in networks.items():
                if isinstance(row, dict):
                    rx_total += int(row.get("rx_bytes") or 0)
                    tx_total += int(row.get("tx_bytes") or 0)
            # Aggregate block-device read/write byte counters.
            blk_stats = stats.get("blkio_stats") or {}
            io_rows = blk_stats.get("io_service_bytes_recursive") or []
            blk_read = 0
            blk_write = 0
            for row in io_rows:
                if not isinstance(row, dict):
                    continue
                op = str(row.get("op") or "").upper()
                value = int(row.get("value") or 0)
                if op == "READ":
                    blk_read += value
                elif op == "WRITE":
                    blk_write += value
            pids_current = int((stats.get("pids_stats") or {}).get("current") or 0)
            cpu_percent = self._calc_cpu_percent(stats)
            memory_percent = 0.0
            if memory_limit > 0:
                memory_percent = (memory_usage / memory_limit) * 100.0
            if snapshot["usage"]["container_rw_bytes"] <= 0:
                # Fallback for engines that expose the writable-layer size via stats.
                storage_stats = stats.get("storage_stats") or {}
                rw_size = int(
                    storage_stats.get("size_rw")
                    or storage_stats.get("rw_size")
                    or 0
                )
                snapshot["usage"]["container_rw_bytes"] = max(0, rw_size)
            snapshot["usage"].update(
                {
                    "cpu_percent": cpu_percent,
                    "memory_bytes": memory_usage,
                    "memory_limit_bytes": memory_limit,
                    "memory_percent": max(0.0, memory_percent),
                    "network_rx_bytes": rx_total,
                    "network_tx_bytes": tx_total,
                    "blk_read_bytes": blk_read,
                    "blk_write_bytes": blk_write,
                    "pids": pids_current,
                }
            )
    except docker.errors.NotFound:
        return snapshot
    except Exception as e:
        print(f"[DockerManager] get_bot_resource_snapshot failed for {bot_id}: {e}")
    return snapshot
def _send_command_via_exec(self, bot_id: str, command: str, media: Optional[List[str]] = None) -> bool: def _send_command_via_exec(self, bot_id: str, command: str, media: Optional[List[str]] = None) -> bool:
try: try:
container = self.client.containers.get(f"worker_{bot_id}") container = self.client.containers.get(f"worker_{bot_id}")
container.reload()
if container.status != "running":
self._last_delivery_error[bot_id] = f"Container status is {container.status}"
return False
payload_json = json.dumps({"message": command, "media": media or []}, ensure_ascii=False) payload_json = json.dumps({"message": command, "media": media or []}, ensure_ascii=False)
cmd = (
"curl -sS --fail --max-time 6 " # Try direct curl first (no shell dependency).
"-X POST -H 'Content-Type: application/json' " result = container.exec_run(
"-d \"$DASHBOARD_PAYLOAD\" http://127.0.0.1:9000/chat" [
"curl",
"-sS",
"--fail",
"--max-time",
"6",
"-X",
"POST",
"-H",
"Content-Type: application/json",
"-d",
payload_json,
"http://127.0.0.1:9000/chat",
]
) )
result = container.exec_run(["/bin/sh", "-c", cmd], environment={"DASHBOARD_PAYLOAD": payload_json})
output = result.output.decode("utf-8", errors="ignore") if isinstance(result.output, (bytes, bytearray)) else str(result.output) output = result.output.decode("utf-8", errors="ignore") if isinstance(result.output, (bytes, bytearray)) else str(result.output)
if result.exit_code != 0: if result.exit_code != 0:
print(f"[DockerManager] exec curl failed for {bot_id}: exit={result.exit_code}, out={output[:300]}") reason = f"exec curl failed: exit={result.exit_code}, out={output[:300]}"
print(f"[DockerManager] {reason}")
self._last_delivery_error[bot_id] = reason
# Fallback inside container without curl/shell.
payload_b64 = base64.b64encode(payload_json.encode("utf-8")).decode("ascii")
py_script = (
"import base64,json,os,urllib.request\n"
"payload=json.loads(base64.b64decode(os.environ['DASHBOARD_PAYLOAD_B64']).decode('utf-8'))\n"
"req=urllib.request.Request('http://127.0.0.1:9000/chat',"
"data=json.dumps(payload,ensure_ascii=False).encode('utf-8'),"
"headers={'Content-Type':'application/json'})\n"
"with urllib.request.urlopen(req, timeout=8) as resp:\n"
" print(resp.read().decode('utf-8','ignore'))\n"
)
py_bins = ["python3", "python"]
for py_bin in py_bins:
py_result = container.exec_run(
[py_bin, "-c", py_script],
environment={"DASHBOARD_PAYLOAD_B64": payload_b64},
)
py_output = py_result.output.decode("utf-8", errors="ignore") if isinstance(py_result.output, (bytes, bytearray)) else str(py_result.output)
if py_result.exit_code != 0:
py_reason = f"exec {py_bin} fallback failed: exit={py_result.exit_code}, out={py_output[:300]}"
print(f"[DockerManager] {py_reason}")
self._last_delivery_error[bot_id] = py_reason
continue
if py_output.strip():
try:
parsed = json.loads(py_output)
if str(parsed.get("status", "")).lower() != "ok":
py_reason = f"exec {py_bin} fallback non-ok response: {py_output[:300]}"
print(f"[DockerManager] {py_reason}")
self._last_delivery_error[bot_id] = py_reason
continue
except Exception:
pass
return True
return False return False
if output.strip(): if output.strip():
try: try:
parsed = json.loads(output) parsed = json.loads(output)
if str(parsed.get("status", "")).lower() != "ok": if str(parsed.get("status", "")).lower() != "ok":
print(f"[DockerManager] exec curl non-ok response for {bot_id}: {output[:300]}") reason = f"exec curl non-ok response: {output[:300]}"
print(f"[DockerManager] {reason}")
self._last_delivery_error[bot_id] = reason
return False return False
except Exception: except Exception:
# Non-JSON but zero exit still treated as success. # Non-JSON but zero exit still treated as success.
pass pass
return True return True
except Exception as e: except Exception as e:
print(f"[DockerManager] exec curl exception for {bot_id}: {e}") reason = f"exec curl exception: {e}"
print(f"[DockerManager] {reason}")
self._last_delivery_error[bot_id] = reason
return False return False
def _send_command_via_host_http(self, bot_id: str, command: str, media: Optional[List[str]] = None) -> bool: def _send_command_via_host_http(self, bot_id: str, command: str, media: Optional[List[str]] = None) -> bool:
@ -218,10 +528,14 @@ class BotDockerManager:
resp = client.post(target_url, json=payload) resp = client.post(target_url, json=payload)
if resp.status_code == 200: if resp.status_code == 200:
return True return True
print(f"[DockerManager] host HTTP failed: {resp.status_code} - {resp.text[:300]}") reason = f"host HTTP failed: {resp.status_code} - {resp.text[:300]}"
print(f"[DockerManager] {reason}")
self._last_delivery_error[bot_id] = reason
return False return False
except Exception as e: except Exception as e:
print(f"[DockerManager] host HTTP exception: {e}") reason = f"host HTTP exception: {e}"
print(f"[DockerManager] {reason}")
self._last_delivery_error[bot_id] = reason
return False return False
def get_recent_logs(self, bot_id: str, tail: int = 300) -> List[str]: def get_recent_logs(self, bot_id: str, tail: int = 300) -> List[str]:

View File

@ -27,6 +27,17 @@ def _env_bool(name: str, default: bool) -> bool:
return str(raw).strip().lower() in {"1", "true", "yes", "on"} return str(raw).strip().lower() in {"1", "true", "yes", "on"}
def _env_int(name: str, default: int, min_value: int, max_value: int) -> int:
raw = os.getenv(name)
if raw is None:
return default
try:
value = int(str(raw).strip())
except Exception:
value = default
return max(min_value, min(max_value, value))
def _normalize_dir_path(path_value: str) -> str: def _normalize_dir_path(path_value: str) -> str:
raw = str(path_value or "").strip() raw = str(path_value or "").strip()
if not raw: if not raw:
@ -99,6 +110,7 @@ DATABASE_URL: Final[str] = _normalize_database_url(
DATABASE_ENGINE: Final[str] = _database_engine(DATABASE_URL) DATABASE_ENGINE: Final[str] = _database_engine(DATABASE_URL)
DATABASE_URL_DISPLAY: Final[str] = _mask_database_url(DATABASE_URL) DATABASE_URL_DISPLAY: Final[str] = _mask_database_url(DATABASE_URL)
DATABASE_ECHO: Final[bool] = _env_bool("DATABASE_ECHO", True) DATABASE_ECHO: Final[bool] = _env_bool("DATABASE_ECHO", True)
UPLOAD_MAX_MB: Final[int] = _env_int("UPLOAD_MAX_MB", 100, 1, 2048)
DEFAULT_AGENTS_MD: Final[str] = _env_text( DEFAULT_AGENTS_MD: Final[str] = _env_text(
"DEFAULT_AGENTS_MD", "DEFAULT_AGENTS_MD",

View File

@ -1,5 +1,4 @@
import asyncio import asyncio
import io
import json import json
import mimetypes import mimetypes
import os import os
@ -32,6 +31,7 @@ from core.settings import (
DEFAULT_TOOLS_MD, DEFAULT_TOOLS_MD,
DEFAULT_USER_MD, DEFAULT_USER_MD,
PROJECT_ROOT, PROJECT_ROOT,
UPLOAD_MAX_MB,
) )
from models.bot import BotInstance, BotMessage, NanobotImage from models.bot import BotInstance, BotMessage, NanobotImage
@ -81,6 +81,9 @@ class BotCreateRequest(BaseModel):
temperature: float = 0.2 temperature: float = 0.2
top_p: float = 1.0 top_p: float = 1.0
max_tokens: int = 8192 max_tokens: int = 8192
cpu_cores: float = 1.0
memory_mb: int = 1024
storage_gb: int = 10
soul_md: Optional[str] = None soul_md: Optional[str] = None
agents_md: Optional[str] = None agents_md: Optional[str] = None
user_md: Optional[str] = None user_md: Optional[str] = None
@ -104,6 +107,9 @@ class BotUpdateRequest(BaseModel):
temperature: Optional[float] = None temperature: Optional[float] = None
top_p: Optional[float] = None top_p: Optional[float] = None
max_tokens: Optional[int] = None max_tokens: Optional[int] = None
cpu_cores: Optional[float] = None
memory_mb: Optional[int] = None
storage_gb: Optional[int] = None
soul_md: Optional[str] = None soul_md: Optional[str] = None
agents_md: Optional[str] = None agents_md: Optional[str] = None
user_md: Optional[str] = None user_md: Optional[str] = None
@ -276,6 +282,8 @@ async def on_startup():
print(f"📁 数据库连接: {DATABASE_URL_DISPLAY}") print(f"📁 数据库连接: {DATABASE_URL_DISPLAY}")
init_database() init_database()
with Session(engine) as session: with Session(engine) as session:
for bot in session.exec(select(BotInstance)).all():
_migrate_bot_resources_store(bot.id)
running_bots = session.exec(select(BotInstance).where(BotInstance.docker_status == "RUNNING")).all() running_bots = session.exec(select(BotInstance).where(BotInstance.docker_status == "RUNNING")).all()
for bot in running_bots: for bot in running_bots:
docker_manager.ensure_monitor(bot.id, docker_callback) docker_manager.ensure_monitor(bot.id, docker_callback)
@ -303,7 +311,10 @@ def get_system_defaults():
"user_md": DEFAULT_USER_MD, "user_md": DEFAULT_USER_MD,
"tools_md": DEFAULT_TOOLS_MD, "tools_md": DEFAULT_TOOLS_MD,
"identity_md": DEFAULT_IDENTITY_MD, "identity_md": DEFAULT_IDENTITY_MD,
} },
"limits": {
"upload_max_mb": UPLOAD_MAX_MB,
},
} }
@ -342,6 +353,84 @@ def _write_bot_config(bot_id: str, config_data: Dict[str, Any]) -> None:
os.replace(tmp, path) os.replace(tmp, path)
def _resources_json_path(bot_id: str) -> str:
    # Per-bot resource-limit store, kept beside the rest of the bot's data files.
    return os.path.join(_bot_data_root(bot_id), "resources.json")
def _write_bot_resources(bot_id: str, cpu_cores: Any, memory_mb: Any, storage_gb: Any) -> None:
    """Persist normalized resource limits to the bot's resources.json.

    The values are normalized first, then written atomically (temp file +
    os.replace) so readers never observe a partial file.
    """
    limits = _normalize_resource_limits(cpu_cores, memory_mb, storage_gb)
    target = _resources_json_path(bot_id)
    os.makedirs(os.path.dirname(target), exist_ok=True)
    document = {
        "cpuCores": limits["cpu_cores"],
        "memoryMB": limits["memory_mb"],
        "storageGB": limits["storage_gb"],
    }
    tmp_path = f"{target}.tmp"
    with open(tmp_path, "w", encoding="utf-8") as handle:
        json.dump(document, handle, ensure_ascii=False, indent=2)
    os.replace(tmp_path, target)
def _read_bot_resources(bot_id: str, config_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Load a bot's configured resource limits, normalized.

    Prefers the dedicated resources.json file; any field still missing is
    backfilled from the legacy `runtime.resources` section of the bot config.
    Pass `config_data` to avoid re-reading the config from disk.
    """
    cpu_raw: Any = None
    memory_raw: Any = None
    storage_raw: Any = None
    path = _resources_json_path(bot_id)
    if os.path.isfile(path):
        try:
            with open(path, "r", encoding="utf-8") as f:
                data = json.load(f)
            if isinstance(data, dict):
                # Accept both camelCase (canonical) and snake_case keys.
                cpu_raw = data.get("cpuCores", data.get("cpu_cores"))
                memory_raw = data.get("memoryMB", data.get("memory_mb"))
                storage_raw = data.get("storageGB", data.get("storage_gb"))
        except Exception:
            # A corrupt/unreadable file falls through to the legacy source.
            pass
    # Backward compatibility: read old runtime.resources only if new file is missing/incomplete.
    if cpu_raw is None or memory_raw is None or storage_raw is None:
        cfg = config_data if isinstance(config_data, dict) else _read_bot_config(bot_id)
        runtime_cfg = cfg.get("runtime")
        if isinstance(runtime_cfg, dict):
            resources_raw = runtime_cfg.get("resources")
            if isinstance(resources_raw, dict):
                if cpu_raw is None:
                    cpu_raw = resources_raw.get("cpuCores", resources_raw.get("cpu_cores"))
                if memory_raw is None:
                    memory_raw = resources_raw.get("memoryMB", resources_raw.get("memory_mb"))
                if storage_raw is None:
                    storage_raw = resources_raw.get("storageGB", resources_raw.get("storage_gb"))
    return _normalize_resource_limits(cpu_raw, memory_raw, storage_raw)
def _migrate_bot_resources_store(bot_id: str) -> None:
    """One-time migration of legacy `runtime.resources` config into resources.json.

    Seeds resources.json from the legacy values when it does not exist yet,
    then strips the legacy section (and an emptied `runtime` block) from the
    bot config file.
    """
    config_data = _read_bot_config(bot_id)
    runtime_cfg = config_data.get("runtime")
    resources_raw: Dict[str, Any] = {}
    if isinstance(runtime_cfg, dict):
        legacy_raw = runtime_cfg.get("resources")
        if isinstance(legacy_raw, dict):
            resources_raw = legacy_raw
    path = _resources_json_path(bot_id)
    if not os.path.isfile(path):
        # Seed the new store from legacy values (defaults apply when absent).
        _write_bot_resources(
            bot_id,
            resources_raw.get("cpuCores", resources_raw.get("cpu_cores")),
            resources_raw.get("memoryMB", resources_raw.get("memory_mb")),
            resources_raw.get("storageGB", resources_raw.get("storage_gb")),
        )
    if isinstance(runtime_cfg, dict) and "resources" in runtime_cfg:
        runtime_cfg.pop("resources", None)
        # Drop an empty runtime block entirely to keep the config tidy.
        if not runtime_cfg:
            config_data.pop("runtime", None)
        _write_bot_config(bot_id, config_data)
def _normalize_channel_extra(raw: Any) -> Dict[str, Any]: def _normalize_channel_extra(raw: Any) -> Dict[str, Any]:
if not isinstance(raw, dict): if not isinstance(raw, dict):
return {} return {}
@ -629,6 +718,26 @@ def _safe_int(raw: Any, default: int) -> int:
return default return default
def _normalize_resource_limits(cpu_cores: Any, memory_mb: Any, storage_gb: Any) -> Dict[str, Any]:
    """Sanitize raw resource settings into a cpu_cores/memory_mb/storage_gb dict.

    Zero means "unlimited" and is preserved; negative or unparseable values
    fall back to the defaults (1.0 core / 1024 MB / 10 GB); positive values
    are clamped into their supported ranges.
    """
    cpu = _safe_float(cpu_cores, 1.0)
    if cpu < 0:
        cpu = 1.0
    mem = _safe_int(memory_mb, 1024)
    if mem < 0:
        mem = 1024
    storage = _safe_int(storage_gb, 10)
    if storage < 0:
        storage = 10
    return {
        "cpu_cores": 0.0 if cpu == 0 else min(16.0, max(0.1, cpu)),
        "memory_mb": 0 if mem == 0 else min(65536, max(256, mem)),
        "storage_gb": 0 if storage == 0 else min(1024, max(1, storage)),
    }
def _read_workspace_md(bot_id: str, filename: str, default_value: str) -> str: def _read_workspace_md(bot_id: str, filename: str, default_value: str) -> str:
path = os.path.join(_workspace_root(bot_id), filename) path = os.path.join(_workspace_root(bot_id), filename)
if not os.path.isfile(path): if not os.path.isfile(path):
@ -670,6 +779,7 @@ def _read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
api_base = str(provider_cfg.get("apiBase") or "").strip() api_base = str(provider_cfg.get("apiBase") or "").strip()
soul_md = _read_workspace_md(bot.id, "SOUL.md", DEFAULT_SOUL_MD) soul_md = _read_workspace_md(bot.id, "SOUL.md", DEFAULT_SOUL_MD)
resources = _read_bot_resources(bot.id, config_data=config_data)
return { return {
"llm_provider": llm_provider, "llm_provider": llm_provider,
@ -679,6 +789,9 @@ def _read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
"temperature": _safe_float(agents_defaults.get("temperature"), 0.2), "temperature": _safe_float(agents_defaults.get("temperature"), 0.2),
"top_p": _safe_float(agents_defaults.get("topP"), 1.0), "top_p": _safe_float(agents_defaults.get("topP"), 1.0),
"max_tokens": _safe_int(agents_defaults.get("maxTokens"), 8192), "max_tokens": _safe_int(agents_defaults.get("maxTokens"), 8192),
"cpu_cores": resources["cpu_cores"],
"memory_mb": resources["memory_mb"],
"storage_gb": resources["storage_gb"],
"send_progress": send_progress, "send_progress": send_progress,
"send_tool_hints": send_tool_hints, "send_tool_hints": send_tool_hints,
"soul_md": soul_md, "soul_md": soul_md,
@ -707,6 +820,9 @@ def _serialize_bot(bot: BotInstance) -> Dict[str, Any]:
"temperature": _safe_float(runtime.get("temperature"), 0.2), "temperature": _safe_float(runtime.get("temperature"), 0.2),
"top_p": _safe_float(runtime.get("top_p"), 1.0), "top_p": _safe_float(runtime.get("top_p"), 1.0),
"max_tokens": _safe_int(runtime.get("max_tokens"), 8192), "max_tokens": _safe_int(runtime.get("max_tokens"), 8192),
"cpu_cores": _safe_float(runtime.get("cpu_cores"), 1.0),
"memory_mb": _safe_int(runtime.get("memory_mb"), 1024),
"storage_gb": _safe_int(runtime.get("storage_gb"), 10),
"send_progress": bool(runtime.get("send_progress")), "send_progress": bool(runtime.get("send_progress")),
"send_tool_hints": bool(runtime.get("send_tool_hints")), "send_tool_hints": bool(runtime.get("send_tool_hints")),
"soul_md": runtime.get("soul_md") or "", "soul_md": runtime.get("soul_md") or "",
@ -749,6 +865,9 @@ def _sync_workspace_channels(
"temperature": _safe_float(snapshot.get("temperature"), 0.2), "temperature": _safe_float(snapshot.get("temperature"), 0.2),
"top_p": _safe_float(snapshot.get("top_p"), 1.0), "top_p": _safe_float(snapshot.get("top_p"), 1.0),
"max_tokens": _safe_int(snapshot.get("max_tokens"), 8192), "max_tokens": _safe_int(snapshot.get("max_tokens"), 8192),
"cpu_cores": _safe_float(snapshot.get("cpu_cores"), 1.0),
"memory_mb": _safe_int(snapshot.get("memory_mb"), 1024),
"storage_gb": _safe_int(snapshot.get("storage_gb"), 10),
"send_progress": bool(snapshot.get("send_progress")), "send_progress": bool(snapshot.get("send_progress")),
"send_tool_hints": bool(snapshot.get("send_tool_hints")), "send_tool_hints": bool(snapshot.get("send_tool_hints")),
"tools_config_json": json.dumps(_normalize_tools_config(snapshot.get("tools_config")), ensure_ascii=False), "tools_config_json": json.dumps(_normalize_tools_config(snapshot.get("tools_config")), ensure_ascii=False),
@ -756,6 +875,14 @@ def _sync_workspace_channels(
if isinstance(runtime_overrides, dict): if isinstance(runtime_overrides, dict):
for key, value in runtime_overrides.items(): for key, value in runtime_overrides.items():
bot_data[key] = value bot_data[key] = value
resources = _normalize_resource_limits(
bot_data.get("cpu_cores"),
bot_data.get("memory_mb"),
bot_data.get("storage_gb"),
)
bot_data["cpu_cores"] = resources["cpu_cores"]
bot_data["memory_mb"] = resources["memory_mb"]
bot_data["storage_gb"] = resources["storage_gb"]
if "tools_config" in bot_data: if "tools_config" in bot_data:
bot_data["tools_config_json"] = json.dumps(_normalize_tools_config(bot_data.get("tools_config")), ensure_ascii=False) bot_data["tools_config_json"] = json.dumps(_normalize_tools_config(bot_data.get("tools_config")), ensure_ascii=False)
@ -790,6 +917,12 @@ def _sync_workspace_channels(
bot_data=bot_data, bot_data=bot_data,
channels=normalized_channels, channels=normalized_channels,
) )
_write_bot_resources(
bot_id,
bot_data.get("cpu_cores"),
bot_data.get("memory_mb"),
bot_data.get("storage_gb"),
)
def reconcile_image_registry(session: Session): def reconcile_image_registry(session: Session):
@ -953,6 +1086,43 @@ def _resolve_workspace_path(bot_id: str, rel_path: Optional[str] = None) -> tupl
return root, target return root, target
def _calc_dir_size_bytes(path: str) -> int:
total = 0
if not os.path.exists(path):
return 0
for root, _, files in os.walk(path):
for filename in files:
try:
file_path = os.path.join(root, filename)
if os.path.islink(file_path):
continue
total += os.path.getsize(file_path)
except Exception:
continue
return max(0, total)
def _is_image_attachment_path(path: str) -> bool:
lower = str(path or "").strip().lower()
return lower.endswith(".png") or lower.endswith(".jpg") or lower.endswith(".jpeg") or lower.endswith(".webp")
def _is_video_attachment_path(path: str) -> bool:
lower = str(path or "").strip().lower()
return (
lower.endswith(".mp4")
or lower.endswith(".mov")
or lower.endswith(".m4v")
or lower.endswith(".webm")
or lower.endswith(".mkv")
or lower.endswith(".avi")
)
def _is_visual_attachment_path(path: str) -> bool:
    # True for any attachment the dashboard can render inline (image or video).
    return _is_image_attachment_path(path) or _is_video_attachment_path(path)
def _build_workspace_tree(path: str, root: str, depth: int) -> List[Dict[str, Any]]: def _build_workspace_tree(path: str, root: str, depth: int) -> List[Dict[str, Any]]:
rows: List[Dict[str, Any]] = [] rows: List[Dict[str, Any]] = []
try: try:
@ -1157,6 +1327,7 @@ def create_bot(payload: BotCreateRequest, session: Session = Depends(get_session
session.add(bot) session.add(bot)
session.commit() session.commit()
session.refresh(bot) session.refresh(bot)
resource_limits = _normalize_resource_limits(payload.cpu_cores, payload.memory_mb, payload.storage_gb)
_write_env_store(payload.id, _normalize_env_params(payload.env_params)) _write_env_store(payload.id, _normalize_env_params(payload.env_params))
_sync_workspace_channels( _sync_workspace_channels(
session, session,
@ -1174,6 +1345,9 @@ def create_bot(payload: BotCreateRequest, session: Session = Depends(get_session
"temperature": payload.temperature, "temperature": payload.temperature,
"top_p": payload.top_p, "top_p": payload.top_p,
"max_tokens": payload.max_tokens, "max_tokens": payload.max_tokens,
"cpu_cores": resource_limits["cpu_cores"],
"memory_mb": resource_limits["memory_mb"],
"storage_gb": resource_limits["storage_gb"],
"system_prompt": payload.system_prompt or payload.soul_md or DEFAULT_SOUL_MD, "system_prompt": payload.system_prompt or payload.soul_md or DEFAULT_SOUL_MD,
"soul_md": payload.soul_md or payload.system_prompt or DEFAULT_SOUL_MD, "soul_md": payload.soul_md or payload.system_prompt or DEFAULT_SOUL_MD,
"agents_md": payload.agents_md or DEFAULT_AGENTS_MD, "agents_md": payload.agents_md or DEFAULT_AGENTS_MD,
@ -1192,9 +1366,66 @@ def create_bot(payload: BotCreateRequest, session: Session = Depends(get_session
@app.get("/api/bots") @app.get("/api/bots")
def list_bots(session: Session = Depends(get_session)): def list_bots(session: Session = Depends(get_session)):
bots = session.exec(select(BotInstance)).all() bots = session.exec(select(BotInstance)).all()
dirty = False
for bot in bots:
actual_status = docker_manager.get_bot_status(bot.id)
if bot.docker_status != actual_status:
bot.docker_status = actual_status
if actual_status != "RUNNING" and str(bot.current_state or "").upper() not in {"ERROR"}:
bot.current_state = "IDLE"
session.add(bot)
dirty = True
if dirty:
session.commit()
for bot in bots:
session.refresh(bot)
return [_serialize_bot(bot) for bot in bots] return [_serialize_bot(bot) for bot in bots]
@app.get("/api/bots/{bot_id}/resources")
def get_bot_resources(bot_id: str, session: Session = Depends(get_session)):
bot = session.get(BotInstance, bot_id)
if not bot:
raise HTTPException(status_code=404, detail="Bot not found")
configured = _read_bot_resources(bot_id)
runtime = docker_manager.get_bot_resource_snapshot(bot_id)
workspace_root = _workspace_root(bot_id)
workspace_bytes = _calc_dir_size_bytes(workspace_root)
configured_storage_bytes = int(configured.get("storage_gb", 0) or 0) * 1024 * 1024 * 1024
workspace_percent = 0.0
if configured_storage_bytes > 0:
workspace_percent = (workspace_bytes / configured_storage_bytes) * 100.0
limits = runtime.get("limits") or {}
cpu_limited = (limits.get("cpu_cores") or 0) > 0
memory_limited = (limits.get("memory_bytes") or 0) > 0
storage_limited = bool(limits.get("storage_bytes")) or bool(limits.get("storage_opt_raw"))
return {
"bot_id": bot_id,
"docker_status": runtime.get("docker_status") or bot.docker_status,
"configured": configured,
"runtime": runtime,
"workspace": {
"path": workspace_root,
"usage_bytes": workspace_bytes,
"configured_limit_bytes": configured_storage_bytes if configured_storage_bytes > 0 else None,
"usage_percent": max(0.0, workspace_percent),
},
"enforcement": {
"cpu_limited": cpu_limited,
"memory_limited": memory_limited,
"storage_limited": storage_limited,
},
"note": (
"Resource value 0 means unlimited. CPU/Memory limits come from Docker HostConfig and are enforced by cgroup. "
"Storage limit depends on Docker storage driver support."
),
"collected_at": datetime.utcnow().isoformat() + "Z",
}
@app.put("/api/bots/{bot_id}") @app.put("/api/bots/{bot_id}")
def update_bot(bot_id: str, payload: BotUpdateRequest, session: Session = Depends(get_session)): def update_bot(bot_id: str, payload: BotUpdateRequest, session: Session = Depends(get_session)):
bot = session.get(BotInstance, bot_id) bot = session.get(BotInstance, bot_id)
@ -1227,6 +1458,9 @@ def update_bot(bot_id: str, payload: BotUpdateRequest, session: Session = Depend
"temperature", "temperature",
"top_p", "top_p",
"max_tokens", "max_tokens",
"cpu_cores",
"memory_mb",
"storage_gb",
"soul_md", "soul_md",
"agents_md", "agents_md",
"user_md", "user_md",
@ -1244,6 +1478,13 @@ def update_bot(bot_id: str, payload: BotUpdateRequest, session: Session = Depend
runtime_overrides["soul_md"] = runtime_overrides["system_prompt"] runtime_overrides["soul_md"] = runtime_overrides["system_prompt"]
if "soul_md" in runtime_overrides and "system_prompt" not in runtime_overrides: if "soul_md" in runtime_overrides and "system_prompt" not in runtime_overrides:
runtime_overrides["system_prompt"] = runtime_overrides["soul_md"] runtime_overrides["system_prompt"] = runtime_overrides["soul_md"]
if {"cpu_cores", "memory_mb", "storage_gb"} & set(runtime_overrides.keys()):
normalized_resources = _normalize_resource_limits(
runtime_overrides.get("cpu_cores"),
runtime_overrides.get("memory_mb"),
runtime_overrides.get("storage_gb"),
)
runtime_overrides.update(normalized_resources)
db_fields = {"name", "image_tag"} db_fields = {"name", "image_tag"}
for key, value in update_data.items(): for key, value in update_data.items():
@ -1280,20 +1521,35 @@ async def start_bot(bot_id: str, session: Session = Depends(get_session)):
raise HTTPException(status_code=404, detail="Bot not found") raise HTTPException(status_code=404, detail="Bot not found")
_sync_workspace_channels(session, bot_id) _sync_workspace_channels(session, bot_id)
runtime_snapshot = _read_bot_runtime_snapshot(bot)
env_params = _read_env_store(bot_id) env_params = _read_env_store(bot_id)
success = docker_manager.start_bot( success = docker_manager.start_bot(
bot_id, bot_id,
image_tag=bot.image_tag, image_tag=bot.image_tag,
on_state_change=docker_callback, on_state_change=docker_callback,
env_vars=env_params, env_vars=env_params,
cpu_cores=_safe_float(runtime_snapshot.get("cpu_cores"), 1.0),
memory_mb=_safe_int(runtime_snapshot.get("memory_mb"), 1024),
storage_gb=_safe_int(runtime_snapshot.get("storage_gb"), 10),
) )
if success: if not success:
bot.docker_status = "RUNNING" bot.docker_status = "STOPPED"
session.add(bot) session.add(bot)
session.commit() session.commit()
return {"status": "started"} raise HTTPException(status_code=500, detail=f"Failed to start container with image {bot.image_tag}")
raise HTTPException(status_code=500, detail=f"Failed to start container with image {bot.image_tag}") actual_status = docker_manager.get_bot_status(bot_id)
bot.docker_status = actual_status
if actual_status != "RUNNING":
session.add(bot)
session.commit()
raise HTTPException(
status_code=500,
detail="Bot container failed shortly after startup. Check bot logs/config.",
)
session.add(bot)
session.commit()
return {"status": "started"}
@app.post("/api/bots/{bot_id}/stop") @app.post("/api/bots/{bot_id}/stop")
@ -1421,65 +1677,97 @@ async def upload_bot_skill_zip(bot_id: str, file: UploadFile = File(...), sessio
if not filename.lower().endswith(".zip"): if not filename.lower().endswith(".zip"):
raise HTTPException(status_code=400, detail="Only .zip skill package is supported") raise HTTPException(status_code=400, detail="Only .zip skill package is supported")
max_bytes = UPLOAD_MAX_MB * 1024 * 1024
tmp_zip_path: Optional[str] = None
total_size = 0
try: try:
zip_bytes = await file.read() with tempfile.NamedTemporaryFile(prefix=".skill_upload_", suffix=".zip", delete=False) as tmp_zip:
if not zip_bytes: tmp_zip_path = tmp_zip.name
raise HTTPException(status_code=400, detail="Zip package is empty") while True:
archive = zipfile.ZipFile(io.BytesIO(zip_bytes)) chunk = await file.read(1024 * 1024)
if not chunk:
break
total_size += len(chunk)
if total_size > max_bytes:
raise HTTPException(
status_code=413,
detail=f"Zip package too large (max {max_bytes // (1024 * 1024)}MB)",
)
tmp_zip.write(chunk)
except Exception: except Exception:
if tmp_zip_path and os.path.exists(tmp_zip_path):
os.remove(tmp_zip_path)
raise
finally:
await file.close()
if total_size == 0:
if tmp_zip_path and os.path.exists(tmp_zip_path):
os.remove(tmp_zip_path)
raise HTTPException(status_code=400, detail="Zip package is empty")
try:
archive = zipfile.ZipFile(tmp_zip_path)
except Exception:
if tmp_zip_path and os.path.exists(tmp_zip_path):
os.remove(tmp_zip_path)
raise HTTPException(status_code=400, detail="Invalid zip file") raise HTTPException(status_code=400, detail="Invalid zip file")
skills_root = _skills_root(bot_id) skills_root = _skills_root(bot_id)
os.makedirs(skills_root, exist_ok=True) os.makedirs(skills_root, exist_ok=True)
installed: List[str] = [] installed: List[str] = []
with archive: try:
members = archive.infolist() with archive:
file_members = [m for m in members if not m.is_dir()] members = archive.infolist()
if not file_members: file_members = [m for m in members if not m.is_dir()]
raise HTTPException(status_code=400, detail="Zip package has no files") if not file_members:
raise HTTPException(status_code=400, detail="Zip package has no files")
top_names: List[str] = [] top_names: List[str] = []
for member in file_members: for member in file_members:
raw_name = str(member.filename or "").replace("\\", "/").lstrip("/")
if not raw_name:
continue
first = raw_name.split("/", 1)[0].strip()
if not _is_valid_top_level_skill_name(first):
raise HTTPException(status_code=400, detail=f"Invalid skill entry name in zip: {first}")
if first not in top_names:
top_names.append(first)
if not top_names:
raise HTTPException(status_code=400, detail="Zip package has no valid skill entries")
conflicts = [name for name in top_names if os.path.exists(os.path.join(skills_root, name))]
if conflicts:
raise HTTPException(status_code=400, detail=f"Skill already exists: {', '.join(conflicts)}")
with tempfile.TemporaryDirectory(prefix=".skill_upload_", dir=skills_root) as tmp_dir:
tmp_root = os.path.abspath(tmp_dir)
for member in members:
raw_name = str(member.filename or "").replace("\\", "/").lstrip("/") raw_name = str(member.filename or "").replace("\\", "/").lstrip("/")
if not raw_name: if not raw_name:
continue continue
target = os.path.abspath(os.path.join(tmp_root, raw_name)) first = raw_name.split("/", 1)[0].strip()
if os.path.commonpath([tmp_root, target]) != tmp_root: if not _is_valid_top_level_skill_name(first):
raise HTTPException(status_code=400, detail=f"Unsafe zip entry path: {raw_name}") raise HTTPException(status_code=400, detail=f"Invalid skill entry name in zip: {first}")
if member.is_dir(): if first not in top_names:
os.makedirs(target, exist_ok=True) top_names.append(first)
continue
os.makedirs(os.path.dirname(target), exist_ok=True)
with archive.open(member, "r") as source, open(target, "wb") as dest:
shutil.copyfileobj(source, dest)
for name in top_names: if not top_names:
src = os.path.join(tmp_root, name) raise HTTPException(status_code=400, detail="Zip package has no valid skill entries")
dst = os.path.join(skills_root, name)
if not os.path.exists(src): conflicts = [name for name in top_names if os.path.exists(os.path.join(skills_root, name))]
continue if conflicts:
shutil.move(src, dst) raise HTTPException(status_code=400, detail=f"Skill already exists: {', '.join(conflicts)}")
installed.append(name)
with tempfile.TemporaryDirectory(prefix=".skill_upload_", dir=skills_root) as tmp_dir:
tmp_root = os.path.abspath(tmp_dir)
for member in members:
raw_name = str(member.filename or "").replace("\\", "/").lstrip("/")
if not raw_name:
continue
target = os.path.abspath(os.path.join(tmp_root, raw_name))
if os.path.commonpath([tmp_root, target]) != tmp_root:
raise HTTPException(status_code=400, detail=f"Unsafe zip entry path: {raw_name}")
if member.is_dir():
os.makedirs(target, exist_ok=True)
continue
os.makedirs(os.path.dirname(target), exist_ok=True)
with archive.open(member, "r") as source, open(target, "wb") as dest:
shutil.copyfileobj(source, dest)
for name in top_names:
src = os.path.join(tmp_root, name)
dst = os.path.join(skills_root, name)
if not os.path.exists(src):
continue
shutil.move(src, dst)
installed.append(name)
finally:
if tmp_zip_path and os.path.exists(tmp_zip_path):
os.remove(tmp_zip_path)
if not installed: if not installed:
raise HTTPException(status_code=400, detail="No skill entries installed from zip") raise HTTPException(status_code=400, detail="No skill entries installed from zip")
@ -1664,24 +1952,52 @@ def send_command(bot_id: str, payload: CommandRequest, session: Session = Depend
if not os.path.isfile(target): if not os.path.isfile(target):
raise HTTPException(status_code=400, detail=f"attachment not found: {rel}") raise HTTPException(status_code=400, detail=f"attachment not found: {rel}")
checked_attachments.append(rel) checked_attachments.append(rel)
delivery_media = [f"/root/.nanobot/workspace/{p.lstrip('/')}" for p in checked_attachments]
display_command = command if command else "[attachment message]" display_command = command if command else "[attachment message]"
delivery_command = display_command delivery_command = display_command
if checked_attachments: if checked_attachments:
command_has_paths = all(p in command for p in checked_attachments) if command else False all_visual = all(_is_visual_attachment_path(p) for p in checked_attachments)
attachment_block = "\n".join(f"- {p}" for p in checked_attachments) if all_visual:
if command and not command_has_paths: has_video = any(_is_video_attachment_path(p) for p in checked_attachments)
delivery_command = ( media_label = "图片/视频" if has_video else "图片"
f"{command}\n\n" if command:
"[Attached files]\n" delivery_command = (
f"{attachment_block}\n\n" f"{command}\n\n"
"Please process the attached file(s) listed above when answering this request." "【附件处理要求】\n"
) f"1) 附件中的{media_label}已作为多模态输入提供,优先直接理解并回答。\n"
elif not command: "2) 若当前模型无法直接理解图片或视频,必须先明确回复:"
delivery_command = ( "\"无法直接理解该图片/视频,正在调用工具解析。\"\n"
"Please process the uploaded file(s) listed below:\n" "3) 在给出上述提示后,再调用工具解析附件并继续完成用户任务。\n"
f"{attachment_block}" "4) 除非用户明确要求,不要先调用工具读取附件文件。\n"
) "5) 回复语言必须遵循 USER.md若未指定则与用户当前输入语言保持一致。\n"
"6) 仅基于可见内容回答;看不清或无法确认的部分请明确说明,不要猜测。"
)
else:
delivery_command = (
f"请直接分析已附带的{media_label}并总结关键信息。\n"
"若当前模型无法直接理解图片或视频,请先明确回复:"
"\"无法直接理解该图片/视频,正在调用工具解析。\",然后再调用工具解析。\n"
"回复语言必须遵循 USER.md若未指定则与用户当前输入语言保持一致。\n"
"仅基于可见内容回答;看不清或无法确认的部分请明确说明,不要猜测。"
)
else:
command_has_paths = all(p in command for p in checked_attachments) if command else False
attachment_block = "\n".join(f"- {p}" for p in checked_attachments)
if command and not command_has_paths:
delivery_command = (
f"{command}\n\n"
"[Attached files]\n"
f"{attachment_block}\n\n"
"Please process the attached file(s) listed above when answering this request.\n"
"Reply language must follow USER.md. If not specified, use the same language as the user input."
)
elif not command:
delivery_command = (
"Please process the uploaded file(s) listed below:\n"
f"{attachment_block}\n\n"
"Reply language must follow USER.md. If not specified, use the same language as the user input."
)
if display_command or checked_attachments: if display_command or checked_attachments:
_persist_runtime_packet( _persist_runtime_packet(
@ -1704,8 +2020,9 @@ def send_command(bot_id: str, payload: CommandRequest, session: Session = Depend
loop, loop,
) )
success = docker_manager.send_command(bot_id, delivery_command, media=checked_attachments) success = docker_manager.send_command(bot_id, delivery_command, media=delivery_media)
if not success: if not success:
detail = docker_manager.get_last_delivery_error(bot_id)
if loop and loop.is_running(): if loop and loop.is_running():
asyncio.run_coroutine_threadsafe( asyncio.run_coroutine_threadsafe(
manager.broadcast( manager.broadcast(
@ -1715,13 +2032,16 @@ def send_command(bot_id: str, payload: CommandRequest, session: Session = Depend
"channel": "dashboard", "channel": "dashboard",
"payload": { "payload": {
"state": "ERROR", "state": "ERROR",
"action_msg": "command delivery failed", "action_msg": detail or "command delivery failed",
}, },
}, },
), ),
loop, loop,
) )
raise HTTPException(status_code=502, detail="Failed to deliver command to bot dashboard channel") raise HTTPException(
status_code=502,
detail=f"Failed to deliver command to bot dashboard channel{': ' + detail if detail else ''}",
)
return {"success": True} return {"success": True}
@ -1955,6 +2275,7 @@ async def upload_workspace_files(
raise HTTPException(status_code=404, detail="Bot not found") raise HTTPException(status_code=404, detail="Bot not found")
if not files: if not files:
raise HTTPException(status_code=400, detail="no files uploaded") raise HTTPException(status_code=400, detail="no files uploaded")
max_bytes = UPLOAD_MAX_MB * 1024 * 1024
root, upload_dir = _resolve_workspace_path(bot_id, path or "uploads") root, upload_dir = _resolve_workspace_path(bot_id, path or "uploads")
os.makedirs(upload_dir, exist_ok=True) os.makedirs(upload_dir, exist_ok=True)
@ -1976,12 +2297,40 @@ async def upload_workspace_files(
name = f"{base}-{int(datetime.utcnow().timestamp())}{ext}" name = f"{base}-{int(datetime.utcnow().timestamp())}{ext}"
abs_path = os.path.join(safe_dir_real, name) abs_path = os.path.join(safe_dir_real, name)
content = await upload.read() total_size = 0
with open(abs_path, "wb") as f: try:
f.write(content) with open(abs_path, "wb") as f:
while True:
chunk = await upload.read(1024 * 1024)
if not chunk:
break
total_size += len(chunk)
if total_size > max_bytes:
raise HTTPException(
status_code=413,
detail=f"File '{name}' too large (max {max_bytes // (1024 * 1024)}MB)",
)
f.write(chunk)
except HTTPException:
if os.path.exists(abs_path):
os.remove(abs_path)
raise
except OSError as exc:
if os.path.exists(abs_path):
os.remove(abs_path)
raise HTTPException(
status_code=500,
detail=f"Failed to write file '{name}': {exc.strerror or str(exc)}",
)
except Exception:
if os.path.exists(abs_path):
os.remove(abs_path)
raise HTTPException(status_code=500, detail=f"Failed to upload file '{name}'")
finally:
await upload.close()
rel = os.path.relpath(abs_path, root).replace("\\", "/") rel = os.path.relpath(abs_path, root).replace("\\", "/")
rows.append({"name": name, "path": rel, "size": len(content)}) rows.append({"name": name, "path": rel, "size": total_size})
return {"bot_id": bot_id, "files": rows} return {"bot_id": bot_id, "files": rows}

View File

@ -49,7 +49,14 @@
1. 前端提交 Bot 创建参数镜像、provider、模型参数、5 个 md 文件) 1. 前端提交 Bot 创建参数镜像、provider、模型参数、5 个 md 文件)
2. 后端校验镜像可用并写入 `botinstance` 2. 后端校验镜像可用并写入 `botinstance`
3. 启动时生成 workspace 文件并拉起容器 3. 启动时生成 workspace 文件并按资源配额拉起容器CPU/内存/存储)
### 3.4 镜像升级流
1. 在运维面板基础配置中切换 `image_tag`
2. 后端更新 `.nanobot/config.json`
3. 用户手动执行 stop -> start使新镜像生效
4. `workspace` 目录保持挂载,既有插件与脚本继续可用
### 3.3 运维流 ### 3.3 运维流

View File

@ -34,9 +34,13 @@
- `temperature` - `temperature`
- `top_p` - `top_p`
- `max_tokens` - `max_tokens`
- `cpu_cores`
- `memory_mb`
- `storage_gb`
- 参数编辑交互: - 参数编辑交互:
- `temperature`、`top_p` 使用滑轨 - `temperature`、`top_p` 使用滑轨
- `max_tokens` 使用数字输入 - `max_tokens` 使用数字输入 + 快捷档位
- `cpu_cores`、`memory_mb`、`storage_gb` 使用数字输入
- 代理文件配置项(全部可编辑并持久化): - 代理文件配置项(全部可编辑并持久化):
- `AGENTS.md` - `AGENTS.md`
- `SOUL.md` - `SOUL.md`
@ -91,6 +95,17 @@
- `.nanobot/workspace/USER.md` - `.nanobot/workspace/USER.md`
- `.nanobot/workspace/TOOLS.md` - `.nanobot/workspace/TOOLS.md`
- `.nanobot/workspace/IDENTITY.md` - `.nanobot/workspace/IDENTITY.md`
- 容器资源配额来自 `config.json.runtime.resources`
- `cpuCores`
- `memoryMB`
- `storageGB`
### 3.3 基础镜像升级策略
- 运维面板基础配置允许切换 `image_tag`
- 切换镜像时后端仅更新配置文件,不自动重启容器。
- 需由用户手动执行停止/启动,使新镜像生效。
- 因 workspace 挂载路径不变,已安装插件、脚本和产物文件会保留。
### 3.3 Provider 测试接口 ### 3.3 Provider 测试接口

View File

@ -1,4 +1,4 @@
# Dashboard Nanobot 数据库设计文档(同步到当前代码 # Dashboard Nanobot 数据库设计文档(当前实现
数据库默认使用 SQLite`data/nanobot_dashboard.db`。 数据库默认使用 SQLite`data/nanobot_dashboard.db`。
@ -6,29 +6,12 @@
```mermaid ```mermaid
erDiagram erDiagram
BOTINSTANCE ||--o{ CHANNELROUTE : "路由" BOTINSTANCE ||--o{ BOTMESSAGE : "messages"
BOTINSTANCE ||--o{ BOTMESSAGE : "消息" NANOBOTIMAGE ||--o{ BOTINSTANCE : "referenced by"
NANOBOTIMAGE ||--o{ BOTINSTANCE : "被引用"
BOTINSTANCE { BOTINSTANCE {
string id PK string id PK
string name string name
text system_prompt
text soul_md
text agents_md
text user_md
text tools_md
text identity_md
text tools_config_json
string llm_provider
string llm_model
string api_key
string api_base
float temperature
float top_p
int max_tokens
float presence_penalty
float frequency_penalty
string workspace_dir UK string workspace_dir UK
string docker_status string docker_status
string image_tag string image_tag
@ -55,76 +38,48 @@ erDiagram
string source_dir string source_dir
datetime created_at datetime created_at
} }
CHANNELROUTE {
int id PK
string bot_id FK
string channel_type
string external_app_id
string app_secret
text extra_config
int internal_port
boolean is_active
}
``` ```
## 2. 表说明 ## 2. 设计原则
### 2.1 `botinstance` - 数据库只保留运行索引和历史消息。
- Bot 参数模型、渠道、资源配额、5 个 MD 文件)统一持久化在:
- `.nanobot/config.json`
- `.nanobot/workspace/*.md`
- `.nanobot/env.json`
- `channelroute` 已废弃,不再使用数据库存储渠道。
存储机器人实例核心配置与运行参数。 ## 3. 表说明
- 运行与镜像:`docker_status`、`image_tag` ### 3.1 `botinstance`
- LLM 基础:`llm_provider`、`llm_model`、`api_key`、`api_base`
- LLM 参数:`temperature`、`top_p`、`max_tokens`
- Agent Bootstrap 内容:`soul_md`、`agents_md`、`user_md`、`tools_md`、`identity_md`
- 工具配置:`tools_config_json`
- 运行态:`current_state`、`last_action`
### 2.2 `botmessage` 仅存基础索引与运行态:
保存 Dashboard 渠道持久化对话消息。 - 标识与展示:`id`、`name`
- 容器与镜像:`docker_status`、`image_tag`
- 运行状态:`current_state`、`last_action`
- 路径与时间:`workspace_dir`、`created_at`、`updated_at`
- `role``user | assistant` ### 3.2 `botmessage`
- `text`:消息正文
- `media_json`附件路径列表JSON 字符串)
### 2.3 `nanobotimage` Dashboard 渠道对话历史(用于会话回放):
镜像登记表(只记录显式登记镜像)。 - `role`: `user | assistant`
- `text`: 文本内容
- `media_json`: 附件相对路径 JSON
### 3.3 `nanobotimage`
基础镜像登记表(手动注册):
- `tag`: 如 `nanobot-base:v0.1.4` - `tag`: 如 `nanobot-base:v0.1.4`
- `status`: `READY | BUILDING | ERROR | UNKNOWN` - `status`: `READY | UNKNOWN | ERROR`
- `source_dir`: 当前手工模式通常为 `manual` - `source_dir`: 来源标识(通常 `manual`
### 2.4 `channelroute` ## 4. 迁移策略
渠道路由映射表。 服务启动时:
- `dashboard` 为内置渠道,不允许删除或禁用。
- `extra_config` 保存各渠道差异化字段。
## 3. 已移除的数据表
以下旧表已废弃,并在服务启动时自动删除:
- `skillregistry`
- `botskillmapping`
技能管理改为 **workspace 文件系统模式**
- 路径:`workspace/bots/{bot_id}/.nanobot/workspace/skills/`
- 管理 API`/api/bots/{bot_id}/skills`、`/api/bots/{bot_id}/skills/upload`、`/api/bots/{bot_id}/skills/{skill_name}`
## 4. 启动迁移策略(当前实现)
服务启动时自动执行:
1. `SQLModel.metadata.create_all(engine)` 1. `SQLModel.metadata.create_all(engine)`
2. 删除旧 skills 表(`DROP TABLE IF EXISTS botskillmapping/skillregistry` 2. 清理废弃表:`DROP TABLE IF EXISTS channelroute`
3. 针对 `botinstance``botmessage` 做增量列补齐(`ALTER TABLE ADD COLUMN` 3. 对 `botinstance` 做列对齐,删除历史遗留配置列(保留当前最小字段集)
## 5. 安全与一致性
- `api_key`、`app_secret` 建议加密存储(当前代码为明文字段,生产需加密层)。
- 启动 Bot 前,以数据库字段 + 渠道路由重新生成 workspace 文件,确保配置一致性。

View File

@ -15,6 +15,7 @@ services:
APP_PORT: 8000 APP_PORT: 8000
APP_RELOAD: "false" APP_RELOAD: "false"
DATABASE_ECHO: "false" DATABASE_ECHO: "false"
UPLOAD_MAX_MB: ${UPLOAD_MAX_MB:-100}
DATA_ROOT: ${HOST_DATA_ROOT} DATA_ROOT: ${HOST_DATA_ROOT}
BOTS_WORKSPACE_ROOT: ${HOST_BOTS_WORKSPACE_ROOT} BOTS_WORKSPACE_ROOT: ${HOST_BOTS_WORKSPACE_ROOT}
DATABASE_URL: ${DATABASE_URL:-} DATABASE_URL: ${DATABASE_URL:-}

View File

@ -5,6 +5,7 @@ upstream nanobot_backend {
server { server {
listen 80; listen 80;
server_name _; server_name _;
client_max_body_size 100m;
root /usr/share/nginx/html; root /usr/share/nginx/html;
index index.html; index index.html;
@ -12,6 +13,9 @@ server {
location /api/ { location /api/ {
proxy_pass http://nanobot_backend/api/; proxy_pass http://nanobot_backend/api/;
proxy_http_version 1.1; proxy_http_version 1.1;
proxy_request_buffering off;
proxy_read_timeout 300s;
proxy_send_timeout 300s;
proxy_set_header Host $host; proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

View File

@ -6,7 +6,7 @@ export const dashboardEn = {
thinking: 'Nanobot is thinking...', thinking: 'Nanobot is thinking...',
fileReadFail: 'Failed to read file.', fileReadFail: 'Failed to read file.',
workspaceLoadFail: 'Failed to load workspace tree.', workspaceLoadFail: 'Failed to load workspace tree.',
channelSaved: 'Channel saved.', channelSaved: 'Channel saved (effective after bot restart).',
channelSaveFail: 'Failed to save channel.', channelSaveFail: 'Failed to save channel.',
channelAddFail: 'Failed to add channel.', channelAddFail: 'Failed to add channel.',
channelDeleteConfirm: (channelType: string) => `Delete channel ${channelType}?`, channelDeleteConfirm: (channelType: string) => `Delete channel ${channelType}?`,
@ -18,13 +18,14 @@ export const dashboardEn = {
uploadFile: 'Upload file', uploadFile: 'Upload file',
uploadingFile: 'Uploading...', uploadingFile: 'Uploading...',
uploadFail: 'File upload failed.', uploadFail: 'File upload failed.',
uploadTooLarge: (files: string, limitMb: number) => `These files exceed the upload limit (${limitMb}MB): ${files}`,
attachmentMessage: '[attachment message]', attachmentMessage: '[attachment message]',
removeAttachment: 'Remove attachment', removeAttachment: 'Remove attachment',
sendFailMsg: (msg: string) => `Command delivery failed: ${msg}`, sendFailMsg: (msg: string) => `Command delivery failed: ${msg}`,
providerRequired: 'Set provider/model/new API key before testing.', providerRequired: 'Set provider/model/new API key before testing.',
connOk: (preview: string) => (preview ? `Connection passed, models: ${preview}` : 'Connection passed'), connOk: (preview: string) => (preview ? `Connection passed, models: ${preview}` : 'Connection passed'),
connFail: (msg: string) => `Failed: ${msg}`, connFail: (msg: string) => `Failed: ${msg}`,
configUpdated: 'Configuration updated.', configUpdated: 'Configuration updated (effective after bot restart).',
saveFail: 'Save failed.', saveFail: 'Save failed.',
deleteBotConfirm: (id: string) => `Delete bot ${id}? Workspace will be removed.`, deleteBotConfirm: (id: string) => `Delete bot ${id}? Workspace will be removed.`,
deleteBotDone: 'Bot and workspace deleted.', deleteBotDone: 'Bot and workspace deleted.',
@ -105,7 +106,7 @@ export const dashboardEn = {
botIdReadonly: 'Bot ID (Read-only)', botIdReadonly: 'Bot ID (Read-only)',
botName: 'Bot Name', botName: 'Bot Name',
botNamePlaceholder: 'Bot name', botNamePlaceholder: 'Bot name',
baseImageReadonly: 'Base Image (Read-only)', baseImageReadonly: 'Base Image',
modelName: 'Model Name', modelName: 'Model Name',
modelNamePlaceholder: 'e.g. qwen-plus', modelNamePlaceholder: 'e.g. qwen-plus',
newApiKey: 'New API Key (optional)', newApiKey: 'New API Key (optional)',

View File

@ -6,7 +6,7 @@ export const dashboardZhCn = {
thinking: 'Nanobot 正在思考中...', thinking: 'Nanobot 正在思考中...',
fileReadFail: '文件读取失败。', fileReadFail: '文件读取失败。',
workspaceLoadFail: '无法读取 workspace 目录。', workspaceLoadFail: '无法读取 workspace 目录。',
channelSaved: '渠道配置已保存。', channelSaved: '渠道配置已保存(重启 Bot 后生效)。',
channelSaveFail: '渠道保存失败。', channelSaveFail: '渠道保存失败。',
channelAddFail: '新增渠道失败。', channelAddFail: '新增渠道失败。',
channelDeleteConfirm: (channelType: string) => `确认删除渠道 ${channelType}`, channelDeleteConfirm: (channelType: string) => `确认删除渠道 ${channelType}`,
@ -18,13 +18,14 @@ export const dashboardZhCn = {
uploadFile: '上传文件', uploadFile: '上传文件',
uploadingFile: '上传中...', uploadingFile: '上传中...',
uploadFail: '文件上传失败。', uploadFail: '文件上传失败。',
uploadTooLarge: (files: string, limitMb: number) => `以下文件超过上传上限 ${limitMb}MB${files}`,
attachmentMessage: '[附件消息]', attachmentMessage: '[附件消息]',
removeAttachment: '移除附件', removeAttachment: '移除附件',
sendFailMsg: (msg: string) => `指令发送失败:${msg}`, sendFailMsg: (msg: string) => `指令发送失败:${msg}`,
providerRequired: '请填写 Provider、模型和新 API Key 后再测试。', providerRequired: '请填写 Provider、模型和新 API Key 后再测试。',
connOk: (preview: string) => (preview ? `连接成功,模型: ${preview}` : '连接成功'), connOk: (preview: string) => (preview ? `连接成功,模型: ${preview}` : '连接成功'),
connFail: (msg: string) => `连接失败: ${msg}`, connFail: (msg: string) => `连接失败: ${msg}`,
configUpdated: '配置已更新。', configUpdated: '配置已更新(重启 Bot 后生效)。',
saveFail: '保存失败。', saveFail: '保存失败。',
deleteBotConfirm: (id: string) => `确认删除 Bot ${id}?将删除对应 workspace。`, deleteBotConfirm: (id: string) => `确认删除 Bot ${id}?将删除对应 workspace。`,
deleteBotDone: 'Bot 与 workspace 已删除。', deleteBotDone: 'Bot 与 workspace 已删除。',
@ -105,7 +106,7 @@ export const dashboardZhCn = {
botIdReadonly: 'Bot ID只读', botIdReadonly: 'Bot ID只读',
botName: 'Bot 名称', botName: 'Bot 名称',
botNamePlaceholder: '输入 Bot 名称', botNamePlaceholder: '输入 Bot 名称',
baseImageReadonly: '基础镜像(只读)', baseImageReadonly: '基础镜像',
modelName: '模型名称', modelName: '模型名称',
modelNamePlaceholder: '如 qwen-plus', modelNamePlaceholder: '如 qwen-plus',
newApiKey: '新的 API Key留空不更新', newApiKey: '新的 API Key留空不更新',

View File

@ -619,6 +619,12 @@
background: color-mix(in oklab, var(--panel-soft) 78%, transparent); background: color-mix(in oklab, var(--panel-soft) 78%, transparent);
} }
.ops-pending-open {
max-width: 300px;
min-width: 0;
flex: 1 1 auto;
}
.ops-chip-remove { .ops-chip-remove {
width: 18px; width: 18px;
height: 18px; height: 18px;

View File

@ -1,6 +1,6 @@
import { useEffect, useMemo, useRef, useState, type AnchorHTMLAttributes, type ChangeEvent, type KeyboardEvent, type ReactNode } from 'react'; import { useEffect, useMemo, useRef, useState, type AnchorHTMLAttributes, type ChangeEvent, type KeyboardEvent, type ReactNode } from 'react';
import axios from 'axios'; import axios from 'axios';
import { Activity, Boxes, Check, Clock3, Download, EllipsisVertical, Eye, EyeOff, FileText, FolderOpen, Hammer, Maximize2, MessageSquareText, Minimize2, Paperclip, Plus, Power, PowerOff, RefreshCw, Repeat2, Save, Settings2, SlidersHorizontal, TriangleAlert, Trash2, UserRound, Waypoints, X } from 'lucide-react'; import { Activity, Boxes, Check, Clock3, Download, EllipsisVertical, Eye, EyeOff, FileText, FolderOpen, Gauge, Hammer, Maximize2, MessageSquareText, Minimize2, Paperclip, Plus, Power, PowerOff, RefreshCw, Repeat2, Save, Settings2, SlidersHorizontal, Square, TriangleAlert, Trash2, UserRound, Waypoints, X } from 'lucide-react';
import ReactMarkdown from 'react-markdown'; import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm'; import remarkGfm from 'remark-gfm';
import rehypeRaw from 'rehype-raw'; import rehypeRaw from 'rehype-raw';
@ -115,6 +115,24 @@ interface BotChannel {
locked?: boolean; locked?: boolean;
} }
interface NanobotImage {
tag: string;
status: string;
}
interface DockerImage {
tag: string;
version?: string;
image_id?: string;
}
interface BaseImageOption {
tag: string;
label: string;
disabled: boolean;
needsRegister: boolean;
}
interface WorkspaceSkillOption { interface WorkspaceSkillOption {
id: string; id: string;
name: string; name: string;
@ -125,6 +143,51 @@ interface WorkspaceSkillOption {
description?: string; description?: string;
} }
interface BotResourceSnapshot {
bot_id: string;
docker_status: string;
configured: {
cpu_cores: number;
memory_mb: number;
storage_gb: number;
};
runtime: {
docker_status: string;
limits: {
cpu_cores?: number | null;
memory_bytes?: number | null;
storage_bytes?: number | null;
nano_cpus?: number;
storage_opt_raw?: string;
};
usage: {
cpu_percent: number;
memory_bytes: number;
memory_limit_bytes: number;
memory_percent: number;
network_rx_bytes: number;
network_tx_bytes: number;
blk_read_bytes: number;
blk_write_bytes: number;
pids: number;
container_rw_bytes: number;
};
};
workspace: {
path: string;
usage_bytes: number;
configured_limit_bytes?: number | null;
usage_percent: number;
};
enforcement: {
cpu_limited: boolean;
memory_limited: boolean;
storage_limited: boolean;
};
note: string;
collected_at: string;
}
interface SkillUploadResponse { interface SkillUploadResponse {
status: string; status: string;
bot_id: string; bot_id: string;
@ -132,6 +195,12 @@ interface SkillUploadResponse {
skills: WorkspaceSkillOption[]; skills: WorkspaceSkillOption[];
} }
interface SystemDefaultsResponse {
limits?: {
upload_max_mb?: number;
};
}
type BotEnvParams = Record<string, string>; type BotEnvParams = Record<string, string>;
const providerPresets: Record<string, { model: string; apiBase?: string; note: { 'zh-cn': string; en: string } }> = { const providerPresets: Record<string, { model: string; apiBase?: string; note: { 'zh-cn': string; en: string } }> = {
@ -333,6 +402,44 @@ function clampTemperature(value: number) {
return Math.min(1, Math.max(0, value)); return Math.min(1, Math.max(0, value));
} }
function clampMaxTokens(value: number) {
if (Number.isNaN(value)) return 8192;
return Math.min(32768, Math.max(256, Math.round(value)));
}
function clampCpuCores(value: number) {
if (Number.isNaN(value)) return 1;
if (value === 0) return 0;
return Math.min(16, Math.max(0.1, Math.round(value * 10) / 10));
}
function clampMemoryMb(value: number) {
if (Number.isNaN(value)) return 1024;
if (value === 0) return 0;
return Math.min(65536, Math.max(256, Math.round(value)));
}
function clampStorageGb(value: number) {
if (Number.isNaN(value)) return 10;
if (value === 0) return 0;
return Math.min(1024, Math.max(1, Math.round(value)));
}
function formatBytes(bytes: number): string {
  // Render a byte count as a human-readable size, e.g. 1536 -> "1.50 KB".
  const value = Number(bytes || 0);
  if (!Number.isFinite(value) || value <= 0) return '0 B';
  const units = ['B', 'KB', 'MB', 'GB', 'TB'];
  // Clamp the unit index to the table bounds on BOTH sides: without the lower
  // bound, a fractional value below 1 byte yields Math.floor(log(v)/log(1024)) === -1,
  // so units[-1] is undefined and the result reads like "512.0 undefined".
  const idx = Math.min(units.length - 1, Math.max(0, Math.floor(Math.log(value) / Math.log(1024))));
  const scaled = value / Math.pow(1024, idx);
  // One decimal for mantissas >= 10, two decimals otherwise.
  return `${scaled >= 10 ? scaled.toFixed(1) : scaled.toFixed(2)} ${units[idx]}`;
}
function formatPercent(value: number): string {
  // Normalize falsy/NaN inputs to zero, floor negatives at 0, render as "12.50%".
  const numeric = Number(value || 0);
  if (!Number.isFinite(numeric)) {
    return '0.00%';
  }
  const nonNegative = numeric < 0 ? 0 : numeric;
  return `${nonNegative.toFixed(2)}%`;
}
function formatCronSchedule(job: CronJob, isZh: boolean) { function formatCronSchedule(job: CronJob, isZh: boolean) {
const s = job.schedule || {}; const s = job.schedule || {};
if (s.kind === 'every' && Number(s.everyMs) > 0) { if (s.kind === 'every' && Number(s.everyMs) > 0) {
@ -375,6 +482,11 @@ export function BotDashboardModule({
const [showEnvParamsModal, setShowEnvParamsModal] = useState(false); const [showEnvParamsModal, setShowEnvParamsModal] = useState(false);
const [showCronModal, setShowCronModal] = useState(false); const [showCronModal, setShowCronModal] = useState(false);
const [showAgentModal, setShowAgentModal] = useState(false); const [showAgentModal, setShowAgentModal] = useState(false);
// Resource-monitor modal state: visibility, target bot, latest snapshot, and fetch status.
const [showResourceModal, setShowResourceModal] = useState(false);
const [resourceBotId, setResourceBotId] = useState('');
const [resourceSnapshot, setResourceSnapshot] = useState<BotResourceSnapshot | null>(null);
const [resourceLoading, setResourceLoading] = useState(false);
const [resourceError, setResourceError] = useState('');
const [agentTab, setAgentTab] = useState<AgentTab>('AGENTS'); const [agentTab, setAgentTab] = useState<AgentTab>('AGENTS');
const [isTestingProvider, setIsTestingProvider] = useState(false); const [isTestingProvider, setIsTestingProvider] = useState(false);
const [providerTestResult, setProviderTestResult] = useState(''); const [providerTestResult, setProviderTestResult] = useState('');
@ -408,10 +520,13 @@ export function BotDashboardModule({
const [envVisibleByKey, setEnvVisibleByKey] = useState<Record<string, boolean>>({}); const [envVisibleByKey, setEnvVisibleByKey] = useState<Record<string, boolean>>({});
const [isSavingChannel, setIsSavingChannel] = useState(false); const [isSavingChannel, setIsSavingChannel] = useState(false);
const [isSavingGlobalDelivery, setIsSavingGlobalDelivery] = useState(false); const [isSavingGlobalDelivery, setIsSavingGlobalDelivery] = useState(false);
// Registered nanobot images and raw local Docker images; merged into the base-image options.
const [availableImages, setAvailableImages] = useState<NanobotImage[]>([]);
const [localDockerImages, setLocalDockerImages] = useState<DockerImage[]>([]);
const [globalDelivery, setGlobalDelivery] = useState<{ sendProgress: boolean; sendToolHints: boolean }>({ const [globalDelivery, setGlobalDelivery] = useState<{ sendProgress: boolean; sendToolHints: boolean }>({
sendProgress: false, sendProgress: false,
sendToolHints: false, sendToolHints: false,
}); });
const [uploadMaxMb, setUploadMaxMb] = useState(100);
const [newChannelType, setNewChannelType] = useState<ChannelType>('feishu'); const [newChannelType, setNewChannelType] = useState<ChannelType>('feishu');
const [runtimeViewMode, setRuntimeViewMode] = useState<RuntimeViewMode>('visual'); const [runtimeViewMode, setRuntimeViewMode] = useState<RuntimeViewMode>('visual');
const [runtimeMenuOpen, setRuntimeMenuOpen] = useState(false); const [runtimeMenuOpen, setRuntimeMenuOpen] = useState(false);
@ -607,12 +722,21 @@ export function BotDashboardModule({
temperature: 0.2, temperature: 0.2,
top_p: 1, top_p: 1,
max_tokens: 8192, max_tokens: 8192,
cpu_cores: 1,
memory_mb: 1024,
storage_gb: 10,
agents_md: '', agents_md: '',
soul_md: '', soul_md: '',
user_md: '', user_md: '',
tools_md: '', tools_md: '',
identity_md: '', identity_md: '',
}); });
// Free-text drafts for the numeric parameter inputs; clamped and committed on save.
const [paramDraft, setParamDraft] = useState({
  max_tokens: '8192',
  cpu_cores: '1',
  memory_mb: '1024',
  storage_gb: '10',
});
const bots = useMemo(() => Object.values(activeBots), [activeBots]); const bots = useMemo(() => Object.values(activeBots), [activeBots]);
const selectedBot = selectedBotId ? activeBots[selectedBotId] : undefined; const selectedBot = selectedBotId ? activeBots[selectedBotId] : undefined;
@ -623,6 +747,49 @@ export function BotDashboardModule({
const noteLocale = pickLocale(locale, { 'zh-cn': 'zh-cn' as const, en: 'en' as const }); const noteLocale = pickLocale(locale, { 'zh-cn': 'zh-cn' as const, en: 'en' as const });
const t = pickLocale(locale, { 'zh-cn': dashboardZhCn, en: dashboardEn }); const t = pickLocale(locale, { 'zh-cn': dashboardZhCn, en: dashboardEn });
const lc = isZh ? channelsZhCn : channelsEn; const lc = isZh ? channelsZhCn : channelsEn;
// Build the selectable base-image list: the union of registered images, local
// Docker images, and the bot's currently selected tag, sorted and annotated
// with availability (READY / local-but-unregistered / unavailable).
const baseImageOptions = useMemo<BaseImageOption[]>(() => {
  // Tags whose registry entry is READY are immediately usable.
  const readyTags = new Set(
    availableImages
      .filter((img) => String(img.status || '').toUpperCase() === 'READY')
      .map((img) => String(img.tag || '').trim())
      .filter(Boolean),
  );
  const allTags = new Set<string>();
  localDockerImages.forEach((img) => {
    const tag = String(img.tag || '').trim();
    if (tag) allTags.add(tag);
  });
  availableImages.forEach((img) => {
    const tag = String(img.tag || '').trim();
    if (tag) allTags.add(tag);
  });
  // Keep the bot's current tag visible even when it is no longer listed anywhere.
  if (editForm.image_tag) {
    allTags.add(editForm.image_tag);
  }
  return Array.from(allTags)
    .sort((a, b) => a.localeCompare(b))
    .map((tag) => {
      const isReady = readyTags.has(tag);
      if (isReady) {
        return { tag, label: `${tag} · READY`, disabled: false, needsRegister: false };
      }
      // Present in local Docker but not registered: selectable, registered on save.
      const hasInDocker = localDockerImages.some((row) => String(row.tag || '').trim() === tag);
      if (hasInDocker) {
        return {
          tag,
          label: isZh ? `${tag} · 本地镜像(未登记)` : `${tag} · local image (unregistered)`,
          disabled: false,
          needsRegister: true,
        };
      }
      // Unknown everywhere: shown for context but not selectable.
      return {
        tag,
        label: isZh ? `${tag} · 不可用` : `${tag} · unavailable`,
        disabled: true,
        needsRegister: false,
      };
    });
}, [availableImages, localDockerImages, editForm.image_tag, isZh]);
const runtimeMoreLabel = isZh ? '更多' : 'More'; const runtimeMoreLabel = isZh ? '更多' : 'More';
const selectedBotControlState = selectedBot ? controlStateByBot[selectedBot.id] : undefined; const selectedBotControlState = selectedBot ? controlStateByBot[selectedBot.id] : undefined;
const isSending = selectedBot ? Boolean(sendingByBot[selectedBot.id]) : false; const isSending = selectedBot ? Boolean(sendingByBot[selectedBot.id]) : false;
@ -702,6 +869,7 @@ export function BotDashboardModule({
return Boolean(full && full !== '-' && summary && full !== summary); return Boolean(full && full !== '-' && summary && full !== summary);
}, [runtimeAction, runtimeActionSummary]); }, [runtimeAction, runtimeActionSummary]);
const runtimeActionDisplay = runtimeActionHasMore ? runtimeActionSummary : runtimeAction; const runtimeActionDisplay = runtimeActionHasMore ? runtimeActionSummary : runtimeAction;
const resourceBot = useMemo(() => bots.find((b) => b.id === resourceBotId), [bots, resourceBotId]);
const shouldCollapseProgress = (text: string) => { const shouldCollapseProgress = (text: string) => {
const normalized = String(text || '').trim(); const normalized = String(text || '').trim();
@ -848,6 +1016,24 @@ export function BotDashboardModule({
setShowRuntimeActionModal(false); setShowRuntimeActionModal(false);
}, [selectedBotId]); }, [selectedBotId]);
// Fetch the backend upload-size limit once on mount; the `alive` flag prevents
// a state update if the response arrives after the component unmounts.
useEffect(() => {
  let alive = true;
  const loadSystemDefaults = async () => {
    try {
      const res = await axios.get<SystemDefaultsResponse>(`${APP_ENDPOINTS.apiBase}/system/defaults`);
      const configured = Number(res.data?.limits?.upload_max_mb);
      // Ignore missing/invalid limits and late responses.
      if (!Number.isFinite(configured) || configured <= 0 || !alive) return;
      setUploadMaxMb(Math.max(1, Math.floor(configured)));
    } catch {
      // keep default limit
    }
  };
  void loadSystemDefaults();
  return () => {
    alive = false;
  };
}, []);
useEffect(() => { useEffect(() => {
if (!compactMode) { if (!compactMode) {
setIsCompactMobile(false); setIsCompactMobile(false);
@ -875,13 +1061,22 @@ export function BotDashboardModule({
api_base: bot.api_base || '', api_base: bot.api_base || '',
temperature: clampTemperature(bot.temperature ?? 0.2), temperature: clampTemperature(bot.temperature ?? 0.2),
top_p: bot.top_p ?? 1, top_p: bot.top_p ?? 1,
max_tokens: bot.max_tokens ?? 8192, max_tokens: clampMaxTokens(bot.max_tokens ?? 8192),
cpu_cores: clampCpuCores(bot.cpu_cores ?? 1),
memory_mb: clampMemoryMb(bot.memory_mb ?? 1024),
storage_gb: clampStorageGb(bot.storage_gb ?? 10),
agents_md: bot.agents_md || '', agents_md: bot.agents_md || '',
soul_md: bot.soul_md || bot.system_prompt || '', soul_md: bot.soul_md || bot.system_prompt || '',
user_md: bot.user_md || '', user_md: bot.user_md || '',
tools_md: bot.tools_md || '', tools_md: bot.tools_md || '',
identity_md: bot.identity_md || '', identity_md: bot.identity_md || '',
}); });
setParamDraft({
max_tokens: String(clampMaxTokens(bot.max_tokens ?? 8192)),
cpu_cores: String(clampCpuCores(bot.cpu_cores ?? 1)),
memory_mb: String(clampMemoryMb(bot.memory_mb ?? 1024)),
storage_gb: String(clampStorageGb(bot.storage_gb ?? 10)),
});
setPendingAttachments([]); setPendingAttachments([]);
}, [selectedBotId, selectedBot?.id]); }, [selectedBotId, selectedBot?.id]);
@ -896,11 +1091,78 @@ export function BotDashboardModule({
}); });
}, [selectedBotId, selectedBot?.send_progress, selectedBot?.send_tool_hints]); }, [selectedBotId, selectedBot?.send_progress, selectedBot?.send_tool_hints]);
const refresh = async () => { const loadImageOptions = async () => {
const res = await axios.get(`${APP_ENDPOINTS.apiBase}/bots`); const [imagesRes, dockerImagesRes] = await Promise.allSettled([
setBots(res.data); axios.get<NanobotImage[]>(`${APP_ENDPOINTS.apiBase}/images`),
axios.get<DockerImage[]>(`${APP_ENDPOINTS.apiBase}/docker-images`),
]);
if (imagesRes.status === 'fulfilled') {
setAvailableImages(Array.isArray(imagesRes.value.data) ? imagesRes.value.data : []);
} else {
setAvailableImages([]);
}
if (dockerImagesRes.status === 'fulfilled') {
setLocalDockerImages(Array.isArray(dockerImagesRes.value.data) ? dockerImagesRes.value.data : []);
} else {
setLocalDockerImages([]);
}
}; };
// Reload the bot list from the backend, then refresh the image option caches.
const refresh = async () => {
  const response = await axios.get(`${APP_ENDPOINTS.apiBase}/bots`);
  setBots(response.data);
  await loadImageOptions();
};
// Fetch the live resource snapshot for one bot; no-op when no bot id is given.
// Loading/error state is managed around the request so the modal can render progress.
const loadResourceSnapshot = async (botId: string) => {
  if (!botId) return;
  setResourceLoading(true);
  setResourceError('');
  try {
    const response = await axios.get<BotResourceSnapshot>(`${APP_ENDPOINTS.apiBase}/bots/${botId}/resources`);
    setResourceSnapshot(response.data);
  } catch (err: any) {
    // Prefer the backend-supplied detail; otherwise fall back to a localized message.
    const message = err?.response?.data?.detail || (isZh ? '读取资源监控失败。' : 'Failed to load resource metrics.');
    setResourceError(String(message));
  } finally {
    setResourceLoading(false);
  }
};
// Open the resource-monitor modal for a bot and trigger the initial snapshot fetch.
const openResourceMonitor = (botId: string) => {
  setResourceBotId(botId);
  setShowResourceModal(true);
  void loadResourceSnapshot(botId);
};
// Load image options once on mount.
useEffect(() => {
  void loadImageOptions();
  // eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
// Reload image options whenever the base-settings modal is opened.
useEffect(() => {
  if (!showBaseModal) return;
  void loadImageOptions();
  // eslint-disable-next-line react-hooks/exhaustive-deps
}, [showBaseModal]);
// While the resource modal is open, poll the snapshot every 2 seconds.
// The `stopped` flag is a belt-and-braces guard so a tick does not fetch
// after cleanup, in addition to clearing the interval itself.
useEffect(() => {
  if (!showResourceModal || !resourceBotId) return;
  let stopped = false;
  const tick = async () => {
    if (stopped) return;
    await loadResourceSnapshot(resourceBotId);
  };
  const timer = window.setInterval(() => {
    void tick();
  }, 2000);
  return () => {
    stopped = true;
    window.clearInterval(timer);
  };
  // eslint-disable-next-line react-hooks/exhaustive-deps
}, [showResourceModal, resourceBotId]);
const openWorkspaceFilePreview = async (path: string) => { const openWorkspaceFilePreview = async (path: string) => {
if (!selectedBotId || !path) return; if (!selectedBotId || !path) return;
const normalizedPath = String(path || '').trim(); const normalizedPath = String(path || '').trim();
@ -1404,15 +1666,23 @@ export function BotDashboardModule({
const onPickAttachments = async (event: ChangeEvent<HTMLInputElement>) => { const onPickAttachments = async (event: ChangeEvent<HTMLInputElement>) => {
if (!selectedBot || !event.target.files || event.target.files.length === 0) return; if (!selectedBot || !event.target.files || event.target.files.length === 0) return;
const files = Array.from(event.target.files); const files = Array.from(event.target.files);
const maxBytes = uploadMaxMb * 1024 * 1024;
const tooLarge = files.filter((f) => Number(f.size) > maxBytes);
if (tooLarge.length > 0) {
const names = tooLarge.map((f) => String(f.name || '').trim() || 'unknown').slice(0, 3).join(', ');
notify(t.uploadTooLarge(names, uploadMaxMb), { tone: 'warning' });
event.target.value = '';
return;
}
const formData = new FormData(); const formData = new FormData();
files.forEach((file) => formData.append('files', file)); files.forEach((file) => formData.append('files', file));
formData.append('path', 'uploads');
setIsUploadingAttachments(true); setIsUploadingAttachments(true);
try { try {
const res = await axios.post<WorkspaceUploadResponse>( const res = await axios.post<WorkspaceUploadResponse>(
`${APP_ENDPOINTS.apiBase}/bots/${selectedBot.id}/workspace/upload`, `${APP_ENDPOINTS.apiBase}/bots/${selectedBot.id}/workspace/upload`,
formData, formData,
{ params: { path: 'uploads' } },
); );
const uploaded = normalizeAttachmentPaths((res.data?.files || []).map((v) => v.path)); const uploaded = normalizeAttachmentPaths((res.data?.files || []).map((v) => v.path));
if (uploaded.length > 0) { if (uploaded.length > 0) {
@ -1518,12 +1788,43 @@ export function BotDashboardModule({
payload.llm_provider = editForm.llm_provider; payload.llm_provider = editForm.llm_provider;
payload.llm_model = editForm.llm_model; payload.llm_model = editForm.llm_model;
payload.api_base = editForm.api_base; payload.api_base = editForm.api_base;
payload.image_tag = editForm.image_tag;
const selectedImageOption = baseImageOptions.find((opt) => opt.tag === editForm.image_tag);
if (selectedImageOption?.disabled) {
throw new Error(isZh ? '当前镜像不可用,请选择可用镜像。' : 'Selected image is unavailable.');
}
if (selectedImageOption?.needsRegister) {
await axios.post(`${APP_ENDPOINTS.apiBase}/images/register`, {
tag: editForm.image_tag,
source_dir: 'manual',
});
}
if (editForm.api_key.trim()) payload.api_key = editForm.api_key.trim(); if (editForm.api_key.trim()) payload.api_key = editForm.api_key.trim();
} }
if (mode === 'params') { if (mode === 'params') {
payload.temperature = clampTemperature(Number(editForm.temperature)); payload.temperature = clampTemperature(Number(editForm.temperature));
payload.top_p = Number(editForm.top_p); payload.top_p = Number(editForm.top_p);
payload.max_tokens = Number(editForm.max_tokens); const normalizedMaxTokens = clampMaxTokens(Number(paramDraft.max_tokens));
const normalizedCpuCores = clampCpuCores(Number(paramDraft.cpu_cores));
const normalizedMemoryMb = clampMemoryMb(Number(paramDraft.memory_mb));
const normalizedStorageGb = clampStorageGb(Number(paramDraft.storage_gb));
payload.max_tokens = normalizedMaxTokens;
payload.cpu_cores = normalizedCpuCores;
payload.memory_mb = normalizedMemoryMb;
payload.storage_gb = normalizedStorageGb;
setEditForm((p) => ({
...p,
max_tokens: normalizedMaxTokens,
cpu_cores: normalizedCpuCores,
memory_mb: normalizedMemoryMb,
storage_gb: normalizedStorageGb,
}));
setParamDraft({
max_tokens: String(normalizedMaxTokens),
cpu_cores: String(normalizedCpuCores),
memory_mb: String(normalizedMemoryMb),
storage_gb: String(normalizedStorageGb),
});
} }
if (mode === 'agent') { if (mode === 'agent') {
payload.agents_md = editForm.agents_md; payload.agents_md = editForm.agents_md;
@ -1726,6 +2027,17 @@ export function BotDashboardModule({
</div> </div>
<div className="ops-bot-meta">{t.image}: <span className="mono">{bot.image_tag || '-'}</span></div> <div className="ops-bot-meta">{t.image}: <span className="mono">{bot.image_tag || '-'}</span></div>
<div className="ops-bot-actions"> <div className="ops-bot-actions">
<button
className="btn btn-secondary btn-sm ops-bot-icon-btn"
onClick={(e) => {
e.stopPropagation();
openResourceMonitor(bot.id);
}}
title={isZh ? '资源监测' : 'Resource Monitor'}
aria-label={isZh ? '资源监测' : 'Resource Monitor'}
>
<Gauge size={14} />
</button>
{bot.docker_status === 'RUNNING' ? ( {bot.docker_status === 'RUNNING' ? (
<button <button
className="btn btn-danger btn-sm icon-btn" className="btn btn-danger btn-sm icon-btn"
@ -1745,7 +2057,7 @@ export function BotDashboardModule({
<i /> <i />
</span> </span>
</span> </span>
) : <PowerOff size={14} />} ) : <Square size={14} />}
</button> </button>
) : ( ) : (
<button <button
@ -1863,10 +2175,39 @@ export function BotDashboardModule({
<div className="ops-pending-files"> <div className="ops-pending-files">
{pendingAttachments.map((p) => ( {pendingAttachments.map((p) => (
<span key={p} className="ops-pending-chip mono"> <span key={p} className="ops-pending-chip mono">
{p.split('/').pop() || p} {(() => {
const filePath = normalizeDashboardAttachmentPath(p);
const fileAction = workspaceFileAction(filePath);
const filename = filePath.split('/').pop() || filePath;
return (
<a
className="ops-attach-link mono ops-pending-open"
href="#"
onClick={(event) => {
event.preventDefault();
event.stopPropagation();
openWorkspacePathFromChat(filePath);
}}
title={fileAction === 'download' ? t.download : fileAction === 'preview' ? t.previewTitle : t.fileNotPreviewable}
>
{fileAction === 'download' ? (
<Download size={12} className="ops-attach-link-icon" />
) : fileAction === 'preview' ? (
<Eye size={12} className="ops-attach-link-icon" />
) : (
<FileText size={12} className="ops-attach-link-icon" />
)}
                        <span className="ops-attach-link-name">{filename}</span>
</a>
);
})()}
<button <button
className="icon-btn ops-chip-remove" className="icon-btn ops-chip-remove"
onClick={() => setPendingAttachments((prev) => prev.filter((v) => v !== p))} onClick={(event) => {
event.preventDefault();
event.stopPropagation();
setPendingAttachments((prev) => prev.filter((v) => v !== p));
}}
title={t.removeAttachment} title={t.removeAttachment}
aria-label={t.removeAttachment} aria-label={t.removeAttachment}
> >
@ -2147,6 +2488,106 @@ export function BotDashboardModule({
</button> </button>
) : null} ) : null}
{showResourceModal && (
<div className="modal-mask" onClick={() => setShowResourceModal(false)}>
<div className="modal-card modal-wide" onClick={(e) => e.stopPropagation()}>
<div className="modal-title-row modal-title-with-close">
<div className="modal-title-main">
<h3>{isZh ? '资源监测' : 'Resource Monitor'}</h3>
<span className="modal-sub mono">{resourceBot?.name || resourceBotId}</span>
</div>
<div className="modal-title-actions">
<button
className="btn btn-secondary btn-sm icon-btn"
onClick={() => void loadResourceSnapshot(resourceBotId)}
title={isZh ? '立即刷新' : 'Refresh now'}
aria-label={isZh ? '立即刷新' : 'Refresh now'}
>
<RefreshCw size={14} className={resourceLoading ? 'animate-spin' : ''} />
</button>
<button
className="btn btn-secondary btn-sm icon-btn"
onClick={() => setShowResourceModal(false)}
title={t.close}
aria-label={t.close}
>
<X size={14} />
</button>
</div>
</div>
{resourceError ? <div className="card">{resourceError}</div> : null}
{resourceSnapshot ? (
<div className="stack">
<div className="card summary-grid">
<div>{isZh ? '容器状态' : 'Container'}: <strong className="mono">{resourceSnapshot.docker_status}</strong></div>
<div>{isZh ? '采样时间' : 'Collected'}: <span className="mono">{resourceSnapshot.collected_at}</span></div>
<div>
{isZh ? 'CPU限制生效' : 'CPU limit'}:{' '}
<strong>
{Number(resourceSnapshot.configured.cpu_cores) === 0
? (isZh ? '不限' : 'UNLIMITED')
: (resourceSnapshot.enforcement.cpu_limited ? 'YES' : 'NO')}
</strong>
</div>
<div>
{isZh ? '内存限制生效' : 'Memory limit'}:{' '}
<strong>
{Number(resourceSnapshot.configured.memory_mb) === 0
? (isZh ? '不限' : 'UNLIMITED')
: (resourceSnapshot.enforcement.memory_limited ? 'YES' : 'NO')}
</strong>
</div>
<div>
{isZh ? '存储限制生效' : 'Storage limit'}:{' '}
<strong>
{Number(resourceSnapshot.configured.storage_gb) === 0
? (isZh ? '不限' : 'UNLIMITED')
: (resourceSnapshot.enforcement.storage_limited ? 'YES' : 'NO')}
</strong>
</div>
</div>
<div className="grid-2" style={{ gridTemplateColumns: '1fr 1fr' }}>
<div className="card stack">
<div className="section-mini-title">{isZh ? '配置配额' : 'Configured Limits'}</div>
<div className="ops-runtime-row"><span>CPU</span><strong>{Number(resourceSnapshot.configured.cpu_cores) === 0 ? (isZh ? '不限' : 'Unlimited') : resourceSnapshot.configured.cpu_cores}</strong></div>
<div className="ops-runtime-row"><span>{isZh ? '内存' : 'Memory'}</span><strong>{Number(resourceSnapshot.configured.memory_mb) === 0 ? (isZh ? '不限' : 'Unlimited') : `${resourceSnapshot.configured.memory_mb} MB`}</strong></div>
<div className="ops-runtime-row"><span>{isZh ? '存储' : 'Storage'}</span><strong>{Number(resourceSnapshot.configured.storage_gb) === 0 ? (isZh ? '不限' : 'Unlimited') : `${resourceSnapshot.configured.storage_gb} GB`}</strong></div>
</div>
<div className="card stack">
<div className="section-mini-title">{isZh ? 'Docker 实际限制' : 'Docker Runtime Limits'}</div>
<div className="ops-runtime-row"><span>CPU</span><strong>{resourceSnapshot.runtime.limits.cpu_cores ? resourceSnapshot.runtime.limits.cpu_cores.toFixed(2) : (Number(resourceSnapshot.configured.cpu_cores) === 0 ? (isZh ? '不限' : 'Unlimited') : '-')}</strong></div>
<div className="ops-runtime-row"><span>{isZh ? '内存' : 'Memory'}</span><strong>{resourceSnapshot.runtime.limits.memory_bytes ? formatBytes(resourceSnapshot.runtime.limits.memory_bytes) : (Number(resourceSnapshot.configured.memory_mb) === 0 ? (isZh ? '不限' : 'Unlimited') : '-')}</strong></div>
<div className="ops-runtime-row"><span>{isZh ? '存储' : 'Storage'}</span><strong>{resourceSnapshot.runtime.limits.storage_bytes ? formatBytes(resourceSnapshot.runtime.limits.storage_bytes) : (resourceSnapshot.runtime.limits.storage_opt_raw || (Number(resourceSnapshot.configured.storage_gb) === 0 ? (isZh ? '不限' : 'Unlimited') : '-'))}</strong></div>
</div>
</div>
<div className="card stack">
<div className="section-mini-title">{isZh ? '实时使用' : 'Live Usage'}</div>
<div className="ops-runtime-row"><span>CPU</span><strong>{formatPercent(resourceSnapshot.runtime.usage.cpu_percent)}</strong></div>
<div className="ops-runtime-row"><span>{isZh ? '内存' : 'Memory'}</span><strong>{formatBytes(resourceSnapshot.runtime.usage.memory_bytes)} / {resourceSnapshot.runtime.usage.memory_limit_bytes > 0 ? formatBytes(resourceSnapshot.runtime.usage.memory_limit_bytes) : '-'}</strong></div>
<div className="ops-runtime-row"><span>{isZh ? '内存占比' : 'Memory %'}</span><strong>{formatPercent(resourceSnapshot.runtime.usage.memory_percent)}</strong></div>
<div className="ops-runtime-row"><span>{isZh ? '工作区占用' : 'Workspace Usage'}</span><strong>{formatBytes(resourceSnapshot.workspace.usage_bytes)} / {resourceSnapshot.workspace.configured_limit_bytes ? formatBytes(resourceSnapshot.workspace.configured_limit_bytes) : '-'}</strong></div>
<div className="ops-runtime-row"><span>{isZh ? '工作区占比' : 'Workspace %'}</span><strong>{formatPercent(resourceSnapshot.workspace.usage_percent)}</strong></div>
<div className="ops-runtime-row"><span>{isZh ? '网络 I/O' : 'Network I/O'}</span><strong>RX {formatBytes(resourceSnapshot.runtime.usage.network_rx_bytes)} · TX {formatBytes(resourceSnapshot.runtime.usage.network_tx_bytes)}</strong></div>
<div className="ops-runtime-row"><span>{isZh ? '磁盘 I/O' : 'Block I/O'}</span><strong>R {formatBytes(resourceSnapshot.runtime.usage.blk_read_bytes)} · W {formatBytes(resourceSnapshot.runtime.usage.blk_write_bytes)}</strong></div>
<div className="ops-runtime-row"><span>PIDs</span><strong>{resourceSnapshot.runtime.usage.pids || 0}</strong></div>
</div>
<div className="field-label">
{resourceSnapshot.note}
{isZh ? '(界面规则:资源配置填写 0 表示不限制)' : ' (UI rule: value 0 means unlimited)'}
</div>
</div>
) : (
<div className="ops-empty-inline">{resourceLoading ? (isZh ? '读取中...' : 'Loading...') : (isZh ? '暂无监控数据' : 'No metrics')}</div>
)}
</div>
</div>
)}
{showBaseModal && ( {showBaseModal && (
<div className="modal-mask" onClick={() => setShowBaseModal(false)}> <div className="modal-mask" onClick={() => setShowBaseModal(false)}>
<div className="modal-card" onClick={(e) => e.stopPropagation()}> <div className="modal-card" onClick={(e) => e.stopPropagation()}>
@ -2169,7 +2610,22 @@ export function BotDashboardModule({
<input className="input" value={editForm.name} onChange={(e) => setEditForm((p) => ({ ...p, name: e.target.value }))} placeholder={t.botNamePlaceholder} /> <input className="input" value={editForm.name} onChange={(e) => setEditForm((p) => ({ ...p, name: e.target.value }))} placeholder={t.botNamePlaceholder} />
<label className="field-label">{t.baseImageReadonly}</label> <label className="field-label">{t.baseImageReadonly}</label>
<input className="input" value={editForm.image_tag} disabled /> <select
className="select"
value={editForm.image_tag}
onChange={(e) => setEditForm((p) => ({ ...p, image_tag: e.target.value }))}
>
{baseImageOptions.map((img) => (
<option key={img.tag} value={img.tag} disabled={img.disabled}>
{img.label}
</option>
))}
</select>
{baseImageOptions.find((opt) => opt.tag === editForm.image_tag)?.needsRegister ? (
<div className="field-label" style={{ color: 'var(--warning)' }}>
{isZh ? '该镜像尚未登记,保存时会自动加入镜像注册表。' : 'This image is not registered yet. It will be auto-registered on save.'}
</div>
) : null}
<label className="field-label">Provider</label> <label className="field-label">Provider</label>
<select className="select" value={editForm.llm_provider} onChange={(e) => onBaseProviderChange(e.target.value)}> <select className="select" value={editForm.llm_provider} onChange={(e) => onBaseProviderChange(e.target.value)}>
@ -2227,7 +2683,58 @@ export function BotDashboardModule({
<input type="range" min="0" max="1" step="0.01" value={editForm.top_p} onChange={(e) => setEditForm((p) => ({ ...p, top_p: Number(e.target.value) }))} /> <input type="range" min="0" max="1" step="0.01" value={editForm.top_p} onChange={(e) => setEditForm((p) => ({ ...p, top_p: Number(e.target.value) }))} />
</div> </div>
<label className="field-label">Max Tokens</label> <label className="field-label">Max Tokens</label>
<input className="input" type="number" step="1" min="256" max="32768" value={editForm.max_tokens} onChange={(e) => setEditForm((p) => ({ ...p, max_tokens: Number(e.target.value) }))} /> <input
className="input"
type="number"
step="1"
min="256"
max="32768"
value={paramDraft.max_tokens}
onChange={(e) => setParamDraft((p) => ({ ...p, max_tokens: e.target.value }))}
/>
<div style={{ display: 'flex', gap: 8, flexWrap: 'wrap' }}>
{[4096, 8192, 16384, 32768].map((value) => (
<button
key={value}
className="btn btn-secondary btn-sm"
type="button"
onClick={() => setParamDraft((p) => ({ ...p, max_tokens: String(value) }))}
>
{value}
</button>
))}
</div>
<label className="field-label">{isZh ? 'CPU 核心数' : 'CPU Cores'}</label>
<input
className="input"
type="number"
min="0"
max="16"
step="0.1"
value={paramDraft.cpu_cores}
onChange={(e) => setParamDraft((p) => ({ ...p, cpu_cores: e.target.value }))}
/>
<label className="field-label">{isZh ? '内存 (MB)' : 'Memory (MB)'}</label>
<input
className="input"
type="number"
min="0"
max="65536"
step="128"
value={paramDraft.memory_mb}
onChange={(e) => setParamDraft((p) => ({ ...p, memory_mb: e.target.value }))}
/>
<label className="field-label">{isZh ? '存储 (GB)' : 'Storage (GB)'}</label>
<input
className="input"
type="number"
min="0"
max="1024"
step="1"
value={paramDraft.storage_gb}
onChange={(e) => setParamDraft((p) => ({ ...p, storage_gb: e.target.value }))}
/>
<div className="field-label">{isZh ? '提示:填写 0 表示不限制(保存后需手动重启 Bot 生效)。' : 'Tip: value 0 means unlimited (takes effect after manual bot restart).'}</div>
<div className="row-between"> <div className="row-between">
<button className="btn btn-secondary" onClick={() => setShowParamModal(false)}>{t.cancel}</button> <button className="btn btn-secondary" onClick={() => setShowParamModal(false)}>{t.cancel}</button>
<button className="btn btn-primary" disabled={isSaving} onClick={() => void saveBot('params')}>{t.saveParams}</button> <button className="btn btn-primary" disabled={isSaving} onClick={() => void saveBot('params')}>{t.saveParams}</button>

View File

@ -104,6 +104,9 @@ const initialForm = {
temperature: 0.2, temperature: 0.2,
top_p: 1.0, top_p: 1.0,
max_tokens: 8192, max_tokens: 8192,
cpu_cores: 1,
memory_mb: 1024,
storage_gb: 10,
soul_md: FALLBACK_SOUL_MD, soul_md: FALLBACK_SOUL_MD,
agents_md: FALLBACK_AGENTS_MD, agents_md: FALLBACK_AGENTS_MD,
@ -143,6 +146,10 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
const [newChannelType, setNewChannelType] = useState<ChannelType>('feishu'); const [newChannelType, setNewChannelType] = useState<ChannelType>('feishu');
const [form, setForm] = useState(initialForm); const [form, setForm] = useState(initialForm);
const [defaultAgentsTemplate, setDefaultAgentsTemplate] = useState(FALLBACK_AGENTS_MD); const [defaultAgentsTemplate, setDefaultAgentsTemplate] = useState(FALLBACK_AGENTS_MD);
const [maxTokensDraft, setMaxTokensDraft] = useState(String(initialForm.max_tokens));
const [cpuCoresDraft, setCpuCoresDraft] = useState(String(initialForm.cpu_cores));
const [memoryMbDraft, setMemoryMbDraft] = useState(String(initialForm.memory_mb));
const [storageGbDraft, setStorageGbDraft] = useState(String(initialForm.storage_gb));
const readyImages = useMemo(() => images.filter((img) => img.status === 'READY'), [images]); const readyImages = useMemo(() => images.filter((img) => img.status === 'READY'), [images]);
const isZh = locale === 'zh'; const isZh = locale === 'zh';
@ -215,6 +222,10 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
} }
if (step === 2) { if (step === 2) {
commitMaxTokensDraft(maxTokensDraft);
commitCpuCoresDraft(cpuCoresDraft);
commitMemoryMbDraft(memoryMbDraft);
commitStorageGbDraft(storageGbDraft);
if (!form.id || !form.name || !form.api_key || !form.image_tag || !form.llm_model) { if (!form.id || !form.name || !form.api_key || !form.image_tag || !form.llm_model) {
notify(ui.requiredBase, { tone: 'warning' }); notify(ui.requiredBase, { tone: 'warning' });
return; return;
@ -271,6 +282,9 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
temperature: clampTemperature(Number(form.temperature)), temperature: clampTemperature(Number(form.temperature)),
top_p: Number(form.top_p), top_p: Number(form.top_p),
max_tokens: Number(form.max_tokens), max_tokens: Number(form.max_tokens),
cpu_cores: Number(form.cpu_cores),
memory_mb: Number(form.memory_mb),
storage_gb: Number(form.storage_gb),
soul_md: form.soul_md, soul_md: form.soul_md,
agents_md: form.agents_md, agents_md: form.agents_md,
user_md: form.user_md, user_md: form.user_md,
@ -295,6 +309,10 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
onCreated?.(); onCreated?.();
onGoDashboard?.(); onGoDashboard?.();
setForm(initialForm); setForm(initialForm);
setMaxTokensDraft(String(initialForm.max_tokens));
setCpuCoresDraft(String(initialForm.cpu_cores));
setMemoryMbDraft(String(initialForm.memory_mb));
setStorageGbDraft(String(initialForm.storage_gb));
setStep(1); setStep(1);
setTestResult(''); setTestResult('');
notify(ui.created, { tone: 'success' }); notify(ui.created, { tone: 'success' });
@ -389,6 +407,41 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
if (Number.isNaN(value)) return 0.2; if (Number.isNaN(value)) return 0.2;
return Math.min(1, Math.max(0, value)); return Math.min(1, Math.max(0, value));
}; };
const clampCpuCores = (value: number) => {
  // Non-numeric input falls back to one core; an explicit 0 means "unlimited".
  if (Number.isNaN(value)) return 1;
  if (value === 0) return 0;
  // Round to one decimal place, then bound the result to [0.1, 16].
  const tenths = Math.round(value * 10) / 10;
  return tenths > 16 ? 16 : tenths < 0.1 ? 0.1 : tenths;
};
const clampMemoryMb = (value: number) => {
  // Non-numeric input falls back to 1024 MB; an explicit 0 means "unlimited".
  if (Number.isNaN(value)) return 1024;
  if (value === 0) return 0;
  // Round to a whole MB, then bound the result to [256, 65536].
  const whole = Math.round(value);
  return whole > 65536 ? 65536 : whole < 256 ? 256 : whole;
};
const clampStorageGb = (value: number) => {
  // Non-numeric input falls back to 10 GB; an explicit 0 means "unlimited".
  if (Number.isNaN(value)) return 10;
  if (value === 0) return 0;
  // Round to a whole GB, then bound the result to [1, 1024].
  const whole = Math.round(value);
  return whole > 1024 ? 1024 : whole < 1 ? 1 : whole;
};
// Clamp the typed max-tokens draft, store the numeric value in the form,
// and echo the normalized text back into the input field.
const commitMaxTokensDraft = (raw: string) => {
  const normalized = clampMaxTokens(Number(raw));
  setForm((prev) => ({ ...prev, max_tokens: normalized }));
  setMaxTokensDraft(String(normalized));
};
// Commit the CPU-cores text draft on blur: parse, clamp, push the clamped
// value into the form state, and echo it back into the draft so the input
// displays the normalized number.
const commitCpuCoresDraft = (raw: string) => {
  const clamped = clampCpuCores(Number(raw));
  setForm((prev) => ({ ...prev, cpu_cores: clamped }));
  setCpuCoresDraft(String(clamped));
};
// Commit the memory text draft on blur: parse, clamp, push the clamped value
// into the form state, and echo it back into the draft so the input displays
// the normalized number.
const commitMemoryMbDraft = (raw: string) => {
  const clamped = clampMemoryMb(Number(raw));
  setForm((prev) => ({ ...prev, memory_mb: clamped }));
  setMemoryMbDraft(String(clamped));
};
// Commit the storage text draft on blur: parse, clamp, push the clamped value
// into the form state, and echo it back into the draft so the input displays
// the normalized number.
const commitStorageGbDraft = (raw: string) => {
  const clamped = clampStorageGb(Number(raw));
  setForm((prev) => ({ ...prev, storage_gb: clamped }));
  setStorageGbDraft(String(clamped));
};
const updateGlobalDeliveryFlag = (key: 'sendProgress' | 'sendToolHints', value: boolean) => { const updateGlobalDeliveryFlag = (key: 'sendProgress' | 'sendToolHints', value: boolean) => {
setForm((prev) => { setForm((prev) => {
@ -577,12 +630,64 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
step="1" step="1"
min="256" min="256"
max="32768" max="32768"
value={form.max_tokens} value={maxTokensDraft}
onChange={(e) => setForm((p) => ({ ...p, max_tokens: clampMaxTokens(Number(e.target.value)) }))} onChange={(e) => setMaxTokensDraft(e.target.value)}
onBlur={(e) => setForm((p) => ({ ...p, max_tokens: clampMaxTokens(Number(e.target.value)) }))} onBlur={(e) => commitMaxTokensDraft(e.target.value)}
/> />
<div className="field-label">{ui.tokenRange}</div> <div className="field-label">{ui.tokenRange}</div>
</div> </div>
{/* Quick-pick presets for max_tokens: each button writes BOTH the committed
    form value and the visible text draft so the input stays in sync. */}
<div style={{ display: 'flex', gap: 8, flexWrap: 'wrap' }}>
  {[4096, 8192, 16384, 32768].map((value) => (
    <button
      key={value}
      className="btn btn-secondary btn-sm"
      type="button"
      onClick={() => {
        setForm((p) => ({ ...p, max_tokens: value }));
        setMaxTokensDraft(String(value));
      }}
    >
      {value}
    </button>
  ))}
</div>
{/* Resource-quota section. Each input is draft-backed: while typing, the raw
    text lives in a *Draft state; clamping and the form-state write happen on
    blur via the matching commit*Draft handler. A value of 0 means "unlimited"
    (see the tip rendered below). */}
<div className="section-mini-title" style={{ marginTop: 10 }}>
  {isZh ? '资源配额' : 'Resource Limits'}
</div>
{/* CPU cores: 0 = unlimited, otherwise clamped to 0.1–16 in 0.1 steps on blur. */}
<label className="field-label">{isZh ? 'CPU 核心数' : 'CPU Cores'}</label>
<input
  className="input"
  type="number"
  min="0"
  max="16"
  step="0.1"
  value={cpuCoresDraft}
  onChange={(e) => setCpuCoresDraft(e.target.value)}
  onBlur={(e) => commitCpuCoresDraft(e.target.value)}
/>
{/* Memory in MB: 0 = unlimited, otherwise clamped to 256–65536 on blur. */}
<label className="field-label">{isZh ? '内存 (MB)' : 'Memory (MB)'}</label>
<input
  className="input"
  type="number"
  min="0"
  max="65536"
  step="128"
  value={memoryMbDraft}
  onChange={(e) => setMemoryMbDraft(e.target.value)}
  onBlur={(e) => commitMemoryMbDraft(e.target.value)}
/>
{/* Storage in GB: 0 = unlimited, otherwise clamped to 1–1024 on blur. */}
<label className="field-label">{isZh ? '存储 (GB)' : 'Storage (GB)'}</label>
<input
  className="input"
  type="number"
  min="0"
  max="1024"
  step="1"
  value={storageGbDraft}
  onChange={(e) => setStorageGbDraft(e.target.value)}
  onBlur={(e) => commitStorageGbDraft(e.target.value)}
/>
<div className="field-label">{isZh ? '提示:填写 0 表示不限制。' : 'Tip: value 0 means unlimited.'}</div>
</div> </div>
@ -664,6 +769,9 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
<div>Temperature: {form.temperature.toFixed(2)}</div> <div>Temperature: {form.temperature.toFixed(2)}</div>
<div>Top P: {form.top_p.toFixed(2)}</div> <div>Top P: {form.top_p.toFixed(2)}</div>
<div>Max Tokens: {form.max_tokens}</div> <div>Max Tokens: {form.max_tokens}</div>
<div>CPU: {Number(form.cpu_cores) === 0 ? (isZh ? '不限' : 'Unlimited') : form.cpu_cores}</div>
<div>{isZh ? '内存' : 'Memory'}: {Number(form.memory_mb) === 0 ? (isZh ? '不限' : 'Unlimited') : `${form.memory_mb} MB`}</div>
<div>{isZh ? '存储' : 'Storage'}: {Number(form.storage_gb) === 0 ? (isZh ? '不限' : 'Unlimited') : `${form.storage_gb} GB`}</div>
<div>{ui.channels}: {configuredChannelsLabel}</div> <div>{ui.channels}: {configuredChannelsLabel}</div>
<div>{ui.tools}: {envEntries.map(([k]) => k).join(', ') || '-'}</div> <div>{ui.tools}: {envEntries.map(([k]) => k).join(', ') || '-'}</div>
</div> </div>

View File

@ -27,6 +27,9 @@ export interface BotState {
temperature?: number; temperature?: number;
top_p?: number; top_p?: number;
max_tokens?: number; max_tokens?: number;
cpu_cores?: number;
memory_mb?: number;
storage_gb?: number;
send_progress?: boolean; send_progress?: boolean;
send_tool_hints?: boolean; send_tool_hints?: boolean;
soul_md?: string; soul_md?: string;