v0.1.4-p2

main
mula.liu 2026-03-15 15:14:01 +08:00
parent 34f8a49bba
commit bee2d8294b
7 changed files with 384 additions and 81 deletions

View File

@ -243,3 +243,26 @@ def mark_bot_topic_item_read(bot_id: str, item_id: int, session: Session = Depen
"bot_id": bot_id, "bot_id": bot_id,
"item": _topic_item_to_dict(row), "item": _topic_item_to_dict(row),
} }
@router.delete("/api/bots/{bot_id}/topic-items/{item_id}")
def delete_bot_topic_item(bot_id: str, item_id: int, session: Session = Depends(get_session)):
    """Delete one topic item owned by a bot.

    Returns the deleted item's serialized snapshot so the client can update
    its local list without refetching. Raises 404 when either the bot or the
    item (scoped to that bot) does not exist.
    """
    if session.get(BotInstance, bot_id) is None:
        raise HTTPException(status_code=404, detail="Bot not found")
    # Scope the lookup to the bot so one bot cannot delete another bot's items.
    stmt = (
        select(TopicItem)
        .where(TopicItem.bot_id == bot_id)
        .where(TopicItem.id == item_id)
        .limit(1)
    )
    item = session.exec(stmt).first()
    if item is None:
        raise HTTPException(status_code=404, detail="Topic item not found")
    # Serialize before deleting: the ORM row may be unusable after delete().
    snapshot = _topic_item_to_dict(item)
    session.delete(item)
    session.commit()
    return {
        "status": "deleted",
        "bot_id": bot_id,
        "item": snapshot,
    }

View File

@ -26,7 +26,8 @@ class BotConfigManager:
for d in [dot_nanobot_dir, workspace_dir, memory_dir, skills_dir]: for d in [dot_nanobot_dir, workspace_dir, memory_dir, skills_dir]:
os.makedirs(d, exist_ok=True) os.makedirs(d, exist_ok=True)
provider_name = (bot_data.get("llm_provider") or "openrouter").strip().lower() raw_provider_name = (bot_data.get("llm_provider") or "openrouter").strip().lower()
provider_name = raw_provider_name
model_name = (bot_data.get("llm_model") or "openai/gpt-4o-mini").strip() model_name = (bot_data.get("llm_model") or "openai/gpt-4o-mini").strip()
api_key = (bot_data.get("api_key") or "").strip() api_key = (bot_data.get("api_key") or "").strip()
api_base = (bot_data.get("api_base") or "").strip() or None api_base = (bot_data.get("api_base") or "").strip() or None
@ -36,8 +37,15 @@ class BotConfigManager:
"qwen": "dashscope", "qwen": "dashscope",
"aliyun-qwen": "dashscope", "aliyun-qwen": "dashscope",
"moonshot": "kimi", "moonshot": "kimi",
# Xunfei Spark provides OpenAI-compatible endpoint.
"xunfei": "openai",
"iflytek": "openai",
"xfyun": "openai",
} }
provider_name = provider_alias.get(provider_name, provider_name) provider_name = provider_alias.get(provider_name, provider_name)
if provider_name == "openai" and raw_provider_name in {"xunfei", "iflytek", "xfyun"}:
if model_name and "/" not in model_name:
model_name = f"openai/{model_name}"
provider_cfg: Dict[str, Any] = { provider_cfg: Dict[str, Any] = {
"apiKey": api_key, "apiKey": api_key,

View File

@ -4,6 +4,7 @@ import threading
import time import time
import codecs import codecs
import base64 import base64
import uuid
from typing import Any, Callable, Dict, List, Optional, Tuple from typing import Any, Callable, Dict, List, Optional, Tuple
import json import json
@ -25,6 +26,87 @@ class BotDockerManager:
self.active_monitors = {} self.active_monitors = {}
self._last_delivery_error: Dict[str, str] = {} self._last_delivery_error: Dict[str, str] = {}
@staticmethod
def _build_http_probe_payload_b64(
url: str,
method: str = "GET",
headers: Optional[Dict[str, str]] = None,
body_json: Optional[Dict[str, Any]] = None,
timeout_seconds: int = 10,
) -> str:
safe_method = str(method or "GET").strip().upper()
if safe_method not in {"GET", "POST"}:
safe_method = "GET"
timeout = max(1, min(int(timeout_seconds or 10), 30))
payload = {
"url": str(url or "").strip(),
"method": safe_method,
"headers": headers or {},
"body_json": body_json if isinstance(body_json, dict) else None,
"timeout": timeout,
}
return base64.b64encode(json.dumps(payload, ensure_ascii=False).encode("utf-8")).decode("ascii")
@staticmethod
def _http_probe_python_script() -> str:
    """Return the Python source executed inside a container to run one HTTP probe.

    The script reads a base64-encoded JSON config from the
    ``DASHBOARD_HTTP_PROBE_B64`` environment variable (produced by
    ``_build_http_probe_payload_b64``), performs the request with ``urllib``,
    and prints exactly one JSON result object to stdout with keys:
    ``ok``, ``status_code``, ``content_type``, ``body_preview``, ``message``.
    Only the standard library is used so it runs in any Python-capable image.
    """
    return (
        "import base64, json, os, urllib.request, urllib.error\n"
        "cfg = json.loads(base64.b64decode(os.environ['DASHBOARD_HTTP_PROBE_B64']).decode('utf-8'))\n"
        "url = str(cfg.get('url') or '').strip()\n"
        "method = str(cfg.get('method') or 'GET').upper()\n"
        "headers = cfg.get('headers') or {}\n"
        "timeout = int(cfg.get('timeout') or 10)\n"
        "data = None\n"
        "if method == 'POST':\n"
        " body = cfg.get('body_json')\n"
        " if not isinstance(body, dict):\n"
        " body = {}\n"
        " data = json.dumps(body, ensure_ascii=False).encode('utf-8')\n"
        " if 'Content-Type' not in headers:\n"
        " headers['Content-Type'] = 'application/json'\n"
        "req = urllib.request.Request(url, data=data, headers=headers, method=method)\n"
        "result = {'ok': False, 'status_code': None, 'content_type': '', 'body_preview': '', 'message': ''}\n"
        "try:\n"
        " with urllib.request.urlopen(req, timeout=timeout) as resp:\n"
        " body = resp.read(1024).decode('utf-8', 'ignore')\n"
        " result.update({'ok': True, 'status_code': int(getattr(resp, 'status', 200) or 200), 'content_type': str(resp.headers.get('content-type') or ''), 'body_preview': body[:512], 'message': 'ok'})\n"
        "except urllib.error.HTTPError as e:\n"
        " body = ''\n"
        " try:\n"
        " body = e.read(1024).decode('utf-8', 'ignore')\n"
        " except Exception:\n"
        " body = ''\n"
        " result.update({'ok': False, 'status_code': int(e.code or 0), 'content_type': str((e.headers or {}).get('content-type') or ''), 'body_preview': body[:512], 'message': f'HTTPError: {e.code}'})\n"
        "except Exception as e:\n"
        " result.update({'ok': False, 'status_code': None, 'content_type': '', 'body_preview': '', 'message': f'{type(e).__name__}: {e}'})\n"
        "print(json.dumps(result, ensure_ascii=False))\n"
    )
def _run_http_probe_exec(self, container, payload_b64: str) -> Dict[str, Any]:
py_script = self._http_probe_python_script()
py_bins = ["python3", "python"]
last_error = ""
for py_bin in py_bins:
try:
exec_result = container.exec_run(
[py_bin, "-c", py_script],
environment={"DASHBOARD_HTTP_PROBE_B64": payload_b64},
)
except Exception as e:
last_error = f"exec {py_bin} failed: {e}"
continue
output = exec_result.output.decode("utf-8", errors="ignore") if isinstance(exec_result.output, (bytes, bytearray)) else str(exec_result.output)
if exec_result.exit_code != 0:
last_error = f"exec {py_bin} exit={exec_result.exit_code}: {output[:300]}"
continue
try:
parsed = json.loads(output.strip() or "{}")
if isinstance(parsed, dict):
return parsed
except Exception:
last_error = f"exec {py_bin} returned non-json: {output[:300]}"
return {"ok": False, "message": last_error or "Failed to run probe in container"}
@staticmethod @staticmethod
def _normalize_resource_limits( def _normalize_resource_limits(
cpu_cores: Optional[float], cpu_cores: Optional[float],
@ -231,74 +313,56 @@ class BotDockerManager:
return {"ok": False, "message": "Bot container not found"} return {"ok": False, "message": "Bot container not found"}
except Exception as e: except Exception as e:
return {"ok": False, "message": f"Failed to inspect bot container: {e}"} return {"ok": False, "message": f"Failed to inspect bot container: {e}"}
payload_b64 = self._build_http_probe_payload_b64(
safe_method = str(method or "GET").strip().upper() url=url,
if safe_method not in {"GET", "POST"}: method=method,
safe_method = "GET" headers=headers,
timeout = max(1, min(int(timeout_seconds or 10), 30)) body_json=body_json,
payload = { timeout_seconds=timeout_seconds,
"url": str(url or "").strip(),
"method": safe_method,
"headers": headers or {},
"body_json": body_json if isinstance(body_json, dict) else None,
"timeout": timeout,
}
payload_b64 = base64.b64encode(json.dumps(payload, ensure_ascii=False).encode("utf-8")).decode("ascii")
py_script = (
"import base64, json, os, urllib.request, urllib.error\n"
"cfg = json.loads(base64.b64decode(os.environ['DASHBOARD_HTTP_PROBE_B64']).decode('utf-8'))\n"
"url = str(cfg.get('url') or '').strip()\n"
"method = str(cfg.get('method') or 'GET').upper()\n"
"headers = cfg.get('headers') or {}\n"
"timeout = int(cfg.get('timeout') or 10)\n"
"data = None\n"
"if method == 'POST':\n"
" body = cfg.get('body_json')\n"
" if not isinstance(body, dict):\n"
" body = {}\n"
" data = json.dumps(body, ensure_ascii=False).encode('utf-8')\n"
" if 'Content-Type' not in headers:\n"
" headers['Content-Type'] = 'application/json'\n"
"req = urllib.request.Request(url, data=data, headers=headers, method=method)\n"
"result = {'ok': False, 'status_code': None, 'content_type': '', 'body_preview': '', 'message': ''}\n"
"try:\n"
" with urllib.request.urlopen(req, timeout=timeout) as resp:\n"
" body = resp.read(1024).decode('utf-8', 'ignore')\n"
" result.update({'ok': True, 'status_code': int(getattr(resp, 'status', 200) or 200), 'content_type': str(resp.headers.get('content-type') or ''), 'body_preview': body[:512], 'message': 'ok'})\n"
"except urllib.error.HTTPError as e:\n"
" body = ''\n"
" try:\n"
" body = e.read(1024).decode('utf-8', 'ignore')\n"
" except Exception:\n"
" body = ''\n"
" result.update({'ok': False, 'status_code': int(e.code or 0), 'content_type': str((e.headers or {}).get('content-type') or ''), 'body_preview': body[:512], 'message': f'HTTPError: {e.code}'})\n"
"except Exception as e:\n"
" result.update({'ok': False, 'status_code': None, 'content_type': '', 'body_preview': '', 'message': f'{type(e).__name__}: {e}'})\n"
"print(json.dumps(result, ensure_ascii=False))\n"
) )
return self._run_http_probe_exec(container, payload_b64)
py_bins = ["python3", "python"] def probe_http_via_temporary_container(
last_error = "" self,
for py_bin in py_bins: image_tag: str,
url: str,
method: str = "GET",
headers: Optional[Dict[str, str]] = None,
body_json: Optional[Dict[str, Any]] = None,
timeout_seconds: int = 10,
) -> Dict[str, Any]:
if not self.client:
return {"ok": False, "message": "Docker client is not available"}
image = str(image_tag or self.base_image).strip() or self.base_image
payload_b64 = self._build_http_probe_payload_b64(
url=url,
method=method,
headers=headers,
body_json=body_json,
timeout_seconds=timeout_seconds,
)
container = None
try: try:
exec_result = container.exec_run( container = self.client.containers.run(
[py_bin, "-c", py_script], image=image,
environment={"DASHBOARD_HTTP_PROBE_B64": payload_b64}, name=f"dashboard_probe_{uuid.uuid4().hex[:10]}",
command=["sh", "-c", "sleep 45"],
detach=True,
tty=False,
stdin_open=False,
network_mode="bridge",
) )
return self._run_http_probe_exec(container, payload_b64)
except docker.errors.ImageNotFound:
return {"ok": False, "message": f"Probe image not found: {image}"}
except Exception as e: except Exception as e:
last_error = f"exec {py_bin} failed: {e}" return {"ok": False, "message": f"Failed to run temporary probe container: {e}"}
continue finally:
output = exec_result.output.decode("utf-8", errors="ignore") if isinstance(exec_result.output, (bytes, bytearray)) else str(exec_result.output) if container is not None:
if exec_result.exit_code != 0:
last_error = f"exec {py_bin} exit={exec_result.exit_code}: {output[:300]}"
continue
try: try:
parsed = json.loads(output.strip() or "{}") container.remove(force=True)
if isinstance(parsed, dict):
return parsed
except Exception: except Exception:
last_error = f"exec {py_bin} returned non-json: {output[:300]}" pass
return {"ok": False, "message": last_error or "Failed to run probe in bot container"}
def send_command(self, bot_id: str, command: str, media: Optional[List[str]] = None) -> bool: def send_command(self, bot_id: str, command: str, media: Optional[List[str]] = None) -> bool:
"""Send a command to dashboard channel with robust container-local delivery.""" """Send a command to dashboard channel with robust container-local delivery."""

View File

@ -526,6 +526,8 @@ def _provider_defaults(provider: str) -> tuple[str, str]:
return "openrouter", "https://openrouter.ai/api/v1" return "openrouter", "https://openrouter.ai/api/v1"
if p in {"dashscope", "aliyun", "qwen", "aliyun-qwen"}: if p in {"dashscope", "aliyun", "qwen", "aliyun-qwen"}:
return "dashscope", "https://dashscope.aliyuncs.com/compatible-mode/v1" return "dashscope", "https://dashscope.aliyuncs.com/compatible-mode/v1"
if p in {"xunfei", "iflytek", "xfyun"}:
return "openai", "https://spark-api-open.xf-yun.com/v1"
if p in {"kimi", "moonshot"}: if p in {"kimi", "moonshot"}:
return "kimi", "https://api.moonshot.cn/v1" return "kimi", "https://api.moonshot.cn/v1"
if p in {"minimax"}: if p in {"minimax"}:
@ -1294,6 +1296,122 @@ def _probe_mcp_server(cfg: Dict[str, Any], bot_id: Optional[str] = None) -> Dict
} }
def _probe_mcp_server_for_start(cfg: Dict[str, Any], image_tag: str) -> Dict[str, Any]:
    """Preflight-check one MCP server config from inside a temporary container.

    Supports the ``streamableHttp`` (default) and ``sse`` transports. The probe
    runs via ``docker_manager.probe_http_via_temporary_container`` (module-level
    manager instance defined elsewhere in this file) so reachability is tested
    from the same network context a bot container would use.

    Returns a dict with keys ``ok``, ``transport``, ``status_code``,
    ``message``, ``probe_from`` (plus ``content_type`` on the SSE path).
    """
    transport_type = str(cfg.get("type") or "streamableHttp").strip()
    if transport_type not in {"streamableHttp", "sse"}:
        transport_type = "streamableHttp"
    url = str(cfg.get("url") or "").strip()
    headers_raw = cfg.get("headers")
    headers: Dict[str, str] = {}
    if isinstance(headers_raw, dict):
        # Keep only headers with a non-empty key; coerce everything to str.
        for k, v in headers_raw.items():
            key = str(k or "").strip()
            if key:
                headers[key] = str(v or "").strip()
    timeout_raw = cfg.get("toolTimeout", 10)
    # Clamp to 1..30 seconds; fall back to 10 on non-numeric config values.
    try:
        timeout_s = max(1, min(int(timeout_raw), 30))
    except Exception:
        timeout_s = 10
    if not url:
        return {
            "ok": False,
            "transport": transport_type,
            "status_code": None,
            "message": "MCP url is required",
            "probe_from": "validation",
        }
    # JSON-RPC "initialize" request used to probe streamableHttp endpoints.
    probe_payload = {
        "jsonrpc": "2.0",
        "id": "dashboard-start-probe",
        "method": "initialize",
        "params": {
            "protocolVersion": "2025-03-26",
            "capabilities": {},
            "clientInfo": {"name": "dashboard-nanobot", "version": "0.1.4"},
        },
    }

    def _with_body_preview(message: str, preview: Any) -> str:
        # Append a whitespace-collapsed, 240-char-capped body excerpt to the
        # message so failures carry some server-side context.
        text = str(message or "").strip()
        body = " ".join(str(preview or "").strip().split())
        if not body:
            return text
        body = body[:240]
        return f"{text}: {body}" if text else body

    if transport_type == "sse":
        probe_headers = dict(headers)
        probe_headers.setdefault("Accept", "text/event-stream")
        probe = docker_manager.probe_http_via_temporary_container(
            image_tag=image_tag,
            url=url,
            method="GET",
            headers=probe_headers,
            body_json=None,
            timeout_seconds=timeout_s,
        )
        status_code = probe.get("status_code")
        content_type = str(probe.get("content_type") or "")
        message = str(probe.get("message") or "").strip()
        body_preview = probe.get("body_preview")
        # Classify common HTTP failures before the generic "not ok" case.
        if status_code in {401, 403}:
            return {"ok": False, "transport": transport_type, "status_code": status_code, "message": "Auth failed for MCP SSE endpoint", "content_type": content_type, "probe_from": "temp-container"}
        if status_code == 404:
            return {"ok": False, "transport": transport_type, "status_code": status_code, "message": "MCP SSE endpoint not found", "content_type": content_type, "probe_from": "temp-container"}
        if isinstance(status_code, int) and status_code >= 500:
            return {"ok": False, "transport": transport_type, "status_code": status_code, "message": _with_body_preview("MCP SSE endpoint server error", body_preview), "content_type": content_type, "probe_from": "temp-container"}
        if not probe.get("ok"):
            return {"ok": False, "transport": transport_type, "status_code": status_code, "message": _with_body_preview(message or "Failed to connect MCP SSE endpoint from temporary probe container", body_preview), "content_type": content_type, "probe_from": "temp-container"}
        # SSE endpoints must advertise text/event-stream to be usable.
        if "text/event-stream" not in content_type.lower():
            return {"ok": False, "transport": transport_type, "status_code": status_code, "message": _with_body_preview("Endpoint reachable, but content-type is not text/event-stream", body_preview), "content_type": content_type, "probe_from": "temp-container"}
        return {"ok": True, "transport": transport_type, "status_code": status_code, "message": "MCP SSE endpoint is reachable", "content_type": content_type, "probe_from": "temp-container"}
    # streamableHttp path: POST the initialize payload.
    probe_headers = dict(headers)
    probe_headers.setdefault("Content-Type", "application/json")
    probe_headers.setdefault("Accept", "application/json, text/event-stream")
    probe = docker_manager.probe_http_via_temporary_container(
        image_tag=image_tag,
        url=url,
        method="POST",
        headers=probe_headers,
        body_json=probe_payload,
        timeout_seconds=timeout_s,
    )
    status_code = probe.get("status_code")
    message = str(probe.get("message") or "").strip()
    body_preview = probe.get("body_preview")
    if status_code in {401, 403}:
        return {"ok": False, "transport": transport_type, "status_code": status_code, "message": "Auth failed for MCP endpoint", "probe_from": "temp-container"}
    if status_code == 404:
        return {"ok": False, "transport": transport_type, "status_code": status_code, "message": "MCP endpoint not found", "probe_from": "temp-container"}
    if isinstance(status_code, int) and status_code >= 500:
        return {"ok": False, "transport": transport_type, "status_code": status_code, "message": _with_body_preview("MCP endpoint server error", body_preview), "probe_from": "temp-container"}
    # 4xx codes like 400/405/415/422 still prove the endpoint is reachable —
    # the probe's request format may simply not match the server's expectations.
    if probe.get("ok") and status_code in {200, 201, 202, 204, 400, 405, 415, 422}:
        reachability_msg = "MCP endpoint is reachable" if status_code in {200, 201, 202, 204} else "MCP endpoint is reachable (request format not fully accepted by probe)"
        return {"ok": True, "transport": transport_type, "status_code": status_code, "message": reachability_msg, "probe_from": "temp-container"}
    return {"ok": False, "transport": transport_type, "status_code": status_code, "message": _with_body_preview(message or "Unexpected response from MCP endpoint", body_preview), "probe_from": "temp-container"}
def _preflight_mcp_servers_for_start(bot_id: str, image_tag: str) -> List[str]:
    """Probe every configured MCP server for a bot before it starts.

    Returns a list of human-readable failure strings (empty when all servers
    pass), each formatted as ``"<server>: <message> [<probe_from>]"``.
    """
    config_data = _read_bot_config(bot_id)
    raw_tools = config_data.get("tools") if isinstance(config_data, dict) else {}
    if not isinstance(raw_tools, dict):
        return []
    problems: List[str] = []
    for name, server_cfg in _normalize_mcp_servers(raw_tools.get("mcpServers")).items():
        outcome = _probe_mcp_server_for_start(server_cfg, image_tag=image_tag)
        if outcome.get("ok"):
            continue
        detail = str(outcome.get("message") or "MCP precheck failed").strip()
        source = str(outcome.get("probe_from") or "temp-container").strip()
        problems.append(f"{name}: {detail} [{source}]")
    return problems
def _parse_env_params(raw: Any) -> Dict[str, str]: def _parse_env_params(raw: Any) -> Dict[str, str]:
return _normalize_env_params(raw) return _normalize_env_params(raw)
@ -1370,6 +1488,9 @@ def _read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
llm_model = str(agents_defaults.get("model") or "") llm_model = str(agents_defaults.get("model") or "")
api_key = str(provider_cfg.get("apiKey") or "").strip() api_key = str(provider_cfg.get("apiKey") or "").strip()
api_base = str(provider_cfg.get("apiBase") or "").strip() api_base = str(provider_cfg.get("apiBase") or "").strip()
api_base_lower = api_base.lower()
if llm_provider == "openai" and ("spark-api-open.xf-yun.com" in api_base_lower or "xf-yun.com" in api_base_lower):
llm_provider = "xunfei"
soul_md = _read_workspace_md(bot.id, "SOUL.md", DEFAULT_SOUL_MD) soul_md = _read_workspace_md(bot.id, "SOUL.md", DEFAULT_SOUL_MD)
resources = _read_bot_resources(bot.id, config_data=config_data) resources = _read_bot_resources(bot.id, config_data=config_data)
@ -1963,7 +2084,7 @@ async def test_provider(payload: dict):
normalized_provider, default_base = _provider_defaults(provider) normalized_provider, default_base = _provider_defaults(provider)
base = (api_base or default_base).rstrip("/") base = (api_base or default_base).rstrip("/")
if normalized_provider not in {"openrouter", "dashscope", "kimi", "minimax"}: if normalized_provider not in {"openrouter", "dashscope", "kimi", "minimax", "openai", "deepseek"}:
raise HTTPException(status_code=400, detail=f"provider not supported for test: {provider}") raise HTTPException(status_code=400, detail=f"provider not supported for test: {provider}")
if not base: if not base:
@ -3481,12 +3602,33 @@ async def websocket_endpoint(websocket: WebSocket, bot_id: str):
await websocket.close(code=4404, reason="Bot not found") await websocket.close(code=4404, reason="Bot not found")
return return
connected = False
try:
await manager.connect(bot_id, websocket) await manager.connect(bot_id, websocket)
connected = True
except Exception as exc:
logger.warning("websocket connect failed bot_id=%s detail=%s", bot_id, exc)
try:
await websocket.close(code=1011, reason="WebSocket accept failed")
except Exception:
pass
return
docker_manager.ensure_monitor(bot_id, docker_callback) docker_manager.ensure_monitor(bot_id, docker_callback)
try: try:
while True: while True:
await websocket.receive_text() await websocket.receive_text()
except WebSocketDisconnect: except WebSocketDisconnect:
pass
except RuntimeError as exc:
# Client may drop before handshake settles; treat as benign disconnect.
msg = str(exc or "").lower()
if "need to call \"accept\" first" not in msg and "not connected" not in msg:
logger.exception("websocket runtime error bot_id=%s", bot_id)
except Exception:
logger.exception("websocket unexpected error bot_id=%s", bot_id)
finally:
if connected:
manager.disconnect(bot_id, websocket) manager.disconnect(bot_id, websocket)

View File

@ -424,6 +424,14 @@ const providerPresets: Record<string, { model: string; apiBase?: string; note: {
en: 'MiniMax endpoint, model example: MiniMax-Text-01', en: 'MiniMax endpoint, model example: MiniMax-Text-01',
}, },
}, },
xunfei: {
model: 'astron-code-latest',
apiBase: 'https://spark-api-open.xf-yun.com/v1',
note: {
'zh-cn': '讯飞星火OpenAI 兼容)接口,模型示例 astron-code-latest',
en: 'Xunfei Spark (OpenAI-compatible), model example: astron-code-latest',
},
},
}; };
const optionalChannelTypes: ChannelType[] = ['feishu', 'qq', 'dingtalk', 'telegram', 'slack', 'email']; const optionalChannelTypes: ChannelType[] = ['feishu', 'qq', 'dingtalk', 'telegram', 'slack', 'email'];
const RUNTIME_STALE_MS = 45000; const RUNTIME_STALE_MS = 45000;
@ -1076,6 +1084,7 @@ export function BotDashboardModule({
const [topicFeedLoadingMore, setTopicFeedLoadingMore] = useState(false); const [topicFeedLoadingMore, setTopicFeedLoadingMore] = useState(false);
const [topicFeedError, setTopicFeedError] = useState(''); const [topicFeedError, setTopicFeedError] = useState('');
const [topicFeedReadSavingById, setTopicFeedReadSavingById] = useState<Record<number, boolean>>({}); const [topicFeedReadSavingById, setTopicFeedReadSavingById] = useState<Record<number, boolean>>({});
const [topicFeedDeleteSavingById, setTopicFeedDeleteSavingById] = useState<Record<number, boolean>>({});
const [topicFeedUnreadCount, setTopicFeedUnreadCount] = useState(0); const [topicFeedUnreadCount, setTopicFeedUnreadCount] = useState(0);
const [topicDetailOpen, setTopicDetailOpen] = useState(false); const [topicDetailOpen, setTopicDetailOpen] = useState(false);
const [compactPanelTab, setCompactPanelTab] = useState<CompactPanelTab>('chat'); const [compactPanelTab, setCompactPanelTab] = useState<CompactPanelTab>('chat');
@ -2752,6 +2761,36 @@ export function BotDashboardModule({
} }
}; };
// Delete a single Topic feed item after explicit user confirmation.
// On success the local list and unread badge are updated in place,
// avoiding a full feed refetch.
const deleteTopicFeedItem = async (item: TopicFeedItem) => {
  if (!selectedBot) return;
  const targetId = Number(item?.id);
  // Ignore rows without a valid positive numeric id.
  if (!Number.isFinite(targetId) || targetId <= 0) return;
  const displayName = String(item?.title || item?.topic_key || targetId).trim() || String(targetId);
  const ok = await confirm({
    title: t.delete,
    message: isZh ? `确认删除这条主题消息?\n${displayName}` : `Delete this Topic item?\n${displayName}`,
    tone: 'warning',
  });
  if (!ok) return;
  // Per-item saving flag lets the row's delete button render a spinner.
  setTopicFeedDeleteSavingById((prev) => ({ ...prev, [targetId]: true }));
  try {
    await axios.delete(`${APP_ENDPOINTS.apiBase}/bots/${selectedBot.id}/topic-items/${targetId}`);
    setTopicFeedItems((prev) => prev.filter((row) => Number(row.id) !== targetId));
    // Deleting an unread item also decrements the unread counter (never below 0).
    if (!Boolean(item?.is_read)) {
      setTopicFeedUnreadCount((prev) => Math.max(0, prev - 1));
    }
    notify(isZh ? '主题消息已删除。' : 'Topic item deleted.', { tone: 'success' });
  } catch (error: any) {
    // Prefer the server-provided detail message when available.
    notify(error?.response?.data?.detail || (isZh ? '删除主题消息失败。' : 'Failed to delete topic item.'), { tone: 'error' });
  } finally {
    // Always clear the saving flag, whether the delete succeeded or failed.
    setTopicFeedDeleteSavingById((prev) => {
      const next = { ...prev };
      delete next[targetId];
      return next;
    });
  }
};
const loadChannels = async (botId: string) => { const loadChannels = async (botId: string) => {
if (!botId) return; if (!botId) return;
const res = await axios.get<BotChannel[]>(`${APP_ENDPOINTS.apiBase}/bots/${botId}/channels`); const res = await axios.get<BotChannel[]>(`${APP_ENDPOINTS.apiBase}/bots/${botId}/channels`);
@ -3625,8 +3664,8 @@ export function BotDashboardModule({
await axios.post(`${APP_ENDPOINTS.apiBase}/bots/${id}/start`); await axios.post(`${APP_ENDPOINTS.apiBase}/bots/${id}/start`);
updateBotStatus(id, 'RUNNING'); updateBotStatus(id, 'RUNNING');
await refresh(); await refresh();
} catch { } catch (error: any) {
notify(t.startFail, { tone: 'error' }); notify(error?.response?.data?.detail || t.startFail, { tone: 'error' });
} finally { } finally {
setOperatingBotId(null); setOperatingBotId(null);
setControlStateByBot((prev) => { setControlStateByBot((prev) => {
@ -3656,8 +3695,8 @@ export function BotDashboardModule({
await axios.post(`${APP_ENDPOINTS.apiBase}/bots/${id}/start`); await axios.post(`${APP_ENDPOINTS.apiBase}/bots/${id}/start`);
updateBotStatus(id, 'RUNNING'); updateBotStatus(id, 'RUNNING');
await refresh(); await refresh();
} catch { } catch (error: any) {
notify(t.restartFail, { tone: 'error' }); notify(error?.response?.data?.detail || t.restartFail, { tone: 'error' });
} finally { } finally {
setOperatingBotId(null); setOperatingBotId(null);
setControlStateByBot((prev) => { setControlStateByBot((prev) => {
@ -4280,6 +4319,7 @@ export function BotDashboardModule({
setTopicFeedNextCursor(null); setTopicFeedNextCursor(null);
setTopicFeedError(''); setTopicFeedError('');
setTopicFeedReadSavingById({}); setTopicFeedReadSavingById({});
setTopicFeedDeleteSavingById({});
setTopicFeedUnreadCount(0); setTopicFeedUnreadCount(0);
return; return;
} }
@ -4297,6 +4337,7 @@ export function BotDashboardModule({
setTopicFeedNextCursor(null); setTopicFeedNextCursor(null);
setTopicFeedError(''); setTopicFeedError('');
setTopicFeedReadSavingById({}); setTopicFeedReadSavingById({});
setTopicFeedDeleteSavingById({});
let cancelled = false; let cancelled = false;
const loadAll = async () => { const loadAll = async () => {
try { try {
@ -4937,9 +4978,11 @@ export function BotDashboardModule({
nextCursor={topicFeedNextCursor} nextCursor={topicFeedNextCursor}
error={topicFeedError} error={topicFeedError}
readSavingById={topicFeedReadSavingById} readSavingById={topicFeedReadSavingById}
deleteSavingById={topicFeedDeleteSavingById}
onTopicChange={setTopicFeedTopicKey} onTopicChange={setTopicFeedTopicKey}
onRefresh={() => void loadTopicFeed({ append: false, topicKey: topicFeedTopicKey })} onRefresh={() => void loadTopicFeed({ append: false, topicKey: topicFeedTopicKey })}
onMarkRead={(itemId) => void markTopicFeedItemRead(itemId)} onMarkRead={(itemId) => void markTopicFeedItemRead(itemId)}
onDeleteItem={(item) => void deleteTopicFeedItem(item)}
onLoadMore={() => void loadTopicFeed({ append: true, cursor: topicFeedNextCursor, topicKey: topicFeedTopicKey })} onLoadMore={() => void loadTopicFeed({ append: true, cursor: topicFeedNextCursor, topicKey: topicFeedTopicKey })}
onOpenWorkspacePath={(path) => void openWorkspacePathFromChat(path)} onOpenWorkspacePath={(path) => void openWorkspacePathFromChat(path)}
onOpenTopicSettings={() => { onOpenTopicSettings={() => {
@ -5681,6 +5724,7 @@ export function BotDashboardModule({
<option value="deepseek">deepseek</option> <option value="deepseek">deepseek</option>
<option value="kimi">kimi (moonshot)</option> <option value="kimi">kimi (moonshot)</option>
<option value="minimax">minimax</option> <option value="minimax">minimax</option>
<option value="xunfei">xunfei (spark)</option>
</LucentSelect> </LucentSelect>
<label className="field-label">{t.modelName}</label> <label className="field-label">{t.modelName}</label>

View File

@ -1,6 +1,6 @@
import { useCallback, useEffect, useMemo, useState } from 'react'; import { useCallback, useEffect, useMemo, useState } from 'react';
import { createPortal } from 'react-dom'; import { createPortal } from 'react-dom';
import { Eye, RefreshCw, X } from 'lucide-react'; import { Eye, RefreshCw, Trash2, X } from 'lucide-react';
import ReactMarkdown from 'react-markdown'; import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm'; import remarkGfm from 'remark-gfm';
import rehypeRaw from 'rehype-raw'; import rehypeRaw from 'rehype-raw';
@ -40,9 +40,11 @@ interface TopicFeedPanelProps {
nextCursor: number | null; nextCursor: number | null;
error: string; error: string;
readSavingById: Record<number, boolean>; readSavingById: Record<number, boolean>;
deleteSavingById: Record<number, boolean>;
onTopicChange: (value: string) => void; onTopicChange: (value: string) => void;
onRefresh: () => void; onRefresh: () => void;
onMarkRead: (itemId: number) => void; onMarkRead: (itemId: number) => void;
onDeleteItem: (item: TopicFeedItem) => void;
onLoadMore: () => void; onLoadMore: () => void;
onOpenWorkspacePath: (path: string) => void; onOpenWorkspacePath: (path: string) => void;
onOpenTopicSettings?: () => void; onOpenTopicSettings?: () => void;
@ -139,9 +141,11 @@ export function TopicFeedPanel({
nextCursor, nextCursor,
error, error,
readSavingById, readSavingById,
deleteSavingById,
onTopicChange, onTopicChange,
onRefresh, onRefresh,
onMarkRead, onMarkRead,
onDeleteItem,
onLoadMore, onLoadMore,
onOpenWorkspacePath, onOpenWorkspacePath,
onOpenTopicSettings, onOpenTopicSettings,
@ -298,6 +302,15 @@ export function TopicFeedPanel({
<Eye size={14} /> <Eye size={14} />
</LucentIconButton> </LucentIconButton>
) : null} ) : null}
<LucentIconButton
className="btn btn-secondary btn-sm icon-btn"
disabled={Boolean(deleteSavingById[itemId])}
onClick={() => onDeleteItem(item)}
tooltip={isZh ? '删除消息' : 'Delete item'}
aria-label={isZh ? '删除消息' : 'Delete item'}
>
{deleteSavingById[itemId] ? <RefreshCw size={14} className="animate-spin" /> : <Trash2 size={14} />}
</LucentIconButton>
{unread ? ( {unread ? (
<button <button
className="btn btn-secondary btn-sm" className="btn btn-secondary btn-sm"

View File

@ -87,6 +87,14 @@ const providerPresets: Record<string, { model: string; note: { 'zh-cn': string;
}, },
apiBase: 'https://api.minimax.chat/v1', apiBase: 'https://api.minimax.chat/v1',
}, },
xunfei: {
model: 'astron-code-latest',
note: {
'zh-cn': '讯飞星火OpenAI 兼容)接口,模型示例 astron-code-latest。',
en: 'Xunfei Spark (OpenAI-compatible), model example: astron-code-latest.',
},
apiBase: 'https://spark-api-open.xf-yun.com/v1',
},
}; };
const initialForm = { const initialForm = {
@ -712,6 +720,7 @@ export function BotWizardModule({ onCreated, onGoDashboard }: BotWizardModulePro
<option value="deepseek">deepseek</option> <option value="deepseek">deepseek</option>
<option value="kimi">kimi (moonshot)</option> <option value="kimi">kimi (moonshot)</option>
<option value="minimax">minimax</option> <option value="minimax">minimax</option>
<option value="xunfei">xunfei (spark)</option>
</LucentSelect> </LucentSelect>
<input className="input" placeholder={ui.modelNamePlaceholder} value={form.llm_model} onChange={(e) => setForm((p) => ({ ...p, llm_model: e.target.value }))} /> <input className="input" placeholder={ui.modelNamePlaceholder} value={form.llm_model} onChange={(e) => setForm((p) => ({ ...p, llm_model: e.target.value }))} />
<input className="input" type="password" placeholder="API Key" value={form.api_key} onChange={(e) => setForm((p) => ({ ...p, api_key: e.target.value }))} /> <input className="input" type="password" placeholder="API Key" value={form.api_key} onChange={(e) => setForm((p) => ({ ...p, api_key: e.target.value }))} />