v0.1.4-p2

main
mula.liu 2026-03-13 14:40:54 +08:00
parent 6795fedbfe
commit 51645be883
34 changed files with 6382 additions and 617 deletions

View File

@ -40,9 +40,16 @@ REDIS_DEFAULT_TTL=60
# Panel access protection # Panel access protection
PANEL_ACCESS_PASSWORD=change_me_panel_password PANEL_ACCESS_PASSWORD=change_me_panel_password
# Internal URL used by built-in topic_mcp server inside bot container
TOPIC_MCP_INTERNAL_URL=http://host.docker.internal:8000/api/mcp/topic
# Template files inside backend container
AGENT_MD_TEMPLATES_FILE=templates/agent_md_templates.json
TOPIC_PRESETS_TEMPLATES_FILE=templates/topic_presets.json
# Max upload size for backend validation (MB) # Max upload size for backend validation (MB)
UPLOAD_MAX_MB=200 UPLOAD_MAX_MB=200
# Workspace files that should use direct download behavior in dashboard
WORKSPACE_DOWNLOAD_EXTENSIONS=.pdf,.doc,.docx,.xls,.xlsx,.xlsm,.ppt,.pptx,.odt,.ods,.odp,.wps,.stl,.scad,.zip,.rar
# Local speech-to-text (Whisper via whisper.cpp model file) # Local speech-to-text (Whisper via whisper.cpp model file)
STT_ENABLED=true STT_ENABLED=true

View File

@ -13,6 +13,7 @@ Dashboard Nanobot 是面向 `nanobot` 的控制平面项目,提供镜像管理
- `USER.md` - `USER.md`
- `TOOLS.md` - `TOOLS.md`
- `IDENTITY.md` - `IDENTITY.md`
- 模板管理:系统级模板改为文件化配置(`backend/templates/agent_md_templates.json` 与 `backend/templates/topic_presets.json`)。
- 2D 运维 DashboardBot 列表、启停、命令发送、日志流、遥测。 - 2D 运维 DashboardBot 列表、启停、命令发送、日志流、遥测。
- UI 全局支持Light/Dark 切换、中文/English 切换。 - UI 全局支持Light/Dark 切换、中文/English 切换。
@ -66,7 +67,9 @@ graph TD
- `DATABASE_ECHO`SQL 日志输出开关 - `DATABASE_ECHO`SQL 日志输出开关
- 不提供自动数据迁移(如需升级迁移请离线完成后再切换连接串) - 不提供自动数据迁移(如需升级迁移请离线完成后再切换连接串)
- `DATA_ROOT`、`BOTS_WORKSPACE_ROOT`:运行数据与 Bot 工作目录 - `DATA_ROOT`、`BOTS_WORKSPACE_ROOT`:运行数据与 Bot 工作目录
- `DEFAULT_*_MD`:创建向导默认模板来源(其中默认输出规范已并入 `DEFAULT_AGENTS_MD`) - `AGENT_MD_TEMPLATES_FILE`:5 个代理 MD 模板文件路径
- `TOPIC_PRESETS_TEMPLATES_FILE`:主题预设模板文件路径
- `DEFAULT_*_MD`:可选覆盖值(一般留空,推荐走模板文件)
- 前端: - 前端:
- 示例文件:`frontend/.env.example` - 示例文件:`frontend/.env.example`
- 本地配置:`frontend/.env` - 本地配置:`frontend/.env`
@ -119,6 +122,7 @@ graph TD
### 关键说明 ### 关键说明
- `backend` 不开放宿主机端口,仅在内部网络被 Nginx 访问。 - `backend` 不开放宿主机端口,仅在内部网络被 Nginx 访问。
- 上传大小使用单一参数 `UPLOAD_MAX_MB` 控制(后端校验 + Nginx 限制)。
- 必须挂载 `/var/run/docker.sock`,否则后端无法操作 Bot 镜像与容器。 - 必须挂载 `/var/run/docker.sock`,否则后端无法操作 Bot 镜像与容器。
- `HOST_BOTS_WORKSPACE_ROOT` 必须是宿主机绝对路径,并且在 `docker-compose.prod.yml` 中以“同路径”挂载到后端容器。 - `HOST_BOTS_WORKSPACE_ROOT` 必须是宿主机绝对路径,并且在 `docker-compose.prod.yml` 中以“同路径”挂载到后端容器。
原因:后端通过 Docker API 创建 Bot 容器时,使用的是宿主机可见的 bind 路径。 原因:后端通过 Docker API 创建 Bot 容器时,使用的是宿主机可见的 bind 路径。

View File

@ -23,9 +23,14 @@ REDIS_DEFAULT_TTL=60
# Optional panel-level access password for all backend API/WS calls. # Optional panel-level access password for all backend API/WS calls.
PANEL_ACCESS_PASSWORD= PANEL_ACCESS_PASSWORD=
# Internal URL used by built-in topic_mcp server inside bot container
TOPIC_MCP_INTERNAL_URL=http://host.docker.internal:8000/api/mcp/topic
# Max upload size for backend validation (MB) # Max upload size for backend validation (MB)
UPLOAD_MAX_MB=100 UPLOAD_MAX_MB=100
# Workspace files that should use direct download behavior in dashboard
# Comma/space/semicolon separated, e.g. ".pdf,.docx,.xlsx,.zip"
WORKSPACE_DOWNLOAD_EXTENSIONS=.pdf,.doc,.docx,.xls,.xlsx,.xlsm,.ppt,.pptx,.odt,.ods,.odp,.wps
# Local speech-to-text (Whisper via whisper.cpp model file) # Local speech-to-text (Whisper via whisper.cpp model file)
STT_ENABLED=true STT_ENABLED=true
@ -44,9 +49,17 @@ APP_HOST=0.0.0.0
APP_PORT=8000 APP_PORT=8000
APP_RELOAD=true APP_RELOAD=true
# Wizard / bootstrap default templates (use \n for line breaks) # Template files (no hard-coded default content in code)
DEFAULT_AGENTS_MD=# Agent Instructions\n\n- 优先完成任务目标\n- 操作前先说明意图\n- 输出必须可执行\n\n## 默认输出规范\n\n- 在workspace中创建目录保存输出。\n- 默认采用 Markdown.md格式。 # Agent template file must include:
DEFAULT_SOUL_MD=# Soul\n\n你是专业的企业数字员工表达清晰、可执行。 # agents_md, soul_md, user_md, tools_md, identity_md
DEFAULT_USER_MD=# User\n\n- 语言: 中文\n- 风格: 专业\n- 偏好: 简明且有步骤 AGENT_MD_TEMPLATES_FILE=templates/agent_md_templates.json
DEFAULT_TOOLS_MD=# Tools\n\n- 谨慎使用 shell\n- 修改文件后复核\n- 失败时说明原因并重试策略 # Topic presets template file must include:
DEFAULT_IDENTITY_MD=# Identity\n\n- 角色: 企业数字员工\n- 领域: 运维与任务执行 # { "presets": [ ... ] }
TOPIC_PRESETS_TEMPLATES_FILE=templates/topic_presets.json
# Optional overrides (fallback only; usually keep empty when using template files)
DEFAULT_AGENTS_MD=
DEFAULT_SOUL_MD=
DEFAULT_USER_MD=
DEFAULT_TOOLS_MD=
DEFAULT_IDENTITY_MD=

View File

@ -0,0 +1,291 @@
import json
from datetime import datetime
from typing import Any, Dict, List, Optional
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.responses import JSONResponse, Response
from pydantic import BaseModel
from sqlalchemy import func
from sqlmodel import Session, select
from core.database import get_session
from models.bot import BotInstance
from models.topic import TopicItem, TopicTopic
from services.topic_service import (
TOPIC_MCP_TOKEN_HEADER,
_TOPIC_KEY_RE,
_handle_topic_mcp_rpc_item,
_jsonrpc_error,
_list_topics,
_normalize_topic_key,
_resolve_topic_mcp_bot_id_by_token,
_topic_item_to_dict,
_topic_to_dict,
)
router = APIRouter()
def _count_topic_items(
    session: Session,
    bot_id: str,
    topic_key: Optional[str] = None,
    unread_only: bool = False,
) -> int:
    """Count topic items belonging to a bot.

    Optionally restricts the count to one normalized topic key and/or to
    unread items only. Returns 0 when nothing matches.
    """
    query = select(func.count()).select_from(TopicItem).where(TopicItem.bot_id == bot_id)
    key = _normalize_topic_key(topic_key or "")
    if key:
        query = query.where(TopicItem.topic_key == key)
    if unread_only:
        query = query.where(TopicItem.is_read == False)  # noqa: E712
    total = session.exec(query).one()
    return int(total or 0)
class TopicCreateRequest(BaseModel):
    """Request body for POST /api/bots/{bot_id}/topics."""

    # Required topic identifier; normalized and validated against _TOPIC_KEY_RE by the handler.
    topic_key: str
    # Display name; the handler falls back to topic_key when omitted or blank.
    name: Optional[str] = None
    # Free-text description; stored stripped.
    description: Optional[str] = None
    # Whether the topic starts in the active state.
    is_active: bool = True
    # Arbitrary routing configuration; serialized to JSON for storage.
    routing: Optional[Dict[str, Any]] = None
    # Arbitrary view schema; serialized to JSON for storage.
    view_schema: Optional[Dict[str, Any]] = None
class TopicUpdateRequest(BaseModel):
    """Request body for PUT /api/bots/{bot_id}/topics/{topic_key}.

    All fields are optional; only fields explicitly present in the payload
    are applied (the handler uses model_dump(exclude_unset=True)).
    """

    # New display name; blank values fall back to the existing topic_key.
    name: Optional[str] = None
    # New description; stored stripped.
    description: Optional[str] = None
    # Toggle the topic's active state.
    is_active: Optional[bool] = None
    # Replacement routing configuration (whole-object replace, not a merge).
    routing: Optional[Dict[str, Any]] = None
    # Replacement view schema (whole-object replace, not a merge).
    view_schema: Optional[Dict[str, Any]] = None
@router.get("/api/bots/{bot_id}/topics")
def list_bot_topics(bot_id: str, session: Session = Depends(get_session)):
    """Return every topic configured for the given bot (404 when the bot is unknown)."""
    if session.get(BotInstance, bot_id) is None:
        raise HTTPException(status_code=404, detail="Bot not found")
    return _list_topics(session, bot_id)
@router.post("/api/bots/{bot_id}/topics")
def create_bot_topic(bot_id: str, payload: TopicCreateRequest, session: Session = Depends(get_session)):
    """Create a topic under a bot.

    Raises 404 for an unknown bot and 400 for a missing, malformed, or
    duplicate topic key. Returns the stored topic as a plain dict.
    """
    if session.get(BotInstance, bot_id) is None:
        raise HTTPException(status_code=404, detail="Bot not found")
    key = _normalize_topic_key(payload.topic_key)
    if not key:
        raise HTTPException(status_code=400, detail="topic_key is required")
    if not _TOPIC_KEY_RE.fullmatch(key):
        raise HTTPException(status_code=400, detail="invalid topic_key")
    duplicate = session.exec(
        select(TopicTopic)
        .where(TopicTopic.bot_id == bot_id)
        .where(TopicTopic.topic_key == key)
        .limit(1)
    ).first()
    if duplicate:
        raise HTTPException(status_code=400, detail=f"Topic already exists: {key}")
    timestamp = datetime.utcnow()
    topic = TopicTopic(
        bot_id=bot_id,
        topic_key=key,
        # Blank names collapse to the topic key itself.
        name=str(payload.name or key).strip() or key,
        description=str(payload.description or "").strip(),
        is_active=bool(payload.is_active),
        is_default_fallback=False,
        routing_json=json.dumps(payload.routing or {}, ensure_ascii=False),
        view_schema_json=json.dumps(payload.view_schema or {}, ensure_ascii=False),
        created_at=timestamp,
        updated_at=timestamp,
    )
    session.add(topic)
    session.commit()
    session.refresh(topic)
    return _topic_to_dict(topic)
@router.put("/api/bots/{bot_id}/topics/{topic_key}")
def update_bot_topic(bot_id: str, topic_key: str, payload: TopicUpdateRequest, session: Session = Depends(get_session)):
    """Partially update a topic; only fields present in the payload are applied."""
    if session.get(BotInstance, bot_id) is None:
        raise HTTPException(status_code=404, detail="Bot not found")
    key = _normalize_topic_key(topic_key)
    if not key:
        raise HTTPException(status_code=400, detail="topic_key is required")
    topic = session.exec(
        select(TopicTopic)
        .where(TopicTopic.bot_id == bot_id)
        .where(TopicTopic.topic_key == key)
        .limit(1)
    ).first()
    if topic is None:
        raise HTTPException(status_code=404, detail="Topic not found")
    changes = payload.model_dump(exclude_unset=True)
    if "name" in changes:
        # Blank names fall back to the topic key.
        topic.name = str(changes.get("name") or "").strip() or topic.topic_key
    if "description" in changes:
        topic.description = str(changes.get("description") or "").strip()
    if "is_active" in changes:
        topic.is_active = bool(changes.get("is_active"))
    if "routing" in changes:
        topic.routing_json = json.dumps(changes.get("routing") or {}, ensure_ascii=False)
    if "view_schema" in changes:
        topic.view_schema_json = json.dumps(changes.get("view_schema") or {}, ensure_ascii=False)
    # Any manual edit clears the legacy auto-created fallback marker.
    topic.is_default_fallback = False
    topic.updated_at = datetime.utcnow()
    session.add(topic)
    session.commit()
    session.refresh(topic)
    return _topic_to_dict(topic)
@router.delete("/api/bots/{bot_id}/topics/{topic_key}")
def delete_bot_topic(bot_id: str, topic_key: str, session: Session = Depends(get_session)):
    """Delete a topic together with every item stored under it."""
    if session.get(BotInstance, bot_id) is None:
        raise HTTPException(status_code=404, detail="Bot not found")
    key = _normalize_topic_key(topic_key)
    if not key:
        raise HTTPException(status_code=400, detail="topic_key is required")
    topic = session.exec(
        select(TopicTopic)
        .where(TopicTopic.bot_id == bot_id)
        .where(TopicTopic.topic_key == key)
        .limit(1)
    ).first()
    if topic is None:
        raise HTTPException(status_code=404, detail="Topic not found")
    # Remove dependent items first — no DB-level cascade is relied upon.
    dependents = session.exec(
        select(TopicItem)
        .where(TopicItem.bot_id == bot_id)
        .where(TopicItem.topic_key == key)
    ).all()
    for dependent in dependents:
        session.delete(dependent)
    session.delete(topic)
    session.commit()
    return {"status": "deleted", "bot_id": bot_id, "topic_key": key}
@router.get("/api/bots/{bot_id}/topic-items")
def list_bot_topic_items(
    bot_id: str,
    topic_key: Optional[str] = None,
    cursor: Optional[int] = None,
    limit: int = 50,
    session: Session = Depends(get_session),
):
    """List topic items for a bot, newest first, with keyset pagination.

    Args:
        bot_id: Owning bot id; 404 when unknown.
        topic_key: Optional topic filter (normalized before use).
        cursor: When positive, only items with id strictly below it are returned.
        limit: Page size, clamped to 1..100.

    Returns the page of items plus ``next_cursor`` (id of the last item in
    this page, or None when there is no further page) and unread counters.
    """
    bot = session.get(BotInstance, bot_id)
    if not bot:
        raise HTTPException(status_code=404, detail="Bot not found")
    normalized_limit = max(1, min(int(limit or 50), 100))
    stmt = select(TopicItem).where(TopicItem.bot_id == bot_id)
    normalized_topic_key = _normalize_topic_key(topic_key or "")
    if normalized_topic_key:
        stmt = stmt.where(TopicItem.topic_key == normalized_topic_key)
    if cursor is not None:
        normalized_cursor = int(cursor)
        if normalized_cursor > 0:
            stmt = stmt.where(TopicItem.id < normalized_cursor)
    # Fetch one extra row to detect whether another page exists.
    rows = session.exec(
        stmt.order_by(TopicItem.id.desc()).limit(normalized_limit + 1)
    ).all()
    next_cursor: Optional[int] = None
    if len(rows) > normalized_limit:
        rows = rows[:normalized_limit]
        # BUG FIX: the cursor must be the id of the LAST item returned in
        # this page, because the next request filters with id < cursor.
        # The previous code used the trimmed-off (limit+1)-th row's id,
        # which made the next page skip that row entirely.
        next_cursor = rows[-1].id
    return {
        "bot_id": bot_id,
        "topic_key": normalized_topic_key or None,
        "items": [_topic_item_to_dict(row) for row in rows],
        "next_cursor": next_cursor,
        "unread_count": _count_topic_items(session, bot_id, normalized_topic_key, unread_only=True),
        "total_unread_count": _count_topic_items(session, bot_id, unread_only=True),
    }
@router.get("/api/bots/{bot_id}/topic-items/stats")
def get_bot_topic_item_stats(bot_id: str, session: Session = Depends(get_session)):
    """Return total/unread item counts and the newest item id for a bot."""
    if session.get(BotInstance, bot_id) is None:
        raise HTTPException(status_code=404, detail="Bot not found")
    newest = session.exec(
        select(TopicItem)
        .where(TopicItem.bot_id == bot_id)
        .order_by(TopicItem.id.desc())
        .limit(1)
    ).first()
    # latest_item_id is None when the bot has no items (or the newest id is falsy).
    latest_id = int(newest.id or 0) if newest and newest.id else None
    return {
        "bot_id": bot_id,
        "total_count": _count_topic_items(session, bot_id),
        "unread_count": _count_topic_items(session, bot_id, unread_only=True),
        "latest_item_id": latest_id,
    }
@router.post("/api/bots/{bot_id}/topic-items/{item_id}/read")
def mark_bot_topic_item_read(bot_id: str, item_id: int, session: Session = Depends(get_session)):
    """Mark a single topic item as read; idempotent for already-read items."""
    if session.get(BotInstance, bot_id) is None:
        raise HTTPException(status_code=404, detail="Bot not found")
    item = session.exec(
        select(TopicItem)
        .where(TopicItem.bot_id == bot_id)
        .where(TopicItem.id == item_id)
        .limit(1)
    ).first()
    if item is None:
        raise HTTPException(status_code=404, detail="Topic item not found")
    if not bool(item.is_read):
        # Only touch the row when the flag actually changes.
        item.is_read = True
        session.add(item)
        session.commit()
        session.refresh(item)
    return {
        "status": "updated",
        "bot_id": bot_id,
        "item": _topic_item_to_dict(item),
    }
@router.post("/api/mcp/topic")
async def topic_mcp_entry(request: Request, session: Session = Depends(get_session)):
    """JSON-RPC entry point for the built-in topic MCP server.

    Authenticates via the dedicated token header (falling back to a Bearer
    token), resolves the owning bot, then dispatches either a single RPC
    payload or a JSON-RPC batch. Notifications (no response) yield HTTP 204.
    """
    token = str(request.headers.get(TOPIC_MCP_TOKEN_HEADER) or "").strip()
    if not token:
        auth_header = str(request.headers.get("authorization") or "").strip()
        if auth_header.lower().startswith("bearer "):
            token = auth_header[7:].strip()
    if not token:
        raise HTTPException(status_code=401, detail="Missing topic_mcp token")
    bot_id = _resolve_topic_mcp_bot_id_by_token(session, token)
    # The token must resolve to an existing bot; both failures look identical to callers.
    if not bot_id or session.get(BotInstance, bot_id) is None:
        raise HTTPException(status_code=401, detail="Invalid topic_mcp token")
    try:
        payload = await request.json()
    except Exception:
        return JSONResponse(status_code=400, content=_jsonrpc_error(None, -32700, "Parse error"))
    if isinstance(payload, list):
        # JSON-RPC batch: an empty batch is an Invalid Request.
        if not payload:
            return JSONResponse(status_code=400, content=_jsonrpc_error(None, -32600, "Invalid Request"))
        replies = [
            reply
            for entry in payload
            if (reply := _handle_topic_mcp_rpc_item(session, bot_id, entry)) is not None
        ]
        if not replies:
            return Response(status_code=204)
        return JSONResponse(content=replies)
    reply = _handle_topic_mcp_rpc_item(session, bot_id, payload)
    if reply is None:
        return Response(status_code=204)
    return JSONResponse(content=reply)

View File

@ -5,6 +5,7 @@ from core.settings import DATABASE_ECHO, DATABASE_URL
# Ensure table models are registered in SQLModel metadata before create_all. # Ensure table models are registered in SQLModel metadata before create_all.
from models import bot as _bot_models # noqa: F401 from models import bot as _bot_models # noqa: F401
from models import topic as _topic_models # noqa: F401
engine = create_engine(DATABASE_URL, echo=DATABASE_ECHO) engine = create_engine(DATABASE_URL, echo=DATABASE_ECHO)
@ -100,6 +101,296 @@ def _drop_legacy_skill_tables() -> None:
conn.commit() conn.commit()
def _ensure_topic_tables_sqlite() -> None:
    """Bootstrap the topic subsystem schema on SQLite.

    Idempotent: every statement uses CREATE ... IF NOT EXISTS. Other
    dialects are skipped here; see _ensure_topic_columns/_ensure_topic_indexes
    for cross-dialect incremental migration.
    """
    # Only the embedded SQLite database is bootstrapped by raw DDL.
    if engine.dialect.name != "sqlite":
        return
    with engine.connect() as conn:
        # Per-bot on/off switch for the topic feature (one row per bot).
        conn.execute(
            text(
                """
                CREATE TABLE IF NOT EXISTS topic_bot_settings (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    bot_id TEXT NOT NULL,
                    topic_enabled INTEGER NOT NULL DEFAULT 1,
                    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                    updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY(bot_id) REFERENCES botinstance(id)
                )
                """
            )
        )
        # Topic definitions: (bot_id, topic_key) is unique (see index below).
        conn.execute(
            text(
                """
                CREATE TABLE IF NOT EXISTS topic_topic (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    bot_id TEXT NOT NULL,
                    topic_key TEXT NOT NULL,
                    name TEXT NOT NULL DEFAULT '',
                    description TEXT NOT NULL DEFAULT '',
                    is_active INTEGER NOT NULL DEFAULT 1,
                    is_default_fallback INTEGER NOT NULL DEFAULT 0,
                    routing_json TEXT NOT NULL DEFAULT '{}',
                    view_schema_json TEXT NOT NULL DEFAULT '{}',
                    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                    updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY(bot_id) REFERENCES botinstance(id)
                )
                """
            )
        )
        # Feed items posted into topics (JSON payloads stored as TEXT columns).
        conn.execute(
            text(
                """
                CREATE TABLE IF NOT EXISTS topic_item (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    bot_id TEXT NOT NULL,
                    topic_key TEXT NOT NULL,
                    title TEXT NOT NULL DEFAULT '',
                    content TEXT NOT NULL DEFAULT '',
                    level TEXT NOT NULL DEFAULT 'info',
                    tags_json TEXT,
                    view_json TEXT,
                    source TEXT NOT NULL DEFAULT 'mcp',
                    dedupe_key TEXT,
                    is_read INTEGER NOT NULL DEFAULT 0,
                    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY(bot_id) REFERENCES botinstance(id)
                )
                """
            )
        )
        # Uniqueness + lookup indexes matching the query patterns of the topic API.
        conn.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS uq_topic_bot_settings_bot_id ON topic_bot_settings(bot_id)"))
        conn.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS uq_topic_topic_bot_topic_key ON topic_topic(bot_id, topic_key)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_topic_bot_id ON topic_topic(bot_id)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_topic_topic_key ON topic_topic(topic_key)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_topic_bot_fallback ON topic_topic(bot_id, is_default_fallback)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_bot_id ON topic_item(bot_id)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_topic_key ON topic_item(topic_key)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_level ON topic_item(level)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_source ON topic_item(source)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_is_read ON topic_item(is_read)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_created_at ON topic_item(created_at)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_bot_topic_created_at ON topic_item(bot_id, topic_key, created_at)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_bot_dedupe ON topic_item(bot_id, dedupe_key)"))
        conn.commit()
def _ensure_topic_columns() -> None:
    """Add any missing topic-subsystem columns to pre-existing tables.

    Cross-dialect, additive-only migration: for each known table/column,
    issue ALTER TABLE ... ADD COLUMN with a dialect-specific DDL fragment
    when the column is absent. Tables that do not exist are skipped
    (table creation is handled elsewhere). Never drops or alters columns.
    """
    dialect = engine.dialect.name
    # table -> column -> dialect -> column-definition DDL fragment.
    required_columns = {
        "topic_bot_settings": {
            "topic_enabled": {
                "sqlite": "INTEGER NOT NULL DEFAULT 1",
                "postgresql": "BOOLEAN NOT NULL DEFAULT TRUE",
                "mysql": "BOOLEAN NOT NULL DEFAULT TRUE",
            },
            "created_at": {
                "sqlite": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
            },
            "updated_at": {
                "sqlite": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
            },
        },
        "topic_topic": {
            "name": {
                "sqlite": "TEXT NOT NULL DEFAULT ''",
                "postgresql": "TEXT NOT NULL DEFAULT ''",
                "mysql": "VARCHAR(255) NOT NULL DEFAULT ''",
            },
            "description": {
                "sqlite": "TEXT NOT NULL DEFAULT ''",
                "postgresql": "TEXT NOT NULL DEFAULT ''",
                "mysql": "LONGTEXT",
            },
            "is_active": {
                "sqlite": "INTEGER NOT NULL DEFAULT 1",
                "postgresql": "BOOLEAN NOT NULL DEFAULT TRUE",
                "mysql": "BOOLEAN NOT NULL DEFAULT TRUE",
            },
            "is_default_fallback": {
                "sqlite": "INTEGER NOT NULL DEFAULT 0",
                "postgresql": "BOOLEAN NOT NULL DEFAULT FALSE",
                "mysql": "BOOLEAN NOT NULL DEFAULT FALSE",
            },
            "routing_json": {
                "sqlite": "TEXT NOT NULL DEFAULT '{}'",
                "postgresql": "TEXT NOT NULL DEFAULT '{}'",
                "mysql": "LONGTEXT",
            },
            "view_schema_json": {
                "sqlite": "TEXT NOT NULL DEFAULT '{}'",
                "postgresql": "TEXT NOT NULL DEFAULT '{}'",
                "mysql": "LONGTEXT",
            },
            "created_at": {
                "sqlite": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
            },
            "updated_at": {
                "sqlite": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
            },
        },
        "topic_item": {
            "title": {
                "sqlite": "TEXT NOT NULL DEFAULT ''",
                "postgresql": "TEXT NOT NULL DEFAULT ''",
                "mysql": "VARCHAR(2000) NOT NULL DEFAULT ''",
            },
            "level": {
                "sqlite": "TEXT NOT NULL DEFAULT 'info'",
                "postgresql": "TEXT NOT NULL DEFAULT 'info'",
                "mysql": "VARCHAR(32) NOT NULL DEFAULT 'info'",
            },
            "tags_json": {
                "sqlite": "TEXT",
                "postgresql": "TEXT",
                "mysql": "LONGTEXT",
            },
            "view_json": {
                "sqlite": "TEXT",
                "postgresql": "TEXT",
                "mysql": "LONGTEXT",
            },
            "source": {
                "sqlite": "TEXT NOT NULL DEFAULT 'mcp'",
                "postgresql": "TEXT NOT NULL DEFAULT 'mcp'",
                "mysql": "VARCHAR(64) NOT NULL DEFAULT 'mcp'",
            },
            "dedupe_key": {
                "sqlite": "TEXT",
                "postgresql": "TEXT",
                "mysql": "VARCHAR(200)",
            },
            "is_read": {
                "sqlite": "INTEGER NOT NULL DEFAULT 0",
                "postgresql": "BOOLEAN NOT NULL DEFAULT FALSE",
                "mysql": "BOOLEAN NOT NULL DEFAULT FALSE",
            },
            "created_at": {
                "sqlite": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
            },
        },
    }
    inspector = inspect(engine)
    with engine.connect() as conn:
        for table_name, cols in required_columns.items():
            # Skip tables not yet created; creation is a separate step.
            if not inspector.has_table(table_name):
                continue
            existing = {
                str(row.get("name"))
                for row in inspector.get_columns(table_name)
                if row.get("name")
            }
            for col, ddl_map in cols.items():
                if col in existing:
                    continue
                # Unknown dialects fall back to the SQLite DDL fragment.
                ddl = ddl_map.get(dialect) or ddl_map.get("sqlite")
                conn.execute(text(f"ALTER TABLE {table_name} ADD COLUMN {col} {ddl}"))
        conn.commit()
def _ensure_topic_indexes() -> None:
    """Create any missing indexes/unique constraints for the topic tables.

    Cross-dialect counterpart to the raw SQLite DDL bootstrap: checks the
    live schema via the SQLAlchemy inspector and only issues CREATE INDEX
    for names that are absent. Tables that do not exist are skipped.

    NOTE(review): this helper is not invoked in the init_database() sequence
    visible in this file — confirm it is called elsewhere, otherwise
    non-SQLite deployments never get these indexes.
    """
    # (index_name, table, columns, unique?)
    required_indexes = [
        ("uq_topic_bot_settings_bot_id", "topic_bot_settings", ["bot_id"], True),
        ("uq_topic_topic_bot_topic_key", "topic_topic", ["bot_id", "topic_key"], True),
        ("idx_topic_topic_bot_id", "topic_topic", ["bot_id"], False),
        ("idx_topic_topic_topic_key", "topic_topic", ["topic_key"], False),
        ("idx_topic_topic_bot_fallback", "topic_topic", ["bot_id", "is_default_fallback"], False),
        ("idx_topic_item_bot_id", "topic_item", ["bot_id"], False),
        ("idx_topic_item_topic_key", "topic_item", ["topic_key"], False),
        ("idx_topic_item_level", "topic_item", ["level"], False),
        ("idx_topic_item_source", "topic_item", ["source"], False),
        ("idx_topic_item_is_read", "topic_item", ["is_read"], False),
        ("idx_topic_item_created_at", "topic_item", ["created_at"], False),
        ("idx_topic_item_bot_topic_created_at", "topic_item", ["bot_id", "topic_key", "created_at"], False),
        ("idx_topic_item_bot_dedupe", "topic_item", ["bot_id", "dedupe_key"], False),
    ]
    inspector = inspect(engine)
    with engine.connect() as conn:
        for name, table_name, columns, unique in required_indexes:
            if not inspector.has_table(table_name):
                continue
            existing = {
                str(item.get("name"))
                for item in inspector.get_indexes(table_name)
                if item.get("name")
            }
            # Unique constraints may be reported separately from indexes.
            existing.update(
                str(item.get("name"))
                for item in inspector.get_unique_constraints(table_name)
                if item.get("name")
            )
            if name in existing:
                continue
            unique_sql = "UNIQUE " if unique else ""
            cols_sql = ", ".join(columns)
            conn.execute(text(f"CREATE {unique_sql}INDEX {name} ON {table_name} ({cols_sql})"))
        conn.commit()
def _cleanup_legacy_default_topics() -> None:
    """
    Remove legacy auto-created fallback topic rows from the early topic-feed design.

    Historical rows look like:
    - topic_key = inbox
    - name = Inbox
    - description = Default topic for uncategorized items
    - routing_json contains "Fallback topic"

    All four conditions must match (case-insensitively) before a row and its
    items are deleted, so user-created "inbox" topics are left alone.
    """
    with engine.connect() as conn:
        # Find candidate legacy rows first; deleting is a second pass.
        legacy_rows = conn.execute(
            text(
                """
                SELECT bot_id, topic_key
                FROM topic_topic
                WHERE lower(coalesce(topic_key, '')) = 'inbox'
                  AND lower(coalesce(name, '')) = 'inbox'
                  AND lower(coalesce(description, '')) = 'default topic for uncategorized items'
                  AND lower(coalesce(routing_json, '')) LIKE '%fallback topic%'
                """
            )
        ).fetchall()
        if not legacy_rows:
            return
        for row in legacy_rows:
            bot_id = str(row[0] or "").strip()
            topic_key = str(row[1] or "").strip().lower()
            if not bot_id or not topic_key:
                continue
            # Delete items before their topic row to avoid dangling references.
            conn.execute(
                text(
                    """
                    DELETE FROM topic_item
                    WHERE bot_id = :bot_id AND lower(coalesce(topic_key, '')) = :topic_key
                    """
                ),
                {"bot_id": bot_id, "topic_key": topic_key},
            )
            conn.execute(
                text(
                    """
                    DELETE FROM topic_topic
                    WHERE bot_id = :bot_id AND lower(coalesce(topic_key, '')) = :topic_key
                    """
                ),
                {"bot_id": bot_id, "topic_key": topic_key},
            )
        conn.commit()
def align_postgres_sequences() -> None: def align_postgres_sequences() -> None:
if engine.dialect.name != "postgresql": if engine.dialect.name != "postgresql":
return return
@ -135,6 +426,9 @@ def init_database() -> None:
_ensure_botinstance_columns() _ensure_botinstance_columns()
_drop_legacy_botinstance_columns() _drop_legacy_botinstance_columns()
_ensure_botmessage_columns() _ensure_botmessage_columns()
_ensure_topic_tables_sqlite()
_ensure_topic_columns()
_cleanup_legacy_default_topics()
align_postgres_sequences() align_postgres_sequences()

View File

@ -1,4 +1,6 @@
import json
import os import os
import re
from pathlib import Path from pathlib import Path
from typing import Final from typing import Final
from urllib.parse import urlsplit, urlunsplit from urllib.parse import urlsplit, urlunsplit
@ -8,13 +10,11 @@ from dotenv import load_dotenv
BACKEND_ROOT: Final[Path] = Path(__file__).resolve().parents[1] BACKEND_ROOT: Final[Path] = Path(__file__).resolve().parents[1]
PROJECT_ROOT: Final[Path] = BACKEND_ROOT.parent PROJECT_ROOT: Final[Path] = BACKEND_ROOT.parent
# Load env files from nearest to broadest scope. # Load env files used by this project.
# Priority (high -> low, with override=False preserving existing values): # Priority (high -> low, with override=False preserving existing values):
# 1) process environment # 1) process environment
# 2) backend/.env # 2) backend/.env
# 3) project/.env # 3) project/.env.prod
# 4) backend/.env.prod
# 5) project/.env.prod
load_dotenv(BACKEND_ROOT / ".env", override=False) load_dotenv(BACKEND_ROOT / ".env", override=False)
load_dotenv(PROJECT_ROOT / ".env.prod", override=False) load_dotenv(PROJECT_ROOT / ".env.prod", override=False)
@ -44,6 +44,35 @@ def _env_int(name: str, default: int, min_value: int, max_value: int) -> int:
return max(min_value, min(max_value, value)) return max(min_value, min(max_value, value))
def _normalize_extension(raw: str) -> str:
text = str(raw or "").strip().lower()
if not text:
return ""
if text.startswith("*."):
text = text[1:]
if not text.startswith("."):
text = f".{text}"
if not re.fullmatch(r"\.[a-z0-9][a-z0-9._+-]{0,31}", text):
return ""
return text
def _env_extensions(name: str, default: tuple[str, ...]) -> tuple[str, ...]:
    """Read a delimiter-separated extension list from environment variable *name*.

    Entries are split on commas/semicolons/whitespace, normalized, and
    de-duplicated (first occurrence wins). When the variable is unset, the
    normalized *default* is used; if that normalizes to nothing, *default*
    is returned verbatim. An explicitly set variable wins even when empty.
    """
    raw = os.getenv(name)
    candidates = list(default) if raw is None else re.split(r"[,;\s]+", str(raw))
    normalized: list[str] = []
    for candidate in candidates:
        ext = _normalize_extension(candidate)
        if ext and ext not in normalized:
            normalized.append(ext)
    if raw is None and not normalized:
        return tuple(default)
    return tuple(normalized)
def _normalize_dir_path(path_value: str) -> str: def _normalize_dir_path(path_value: str) -> str:
raw = str(path_value or "").strip() raw = str(path_value or "").strip()
if not raw: if not raw:
@ -55,6 +84,34 @@ def _normalize_dir_path(path_value: str) -> str:
return str((BACKEND_ROOT / p).resolve()) return str((BACKEND_ROOT / p).resolve())
def _normalize_file_path(path_value: str, fallback: Path) -> Path:
raw = str(path_value or "").strip()
if not raw:
return fallback
raw = os.path.expandvars(os.path.expanduser(raw))
p = Path(raw)
if p.is_absolute():
return p
return (BACKEND_ROOT / p).resolve()
def _load_json_object(path: Path) -> dict[str, object]:
try:
with open(path, "r", encoding="utf-8") as f:
data = json.load(f)
if isinstance(data, dict):
return data
except Exception:
pass
return {}
def _read_template_md(raw: object) -> str:
if raw is None:
return ""
return str(raw).replace("\r\n", "\n").strip()
DATA_ROOT: Final[str] = _normalize_dir_path(os.getenv("DATA_ROOT", str(PROJECT_ROOT / "data"))) DATA_ROOT: Final[str] = _normalize_dir_path(os.getenv("DATA_ROOT", str(PROJECT_ROOT / "data")))
BOTS_WORKSPACE_ROOT: Final[str] = _normalize_dir_path( BOTS_WORKSPACE_ROOT: Final[str] = _normalize_dir_path(
os.getenv("BOTS_WORKSPACE_ROOT", str(PROJECT_ROOT / "workspace" / "bots")) os.getenv("BOTS_WORKSPACE_ROOT", str(PROJECT_ROOT / "workspace" / "bots"))
@ -118,6 +175,24 @@ DATABASE_ENGINE: Final[str] = _database_engine(DATABASE_URL)
DATABASE_URL_DISPLAY: Final[str] = _mask_database_url(DATABASE_URL) DATABASE_URL_DISPLAY: Final[str] = _mask_database_url(DATABASE_URL)
DATABASE_ECHO: Final[bool] = _env_bool("DATABASE_ECHO", True) DATABASE_ECHO: Final[bool] = _env_bool("DATABASE_ECHO", True)
UPLOAD_MAX_MB: Final[int] = _env_int("UPLOAD_MAX_MB", 100, 1, 2048) UPLOAD_MAX_MB: Final[int] = _env_int("UPLOAD_MAX_MB", 100, 1, 2048)
WORKSPACE_DOWNLOAD_EXTENSIONS_DEFAULT: Final[tuple[str, ...]] = (
".pdf",
".doc",
".docx",
".xls",
".xlsx",
".xlsm",
".ppt",
".pptx",
".odt",
".ods",
".odp",
".wps",
)
WORKSPACE_DOWNLOAD_EXTENSIONS: Final[tuple[str, ...]] = _env_extensions(
"WORKSPACE_DOWNLOAD_EXTENSIONS",
WORKSPACE_DOWNLOAD_EXTENSIONS_DEFAULT,
)
STT_ENABLED: Final[bool] = _env_bool("STT_ENABLED", True) STT_ENABLED: Final[bool] = _env_bool("STT_ENABLED", True)
STT_MODEL: Final[str] = str(os.getenv("STT_MODEL") or "ggml-small-q8_0.bin").strip() STT_MODEL: Final[str] = str(os.getenv("STT_MODEL") or "ggml-small-q8_0.bin").strip()
_DEFAULT_STT_MODEL_DIR: Final[Path] = (Path(DATA_ROOT) / "model").resolve() _DEFAULT_STT_MODEL_DIR: Final[Path] = (Path(DATA_ROOT) / "model").resolve()
@ -144,28 +219,60 @@ REDIS_URL: Final[str] = str(os.getenv("REDIS_URL") or "").strip()
REDIS_PREFIX: Final[str] = str(os.getenv("REDIS_PREFIX") or "dashboard_nanobot").strip() or "dashboard_nanobot" REDIS_PREFIX: Final[str] = str(os.getenv("REDIS_PREFIX") or "dashboard_nanobot").strip() or "dashboard_nanobot"
REDIS_DEFAULT_TTL: Final[int] = _env_int("REDIS_DEFAULT_TTL", 60, 1, 86400) REDIS_DEFAULT_TTL: Final[int] = _env_int("REDIS_DEFAULT_TTL", 60, 1, 86400)
PANEL_ACCESS_PASSWORD: Final[str] = str(os.getenv("PANEL_ACCESS_PASSWORD") or "").strip() PANEL_ACCESS_PASSWORD: Final[str] = str(os.getenv("PANEL_ACCESS_PASSWORD") or "").strip()
TOPIC_MCP_INTERNAL_URL: Final[str] = str(
os.getenv("TOPIC_MCP_INTERNAL_URL") or "http://host.docker.internal:8000/api/mcp/topic"
).strip()
TEMPLATE_ROOT: Final[Path] = (BACKEND_ROOT / "templates").resolve()
AGENT_MD_TEMPLATES_FILE: Final[Path] = _normalize_file_path(
os.getenv("AGENT_MD_TEMPLATES_FILE", ""),
TEMPLATE_ROOT / "agent_md_templates.json",
)
TOPIC_PRESETS_TEMPLATES_FILE: Final[Path] = _normalize_file_path(
os.getenv("TOPIC_PRESETS_TEMPLATES_FILE", ""),
TEMPLATE_ROOT / "topic_presets.json",
)
_agent_md_templates_raw = _load_json_object(AGENT_MD_TEMPLATES_FILE)
DEFAULT_AGENTS_MD: Final[str] = _env_text( DEFAULT_AGENTS_MD: Final[str] = _env_text(
"DEFAULT_AGENTS_MD", "DEFAULT_AGENTS_MD",
"# Agent Instructions\n\n- 优先完成任务目标\n- 操作前先说明意图\n- 输出必须可执行\n\n## 默认输出规范\n\n- 每次执行任务时,在 workspace 中创建新目录保存本次输出。\n- 输出内容默认采用 Markdown.md格式。", _read_template_md(_agent_md_templates_raw.get("agents_md")),
).strip() ).strip()
DEFAULT_SOUL_MD: Final[str] = _env_text( DEFAULT_SOUL_MD: Final[str] = _env_text(
"DEFAULT_SOUL_MD", "DEFAULT_SOUL_MD",
"# Soul\n\n你是专业的企业数字员工,表达清晰、可执行。", _read_template_md(_agent_md_templates_raw.get("soul_md")),
).strip() ).strip()
DEFAULT_USER_MD: Final[str] = _env_text( DEFAULT_USER_MD: Final[str] = _env_text(
"DEFAULT_USER_MD", "DEFAULT_USER_MD",
"# User\n\n- 语言: 中文\n- 风格: 专业\n- 偏好: 简明且有步骤", _read_template_md(_agent_md_templates_raw.get("user_md")),
).strip() ).strip()
DEFAULT_TOOLS_MD: Final[str] = _env_text( DEFAULT_TOOLS_MD: Final[str] = _env_text(
"DEFAULT_TOOLS_MD", "DEFAULT_TOOLS_MD",
"# Tools\n\n- 谨慎使用 shell\n- 修改文件后复核\n- 失败时说明原因并重试策略", _read_template_md(_agent_md_templates_raw.get("tools_md")),
).strip() ).strip()
DEFAULT_IDENTITY_MD: Final[str] = _env_text( DEFAULT_IDENTITY_MD: Final[str] = _env_text(
"DEFAULT_IDENTITY_MD", "DEFAULT_IDENTITY_MD",
"# Identity\n\n- 角色: 企业数字员工\n- 领域: 运维与任务执行", _read_template_md(_agent_md_templates_raw.get("identity_md")),
).strip() ).strip()
_topic_presets_raw = _load_json_object(TOPIC_PRESETS_TEMPLATES_FILE)
_topic_presets_list = _topic_presets_raw.get("presets")
TOPIC_PRESET_TEMPLATES: Final[list[dict[str, object]]] = [
dict(row) for row in (_topic_presets_list if isinstance(_topic_presets_list, list) else []) if isinstance(row, dict)
]
def load_agent_md_templates() -> dict[str, str]:
    """Re-read the agent MD template file and return the five template slots.

    Missing or malformed entries come back as "" rather than raising.
    """
    data = _load_json_object(AGENT_MD_TEMPLATES_FILE)
    slots = ("agents_md", "soul_md", "user_md", "tools_md", "identity_md")
    return {slot: _read_template_md(data.get(slot)) for slot in slots}
def load_topic_presets_template() -> dict[str, object]:
    """Re-read the topic presets file.

    Always returns {"presets": [...]} keeping only dict entries; any other
    shape collapses to an empty preset list.
    """
    data = _load_json_object(TOPIC_PRESETS_TEMPLATES_FILE)
    presets = data.get("presets")
    if isinstance(presets, list):
        return {"presets": [dict(entry) for entry in presets if isinstance(entry, dict)]}
    return {"presets": []}

View File

View File

@ -0,0 +1,85 @@
-- Topic subsystem schema (SQLite)
-- Apply manually before/after backend deployment if needed.
BEGIN TRANSACTION;
-- Per-bot switch for the topic subsystem; one row per bot
-- (uniqueness enforced by uq_topic_bot_settings_bot_id below).
CREATE TABLE IF NOT EXISTS topic_bot_settings (
id INTEGER PRIMARY KEY AUTOINCREMENT,
bot_id TEXT NOT NULL,
topic_enabled INTEGER NOT NULL DEFAULT 1,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY(bot_id) REFERENCES botinstance(id)
);
-- A topic (feed category) per bot; topic_key is unique within a bot.
-- routing_json / view_schema_json hold JSON-encoded configuration blobs.
CREATE TABLE IF NOT EXISTS topic_topic (
id INTEGER PRIMARY KEY AUTOINCREMENT,
bot_id TEXT NOT NULL,
topic_key TEXT NOT NULL,
name TEXT NOT NULL DEFAULT '',
description TEXT NOT NULL DEFAULT '',
is_active INTEGER NOT NULL DEFAULT 1,
is_default_fallback INTEGER NOT NULL DEFAULT 0,
routing_json TEXT NOT NULL DEFAULT '{}',
view_schema_json TEXT NOT NULL DEFAULT '{}',
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY(bot_id) REFERENCES botinstance(id)
);
-- A single published feed entry; tags_json/view_json are optional JSON blobs,
-- dedupe_key enables time-windowed duplicate suppression.
CREATE TABLE IF NOT EXISTS topic_item (
id INTEGER PRIMARY KEY AUTOINCREMENT,
bot_id TEXT NOT NULL,
topic_key TEXT NOT NULL,
title TEXT NOT NULL DEFAULT '',
content TEXT NOT NULL DEFAULT '',
level TEXT NOT NULL DEFAULT 'info',
tags_json TEXT,
view_json TEXT,
source TEXT NOT NULL DEFAULT 'mcp',
dedupe_key TEXT,
is_read INTEGER NOT NULL DEFAULT 0,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY(bot_id) REFERENCES botinstance(id)
);
-- Uniqueness guarantees.
CREATE UNIQUE INDEX IF NOT EXISTS uq_topic_bot_settings_bot_id
ON topic_bot_settings(bot_id);
CREATE UNIQUE INDEX IF NOT EXISTS uq_topic_topic_bot_topic_key
ON topic_topic(bot_id, topic_key);
-- Lookup indexes for topic queries.
CREATE INDEX IF NOT EXISTS idx_topic_topic_bot_id
ON topic_topic(bot_id);
CREATE INDEX IF NOT EXISTS idx_topic_topic_topic_key
ON topic_topic(topic_key);
CREATE INDEX IF NOT EXISTS idx_topic_topic_bot_fallback
ON topic_topic(bot_id, is_default_fallback);
-- Lookup/filter indexes for feed items.
CREATE INDEX IF NOT EXISTS idx_topic_item_bot_id
ON topic_item(bot_id);
CREATE INDEX IF NOT EXISTS idx_topic_item_topic_key
ON topic_item(topic_key);
CREATE INDEX IF NOT EXISTS idx_topic_item_level
ON topic_item(level);
CREATE INDEX IF NOT EXISTS idx_topic_item_source
ON topic_item(source);
CREATE INDEX IF NOT EXISTS idx_topic_item_is_read
ON topic_item(is_read);
CREATE INDEX IF NOT EXISTS idx_topic_item_created_at
ON topic_item(created_at);
-- Composite indexes for the feed listing and dedupe queries.
CREATE INDEX IF NOT EXISTS idx_topic_item_bot_topic_created_at
ON topic_item(bot_id, topic_key, created_at);
CREATE INDEX IF NOT EXISTS idx_topic_item_bot_dedupe
ON topic_item(bot_id, dedupe_key);
COMMIT;

View File

@ -34,6 +34,7 @@ from core.settings import (
DATABASE_ECHO, DATABASE_ECHO,
DATABASE_ENGINE, DATABASE_ENGINE,
DATABASE_URL_DISPLAY, DATABASE_URL_DISPLAY,
AGENT_MD_TEMPLATES_FILE,
DEFAULT_AGENTS_MD, DEFAULT_AGENTS_MD,
DEFAULT_IDENTITY_MD, DEFAULT_IDENTITY_MD,
DEFAULT_SOUL_MD, DEFAULT_SOUL_MD,
@ -49,9 +50,22 @@ from core.settings import (
STT_ENABLED, STT_ENABLED,
STT_MAX_AUDIO_SECONDS, STT_MAX_AUDIO_SECONDS,
STT_MODEL, STT_MODEL,
TOPIC_PRESET_TEMPLATES,
TOPIC_PRESETS_TEMPLATES_FILE,
UPLOAD_MAX_MB, UPLOAD_MAX_MB,
WORKSPACE_DOWNLOAD_EXTENSIONS,
load_agent_md_templates,
load_topic_presets_template,
) )
from models.bot import BotInstance, BotMessage, NanobotImage from models.bot import BotInstance, BotMessage, NanobotImage
from models.topic import TopicBotSettings, TopicItem, TopicTopic
from api.topic_router import router as topic_router
from services.topic_service import (
TOPIC_MCP_SERVER_NAME,
_annotate_locked_mcp_servers,
_ensure_topic_mcp_server,
)
from services.topic_runtime import publish_runtime_topic_packet
app = FastAPI(title="Dashboard Nanobot API") app = FastAPI(title="Dashboard Nanobot API")
logger = logging.getLogger("dashboard.backend") logger = logging.getLogger("dashboard.backend")
@ -62,6 +76,7 @@ app.add_middleware(
allow_methods=["*"], allow_methods=["*"],
allow_headers=["*"], allow_headers=["*"],
) )
app.include_router(topic_router)
os.makedirs(BOTS_WORKSPACE_ROOT, exist_ok=True) os.makedirs(BOTS_WORKSPACE_ROOT, exist_ok=True)
os.makedirs(DATA_ROOT, exist_ok=True) os.makedirs(DATA_ROOT, exist_ok=True)
@ -180,6 +195,11 @@ class PanelLoginRequest(BaseModel):
password: Optional[str] = None password: Optional[str] = None
class SystemTemplatesUpdateRequest(BaseModel):
    """Request body for PUT /api/system/templates; each section is optional
    and only the sections present are rewritten."""

    # Mapping of template name (agents_md/soul_md/user_md/tools_md/identity_md)
    # to markdown text.
    agent_md_templates: Optional[Dict[str, str]] = None
    # Expected shape: {"presets": [...]} — validated by the endpoint.
    topic_presets: Optional[Dict[str, Any]] = None
def _normalize_packet_channel(packet: Dict[str, Any]) -> str: def _normalize_packet_channel(packet: Dict[str, Any]) -> str:
raw = str(packet.get("channel") or packet.get("source") or "").strip().lower() raw = str(packet.get("channel") or packet.get("source") or "").strip().lower()
if raw in {"dashboard", "dashboard_channel", "dashboard-channel"}: if raw in {"dashboard", "dashboard_channel", "dashboard-channel"}:
@ -287,6 +307,16 @@ def _persist_runtime_packet(bot_id: str, packet: Dict[str, Any]) -> Optional[int
bot.updated_at = datetime.utcnow() bot.updated_at = datetime.utcnow()
session.add(bot) session.add(bot)
session.commit() session.commit()
publish_runtime_topic_packet(
engine,
bot_id,
packet,
source_channel,
persisted_message_id,
logger,
)
if persisted_message_id: if persisted_message_id:
packet["message_id"] = persisted_message_id packet["message_id"] = persisted_message_id
if packet_type in {"ASSISTANT_MESSAGE", "USER_COMMAND", "BUS_EVENT"}: if packet_type in {"ASSISTANT_MESSAGE", "USER_COMMAND", "BUS_EVENT"}:
@ -371,6 +401,7 @@ def _is_panel_protected_api_path(path: str, method: str = "GET") -> bool:
"/api/panel/auth/login", "/api/panel/auth/login",
"/api/health", "/api/health",
"/api/health/cache", "/api/health/cache",
"/api/mcp/topic",
}: }:
return False return False
if _is_bot_panel_management_api_path(raw, verb): if _is_bot_panel_management_api_path(raw, verb):
@ -484,6 +515,7 @@ async def on_startup():
with Session(engine) as session: with Session(engine) as session:
for bot in session.exec(select(BotInstance)).all(): for bot in session.exec(select(BotInstance)).all():
_migrate_bot_resources_store(bot.id) _migrate_bot_resources_store(bot.id)
_ensure_topic_mcp_server(bot.id)
running_bots = session.exec(select(BotInstance).where(BotInstance.docker_status == "RUNNING")).all() running_bots = session.exec(select(BotInstance).where(BotInstance.docker_status == "RUNNING")).all()
for bot in running_bots: for bot in running_bots:
docker_manager.ensure_monitor(bot.id, docker_callback) docker_manager.ensure_monitor(bot.id, docker_callback)
@ -504,17 +536,23 @@ def _provider_defaults(provider: str) -> tuple[str, str]:
@app.get("/api/system/defaults") @app.get("/api/system/defaults")
def get_system_defaults(): def get_system_defaults():
md_templates = load_agent_md_templates()
topic_presets = load_topic_presets_template()
return { return {
"templates": { "templates": {
"soul_md": DEFAULT_SOUL_MD, "soul_md": md_templates.get("soul_md") or DEFAULT_SOUL_MD,
"agents_md": DEFAULT_AGENTS_MD, "agents_md": md_templates.get("agents_md") or DEFAULT_AGENTS_MD,
"user_md": DEFAULT_USER_MD, "user_md": md_templates.get("user_md") or DEFAULT_USER_MD,
"tools_md": DEFAULT_TOOLS_MD, "tools_md": md_templates.get("tools_md") or DEFAULT_TOOLS_MD,
"identity_md": DEFAULT_IDENTITY_MD, "identity_md": md_templates.get("identity_md") or DEFAULT_IDENTITY_MD,
}, },
"limits": { "limits": {
"upload_max_mb": UPLOAD_MAX_MB, "upload_max_mb": UPLOAD_MAX_MB,
}, },
"workspace": {
"download_extensions": list(WORKSPACE_DOWNLOAD_EXTENSIONS),
},
"topic_presets": topic_presets.get("presets") or TOPIC_PRESET_TEMPLATES,
"speech": { "speech": {
"enabled": STT_ENABLED, "enabled": STT_ENABLED,
"model": STT_MODEL, "model": STT_MODEL,
@ -525,6 +563,47 @@ def get_system_defaults():
} }
def _write_json_atomic(path: str, payload: Dict[str, Any]) -> None:
os.makedirs(os.path.dirname(path), exist_ok=True)
tmp = f"{path}.tmp"
with open(tmp, "w", encoding="utf-8") as f:
json.dump(payload, f, ensure_ascii=False, indent=2)
os.replace(tmp, path)
@app.get("/api/system/templates")
def get_system_templates():
    """Return the current on-disk system templates (agent MDs + topic presets)."""
    agent_templates = load_agent_md_templates()
    presets = load_topic_presets_template()
    return {"agent_md_templates": agent_templates, "topic_presets": presets}
@app.put("/api/system/templates")
def update_system_templates(payload: SystemTemplatesUpdateRequest):
    """Persist system-level templates to their JSON template files.

    Each section is rewritten only when present in the payload. Agent MD
    values are normalized to LF line endings; topic presets must be a
    list under "presets" (400 otherwise, a missing entry clears the
    file). Responds with the freshly re-read state of both files.
    """
    if payload.agent_md_templates is not None:
        incoming = payload.agent_md_templates
        md_keys = ("agents_md", "soul_md", "user_md", "tools_md", "identity_md")
        sanitized: Dict[str, str] = {
            key: str(incoming.get(key, "") or "").replace("\r\n", "\n") for key in md_keys
        }
        _write_json_atomic(str(AGENT_MD_TEMPLATES_FILE), sanitized)
    if payload.topic_presets is not None:
        raw_presets = payload.topic_presets.get("presets") if isinstance(payload.topic_presets, dict) else None
        if raw_presets is not None and not isinstance(raw_presets, list):
            raise HTTPException(status_code=400, detail="topic_presets.presets must be an array")
        rows: list = [dict(row) for row in raw_presets if isinstance(row, dict)] if isinstance(raw_presets, list) else []
        _write_json_atomic(str(TOPIC_PRESETS_TEMPLATES_FILE), {"presets": rows})
    return {
        "status": "ok",
        "agent_md_templates": load_agent_md_templates(),
        "topic_presets": load_topic_presets_template(),
    }
@app.get("/api/health") @app.get("/api/health")
def get_health(): def get_health():
try: try:
@ -1339,6 +1418,17 @@ def _sync_workspace_channels(
} }
if isinstance(runtime_overrides, dict): if isinstance(runtime_overrides, dict):
for key, value in runtime_overrides.items(): for key, value in runtime_overrides.items():
# Keep existing runtime secrets/config when caller sends empty placeholder values.
if key in {"api_key", "llm_provider", "llm_model"}:
text = str(value or "").strip()
if not text:
continue
bot_data[key] = text
continue
if key == "api_base":
# api_base may be intentionally empty (use provider default), so keep explicit value.
bot_data[key] = str(value or "").strip()
continue
bot_data[key] = value bot_data[key] = value
resources = _normalize_resource_limits( resources = _normalize_resource_limits(
bot_data.get("cpu_cores"), bot_data.get("cpu_cores"),
@ -1379,6 +1469,7 @@ def _sync_workspace_channels(
bot_data=bot_data, bot_data=bot_data,
channels=normalized_channels, channels=normalized_channels,
) )
_ensure_topic_mcp_server(bot_id)
_write_bot_resources( _write_bot_resources(
bot_id, bot_id,
bot_data.get("cpu_cores"), bot_data.get("cpu_cores"),
@ -2071,6 +2162,17 @@ def update_bot(bot_id: str, payload: BotUpdateRequest, session: Session = Depend
if field in update_data: if field in update_data:
runtime_overrides[field] = update_data.pop(field) runtime_overrides[field] = update_data.pop(field)
# Never allow empty placeholders to overwrite existing runtime model settings.
for text_field in ("llm_provider", "llm_model", "api_key"):
if text_field in runtime_overrides:
text = str(runtime_overrides.get(text_field) or "").strip()
if not text:
runtime_overrides.pop(text_field, None)
else:
runtime_overrides[text_field] = text
if "api_base" in runtime_overrides:
runtime_overrides["api_base"] = str(runtime_overrides.get("api_base") or "").strip()
if "system_prompt" in runtime_overrides and "soul_md" not in runtime_overrides: if "system_prompt" in runtime_overrides and "soul_md" not in runtime_overrides:
runtime_overrides["soul_md"] = runtime_overrides["system_prompt"] runtime_overrides["soul_md"] = runtime_overrides["system_prompt"]
if "soul_md" in runtime_overrides and "system_prompt" not in runtime_overrides: if "soul_md" in runtime_overrides and "system_prompt" not in runtime_overrides:
@ -2191,6 +2293,15 @@ def delete_bot(bot_id: str, delete_workspace: bool = True, session: Session = De
messages = session.exec(select(BotMessage).where(BotMessage.bot_id == bot_id)).all() messages = session.exec(select(BotMessage).where(BotMessage.bot_id == bot_id)).all()
for row in messages: for row in messages:
session.delete(row) session.delete(row)
topic_items = session.exec(select(TopicItem).where(TopicItem.bot_id == bot_id)).all()
for row in topic_items:
session.delete(row)
topics = session.exec(select(TopicTopic).where(TopicTopic.bot_id == bot_id)).all()
for row in topics:
session.delete(row)
topic_settings = session.exec(select(TopicBotSettings).where(TopicBotSettings.bot_id == bot_id)).all()
for row in topic_settings:
session.delete(row)
session.delete(bot) session.delete(bot)
session.commit() session.commit()
@ -2251,13 +2362,17 @@ def get_bot_mcp_config(bot_id: str, session: Session = Depends(get_session)):
if not bot: if not bot:
raise HTTPException(status_code=404, detail="Bot not found") raise HTTPException(status_code=404, detail="Bot not found")
config_data = _read_bot_config(bot_id) config_data = _read_bot_config(bot_id)
_ensure_topic_mcp_server(bot_id, config_data=config_data, persist=True)
config_data = _read_bot_config(bot_id)
tools_cfg = config_data.get("tools") tools_cfg = config_data.get("tools")
if not isinstance(tools_cfg, dict): if not isinstance(tools_cfg, dict):
tools_cfg = {} tools_cfg = {}
mcp_servers = _normalize_mcp_servers(tools_cfg.get("mcpServers")) mcp_servers = _normalize_mcp_servers(tools_cfg.get("mcpServers"))
mcp_servers = _annotate_locked_mcp_servers(mcp_servers)
return { return {
"bot_id": bot_id, "bot_id": bot_id,
"mcp_servers": mcp_servers, "mcp_servers": mcp_servers,
"locked_servers": [TOPIC_MCP_SERVER_NAME],
"restart_required": True, "restart_required": True,
} }
@ -2274,6 +2389,8 @@ def update_bot_mcp_config(bot_id: str, payload: BotMcpConfigUpdateRequest, sessi
if not isinstance(tools_cfg, dict): if not isinstance(tools_cfg, dict):
tools_cfg = {} tools_cfg = {}
mcp_servers = _normalize_mcp_servers(payload.mcp_servers or {}) mcp_servers = _normalize_mcp_servers(payload.mcp_servers or {})
locked_server = _ensure_topic_mcp_server(bot_id, config_data=config_data, persist=False)
mcp_servers[TOPIC_MCP_SERVER_NAME] = locked_server
tools_cfg["mcpServers"] = mcp_servers tools_cfg["mcpServers"] = mcp_servers
config_data["tools"] = tools_cfg config_data["tools"] = tools_cfg
_write_bot_config(bot_id, config_data) _write_bot_config(bot_id, config_data)
@ -2281,7 +2398,8 @@ def update_bot_mcp_config(bot_id: str, payload: BotMcpConfigUpdateRequest, sessi
return { return {
"status": "updated", "status": "updated",
"bot_id": bot_id, "bot_id": bot_id,
"mcp_servers": mcp_servers, "mcp_servers": _annotate_locked_mcp_servers(mcp_servers),
"locked_servers": [TOPIC_MCP_SERVER_NAME],
"restart_required": True, "restart_required": True,
} }

View File

@ -0,0 +1,59 @@
from datetime import datetime
from typing import Optional
from sqlalchemy import Index, UniqueConstraint
from sqlmodel import Field, SQLModel
class TopicBotSettings(SQLModel, table=True):
    """Per-bot switch for the topic subsystem (one row per bot)."""

    __tablename__ = "topic_bot_settings"
    __table_args__ = (
        UniqueConstraint("bot_id", name="uq_topic_bot_settings_bot_id"),
    )
    id: Optional[int] = Field(default=None, primary_key=True)
    # One settings row per bot; uniqueness enforced by the constraint above.
    bot_id: str = Field(foreign_key="botinstance.id", index=True)
    # Master on/off switch for topic publishing for this bot.
    topic_enabled: bool = Field(default=True)
    created_at: datetime = Field(default_factory=datetime.utcnow, index=True)
    updated_at: datetime = Field(default_factory=datetime.utcnow, index=True)
class TopicTopic(SQLModel, table=True):
    """A per-bot topic (feed category) with routing and view configuration."""

    __tablename__ = "topic_topic"
    __table_args__ = (
        UniqueConstraint("bot_id", "topic_key", name="uq_topic_topic_bot_topic_key"),
        Index("idx_topic_topic_bot_fallback", "bot_id", "is_default_fallback"),
    )
    id: Optional[int] = Field(default=None, primary_key=True)
    bot_id: str = Field(foreign_key="botinstance.id", index=True)
    # Normalized topic key; unique per bot (see table constraint above).
    topic_key: str = Field(index=True)
    name: str = Field(default="")
    description: str = Field(default="")
    # Inactive topics are excluded from routing and publishing.
    is_active: bool = Field(default=True)
    is_default_fallback: bool = Field(default=False)
    # JSON-encoded routing config (include_when/exclude_when/priority).
    routing_json: str = Field(default="{}")
    # JSON-encoded view schema for rendering items of this topic.
    view_schema_json: str = Field(default="{}")
    created_at: datetime = Field(default_factory=datetime.utcnow, index=True)
    updated_at: datetime = Field(default_factory=datetime.utcnow, index=True)
class TopicItem(SQLModel, table=True):
    """A single published entry in a bot's topic feed."""

    __tablename__ = "topic_item"
    __table_args__ = (
        Index("idx_topic_item_bot_topic_created_at", "bot_id", "topic_key", "created_at"),
        Index("idx_topic_item_bot_dedupe", "bot_id", "dedupe_key"),
    )
    id: Optional[int] = Field(default=None, primary_key=True)
    bot_id: str = Field(foreign_key="botinstance.id", index=True)
    topic_key: str = Field(index=True)
    title: str = Field(default="")
    content: str = Field(default="")
    # Severity level; normalized to info/warn/error/success by the service.
    level: str = Field(default="info", index=True)
    # Optional JSON-encoded list of tag strings.
    tags_json: Optional[str] = Field(default=None)
    # Optional JSON-encoded view payload (e.g. a summary_card dict).
    view_json: Optional[str] = Field(default=None)
    # Producer of the item; defaults to the MCP publish path.
    source: str = Field(default="mcp", index=True)
    # When set, identical keys within the dedupe window are dropped.
    dedupe_key: Optional[str] = Field(default=None)
    is_read: bool = Field(default=False, index=True)
    created_at: datetime = Field(default_factory=datetime.utcnow, index=True)

View File

@ -0,0 +1 @@

View File

@ -0,0 +1,4 @@
from .bridge import publish_runtime_topic_packet
from .publisher import build_topic_publish_payload
__all__ = ["build_topic_publish_payload", "publish_runtime_topic_packet"]

View File

@ -0,0 +1,35 @@
import logging
from typing import Any, Dict, Optional
from sqlmodel import Session
from services.topic_service import _topic_publish_internal
from .publisher import build_topic_publish_payload
def publish_runtime_topic_packet(
    engine: Any,
    bot_id: str,
    packet: Dict[str, Any],
    source_channel: str,
    persisted_message_id: Optional[int],
    logger: logging.Logger,
) -> None:
    """Best-effort mirror of a persisted runtime packet into the topic feed.

    Only ASSISTANT_MESSAGE / BUS_EVENT packets that were persisted (have a
    message id) are considered. Publish failures are logged, never raised.
    """
    kind = str(packet.get("type") or "").strip().upper()
    if not persisted_message_id:
        return
    if kind not in {"ASSISTANT_MESSAGE", "BUS_EVENT"}:
        return
    enriched = {**packet, "channel": source_channel}
    payload = build_topic_publish_payload(bot_id, enriched, persisted_message_id)
    if not payload:
        return
    try:
        with Session(engine) as session:
            _topic_publish_internal(session, bot_id, payload)
    except Exception:
        # Topic mirroring must never break the main persistence path.
        logger.exception("topic auto publish failed for bot %s packet %s", bot_id, kind)

View File

@ -0,0 +1,117 @@
import re
from typing import Any, Dict, List, Optional
_MARKDOWN_PREFIX_RE = re.compile(r"^\s{0,3}(?:[#>*-]+|\d+[.)])\s*")
_TABLE_LINE_RE = re.compile(r"^\s*\|.*\|\s*$")
_SEPARATOR_LINE_RE = re.compile(r"^\s*[-=:_`~]{3,}\s*$")
def _clean_topic_line(raw: Any) -> str:
text = str(raw or "").strip()
if not text:
return ""
if _SEPARATOR_LINE_RE.fullmatch(text):
return ""
if _TABLE_LINE_RE.fullmatch(text):
return ""
text = _MARKDOWN_PREFIX_RE.sub("", text).strip()
return text
def _clean_topic_lines(content: str) -> List[str]:
    """Split *content* into lines and keep only the non-empty cleaned ones."""
    candidates = (_clean_topic_line(line) for line in str(content or "").splitlines())
    return [line for line in candidates if line]
def _extract_highlights(content: str) -> List[str]:
    """Pick up to three short highlight lines (bullets or key: value rows)."""
    highlights: List[str] = []
    for original in str(content or "").splitlines():
        stripped = str(original or "").strip()
        if not stripped:
            continue
        cleaned = _clean_topic_line(stripped)
        if cleaned:
            is_bullet = stripped.lstrip().startswith(("-", "*"))
            has_colon = ":" in cleaned or "：" in cleaned
            if is_bullet or has_colon:
                candidate = cleaned[:120]
                if candidate not in highlights:
                    highlights.append(candidate)
        if len(highlights) >= 3:
            break
    return highlights
def _unique_rows(rows: List[str]) -> List[str]:
deduped: List[str] = []
seen = set()
for row in rows:
value = str(row or "").strip()
if not value or value in seen:
continue
seen.add(value)
deduped.append(value)
return deduped
def _build_summary_card_view(title: str, content: str) -> Dict[str, Any]:
    """Build the summary_card view payload for a topic item.

    The card carries a short title (<=120 chars), a narrative summary
    (<=220), up to three highlight rows and a trailing snippet (<=180),
    all derived from the cleaned markdown lines of *content*.
    """
    cleaned = _clean_topic_lines(content)
    card_title = title or (cleaned[0] if cleaned else "")
    body_lines = [row for row in cleaned if row != card_title]

    def _is_narrative(row: str) -> bool:
        # Narrative text: not a bullet and carries no key:value separator.
        return not row.startswith(("-", "*")) and ":" not in row and "：" not in row

    narrative = [row for row in body_lines if _is_narrative(row)]
    summary = " ".join((narrative or body_lines)[:2]).strip()
    if not summary and cleaned:
        summary = cleaned[0]
    summary = summary[:220].strip()
    highlights = _unique_rows(_extract_highlights(content))[:3]
    leftovers = _unique_rows(
        [row for row in body_lines if row != summary and row not in highlights]
    )
    snippet = " ".join(leftovers[:2]).strip()[:180].strip()
    return {
        "type": "summary_card",
        "title": card_title[:120],
        "summary": summary,
        "highlights": highlights,
        "snippet": snippet,
    }
def build_topic_publish_payload(bot_id: str, packet: Dict[str, Any], message_id: Optional[int]) -> Optional[Dict[str, Any]]:
    """Convert a runtime packet into a topic publish payload, or None to skip.

    Progress BUS_EVENTs and empty-content packets are skipped. The title
    is the first cleaned line, clipped to 120 chars with an ellipsis.
    """
    kind = str(packet.get("type") or "").strip().upper()
    progress_flag = bool(packet.get("is_progress"))
    tool_hint_flag = bool(packet.get("is_tool_hint"))
    if kind == "BUS_EVENT":
        if progress_flag:
            return None
        content = str(packet.get("content") or packet.get("text") or "").strip()
    else:
        content = str(packet.get("text") or "").strip()
    if not content:
        return None
    cleaned = _clean_topic_lines(content)
    title = (cleaned[0] if cleaned else content[:120]).strip()
    if len(title) > 120:
        title = f"{title[:117].rstrip()}..."
    channel = str(packet.get("channel") or "dashboard").strip().lower() or "dashboard"
    return {
        "title": title,
        "content": content,
        "level": "info",
        "source": channel,
        "dedupe_key": f"{bot_id}:message:{message_id}" if message_id else "",
        "is_progress": progress_flag,
        "is_tool_hint": tool_hint_flag,
        "view": _build_summary_card_view(title, content),
    }

View File

@ -0,0 +1,717 @@
import json
import logging
import os
import re
import secrets
from datetime import datetime
from typing import Any, Dict, List, Optional
from sqlmodel import Session, select
from core.settings import BOTS_WORKSPACE_ROOT, TOPIC_MCP_INTERNAL_URL
from models.bot import BotInstance
from models.topic import TopicItem, TopicTopic
# Module logger for the built-in topic MCP subsystem.
logger = logging.getLogger("dashboard.topic_mcp")
# Bot ids are restricted to [A-Za-z0-9_] so they can be embedded in token prefixes.
BOT_ID_PATTERN = re.compile(r"^[A-Za-z0-9_]+$")
# Name of the reserved, dashboard-managed (locked) MCP server entry.
TOPIC_MCP_SERVER_NAME = "topic_mcp"
# HTTP header carrying the per-bot topic MCP auth token.
TOPIC_MCP_TOKEN_HEADER = "x-topic-mcp-token"
TOPIC_MCP_DEFAULT_URL = TOPIC_MCP_INTERNAL_URL
# Tool call timeout (seconds) written into the locked server config.
TOPIC_MCP_DEFAULT_TIMEOUT = 30
TOPIC_MCP_PROTOCOL_VERSION = "2025-03-26"
# Items sharing a dedupe_key within this window are dropped as duplicates.
TOPIC_DEDUPE_WINDOW_SECONDS = 10 * 60
# Allowed severity levels for topic items; anything else falls back to "info".
TOPIC_LEVEL_SET = {"info", "warn", "error", "success"}
_TOPIC_KEY_RE = re.compile(r"^[a-z0-9][a-z0-9_.-]{0,63}$")
def _bot_data_root(bot_id: str) -> str:
    """Return the bot's .nanobot data directory inside its workspace."""
    workspace = os.path.join(BOTS_WORKSPACE_ROOT, bot_id)
    return os.path.join(workspace, ".nanobot")
def _config_json_path(bot_id: str) -> str:
    """Path to the bot's nanobot config.json file."""
    root = _bot_data_root(bot_id)
    return os.path.join(root, "config.json")
def _read_bot_config(bot_id: str) -> Dict[str, Any]:
    """Load the bot's config.json; returns {} when missing, unreadable or non-dict."""
    config_path = _config_json_path(bot_id)
    if not os.path.isfile(config_path):
        return {}
    try:
        with open(config_path, "r", encoding="utf-8") as handle:
            parsed = json.load(handle)
    except Exception:
        # Best effort: treat unreadable or invalid JSON as an empty config.
        return {}
    return parsed if isinstance(parsed, dict) else {}
def _write_bot_config(bot_id: str, config_data: Dict[str, Any]) -> None:
    """Atomically persist the bot's config.json (tmp file + os.replace)."""
    target = _config_json_path(bot_id)
    os.makedirs(os.path.dirname(target), exist_ok=True)
    staging = f"{target}.tmp"
    with open(staging, "w", encoding="utf-8") as handle:
        json.dump(config_data, handle, ensure_ascii=False, indent=2)
    os.replace(staging, target)
def _dict_get_ci(raw: Any, key: str) -> Any:
if not isinstance(raw, dict):
return None
wanted = str(key or "").strip().lower()
for k, v in raw.items():
if str(k or "").strip().lower() == wanted:
return v
return None
def _as_bool(value: Any) -> bool:
if isinstance(value, bool):
return value
text = str(value or "").strip().lower()
return text in {"1", "true", "yes", "on", "y"}
def _extract_topic_mcp_token(server_cfg: Any) -> str:
    """Read the topic MCP auth token from a server config's headers ("" if absent)."""
    if not isinstance(server_cfg, dict):
        return ""
    token = _dict_get_ci(server_cfg.get("headers"), TOPIC_MCP_TOKEN_HEADER)
    return str(token or "").strip()
def _generate_topic_mcp_token(bot_id: str) -> str:
return f"{bot_id}.{secrets.token_urlsafe(24)}"
def _build_locked_topic_mcp_server(bot_id: str, token: str) -> Dict[str, Any]:
    """Canonical locked topic MCP server entry; mints a token when none is given."""
    auth_token = str(token or "").strip()
    if not auth_token:
        auth_token = _generate_topic_mcp_token(bot_id)
    return {
        "type": "streamableHttp",
        "url": TOPIC_MCP_DEFAULT_URL,
        "headers": {TOPIC_MCP_TOKEN_HEADER: auth_token},
        "toolTimeout": TOPIC_MCP_DEFAULT_TIMEOUT,
    }
def _annotate_locked_mcp_servers(raw_servers: Dict[str, Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
    """Copy each server config and flag the dashboard-managed topic server as locked."""
    return {
        name: {**cfg, "locked": name == TOPIC_MCP_SERVER_NAME}
        for name, cfg in raw_servers.items()
        if isinstance(cfg, dict)
    }
def _ensure_topic_mcp_server(bot_id: str, config_data: Optional[Dict[str, Any]] = None, persist: bool = True) -> Dict[str, Any]:
    """Guarantee the locked topic_mcp server entry exists in the bot config.

    An existing token is reused when present; otherwise a new one is
    minted. When *persist* is true the config file is rewritten only if
    the entry actually changed. Returns the canonical locked server dict.
    """
    working = dict(config_data) if isinstance(config_data, dict) else _read_bot_config(bot_id)
    tools_cfg = working.get("tools")
    if not isinstance(tools_cfg, dict):
        tools_cfg = {}
    mcp_servers = tools_cfg.get("mcpServers")
    if not isinstance(mcp_servers, dict):
        mcp_servers = {}
    existing_server = mcp_servers.get(TOPIC_MCP_SERVER_NAME)
    existing_token = _extract_topic_mcp_token(existing_server)
    locked_server = _build_locked_topic_mcp_server(bot_id, existing_token)
    # Compare before overwriting so unchanged configs skip the disk write.
    changed = mcp_servers.get(TOPIC_MCP_SERVER_NAME) != locked_server
    mcp_servers[TOPIC_MCP_SERVER_NAME] = locked_server
    tools_cfg["mcpServers"] = mcp_servers
    working["tools"] = tools_cfg
    if persist and changed:
        _write_bot_config(bot_id, working)
    return locked_server
def _resolve_topic_mcp_bot_id_by_token(session: Session, token: str) -> Optional[str]:
    """Map a topic MCP token back to its bot id, or None when unmatched.

    The bot id hinted by the token prefix is checked first, then every
    other known bot; tokens are compared with secrets.compare_digest.
    """
    presented = str(token or "").strip()
    if not presented:
        return None
    ordered_ids: List[str] = []
    prefix = presented.split(".", 1)[0].strip()
    if prefix and BOT_ID_PATTERN.fullmatch(prefix):
        ordered_ids.append(prefix)
    for bot in session.exec(select(BotInstance)).all():
        if bot.id not in ordered_ids:
            ordered_ids.append(bot.id)
    for candidate_id in ordered_ids:
        tools_section = _read_bot_config(candidate_id).get("tools")
        if not isinstance(tools_section, dict):
            continue
        mcp_map = tools_section.get("mcpServers")
        if not isinstance(mcp_map, dict):
            continue
        expected = _extract_topic_mcp_token(mcp_map.get(TOPIC_MCP_SERVER_NAME))
        if expected and secrets.compare_digest(expected, presented):
            return candidate_id
    return None
def _normalize_topic_key(raw: Any) -> str:
value = str(raw or "").strip().lower()
if not value:
return ""
return value
def _ensure_topic_defaults(session: Session, bot_id: str) -> None:
    """Deprecated no-op kept so existing call sites keep working.

    The topic feed global switch / fallback feature was removed; both
    arguments are intentionally ignored.
    """
    del session, bot_id
    return None
def _parse_json_dict(raw: str) -> Dict[str, Any]:
text = str(raw or "").strip()
if not text:
return {}
try:
data = json.loads(text)
return data if isinstance(data, dict) else {}
except Exception:
return {}
def _parse_json_list(raw: str) -> List[Any]:
text = str(raw or "").strip()
if not text:
return []
try:
data = json.loads(text)
except Exception:
return []
return data if isinstance(data, list) else []
def _topic_to_dict(row: TopicTopic) -> Dict[str, Any]:
    """Serialize a TopicTopic row into the JSON shape the API returns."""
    created = row.created_at.isoformat() if row.created_at else None
    updated = row.updated_at.isoformat() if row.updated_at else None
    return {
        "id": row.id,
        "bot_id": row.bot_id,
        "topic_key": str(row.topic_key or "").strip().lower(),
        "name": row.name or "",
        "description": row.description or "",
        "is_active": bool(row.is_active),
        "routing": _parse_json_dict(row.routing_json or "{}"),
        "view_schema": _parse_json_dict(row.view_schema_json or "{}"),
        "created_at": created,
        "updated_at": updated,
    }
def _list_topics(session: Session, bot_id: str) -> List[Dict[str, Any]]:
    """Fetch all topics for a bot (active first, then by key) as API dicts."""
    query = (
        select(TopicTopic)
        .where(TopicTopic.bot_id == bot_id)
        .order_by(TopicTopic.is_active.desc(), TopicTopic.topic_key.asc())
    )
    return [_topic_to_dict(topic) for topic in session.exec(query).all()]
def _topic_item_to_dict(row: TopicItem) -> Dict[str, Any]:
    """Serialize a TopicItem row into the JSON shape the API returns."""
    created = row.created_at.isoformat() if row.created_at else None
    return {
        "id": row.id,
        "bot_id": row.bot_id,
        "topic_key": str(row.topic_key or "").strip().lower(),
        "title": row.title or "",
        "content": row.content or "",
        "level": str(row.level or "info").strip().lower(),
        "tags": _parse_json_list(row.tags_json or "[]"),
        "view": _parse_json_dict(row.view_json or "{}"),
        "source": row.source or "mcp",
        "dedupe_key": row.dedupe_key or "",
        "is_read": bool(row.is_read),
        "created_at": created,
    }
def _topic_get_row(session: Session, bot_id: str, topic_key: str) -> Optional[TopicTopic]:
    """Look up one topic row by (bot_id, normalized topic_key); None if absent."""
    key = _normalize_topic_key(topic_key)
    if not key:
        return None
    query = (
        select(TopicTopic)
        .where(TopicTopic.bot_id == bot_id)
        .where(TopicTopic.topic_key == key)
        .limit(1)
    )
    return session.exec(query).first()
def _normalize_topic_keywords(raw: Any) -> List[str]:
rows: List[str] = []
if isinstance(raw, list):
for item in raw:
text = str(item or "").strip().lower()
if text and text not in rows:
rows.append(text)
elif isinstance(raw, str):
text = raw.strip().lower()
if text:
rows.append(text)
return rows
def _topic_filter_reason(payload: Dict[str, Any]) -> str:
    """Return why a payload should be skipped; "" means it is publishable."""
    if _as_bool(payload.get("is_progress")):
        return "progress message is filtered"
    if _as_bool(payload.get("is_tool_hint")):
        return "tool hint message is filtered"
    origin = str(payload.get("source") or payload.get("type") or "").strip().lower()
    filtered_origins = {"progress", "tool_hint", "sendprogress", "sendtoolhints"}
    if origin in filtered_origins:
        return f"{origin} message is filtered"
    return ""
def _topic_route_pick(
    session: Session,
    bot_id: str,
    payload: Dict[str, Any],
    requested_topic_key: str = "",
) -> Dict[str, Any]:
    """Choose the destination topic for a publish payload.

    Resolution order:
      1. No active topics -> unmatched.
      2. Explicit topic key (argument or payload) -> accepted when that
         topic exists and is active, otherwise unmatched.
      3. Keyword routing over title/content/tags: each active topic's
         routing config supplies include_when/exclude_when keyword lists
         and an optional priority (clamped to 0-100); the highest-scoring
         topic with at least one include hit and a positive score wins.

    Returns a dict with matched / topic_key / confidence / reason.

    Fix: removed the dead ``elif exclude_hits`` / ``else`` reason
    branches — include_hits is guaranteed non-empty past the continue
    guard, so only the include branch could ever execute.
    """
    active_topics = session.exec(
        select(TopicTopic)
        .where(TopicTopic.bot_id == bot_id)
        .where(TopicTopic.is_active == True)
        .order_by(TopicTopic.topic_key.asc())
    ).all()
    if not active_topics:
        return {
            "matched": False,
            "topic_key": None,
            "confidence": 1.0,
            "reason": "no active topic configured",
        }
    req_key = _normalize_topic_key(requested_topic_key or payload.get("topic_key") or payload.get("topic"))
    if req_key:
        row = _topic_get_row(session, bot_id, req_key)
        if row and bool(row.is_active):
            return {
                "matched": True,
                "topic_key": req_key,
                "confidence": 0.99,
                "reason": "explicit topic key accepted",
            }
        return {
            "matched": False,
            "topic_key": None,
            "confidence": 0.72,
            "reason": f"requested topic {req_key} unavailable or inactive",
        }
    # Routing evidence: title + content/text + tags, all lowercased.
    text = " ".join(
        [
            str(payload.get("title") or "").strip(),
            str(payload.get("content") or payload.get("text") or "").strip(),
            " ".join([str(v or "").strip() for v in (payload.get("tags") or [])]),
        ]
    ).strip().lower()
    if not text:
        return {
            "matched": False,
            "topic_key": None,
            "confidence": 1.0,
            "reason": "no routing evidence",
        }
    best_key = ""
    best_score = -10.0
    best_reason = "no topic matched"
    matched_include = False
    for topic in active_topics:
        key = _normalize_topic_key(topic.topic_key)
        if not key:
            continue
        routing = _parse_json_dict(topic.routing_json or "{}")
        include_when = _normalize_topic_keywords(routing.get("include_when"))
        exclude_when = _normalize_topic_keywords(routing.get("exclude_when"))
        priority_raw = routing.get("priority", 0)
        try:
            priority = max(0, min(int(priority_raw), 100))
        except Exception:
            priority = 0
        include_hits = [kw for kw in include_when if kw in text]
        exclude_hits = [kw for kw in exclude_when if kw in text]
        # Topics with no include hit never compete for the payload.
        if not include_hits:
            continue
        matched_include = True
        # Include hits add, exclude hits subtract; priority is a small tiebreaker.
        score = float(len(include_hits) * 2 - len(exclude_hits) * 3) + (priority / 1000.0)
        if score > best_score:
            best_score = score
            best_key = key
            best_reason = f"matched include_when: {', '.join(include_hits[:3])}"
    if not matched_include:
        return {
            "matched": False,
            "topic_key": None,
            "confidence": 0.68,
            "reason": "no include_when matched",
        }
    if best_score <= 0:
        return {
            "matched": False,
            "topic_key": None,
            "confidence": 0.68,
            "reason": "no positive routing score",
        }
    confidence = min(0.95, max(0.61, 0.61 + best_score / 12.0))
    return {
        "matched": True,
        "topic_key": best_key,
        "confidence": round(confidence, 3),
        "reason": best_reason,
    }
def _topic_publish_internal(session: Session, bot_id: str, payload: Dict[str, Any]) -> Dict[str, Any]:
    """Validate, route, dedupe and persist one topic item for a bot.

    Returns a structured outcome dict, one of:
    - ``skipped`` — system filter hit, empty title/content, no route matched,
      or the routed topic is missing/inactive;
    - ``deduped`` — ``dedupe_key`` matched an existing item inside the
      dedupe window;
    - ``published`` — a new row was committed to ``topic_item``.
    """
    # Hard system filters (e.g. progress / tool-hint streams) never publish.
    filter_reason = _topic_filter_reason(payload)
    if filter_reason:
        return {
            "published": False,
            "skipped": True,
            "reason": filter_reason,
        }
    title = str(payload.get("title") or "").strip()
    content = str(payload.get("content") or payload.get("text") or "").strip()
    if not title and not content:
        return {
            "published": False,
            "skipped": True,
            "reason": "empty title/content",
        }
    level = str(payload.get("level") or "info").strip().lower()
    if level not in TOPIC_LEVEL_SET:
        level = "info"
    # Normalize tags: strip, truncate to 64 chars, then de-duplicate.
    # Truncating BEFORE the membership check fixes a subtle bug where two tags
    # differing only beyond 64 chars both survived as identical entries.
    tags = payload.get("tags")
    tags_rows: List[str] = []
    if isinstance(tags, list):
        for tag in tags:
            text = str(tag or "").strip()[:64]
            if text and text not in tags_rows:
                tags_rows.append(text)
    route_result = _topic_route_pick(session, bot_id, payload, requested_topic_key=str(payload.get("topic_key") or ""))
    if not bool(route_result.get("matched")):
        return {
            "published": False,
            "skipped": True,
            "reason": str(route_result.get("reason") or "no topic matched"),
            "route": route_result,
        }
    topic_key = _normalize_topic_key(route_result.get("topic_key"))
    if not topic_key:
        return {
            "published": False,
            "skipped": True,
            "reason": "invalid topic route result",
            "route": route_result,
        }
    row = _topic_get_row(session, bot_id, topic_key)
    if not row or not bool(row.is_active):
        return {
            "published": False,
            "skipped": True,
            "reason": f"topic {topic_key} unavailable or inactive",
            "route": route_result,
        }
    # Idempotency: if the same dedupe_key was published recently, short-circuit
    # and return the existing item instead of inserting a duplicate.
    dedupe_key = str(payload.get("dedupe_key") or "").strip()
    if dedupe_key:
        existing = session.exec(
            select(TopicItem)
            .where(TopicItem.bot_id == bot_id)
            .where(TopicItem.dedupe_key == dedupe_key)
            .order_by(TopicItem.id.desc())
            .limit(1)
        ).first()
        if existing and existing.created_at:
            age_s = (datetime.utcnow() - existing.created_at).total_seconds()
            if age_s <= TOPIC_DEDUPE_WINDOW_SECONDS:
                return {
                    "published": False,
                    "deduped": True,
                    "dedupe_window_seconds": TOPIC_DEDUPE_WINDOW_SECONDS,
                    "topic_key": _normalize_topic_key(existing.topic_key),
                    "reason": "dedupe_key hit within dedupe window",
                    "item": _topic_item_to_dict(existing),
                }
    # Only dict-shaped views are stored; anything else is dropped silently.
    view = payload.get("view")
    view_json = json.dumps(view, ensure_ascii=False) if isinstance(view, dict) else None
    source = str(payload.get("source") or "mcp").strip().lower() or "mcp"
    now = datetime.utcnow()
    item = TopicItem(
        bot_id=bot_id,
        topic_key=topic_key,
        title=title[:2000],
        content=content[:20000],
        level=level,
        tags_json=json.dumps(tags_rows, ensure_ascii=False) if tags_rows else None,
        view_json=view_json,
        source=source[:64],
        dedupe_key=dedupe_key[:200] if dedupe_key else None,
        is_read=False,
        created_at=now,
    )
    session.add(item)
    session.commit()
    session.refresh(item)
    return {
        "published": True,
        "topic_key": topic_key,
        "item": _topic_item_to_dict(item),
        "route": route_result,
    }
def _jsonrpc_success(rpc_id: Any, result: Any) -> Dict[str, Any]:
return {
"jsonrpc": "2.0",
"id": rpc_id,
"result": result,
}
def _jsonrpc_error(rpc_id: Any, code: int, message: str, data: Any = None) -> Dict[str, Any]:
payload: Dict[str, Any] = {
"jsonrpc": "2.0",
"id": rpc_id,
"error": {
"code": int(code),
"message": str(message or "unknown error"),
},
}
if data is not None:
payload["error"]["data"] = data
return payload
def _mcp_tool_result(structured: Dict[str, Any], is_error: bool = False) -> Dict[str, Any]:
return {
"content": [
{
"type": "text",
"text": json.dumps(structured, ensure_ascii=False),
}
],
"structuredContent": structured,
"isError": bool(is_error),
}
def _topic_mcp_tools() -> List[Dict[str, Any]]:
return [
{
"name": "topic_list_topics",
"description": "List available topics for the current bot.",
"inputSchema": {
"type": "object",
"properties": {
"include_inactive": {"type": "boolean"},
},
"additionalProperties": False,
},
},
{
"name": "topic_get_schema",
"description": "Get allowed view schema and optional topic-specific schema.",
"inputSchema": {
"type": "object",
"properties": {
"topic_key": {"type": "string"},
},
"additionalProperties": False,
},
},
{
"name": "topic_route",
"description": "Route candidate content to a topic and decide if publish is needed.",
"inputSchema": {
"type": "object",
"properties": {
"topic_key": {"type": "string"},
"title": {"type": "string"},
"content": {"type": "string"},
"tags": {"type": "array", "items": {"type": "string"}},
"is_progress": {"type": "boolean"},
"is_tool_hint": {"type": "boolean"},
"source": {"type": "string"},
},
"additionalProperties": True,
},
},
{
"name": "topic_publish",
"description": "Publish one item into topic feed with dedupe support.",
"inputSchema": {
"type": "object",
"properties": {
"topic_key": {"type": "string"},
"title": {"type": "string"},
"content": {"type": "string"},
"level": {"type": "string"},
"tags": {"type": "array", "items": {"type": "string"}},
"view": {"type": "object"},
"dedupe_key": {"type": "string"},
"source": {"type": "string"},
"is_progress": {"type": "boolean"},
"is_tool_hint": {"type": "boolean"},
},
"additionalProperties": True,
},
},
]
def _topic_mcp_list_topics(session: Session, bot_id: str, args: Dict[str, Any]) -> Dict[str, Any]:
    """List topics for *bot_id*.

    Inactive topics are included by default; passing an explicit falsy
    ``include_inactive`` filters them out.
    """
    _ensure_topic_defaults(session, bot_id)
    # Absent key defaults to True; only an explicit falsy value filters.
    include_inactive = ("include_inactive" not in args) or _as_bool(args.get("include_inactive"))
    rows = _list_topics(session, bot_id)
    if not include_inactive:
        rows = [entry for entry in rows if bool(entry.get("is_active"))]
    return {"bot_id": bot_id, "topics": rows}
def _topic_mcp_get_schema(session: Session, bot_id: str, args: Dict[str, Any]) -> Dict[str, Any]:
    """Return the declarative view schema and publish constraints.

    When a valid ``topic_key`` is supplied and the topic exists, its
    serialized definition is included under ``topic``; otherwise ``topic``
    is ``None``.
    """
    _ensure_topic_defaults(session, bot_id)
    topic_payload: Optional[Dict[str, Any]] = None
    topic_key = _normalize_topic_key(args.get("topic_key"))
    if topic_key:
        row = _topic_get_row(session, bot_id, topic_key)
        topic_payload = _topic_to_dict(row) if row else None
    return {
        "version": "v1",
        "view_types": ["markdown", "card", "table", "checklist", "metric", "timeline"],
        "topic": topic_payload,
        "view_schema": {
            "type": "object",
            "description": "Declarative view payload only. Scripts and unsafe HTML are not allowed.",
        },
        "publish_constraints": {
            "level": sorted(TOPIC_LEVEL_SET),
            "dedupe_window_seconds": TOPIC_DEDUPE_WINDOW_SECONDS,
        },
    }
def _topic_mcp_route(session: Session, bot_id: str, args: Dict[str, Any]) -> Dict[str, Any]:
    """Decide whether candidate content should be published, and to which topic."""
    _ensure_topic_defaults(session, bot_id)

    def declined(reason: str) -> Dict[str, Any]:
        # Confident negative decision taken before consulting the router.
        return {
            "should_publish": False,
            "topic_key": None,
            "confidence": 1.0,
            "reason": reason,
        }

    filter_reason = _topic_filter_reason(args)
    if filter_reason:
        return declined(filter_reason)
    title = str(args.get("title") or "").strip()
    content = str(args.get("content") or args.get("text") or "").strip()
    if not (title or content):
        return declined("empty title/content")
    route = _topic_route_pick(session, bot_id, args, requested_topic_key=str(args.get("topic_key") or ""))
    return {
        "should_publish": bool(route.get("matched")),
        "topic_key": route.get("topic_key"),
        "confidence": route.get("confidence"),
        "reason": route.get("reason"),
    }
def _topic_mcp_publish(session: Session, bot_id: str, args: Dict[str, Any]) -> Dict[str, Any]:
    """MCP tool entry point for ``topic_publish``; delegates to the shared internal publisher."""
    return _topic_publish_internal(session, bot_id, args)
def _dispatch_topic_mcp_method(session: Session, bot_id: str, method: str, params: Dict[str, Any]) -> Any:
    """Execute one MCP method for the built-in topic server.

    Raises:
        KeyError: for an unknown method (caller maps this to JSON-RPC -32601).
    """
    if method == "initialize":
        return {
            "protocolVersion": TOPIC_MCP_PROTOCOL_VERSION,
            "capabilities": {"tools": {}},
            "serverInfo": {"name": TOPIC_MCP_SERVER_NAME, "version": "0.1.0"},
        }
    if method in {"notifications/initialized", "initialized"}:
        # Client-side notification; no payload to return.
        return None
    if method == "ping":
        return {}
    if method == "tools/list":
        return {"tools": _topic_mcp_tools()}
    if method != "tools/call":
        raise KeyError(f"Unknown method: {method}")
    tool_name = str(params.get("name") or "").strip()
    arguments = params.get("arguments")
    if not isinstance(arguments, dict):
        arguments = {}
    # Table-driven tool dispatch; all handlers share the (session, bot_id, args) shape.
    handlers = {
        "topic_list_topics": _topic_mcp_list_topics,
        "topic_get_schema": _topic_mcp_get_schema,
        "topic_route": _topic_mcp_route,
        "topic_publish": _topic_mcp_publish,
    }
    handler = handlers.get(tool_name)
    if handler is not None:
        return _mcp_tool_result(handler(session, bot_id, arguments))
    return _mcp_tool_result(
        {
            "error": f"unknown tool: {tool_name}",
            "available_tools": [tool["name"] for tool in _topic_mcp_tools()],
        },
        is_error=True,
    )
def _handle_topic_mcp_rpc_item(session: Session, bot_id: str, item: Any) -> Optional[Dict[str, Any]]:
    """Process a single JSON-RPC request object.

    Returns a response envelope, or ``None`` for notifications (requests
    without an ``id``) that executed without error.
    """
    if not isinstance(item, dict):
        return _jsonrpc_error(None, -32600, "Invalid Request")
    rpc_id = item.get("id")
    method = str(item.get("method") or "").strip()
    if not method:
        return _jsonrpc_error(rpc_id, -32600, "Invalid Request: method is required")
    # Missing params defaults to {}; a present non-dict value is rejected
    # (note: a falsy-but-present value like [] is still an error).
    params = item.get("params")
    if params is None:
        params = {}
    if not isinstance(params, dict):
        return _jsonrpc_error(rpc_id, -32602, "Invalid params")
    try:
        result = _dispatch_topic_mcp_method(session, bot_id, method, params)
    except KeyError as exc:
        return _jsonrpc_error(rpc_id, -32601, str(exc))
    except ValueError as exc:
        return _jsonrpc_error(rpc_id, -32602, str(exc))
    except Exception as exc:
        logger.exception("topic_mcp method failed: %s", method)
        return _jsonrpc_error(rpc_id, -32000, f"topic_mcp execution failed: {type(exc).__name__}: {exc}")
    # Notifications (no id) get no response per JSON-RPC 2.0.
    return None if rpc_id is None else _jsonrpc_success(rpc_id, result)

View File

@ -0,0 +1,7 @@
{
"agents_md": "- 优先完成任务目标\n- 操作前先说明意图\n- 输出必须可执行\n\n## 默认输出规范\n\n- 每次执行任务时,在 workspace 中创建新目录保存本次输出。\n- 输出内容默认采用 Markdown.md格式。\n- 最终报告需求.md和.htm双格式。",
"soul_md": "你是专业的企业数字员工,表达清晰、可执行。",
"user_md": "- 语言: 中文\n- 风格: 专业\n- 偏好: 简明且有步骤",
"tools_md": "- 谨慎使用 shell\n- 修改文件后复核\n- 失败时说明原因并重试策略",
"identity_md": "- 角色: 企业数字员工\n- 领域: 运维与任务执行"
}

View File

@ -0,0 +1,134 @@
{
"presets": [
{
"id": "politics",
"topic_key": "politics_news",
"name": "时政新闻",
"description": "沉淀国内外时政动态、政策发布与重大公共治理事件,便于集中查看。",
"routing_purpose": "收录与政府决策、政策法规、外交事务及公共治理相关的关键信息。",
"routing_include_when": [
"时政",
"政策",
"法规",
"国务院",
"政府",
"部委",
"人大",
"政协",
"外交",
"国际关系",
"白宫",
"总统",
"议会",
"election",
"policy"
],
"routing_exclude_when": [
"娱乐",
"明星",
"综艺",
"体育",
"游戏",
"购物",
"种草",
"广告"
],
"routing_examples_positive": [
"国务院发布新一轮宏观政策措施。",
"外交部就国际热点事件发布声明。",
"某国总统宣布新的对外政策方向。"
],
"routing_examples_negative": [
"某明星新剧开播引发热议。",
"某球队转会新闻与赛果分析。",
"数码产品促销与购物推荐汇总。"
],
"routing_priority": 85
},
{
"id": "finance",
"topic_key": "finance_market",
"name": "财经信息",
"description": "聚合宏观经济、市场波动、公司财报与监管政策等财经信息。",
"routing_purpose": "沉淀与资本市场、行业景气、资产价格相关的关键结论与风险提示。",
"routing_include_when": [
"财经",
"金融",
"股市",
"A股",
"港股",
"美股",
"债券",
"汇率",
"利率",
"通胀",
"GDP",
"财报",
"央行",
"market",
"earnings"
],
"routing_exclude_when": [
"娱乐",
"体育",
"游戏",
"影视",
"八卦",
"生活方式",
"旅行攻略"
],
"routing_examples_positive": [
"央行公布最新利率决议并释放政策信号。",
"上市公司发布季度财报并上调全年指引。",
"美元指数走强导致主要货币普遍承压。"
],
"routing_examples_negative": [
"某综艺节目收视排名变化。",
"某球员转会传闻引发讨论。",
"新游上线玩法测评。"
],
"routing_priority": 80
},
{
"id": "tech",
"topic_key": "tech_updates",
"name": "技术资讯",
"description": "追踪 AI、云计算、开源社区与开发工具链的最新技术资讯。",
"routing_purpose": "沉淀技术发布、版本升级、兼容性变更与工程实践建议。",
"routing_include_when": [
"技术",
"开源",
"AI",
"模型",
"大语言模型",
"MCP",
"API",
"SDK",
"发布",
"版本",
"升级",
"breaking change",
"security advisory"
],
"routing_exclude_when": [
"娱乐",
"体育",
"美食",
"旅游",
"情感",
"八卦"
],
"routing_examples_positive": [
"某主流框架发布新版本并调整默认配置。",
"开源项目披露高危安全漏洞并给出修复方案。",
"AI 模型服务更新 API返回结构发生变化。"
],
"routing_examples_negative": [
"某艺人参加活动造型盘点。",
"旅游目的地打卡攻略合集。",
"比赛结果预测与竞猜。"
],
"routing_priority": 75
}
]
}

271
design/topic-mcp.md 100644
View File

@ -0,0 +1,271 @@
# Topic 消息流方案(`topic_mcp`
## 1. 目标与边界
### 1.1 目标
- 将“对话消息”与“任务反馈消息”分离。
- 新增 RSS 风格 Topic 面板,支持按 Topic 订阅和无干扰查看。
- 由模型决定“是否写入 Topic、写入哪个 Topic”。
### 1.2 约束
- 尽量不修改 nanobot 引擎源码(`engines/nanobot-*`)。
- 若必须扩展能力,优先通过 Dashboard 内嵌 MCP Server 实现。
- MCP Server 固定命名:`topic_mcp`。
## 2. 术语统一(避免混淆)
- `transport_channel`:机器人对外通信渠道(`dashboard/telegram/feishu/qq/...`)。
- `topic`Dashboard 内的消息流分组(例如 `inbox`、`build`、`alerts`)。
- `topic_item`:某个 topic 下的一条消息。
## 3. 总体架构
```mermaid
flowchart LR
U["User"] --> UI["Dashboard UI"]
UI --> API["Dashboard Backend (FastAPI)"]
API --> DB["SQLite topic_* tables"]
API --> MCP["topic_mcp (streamableHttp)"]
BOT["nanobot"] -->|"MCP call"| MCP
MCP --> DB
BOT -->|"normal chat via dashboard transport_channel"| API
API --> UI
```
说明:
- 聊天消息仍走现有 dashboard 渠道。
- Topic 写入走 `topic_mcp` 工具调用,不侵入 nanobot 核心 loop/channel。
- Topic 路由与投递对所有 `transport_channel` 生效(`dashboard/feishu/telegram/qq/...`),即无论用户从哪个渠道发起请求,只要模型调用 `topic_*` 工具,都会统一写入 Topic 面板。
## 4. `topic_mcp` 配置策略(默认写入、不可删除)
### 4.1 写入位置
写入 bot 的 `.nanobot/config.json`
```json
{
"tools": {
"mcpServers": {
"topic_mcp": {
"type": "streamableHttp",
"url": "http://host.docker.internal:8000/api/mcp/topic",
"headers": {
"x-topic-mcp-token": "<bot-scoped-token>"
},
"toolTimeout": 30
}
}
}
}
```
### 4.2 不可删除规则
在 Dashboard 后端实现硬约束:
1. Bot 创建后自动注入 `topic_mcp`
2. `PUT /api/bots/{bot_id}/mcp-config` 全量更新时,若缺失 `topic_mcp`,后端自动补回。
3. 对 `topic_mcp` 的关键字段做白名单校验(`type/url/headers/toolTimeout`)。
4. 返回给前端时标记 `locked=true`,前端禁用删除按钮(但以后端校验为准)。
## 5. `topic_mcp` 工具定义
建议工具集V1
1. `topic_list_topics`
- 作用:返回当前 bot 可投递的 topic 清单与路由说明。
- 用途:让模型先了解可投递目标,避免“靠名字猜”。
2. `topic_get_schema`
- 作用:返回允许的 `view` 结构定义(声明式渲染,不允许脚本)。
- 用途:让模型生成可渲染卡片,而非任意 HTML/JS。
3. `topic_route`
- 作用:输入候选内容,返回 `should_publish`、`topic_key`、`confidence`、`reason`。
- 用途:把“该不该发、发到哪”交给 Dashboard 规则层,降低模型随意性。
4. `topic_publish`
- 作用:写入一条 topic 消息。
- 关键字段:`topic_key/title/content/level/tags/view/dedupe_key`.
说明:
- 不建议 V1 暴露 `topic_create/topic_delete` 给模型。
- topic 由用户在 Dashboard 维护,模型仅消费已发布规则并投递。
- `sendProgress/sendToolHints` 产生的进度流和工具提示流不应投递到 Topic默认硬过滤
## 6. 数据模型Backend
### 6.1 `topic_bot_settings`
- `id` (PK)
- `bot_id` (unique, index)
- `topic_enabled`Topic 总开关,默认 `true`
- `created_at/updated_at`
### 6.2 `topic_topic`
- `id` (PK)
- `bot_id` (index)
- `topic_key` (unique with `bot_id`)
- `name`
- `description`
- `is_active`
- `is_default_fallback`
- `routing_json`(包含 include/exclude/examples/priority
- `view_schema_json`(允许的渲染模板)
- `created_at/updated_at`
### 6.3 `topic_item`
- `id` (PK)
- `bot_id` (index)
- `topic_key` (index)
- `title`
- `content`
- `level` (`info/warn/error/success`)
- `tags_json`
- `view_json`
- `source` (`mcp/manual/system`)
- `dedupe_key` (index)
- `is_read`
- `created_at`
约束:
- 同一 `bot_id + dedupe_key` 可设置幂等去重窗口(如 10 分钟)。
- 未命中 topic 时回退到默认 topic推荐 `inbox`)。
## 7. 后端 APIDashboard
面向 UI 的 REST示例
- `GET /api/bots/{bot_id}/topics`
- `POST /api/bots/{bot_id}/topics`
- `PUT /api/bots/{bot_id}/topics/{topic_key}`
- `DELETE /api/bots/{bot_id}/topics/{topic_key}`(禁止删除 fallback
- `GET /api/bots/{bot_id}/topic-settings`
- `PUT /api/bots/{bot_id}/topic-settings`(配置 `topic_enabled` 开关)
- `GET /api/bots/{bot_id}/topic-items?topic_key=...&cursor=...`
- `POST /api/bots/{bot_id}/topic-items/{id}/read`
面向 bot 的 MCP
- `POST /api/mcp/topic`streamableHttp
## 8. 路由策略(模型如何判断)
最终策略是“模型 + 规则引擎”:
1. 模型提取候选信息(摘要、类型、重要度)。
2. 调用 `topic_route` 获取决策建议。
3. `should_publish=true` 时调用 `topic_publish`
4. `should_publish=false` 不写 Topic仅保留聊天输出。
5. 当 `topic_enabled=false` 时,`topic_route` 固定返回 `should_publish=false`
6. 当消息被判定为 progress/tool-hint对应 `sendProgress/sendToolHints` 流)时,固定 `should_publish=false`
topic 的路由配置建议包含:
- `purpose`topic 用途
- `include_when`:应收条件
- `exclude_when`:排除条件
- `examples`:正反例
- `priority`:冲突优先级
- `system_filters`:系统硬过滤(如 progress/tool-hint
金融信息 topic 路由示例(`topic_key=finance_market`
```json
{
"purpose": "沉淀与金融市场相关的关键结论和告警,便于用户集中查看。",
"include_when": [
"内容涉及股票/指数/外汇/利率/宏观数据/财报",
"包含明确数值、时间点、事件影响判断",
"属于'可执行建议'或'风险提示'"
],
"exclude_when": [
"纯寒暄或无结论聊天",
"与金融无关的任务进度",
"重复内容且dedupe_key命中窗口"
],
"examples": {
"positive": [
"美联储议息后10Y美债收益率上行20bp成长股承压",
"AAPL财报超预期但指引下调盘后转跌建议关注开盘波动"
],
"negative": [
"我已经开始处理你的请求",
"文件上传成功"
]
},
"priority": 80,
"fallback_topic_key": "inbox"
}
```
## 9. 前端 Topic Panel
UI 结构建议:
- 左列topic 列表(未读计数、筛选)
- 中列topic item 流(时间倒序、分页)
- 右列item 详情与结构化视图渲染
- 顶部:`Topic Enabled` 开关(开/关)
开关行为:
- `ON`:正常执行 `topic_route/topic_publish`
- `OFF`Topic 面板只读历史,不接收新投递;`topic_route` 返回 `should_publish=false`
- `topic_mcp` 配置仍保留且不可删除,避免反复改写 `config.json`
渲染要求:
- `view` 仅支持声明式类型:`markdown/card/table/checklist/metric/timeline`
- 禁止执行脚本与任意 HTML 注入
- 超长内容折叠 + 展开
## 10. 安全与治理
- `topic_mcp` 使用 bot 级 token 鉴权。
- 限流:每 bot 每分钟最大 publish 次数。
- 去重:`dedupe_key` + 时间窗口。
- 噪声过滤:`sendProgress/sendToolHints` 的进度与工具提示消息不进入 Topic。
- 降级:无 topic/路由失败时投递 `inbox` 或直接跳过(按策略)。
- 审计:记录每次 `topic_route/topic_publish` 调用。
## 11. 分阶段落地
### Phase 1最小可用
- 新增 `topic_bot_settings` / `topic_topic` / `topic_item`
- 新增 Topic Panel 基础查询展示
- 实现 `topic_mcp` + `topic_publish`
- 注入并锁定 `config.json.tools.mcpServers.topic_mcp`
### Phase 2稳定化
- 增加 `topic_list_topics/topic_get_schema/topic_route`
- 加去重、限流、fallback、审计日志
- 增加 topic 管理 UI规则编辑
### Phase 3体验增强
- item 模板优化与卡片渲染
- 未读/置顶/归档
- 统计与告警topic 热点、失败率)
## 12. 与现有代码对接点
- MCP 配置接口:`/api/bots/{bot_id}/mcp-config`
文件:[main.py](/Users/jiliu/WorkSpace/dashboard-nanobot/backend/main.py)
- 前端主面板:
[BotDashboardModule.tsx](/Users/jiliu/WorkSpace/dashboard-nanobot/frontend/src/modules/dashboard/BotDashboardModule.tsx)
- 实时同步:
[useBotsSync.ts](/Users/jiliu/WorkSpace/dashboard-nanobot/frontend/src/hooks/useBotsSync.ts)
原则:
- 不改 nanobot 核心引擎行为;
- 通过 Dashboard 承担 Topic 规则、投递和展示。

View File

@ -24,6 +24,8 @@ services:
REDIS_PREFIX: ${REDIS_PREFIX:-dashboard_nanobot} REDIS_PREFIX: ${REDIS_PREFIX:-dashboard_nanobot}
REDIS_DEFAULT_TTL: ${REDIS_DEFAULT_TTL:-60} REDIS_DEFAULT_TTL: ${REDIS_DEFAULT_TTL:-60}
PANEL_ACCESS_PASSWORD: ${PANEL_ACCESS_PASSWORD:-} PANEL_ACCESS_PASSWORD: ${PANEL_ACCESS_PASSWORD:-}
AGENT_MD_TEMPLATES_FILE: ${AGENT_MD_TEMPLATES_FILE:-templates/agent_md_templates.json}
TOPIC_PRESETS_TEMPLATES_FILE: ${TOPIC_PRESETS_TEMPLATES_FILE:-templates/topic_presets.json}
STT_ENABLED: ${STT_ENABLED:-true} STT_ENABLED: ${STT_ENABLED:-true}
STT_MODEL: ${STT_MODEL:-ggml-small-q8_0.bin} STT_MODEL: ${STT_MODEL:-ggml-small-q8_0.bin}
STT_MODEL_DIR: ${STT_MODEL_DIR:-${HOST_DATA_ROOT}/model} STT_MODEL_DIR: ${STT_MODEL_DIR:-${HOST_DATA_ROOT}/model}
@ -65,6 +67,8 @@ services:
image: dashboard-nanobot/nginx:${FRONTEND_IMAGE_TAG:-latest} image: dashboard-nanobot/nginx:${FRONTEND_IMAGE_TAG:-latest}
container_name: dashboard-nanobot-nginx container_name: dashboard-nanobot-nginx
restart: unless-stopped restart: unless-stopped
environment:
UPLOAD_MAX_MB: ${UPLOAD_MAX_MB:-100}
depends_on: depends_on:
backend: backend:
condition: service_healthy condition: service_healthy

View File

@ -24,9 +24,11 @@ RUN yarn build
FROM ${NGINX_BASE_IMAGE} FROM ${NGINX_BASE_IMAGE}
COPY docker/nginx.conf /etc/nginx/conf.d/default.conf COPY docker/nginx.conf /etc/nginx/conf.d/default.conf.template
COPY docker/entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
COPY --from=build /app/dist /usr/share/nginx/html COPY --from=build /app/dist /usr/share/nginx/html
EXPOSE 80 EXPOSE 80
CMD ["nginx", "-g", "daemon off;"] CMD ["/entrypoint.sh"]

View File

@ -0,0 +1,19 @@
#!/bin/sh
set -eu
size="${UPLOAD_MAX_MB:-100}"
size="$(printf '%s' "$size" | tr -d '[:space:]')"
if [ -z "$size" ]; then
size="100"
fi
case "$size" in
*[kKmMgG]) ;;
*) size="${size}m" ;;
esac
sed "s/__CLIENT_MAX_BODY_SIZE__/${size}/g" \
/etc/nginx/conf.d/default.conf.template \
> /etc/nginx/conf.d/default.conf
exec nginx -g "daemon off;"

View File

@ -5,7 +5,7 @@ upstream nanobot_backend {
server { server {
listen 80; listen 80;
server_name _; server_name _;
client_max_body_size 100m; client_max_body_size __CLIENT_MAX_BODY_SIZE__;
root /usr/share/nginx/html; root /usr/share/nginx/html;
index index.html; index index.html;

View File

@ -59,6 +59,12 @@ body {
padding: 18px; padding: 18px;
} }
.app-shell-compact {
height: 100dvh;
min-height: 100dvh;
overflow: hidden;
}
.app-frame { .app-frame {
height: calc(100vh - 36px); height: calc(100vh - 36px);
display: grid; display: grid;
@ -66,6 +72,11 @@ body {
gap: 14px; gap: 14px;
} }
.app-shell-compact .app-frame {
height: calc(100dvh - 36px);
min-height: calc(100dvh - 36px);
}
.app-header { .app-header {
background: var(--panel); background: var(--panel);
border: 1px solid var(--line); border: 1px solid var(--line);
@ -220,6 +231,12 @@ body {
height: 100%; height: 100%;
} }
.app-shell-compact .main-stage {
min-height: 0;
height: 100%;
overflow: hidden;
}
.app-login-shell { .app-login-shell {
min-height: calc(100vh - 36px); min-height: calc(100vh - 36px);
display: flex; display: flex;
@ -1153,6 +1170,11 @@ body {
min-height: calc(100vh - 36px); min-height: calc(100vh - 36px);
} }
.app-shell-compact .app-frame {
height: calc(100dvh - 24px);
min-height: calc(100dvh - 24px);
}
.app-shell { .app-shell {
padding: 12px; padding: 12px;
} }
@ -1177,4 +1199,11 @@ body {
grid-template-columns: 1fr; grid-template-columns: 1fr;
grid-template-rows: auto auto; grid-template-rows: auto auto;
} }
.app-shell-compact .grid-ops.grid-ops-compact {
grid-template-columns: 1fr;
grid-template-rows: minmax(0, 1fr) auto;
height: 100%;
min-height: 0;
}
} }

View File

@ -1,4 +1,18 @@
import type { ReactNode } from 'react'; import {
Children,
cloneElement,
isValidElement,
useCallback,
useEffect,
useId,
useLayoutEffect,
useMemo,
useRef,
useState,
type ReactElement,
type ReactNode,
} from 'react';
import { createPortal } from 'react-dom';
import './lucent-tooltip.css'; import './lucent-tooltip.css';
interface LucentTooltipProps { interface LucentTooltipProps {
@ -7,16 +21,139 @@ interface LucentTooltipProps {
side?: 'top' | 'bottom'; side?: 'top' | 'bottom';
} }
type TooltipLayout = {
left: number;
top: number;
side: 'top' | 'bottom';
arrowLeft: number;
};
const VIEWPORT_GAP = 8;
const TOOLTIP_GAP = 10;
export function LucentTooltip({ content, children, side = 'top' }: LucentTooltipProps) { export function LucentTooltip({ content, children, side = 'top' }: LucentTooltipProps) {
const text = String(content || '').trim(); const text = String(content || '').trim();
const tooltipId = useId();
const wrapRef = useRef<HTMLSpanElement | null>(null);
const bubbleRef = useRef<HTMLSpanElement | null>(null);
const [visible, setVisible] = useState(false);
const [layout, setLayout] = useState<TooltipLayout | null>(null);
const child = useMemo(() => {
const first = Children.only(children) as ReactNode;
return isValidElement(first) ? (first as ReactElement<{ 'aria-describedby'?: string }>) : null;
}, [children]);
const updatePosition = useCallback(() => {
const wrap = wrapRef.current;
const bubble = bubbleRef.current;
if (!wrap || !bubble) return;
const anchorRect = wrap.getBoundingClientRect();
const bubbleRect = bubble.getBoundingClientRect();
const viewportWidth = window.innerWidth;
const viewportHeight = window.innerHeight;
let finalSide: 'top' | 'bottom' = side;
const topSpace = anchorRect.top - VIEWPORT_GAP;
const bottomSpace = viewportHeight - anchorRect.bottom - VIEWPORT_GAP;
if (side === 'top' && bubbleRect.height + TOOLTIP_GAP > topSpace && bottomSpace > topSpace) {
finalSide = 'bottom';
} else if (side === 'bottom' && bubbleRect.height + TOOLTIP_GAP > bottomSpace && topSpace > bottomSpace) {
finalSide = 'top';
}
const anchorCenterX = anchorRect.left + (anchorRect.width / 2);
const unclampedLeft = anchorCenterX - (bubbleRect.width / 2);
const minLeft = VIEWPORT_GAP;
const maxLeft = Math.max(VIEWPORT_GAP, viewportWidth - bubbleRect.width - VIEWPORT_GAP);
const left = Math.min(Math.max(unclampedLeft, minLeft), maxLeft);
const top = finalSide === 'top'
? anchorRect.top - bubbleRect.height - TOOLTIP_GAP
: anchorRect.bottom + TOOLTIP_GAP;
const minArrowLeft = 12;
const maxArrowLeft = Math.max(minArrowLeft, bubbleRect.width - 12);
const arrowLeft = Math.min(Math.max(anchorCenterX - left, minArrowLeft), maxArrowLeft);
setLayout((prev) => {
const next: TooltipLayout = { left, top, side: finalSide, arrowLeft };
if (
prev
&& Math.abs(prev.left - next.left) < 0.5
&& Math.abs(prev.top - next.top) < 0.5
&& Math.abs(prev.arrowLeft - next.arrowLeft) < 0.5
&& prev.side === next.side
) {
return prev;
}
return next;
});
}, [side]);
useLayoutEffect(() => {
if (!visible) {
setLayout(null);
return;
}
updatePosition();
}, [updatePosition, visible, text]);
useEffect(() => {
if (!visible) return;
const handleWindowChange = () => updatePosition();
window.addEventListener('scroll', handleWindowChange, true);
window.addEventListener('resize', handleWindowChange);
return () => {
window.removeEventListener('scroll', handleWindowChange, true);
window.removeEventListener('resize', handleWindowChange);
};
}, [updatePosition, visible]);
if (!text) return <>{children}</>; if (!text) return <>{children}</>;
const enhancedChild = child
? cloneElement(child, {
'aria-describedby': child.props['aria-describedby'] || tooltipId,
})
: children;
return ( return (
<span className={`lucent-tooltip-wrap side-${side}`}> <>
{children} <span
<span className="lucent-tooltip-bubble" role="tooltip"> ref={wrapRef}
{text} className="lucent-tooltip-wrap"
onMouseEnter={() => setVisible(true)}
onMouseLeave={() => setVisible(false)}
onFocusCapture={() => setVisible(true)}
onBlurCapture={(event) => {
if (!event.currentTarget.contains(event.relatedTarget as Node | null)) {
setVisible(false);
}
}}
>
{enhancedChild}
</span> </span>
</span> {visible && typeof document !== 'undefined'
? createPortal(
<span
ref={bubbleRef}
id={tooltipId}
className={`lucent-tooltip-bubble is-visible side-${layout?.side || side}`}
role="tooltip"
style={{
left: layout ? `${layout.left}px` : '-9999px',
top: layout ? `${layout.top}px` : '-9999px',
['--lucent-tooltip-arrow-left' as string]: layout ? `${layout.arrowLeft}px` : '50%',
}}
>
{text}
</span>,
document.body,
)
: null}
</>
); );
} }

View File

@ -1,12 +1,9 @@
.lucent-tooltip-wrap { .lucent-tooltip-wrap {
position: relative;
display: inline-flex; display: inline-flex;
} }
.lucent-tooltip-bubble { .lucent-tooltip-bubble {
position: absolute; position: fixed;
left: 50%;
transform: translateX(-50%);
border: 1px solid color-mix(in oklab, var(--line) 72%, var(--brand) 28%); border: 1px solid color-mix(in oklab, var(--line) 72%, var(--brand) 28%);
border-radius: 8px; border-radius: 8px;
background: color-mix(in oklab, var(--panel) 88%, #000 12%); background: color-mix(in oklab, var(--panel) 88%, #000 12%);
@ -19,15 +16,16 @@
pointer-events: none; pointer-events: none;
opacity: 0; opacity: 0;
visibility: hidden; visibility: hidden;
transform: translateY(2px);
transition: opacity 0.14s ease, transform 0.14s ease, visibility 0.14s ease; transition: opacity 0.14s ease, transform 0.14s ease, visibility 0.14s ease;
z-index: 40; z-index: 9999;
box-shadow: 0 8px 18px rgba(6, 12, 24, 0.24); box-shadow: 0 8px 18px rgba(6, 12, 24, 0.24);
} }
.lucent-tooltip-bubble::after { .lucent-tooltip-bubble::after {
content: ''; content: '';
position: absolute; position: absolute;
left: 50%; left: var(--lucent-tooltip-arrow-left, 50%);
transform: translateX(-50%) rotate(45deg); transform: translateX(-50%) rotate(45deg);
width: 7px; width: 7px;
height: 7px; height: 7px;
@ -36,25 +34,24 @@
background: color-mix(in oklab, var(--panel) 88%, #000 12%); background: color-mix(in oklab, var(--panel) 88%, #000 12%);
} }
.lucent-tooltip-wrap.side-top .lucent-tooltip-bubble { .lucent-tooltip-bubble.side-top {
bottom: calc(100% + 8px); transform: translateY(-2px);
} }
.lucent-tooltip-wrap.side-top .lucent-tooltip-bubble::after { .lucent-tooltip-bubble.side-top::after {
bottom: -5px; bottom: -5px;
} }
.lucent-tooltip-wrap.side-bottom .lucent-tooltip-bubble { .lucent-tooltip-bubble.side-bottom {
top: calc(100% + 8px); transform: translateY(2px);
} }
.lucent-tooltip-wrap.side-bottom .lucent-tooltip-bubble::after { .lucent-tooltip-bubble.side-bottom::after {
top: -5px; top: -5px;
transform: translateX(-50%) rotate(225deg); transform: translateX(-50%) rotate(225deg);
} }
.lucent-tooltip-wrap:hover .lucent-tooltip-bubble, .lucent-tooltip-bubble.is-visible {
.lucent-tooltip-wrap:focus-within .lucent-tooltip-bubble {
opacity: 1; opacity: 1;
visibility: visible; visibility: visible;
} }

View File

@ -1,4 +1,4 @@
import { useEffect, useRef } from 'react'; import { useCallback, useEffect, useRef } from 'react';
import axios from 'axios'; import axios from 'axios';
import { useAppStore } from '../store/appStore'; import { useAppStore } from '../store/appStore';
import { APP_ENDPOINTS } from '../config/env'; import { APP_ENDPOINTS } from '../config/env';
@ -79,6 +79,7 @@ export function useBotsSync(forcedBotId?: string) {
const { activeBots, setBots, updateBotState, addBotLog, addBotMessage, addBotEvent, setBotMessages } = useAppStore(); const { activeBots, setBots, updateBotState, addBotLog, addBotMessage, addBotEvent, setBotMessages } = useAppStore();
const socketsRef = useRef<Record<string, WebSocket>>({}); const socketsRef = useRef<Record<string, WebSocket>>({});
const heartbeatsRef = useRef<Record<string, number>>({}); const heartbeatsRef = useRef<Record<string, number>>({});
const activeBotsRef = useRef<Record<string, BotState>>({});
const lastUserEchoRef = useRef<Record<string, { text: string; ts: number }>>({}); const lastUserEchoRef = useRef<Record<string, { text: string; ts: number }>>({});
const lastAssistantRef = useRef<Record<string, { text: string; ts: number }>>({}); const lastAssistantRef = useRef<Record<string, { text: string; ts: number }>>({});
const lastProgressRef = useRef<Record<string, { text: string; ts: number }>>({}); const lastProgressRef = useRef<Record<string, { text: string; ts: number }>>({});
@ -88,6 +89,47 @@ export function useBotsSync(forcedBotId?: string) {
const t = pickLocale(locale, { 'zh-cn': botsSyncZhCn, en: botsSyncEn }); const t = pickLocale(locale, { 'zh-cn': botsSyncZhCn, en: botsSyncEn });
const forced = String(forcedBotId || '').trim(); const forced = String(forcedBotId || '').trim();
useEffect(() => {
activeBotsRef.current = activeBots;
}, [activeBots]);
const syncBotMessages = useCallback(
async (botId: string) => {
const target = String(botId || '').trim();
if (!target) return;
try {
const res = await axios.get<any[]>(`${APP_ENDPOINTS.apiBase}/bots/${target}/messages`, {
params: { limit: 300 },
});
const rows = Array.isArray(res.data) ? res.data : [];
const messages: ChatMessage[] = rows
.map((row) => {
const roleRaw = String(row?.role || '').toLowerCase();
const role: ChatMessage['role'] = roleRaw === 'user' || roleRaw === 'assistant' || roleRaw === 'system' ? roleRaw : 'assistant';
return {
id: normalizeMessageId(row?.id),
role,
text: String(row?.text || ''),
attachments: normalizeMedia(row?.media),
ts: Number(row?.ts || Date.now()),
feedback: normalizeFeedback(row?.feedback),
};
})
.filter((msg) => msg.text.trim().length > 0 || (msg.attachments || []).length > 0)
.slice(-300);
setBotMessages(target, messages);
const lastUser = [...messages].reverse().find((m) => m.role === 'user');
if (lastUser) lastUserEchoRef.current[target] = { text: lastUser.text, ts: lastUser.ts };
const lastAssistant = [...messages].reverse().find((m) => m.role === 'assistant');
if (lastAssistant) lastAssistantRef.current[target] = { text: lastAssistant.text, ts: lastAssistant.ts };
} catch (error) {
console.error(`Failed to sync bot messages for ${target}`, error);
}
},
[setBotMessages],
);
useEffect(() => { useEffect(() => {
const fetchBots = async () => { const fetchBots = async () => {
try { try {
@ -123,39 +165,29 @@ export function useBotsSync(forcedBotId?: string) {
botIds.forEach((botId) => { botIds.forEach((botId) => {
if (hydratedMessagesRef.current[botId]) return; if (hydratedMessagesRef.current[botId]) return;
hydratedMessagesRef.current[botId] = true; hydratedMessagesRef.current[botId] = true;
void (async () => { void syncBotMessages(botId);
try {
const res = await axios.get<any[]>(`${APP_ENDPOINTS.apiBase}/bots/${botId}/messages`, {
params: { limit: 300 },
});
const rows = Array.isArray(res.data) ? res.data : [];
const messages: ChatMessage[] = rows
.map((row) => {
const roleRaw = String(row?.role || '').toLowerCase();
const role: ChatMessage['role'] = roleRaw === 'user' || roleRaw === 'assistant' || roleRaw === 'system' ? roleRaw : 'assistant';
return {
id: normalizeMessageId(row?.id),
role,
text: String(row?.text || ''),
attachments: normalizeMedia(row?.media),
ts: Number(row?.ts || Date.now()),
feedback: normalizeFeedback(row?.feedback),
};
})
.filter((msg) => msg.text.trim().length > 0 || (msg.attachments || []).length > 0)
.slice(-300);
setBotMessages(botId, messages);
const lastUser = [...messages].reverse().find((m) => m.role === 'user');
if (lastUser) lastUserEchoRef.current[botId] = { text: lastUser.text, ts: lastUser.ts };
const lastAssistant = [...messages].reverse().find((m) => m.role === 'assistant');
if (lastAssistant) lastAssistantRef.current[botId] = { text: lastAssistant.text, ts: lastAssistant.ts };
} catch (error) {
console.error(`Failed to fetch bot messages for ${botId}`, error);
}
})();
}); });
}, [activeBots, setBotMessages]); }, [activeBots, syncBotMessages]);
useEffect(() => {
const syncVisibleBots = () => {
if (typeof document !== 'undefined' && document.visibilityState !== 'visible') return;
const botIds = Object.keys(activeBotsRef.current || {});
botIds.forEach((botId) => {
void syncBotMessages(botId);
});
};
window.addEventListener('focus', syncVisibleBots);
window.addEventListener('pageshow', syncVisibleBots);
document.addEventListener('visibilitychange', syncVisibleBots);
return () => {
window.removeEventListener('focus', syncVisibleBots);
window.removeEventListener('pageshow', syncVisibleBots);
document.removeEventListener('visibilitychange', syncVisibleBots);
};
}, [syncBotMessages]);
useEffect(() => { useEffect(() => {
const runningIds = new Set( const runningIds = new Set(
@ -187,6 +219,8 @@ export function useBotsSync(forcedBotId?: string) {
} }
}, 15000); }, 15000);
heartbeatsRef.current[bot.id] = beat; heartbeatsRef.current[bot.id] = beat;
// Backfill messages after (re)connect to avoid missing outputs while tab was backgrounded.
void syncBotMessages(bot.id);
}; };
ws.onmessage = (event) => { ws.onmessage = (event) => {
let data: any; let data: any;
@ -320,7 +354,7 @@ export function useBotsSync(forcedBotId?: string) {
return () => { return () => {
// no-op: clean in unmount effect below // no-op: clean in unmount effect below
}; };
}, [activeBots, addBotEvent, addBotLog, addBotMessage, isZh, t.progress, t.replied, t.stateUpdated, updateBotState]); }, [activeBots, addBotEvent, addBotLog, addBotMessage, isZh, syncBotMessages, t.progress, t.replied, t.stateUpdated, updateBotState]);
useEffect(() => { useEffect(() => {
return () => { return () => {

View File

@ -6,6 +6,13 @@ export const channelsEn = {
openManager: 'Manage Channels', openManager: 'Manage Channels',
defaultChannel: 'Default Channel', defaultChannel: 'Default Channel',
customChannel: 'Custom Channel', customChannel: 'Custom Channel',
channelEmpty: 'No external channels configured.',
channelConfigured: 'Configured',
channelPending: 'Pending setup',
channelDraftMeta: 'New channel draft',
channelType: 'Channel Type',
channelAddHint: 'Click Add, choose a channel type, then fill the credentials.',
disabled: 'Disabled',
dashboardLocked: 'Dashboard is required and cannot be removed.', dashboardLocked: 'Dashboard is required and cannot be removed.',
enabled: 'Enabled', enabled: 'Enabled',
saveChannel: 'Save', saveChannel: 'Save',

View File

@ -6,6 +6,13 @@ export const channelsZhCn = {
openManager: '管理渠道', openManager: '管理渠道',
defaultChannel: '默认渠道', defaultChannel: '默认渠道',
customChannel: '自定义渠道', customChannel: '自定义渠道',
channelEmpty: '暂无外部渠道。',
channelConfigured: '已配置',
channelPending: '待配置',
channelDraftMeta: '新增渠道草稿',
channelType: '渠道类型',
channelAddHint: '点击新增后选择渠道类型,再填写接入凭据。',
disabled: '停用',
dashboardLocked: 'Dashboard 为系统必选渠道,不能删除。', dashboardLocked: 'Dashboard 为系统必选渠道,不能删除。',
enabled: '启用', enabled: '启用',
saveChannel: '保存', saveChannel: '保存',

View File

@ -76,6 +76,28 @@ export const dashboardEn = {
paginationPage: (current: number, total: number) => `${current} / ${total}`, paginationPage: (current: number, total: number) => `${current} / ${total}`,
newBot: 'New Bot', newBot: 'New Bot',
manageImages: 'Image Manager', manageImages: 'Image Manager',
extensions: 'Extensions',
templateManager: 'Template Manager',
templateManagerTitle: 'Template Manager',
templateTabAgent: 'Agent Templates',
templateTabTopic: 'Topic Presets',
templateAgentFile: 'Agent Templates (5 MD files)',
templateTopicFile: 'Topic Presets Template (3 presets)',
templateJsonHint: 'Edit JSON and save. New configurations will use the latest templates.',
templateLoadFail: 'Failed to load templates.',
templateSaveFail: 'Failed to save templates.',
templateSaved: 'Templates saved.',
batchStart: 'Batch Start',
batchStop: 'Batch Stop',
batchStartNone: 'No bots available for batch start.',
batchStopNone: 'No bots available for batch stop.',
batchStartConfirm: (count: number) => `Start ${count} bots in batch?`,
batchStopConfirm: (count: number) => `Stop ${count} bots in batch?`,
batchStartDone: (ok: number, fail: number) => `Batch start finished: ${ok} succeeded, ${fail} failed`,
batchStopDone: (ok: number, fail: number) => `Batch stop finished: ${ok} succeeded, ${fail} failed`,
templateAgentInvalid: 'Invalid agent template JSON.',
templateTopicInvalid: 'Invalid topic preset JSON.',
templateParseFail: 'Template JSON parse failed.',
image: 'Image', image: 'Image',
stop: 'Stop', stop: 'Stop',
start: 'Start', start: 'Start',
@ -106,6 +128,40 @@ export const dashboardEn = {
base: 'Base', base: 'Base',
params: 'Model', params: 'Model',
channels: 'Channels', channels: 'Channels',
topic: 'Topic',
topicPanel: 'Topic Management',
topicPanelDesc: 'Manage Topic routing targets. Only active topics receive deliveries; no topic means no feed.',
topicActive: 'Active',
topicName: 'Topic Name',
topicDescription: 'Topic Description',
topicPurpose: 'Purpose',
topicIncludeWhen: 'Include When',
topicExcludeWhen: 'Exclude When',
topicExamplesPositive: 'Positive Examples',
topicExamplesNegative: 'Negative Examples',
topicPriority: 'Priority',
topicListHint: 'One rule/example per line',
topicFilterProgress: 'Filter progress system messages',
topicFilterToolHint: 'Filter tool-hint system messages',
topicSystemFilterHint: 'Recommended to keep enabled to avoid feed noise.',
topicEmpty: 'No topics configured.',
topicAdd: 'Add Topic',
topicAddHint: 'Topic key must use lowercase letters, numbers, dot, underscore, or hyphen.',
topicKey: 'Topic Key',
topicKeyPlaceholder: 'e.g. finance_market',
topicKeyRequired: 'Topic key is required.',
topicSaved: 'Topic configuration saved.',
topicSaveFail: 'Failed to save topic configuration.',
topicDeleted: 'Topic deleted.',
topicDeleteFail: 'Failed to delete topic.',
topicDeleteConfirm: (topicKey: string) => `Delete topic ${topicKey}?`,
topicPresetPolitics: 'Preset: Politics',
topicPresetFinance: 'Preset: Finance',
topicPresetTech: 'Preset: Tech',
topicPresetBlank: 'Blank Topic',
topicPresetPoliticsApplied: 'Applied preset: Politics News.',
topicPresetFinanceApplied: 'Applied preset: Finance & Market.',
topicPresetTechApplied: 'Applied preset: Tech Updates.',
skills: 'Skills', skills: 'Skills',
mcp: 'MCP', mcp: 'MCP',
tools: 'Tools', tools: 'Tools',
@ -145,6 +201,8 @@ export const dashboardEn = {
mcpTestFail: 'Connectivity test failed.', mcpTestFail: 'Connectivity test failed.',
mcpTestNeedUrl: 'Please provide MCP URL first.', mcpTestNeedUrl: 'Please provide MCP URL first.',
mcpTestBlockSave: 'MCP connectivity test failed. Save is blocked.', mcpTestBlockSave: 'MCP connectivity test failed. Save is blocked.',
mcpDraftRequired: 'MCP server name and URL are required.',
mcpDraftAdded: 'Added to the MCP list. Save config to apply.',
addMcpServer: 'Add MCP Server', addMcpServer: 'Add MCP Server',
saveMcpConfig: 'Save MCP Config', saveMcpConfig: 'Save MCP Config',
mcpSaved: 'MCP config saved.', mcpSaved: 'MCP config saved.',

View File

@ -76,6 +76,28 @@ export const dashboardZhCn = {
paginationPage: (current: number, total: number) => `${current} / ${total}`, paginationPage: (current: number, total: number) => `${current} / ${total}`,
newBot: '新建机器人', newBot: '新建机器人',
manageImages: '镜像管理', manageImages: '镜像管理',
extensions: '扩展菜单',
templateManager: '模板管理',
templateManagerTitle: '模板管理',
templateTabAgent: '代理模板',
templateTabTopic: '主题预设模板',
templateAgentFile: '代理模板5 个 MD',
templateTopicFile: '主题预设模板3 项)',
templateJsonHint: '请编辑 JSON 内容,保存后新建配置将读取最新模板。',
templateLoadFail: '读取模板失败。',
templateSaveFail: '保存模板失败。',
templateSaved: '模板已保存。',
batchStart: '批量启动',
batchStop: '批量停止',
batchStartNone: '当前没有可批量启动的 Bot。',
batchStopNone: '当前没有可批量停止的 Bot。',
batchStartConfirm: (count: number) => `确认批量启动 ${count} 个 Bot`,
batchStopConfirm: (count: number) => `确认批量停止 ${count} 个 Bot`,
batchStartDone: (ok: number, fail: number) => `批量启动完成:成功 ${ok},失败 ${fail}`,
batchStopDone: (ok: number, fail: number) => `批量停止完成:成功 ${ok},失败 ${fail}`,
templateAgentInvalid: '代理模板格式错误。',
templateTopicInvalid: '主题模板格式错误。',
templateParseFail: '模板 JSON 解析失败。',
image: '镜像', image: '镜像',
stop: '停止', stop: '停止',
start: '启动', start: '启动',
@ -106,6 +128,40 @@ export const dashboardZhCn = {
base: '基础', base: '基础',
params: '模型', params: '模型',
channels: '渠道', channels: '渠道',
topic: '主题',
topicPanel: '主题管理',
topicPanelDesc: '管理该 Bot 的主题路由目标。仅启用的主题会接收投递;未配置主题时不投递。',
topicActive: '启用',
topicName: '主题名称',
topicDescription: '主题描述',
topicPurpose: '用途purpose',
topicIncludeWhen: '命中条件include_when',
topicExcludeWhen: '排除条件exclude_when',
topicExamplesPositive: '正例examples.positive',
topicExamplesNegative: '反例examples.negative',
topicPriority: '优先级priority',
topicListHint: '每行一条规则或示例',
topicFilterProgress: '过滤 progress 系统消息',
topicFilterToolHint: '过滤 tool-hint 系统消息',
topicSystemFilterHint: '建议保持开启,避免 feed 被过程噪声污染。',
topicEmpty: '暂无主题。',
topicAdd: '新增主题',
topicAddHint: '主题 Key 仅支持小写字母、数字、点、下划线和连字符。',
topicKey: '主题 Key',
topicKeyPlaceholder: '例如 finance_market',
topicKeyRequired: '请先填写主题 Key。',
topicSaved: '主题配置已保存。',
topicSaveFail: '主题配置保存失败。',
topicDeleted: '主题已删除。',
topicDeleteFail: '主题删除失败。',
topicDeleteConfirm: (topicKey: string) => `确认删除主题 ${topicKey}`,
topicPresetPolitics: '预设:时政新闻',
topicPresetFinance: '预设:财经信息',
topicPresetTech: '预设:技术资讯',
topicPresetBlank: '空白主题',
topicPresetPoliticsApplied: '已填入“时政新闻”主题预设。',
topicPresetFinanceApplied: '已填入“财经信息”主题预设。',
topicPresetTechApplied: '已填入“技术资讯”主题预设。',
skills: '技能', skills: '技能',
mcp: 'MCP', mcp: 'MCP',
tools: '工具', tools: '工具',
@ -145,6 +201,8 @@ export const dashboardZhCn = {
mcpTestFail: '连通性测试失败。', mcpTestFail: '连通性测试失败。',
mcpTestNeedUrl: '请先填写 MCP URL。', mcpTestNeedUrl: '请先填写 MCP URL。',
mcpTestBlockSave: '存在未通过的 MCP 连通性测试,已阻止保存。', mcpTestBlockSave: '存在未通过的 MCP 连通性测试,已阻止保存。',
mcpDraftRequired: '请先填写 MCP 服务名称和 URL。',
mcpDraftAdded: '已加入 MCP 列表,记得保存配置。',
addMcpServer: '新增 MCP Server', addMcpServer: '新增 MCP Server',
saveMcpConfig: '保存 MCP 配置', saveMcpConfig: '保存 MCP 配置',
mcpSaved: 'MCP 配置已保存。', mcpSaved: 'MCP 配置已保存。',

View File

@ -110,6 +110,7 @@
} }
.ops-list-actions { .ops-list-actions {
position: relative;
display: inline-flex; display: inline-flex;
align-items: center; align-items: center;
gap: 8px; gap: 8px;
@ -408,41 +409,165 @@
.ops-chat-panel { .ops-chat-panel {
min-width: 0; min-width: 0;
min-height: 0;
height: 100%;
display: flex;
flex-direction: column;
padding: 12px; padding: 12px;
} }
.ops-chat-shell { .ops-chat-shell {
display: grid; display: grid;
grid-template-rows: 1fr; height: 100%;
gap: 0; min-height: 0;
flex: 1 1 auto;
}
.ops-main-content-shell {
display: block;
min-height: 0;
height: 100%;
}
.ops-main-content-frame {
display: grid;
grid-template-rows: auto minmax(0, 1fr);
min-height: 0;
height: 100%;
border: 1px solid var(--line);
border-radius: 16px;
background: var(--panel-soft);
overflow: hidden;
}
.ops-main-content-head {
display: flex;
align-items: center;
justify-content: center;
min-height: 44px;
padding: 6px 12px;
border-bottom: 1px solid color-mix(in oklab, var(--line) 78%, transparent);
background: color-mix(in oklab, var(--panel) 86%, transparent);
}
.ops-main-content-body {
min-height: 0;
height: 100%;
display: grid;
grid-template-rows: minmax(0, 1fr);
overflow: hidden;
}
.ops-main-mode-rail {
display: grid;
grid-template-columns: 1fr 1fr;
align-items: center;
width: min(320px, 100%);
min-height: 34px;
padding: 3px;
border: 1px solid color-mix(in oklab, var(--line) 74%, transparent);
border-radius: 999px;
background: color-mix(in oklab, var(--panel) 92%, white 8%);
}
.ops-chat-topic-frame {
min-height: 0;
overflow: hidden;
}
.ops-main-mode-tab {
position: relative;
display: flex;
align-items: center;
justify-content: center;
gap: 4px;
height: 28px;
padding: 0 10px;
border: 0;
border-radius: 999px;
background: transparent;
color: var(--muted);
cursor: pointer;
transition: color 160ms ease, background 160ms ease, opacity 160ms ease;
}
.ops-main-mode-tab:hover {
color: var(--text);
background: color-mix(in oklab, var(--panel-soft) 66%, transparent);
}
.ops-main-mode-tab.is-active {
color: #fff;
background: linear-gradient(180deg, color-mix(in oklab, var(--brand) 78%, #9ec1ff 22%), color-mix(in oklab, var(--brand) 66%, #7ca7ff 34%));
box-shadow: 0 6px 16px color-mix(in oklab, var(--brand) 24%, transparent);
}
.ops-main-mode-tab:focus-visible {
outline: 2px solid color-mix(in oklab, var(--brand) 42%, transparent);
outline-offset: 2px;
}
.ops-main-mode-tab + .ops-main-mode-tab {
margin-left: 0;
}
.ops-main-mode-tab svg {
color: color-mix(in oklab, var(--icon) 88%, var(--muted) 12%);
transition: color 160ms ease, fill 160ms ease, opacity 160ms ease;
}
.ops-main-mode-tab.is-active svg {
color: #fff;
}
.ops-main-mode-label {
font-size: 11px;
font-weight: 800;
letter-spacing: 0.01em;
white-space: nowrap;
opacity: 0.9;
}
.ops-main-mode-label-wrap {
display: inline-flex;
align-items: center;
gap: 6px;
}
.ops-main-content-frame .ops-chat-frame,
.ops-main-content-frame .ops-topic-feed.is-panel {
height: 100%; height: 100%;
min-height: 0; min-height: 0;
} }
.ops-chat-head { .ops-main-content-body > * {
border: 1px solid var(--line); min-height: 0;
border-radius: 12px;
background: var(--panel-soft);
padding: 10px 12px;
display: flex;
align-items: center;
justify-content: space-between;
gap: 12px;
flex-wrap: wrap;
} }
.ops-chat-title { .ops-main-content-body > .ops-chat-frame,
margin: 0; .ops-main-content-body > .ops-topic-feed.is-panel {
font-size: 18px; min-height: 0;
font-weight: 800;
color: var(--title);
} }
.ops-chat-subtitle { .ops-main-content-body .ops-chat-scroll {
margin: 4px 0 0; min-height: 0;
color: var(--subtitle); max-height: none;
font-size: 12px; border: 0;
font-weight: 600; border-radius: 0;
background: transparent;
padding: 14px 16px 10px;
}
.ops-main-content-body .ops-chat-dock {
display: grid;
gap: 10px;
min-height: 0;
padding: 0 14px 14px;
}
.ops-main-content-body .ops-topic-feed.is-panel {
padding: 12px 14px 14px;
grid-template-rows: auto auto minmax(0, 1fr) auto;
} }
.ops-status-group { .ops-status-group {
@ -485,8 +610,9 @@
.ops-chat-frame { .ops-chat-frame {
position: relative; position: relative;
min-height: 0; min-height: 0;
height: 100%;
display: grid; display: grid;
grid-template-rows: minmax(0, 1fr) auto auto; grid-template-rows: minmax(0, 1fr) auto;
gap: 10px; gap: 10px;
} }
@ -1421,6 +1547,20 @@
flex-wrap: wrap; flex-wrap: wrap;
} }
.ops-runtime-view-switch {
margin-right: 2px;
}
.ops-switch-dot {
position: static;
width: 9px;
height: 9px;
border-radius: 999px;
background: #ef4444;
border: 1.5px solid #fff;
box-shadow: 0 0 0 1px color-mix(in oklab, var(--panel-soft) 82%, transparent);
}
.ops-more-menu { .ops-more-menu {
position: absolute; position: absolute;
top: calc(100% + 8px); top: calc(100% + 8px);
@ -1466,6 +1606,70 @@
border-color: rgba(215, 102, 102, 0.34); border-color: rgba(215, 102, 102, 0.34);
} }
.ops-template-tabs {
position: relative;
display: grid;
grid-template-columns: repeat(2, minmax(0, 1fr));
gap: 14px;
margin: 2px 0 12px;
padding: 0 6px 8px;
}
.ops-template-tabs::after {
content: "";
position: absolute;
left: 0;
right: 0;
bottom: 0;
height: 2px;
background: color-mix(in oklab, var(--line) 78%, var(--panel-soft) 22%);
}
.ops-template-tab {
border: 1px solid transparent;
border-radius: 12px;
background: transparent;
color: color-mix(in oklab, var(--text) 82%, var(--muted) 18%);
min-height: 54px;
padding: 10px 14px;
font-size: 13px;
font-weight: 700;
letter-spacing: 0.02em;
cursor: pointer;
transition: all 0.16s ease;
}
.ops-template-tab:hover {
background: color-mix(in oklab, var(--brand-soft) 52%, var(--panel) 48%);
}
.ops-template-tab.is-active {
background: color-mix(in oklab, var(--brand-soft) 74%, var(--panel) 26%);
border-color: color-mix(in oklab, var(--brand) 30%, transparent);
color: color-mix(in oklab, var(--text) 90%, var(--brand) 10%);
box-shadow: 0 8px 20px rgba(45, 93, 185, 0.08);
}
.ops-template-tab.is-active::after {
content: "";
position: relative;
display: block;
height: 2px;
width: calc(100% + 28px);
left: -14px;
top: 11px;
background: color-mix(in oklab, var(--brand) 78%, #ffffff 22%);
}
.ops-template-tab-label {
display: inline-flex;
align-items: center;
justify-content: center;
width: 100%;
text-align: center;
white-space: nowrap;
}
.ops-runtime-scroll { .ops-runtime-scroll {
min-height: 0; min-height: 0;
overflow: auto; overflow: auto;
@ -1480,6 +1684,323 @@
gap: 8px; gap: 8px;
} }
.ops-topic-feed {
position: relative;
display: grid;
gap: 8px;
min-height: 0;
}
.ops-topic-feed.is-panel {
height: 100%;
grid-template-rows: auto auto minmax(0, 1fr) auto;
}
.ops-topic-feed-empty-state {
min-height: 180px;
display: grid;
place-items: center;
gap: 10px;
align-content: center;
text-align: center;
padding: 18px 10px;
}
.ops-topic-feed-empty-title {
color: var(--title);
font-size: 18px;
font-weight: 800;
letter-spacing: -0.01em;
}
.ops-topic-feed-empty-desc {
max-width: 480px;
color: var(--muted);
font-size: 13px;
line-height: 1.65;
}
.ops-topic-feed-toolbar {
display: grid;
grid-template-columns: 1fr auto;
gap: 8px;
align-items: center;
}
.ops-topic-feed-list {
display: grid;
gap: 8px;
min-height: 0;
max-height: 320px;
overflow: auto;
padding-right: 2px;
}
.ops-topic-feed-list.is-panel {
max-height: none;
height: 100%;
padding-right: 6px;
}
.ops-topic-feed-item {
border: 1px solid color-mix(in oklab, var(--line) 78%, transparent);
border-radius: 14px;
background:
linear-gradient(180deg, color-mix(in oklab, var(--panel) 92%, white 8%), color-mix(in oklab, var(--panel-soft) 78%, transparent)),
color-mix(in oklab, var(--panel-soft) 82%, transparent);
padding: 12px;
display: grid;
gap: 10px;
}
.ops-topic-feed-item.unread {
border-color: color-mix(in oklab, var(--brand) 56%, var(--line) 44%);
box-shadow:
inset 3px 0 0 color-mix(in oklab, var(--brand) 72%, transparent),
0 10px 24px color-mix(in oklab, var(--brand-soft) 12%, transparent);
}
.ops-topic-feed-item-head {
display: flex;
align-items: center;
justify-content: space-between;
gap: 10px;
flex-wrap: wrap;
}
.ops-topic-feed-meta {
display: inline-flex;
align-items: center;
gap: 6px;
min-width: 0;
flex-wrap: wrap;
}
.ops-topic-feed-meta-right {
display: inline-flex;
align-items: center;
gap: 8px;
margin-left: auto;
flex-wrap: wrap;
}
.ops-topic-feed-topic-chip,
.ops-topic-feed-source-chip {
display: inline-flex;
align-items: center;
height: 22px;
padding: 0 8px;
border-radius: 999px;
border: 1px solid color-mix(in oklab, var(--line) 75%, transparent);
background: color-mix(in oklab, var(--panel) 76%, transparent);
color: var(--muted);
font-size: 11px;
}
.ops-topic-feed-unread-dot {
width: 8px;
height: 8px;
border-radius: 999px;
background: #ef4444;
box-shadow: 0 0 0 3px color-mix(in oklab, #ef4444 18%, transparent);
}
.ops-topic-feed-level {
display: inline-flex;
align-items: center;
justify-content: center;
min-width: 52px;
height: 20px;
border-radius: 999px;
font-size: 11px;
font-weight: 800;
border: 1px solid transparent;
}
.ops-topic-feed-level.info {
color: color-mix(in oklab, var(--brand) 75%, white 25%);
background: color-mix(in oklab, var(--brand) 18%, transparent);
border-color: color-mix(in oklab, var(--brand) 46%, transparent);
}
.ops-topic-feed-level.warn {
color: #9b5d00;
background: color-mix(in oklab, #f5af48 26%, transparent);
border-color: color-mix(in oklab, #f5af48 56%, transparent);
}
.ops-topic-feed-level.error {
color: color-mix(in oklab, var(--err) 82%, white 18%);
background: color-mix(in oklab, var(--err) 18%, transparent);
border-color: color-mix(in oklab, var(--err) 52%, transparent);
}
.ops-topic-feed-level.success {
color: color-mix(in oklab, var(--ok) 78%, white 22%);
background: color-mix(in oklab, var(--ok) 20%, transparent);
border-color: color-mix(in oklab, var(--ok) 52%, transparent);
}
.ops-topic-feed-time {
font-size: 11px;
color: var(--muted);
}
.ops-topic-feed-title {
font-weight: 700;
color: var(--text);
}
.ops-topic-feed-content {
font-size: 13px;
line-height: 1.58;
color: var(--text);
}
.ops-topic-card-shell {
display: grid;
gap: 10px;
padding: 2px 0 0;
}
.ops-topic-card-title {
font-size: 16px;
font-weight: 800;
line-height: 1.4;
color: var(--text);
letter-spacing: -0.01em;
}
.ops-topic-card-summary {
font-size: 13px;
line-height: 1.68;
color: color-mix(in oklab, var(--text) 86%, var(--muted) 14%);
}
.ops-topic-card-highlights {
display: grid;
gap: 8px;
padding: 10px;
border: 1px solid color-mix(in oklab, var(--line) 68%, transparent);
border-radius: 12px;
background: linear-gradient(180deg, color-mix(in oklab, var(--brand-soft) 20%, transparent), color-mix(in oklab, var(--panel-soft) 76%, transparent));
}
.ops-topic-card-highlight {
display: grid;
grid-template-columns: 10px 1fr;
gap: 8px;
align-items: start;
font-size: 13px;
line-height: 1.56;
color: var(--text);
}
.ops-topic-card-bullet {
width: 6px;
height: 6px;
margin-top: 7px;
border-radius: 999px;
background: color-mix(in oklab, var(--brand) 72%, transparent);
box-shadow: 0 0 0 3px color-mix(in oklab, var(--brand-soft) 18%, transparent);
}
.ops-topic-card-snippet {
padding: 10px 12px;
border-left: 3px solid color-mix(in oklab, var(--brand) 54%, transparent);
border-radius: 0 10px 10px 0;
background: color-mix(in oklab, var(--panel) 74%, transparent);
color: color-mix(in oklab, var(--text) 72%, var(--muted) 28%);
font-size: 12px;
line-height: 1.62;
}
.ops-topic-detail-overlay {
display: none;
}
.ops-topic-feed-tags {
display: inline-flex;
flex-wrap: wrap;
gap: 6px;
}
.ops-topic-feed-tag {
display: inline-flex;
align-items: center;
height: 20px;
padding: 0 8px;
border-radius: 999px;
border: 1px solid color-mix(in oklab, var(--line) 75%, transparent);
background: color-mix(in oklab, var(--panel) 75%, transparent);
color: var(--muted);
font-size: 11px;
}
.ops-topic-feed-item-foot {
display: flex;
align-items: center;
justify-content: space-between;
gap: 8px;
}
.ops-topic-read-state {
display: inline-flex;
align-items: center;
min-width: 0;
font-size: 12px;
font-weight: 800;
letter-spacing: 0.01em;
}
.ops-topic-read-state.is-unread {
color: color-mix(in oklab, var(--brand) 70%, var(--text) 30%);
}
.ops-topic-read-state.is-read {
color: var(--muted);
}
.ops-topic-feed-item-actions {
display: inline-flex;
align-items: center;
gap: 8px;
flex-wrap: wrap;
justify-content: flex-end;
}
.ops-topic-feed-empty-action {
color: var(--muted);
opacity: 0.5;
}
@media (max-width: 900px) {
.ops-main-mode-tab {
padding: 0 5px;
}
.ops-main-mode-label {
font-size: 10px;
}
}
@media (max-width: 980px) {
.app-shell-compact .ops-chat-panel,
.app-shell-compact .ops-chat-shell,
.app-shell-compact .ops-main-content-shell,
.app-shell-compact .ops-main-content-frame,
.app-shell-compact .ops-main-content-body,
.app-shell-compact .ops-chat-frame {
min-height: 0;
height: 100%;
}
.app-shell-compact .ops-main-content-body .ops-chat-scroll {
min-height: 0;
max-height: none;
}
}
.ops-runtime-state-card { .ops-runtime-state-card {
min-height: 0; min-height: 0;
} }
@ -1819,6 +2340,177 @@
padding-right: 4px; padding-right: 4px;
} }
.ops-modal-scrollable {
max-height: min(92vh, 860px);
overflow: auto;
overscroll-behavior: contain;
}
.ops-config-modal {
min-height: clamp(480px, 68vh, 760px);
display: flex;
flex-direction: column;
}
.ops-config-list-scroll {
min-height: clamp(240px, 38vh, 380px);
max-height: min(56vh, 600px);
overflow: auto;
padding-right: 4px;
}
.ops-config-card-header {
display: flex;
align-items: flex-start;
justify-content: space-between;
gap: 10px;
}
.ops-config-card-main {
min-width: 0;
display: grid;
gap: 2px;
}
.ops-config-card-actions {
display: inline-flex;
align-items: center;
justify-content: flex-end;
gap: 6px;
flex-wrap: wrap;
}
.ops-config-collapsed-meta {
font-size: 12px;
color: var(--muted);
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
max-width: 520px;
}
.ops-config-new-card {
border-color: color-mix(in oklab, var(--brand) 56%, var(--line) 44%);
background:
linear-gradient(180deg, color-mix(in oklab, var(--brand-soft) 22%, var(--panel) 78%), color-mix(in oklab, var(--panel-soft) 82%, transparent)),
var(--panel);
box-shadow:
inset 0 0 0 1px color-mix(in oklab, var(--brand) 12%, transparent),
0 10px 28px color-mix(in oklab, var(--brand-soft) 14%, transparent);
}
.ops-plain-icon-btn {
width: 28px;
height: 28px;
padding: 0;
border: 0;
background: transparent;
color: var(--muted);
border-radius: 8px;
display: inline-flex;
align-items: center;
justify-content: center;
}
.ops-plain-icon-btn:hover {
background: color-mix(in oklab, var(--brand-soft) 22%, transparent);
color: var(--text);
}
.ops-plain-icon-btn:disabled {
opacity: 0.45;
cursor: not-allowed;
}
.ops-topic-grid {
display: grid;
grid-template-columns: repeat(2, minmax(0, 1fr));
gap: 10px;
margin-top: 8px;
align-items: start;
}
.ops-config-field {
min-width: 0;
display: grid;
gap: 6px;
}
.ops-config-field-full {
grid-column: 1 / -1;
}
.ops-config-footer {
position: sticky;
bottom: 0;
background: var(--panel);
border-top: 1px solid color-mix(in oklab, var(--line) 78%, transparent);
padding-top: 8px;
}
.ops-topic-create-menu-wrap {
position: relative;
display: inline-flex;
}
.ops-topic-create-menu {
position: absolute;
right: 0;
bottom: calc(100% + 8px);
z-index: 40;
min-width: 220px;
padding: 6px;
border: 1px solid var(--line);
border-radius: 12px;
background: var(--panel);
box-shadow: 0 14px 32px rgba(9, 16, 31, 0.28);
display: grid;
gap: 4px;
}
.ops-topic-create-menu-item {
border: 0;
background: transparent;
color: var(--text);
border-radius: 8px;
text-align: left;
padding: 8px 10px;
font-size: 13px;
cursor: pointer;
}
.ops-topic-create-menu-item:hover {
background: color-mix(in oklab, var(--brand-soft) 22%, transparent);
}
@media (max-width: 920px) {
.ops-config-modal {
min-height: clamp(420px, 62vh, 640px);
}
.ops-config-list-scroll {
min-height: 220px;
}
.ops-topic-grid {
grid-template-columns: minmax(0, 1fr);
}
.ops-config-card-actions {
justify-content: flex-start;
}
.ops-config-collapsed-meta {
max-width: 100%;
}
.ops-config-footer {
position: static;
border-top: 0;
padding-top: 0;
}
}
.ops-cron-list-scroll { .ops-cron-list-scroll {
max-height: min(58vh, 560px); max-height: min(58vh, 560px);
overflow: auto; overflow: auto;

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,205 @@
import type { AnchorHTMLAttributes, ReactNode } from 'react';
// Synthetic URL scheme used to mark "open this workspace file in the dashboard"
// links inside rendered markdown; parsed back by parseWorkspaceLink().
export const WORKSPACE_LINK_PREFIX = 'https://workspace.local/open/';

// Absolute container paths (/root/.nanobot/workspace/...) ending in one of the
// recognized document/media/archive extensions. Excludes newline/quote/angle
// characters so a path embedded in prose terminates at sensible boundaries.
const WORKSPACE_ABS_PATH_PATTERN =
  /\/root\/\.nanobot\/workspace\/[^\n\r<>"'`]+?\.(?:md|markdown|json|txt|log|csv|tsv|yaml|yml|toml|html|htm|pdf|png|jpg|jpeg|gif|webp|svg|mp3|wav|m4a|flac|ogg|opus|aac|amr|wma|mp4|mov|avi|mkv|webm|m4v|3gp|mpeg|mpg|ts|doc|docx|xls|xlsx|xlsm|ppt|pptx|odt|ods|odp|wps)\b/gi;
// Root-relative paths (e.g. "/notes/a.md") with the same extension set.
// Group 1 captures the leading whitespace/bracket so it can be re-emitted;
// group 2 is the path itself. The trailing lookahead stops partial matches
// inside a longer path-like token.
const WORKSPACE_RELATIVE_PATH_PATTERN =
  /(^|[\s(\[])(\/[^\n\r<>"'`)\]]+?\.(?:md|markdown|json|txt|log|csv|tsv|yaml|yml|toml|html|htm|pdf|png|jpg|jpeg|gif|webp|svg|mp3|wav|m4a|flac|ogg|opus|aac|amr|wma|mp4|mov|avi|mkv|webm|m4v|3gp|mpeg|mpg|ts|doc|docx|xls|xlsx|xlsm|ppt|pptx|odt|ods|odp|wps))(?![A-Za-z0-9_./-])/gim;
// Union matcher used when rendering plain text nodes. Three alternatives:
//  1. a full markdown link "[abs path](workspace.local link)" (groups 1 and 2),
//  2. a bare absolute workspace path (terminated by whitespace/punctuation,
//     including fullwidth CJK punctuation),
//  3. a bare workspace.local open-link.
// NOTE: declared with the `g` flag at module scope, so `lastIndex` is shared
// mutable state across every caller that uses .exec() on it.
const WORKSPACE_RENDER_PATTERN =
  /\[(\/root\/\.nanobot\/workspace\/[^\]]+)\]\((https:\/\/workspace\.local\/open\/[^)\r\n]*)\)|\/root\/\.nanobot\/workspace\/[^\s<>"'`)\],,。!?;:]+|https:\/\/workspace\.local\/open\/[^)\r\n]+/gi;
/**
 * Normalize an attachment path to a workspace-relative form.
 *
 * Backslashes are converted to forward slashes; the container workspace root
 * (`/root/.nanobot/workspace/`) is stripped when present, otherwise any
 * leading slashes are removed. Empty/whitespace-only input yields ''.
 */
export function normalizeDashboardAttachmentPath(path: string): string {
  const cleaned = String(path || '').trim().replace(/\\/g, '/');
  if (!cleaned) return '';
  const workspaceRoot = '/root/.nanobot/workspace/';
  return cleaned.startsWith(workspaceRoot)
    ? cleaned.slice(workspaceRoot.length)
    : cleaned.replace(/^\/+/, '');
}
/** Build a synthetic workspace open-link for a workspace-relative path. */
export function buildWorkspaceLink(path: string) {
  // Encode so the whole path survives as one opaque URL segment.
  return WORKSPACE_LINK_PREFIX + encodeURIComponent(path);
}
/**
 * Inverse of buildWorkspaceLink: extract the workspace-relative path from a
 * synthetic open-link. Returns null for non-workspace links, empty paths, or
 * malformed percent-encoding.
 */
export function parseWorkspaceLink(href: string): string | null {
  const candidate = String(href || '').trim();
  if (!candidate.startsWith(WORKSPACE_LINK_PREFIX)) return null;
  const encodedPart = candidate.slice(WORKSPACE_LINK_PREFIX.length);
  try {
    const path = decodeURIComponent(encodedPart || '').trim();
    return path.length > 0 ? path : null;
  } catch {
    // decodeURIComponent throws on broken %-sequences; treat as unparseable.
    return null;
  }
}
/** True when the href starts with http:// or https:// (case-insensitive). */
function isExternalHttpLink(href: string): boolean {
  const candidate = String(href || '').trim();
  return candidate.search(/^https?:\/\//i) === 0;
}
/**
 * Rewrite bare workspace paths in a plain-text (non-markdown-link) chunk into
 * markdown links pointing at synthetic workspace open-links.
 *
 * Two passes, order-sensitive:
 *  1. Absolute /root/.nanobot/workspace/ paths are replaced by placeholder
 *     tokens (@@WS_PATH_LINK_n@@) so the second pass cannot re-match inside
 *     the links already built for them.
 *  2. Root-relative paths (e.g. "/notes/a.md") are linkified in place.
 * Finally the placeholder tokens are swapped back for the links built in
 * pass 1. Out-of-range token indices are left as-is (defensive; should not
 * occur since tokens are generated in this function).
 */
function decorateWorkspacePathsInPlainChunk(source: string): string {
  if (!source) return source;
  // Links built during pass 1, indexed by placeholder number.
  const protectedLinks: string[] = [];
  const withProtectedAbsoluteLinks = source.replace(WORKSPACE_ABS_PATH_PATTERN, (fullPath) => {
    const normalized = normalizeDashboardAttachmentPath(fullPath);
    if (!normalized) return fullPath;
    const token = `@@WS_PATH_LINK_${protectedLinks.length}@@`;
    protectedLinks.push(`[${fullPath}](${buildWorkspaceLink(normalized)})`);
    return token;
  });
  const withRelativeLinks = withProtectedAbsoluteLinks.replace(
    WORKSPACE_RELATIVE_PATH_PATTERN,
    (full, prefix: string, rawPath: string) => {
      const normalized = normalizeDashboardAttachmentPath(rawPath);
      if (!normalized) return full;
      // Re-emit the captured leading whitespace/bracket before the link.
      return `${prefix}[${rawPath}](${buildWorkspaceLink(normalized)})`;
    },
  );
  return withRelativeLinks.replace(/@@WS_PATH_LINK_(\d+)@@/g, (_full, idxRaw: string) => {
    const idx = Number(idxRaw);
    if (!Number.isFinite(idx) || idx < 0 || idx >= protectedLinks.length) return String(_full || '');
    return protectedLinks[idx];
  });
}
/**
 * Linkify workspace paths in markdown text without touching text that is
 * already inside a markdown link. The source is split at existing
 * `[label](target)` spans; those spans are copied verbatim while the plain
 * gaps between them go through decorateWorkspacePathsInPlainChunk().
 */
export function decorateWorkspacePathsForMarkdown(text: string) {
  const source = String(text || '');
  if (!source) return source;
  // Matches a markdown link whose target may contain one level of parens.
  const markdownLinkPattern = /\[[^\]]*?\]\((?:[^)(]|\([^)(]*\))*\)/g;
  const pieces: string[] = [];
  let cursor = 0;
  for (const hit of source.matchAll(markdownLinkPattern)) {
    const start = Number(hit.index || 0);
    if (start > cursor) {
      pieces.push(decorateWorkspacePathsInPlainChunk(source.slice(cursor, start)));
    }
    pieces.push(hit[0]);
    cursor = start + hit[0].length;
  }
  if (cursor < source.length) {
    pieces.push(decorateWorkspacePathsInPlainChunk(source.slice(cursor)));
  }
  return pieces.join('');
}
/**
 * Split a text node into React nodes, turning workspace references into
 * clickable anchors that call openWorkspacePath with the normalized
 * workspace-relative path.
 *
 * Recognized forms (via WORKSPACE_RENDER_PATTERN):
 *  - "[abs path](https://workspace.local/open/...)" markdown residue,
 *  - bare absolute /root/.nanobot/workspace/ paths,
 *  - bare https://workspace.local/open/ links.
 * Unrecognized or unparseable matches are emitted as plain text.
 */
function renderWorkspaceAwareText(
  text: string,
  keyPrefix: string,
  openWorkspacePath: (path: string) => void,
): ReactNode[] {
  const source = String(text || '');
  if (!source) return [source];
  // WORKSPACE_RENDER_PATTERN is a shared module-level regex with the `g`
  // flag, so its lastIndex persists across calls. Reset it here: if a prior
  // scan was abandoned mid-iteration (e.g. an exception thrown by a caller),
  // the stale lastIndex would otherwise make this call silently start
  // matching from the middle of the new string.
  WORKSPACE_RENDER_PATTERN.lastIndex = 0;
  const nodes: ReactNode[] = [];
  let lastIndex = 0;
  let matchIndex = 0;
  let match = WORKSPACE_RENDER_PATTERN.exec(source);
  while (match) {
    // Emit the plain text between the previous match and this one.
    if (match.index > lastIndex) {
      nodes.push(source.slice(lastIndex, match.index));
    }
    const raw = match[0];
    const markdownPath = match[1] ? String(match[1]) : '';
    const markdownHref = match[2] ? String(match[2]) : '';
    let normalizedPath = '';
    let displayText = raw;
    if (markdownPath && markdownHref) {
      // Markdown-link form: show the absolute path, open the relative one.
      normalizedPath = normalizeDashboardAttachmentPath(markdownPath);
      displayText = markdownPath;
    } else if (raw.startsWith(WORKSPACE_LINK_PREFIX)) {
      // Bare open-link: reconstruct the absolute path for display.
      normalizedPath = String(parseWorkspaceLink(raw) || '').trim();
      displayText = normalizedPath ? `/root/.nanobot/workspace/${normalizedPath}` : raw;
    } else if (raw.startsWith('/root/.nanobot/workspace/')) {
      normalizedPath = normalizeDashboardAttachmentPath(raw);
      displayText = raw;
    }
    if (normalizedPath) {
      nodes.push(
        <a
          key={`${keyPrefix}-ws-${matchIndex}`}
          href="#"
          onClick={(event) => {
            event.preventDefault();
            event.stopPropagation();
            openWorkspacePath(normalizedPath);
          }}
        >
          {displayText}
        </a>,
      );
    } else {
      // Matched but not resolvable to a workspace path: keep as plain text.
      nodes.push(raw);
    }
    lastIndex = match.index + raw.length;
    matchIndex += 1;
    match = WORKSPACE_RENDER_PATTERN.exec(source);
  }
  if (lastIndex < source.length) {
    nodes.push(source.slice(lastIndex));
  }
  return nodes;
}
/**
 * Walks markdown element children and decorates string children with
 * workspace-path links via renderWorkspaceAwareText; non-string children
 * pass through untouched.
 */
function renderWorkspaceAwareChildren(
  children: ReactNode,
  keyPrefix: string,
  openWorkspacePath: (path: string) => void,
): ReactNode {
  // Normalize to an array so single children and child arrays share one path.
  const childList = Array.isArray(children) ? children : [children];
  const decorated: ReactNode[] = [];
  childList.forEach((child, position) => {
    if (typeof child === 'string') {
      decorated.push(...renderWorkspaceAwareText(child, `${keyPrefix}-${position}`, openWorkspacePath));
    } else {
      decorated.push(child);
    }
  });
  return decorated;
}
/**
 * Builds the ReactMarkdown component overrides used for workspace-aware
 * rendering: anchors route workspace links through `openWorkspacePath`,
 * external http(s) links open in a new tab, and the text children of
 * p / li / code are scanned for inline workspace path references.
 */
export function createWorkspaceMarkdownComponents(openWorkspacePath: (path: string) => void) {
  const renderAnchor = ({ href, children, ...props }: AnchorHTMLAttributes<HTMLAnchorElement>) => {
    const trimmedHref = String(href || '').trim();
    const workspacePath = parseWorkspaceLink(trimmedHref);
    if (workspacePath) {
      // Workspace link: intercept the click and open inside the dashboard.
      return (
        <a
          href="#"
          onClick={(event) => {
            event.preventDefault();
            openWorkspacePath(workspacePath);
          }}
          {...props}
        >
          {children}
        </a>
      );
    }
    if (isExternalHttpLink(trimmedHref)) {
      // External link: new tab, without exposing window.opener.
      return (
        <a href={trimmedHref} target="_blank" rel="noopener noreferrer" {...props}>
          {children}
        </a>
      );
    }
    return (
      <a href={trimmedHref || '#'} {...props}>
        {children}
      </a>
    );
  };
  return {
    a: renderAnchor,
    p: ({ children, ...props }: { children?: ReactNode }) => (
      <p {...props}>{renderWorkspaceAwareChildren(children, 'md-p', openWorkspacePath)}</p>
    ),
    li: ({ children, ...props }: { children?: ReactNode }) => (
      <li {...props}>{renderWorkspaceAwareChildren(children, 'md-li', openWorkspacePath)}</li>
    ),
    code: ({ children, ...props }: { children?: ReactNode }) => (
      <code {...props}>{renderWorkspaceAwareChildren(children, 'md-code', openWorkspacePath)}</code>
    ),
  };
}

View File

@ -0,0 +1,369 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { createPortal } from 'react-dom';
import { Eye, RefreshCw, X } from 'lucide-react';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import rehypeRaw from 'rehype-raw';
import rehypeSanitize from 'rehype-sanitize';
import { LucentIconButton } from '../../../components/lucent/LucentIconButton';
import { LucentSelect } from '../../../components/lucent/LucentSelect';
import { createWorkspaceMarkdownComponents, decorateWorkspacePathsForMarkdown } from '../shared/workspaceMarkdown';
/**
 * One entry in a bot's topic feed as delivered by the backend.
 */
export interface TopicFeedItem {
  // Unique feed item id; used for read-state tracking and React list keys.
  id: number;
  // Owning bot identifier.
  bot_id: string;
  // Topic routing key the item was published under; shown as a chip.
  topic_key: string;
  title: string;
  // Raw (markdown-ish) message body; rendered in the detail modal.
  content: string;
  // Severity bucket; the panel maps 'warn'/'error'/'success', anything else → INFO.
  level: string;
  tags: string[];
  // Optional pre-rendered display fields (title/summary/snippet/highlights)
  // consumed by deriveTopicSummaryCard before falling back to heuristics.
  view: Record<string, unknown>;
  // Producer of the item (defaults to 'mcp' in the UI when empty).
  source: string;
  // Deduplication key; not consumed by this panel component.
  dedupe_key: string;
  is_read: boolean;
  // ISO-ish timestamp string; formatted by formatTopicItemTime.
  created_at?: string;
}
/**
 * One selectable topic in the feed's topic filter dropdown.
 */
export interface TopicFeedOption {
  // Topic key used as the select option value.
  key: string;
  // Human-readable label shown in the dropdown.
  label: string;
}
/**
 * Props for TopicFeedPanel. All data loading and mutation is delegated to the
 * parent via callbacks; the panel itself only owns the detail-modal state.
 */
interface TopicFeedPanelProps {
  // true → Chinese UI strings, false → English.
  isZh: boolean;
  // Currently selected topic filter; '__all__' (or empty) means all topics.
  topicKey: string;
  topicOptions: TopicFeedOption[];
  // 'none' → no topics configured, 'inactive' → all disabled, 'ready' → normal.
  topicState?: 'none' | 'inactive' | 'ready';
  items: TopicFeedItem[];
  loading: boolean;
  loadingMore: boolean;
  // Pagination cursor; null hides the "load more" button.
  nextCursor: number | null;
  error: string;
  // Per-item-id flag while a mark-read request is in flight.
  readSavingById: Record<number, boolean>;
  onTopicChange: (value: string) => void;
  onRefresh: () => void;
  onMarkRead: (itemId: number) => void;
  onLoadMore: () => void;
  // Opens a workspace file referenced from feed content.
  onOpenWorkspacePath: (path: string) => void;
  // Optional shortcut to the topic settings view (empty states).
  onOpenTopicSettings?: () => void;
  // Notifies the parent when the detail modal opens/closes.
  onDetailOpenChange?: (open: boolean) => void;
  layout?: 'compact' | 'panel';
}
/**
 * Display fields for one feed item's summary card,
 * produced by deriveTopicSummaryCard.
 */
interface TopicSummaryCard {
  title: string;
  summary: string;
  // Up to three bullet lines shown under the summary.
  highlights: string[];
  // Optional extra excerpt rendered after the highlights.
  snippet: string;
}
/**
 * State of the detail modal. The fallback fields are a snapshot taken when the
 * modal was opened, used if the item is no longer present in the live list.
 */
interface TopicDetailState {
  itemId: number;
  fallbackTitle: string;
  fallbackContent: string;
}
/**
 * Formats a feed item timestamp for display as "MM/DD HH:mm" in the active
 * locale. Returns '-' for blank or unparseable input; falls back to the
 * default locale string if the explicit locale formatting throws.
 */
function formatTopicItemTime(raw: string | undefined, isZh: boolean): string {
  const trimmed = String(raw || '').trim();
  if (!trimmed) return '-';
  const parsed = new Date(trimmed);
  if (Number.isNaN(parsed.getTime())) return '-';
  const locale = isZh ? 'zh-CN' : 'en-US';
  try {
    return parsed.toLocaleString(locale, {
      month: '2-digit',
      day: '2-digit',
      hour: '2-digit',
      minute: '2-digit',
      hour12: false,
    });
  } catch {
    return parsed.toLocaleString();
  }
}
/**
 * Normalizes one line of raw feed content for summary use: drops markdown
 * table rows and horizontal rules entirely, and strips leading list, heading,
 * quote, or ordered-list markers from the remaining text.
 */
function cleanTopicLine(raw: unknown): string {
  const text = String(raw || '').trim();
  if (!text) return '';
  // Table rows ("| a | b |") and rules ("---", "===", ...) carry no prose.
  const isTableRow = /^\|.*\|$/.test(text);
  const isRule = /^[-=:_`~]{3,}$/.test(text);
  if (isTableRow || isRule) return '';
  // Strip leading markdown markers such as "- ", "## ", "> ", "1. ", "2) ".
  const markerPattern = /^\s{0,3}(?:[#>*-]+|\d+[.)])\s*/;
  return text.replace(markerPattern, '').trim();
}
/**
 * Derives the compact summary card (title / summary / highlights / snippet)
 * for a feed item. Pre-rendered fields in `item.view` win; otherwise the card
 * is built heuristically from the raw `item.content` lines.
 */
function deriveTopicSummaryCard(item: TopicFeedItem): TopicSummaryCard {
  const view = item.view && typeof item.view === 'object' ? item.view : {};
  const titleFromView = String(view.title || '').trim();
  const summaryFromView = String(view.summary || '').trim();
  const snippetFromView = String(view.snippet || '').trim();
  const highlightsFromView = Array.isArray(view.highlights)
    ? view.highlights.map((row) => String(row || '').trim()).filter(Boolean)
    : [];
  // Cleaned, non-empty content lines drive the fallback title/summary.
  const lines = String(item.content || '')
    .split('\n')
    .map(cleanTopicLine)
    .filter(Boolean);
  const title = titleFromView || String(item.title || '').trim() || lines[0] || item.topic_key || 'Topic';
  const summaryCandidates = lines.filter((line) => line !== title);
  const summary = summaryFromView || summaryCandidates.slice(0, 2).join(' ').slice(0, 220).trim() || title;
  const fallbackHighlights = String(item.content || '')
    .split('\n')
    .map((line) => ({ raw: String(line || '').trim(), cleaned: cleanTopicLine(line) }))
    .filter((row) => row.cleaned)
    // Keep only bullet lines and "key: value"-looking lines (ASCII or
    // fullwidth colon, the file being zh/en bilingual). The previous check
    // used includes('') — always true — which disabled this filter entirely.
    .filter((row) => row.raw.startsWith('-') || row.raw.startsWith('*') || row.cleaned.includes(':') || row.cleaned.includes(':'))
    .map((row) => row.cleaned.slice(0, 120))
    // De-duplicate while preserving order, then cap at three highlights.
    .filter((row, idx, arr) => arr.indexOf(row) === idx)
    .slice(0, 3);
  // Snippet: leftover lines not already used as summary or highlights.
  const snippetCandidates = summaryCandidates
    .filter((line) => line !== summary)
    .filter((line) => !fallbackHighlights.includes(line))
    .slice(0, 2);
  const snippet = snippetFromView || snippetCandidates.join(' ').slice(0, 180).trim();
  return {
    title,
    summary,
    highlights: highlightsFromView.length > 0 ? highlightsFromView.slice(0, 3) : fallbackHighlights,
    snippet,
  };
}
export function TopicFeedPanel({
isZh,
topicKey,
topicOptions,
topicState = 'ready',
items,
loading,
loadingMore,
nextCursor,
error,
readSavingById,
onTopicChange,
onRefresh,
onMarkRead,
onLoadMore,
onOpenWorkspacePath,
onOpenTopicSettings,
onDetailOpenChange,
layout = 'compact',
}: TopicFeedPanelProps) {
const markdownComponents = useMemo(
() => createWorkspaceMarkdownComponents((path) => onOpenWorkspacePath(path)),
[onOpenWorkspacePath],
);
const [detailState, setDetailState] = useState<TopicDetailState | null>(null);
const closeDetail = useCallback(() => setDetailState(null), []);
const detailItem = useMemo(
() => (detailState ? items.find((item) => Number(item.id || 0) === detailState.itemId) || null : null),
[detailState, items],
);
const detailTitle = detailItem
? String(detailItem.title || detailItem.topic_key || detailState?.fallbackTitle || '').trim()
: String(detailState?.fallbackTitle || '').trim();
const detailContent = detailItem
? String(detailItem.content || detailState?.fallbackContent || '').trim()
: String(detailState?.fallbackContent || '').trim();
const portalTarget = useMemo(() => {
if (typeof document === 'undefined') return null;
return document.querySelector('.app-shell[data-theme]') || document.body;
}, []);
useEffect(() => {
if (!detailState) return;
const onKeyDown = (event: KeyboardEvent) => {
if (event.key === 'Escape') {
closeDetail();
}
};
window.addEventListener('keydown', onKeyDown);
return () => window.removeEventListener('keydown', onKeyDown);
}, [closeDetail, detailState]);
useEffect(() => {
onDetailOpenChange?.(Boolean(detailState));
}, [detailState, onDetailOpenChange]);
return (
<div className={`ops-topic-feed ${layout === 'panel' ? 'is-panel' : ''}`}>
{topicState === 'ready' ? (
<div className="ops-topic-feed-toolbar">
<LucentSelect value={topicKey || '__all__'} onChange={(e) => onTopicChange(String(e.target.value || '__all__'))}>
<option value="__all__">{isZh ? '全部主题' : 'All Topics'}</option>
{topicOptions.map((row) => (
<option key={row.key} value={row.key}>
{row.label}
</option>
))}
</LucentSelect>
<LucentIconButton
className="btn btn-secondary btn-sm icon-btn"
disabled={loading || loadingMore}
onClick={onRefresh}
tooltip={isZh ? '刷新主题消息' : 'Refresh Topic feed'}
aria-label={isZh ? '刷新主题消息' : 'Refresh Topic feed'}
>
<RefreshCw size={14} className={loading ? 'animate-spin' : ''} />
</LucentIconButton>
</div>
) : null}
{error ? <div className="ops-empty-inline">{error}</div> : null}
<div className={`ops-topic-feed-list ${layout === 'panel' ? 'is-panel' : ''}`}>
{topicState === 'none' ? (
<div className="ops-topic-feed-empty-state">
<div className="ops-topic-feed-empty-title">{isZh ? '还没有配置主题' : 'No topics configured'}</div>
<div className="ops-topic-feed-empty-desc">
{isZh ? '请先到主题设置中新增至少一个主题Bot 的消息才能被路由到这里。' : 'Create at least one topic in settings before feed messages can appear here.'}
</div>
{onOpenTopicSettings ? (
<button className="btn btn-secondary btn-sm" onClick={onOpenTopicSettings}>
{isZh ? '打开主题设置' : 'Open Topic settings'}
</button>
) : null}
</div>
) : topicState === 'inactive' ? (
<div className="ops-topic-feed-empty-state">
<div className="ops-topic-feed-empty-title">{isZh ? '没有启用中的主题' : 'No active topics'}</div>
<div className="ops-topic-feed-empty-desc">
{isZh ? '你已经配置了主题,但当前都处于关闭状态。启用一个主题后,这里才会开始接收消息。' : 'Topics exist, but all of them are disabled. Enable one to start receiving feed items here.'}
</div>
{onOpenTopicSettings ? (
<button className="btn btn-secondary btn-sm" onClick={onOpenTopicSettings}>
{isZh ? '打开主题设置' : 'Open Topic settings'}
</button>
) : null}
</div>
) : loading ? (
<div className="ops-empty-inline">{isZh ? '读取主题消息中...' : 'Loading topic feed...'}</div>
) : items.length === 0 ? (
<div className="ops-empty-inline">{isZh ? '暂无主题消息。' : 'No topic messages.'}</div>
) : (
items.map((item) => {
const itemId = Number(item.id || 0);
const level = String(item.level || 'info').trim().toLowerCase();
const levelText = level === 'warn' ? 'WARN' : level === 'error' ? 'ERROR' : level === 'success' ? 'SUCCESS' : 'INFO';
const unread = !Boolean(item.is_read);
const card = deriveTopicSummaryCard(item);
const rawContent = String(item.content || '').trim();
return (
<article key={`topic-item-${itemId}`} className={`ops-topic-feed-item ${unread ? 'unread' : ''}`}>
<div className="ops-topic-feed-item-head">
<div className="ops-topic-feed-meta">
<span className={`ops-topic-feed-level ${level}`}>{levelText}</span>
<span className="ops-topic-feed-topic-chip mono">{item.topic_key || '-'}</span>
{unread ? <span className="ops-topic-feed-unread-dot" aria-label={isZh ? '未读' : 'Unread'} /> : null}
</div>
<div className="ops-topic-feed-meta-right">
<span className="ops-topic-feed-source-chip">{item.source || 'mcp'}</span>
<span className="ops-topic-feed-time">{formatTopicItemTime(item.created_at, isZh)}</span>
</div>
</div>
<div className="ops-topic-card-shell">
<div className="ops-topic-card-title">{card.title}</div>
<div className="ops-topic-card-summary">{card.summary}</div>
{card.highlights.length > 0 ? (
<div className="ops-topic-card-highlights">
{card.highlights.map((line) => (
<div key={`${itemId}-${line}`} className="ops-topic-card-highlight">
<span className="ops-topic-card-bullet" />
<span>{line}</span>
</div>
))}
</div>
) : null}
{card.snippet ? <div className="ops-topic-card-snippet">{card.snippet}</div> : null}
{rawContent ? null : null}
</div>
{(item.tags || []).length > 0 ? (
<div className="ops-topic-feed-tags">
{(item.tags || []).map((tag) => (
<span key={`${itemId}-${tag}`} className="ops-topic-feed-tag mono">
{tag}
</span>
))}
</div>
) : null}
<div className="ops-topic-feed-item-foot">
<span className={`ops-topic-read-state ${unread ? 'is-unread' : 'is-read'}`}>
{unread ? (isZh ? '新消息' : 'New') : (isZh ? '已读' : 'Read')}
</span>
<div className="ops-topic-feed-item-actions">
{rawContent ? (
<LucentIconButton
className="btn btn-secondary btn-sm icon-btn"
onClick={() => setDetailState({ itemId, fallbackTitle: card.title, fallbackContent: rawContent })}
tooltip={isZh ? '查看详情' : 'View details'}
aria-label={isZh ? '查看详情' : 'View details'}
>
<Eye size={14} />
</LucentIconButton>
) : null}
{unread ? (
<button
className="btn btn-secondary btn-sm"
disabled={Boolean(readSavingById[itemId])}
onClick={() => onMarkRead(itemId)}
>
{readSavingById[itemId] ? (isZh ? '处理中...' : 'Saving...') : (isZh ? '标记已读' : 'Mark read')}
</button>
) : null}
</div>
</div>
</article>
);
})
)}
</div>
{detailState && portalTarget
? createPortal(
<div className="modal-mask" onClick={closeDetail}>
<div className="modal-card modal-preview" onClick={(event) => event.stopPropagation()}>
<div className="modal-title-row workspace-preview-header">
<div className="workspace-preview-header-text">
<h3>{isZh ? '主题详情' : 'Topic detail'}</h3>
<span className="modal-sub">{detailTitle || (isZh ? '原文详情' : 'Raw detail')}</span>
</div>
<div className="workspace-preview-header-actions">
<LucentIconButton
className="btn btn-secondary btn-sm icon-btn"
onClick={closeDetail}
tooltip={isZh ? '关闭详情' : 'Close detail'}
aria-label={isZh ? '关闭详情' : 'Close detail'}
>
<X size={14} />
</LucentIconButton>
</div>
</div>
<div className="workspace-preview-body markdown">
<div className="workspace-markdown">
<ReactMarkdown
remarkPlugins={[remarkGfm]}
rehypePlugins={[rehypeRaw, rehypeSanitize]}
components={markdownComponents}
>
{decorateWorkspacePathsForMarkdown(detailContent)}
</ReactMarkdown>
</div>
</div>
</div>
</div>,
portalTarget,
)
: null}
{topicState === 'ready' && (items.length > 0 || nextCursor) ? (
<div className="row-between">
<span className="field-label">
{items.length > 0 ? (isZh ? `${items.length}` : `${items.length} items`) : ''}
</span>
{nextCursor ? (
<button className="btn btn-secondary btn-sm" disabled={loadingMore} onClick={onLoadMore}>
{loadingMore ? (isZh ? '加载中...' : 'Loading...') : (isZh ? '加载更多' : 'Load more')}
</button>
) : (
<span />
)}
</div>
) : null}
</div>
);
}