1043 lines
37 KiB
Python
1043 lines
37 KiB
Python
import json
import math
import os
import re
import uuid
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional

from sqlalchemy import delete as sql_delete, func
from sqlmodel import Session, select

from core.database import engine
from core.settings import (
    BOTS_WORKSPACE_ROOT,
    DEFAULT_CHAT_PULL_PAGE_SIZE,
    DEFAULT_PAGE_SIZE,
    DEFAULT_STT_AUDIO_FILTER,
    DEFAULT_STT_AUDIO_PREPROCESS,
    DEFAULT_STT_DEFAULT_LANGUAGE,
    DEFAULT_STT_FORCE_SIMPLIFIED,
    DEFAULT_STT_INITIAL_PROMPT,
    DEFAULT_STT_MAX_AUDIO_SECONDS,
    DEFAULT_UPLOAD_MAX_MB,
    DEFAULT_WORKSPACE_DOWNLOAD_EXTENSIONS,
    STT_DEVICE,
    STT_ENABLED_DEFAULT,
    STT_MODEL,
)
from models.bot import BotInstance, NanobotImage
from models.platform import BotActivityEvent, BotRequestUsage, PlatformSetting
from schemas.platform import (
    LoadingPageSettings,
    PlatformActivityItem,
    PlatformSettingsPayload,
    PlatformUsageItem,
    PlatformUsageResponse,
    PlatformUsageSummary,
    SystemSettingItem,
    SystemSettingPayload,
)
|
|
|
|
# ---------------------------------------------------------------------------
# Module-level defaults and the registry of managed platform settings.
# ---------------------------------------------------------------------------

# Empty tuple means "no restriction" on upload attachment extensions.
DEFAULT_ALLOWED_ATTACHMENT_EXTENSIONS: tuple[str, ...] = ()
DEFAULT_ACTIVITY_EVENT_RETENTION_DAYS = 7
ACTIVITY_EVENT_RETENTION_SETTING_KEY = "activity_event_retention_days"
# Minimum interval between opportunistic prune passes over bot_activity_event.
ACTIVITY_EVENT_PRUNE_INTERVAL = timedelta(minutes=10)
# Only these event types are persisted by record_activity_event(); anything
# else is silently dropped.
OPERATIONAL_ACTIVITY_EVENT_TYPES = {
    "bot_created",
    "bot_started",
    "bot_stopped",
    "bot_enabled",
    "bot_disabled",
    "bot_deactivated",
    "command_submitted",
    "command_failed",
    "history_cleared",
}
# Core setting keys that are surfaced through PlatformSettingsPayload.
SETTING_KEYS = (
    "page_size",
    "chat_pull_page_size",
    "upload_max_mb",
    "allowed_attachment_extensions",
    "workspace_download_extensions",
    "speech_enabled",
)
# Keys that delete_system_setting() refuses to remove.
PROTECTED_SETTING_KEYS = set(SETTING_KEYS) | {ACTIVITY_EVENT_RETENTION_SETTING_KEY}
# Legacy keys deleted on startup by ensure_default_system_settings().
DEPRECATED_SETTING_KEYS = {
    "loading_page",
    "speech_max_audio_seconds",
    "speech_default_language",
    "speech_force_simplified",
    "speech_audio_preprocess",
    "speech_audio_filter",
    "speech_initial_prompt",
}
# Metadata + default value for every managed setting row; used for seeding
# and as the fallback when a payload omits a field.
SYSTEM_SETTING_DEFINITIONS: Dict[str, Dict[str, Any]] = {
    "page_size": {
        "name": "分页大小",
        "category": "ui",
        "description": "平台各类列表默认每页条数。",
        "value_type": "integer",
        "value": DEFAULT_PAGE_SIZE,
        "is_public": True,
        "sort_order": 5,
    },
    "chat_pull_page_size": {
        "name": "对话懒加载条数",
        "category": "chat",
        "description": "Bot 对话区向上懒加载时每次读取的消息条数。",
        "value_type": "integer",
        "value": DEFAULT_CHAT_PULL_PAGE_SIZE,
        "is_public": True,
        "sort_order": 8,
    },
    "upload_max_mb": {
        "name": "上传大小限制",
        "category": "upload",
        "description": "单文件上传大小限制,单位 MB。",
        "value_type": "integer",
        "value": DEFAULT_UPLOAD_MAX_MB,
        "is_public": False,
        "sort_order": 10,
    },
    "allowed_attachment_extensions": {
        "name": "允许附件后缀",
        "category": "upload",
        "description": "允许上传的附件后缀列表,留空表示不限制。",
        "value_type": "json",
        "value": list(DEFAULT_ALLOWED_ATTACHMENT_EXTENSIONS),
        "is_public": False,
        "sort_order": 20,
    },
    "workspace_download_extensions": {
        "name": "工作区下载后缀",
        "category": "workspace",
        "description": "命中后缀的工作区文件默认走下载模式。",
        "value_type": "json",
        "value": list(DEFAULT_WORKSPACE_DOWNLOAD_EXTENSIONS),
        "is_public": False,
        "sort_order": 30,
    },
    "speech_enabled": {
        "name": "语音识别开关",
        "category": "speech",
        "description": "控制 Bot 语音转写功能是否启用。",
        "value_type": "boolean",
        "value": STT_ENABLED_DEFAULT,
        "is_public": True,
        "sort_order": 32,
    },
    ACTIVITY_EVENT_RETENTION_SETTING_KEY: {
        "name": "活动事件保留天数",
        "category": "maintenance",
        "description": "bot_activity_event 运维事件的保留天数,超期记录会自动清理。",
        "value_type": "integer",
        "value": DEFAULT_ACTIVITY_EVENT_RETENTION_DAYS,
        "is_public": False,
        "sort_order": 34,
    },
}

# Timestamp of the last prune pass; throttles prune_expired_activity_events().
_last_activity_event_prune_at: Optional[datetime] = None
|
|
|
|
|
|
def _utcnow() -> datetime:
|
|
return datetime.utcnow()
|
|
|
|
|
|
def _normalize_activity_event_retention_days(raw: Any) -> int:
|
|
try:
|
|
value = int(raw)
|
|
except Exception:
|
|
value = DEFAULT_ACTIVITY_EVENT_RETENTION_DAYS
|
|
return max(1, min(3650, value))
|
|
|
|
|
|
def _normalize_extension(raw: Any) -> str:
|
|
text = str(raw or "").strip().lower()
|
|
if not text:
|
|
return ""
|
|
if text.startswith("*."):
|
|
text = text[1:]
|
|
if not text.startswith("."):
|
|
text = f".{text}"
|
|
if not re.fullmatch(r"\.[a-z0-9][a-z0-9._+-]{0,31}", text):
|
|
return ""
|
|
return text
|
|
|
|
|
|
def _normalize_extension_list(rows: Any) -> List[str]:
    """Normalize a raw list into deduped canonical extensions, order kept."""
    if not isinstance(rows, list):
        return []
    # Insertion-ordered dict gives first-wins dedupe for free.
    seen: Dict[str, None] = {}
    for entry in rows:
        ext = _normalize_extension(entry)
        if ext:
            seen.setdefault(ext, None)
    return list(seen)
|
|
|
|
|
|
def _legacy_env_int(name: str, default: int, min_value: int, max_value: int) -> int:
|
|
raw = os.getenv(name)
|
|
if raw is None:
|
|
return default
|
|
try:
|
|
value = int(str(raw).strip())
|
|
except Exception:
|
|
value = default
|
|
return max(min_value, min(max_value, value))
|
|
|
|
|
|
def _legacy_env_bool(name: str, default: bool) -> bool:
|
|
raw = os.getenv(name)
|
|
if raw is None:
|
|
return default
|
|
return str(raw).strip().lower() in {"1", "true", "yes", "on"}
|
|
|
|
|
|
def _legacy_env_extensions(name: str, default: List[str]) -> List[str]:
|
|
raw = os.getenv(name)
|
|
if raw is None:
|
|
return list(default)
|
|
source = re.split(r"[,;\s]+", str(raw))
|
|
normalized: List[str] = []
|
|
for item in source:
|
|
ext = _normalize_extension(item)
|
|
if ext and ext not in normalized:
|
|
normalized.append(ext)
|
|
return normalized
|
|
|
|
|
|
def _bootstrap_platform_setting_values() -> Dict[str, Any]:
    """Seed values for the core settings, honoring legacy env overrides."""
    values: Dict[str, Any] = {}
    values["page_size"] = _legacy_env_int("PAGE_SIZE", DEFAULT_PAGE_SIZE, 1, 100)
    values["chat_pull_page_size"] = _legacy_env_int(
        "CHAT_PULL_PAGE_SIZE", DEFAULT_CHAT_PULL_PAGE_SIZE, 10, 500
    )
    values["upload_max_mb"] = _legacy_env_int("UPLOAD_MAX_MB", DEFAULT_UPLOAD_MAX_MB, 1, 2048)
    values["allowed_attachment_extensions"] = _legacy_env_extensions(
        "ALLOWED_ATTACHMENT_EXTENSIONS", list(DEFAULT_ALLOWED_ATTACHMENT_EXTENSIONS)
    )
    values["workspace_download_extensions"] = _legacy_env_extensions(
        "WORKSPACE_DOWNLOAD_EXTENSIONS", list(DEFAULT_WORKSPACE_DOWNLOAD_EXTENSIONS)
    )
    values["speech_enabled"] = _legacy_env_bool("STT_ENABLED", STT_ENABLED_DEFAULT)
    return values
|
|
|
|
|
|
def _bot_workspace_root(bot_id: str) -> str:
    """Absolute path of the bot's nanobot workspace directory."""
    relative = os.path.join(BOTS_WORKSPACE_ROOT, bot_id, ".nanobot", "workspace")
    return os.path.abspath(relative)
|
|
|
|
|
|
def _bot_data_root(bot_id: str) -> str:
    """Absolute path of the bot's nanobot data directory."""
    relative = os.path.join(BOTS_WORKSPACE_ROOT, bot_id, ".nanobot")
    return os.path.abspath(relative)
|
|
|
|
|
|
def _calc_dir_size_bytes(path: str) -> int:
|
|
total = 0
|
|
if not os.path.isdir(path):
|
|
return 0
|
|
for root, _, files in os.walk(path):
|
|
for name in files:
|
|
target = os.path.join(root, name)
|
|
try:
|
|
if os.path.islink(target):
|
|
continue
|
|
total += int(os.path.getsize(target))
|
|
except OSError:
|
|
continue
|
|
return total
|
|
|
|
|
|
def _read_bot_resources(bot_id: str) -> Dict[str, Any]:
    """Load the bot's resources.json and return clamped cpu/memory/storage.

    Both camelCase and snake_case keys are accepted.  A value of 0 is kept
    as-is (treated as "no explicit limit" by callers — TODO confirm);
    everything else is clamped to a sane band.
    """
    config_path = os.path.join(_bot_data_root(bot_id), "resources.json")
    data: Dict[str, Any] = {}
    if os.path.isfile(config_path):
        try:
            with open(config_path, "r", encoding="utf-8") as handle:
                parsed = json.load(handle)
            if isinstance(parsed, dict):
                data = parsed
        except Exception:
            # Corrupt or unreadable file degrades to defaults.
            data = {}

    def _to_float(value: Any, fallback: float) -> float:
        try:
            return float(value)
        except Exception:
            return fallback

    def _to_int(value: Any, fallback: int) -> int:
        try:
            return int(value)
        except Exception:
            return fallback

    cpu = _to_float(data.get("cpuCores", data.get("cpu_cores", 1.0)), 1.0)
    memory = _to_int(data.get("memoryMB", data.get("memory_mb", 1024)), 1024)
    storage = _to_int(data.get("storageGB", data.get("storage_gb", 10)), 10)
    cpu = 0.0 if cpu == 0 else min(16.0, max(0.1, cpu))
    memory = 0 if memory == 0 else min(65536, max(256, memory))
    storage = 0 if storage == 0 else min(1024, max(1, storage))
    return {"cpu_cores": cpu, "memory_mb": memory, "storage_gb": storage}
|
|
|
|
|
|
def estimate_tokens(text: str) -> int:
    """Rough token estimate: 1 per CJK char, ~4 chars per ASCII word piece.

    Any other non-whitespace character counts as one token.  Non-empty
    input always yields at least 1.
    """
    content = str(text or "").strip()
    if not content:
        return 0
    cjk = re.compile(r"[\u4e00-\u9fff]")
    word = re.compile(r"[A-Za-z0-9_]+")
    count = 0
    for piece in re.findall(r"[\u4e00-\u9fff]|[A-Za-z0-9_]+|[^\s]", content):
        if cjk.fullmatch(piece):
            count += 1
        elif word.fullmatch(piece):
            count += max(1, math.ceil(len(piece) / 4))
        else:
            count += 1
    return max(1, count)
|
|
|
|
|
|
def default_platform_settings() -> PlatformSettingsPayload:
    """Build the default payload: env-bootstrapped core + static STT defaults."""
    seed = _bootstrap_platform_setting_values()
    kwargs: Dict[str, Any] = {
        "page_size": int(seed["page_size"]),
        "chat_pull_page_size": int(seed["chat_pull_page_size"]),
        "upload_max_mb": int(seed["upload_max_mb"]),
        "allowed_attachment_extensions": list(seed["allowed_attachment_extensions"]),
        "workspace_download_extensions": list(seed["workspace_download_extensions"]),
        "speech_enabled": bool(seed["speech_enabled"]),
        # Speech tuning parameters are static env defaults, not DB-backed.
        "speech_max_audio_seconds": DEFAULT_STT_MAX_AUDIO_SECONDS,
        "speech_default_language": DEFAULT_STT_DEFAULT_LANGUAGE,
        "speech_force_simplified": DEFAULT_STT_FORCE_SIMPLIFIED,
        "speech_audio_preprocess": DEFAULT_STT_AUDIO_PREPROCESS,
        "speech_audio_filter": DEFAULT_STT_AUDIO_FILTER,
        "speech_initial_prompt": DEFAULT_STT_INITIAL_PROMPT,
        "loading_page": LoadingPageSettings(),
    }
    return PlatformSettingsPayload(**kwargs)
|
|
|
|
|
|
def _normalize_setting_key(raw: Any) -> str:
|
|
text = str(raw or "").strip()
|
|
return re.sub(r"[^a-zA-Z0-9_.-]+", "_", text).strip("._-").lower()
|
|
|
|
|
|
def _normalize_setting_value(value: Any, value_type: str) -> Any:
|
|
normalized_type = str(value_type or "json").strip().lower() or "json"
|
|
if normalized_type == "integer":
|
|
return int(value or 0)
|
|
if normalized_type == "float":
|
|
return float(value or 0)
|
|
if normalized_type == "boolean":
|
|
if isinstance(value, bool):
|
|
return value
|
|
return str(value or "").strip().lower() in {"1", "true", "yes", "on"}
|
|
if normalized_type == "string":
|
|
return str(value or "")
|
|
if normalized_type == "json":
|
|
return value
|
|
raise ValueError(f"Unsupported value_type: {normalized_type}")
|
|
|
|
|
|
def _read_setting_value(row: PlatformSetting) -> Any:
    """Decode a stored setting row's JSON value and coerce it to its type."""
    try:
        decoded = json.loads(row.value_json or "null")
    except Exception:
        # Corrupt JSON degrades to None, then gets type-coerced.
        decoded = None
    return _normalize_setting_value(decoded, row.value_type)
|
|
|
|
|
|
def _setting_item_from_row(row: PlatformSetting) -> Dict[str, Any]:
    """Serialize a PlatformSetting row into the API item dict."""
    fields: Dict[str, Any] = {
        "key": row.key,
        "name": row.name,
        "category": row.category,
        "description": row.description,
        "value_type": row.value_type,
        "value": _read_setting_value(row),
        "is_public": bool(row.is_public),
        "sort_order": int(row.sort_order or 100),
        # Timestamps are stored as naive UTC; the "Z" suffix marks that.
        "created_at": row.created_at.isoformat() + "Z",
        "updated_at": row.updated_at.isoformat() + "Z",
    }
    return SystemSettingItem(**fields).model_dump()
|
|
|
|
|
|
def _upsert_setting_row(
    session: Session,
    key: str,
    *,
    name: str,
    category: str,
    description: str,
    value_type: str,
    value: Any,
    is_public: bool,
    sort_order: int,
) -> PlatformSetting:
    """Insert or update a PlatformSetting row (the caller commits).

    The key is slugified and the value coerced to *value_type* before being
    stored as JSON.  Raises ValueError for an empty key or an unsupported
    value type.
    """
    normalized_key = _normalize_setting_key(key)
    if not normalized_key:
        raise ValueError("Setting key is required")
    normalized_type = str(value_type or "json").strip().lower() or "json"
    normalized_value = _normalize_setting_value(value, normalized_type)
    now = _utcnow()
    row = session.get(PlatformSetting, normalized_key)
    if row is None:
        # Fresh row; blank metadata falls back to sensible defaults.
        row = PlatformSetting(
            key=normalized_key,
            name=str(name or normalized_key),
            category=str(category or "general"),
            description=str(description or ""),
            value_type=normalized_type,
            value_json=json.dumps(normalized_value, ensure_ascii=False),
            is_public=bool(is_public),
            sort_order=int(sort_order or 100),
            created_at=now,
            updated_at=now,
        )
    else:
        # Update in place; blank inputs preserve the previously stored metadata.
        row.name = str(name or row.name or normalized_key)
        row.category = str(category or row.category or "general")
        row.description = str(description or row.description or "")
        row.value_type = normalized_type
        row.value_json = json.dumps(normalized_value, ensure_ascii=False)
        row.is_public = bool(is_public)
        row.sort_order = int(sort_order or row.sort_order or 100)
        row.updated_at = now
    session.add(row)
    return row
|
|
|
|
|
|
def ensure_default_system_settings(session: Session) -> None:
    """Seed and migrate the per-key settings rows.

    Three passes, in order:
    1. Migrate the legacy single "global" JSON blob into per-key rows
       (legacy values win over env bootstrap values), then delete it.
    2. Delete rows for deprecated keys.
    3. Create any missing definition rows and backfill blank metadata on
       existing ones.  Only commits when something changed.
    """
    bootstrap_values = _bootstrap_platform_setting_values()
    legacy_row = session.get(PlatformSetting, "global")
    if legacy_row is not None:
        try:
            legacy_data = json.loads(legacy_row.value_json or "{}")
        except Exception:
            legacy_data = {}
        if isinstance(legacy_data, dict):
            for key in SETTING_KEYS:
                meta = SYSTEM_SETTING_DEFINITIONS[key]
                _upsert_setting_row(
                    session,
                    key,
                    name=str(meta["name"]),
                    category=str(meta["category"]),
                    description=str(meta["description"]),
                    # Priority: legacy blob -> env bootstrap -> static default.
                    value_type=str(meta["value_type"]),
                    value=legacy_data.get(key, bootstrap_values.get(key, meta["value"])),
                    is_public=bool(meta["is_public"]),
                    sort_order=int(meta["sort_order"]),
                )
        session.delete(legacy_row)
        # Commit immediately so the migration is durable even if later steps fail.
        session.commit()

    dirty = False
    for key in DEPRECATED_SETTING_KEYS:
        legacy_row = session.get(PlatformSetting, key)
        if legacy_row is not None:
            session.delete(legacy_row)
            dirty = True

    for key, meta in SYSTEM_SETTING_DEFINITIONS.items():
        row = session.get(PlatformSetting, key)
        if row is None:
            _upsert_setting_row(
                session,
                key,
                name=str(meta["name"]),
                category=str(meta["category"]),
                description=str(meta["description"]),
                value_type=str(meta["value_type"]),
                value=bootstrap_values.get(key, meta["value"]),
                is_public=bool(meta["is_public"]),
                sort_order=int(meta["sort_order"]),
            )
            dirty = True
            continue
        # Existing row: only backfill missing/blank metadata, never override
        # operator-set values.
        changed = False
        for field in ("name", "category", "description", "value_type"):
            value = str(meta[field])
            if not getattr(row, field):
                setattr(row, field, value)
                changed = True
        if getattr(row, "sort_order", None) is None:
            row.sort_order = int(meta["sort_order"])
            changed = True
        if getattr(row, "is_public", None) is None:
            row.is_public = bool(meta["is_public"])
            changed = True
        if changed:
            row.updated_at = _utcnow()
            session.add(row)
            dirty = True
    if dirty:
        session.commit()
|
|
|
|
|
|
def list_system_settings(session: Session, search: str = "") -> List[Dict[str, Any]]:
    """List all settings ordered by sort_order then key, optionally filtered.

    The *search* keyword matches (case-insensitively) against key, name,
    category and description.
    """
    ensure_default_system_settings(session)
    stmt = select(PlatformSetting).order_by(
        PlatformSetting.sort_order.asc(), PlatformSetting.key.asc()
    )
    items = [_setting_item_from_row(row) for row in session.exec(stmt).all()]
    needle = str(search or "").strip().lower()
    if not needle:
        return items

    def _matches(item: Dict[str, Any]) -> bool:
        haystacks = (item["key"], item["name"], item["category"], item["description"])
        return any(needle in str(text).lower() for text in haystacks)

    return [item for item in items if _matches(item)]
|
|
|
|
|
|
def create_or_update_system_setting(session: Session, payload: SystemSettingPayload) -> Dict[str, Any]:
    """Upsert a setting from an API payload and return the serialized item.

    Missing payload fields fall back to the built-in definition for known
    keys (or generic defaults for custom keys).  Changing the retention
    setting triggers an immediate prune pass before the commit.
    """
    ensure_default_system_settings(session)
    normalized_key = _normalize_setting_key(payload.key)
    definition = SYSTEM_SETTING_DEFINITIONS.get(normalized_key, {})
    row = _upsert_setting_row(
        session,
        payload.key,
        name=payload.name or str(definition.get("name") or payload.key),
        category=payload.category or str(definition.get("category") or "general"),
        description=payload.description or str(definition.get("description") or ""),
        value_type=payload.value_type or str(definition.get("value_type") or "json"),
        value=payload.value if payload.value is not None else definition.get("value"),
        is_public=payload.is_public,
        sort_order=payload.sort_order or int(definition.get("sort_order") or 100),
    )
    if normalized_key == ACTIVITY_EVENT_RETENTION_SETTING_KEY:
        # Apply the new retention window right away (force bypasses throttle).
        prune_expired_activity_events(session, force=True)
    session.commit()
    session.refresh(row)
    return _setting_item_from_row(row)
|
|
|
|
|
|
def delete_system_setting(session: Session, key: str) -> None:
    """Delete a non-core setting row.

    Raises ValueError when the key is protected or the row does not exist.
    """
    normalized = _normalize_setting_key(key)
    if normalized in PROTECTED_SETTING_KEYS:
        raise ValueError("Core platform settings cannot be deleted")
    row = session.get(PlatformSetting, normalized)
    if row is None:
        raise ValueError("Setting not found")
    session.delete(row)
    session.commit()
|
|
|
|
|
|
def get_platform_settings(session: Session) -> PlatformSettingsPayload:
    """Return the effective platform settings: defaults overlaid with DB rows.

    Numeric values are re-clamped on read so out-of-band DB edits cannot
    produce out-of-range settings.
    """
    defaults = default_platform_settings()
    ensure_default_system_settings(session)
    rows = session.exec(select(PlatformSetting).where(PlatformSetting.key.in_(SETTING_KEYS))).all()
    data: Dict[str, Any] = {row.key: _read_setting_value(row) for row in rows}

    merged = defaults.model_dump()
    merged["page_size"] = max(1, min(100, int(data.get("page_size") or merged["page_size"])))
    merged["chat_pull_page_size"] = max(10, min(500, int(data.get("chat_pull_page_size") or merged["chat_pull_page_size"])))
    merged["upload_max_mb"] = int(data.get("upload_max_mb") or merged["upload_max_mb"])
    merged["allowed_attachment_extensions"] = _normalize_extension_list(
        data.get("allowed_attachment_extensions", merged["allowed_attachment_extensions"])
    )
    merged["workspace_download_extensions"] = _normalize_extension_list(
        data.get("workspace_download_extensions", merged["workspace_download_extensions"])
    )
    merged["speech_enabled"] = bool(data.get("speech_enabled", merged["speech_enabled"]))
    # NOTE(review): "loading_page" is not in SETTING_KEYS, so with the current
    # query this branch only fires if the key set changes — kept for safety.
    loading_page = data.get("loading_page")
    if isinstance(loading_page, dict):
        current = dict(merged["loading_page"])
        for key in ("title", "subtitle", "description"):
            value = str(loading_page.get(key) or "").strip()
            if value:
                current[key] = value
        merged["loading_page"] = current
    return PlatformSettingsPayload.model_validate(merged)
|
|
|
|
|
|
def save_platform_settings(session: Session, payload: PlatformSettingsPayload) -> PlatformSettingsPayload:
    """Normalize *payload* and persist each core setting as its own row.

    Returns the normalized payload (clamped numbers, deduped extension
    lists) actually written to the database.
    """
    normalized = PlatformSettingsPayload(
        page_size=max(1, min(100, int(payload.page_size))),
        chat_pull_page_size=max(10, min(500, int(payload.chat_pull_page_size))),
        upload_max_mb=payload.upload_max_mb,
        allowed_attachment_extensions=_normalize_extension_list(payload.allowed_attachment_extensions),
        workspace_download_extensions=_normalize_extension_list(payload.workspace_download_extensions),
        speech_enabled=bool(payload.speech_enabled),
        # Re-validate through the schema to get a detached copy.
        loading_page=LoadingPageSettings.model_validate(payload.loading_page.model_dump()),
    )
    payload_by_key = normalized.model_dump()
    for key in SETTING_KEYS:
        definition = SYSTEM_SETTING_DEFINITIONS[key]
        _upsert_setting_row(
            session,
            key,
            name=str(definition["name"]),
            category=str(definition["category"]),
            description=str(definition["description"]),
            value_type=str(definition["value_type"]),
            value=payload_by_key[key],
            is_public=bool(definition["is_public"]),
            sort_order=int(definition["sort_order"]),
        )
    session.commit()
    return normalized
|
|
|
|
|
|
def get_platform_settings_snapshot() -> PlatformSettingsPayload:
    """Read the merged platform settings using a fresh short-lived session."""
    with Session(engine) as db_session:
        return get_platform_settings(db_session)
|
|
|
|
|
|
def get_upload_max_mb() -> int:
    """Current per-file upload limit in MB."""
    snapshot = get_platform_settings_snapshot()
    return snapshot.upload_max_mb
|
|
|
|
|
|
def get_allowed_attachment_extensions() -> List[str]:
    """Currently allowed attachment extensions (empty list = unrestricted)."""
    snapshot = get_platform_settings_snapshot()
    return snapshot.allowed_attachment_extensions
|
|
|
|
|
|
def get_workspace_download_extensions() -> List[str]:
    """Extensions that force download mode for workspace files."""
    snapshot = get_platform_settings_snapshot()
    return snapshot.workspace_download_extensions
|
|
|
|
|
|
def get_page_size() -> int:
    """Current default list page size."""
    snapshot = get_platform_settings_snapshot()
    return snapshot.page_size
|
|
|
|
|
|
def get_chat_pull_page_size() -> int:
    """Current chat lazy-load page size."""
    snapshot = get_platform_settings_snapshot()
    return snapshot.chat_pull_page_size
|
|
|
|
|
|
def get_speech_runtime_settings() -> Dict[str, Any]:
    """Runtime STT config: DB-backed enable flag plus static env defaults."""
    platform = get_platform_settings_snapshot()
    language = str(DEFAULT_STT_DEFAULT_LANGUAGE or "zh").strip().lower()
    runtime: Dict[str, Any] = {
        "enabled": bool(platform.speech_enabled),
        "max_audio_seconds": int(DEFAULT_STT_MAX_AUDIO_SECONDS),
        "default_language": language or "zh",
        "force_simplified": bool(DEFAULT_STT_FORCE_SIMPLIFIED),
        "audio_preprocess": bool(DEFAULT_STT_AUDIO_PREPROCESS),
        "audio_filter": str(DEFAULT_STT_AUDIO_FILTER or "").strip(),
        "initial_prompt": str(DEFAULT_STT_INITIAL_PROMPT or "").strip(),
        "model": STT_MODEL,
        "device": STT_DEVICE,
    }
    return runtime
|
|
|
|
|
|
def get_activity_event_retention_days(session: Session) -> int:
    """Retention window (days) for activity events, clamped to 1..3650."""
    row = session.get(PlatformSetting, ACTIVITY_EVENT_RETENTION_SETTING_KEY)
    if row is None:
        return DEFAULT_ACTIVITY_EVENT_RETENTION_DAYS
    try:
        raw_value = _read_setting_value(row)
    except Exception:
        # Unreadable row falls back to the default before clamping.
        raw_value = DEFAULT_ACTIVITY_EVENT_RETENTION_DAYS
    return _normalize_activity_event_retention_days(raw_value)
|
|
|
|
|
|
def create_usage_request(
    session: Session,
    bot_id: str,
    command: str,
    attachments: Optional[List[str]] = None,
    channel: str = "dashboard",
    metadata: Optional[Dict[str, Any]] = None,
    provider: Optional[str] = None,
    model: Optional[str] = None,
) -> str:
    """Create a PENDING usage row for an outgoing request.

    The input token count is estimated from *command*; attachments are
    stripped and empty entries dropped.  The row is flushed (not committed)
    so its id is assigned.  Returns the generated request id (hex uuid).
    """
    request_id = uuid.uuid4().hex
    attachment_rows = [str(item).strip() for item in (attachments or []) if str(item).strip()]
    input_tokens = estimate_tokens(command)
    # Fix: a single timestamp for started/created/updated — the original took
    # three separate _utcnow() readings, which could differ by microseconds
    # for what is logically one creation instant.
    now = _utcnow()
    usage = BotRequestUsage(
        bot_id=bot_id,
        request_id=request_id,
        channel=channel,
        status="PENDING",
        provider=(str(provider or "").strip() or None),
        model=(str(model or "").strip() or None),
        token_source="estimated",
        input_tokens=input_tokens,
        output_tokens=0,
        total_tokens=input_tokens,
        input_text_preview=str(command or "")[:400],
        attachments_json=json.dumps(attachment_rows, ensure_ascii=False) if attachment_rows else None,
        metadata_json=json.dumps(metadata or {}, ensure_ascii=False),
        started_at=now,
        created_at=now,
        updated_at=now,
    )
    session.add(usage)
    session.flush()
    return request_id
|
|
|
|
|
|
def bind_usage_message(
    session: Session,
    bot_id: str,
    request_id: str,
    message_id: Optional[int],
) -> Optional[BotRequestUsage]:
    """Attach a chat message id to the matching pending usage row, if any.

    Returns the updated row, or None when either id is missing or no
    pending row matches.
    """
    if not request_id or not message_id:
        return None
    pending = _find_pending_usage_by_request_id(session, bot_id, request_id)
    if pending is None:
        return None
    pending.message_id = int(message_id)
    pending.updated_at = _utcnow()
    session.add(pending)
    return pending
|
|
|
|
|
|
def _find_latest_pending_usage(session: Session, bot_id: str) -> Optional[BotRequestUsage]:
    """Most recent PENDING usage row for *bot_id*, or None."""
    query = select(BotRequestUsage).where(
        BotRequestUsage.bot_id == bot_id,
        BotRequestUsage.status == "PENDING",
    )
    query = query.order_by(
        BotRequestUsage.started_at.desc(), BotRequestUsage.id.desc()
    ).limit(1)
    return session.exec(query).first()
|
|
|
|
|
|
def _find_pending_usage_by_request_id(session: Session, bot_id: str, request_id: str) -> Optional[BotRequestUsage]:
    """Most recent PENDING usage row matching *request_id*, or None."""
    if not request_id:
        return None
    query = select(BotRequestUsage).where(
        BotRequestUsage.bot_id == bot_id,
        BotRequestUsage.request_id == request_id,
        BotRequestUsage.status == "PENDING",
    )
    query = query.order_by(
        BotRequestUsage.started_at.desc(), BotRequestUsage.id.desc()
    ).limit(1)
    return session.exec(query).first()
|
|
|
|
|
|
def finalize_usage_from_packet(session: Session, bot_id: str, packet: Dict[str, Any]) -> Optional[BotRequestUsage]:
    """Complete a pending usage row from a bot response packet.

    Matches by request_id first, then falls back to the newest pending row.
    Token counts come from the packet's "usage" block when present
    (token_source "exact"); missing output counts are estimated from the
    response text (source downgraded to "mixed").  Returns the updated row
    or None when nothing is pending.
    """
    request_id = str(packet.get("request_id") or "").strip()
    usage_row = _find_pending_usage_by_request_id(session, bot_id, request_id) or _find_latest_pending_usage(session, bot_id)
    if not usage_row:
        return None

    raw_usage = packet.get("usage")
    input_tokens: Optional[int] = None
    output_tokens: Optional[int] = None
    source = "estimated"
    if isinstance(raw_usage, dict):
        # Accept several provider naming conventions; first present key wins.
        for key in ("input_tokens", "prompt_tokens", "promptTokens"):
            if raw_usage.get(key) is not None:
                try:
                    input_tokens = int(raw_usage.get(key) or 0)
                except Exception:
                    input_tokens = None
                break
        for key in ("output_tokens", "completion_tokens", "completionTokens"):
            if raw_usage.get(key) is not None:
                try:
                    output_tokens = int(raw_usage.get(key) or 0)
                except Exception:
                    output_tokens = None
                break
        if input_tokens is not None or output_tokens is not None:
            source = "exact"

    text = str(packet.get("text") or packet.get("content") or "").strip()
    provider = str(packet.get("provider") or "").strip()
    model = str(packet.get("model") or "").strip()
    message_id = packet.get("message_id")
    if input_tokens is None:
        # Keep the estimate recorded at request creation time.
        input_tokens = usage_row.input_tokens
    if output_tokens is None:
        output_tokens = estimate_tokens(text)
        if source == "exact":
            # Exact input but estimated output -> partially exact.
            source = "mixed"

    if provider:
        usage_row.provider = provider[:120]
    if model:
        usage_row.model = model[:255]
    if message_id is not None:
        try:
            usage_row.message_id = int(message_id)
        except Exception:
            # Non-numeric message id is ignored; binding is best-effort.
            pass
    usage_row.output_tokens = max(0, int(output_tokens or 0))
    usage_row.input_tokens = max(0, int(input_tokens or 0))
    usage_row.total_tokens = usage_row.input_tokens + usage_row.output_tokens
    usage_row.output_text_preview = text[:400] if text else usage_row.output_text_preview
    usage_row.status = "COMPLETED"
    usage_row.token_source = source
    usage_row.completed_at = _utcnow()
    usage_row.updated_at = _utcnow()
    session.add(usage_row)
    return usage_row
|
|
|
|
|
|
def fail_latest_usage(session: Session, bot_id: str, detail: str) -> Optional[BotRequestUsage]:
    """Mark the bot's newest pending usage row as ERROR with *detail*.

    Returns the updated row, or None when nothing is pending.
    """
    pending = _find_latest_pending_usage(session, bot_id)
    if pending is None:
        return None
    pending.status = "ERROR"
    pending.error_text = str(detail or "")[:500]
    pending.completed_at = _utcnow()
    pending.updated_at = _utcnow()
    session.add(pending)
    return pending
|
|
|
|
|
|
def prune_expired_activity_events(session: Session, force: bool = False) -> int:
    """Delete activity events older than the configured retention window.

    Throttled via a module-global timestamp to at most once per
    ACTIVITY_EVENT_PRUNE_INTERVAL unless *force* is True.  The delete is
    issued on the session but NOT committed here.  Returns the number of
    rows deleted (0 when throttled or when rowcount is unavailable).
    """
    global _last_activity_event_prune_at

    now = _utcnow()
    if not force and _last_activity_event_prune_at and now - _last_activity_event_prune_at < ACTIVITY_EVENT_PRUNE_INTERVAL:
        return 0

    retention_days = get_activity_event_retention_days(session)
    cutoff = now - timedelta(days=retention_days)
    result = session.exec(
        sql_delete(BotActivityEvent).where(BotActivityEvent.created_at < cutoff)
    )
    _last_activity_event_prune_at = now
    # rowcount may be -1/None depending on the DB driver; coerce to >= 0 int.
    return int(getattr(result, "rowcount", 0) or 0)
|
|
|
|
|
|
def record_activity_event(
    session: Session,
    bot_id: str,
    event_type: str,
    request_id: Optional[str] = None,
    channel: str = "dashboard",
    detail: Optional[str] = None,
    metadata: Optional[Dict[str, Any]] = None,
) -> None:
    """Persist an operational activity event; unknown types are ignored.

    The row is added to the session but not committed here.
    """
    kind = str(event_type or "unknown").strip().lower() or "unknown"
    if kind not in OPERATIONAL_ACTIVITY_EVENT_TYPES:
        return
    # Opportunistic retention cleanup (throttled inside the helper).
    prune_expired_activity_events(session, force=False)
    normalized_channel = str(channel or "dashboard").strip().lower() or "dashboard"
    normalized_detail = str(detail or "").strip() or None
    payload_json = json.dumps(metadata or {}, ensure_ascii=False) if metadata else None
    session.add(
        BotActivityEvent(
            bot_id=bot_id,
            request_id=request_id,
            event_type=kind,
            channel=normalized_channel,
            detail=normalized_detail,
            metadata_json=payload_json,
            created_at=_utcnow(),
        )
    )
|
|
|
|
|
|
def list_usage(
    session: Session,
    bot_id: Optional[str] = None,
    limit: int = 100,
    offset: int = 0,
) -> Dict[str, Any]:
    """Paginated usage listing with aggregate token summary.

    With a *bot_id* filter, the summary covers all of that bot's rows;
    without one, the summary is restricted to the last 24 hours while the
    item list and total still span everything.
    """
    safe_limit = max(1, min(int(limit), 500))
    safe_offset = max(0, int(offset or 0))
    stmt = (
        select(BotRequestUsage)
        .order_by(BotRequestUsage.started_at.desc(), BotRequestUsage.id.desc())
        .offset(safe_offset)
        .limit(safe_limit)
    )
    summary_stmt = select(
        func.count(BotRequestUsage.id),
        func.coalesce(func.sum(BotRequestUsage.input_tokens), 0),
        func.coalesce(func.sum(BotRequestUsage.output_tokens), 0),
        func.coalesce(func.sum(BotRequestUsage.total_tokens), 0),
    )
    total_stmt = select(func.count(BotRequestUsage.id))
    if bot_id:
        stmt = stmt.where(BotRequestUsage.bot_id == bot_id)
        summary_stmt = summary_stmt.where(BotRequestUsage.bot_id == bot_id)
        total_stmt = total_stmt.where(BotRequestUsage.bot_id == bot_id)
    else:
        # Platform-wide summary is intentionally a rolling 24h window.
        since = _utcnow() - timedelta(days=1)
        summary_stmt = summary_stmt.where(BotRequestUsage.created_at >= since)
    rows = session.exec(stmt).all()
    count, input_sum, output_sum, total_sum = session.exec(summary_stmt).one()
    total = int(session.exec(total_stmt).one() or 0)
    items = [
        PlatformUsageItem(
            id=int(row.id or 0),
            bot_id=row.bot_id,
            message_id=int(row.message_id) if row.message_id is not None else None,
            request_id=row.request_id,
            channel=row.channel,
            status=row.status,
            provider=row.provider,
            model=row.model,
            token_source=row.token_source,
            # "content" prefers the request preview over the response preview.
            content=row.input_text_preview or row.output_text_preview,
            input_tokens=int(row.input_tokens or 0),
            output_tokens=int(row.output_tokens or 0),
            total_tokens=int(row.total_tokens or 0),
            input_text_preview=row.input_text_preview,
            output_text_preview=row.output_text_preview,
            started_at=row.started_at.isoformat() + "Z",
            completed_at=row.completed_at.isoformat() + "Z" if row.completed_at else None,
        ).model_dump()
        for row in rows
    ]
    return PlatformUsageResponse(
        summary=PlatformUsageSummary(
            request_count=int(count or 0),
            input_tokens=int(input_sum or 0),
            output_tokens=int(output_sum or 0),
            total_tokens=int(total_sum or 0),
        ),
        items=[PlatformUsageItem.model_validate(item) for item in items],
        total=total,
        limit=safe_limit,
        offset=safe_offset,
        has_more=safe_offset + len(items) < total,
    ).model_dump()
|
|
|
|
|
|
def list_activity_events(
    session: Session,
    bot_id: Optional[str] = None,
    limit: int = 100,
) -> List[Dict[str, Any]]:
    """List the newest activity events (optionally for one bot).

    Runs a throttled prune first and commits only if rows were deleted.
    Corrupt metadata JSON degrades to an empty dict.
    """
    deleted = prune_expired_activity_events(session, force=False)
    if deleted > 0:
        session.commit()
    safe_limit = max(1, min(int(limit), 500))
    stmt = select(BotActivityEvent).order_by(BotActivityEvent.created_at.desc(), BotActivityEvent.id.desc()).limit(safe_limit)
    if bot_id:
        stmt = stmt.where(BotActivityEvent.bot_id == bot_id)
    rows = session.exec(stmt).all()
    items: List[Dict[str, Any]] = []
    for row in rows:
        try:
            metadata = json.loads(row.metadata_json or "{}")
        except Exception:
            metadata = {}
        items.append(
            PlatformActivityItem(
                id=int(row.id or 0),
                bot_id=row.bot_id,
                request_id=row.request_id,
                event_type=row.event_type,
                channel=row.channel,
                detail=row.detail,
                # Guard against stored non-dict JSON (e.g. a bare list).
                metadata=metadata if isinstance(metadata, dict) else {},
                created_at=row.created_at.isoformat() + "Z",
            ).model_dump()
        )
    return items
|
|
|
|
|
|
def build_platform_overview(session: Session, docker_manager: Any) -> Dict[str, Any]:
    """Assemble the dashboard overview: bot/image/resource summary + recents.

    *docker_manager* may be None, in which case status falls back to the
    DB column and live resource usage is reported as empty.
    """
    deleted = prune_expired_activity_events(session, force=False)
    if deleted > 0:
        session.commit()
    bots = session.exec(select(BotInstance)).all()
    images = session.exec(select(NanobotImage).order_by(NanobotImage.created_at.desc())).all()
    settings = get_platform_settings(session)

    # Aggregates accumulated across all bots.
    running = 0
    stopped = 0
    disabled = 0
    configured_cpu_total = 0.0
    configured_memory_total = 0
    configured_storage_total = 0
    workspace_used_total = 0
    workspace_limit_total = 0
    live_cpu_percent_total = 0.0
    live_memory_used_total = 0
    live_memory_limit_total = 0

    bot_rows: List[Dict[str, Any]] = []
    for bot in bots:
        enabled = bool(getattr(bot, "enabled", True))
        # Prefer live Docker status over the persisted column.
        runtime_status = docker_manager.get_bot_status(bot.id) if docker_manager else str(bot.docker_status or "STOPPED")
        resources = _read_bot_resources(bot.id)
        runtime = docker_manager.get_bot_resource_snapshot(bot.id) if docker_manager else {"usage": {}, "limits": {}, "docker_status": runtime_status}
        workspace_root = _bot_workspace_root(bot.id)
        workspace_used = _calc_dir_size_bytes(workspace_root)
        # storage_gb -> bytes; 0 means no configured limit.
        workspace_limit = int(resources["storage_gb"] or 0) * 1024 * 1024 * 1024

        configured_cpu_total += float(resources["cpu_cores"] or 0)
        configured_memory_total += int(resources["memory_mb"] or 0) * 1024 * 1024
        configured_storage_total += workspace_limit
        workspace_used_total += workspace_used
        workspace_limit_total += workspace_limit
        live_cpu_percent_total += float((runtime.get("usage") or {}).get("cpu_percent") or 0.0)
        live_memory_used_total += int((runtime.get("usage") or {}).get("memory_bytes") or 0)
        live_memory_limit_total += int((runtime.get("usage") or {}).get("memory_limit_bytes") or 0)

        # Disabled takes precedence over runtime status in the counters.
        if not enabled:
            disabled += 1
        elif runtime_status == "RUNNING":
            running += 1
        else:
            stopped += 1

        bot_rows.append(
            {
                "id": bot.id,
                "name": bot.name,
                "enabled": enabled,
                "docker_status": runtime_status,
                "image_tag": bot.image_tag,
                "llm_provider": getattr(bot, "llm_provider", None),
                "llm_model": getattr(bot, "llm_model", None),
                "current_state": bot.current_state,
                "last_action": bot.last_action,
                "resources": resources,
                "workspace_usage_bytes": workspace_used,
                "workspace_limit_bytes": workspace_limit if workspace_limit > 0 else None,
            }
        )

    usage = list_usage(session, limit=20)
    events = list_activity_events(session, limit=20)

    return {
        "summary": {
            "bots": {
                "total": len(bots),
                "running": running,
                "stopped": stopped,
                "disabled": disabled,
            },
            "images": {
                "total": len(images),
                "ready": len([row for row in images if row.status == "READY"]),
                "abnormal": len([row for row in images if row.status != "READY"]),
            },
            "resources": {
                "configured_cpu_cores": round(configured_cpu_total, 2),
                "configured_memory_bytes": configured_memory_total,
                "configured_storage_bytes": configured_storage_total,
                "live_cpu_percent": round(live_cpu_percent_total, 2),
                "live_memory_used_bytes": live_memory_used_total,
                "live_memory_limit_bytes": live_memory_limit_total,
                "workspace_used_bytes": workspace_used_total,
                "workspace_limit_bytes": workspace_limit_total,
            },
        },
        "images": [
            {
                "tag": row.tag,
                "version": row.version,
                "status": row.status,
                "source_dir": row.source_dir,
                "created_at": row.created_at.isoformat() + "Z",
            }
            for row in images
        ],
        "bots": bot_rows,
        "settings": settings.model_dump(),
        "usage": usage,
        "events": events,
    }
|