801 lines
29 KiB
Python
801 lines
29 KiB
Python
from sqlalchemy import inspect, text
|
|
from sqlmodel import SQLModel, Session, create_engine
|
|
|
|
from core.settings import (
|
|
DATABASE_ECHO,
|
|
DATABASE_ENGINE,
|
|
DATABASE_MAX_OVERFLOW,
|
|
DATABASE_POOL_RECYCLE,
|
|
DATABASE_POOL_SIZE,
|
|
DATABASE_POOL_TIMEOUT,
|
|
DATABASE_URL,
|
|
)
|
|
|
|
# Ensure table models are registered in SQLModel metadata before create_all.
|
|
from models import bot as _bot_models # noqa: F401
|
|
from models import platform as _platform_models # noqa: F401
|
|
from models import topic as _topic_models # noqa: F401
|
|
|
|
# Build engine keyword arguments per dialect: SQLite needs the same-thread
# check relaxed for multi-threaded web access; server databases get a
# health-checked, bounded connection pool.
if DATABASE_ENGINE == "sqlite":
    _engine_kwargs = {
        "echo": DATABASE_ECHO,
        "connect_args": {"check_same_thread": False},
    }
else:
    _engine_kwargs = {
        "echo": DATABASE_ECHO,
        "pool_pre_ping": True,
        "pool_size": DATABASE_POOL_SIZE,
        "max_overflow": DATABASE_MAX_OVERFLOW,
        "pool_timeout": DATABASE_POOL_TIMEOUT,
        "pool_recycle": DATABASE_POOL_RECYCLE,
    }

# Single module-wide engine shared by all migration helpers and sessions.
engine = create_engine(DATABASE_URL, **_engine_kwargs)
|
|
|
|
# Canonical (current) table names referenced by the migration helpers below.
BOT_INSTANCE_TABLE = "bot_instance"
BOT_MESSAGE_TABLE = "bot_message"
BOT_IMAGE_TABLE = "bot_image"
BOT_REQUEST_USAGE_TABLE = "bot_request_usage"
BOT_ACTIVITY_EVENT_TABLE = "bot_activity_event"
SYS_SETTING_TABLE = "sys_setting"
# Advisory-lock identifiers used to serialize schema migration across
# processes (PostgreSQL uses a numeric key, MySQL a named lock).
POSTGRES_MIGRATION_LOCK_KEY = 2026031801
MYSQL_MIGRATION_LOCK_NAME = "dashboard_nanobot_schema_migration"
# (legacy_name, current_name) pairs driving rename/copy/drop of pre-rename tables.
LEGACY_TABLE_PAIRS = [
    ("botinstance", BOT_INSTANCE_TABLE),
    ("botmessage", BOT_MESSAGE_TABLE),
    ("nanobotimage", BOT_IMAGE_TABLE),
    ("platformsetting", SYS_SETTING_TABLE),
    ("botrequestusage", BOT_REQUEST_USAGE_TABLE),
    ("botactivityevent", BOT_ACTIVITY_EVENT_TABLE),
]
|
|
|
|
|
|
def _quote_ident(name: str) -> str:
    """Quote *name* as a SQL identifier for the active dialect.

    MySQL identifiers use backticks (embedded backticks doubled); every
    other dialect gets standard double-quote quoting with embedded double
    quotes doubled.
    """
    ident = str(name)
    if engine.dialect.name == "mysql":
        escaped = ident.replace("`", "``")
        return f"`{escaped}`"
    escaped = ident.replace('"', '""')
    return f'"{escaped}"'
|
|
|
|
|
|
def _rename_table_if_needed(old_name: str, new_name: str) -> None:
    """Rename *old_name* to *new_name* when the rename is still pending.

    No-op when the legacy table is absent or the target already exists
    (row backfill for that case is handled by _copy_legacy_table_rows).
    """
    inspector = inspect(engine)
    if not inspector.has_table(old_name) or inspector.has_table(new_name):
        return
    with engine.connect() as conn:
        # Use the shared quoting helper (consistent with the other migration
        # helpers) instead of hand-rolled, unescaped quoting.
        old_sql = _quote_ident(old_name)
        new_sql = _quote_ident(new_name)
        if engine.dialect.name == "mysql":
            conn.execute(text(f"RENAME TABLE {old_sql} TO {new_sql}"))
        else:
            conn.execute(text(f"ALTER TABLE {old_sql} RENAME TO {new_sql}"))
        conn.commit()
|
|
|
|
|
|
def _rename_legacy_tables() -> None:
    """Apply every pending legacy-table rename.

    Drives off LEGACY_TABLE_PAIRS (same pairs, same order as the previous
    hard-coded calls) so the rename list cannot drift from the copy/drop
    helpers that share the constant.
    """
    for old_name, new_name in LEGACY_TABLE_PAIRS:
        _rename_table_if_needed(old_name, new_name)
|
|
|
|
|
|
def _acquire_migration_lock():
    """Take a cross-process schema-migration lock for the active dialect.

    Returns the connection holding the lock (to be passed to
    _release_migration_lock), or None for dialects without advisory locks
    (e.g. SQLite). Raises RuntimeError when the MySQL named lock cannot be
    acquired within the timeout.
    """
    dialect = engine.dialect.name
    if dialect == "postgresql":
        conn = engine.connect()
        # Blocks until the advisory lock is granted.
        conn.execute(text("SELECT pg_advisory_lock(:key)"), {"key": POSTGRES_MIGRATION_LOCK_KEY})
        return conn
    if dialect == "mysql":
        conn = engine.connect()
        result = conn.execute(
            text("SELECT GET_LOCK(:name, :timeout)"),
            {"name": MYSQL_MIGRATION_LOCK_NAME, "timeout": 120},
        ).scalar()
        # GET_LOCK returns 1 on success, 0 on timeout, NULL on error.
        if int(result or 0) != 1:
            conn.close()
            raise RuntimeError("Failed to acquire schema migration lock")
        return conn
    return None
|
|
|
|
|
|
def _release_migration_lock(lock_conn) -> None:
    """Release the advisory lock held by *lock_conn* and close it.

    Passing None (no lock was taken, e.g. SQLite) is a no-op. The
    connection is closed even when the unlock statement fails.
    """
    if lock_conn is None:
        return
    dialect = engine.dialect.name
    try:
        if dialect == "postgresql":
            lock_conn.execute(
                text("SELECT pg_advisory_unlock(:key)"),
                {"key": POSTGRES_MIGRATION_LOCK_KEY},
            )
        elif dialect == "mysql":
            lock_conn.execute(
                text("SELECT RELEASE_LOCK(:name)"),
                {"name": MYSQL_MIGRATION_LOCK_NAME},
            )
    finally:
        lock_conn.close()
|
|
|
|
|
|
def _table_row_count(table_name: str) -> int:
    """Return COUNT(*) for *table_name*, or 0 when the table does not exist."""
    if not inspect(engine).has_table(table_name):
        return 0
    query = text(f"SELECT COUNT(*) FROM {_quote_ident(table_name)}")
    with engine.connect() as conn:
        count = conn.execute(query).scalar()
    return int(count or 0)
|
|
|
|
|
|
def _copy_legacy_table_rows(old_name: str, new_name: str) -> None:
    """Backfill rows from legacy table *old_name* into *new_name*.

    Copies only columns present in both tables, and only rows whose primary
    key is not already present in the new table (anti-join on the new
    table's PK), so the copy is idempotent. No-op when either table is
    missing, the legacy table is empty, no columns are shared, or no usable
    PK columns exist.
    """
    inspector = inspect(engine)
    if not inspector.has_table(old_name) or not inspector.has_table(new_name):
        return
    if _table_row_count(old_name) <= 0:
        return

    # Column names on each side; new_columns keeps the new table's order so
    # the INSERT column list matches the SELECT list positionally.
    old_columns = {
        str(row.get("name"))
        for row in inspector.get_columns(old_name)
        if row.get("name")
    }
    new_columns = [
        str(row.get("name"))
        for row in inspector.get_columns(new_name)
        if row.get("name")
    ]
    shared_columns = [col for col in new_columns if col in old_columns]
    if not shared_columns:
        return
    # PK columns of the new table that also exist on the old one; needed for
    # the duplicate-detecting anti-join below.
    pk = inspector.get_pk_constraint(new_name) or {}
    pk_columns = [
        str(col)
        for col in (pk.get("constrained_columns") or [])
        if col and col in shared_columns and col in old_columns
    ]
    if not pk_columns:
        return

    columns_sql = ", ".join(_quote_ident(col) for col in shared_columns)
    join_sql = " AND ".join(
        f'n.{_quote_ident(col)} = o.{_quote_ident(col)}'
        for col in pk_columns
    )
    # Any single PK column being NULL on the joined side means "no match".
    null_check_col = _quote_ident(pk_columns[0])
    with engine.connect() as conn:
        # INSERT ... SELECT with LEFT JOIN anti-join: copy only legacy rows
        # whose PK has no counterpart in the new table.
        conn.execute(
            text(
                f"INSERT INTO {_quote_ident(new_name)} ({columns_sql}) "
                f"SELECT {', '.join(f'o.{_quote_ident(col)}' for col in shared_columns)} "
                f"FROM {_quote_ident(old_name)} o "
                f"LEFT JOIN {_quote_ident(new_name)} n ON {join_sql} "
                f"WHERE n.{null_check_col} IS NULL"
            )
        )
        conn.commit()
|
|
|
|
|
|
def _migrate_legacy_table_rows() -> None:
    """Backfill rows from every legacy table into its renamed counterpart."""
    for legacy_name, current_name in LEGACY_TABLE_PAIRS:
        _copy_legacy_table_rows(legacy_name, current_name)
|
|
|
|
|
|
def _topic_fk_target(table_name: str, constrained_column: str = "bot_id") -> str | None:
    """Return the table referenced by the FK on *constrained_column*.

    Looks for a foreign key on *table_name* whose constrained columns are
    exactly [constrained_column]; returns the referred table name, or None
    when the table or such an FK does not exist.
    """
    inspector = inspect(engine)
    if not inspector.has_table(table_name):
        return None
    for fk in inspector.get_foreign_keys(table_name):
        constrained = [str(c) for c in (fk.get("constrained_columns") or []) if c]
        if constrained != [constrained_column]:
            continue
        referred = fk.get("referred_table")
        return str(referred) if referred else None
    return None
|
|
|
|
|
|
def _repair_postgres_topic_foreign_keys() -> None:
    """Repoint topic tables' bot_id FKs at bot_instance (PostgreSQL only).

    After the legacy botinstance -> bot_instance rename, FKs created against
    the old table name must be dropped and recreated so they reference the
    renamed table, with ON DELETE CASCADE.
    """
    if engine.dialect.name != "postgresql":
        return
    # table -> expected FK constraint name for its bot_id column.
    targets = {
        "topic_topic": "topic_topic_bot_id_fkey",
        "topic_item": "topic_item_bot_id_fkey",
    }
    with engine.connect() as conn:
        changed = False
        for table_name, constraint_name in targets.items():
            # Skip tables whose bot_id FK already points at bot_instance
            # (also skips missing tables: _topic_fk_target returns None).
            if _topic_fk_target(table_name) == BOT_INSTANCE_TABLE:
                continue
            conn.execute(
                text(
                    f'ALTER TABLE {_quote_ident(table_name)} '
                    f'DROP CONSTRAINT IF EXISTS {_quote_ident(constraint_name)}'
                )
            )
            conn.execute(
                text(
                    f'ALTER TABLE {_quote_ident(table_name)} '
                    f'ADD CONSTRAINT {_quote_ident(constraint_name)} '
                    f'FOREIGN KEY ({_quote_ident("bot_id")}) '
                    f'REFERENCES {_quote_ident(BOT_INSTANCE_TABLE)}({_quote_ident("id")}) '
                    f'ON DELETE CASCADE'
                )
            )
            changed = True
        # Commit only when at least one constraint was rebuilt.
        if changed:
            conn.commit()
|
|
|
|
|
|
def _legacy_rows_missing_in_new(old_name: str, new_name: str) -> int:
    """Count legacy rows in *old_name* not yet present in *new_name*.

    Matches rows by the new table's primary key via a LEFT JOIN anti-join
    (mirroring _copy_legacy_table_rows). Returns 0 when either table is
    missing; when the new table has no PK, every legacy row is treated as
    unmigrated.
    """
    inspector = inspect(engine)
    if not inspector.has_table(old_name) or not inspector.has_table(new_name):
        return 0
    pk = inspector.get_pk_constraint(new_name) or {}
    pk_columns = [
        str(col)
        for col in (pk.get("constrained_columns") or [])
        if col
    ]
    if not pk_columns:
        # No PK to match on: conservatively report all legacy rows as missing.
        return _table_row_count(old_name)
    join_sql = " AND ".join(
        f'n.{_quote_ident(col)} = o.{_quote_ident(col)}'
        for col in pk_columns
    )
    # NULL on the joined side for the first PK column marks an unmatched row.
    null_check_col = _quote_ident(pk_columns[0])
    with engine.connect() as conn:
        value = conn.execute(
            text(
                f'SELECT COUNT(*) FROM {_quote_ident(old_name)} o '
                f'LEFT JOIN {_quote_ident(new_name)} n ON {join_sql} '
                f'WHERE n.{null_check_col} IS NULL'
            )
        ).scalar()
        return int(value or 0)
|
|
|
|
|
|
def _drop_legacy_tables() -> None:
    """Drop legacy tables whose rows have all been migrated to the new tables."""
    droppable = []
    for legacy_name, current_name in LEGACY_TABLE_PAIRS:
        # Only drop a legacy table once nothing in it is missing from the
        # renamed table (missing tables also report 0).
        if _legacy_rows_missing_in_new(legacy_name, current_name) <= 0:
            droppable.append(legacy_name)
    if not droppable:
        return
    # PostgreSQL needs CASCADE to take dependent objects down with the table.
    suffix = " CASCADE" if engine.dialect.name == "postgresql" else ""
    with engine.connect() as conn:
        for legacy_name in droppable:
            conn.execute(text(f'DROP TABLE IF EXISTS {_quote_ident(legacy_name)}{suffix}'))
        conn.commit()
|
|
|
|
|
|
def _ensure_botinstance_columns() -> None:
    """Add newer bot_instance columns that create_all cannot add in place.

    Each required column maps dialect -> ADD COLUMN type/default DDL; the
    sqlite entry doubles as the fallback for unknown dialects. Also
    backfills NULL `enabled` values to the truthy default.
    """
    dialect = engine.dialect.name
    # column name -> per-dialect DDL fragment used in ALTER TABLE ... ADD COLUMN.
    required_columns = {
        "current_state": {
            "sqlite": "TEXT DEFAULT 'IDLE'",
            "postgresql": "TEXT DEFAULT 'IDLE'",
            "mysql": "VARCHAR(64) DEFAULT 'IDLE'",
        },
        "last_action": {
            "sqlite": "TEXT",
            "postgresql": "TEXT",
            "mysql": "LONGTEXT",
        },
        "image_tag": {
            "sqlite": "TEXT DEFAULT 'nanobot-base:v0.1.4'",
            "postgresql": "TEXT DEFAULT 'nanobot-base:v0.1.4'",
            "mysql": "VARCHAR(255) DEFAULT 'nanobot-base:v0.1.4'",
        },
        "access_password": {
            "sqlite": "TEXT DEFAULT ''",
            "postgresql": "TEXT DEFAULT ''",
            "mysql": "VARCHAR(255) DEFAULT ''",
        },
        "enabled": {
            "sqlite": "INTEGER NOT NULL DEFAULT 1",
            "postgresql": "BOOLEAN NOT NULL DEFAULT TRUE",
            "mysql": "BOOLEAN NOT NULL DEFAULT TRUE",
        },
    }

    inspector = inspect(engine)
    if not inspector.has_table(BOT_INSTANCE_TABLE):
        return
    with engine.connect() as conn:
        existing = {
            str(row.get("name"))
            for row in inspect(conn).get_columns(BOT_INSTANCE_TABLE)
            if row.get("name")
        }
        for col, ddl_map in required_columns.items():
            if col in existing:
                continue
            # Fall back to the sqlite DDL for dialects not listed in the map.
            ddl = ddl_map.get(dialect) or ddl_map.get("sqlite")
            conn.execute(text(f"ALTER TABLE {BOT_INSTANCE_TABLE} ADD COLUMN {col} {ddl}"))
        # Normalize pre-existing NULL enabled values to the default (true).
        if "enabled" in existing:
            if dialect == "sqlite":
                conn.execute(text(f"UPDATE {BOT_INSTANCE_TABLE} SET enabled = 1 WHERE enabled IS NULL"))
            else:
                conn.execute(text(f"UPDATE {BOT_INSTANCE_TABLE} SET enabled = TRUE WHERE enabled IS NULL"))
        conn.commit()
|
|
|
|
|
|
def _drop_legacy_botinstance_columns() -> None:
    """Drop per-bot configuration columns that no longer live on bot_instance.

    Best-effort: each DROP COLUMN is attempted independently and failures
    are swallowed so startup stays resilient on mixed/legacy database
    engines.
    """
    legacy_columns = [
        "avatar_model",
        "avatar_skin",
        "system_prompt",
        "soul_md",
        "agents_md",
        "user_md",
        "tools_md",
        "tools_config_json",
        "identity_md",
        "llm_provider",
        "llm_model",
        "api_key",
        "api_base",
        "temperature",
        "top_p",
        "max_tokens",
        "presence_penalty",
        "frequency_penalty",
        "send_progress",
        "send_tool_hints",
        "bot_env_json",
    ]
    # Guard against a missing table (consistent with _ensure_botinstance_columns);
    # inspecting a nonexistent table would raise before any work is done.
    if not inspect(engine).has_table(BOT_INSTANCE_TABLE):
        return
    with engine.connect() as conn:
        existing = {
            str(col.get("name"))
            for col in inspect(conn).get_columns(BOT_INSTANCE_TABLE)
            if col.get("name")
        }
        for col in legacy_columns:
            if col not in existing:
                continue
            try:
                if engine.dialect.name == "mysql":
                    conn.execute(text(f"ALTER TABLE {BOT_INSTANCE_TABLE} DROP COLUMN `{col}`"))
                elif engine.dialect.name == "sqlite":
                    conn.execute(text(f'ALTER TABLE {BOT_INSTANCE_TABLE} DROP COLUMN "{col}"'))
                else:
                    conn.execute(text(f'ALTER TABLE {BOT_INSTANCE_TABLE} DROP COLUMN IF EXISTS "{col}"'))
            except Exception:
                # Keep startup resilient on mixed/legacy database engines.
                continue
        conn.commit()
|
|
|
|
|
|
def _ensure_botmessage_columns() -> None:
    """Add newer bot_message columns in place on SQLite.

    Other dialects are covered by full table creation; SQLite upgrades via
    PRAGMA table_info + ADD COLUMN.
    """
    if engine.dialect.name != "sqlite":
        return
    # Guard against a missing table: PRAGMA table_info on a nonexistent
    # table returns no rows, and the ALTERs below would then fail.
    if not inspect(engine).has_table(BOT_MESSAGE_TABLE):
        return
    # column name -> SQLite column type for ALTER TABLE ... ADD COLUMN.
    required_columns = {
        "media_json": "TEXT",
        "feedback": "TEXT",
        "feedback_at": "DATETIME",
    }
    with engine.connect() as conn:
        existing_rows = conn.execute(text(f"PRAGMA table_info({BOT_MESSAGE_TABLE})")).fetchall()
        # PRAGMA table_info rows: (cid, name, type, notnull, dflt_value, pk).
        existing = {str(row[1]) for row in existing_rows}
        for col, ddl in required_columns.items():
            if col in existing:
                continue
            conn.execute(text(f"ALTER TABLE {BOT_MESSAGE_TABLE} ADD COLUMN {col} {ddl}"))
        conn.commit()
|
|
|
|
|
|
def _drop_legacy_skill_tables() -> None:
    """Drop deprecated skill registry tables (moved to workspace filesystem mode)."""
    obsolete = ("botskillmapping", "skillregistry")
    with engine.connect() as conn:
        for table_name in obsolete:
            conn.execute(text(f"DROP TABLE IF EXISTS {table_name}"))
        conn.commit()
|
|
|
|
|
|
def _ensure_sys_setting_columns() -> None:
    """Add newer sys_setting columns that create_all cannot add in place.

    Each required column maps dialect -> ADD COLUMN DDL; the sqlite entry
    doubles as the fallback for unknown dialects. No-op when the table does
    not exist yet.
    """
    dialect = engine.dialect.name
    # column name -> per-dialect DDL fragment used in ALTER TABLE ... ADD COLUMN.
    required_columns = {
        "name": {
            "sqlite": "TEXT NOT NULL DEFAULT ''",
            "postgresql": "TEXT NOT NULL DEFAULT ''",
            "mysql": "VARCHAR(200) NOT NULL DEFAULT ''",
        },
        "category": {
            "sqlite": "TEXT NOT NULL DEFAULT 'general'",
            "postgresql": "TEXT NOT NULL DEFAULT 'general'",
            "mysql": "VARCHAR(64) NOT NULL DEFAULT 'general'",
        },
        "description": {
            "sqlite": "TEXT NOT NULL DEFAULT ''",
            "postgresql": "TEXT NOT NULL DEFAULT ''",
            "mysql": "LONGTEXT",
        },
        "value_type": {
            "sqlite": "TEXT NOT NULL DEFAULT 'json'",
            "postgresql": "TEXT NOT NULL DEFAULT 'json'",
            "mysql": "VARCHAR(32) NOT NULL DEFAULT 'json'",
        },
        "is_public": {
            "sqlite": "INTEGER NOT NULL DEFAULT 0",
            "postgresql": "BOOLEAN NOT NULL DEFAULT FALSE",
            "mysql": "BOOLEAN NOT NULL DEFAULT FALSE",
        },
        "sort_order": {
            "sqlite": "INTEGER NOT NULL DEFAULT 100",
            "postgresql": "INTEGER NOT NULL DEFAULT 100",
            "mysql": "INTEGER NOT NULL DEFAULT 100",
        },
    }
    inspector = inspect(engine)
    if not inspector.has_table(SYS_SETTING_TABLE):
        return
    with engine.connect() as conn:
        existing = {
            str(row.get("name"))
            for row in inspect(conn).get_columns(SYS_SETTING_TABLE)
            if row.get("name")
        }
        for col, ddl_map in required_columns.items():
            if col in existing:
                continue
            # Fall back to the sqlite DDL for dialects not listed in the map.
            ddl = ddl_map.get(dialect) or ddl_map.get("sqlite")
            conn.execute(text(f"ALTER TABLE {SYS_SETTING_TABLE} ADD COLUMN {col} {ddl}"))
        conn.commit()
|
|
|
|
|
|
def _ensure_bot_request_usage_columns() -> None:
    """Add newer bot_request_usage columns that create_all cannot add in place.

    Each required column maps dialect -> ADD COLUMN DDL; the sqlite entry
    doubles as the fallback for unknown dialects. No-op when the table does
    not exist yet.
    """
    dialect = engine.dialect.name
    # column name -> per-dialect DDL fragment used in ALTER TABLE ... ADD COLUMN.
    required_columns = {
        "message_id": {
            "sqlite": "INTEGER",
            "postgresql": "INTEGER",
            "mysql": "INTEGER",
        },
        "provider": {
            "sqlite": "TEXT",
            "postgresql": "TEXT",
            "mysql": "VARCHAR(120)",
        },
        "model": {
            "sqlite": "TEXT",
            "postgresql": "TEXT",
            "mysql": "VARCHAR(255)",
        },
    }
    inspector = inspect(engine)
    if not inspector.has_table(BOT_REQUEST_USAGE_TABLE):
        return
    with engine.connect() as conn:
        existing = {
            str(row.get("name"))
            for row in inspect(conn).get_columns(BOT_REQUEST_USAGE_TABLE)
            if row.get("name")
        }
        for col, ddl_map in required_columns.items():
            if col in existing:
                continue
            # Fall back to the sqlite DDL for dialects not listed in the map.
            ddl = ddl_map.get(dialect) or ddl_map.get("sqlite")
            conn.execute(text(f"ALTER TABLE {BOT_REQUEST_USAGE_TABLE} ADD COLUMN {col} {ddl}"))
        conn.commit()
|
|
|
|
|
|
def _ensure_topic_tables_sqlite() -> None:
    """Create the topic_topic / topic_item tables and their indexes on SQLite.

    Uses CREATE TABLE IF NOT EXISTS / CREATE INDEX IF NOT EXISTS so the call
    is idempotent. Other dialects are covered elsewhere (_ensure_topic_columns
    / _ensure_topic_indexes).
    """
    if engine.dialect.name != "sqlite":
        return
    with engine.connect() as conn:
        # Topic definitions: one row per (bot_id, topic_key).
        conn.execute(
            text(
                """
                CREATE TABLE IF NOT EXISTS topic_topic (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    bot_id TEXT NOT NULL,
                    topic_key TEXT NOT NULL,
                    name TEXT NOT NULL DEFAULT '',
                    description TEXT NOT NULL DEFAULT '',
                    is_active INTEGER NOT NULL DEFAULT 1,
                    is_default_fallback INTEGER NOT NULL DEFAULT 0,
                    routing_json TEXT NOT NULL DEFAULT '{}',
                    view_schema_json TEXT NOT NULL DEFAULT '{}',
                    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                    updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY(bot_id) REFERENCES bot_instance(id)
                )
                """
            )
        )
        # Topic feed items attached to a bot's topic.
        conn.execute(
            text(
                """
                CREATE TABLE IF NOT EXISTS topic_item (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    bot_id TEXT NOT NULL,
                    topic_key TEXT NOT NULL,
                    title TEXT NOT NULL DEFAULT '',
                    content TEXT NOT NULL DEFAULT '',
                    level TEXT NOT NULL DEFAULT 'info',
                    tags_json TEXT,
                    view_json TEXT,
                    source TEXT NOT NULL DEFAULT 'mcp',
                    dedupe_key TEXT,
                    is_read INTEGER NOT NULL DEFAULT 0,
                    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY(bot_id) REFERENCES bot_instance(id)
                )
                """
            )
        )

        # Lookup indexes mirroring the list in _ensure_topic_indexes.
        conn.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS uq_topic_topic_bot_topic_key ON topic_topic(bot_id, topic_key)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_topic_bot_id ON topic_topic(bot_id)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_topic_topic_key ON topic_topic(topic_key)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_topic_bot_fallback ON topic_topic(bot_id, is_default_fallback)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_bot_id ON topic_item(bot_id)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_topic_key ON topic_item(topic_key)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_level ON topic_item(level)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_source ON topic_item(source)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_is_read ON topic_item(is_read)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_created_at ON topic_item(created_at)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_bot_topic_created_at ON topic_item(bot_id, topic_key, created_at)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS idx_topic_item_bot_dedupe ON topic_item(bot_id, dedupe_key)"))
        conn.commit()
|
|
|
|
|
|
def _ensure_topic_columns() -> None:
    """Add newer topic_topic / topic_item columns that create_all cannot add.

    Outer map: table name -> {column name -> per-dialect ADD COLUMN DDL};
    the sqlite entry doubles as the fallback for unknown dialects. Missing
    tables are skipped.
    """
    dialect = engine.dialect.name
    required_columns = {
        "topic_topic": {
            "name": {
                "sqlite": "TEXT NOT NULL DEFAULT ''",
                "postgresql": "TEXT NOT NULL DEFAULT ''",
                "mysql": "VARCHAR(255) NOT NULL DEFAULT ''",
            },
            "description": {
                "sqlite": "TEXT NOT NULL DEFAULT ''",
                "postgresql": "TEXT NOT NULL DEFAULT ''",
                "mysql": "LONGTEXT",
            },
            "is_active": {
                "sqlite": "INTEGER NOT NULL DEFAULT 1",
                "postgresql": "BOOLEAN NOT NULL DEFAULT TRUE",
                "mysql": "BOOLEAN NOT NULL DEFAULT TRUE",
            },
            "is_default_fallback": {
                "sqlite": "INTEGER NOT NULL DEFAULT 0",
                "postgresql": "BOOLEAN NOT NULL DEFAULT FALSE",
                "mysql": "BOOLEAN NOT NULL DEFAULT FALSE",
            },
            "routing_json": {
                "sqlite": "TEXT NOT NULL DEFAULT '{}'",
                "postgresql": "TEXT NOT NULL DEFAULT '{}'",
                "mysql": "LONGTEXT",
            },
            "view_schema_json": {
                "sqlite": "TEXT NOT NULL DEFAULT '{}'",
                "postgresql": "TEXT NOT NULL DEFAULT '{}'",
                "mysql": "LONGTEXT",
            },
            "created_at": {
                "sqlite": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
            },
            "updated_at": {
                "sqlite": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
            },
        },
        "topic_item": {
            "title": {
                "sqlite": "TEXT NOT NULL DEFAULT ''",
                "postgresql": "TEXT NOT NULL DEFAULT ''",
                "mysql": "VARCHAR(2000) NOT NULL DEFAULT ''",
            },
            "level": {
                "sqlite": "TEXT NOT NULL DEFAULT 'info'",
                "postgresql": "TEXT NOT NULL DEFAULT 'info'",
                "mysql": "VARCHAR(32) NOT NULL DEFAULT 'info'",
            },
            "tags_json": {
                "sqlite": "TEXT",
                "postgresql": "TEXT",
                "mysql": "LONGTEXT",
            },
            "view_json": {
                "sqlite": "TEXT",
                "postgresql": "TEXT",
                "mysql": "LONGTEXT",
            },
            "source": {
                "sqlite": "TEXT NOT NULL DEFAULT 'mcp'",
                "postgresql": "TEXT NOT NULL DEFAULT 'mcp'",
                "mysql": "VARCHAR(64) NOT NULL DEFAULT 'mcp'",
            },
            "dedupe_key": {
                "sqlite": "TEXT",
                "postgresql": "TEXT",
                "mysql": "VARCHAR(200)",
            },
            "is_read": {
                "sqlite": "INTEGER NOT NULL DEFAULT 0",
                "postgresql": "BOOLEAN NOT NULL DEFAULT FALSE",
                "mysql": "BOOLEAN NOT NULL DEFAULT FALSE",
            },
            "created_at": {
                "sqlite": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
                "mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
            },
        },
    }

    inspector = inspect(engine)
    with engine.connect() as conn:
        for table_name, cols in required_columns.items():
            if not inspector.has_table(table_name):
                continue
            existing = {
                str(row.get("name"))
                for row in inspector.get_columns(table_name)
                if row.get("name")
            }
            for col, ddl_map in cols.items():
                if col in existing:
                    continue
                # Fall back to the sqlite DDL for dialects not listed in the map.
                ddl = ddl_map.get(dialect) or ddl_map.get("sqlite")
                conn.execute(text(f"ALTER TABLE {table_name} ADD COLUMN {col} {ddl}"))
        conn.commit()
|
|
|
|
|
|
def _ensure_topic_indexes() -> None:
    """Create missing topic_topic / topic_item indexes on any dialect.

    Each entry is (index_name, table, columns, unique). Index names already
    present — either as an index or as a unique constraint — are skipped, so
    the call is idempotent. Missing tables are skipped entirely.
    """
    required_indexes = [
        ("uq_topic_topic_bot_topic_key", "topic_topic", ["bot_id", "topic_key"], True),
        ("idx_topic_topic_bot_id", "topic_topic", ["bot_id"], False),
        ("idx_topic_topic_topic_key", "topic_topic", ["topic_key"], False),
        ("idx_topic_topic_bot_fallback", "topic_topic", ["bot_id", "is_default_fallback"], False),
        ("idx_topic_item_bot_id", "topic_item", ["bot_id"], False),
        ("idx_topic_item_topic_key", "topic_item", ["topic_key"], False),
        ("idx_topic_item_level", "topic_item", ["level"], False),
        ("idx_topic_item_source", "topic_item", ["source"], False),
        ("idx_topic_item_is_read", "topic_item", ["is_read"], False),
        ("idx_topic_item_created_at", "topic_item", ["created_at"], False),
        ("idx_topic_item_bot_topic_created_at", "topic_item", ["bot_id", "topic_key", "created_at"], False),
        ("idx_topic_item_bot_dedupe", "topic_item", ["bot_id", "dedupe_key"], False),
    ]
    inspector = inspect(engine)
    with engine.connect() as conn:
        for name, table_name, columns, unique in required_indexes:
            if not inspector.has_table(table_name):
                continue
            existing = {
                str(item.get("name"))
                for item in inspector.get_indexes(table_name)
                if item.get("name")
            }
            # Some backends report the unique index as a unique constraint.
            existing.update(
                str(item.get("name"))
                for item in inspector.get_unique_constraints(table_name)
                if item.get("name")
            )
            if name in existing:
                continue
            unique_sql = "UNIQUE " if unique else ""
            cols_sql = ", ".join(columns)
            conn.execute(text(f"CREATE {unique_sql}INDEX {name} ON {table_name} ({cols_sql})"))
        conn.commit()
|
|
|
|
|
|
def _drop_obsolete_topic_tables() -> None:
    """Drop the obsolete topic_bot_settings table from an earlier topic design."""
    with engine.connect() as conn:
        # _quote_ident emits backticks on MySQL and double quotes elsewhere,
        # collapsing the previous per-dialect branches (the postgresql and
        # fallback branches were byte-identical anyway).
        conn.execute(text(f"DROP TABLE IF EXISTS {_quote_ident('topic_bot_settings')}"))
        conn.commit()
|
|
|
|
|
|
def _cleanup_legacy_default_topics() -> None:
    """
    Remove legacy auto-created fallback topic rows from early topic-feed design.

    Historical rows look like:
    - topic_key = inbox
    - name = Inbox
    - description = Default topic for uncategorized items
    - routing_json contains "Fallback topic"

    Matching is case-insensitive on every field; each matched topic is
    deleted together with all of its topic_item rows, and the whole cleanup
    is committed once at the end.
    """
    with engine.connect() as conn:
        # Find candidate legacy fallback topics by their exact historical shape.
        legacy_rows = conn.execute(
            text(
                """
                SELECT bot_id, topic_key
                FROM topic_topic
                WHERE lower(coalesce(topic_key, '')) = 'inbox'
                  AND lower(coalesce(name, '')) = 'inbox'
                  AND lower(coalesce(description, '')) = 'default topic for uncategorized items'
                  AND lower(coalesce(routing_json, '')) LIKE '%fallback topic%'
                """
            )
        ).fetchall()
        if not legacy_rows:
            return
        for row in legacy_rows:
            bot_id = str(row[0] or "").strip()
            topic_key = str(row[1] or "").strip().lower()
            # Skip malformed rows rather than issuing overly-broad deletes.
            if not bot_id or not topic_key:
                continue
            # Delete the topic's items first, then the topic row itself.
            conn.execute(
                text(
                    """
                    DELETE FROM topic_item
                    WHERE bot_id = :bot_id AND lower(coalesce(topic_key, '')) = :topic_key
                    """
                ),
                {"bot_id": bot_id, "topic_key": topic_key},
            )
            conn.execute(
                text(
                    """
                    DELETE FROM topic_topic
                    WHERE bot_id = :bot_id AND lower(coalesce(topic_key, '')) = :topic_key
                    """
                ),
                {"bot_id": bot_id, "topic_key": topic_key},
            )
        conn.commit()
|
|
|
|
|
|
def align_postgres_sequences() -> None:
    """Resync PostgreSQL id sequences with the current MAX(id) of each table.

    Needed after rows were bulk-copied from legacy tables with explicit ids,
    which leaves the serial sequences behind the data. No-op on other
    dialects or when a table has no serial sequence.
    """
    if engine.dialect.name != "postgresql":
        return
    # (table, serial column) pairs whose sequences may be stale.
    sequence_targets = [
        (BOT_MESSAGE_TABLE, "id"),
        (BOT_REQUEST_USAGE_TABLE, "id"),
        (BOT_ACTIVITY_EVENT_TABLE, "id"),
    ]
    with engine.connect() as conn:
        for table_name, column_name in sequence_targets:
            seq_name = conn.execute(
                text("SELECT pg_get_serial_sequence(:table_name, :column_name)"),
                {"table_name": table_name, "column_name": column_name},
            ).scalar()
            if not seq_name:
                continue
            max_id = conn.execute(
                text(f'SELECT COALESCE(MAX("{column_name}"), 0) FROM "{table_name}"')
            ).scalar()
            max_id = int(max_id or 0)
            # setval with is_called=False on an empty table makes the next
            # nextval() return 1; otherwise the next value is max_id + 1.
            conn.execute(
                text("SELECT setval(:seq_name, :next_value, :is_called)"),
                {
                    "seq_name": seq_name,
                    "next_value": max_id if max_id > 0 else 1,
                    "is_called": max_id > 0,
                },
            )
        conn.commit()
|
|
|
|
|
|
def init_database() -> None:
    """Create and migrate the schema under a cross-process advisory lock.

    Step order matters: legacy tables are renamed before create_all so
    existing data is adopted; row backfill and column/index repairs run
    after create_all; legacy tables are dropped only once their rows are
    confirmed migrated; sequences are realigned last. The lock (PostgreSQL
    advisory lock / MySQL named lock, none on SQLite) is always released.
    """
    lock_conn = _acquire_migration_lock()
    try:
        _rename_legacy_tables()
        SQLModel.metadata.create_all(engine)
        _migrate_legacy_table_rows()
        _drop_legacy_skill_tables()
        _ensure_sys_setting_columns()
        _ensure_bot_request_usage_columns()
        _ensure_botinstance_columns()
        _drop_legacy_botinstance_columns()
        _ensure_botmessage_columns()
        _ensure_topic_tables_sqlite()
        _repair_postgres_topic_foreign_keys()
        _ensure_topic_columns()
        _ensure_topic_indexes()
        _drop_obsolete_topic_tables()
        _cleanup_legacy_default_topics()
        _drop_legacy_tables()
        align_postgres_sequences()
    finally:
        _release_migration_lock(lock_conn)
|
|
|
|
|
|
def get_session():
    """Dependency-style generator yielding a Session bound to the module engine.

    The session is closed automatically when the consumer finishes
    (including on error), via the context manager.
    """
    with Session(engine) as session:
        yield session
|