diff --git a/app.zip b/app.zip
deleted file mode 100644
index c146afb..0000000
Binary files a/app.zip and /dev/null differ
diff --git a/app/api/endpoints/meetings.py b/app/api/endpoints/meetings.py
index 0206276..6dc17ea 100644
--- a/app/api/endpoints/meetings.py
+++ b/app/api/endpoints/meetings.py
@@ -1,6 +1,5 @@
-from fastapi import APIRouter, HTTPException, UploadFile, File, Form, Depends, BackgroundTasks
-from fastapi.responses import StreamingResponse
-from app.models.models import Meeting, TranscriptSegment, TranscriptionTaskStatus, CreateMeetingRequest, UpdateMeetingRequest, SpeakerTagUpdateRequest, BatchSpeakerTagUpdateRequest, TranscriptUpdateRequest, BatchTranscriptUpdateRequest, Tag
+from fastapi import APIRouter, UploadFile, File, Form, Depends, BackgroundTasks
+from app.models.models import Meeting, TranscriptSegment, TranscriptionTaskStatus, CreateMeetingRequest, UpdateMeetingRequest, SpeakerTagUpdateRequest, BatchSpeakerTagUpdateRequest, BatchTranscriptUpdateRequest, Tag
 from app.core.database import get_db_connection
 from app.core.config import BASE_DIR, AUDIO_DIR, MARKDOWN_DIR, ALLOWED_EXTENSIONS, ALLOWED_IMAGE_EXTENSIONS
 import app.core.config as config_module
@@ -304,17 +303,6 @@ def get_audio_file(meeting_id: int, current_user: dict = Depends(get_current_use
         return create_api_response(code="404", message="Audio file not found for this meeting")
     return create_api_response(code="200", message="Audio file found", data=audio_file)
 
-@router.get("/transcription/tasks/{task_id}/status")
-def get_transcription_task_status(task_id: str, current_user: dict = Depends(get_current_user)):
-    try:
-        status_info = transcription_service.get_task_status(task_id)
-        return create_api_response(code="200", message="Task status retrieved", data=status_info)
-    except Exception as e:
-        if "Task not found" in str(e):
-            return create_api_response(code="404", message="Transcription task not found")
-        else:
-            return create_api_response(code="500", message=f"Failed to get task status: {str(e)}")
-
 @router.get("/meetings/{meeting_id}/transcription/status")
 def get_meeting_transcription_status(meeting_id: int, current_user: dict = Depends(get_current_user)):
     try:
@@ -375,7 +363,7 @@ async def upload_image(meeting_id: int, image_file: UploadFile = File(...), curr
             shutil.copyfileobj(image_file.file, buffer)
     except Exception as e:
         return create_api_response(code="500", message=f"Failed to save image: {str(e)}")
-    return create_api_response(code="200", message="Image uploaded successfully", data= {
+    return create_api_response(code="200", message="Image uploaded successfully", data={
         "file_name": image_file.filename, "file_path": '/'+ str(relative_path)
     })
 
@@ -426,44 +414,6 @@ def batch_update_transcript(meeting_id: int, request: BatchTranscriptUpdateReque
     except Exception as e:
         return create_api_response(code="500", message=f"Failed to update transcript: {str(e)}")
 
-@router.post("/meetings/{meeting_id}/generate-summary-stream")
-def generate_meeting_summary_stream(meeting_id: int, request: GenerateSummaryRequest, current_user: dict = Depends(get_current_user)):
-    try:
-        with get_db_connection() as connection:
-            cursor = connection.cursor(dictionary=True)
-            cursor.execute("SELECT meeting_id FROM meetings WHERE meeting_id = %s", (meeting_id,))
-            if not cursor.fetchone():
-                raise HTTPException(status_code=404, detail="Meeting not found")
-        def generate_stream():
-            for chunk in llm_service.generate_meeting_summary_stream(meeting_id, request.user_prompt):
-                if chunk.startswith("error:"):
-                    yield f"data: {{'error': '{chunk[6:]}'}}\n\n"
-                    break
-                else:
-                    import json
-                    yield f"data: {{'content': {json.dumps(chunk, ensure_ascii=False)}}}\n\n"
-            yield "data: {'done': true}\n\n"
-        return StreamingResponse(generate_stream(), media_type="text/plain", headers={"Cache-Control": "no-cache", "Connection": "keep-alive", "Content-Type": "text/plain; charset=utf-8"})
-    except HTTPException:
-        raise
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=f"Failed to start stream generation: {str(e)}")
-
-@router.post("/meetings/{meeting_id}/generate-summary")
-def generate_meeting_summary(meeting_id: int, request: GenerateSummaryRequest, current_user: dict = Depends(get_current_user)):
-    try:
-        with get_db_connection() as connection:
-            cursor = connection.cursor(dictionary=True)
-            cursor.execute("SELECT meeting_id FROM meetings WHERE meeting_id = %s", (meeting_id,))
-            if not cursor.fetchone():
-                return create_api_response(code="404", message="Meeting not found")
-        result = llm_service.generate_meeting_summary(meeting_id, request.user_prompt)
-        if result.get("error"):
-            return create_api_response(code="500", message=result["error"])
-        return create_api_response(code="200", message="Summary generated successfully", data=result)
-    except Exception as e:
-        return create_api_response(code="500", message=f"Failed to generate summary: {str(e)}")
-
 @router.get("/meetings/{meeting_id}/summaries")
 def get_meeting_summaries(meeting_id: int, current_user: dict = Depends(get_current_user)):
     try:
@@ -501,22 +451,12 @@ def generate_meeting_summary_async(meeting_id: int, request: GenerateSummaryRequ
                 return create_api_response(code="404", message="Meeting not found")
         task_id = async_llm_service.start_summary_generation(meeting_id, request.user_prompt)
         background_tasks.add_task(async_llm_service._process_task, task_id)
-        return create_api_response(code="200", message="Summary generation task has been accepted.", data= {
+        return create_api_response(code="200", message="Summary generation task has been accepted.", data={
             "task_id": task_id, "status": "pending", "meeting_id": meeting_id
         })
     except Exception as e:
         return create_api_response(code="500", message=f"Failed to start summary generation: {str(e)}")
 
-@router.get("/llm-tasks/{task_id}/status")
-def get_llm_task_status(task_id: str, current_user: dict = Depends(get_current_user)):
-    try:
-        status = async_llm_service.get_task_status(task_id)
-        if status.get('status') == 'not_found':
-            return create_api_response(code="404", message="Task not found")
-        return create_api_response(code="200", message="Task status retrieved", data=status)
-    except Exception as e:
-        return create_api_response(code="500", message=f"Failed to get task status: {str(e)}")
-
 @router.get("/meetings/{meeting_id}/llm-tasks")
 def get_meeting_llm_tasks(meeting_id: int, current_user: dict = Depends(get_current_user)):
     try:
@@ -526,8 +466,8 @@ def get_meeting_llm_tasks(meeting_id: int, current_user: dict = Depends(get_curr
         if not cursor.fetchone():
             return create_api_response(code="404", message="Meeting not found")
         tasks = async_llm_service.get_meeting_llm_tasks(meeting_id)
-        return create_api_response(code="200", message="LLM tasks retrieved successfully", data= {
+        return create_api_response(code="200", message="LLM tasks retrieved successfully", data={
             "tasks": tasks, "total": len(tasks)
         })
     except Exception as e:
-        return create_api_response(code="500", message=f"Failed to get LLM tasks: {str(e)}")
\ No newline at end of file
+        return create_api_response(code="500", message=f"Failed to get LLM tasks: {str(e)}")
diff --git a/app/api/endpoints/prompts.py b/app/api/endpoints/prompts.py
new file mode 100644
index 0000000..b541228
--- /dev/null
+++ b/app/api/endpoints/prompts.py
@@ -0,0 +1,98 @@
+from fastapi import APIRouter, Depends
+from pydantic import BaseModel
+from typing import List, Optional
+
+from app.core.auth import get_current_admin_user
+from app.core.database import get_db_connection
+from app.core.response import create_api_response
+
+router = APIRouter()
+
+# Pydantic Models
+class PromptIn(BaseModel):
+    name: str
+    tags: Optional[str] = ""
+    content: str
+
+class PromptOut(PromptIn):
+    id: int
+    created_at: str
+
+class PromptListResponse(BaseModel):
+    prompts: List[PromptOut]
+    total: int
+
+@router.post("/prompts")
+def create_prompt(prompt: PromptIn, current_user: dict = Depends(get_current_admin_user)):
+    """Create a new prompt."""
+    with get_db_connection() as connection:
+        cursor = connection.cursor(dictionary=True)
+        try:
+            cursor.execute(
+                "INSERT INTO prompts (name, tags, content) VALUES (%s, %s, %s)",
+                (prompt.name, prompt.tags, prompt.content)
+            )
+            connection.commit()
+            new_id = cursor.lastrowid
+            return create_api_response(code="200", message="提示词创建成功", data={"id": new_id, **prompt.dict()})
+        except Exception as e:
+            if "UNIQUE constraint failed" in str(e) or "Duplicate entry" in str(e):
+                return create_api_response(code="400", message="提示词名称已存在")
+            return create_api_response(code="500", message=f"创建提示词失败: {e}")
+
+@router.get("/prompts")
+def get_prompts(page: int = 1, size: int = 12, current_user: dict = Depends(get_current_admin_user)):
+    """Get a paginated list of prompts."""
+    with get_db_connection() as connection:
+        cursor = connection.cursor(dictionary=True)
+        cursor.execute("SELECT COUNT(*) as total FROM prompts")
+        total = cursor.fetchone()['total']
+
+        offset = (page - 1) * size
+        cursor.execute(
+            "SELECT id, name, tags, content, created_at FROM prompts ORDER BY created_at DESC LIMIT %s OFFSET %s",
+            (size, offset)
+        )
+        prompts = cursor.fetchall()
+        return create_api_response(code="200", message="获取提示词列表成功", data={"prompts": prompts, "total": total})
+
+@router.get("/prompts/{prompt_id}")
+def get_prompt(prompt_id: int, current_user: dict = Depends(get_current_admin_user)):
+    """Get a single prompt by its ID."""
+    with get_db_connection() as connection:
+        cursor = connection.cursor(dictionary=True)
+        cursor.execute("SELECT id, name, tags, content, created_at FROM prompts WHERE id = %s", (prompt_id,))
+        prompt = cursor.fetchone()
+        if not prompt:
+            return create_api_response(code="404", message="提示词不存在")
+        return create_api_response(code="200", message="获取提示词成功", data=prompt)
+
+@router.put("/prompts/{prompt_id}")
+def update_prompt(prompt_id: int, prompt: PromptIn, current_user: dict = Depends(get_current_admin_user)):
+    """Update an existing prompt."""
+    with get_db_connection() as connection:
+        cursor = connection.cursor(dictionary=True)
+        try:
+            cursor.execute(
+                "UPDATE prompts SET name = %s, tags = %s, content = %s WHERE id = %s",
+                (prompt.name, prompt.tags, prompt.content, prompt_id)
+            )
+            if cursor.rowcount == 0:
+                return create_api_response(code="404", message="提示词不存在")
+            connection.commit()
+            return create_api_response(code="200", message="提示词更新成功")
+        except Exception as e:
+            if "UNIQUE constraint failed" in str(e) or "Duplicate entry" in str(e):
+                return create_api_response(code="400", message="提示词名称已存在")
+            return create_api_response(code="500", message=f"更新提示词失败: {e}")
+
+@router.delete("/prompts/{prompt_id}")
+def delete_prompt(prompt_id: int, current_user: dict = Depends(get_current_admin_user)):
+    """Delete a prompt."""
+    with get_db_connection() as connection:
+        cursor = connection.cursor()
+        cursor.execute("DELETE FROM prompts WHERE id = %s", (prompt_id,))
+        if cursor.rowcount == 0:
+            return create_api_response(code="404", message="提示词不存在")
+        connection.commit()
+        return create_api_response(code="200", message="提示词删除成功")
diff --git a/app/api/endpoints/tasks.py b/app/api/endpoints/tasks.py
new file mode 100644
index 0000000..dd64973
--- /dev/null
+++ b/app/api/endpoints/tasks.py
@@ -0,0 +1,31 @@
+from fastapi import APIRouter, Depends
+from app.core.auth import get_current_user
+from app.core.response import create_api_response
+from app.services.async_transcription_service import AsyncTranscriptionService
+from app.services.async_llm_service import async_llm_service
+
+router = APIRouter()
+
+transcription_service = AsyncTranscriptionService()
+
+@router.get("/tasks/transcription/{task_id}/status")
+def get_transcription_task_status(task_id: str, current_user: dict = Depends(get_current_user)):
+    """获取转录任务状态"""
+    try:
+        status_info = transcription_service.get_task_status(task_id)
+        if not status_info or status_info.get('status') == 'not_found':
+            return create_api_response(code="404", message="Transcription task not found")
+        return create_api_response(code="200", message="Task status retrieved", data=status_info)
+    except Exception as e:
+        return create_api_response(code="500", message=f"Failed to get task status: {str(e)}")
+
+@router.get("/tasks/summaries/{task_id}/status")
+def get_llm_task_status(task_id: str, current_user: dict = Depends(get_current_user)):
+    """获取LLM总结任务状态(包括进度)"""
+    try:
+        status = async_llm_service.get_task_status(task_id)
+        if status.get('status') == 'not_found':
+            return create_api_response(code="404", message="Task not found")
+        return create_api_response(code="200", message="Task status retrieved", data=status)
+    except Exception as e:
+        return create_api_response(code="500", message=f"Failed to get task status: {str(e)}")
diff --git a/main.py b/main.py
index 4e8d1d6..7819ec4 100644
--- a/main.py
+++ b/main.py
@@ -2,7 +2,7 @@ import uvicorn
 from fastapi import FastAPI, Request, HTTPException
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.staticfiles import StaticFiles
-from app.api.endpoints import auth, users, meetings, tags, admin
+from app.api.endpoints import auth, users, meetings, tags, admin, tasks, prompts
 from app.core.config import UPLOAD_DIR, API_CONFIG
 from app.api.endpoints.admin import load_system_config
 import os
@@ -35,6 +35,8 @@ app.include_router(users.router, prefix="/api", tags=["Users"])
 app.include_router(meetings.router, prefix="/api", tags=["Meetings"])
 app.include_router(tags.router, prefix="/api", tags=["Tags"])
 app.include_router(admin.router, prefix="/api", tags=["Admin"])
+app.include_router(tasks.router, prefix="/api", tags=["Tasks"])
+app.include_router(prompts.router, prefix="/api", tags=["Prompts"])
 
 @app.get("/")
 def read_root():
diff --git a/stream_test.html b/test/stream_test.html
similarity index 100%
rename from stream_test.html
rename to test/stream_test.html
diff --git a/test_stream_llm.py b/test/test_stream_llm.py
similarity index 100%
rename from test_stream_llm.py
rename to test/test_stream_llm.py
diff --git a/venv.zip b/venv.zip
deleted file mode 100644
index 4114a76..0000000
Binary files a/venv.zip and /dev/null differ
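
For reference, a minimal sketch of how a client might poll the relocated task-status routes (/api/tasks/transcription/{task_id}/status and /api/tasks/summaries/{task_id}/status) after this change. The base URL, the bearer-token header, and the create_api_response envelope keys ("code", "data", "status") are assumptions inferred from this diff, not a documented client API.

    # Hypothetical polling client; BASE and the auth header are assumptions.
    import time
    import requests

    BASE = "http://localhost:8000/api"  # assumed server address

    def poll_task(path: str, headers: dict, interval: float = 2.0) -> dict:
        # Poll a /tasks/... status route until the task leaves pending/processing.
        while True:
            body = requests.get(f"{BASE}{path}", headers=headers).json()
            if body.get("code") != "200":
                return body  # "404" (unknown task) or "500" (server error)
            if body["data"].get("status") not in ("pending", "processing"):
                return body
            time.sleep(interval)

    # Example: poll_task(f"/tasks/summaries/{task_id}/status",
    #                    {"Authorization": f"Bearer {token}"})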