From 91aeeca9c8fd6caf33f4d26a9573fe7c6deaa24e Mon Sep 17 00:00:00 2001
From: "mula.liu"
Date: Thu, 25 Sep 2025 11:48:02 +0800
Subject: [PATCH] Optimized parts of the code
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .DS_Store                     | Bin 8196 -> 8196 bytes
 app.zip                       | Bin 43320 -> 44904 bytes
 app/api/endpoints/admin.py    | 145 +++++++++++++++++++++++++++
 app/api/endpoints/meetings.py |  70 ++++++++++---
 app/api/endpoints/users.py    |   6 +-
 app/core/auth.py              |  13 +++
 app/core/config.py            |   3 +-
 app/services/llm_service.py   | 182 ++++++++++++++++++++++------------
 config/system_config.json     |   7 ++
 docker-compose.prod.yml       |   3 +-
 main.py                       |   7 +-
 stream_test.html              |   1 +
 test_stream_llm.py            |  54 ++++++++++
 13 files changed, 410 insertions(+), 81 deletions(-)
 create mode 100644 app/api/endpoints/admin.py
 create mode 100644 config/system_config.json
 create mode 100644 stream_test.html
 create mode 100644 test_stream_llm.py

diff --git a/.DS_Store b/.DS_Store
index bb896eeeb5aadfd5113bfdb4f73c7fe90e324d64..932e278b883b22682d019e31d57bc3940a8ecbd9 100644
Binary files a/.DS_Store and b/.DS_Store differ
diff --git a/app.zip b/app.zip
Binary files a/app.zip and b/app.zip differ
diff --git a/app/api/endpoints/admin.py b/app/api/endpoints/admin.py
new file mode 100644
index 0000000..287df3a
--- /dev/null
+++ b/app/api/endpoints/admin.py
@@ -0,0 +1,145 @@
+from fastapi import APIRouter, HTTPException, Depends
+from app.core.auth import get_current_admin_user
+from app.core.config import LLM_CONFIG, DEFAULT_RESET_PASSWORD, MAX_FILE_SIZE, MAX_IMAGE_SIZE
+from pydantic import BaseModel
+import os
+import json
+from pathlib import Path
+
+router = APIRouter()
+
+# 配置文件路径
+CONFIG_FILE = Path(__file__).parent.parent.parent.parent / "config" / "system_config.json"
+
+class SystemConfigModel(BaseModel):
+    model_name: str
+    system_prompt: str
+    DEFAULT_RESET_PASSWORD: str
+    MAX_FILE_SIZE: int  # 字节为单位
+    MAX_IMAGE_SIZE: int  # 字节为单位
+
+class SystemConfigResponse(BaseModel):
+    model_name: str
+    system_prompt: str
+    DEFAULT_RESET_PASSWORD: str
+    MAX_FILE_SIZE: int
+    MAX_IMAGE_SIZE: int
+    message: str = ""
+
+def load_config_from_file():
+    """从文件加载配置,如果文件不存在则返回默认配置"""
+    try:
+        if CONFIG_FILE.exists():
+            with open(CONFIG_FILE, 'r', encoding='utf-8') as f:
+                return json.load(f)
+    except Exception:
+        pass
+
+    # 返回默认配置
+    return {
+        'model_name': LLM_CONFIG['model_name'],
+        'system_prompt': LLM_CONFIG['system_prompt'],
+        'DEFAULT_RESET_PASSWORD': DEFAULT_RESET_PASSWORD,
+        'MAX_FILE_SIZE': MAX_FILE_SIZE,
+        'MAX_IMAGE_SIZE': MAX_IMAGE_SIZE
+    }
+
+def save_config_to_file(config_data):
+    """将配置保存到文件"""
+    try:
+        # 确保配置目录存在
+        CONFIG_FILE.parent.mkdir(parents=True, exist_ok=True)
+
+        with open(CONFIG_FILE, 'w', encoding='utf-8') as f:
+            json.dump(config_data, f, ensure_ascii=False, indent=2)
+        return True
+    except Exception as e:
+        print(f"保存配置文件失败: {e}")
+        return False
+
+@router.get("/admin/system-config", response_model=SystemConfigResponse)
+async def get_system_config(current_user=Depends(get_current_admin_user)):
+    """
+    获取系统配置
+    只有管理员才能访问
+    """
+    try:
+        # 优先从文件加载配置,然后从内存配置补充
+        config = load_config_from_file()
+
+        return SystemConfigResponse(
+            model_name=config.get('model_name', LLM_CONFIG['model_name']),
+            system_prompt=config.get('system_prompt', LLM_CONFIG['system_prompt']),
+            DEFAULT_RESET_PASSWORD=config.get('DEFAULT_RESET_PASSWORD', DEFAULT_RESET_PASSWORD),
+            MAX_FILE_SIZE=config.get('MAX_FILE_SIZE', MAX_FILE_SIZE),
+            MAX_IMAGE_SIZE=config.get('MAX_IMAGE_SIZE', MAX_IMAGE_SIZE),
+            message="配置获取成功"
+        )
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"获取配置失败: {str(e)}")
+
+@router.put("/admin/system-config",
response_model=SystemConfigResponse) +async def update_system_config( + config: SystemConfigModel, + current_user=Depends(get_current_admin_user) +): + """ + 更新系统配置 + 只有管理员才能访问 + """ + try: + # 准备要保存的配置数据 + config_data = { + 'model_name': config.model_name, + 'system_prompt': config.system_prompt, + 'DEFAULT_RESET_PASSWORD': config.DEFAULT_RESET_PASSWORD, + 'MAX_FILE_SIZE': config.MAX_FILE_SIZE, + 'MAX_IMAGE_SIZE': config.MAX_IMAGE_SIZE + } + + # 保存到文件 + if not save_config_to_file(config_data): + raise HTTPException(status_code=500, detail="配置保存到文件失败") + + # 更新运行时配置 + LLM_CONFIG['model_name'] = config.model_name + LLM_CONFIG['system_prompt'] = config.system_prompt + + # 更新模块级别的配置 + import app.core.config as config_module + config_module.DEFAULT_RESET_PASSWORD = config.DEFAULT_RESET_PASSWORD + config_module.MAX_FILE_SIZE = config.MAX_FILE_SIZE + config_module.MAX_IMAGE_SIZE = config.MAX_IMAGE_SIZE + + return SystemConfigResponse( + model_name=config.model_name, + system_prompt=config.system_prompt, + DEFAULT_RESET_PASSWORD=config.DEFAULT_RESET_PASSWORD, + MAX_FILE_SIZE=config.MAX_FILE_SIZE, + MAX_IMAGE_SIZE=config.MAX_IMAGE_SIZE, + message="配置更新成功,重启服务后完全生效" + ) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"更新配置失败: {str(e)}") + +# 在应用启动时加载配置 +def load_system_config(): + """在应用启动时调用,加载保存的配置""" + try: + config = load_config_from_file() + + # 更新运行时配置 + LLM_CONFIG['model_name'] = config.get('model_name', LLM_CONFIG['model_name']) + LLM_CONFIG['system_prompt'] = config.get('system_prompt', LLM_CONFIG['system_prompt']) + + # 更新其他配置 + import app.core.config as config_module + config_module.DEFAULT_RESET_PASSWORD = config.get('DEFAULT_RESET_PASSWORD', DEFAULT_RESET_PASSWORD) + config_module.MAX_FILE_SIZE = config.get('MAX_FILE_SIZE', MAX_FILE_SIZE) + config_module.MAX_IMAGE_SIZE = config.get('MAX_IMAGE_SIZE', MAX_IMAGE_SIZE) + + print(f"系统配置加载成功: model={config.get('model_name')}") + except Exception as e: + print(f"加载系统配置失败,使用默认配置: {e}") \ No newline at end of file diff --git a/app/api/endpoints/meetings.py b/app/api/endpoints/meetings.py index a6c507e..ad2d378 100644 --- a/app/api/endpoints/meetings.py +++ b/app/api/endpoints/meetings.py @@ -1,13 +1,14 @@ from fastapi import APIRouter, HTTPException, UploadFile, File, Form, Depends, BackgroundTasks +from fastapi.responses import StreamingResponse from app.models.models import Meeting, TranscriptSegment, TranscriptionTaskStatus, CreateMeetingRequest, UpdateMeetingRequest, SpeakerTagUpdateRequest, BatchSpeakerTagUpdateRequest, TranscriptUpdateRequest, BatchTranscriptUpdateRequest, Tag from app.core.database import get_db_connection -from app.core.config import BASE_DIR, UPLOAD_DIR, AUDIO_DIR, MARKDOWN_DIR, ALLOWED_EXTENSIONS, ALLOWED_IMAGE_EXTENSIONS, MAX_FILE_SIZE, MAX_IMAGE_SIZE -from app.services.qiniu_service import qiniu_service +from app.core.config import BASE_DIR, AUDIO_DIR, MARKDOWN_DIR, ALLOWED_EXTENSIONS, ALLOWED_IMAGE_EXTENSIONS +import app.core.config as config_module from app.services.llm_service import LLMService from app.services.async_transcription_service import AsyncTranscriptionService from app.services.async_llm_service import async_llm_service -from app.core.auth import get_current_user, get_optional_current_user +from app.core.auth import get_current_user from typing import List, Optional from datetime import datetime from pydantic import BaseModel @@ -398,11 +399,13 @@ async def upload_audio( detail=f"Unsupported file type. 
Allowed types: {', '.join(ALLOWED_EXTENSIONS)}" ) - # Check file size - if audio_file.size > MAX_FILE_SIZE: + # Check file size using dynamic config + max_file_size = getattr(config_module, 'MAX_FILE_SIZE', 100 * 1024 * 1024) + if audio_file.size > max_file_size: + max_size_mb = max_file_size // (1024 * 1024) raise HTTPException( status_code=400, - detail="File size exceeds 100MB limit" + detail=f"File size exceeds {max_size_mb}MB limit" ) # 检查是否已有音频文件和转录记录 @@ -646,11 +649,13 @@ async def upload_image( detail=f"Unsupported image type. Allowed types: {', '.join(ALLOWED_IMAGE_EXTENSIONS)}" ) - # Check file size - if image_file.size > MAX_IMAGE_SIZE: + # Check file size using dynamic config + max_image_size = getattr(config_module, 'MAX_IMAGE_SIZE', 10 * 1024 * 1024) + if image_file.size > max_image_size: + max_size_mb = max_image_size // (1024 * 1024) raise HTTPException( status_code=400, - detail="Image size exceeds 10MB limit" + detail=f"Image size exceeds {max_size_mb}MB limit" ) # Check if meeting exists and user has permission @@ -767,6 +772,47 @@ def batch_update_transcript(meeting_id: int, request: BatchTranscriptUpdateReque raise HTTPException(status_code=500, detail=f"Failed to update transcript: {str(e)}") # AI总结相关接口 +@router.post("/meetings/{meeting_id}/generate-summary-stream") +def generate_meeting_summary_stream(meeting_id: int, request: GenerateSummaryRequest, current_user: dict = Depends(get_current_user)): + """生成会议AI总结(流式输出)""" + try: + # 检查会议是否存在 + with get_db_connection() as connection: + cursor = connection.cursor(dictionary=True) + cursor.execute("SELECT meeting_id FROM meetings WHERE meeting_id = %s", (meeting_id,)) + if not cursor.fetchone(): + raise HTTPException(status_code=404, detail="Meeting not found") + + # 创建流式生成器 + def generate_stream(): + for chunk in llm_service.generate_meeting_summary_stream(meeting_id, request.user_prompt): + if chunk.startswith("error:"): + # 如果遇到错误,发送错误信息并结束 + yield f"data: {{\"error\": \"{chunk[6:]}\"}}\n\n" + break + else: + # 发送正常的内容块 + import json + yield f"data: {{\"content\": {json.dumps(chunk, ensure_ascii=False)}}}\n\n" + + # 发送结束标记 + yield "data: {\"done\": true}\n\n" + + return StreamingResponse( + generate_stream(), + media_type="text/plain", + headers={ + "Cache-Control": "no-cache", + "Connection": "keep-alive", + "Content-Type": "text/plain; charset=utf-8" + } + ) + + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to start stream generation: {str(e)}") + @router.post("/meetings/{meeting_id}/generate-summary") def generate_meeting_summary(meeting_id: int, request: GenerateSummaryRequest, current_user: dict = Depends(get_current_user)): """生成会议AI总结""" @@ -887,12 +933,12 @@ def get_llm_task_status(task_id: str, current_user: dict = Depends(get_current_u """获取LLM任务状态(包括进度)""" try: status = async_llm_service.get_task_status(task_id) - + if status.get('status') == 'not_found': raise HTTPException(status_code=404, detail="Task not found") - + return status - + except HTTPException: raise except Exception as e: diff --git a/app/api/endpoints/users.py b/app/api/endpoints/users.py index 051c725..c752036 100644 --- a/app/api/endpoints/users.py +++ b/app/api/endpoints/users.py @@ -3,7 +3,7 @@ from fastapi import APIRouter, HTTPException, Depends from app.models.models import UserInfo, PasswordChangeRequest, UserListResponse, CreateUserRequest, UpdateUserRequest, RoleInfo from app.core.database import get_db_connection from app.core.auth import get_current_user -from 
app.core.config import DEFAULT_RESET_PASSWORD +import app.core.config as config_module import hashlib import datetime import re @@ -48,7 +48,7 @@ def create_user(request: CreateUserRequest, current_user: dict = Depends(get_cur raise HTTPException(status_code=400, detail="用户名已存在") # Use provided password or default password - password = request.password if request.password else DEFAULT_RESET_PASSWORD + password = request.password if request.password else config_module.DEFAULT_RESET_PASSWORD hashed_password = hash_password(password) # Insert new user @@ -150,7 +150,7 @@ def reset_password(user_id: int, current_user: dict = Depends(get_current_user)) raise HTTPException(status_code=404, detail="用户不存在") # Hash password - hashed_password = hash_password(DEFAULT_RESET_PASSWORD) + hashed_password = hash_password(config_module.DEFAULT_RESET_PASSWORD) # Update user password query = "UPDATE users SET password_hash = %s WHERE user_id = %s" diff --git a/app/core/auth.py b/app/core/auth.py index 833e79e..cdd84ca 100644 --- a/app/core/auth.py +++ b/app/core/auth.py @@ -38,6 +38,19 @@ def get_current_user(credentials: HTTPAuthorizationCredentials = Depends(securit return user +def get_current_admin_user(credentials: HTTPAuthorizationCredentials = Depends(security)): + """获取当前管理员用户信息的依赖函数""" + user = get_current_user(credentials) + + # 检查用户是否是管理员 (role_id = 1) + if user.get('role_id') != 1: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Admin access required", + ) + + return user + def get_optional_current_user(request: Request) -> Optional[dict]: """可选的用户认证(不强制要求登录)""" auth_header = request.headers.get("Authorization") diff --git a/app/core/config.py b/app/core/config.py index c68e9ea..ffff929 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -60,8 +60,7 @@ QWEN_API_KEY = os.getenv('QWEN_API_KEY', 'sk-c2bf06ea56b4491ea3d1e37fdb472b8f') # LLM配置 - 阿里Qwen3大模型 LLM_CONFIG = { 'model_name': os.getenv('LLM_MODEL_NAME', 'qwen-plus'), - 'api_url': os.getenv('LLM_API_URL', 'https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation'), - 'max_tokens': int(os.getenv('LLM_MAX_TOKENS', '2000')), + 'time_out': int(os.getenv('LLM_TIMEOUT', '120')), 'temperature': float(os.getenv('LLM_TEMPERATURE', '0.7')), 'top_p': float(os.getenv('LLM_TOP_P', '0.9')), 'system_prompt': """你是一个专业的会议记录分析助手。请根据提供的会议转录内容,生成简洁明了的会议总结。 diff --git a/app/services/llm_service.py b/app/services/llm_service.py index 08132a3..e1c2e59 100644 --- a/app/services/llm_service.py +++ b/app/services/llm_service.py @@ -1,28 +1,87 @@ import json -import requests -from typing import Optional, Dict, List -from app.core.config import LLM_CONFIG, QWEN_API_KEY +import dashscope +from http import HTTPStatus +from typing import Optional, Dict, List, Generator +import app.core.config as config_module from app.core.database import get_db_connection class LLMService: def __init__(self): - self.api_key = QWEN_API_KEY - self.model_name = LLM_CONFIG["model_name"] - self.api_url = LLM_CONFIG["api_url"] - self.system_prompt = LLM_CONFIG["system_prompt"] - self.max_tokens = LLM_CONFIG["max_tokens"] - self.temperature = LLM_CONFIG["temperature"] - self.top_p = LLM_CONFIG["top_p"] + # 设置dashscope API key + dashscope.api_key = config_module.QWEN_API_KEY + + @property + def model_name(self): + """动态获取模型名称""" + return config_module.LLM_CONFIG["model_name"] + + @property + def system_prompt(self): + """动态获取系统提示词""" + return config_module.LLM_CONFIG["system_prompt"] + + @property + def time_out(self): + """动态获取超时时间""" + 
return config_module.LLM_CONFIG["time_out"] + + @property + def temperature(self): + """动态获取temperature""" + return config_module.LLM_CONFIG["temperature"] + + @property + def top_p(self): + """动态获取top_p""" + return config_module.LLM_CONFIG["top_p"] - def generate_meeting_summary(self, meeting_id: int, user_prompt: str = "") -> Optional[Dict]: + def generate_meeting_summary_stream(self, meeting_id: int, user_prompt: str = "") -> Generator[str, None, None]: """ - 生成会议总结 - + 流式生成会议总结 + Args: meeting_id: 会议ID user_prompt: 用户额外提示词 - + + Yields: + str: 流式输出的内容片段 + """ + try: + # 获取会议转录内容 + transcript_text = self._get_meeting_transcript(meeting_id) + if not transcript_text: + yield "error: 无法获取会议转录内容" + return + + # 构建完整提示词 + full_prompt = self._build_prompt(transcript_text, user_prompt) + + # 调用大模型API进行流式生成 + full_content = "" + for chunk in self._call_llm_api_stream(full_prompt): + if chunk.startswith("error:"): + yield chunk + return + full_content += chunk + yield chunk + + # 保存完整总结到数据库 + if full_content: + self._save_summary_to_db(meeting_id, full_content, user_prompt) + + except Exception as e: + print(f"流式生成会议总结错误: {e}") + yield f"error: {str(e)}" + + def generate_meeting_summary(self, meeting_id: int, user_prompt: str = "") -> Optional[Dict]: + """ + 生成会议总结(非流式,保持向后兼容) + + Args: + meeting_id: 会议ID + user_prompt: 用户额外提示词 + Returns: 包含总结内容的字典,如果失败返回None """ @@ -31,13 +90,13 @@ class LLMService: transcript_text = self._get_meeting_transcript(meeting_id) if not transcript_text: return {"error": "无法获取会议转录内容"} - + # 构建完整提示词 full_prompt = self._build_prompt(transcript_text, user_prompt) - + # 调用大模型API response = self._call_llm_api(full_prompt) - + if response: # 保存总结到数据库 summary_id = self._save_summary_to_db(meeting_id, response, user_prompt) @@ -48,7 +107,7 @@ class LLMService: } else: return {"error": "大模型API调用失败"} - + except Exception as e: print(f"生成会议总结错误: {e}") return {"error": str(e)} @@ -95,52 +154,53 @@ class LLMService: return prompt - def _call_llm_api(self, prompt: str) -> Optional[str]: - """调用阿里Qwen3大模型API""" - headers = { - "Authorization": f"Bearer {self.api_key}", - "Content-Type": "application/json" - } - - data = { - "model": self.model_name, - "input": { - "messages": [ - { - "role": "user", - "content": prompt - } - ] - }, - "parameters": { - "max_tokens": self.max_tokens, - "temperature": self.temperature, - "top_p": self.top_p, - "incremental_output": False - } - } - + def _call_llm_api_stream(self, prompt: str) -> Generator[str, None, None]: + """流式调用阿里Qwen3大模型API""" try: - response = requests.post(self.api_url, headers=headers, json=data, timeout=60) - response.raise_for_status() - - result = response.json() - - # 处理阿里Qwen API的响应格式 - if result.get("output") and result["output"].get("text"): - return result["output"]["text"] - elif result.get("output") and result["output"].get("choices"): - return result["output"]["choices"][0]["message"]["content"] + responses = dashscope.Generation.call( + model=self.model_name, + prompt=prompt, + stream=True, + timeout=self.time_out, + temperature=self.temperature, + top_p=self.top_p, + incremental_output=True # 开启增量输出模式 + ) + + for response in responses: + if response.status_code == HTTPStatus.OK: + # 增量输出内容 + new_content = response.output.get('text', '') + if new_content: + yield new_content + else: + error_msg = f"Request failed with status code: {response.status_code}, Error: {response.message}" + print(error_msg) + yield f"error: {error_msg}" + break + + except Exception as e: + error_msg = f"流式调用大模型API错误: {e}" + print(error_msg) 
+ yield f"error: {error_msg}" + + def _call_llm_api(self, prompt: str) -> Optional[str]: + """调用阿里Qwen3大模型API(非流式,保持向后兼容)""" + try: + response = dashscope.Generation.call( + model=self.model_name, + prompt=prompt, + timeout=self.time_out, + temperature=self.temperature, + top_p=self.top_p + ) + + if response.status_code == HTTPStatus.OK: + return response.output.get('text', '') else: - print(f"API响应格式错误: {result}") + print(f"API调用失败: {response.status_code}, {response.message}") return None - - except requests.exceptions.RequestException as e: - print(f"API请求错误: {e}") - return None - except json.JSONDecodeError as e: - print(f"JSON解析错误: {e}") - return None + except Exception as e: print(f"调用大模型API错误: {e}") return None diff --git a/config/system_config.json b/config/system_config.json new file mode 100644 index 0000000..ccfc771 --- /dev/null +++ b/config/system_config.json @@ -0,0 +1,7 @@ +{ + "model_name": "qwen-plus", + "system_prompt": "你是一个专业的会议记录分析助手。请根据提供的会议转录内容,生成简洁明了的会议总结。\n\n总结应该包括以下几部分(生成MD二级目录):\n1. 会议概述 - 简要说明会议的主要目的和背景(生成MD引用)\n2. 主要讨论点 - 列出会议中讨论的重要话题和内容\n3. 决策事项 - 明确记录会议中做出的决定和结论\n4. 待办事项 - 列出需要后续跟进的任务和责任人\n5. 关键信息 - 其他重要的信息点\n\n输出要求:\n- 保持客观中性,不添加个人观点\n- 使用简洁、准确的中文表达\n- 按重要性排序各项内容\n- 如果某个部分没有相关内容,可以说明\"无相关内容\"\n- 总字数控制在500字以内", + "DEFAULT_RESET_PASSWORD": "123456", + "MAX_FILE_SIZE": 209715200, + "MAX_IMAGE_SIZE": 10485760 +} \ No newline at end of file diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml index fe9fbaf..f76e7f7 100644 --- a/docker-compose.prod.yml +++ b/docker-compose.prod.yml @@ -39,8 +39,7 @@ services: # LLM配置 - QWEN_API_KEY=sk-c2bf06ea56b4491ea3d1e37fdb472b8f - LLM_MODEL_NAME=qwen-plus - - LLM_API_URL=https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation - - LLM_MAX_TOKENS=2000 + - LLM_TIMEOUT=120 - LLM_TEMPERATURE=0.7 - LLM_TOP_P=0.9 diff --git a/main.py b/main.py index 8ef5fc9..e873a5e 100644 --- a/main.py +++ b/main.py @@ -2,9 +2,10 @@ import uvicorn from fastapi import FastAPI, Request, HTTPException from fastapi.middleware.cors import CORSMiddleware from fastapi.staticfiles import StaticFiles -from app.api.endpoints import auth, users, meetings, tags +from app.api.endpoints import auth, users, meetings, tags, admin from app.core.config import UPLOAD_DIR, API_CONFIG, MAX_FILE_SIZE from app.services.async_llm_service import async_llm_service +from app.api.endpoints.admin import load_system_config import os app = FastAPI( @@ -13,6 +14,9 @@ app = FastAPI( version="1.0.2" ) +# 加载系统配置 +load_system_config() + # 添加CORS中间件 app.add_middleware( CORSMiddleware, @@ -31,6 +35,7 @@ app.include_router(auth.router, prefix="/api", tags=["Authentication"]) app.include_router(users.router, prefix="/api", tags=["Users"]) app.include_router(meetings.router, prefix="/api", tags=["Meetings"]) app.include_router(tags.router, prefix="/api", tags=["Tags"]) +app.include_router(admin.router, prefix="/api", tags=["Admin"]) @app.get("/") def read_root(): diff --git a/stream_test.html b/stream_test.html new file mode 100644 index 0000000..588d60d --- /dev/null +++ b/stream_test.html @@ -0,0 +1 @@ +\n\n\n 流式LLM测试\n \n \n\n\n
[The remainder of the single-line stream_test.html markup was stripped in this patch view. What survives is the page title "流式LLM测试", a "🤖 流式LLM总结测试" heading, and a "点击按钮开始测试..." output placeholder for the stand-alone streaming test page; the rest of its HTML/JavaScript is not reproduced here.]
\ No newline at end of file
diff --git a/test_stream_llm.py b/test_stream_llm.py
new file mode 100644
index 0000000..98165e0
--- /dev/null
+++ b/test_stream_llm.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+"""
+测试流式LLM服务
+"""
+import sys
+import os
+sys.path.append(os.path.dirname(__file__))
+
+from app.services.llm_service import LLMService
+
+def test_stream_generation():
+    """测试流式生成功能"""
+    print("=== 测试流式LLM生成 ===")
+
+    llm_service = LLMService()
+    test_meeting_id = 38  # 使用一个存在的会议ID
+    test_user_prompt = "请重点关注决策事项和待办任务"
+
+    print(f"开始为会议 {test_meeting_id} 生成流式总结...")
+    print("输出内容:")
+    print("-" * 50)
+
+    full_content = ""
+    chunk_count = 0
+
+    try:
+        for chunk in llm_service.generate_meeting_summary_stream(test_meeting_id, test_user_prompt):
+            if chunk.startswith("error:"):
+                print(f"\n生成过程中出现错误: {chunk}")
+                break
+            else:
+                print(chunk, end='', flush=True)
+                full_content += chunk
+                chunk_count += 1
+
+        print("\n\n" + "-" * 50)
+        print("流式生成完成!")
+        print(f"总共接收到 {chunk_count} 个数据块")
+        print(f"完整内容长度: {len(full_content)} 字符")
+
+        # 测试传统方式(对比)
+        print("\n=== 对比测试传统生成方式 ===")
+        result = llm_service.generate_meeting_summary(test_meeting_id, test_user_prompt)
+        if result.get("error"):
+            print(f"传统方式生成失败: {result['error']}")
+        else:
+            print("传统方式生成成功!")
+            print(f"内容长度: {len(result['content'])} 字符")
+
+    except Exception as e:
+        print(f"\n测试过程中出现异常: {e}")
+
+if __name__ == '__main__':
+    test_stream_generation()
\ No newline at end of file
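
A quick way to exercise the new POST /api/meetings/{meeting_id}/generate-summary-stream route outside of stream_test.html is a small client along the following lines. This is only a sketch: BASE_URL and TOKEN are assumed placeholders (a reachable deployment and a valid Bearer token from the existing auth endpoints), not values defined by this patch; MEETING_ID reuses the test id from test_stream_llm.py.

import json
import requests

BASE_URL = "http://localhost:8000"  # assumed service address, not part of this patch
TOKEN = "<login token>"             # assumed Bearer token from the existing auth endpoints
MEETING_ID = 38                     # same test meeting id as test_stream_llm.py

resp = requests.post(
    f"{BASE_URL}/api/meetings/{MEETING_ID}/generate-summary-stream",
    json={"user_prompt": "请重点关注决策事项和待办任务"},
    headers={"Authorization": f"Bearer {TOKEN}"},
    stream=True,
    timeout=300,
)
resp.raise_for_status()

chunks = []
for line in resp.iter_lines(decode_unicode=True):
    # The endpoint emits SSE-style lines:
    #   data: {"content": "..."} for each increment,
    #   data: {"error": "..."} on failure, and data: {"done": true} at the end.
    if not line or not line.startswith("data: "):
        continue
    payload = json.loads(line[len("data: "):])
    if payload.get("error"):
        raise RuntimeError(payload["error"])
    if payload.get("done"):
        break
    chunks.append(payload.get("content", ""))
    print(payload.get("content", ""), end="", flush=True)

print("\n" + "-" * 50)
print(f"received {len(chunks)} chunks, {len(''.join(chunks))} characters in total")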
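
The new admin route round-trips the same five fields stored in config/system_config.json. A minimal sketch of reading the current values and writing one change back, assuming ADMIN_TOKEN belongs to a role_id == 1 user and BASE_URL is again a placeholder:

import requests

BASE_URL = "http://localhost:8000"   # assumed service address, not part of this patch
ADMIN_TOKEN = "<admin login token>"  # assumed Bearer token for a role_id == 1 user
headers = {"Authorization": f"Bearer {ADMIN_TOKEN}"}

# Read the current configuration, adjust one field, and write it back.
current = requests.get(f"{BASE_URL}/api/admin/system-config", headers=headers, timeout=30).json()
payload = {key: current[key] for key in (
    "model_name", "system_prompt", "DEFAULT_RESET_PASSWORD", "MAX_FILE_SIZE", "MAX_IMAGE_SIZE"
)}
payload["MAX_FILE_SIZE"] = 200 * 1024 * 1024  # 209715200 bytes, the value shipped in config/system_config.json

resp = requests.put(f"{BASE_URL}/api/admin/system-config", json=payload, headers=headers, timeout=30)
resp.raise_for_status()
print(resp.json()["message"])  # expected: "配置更新成功,重启服务后完全生效"

The PUT persists the values to config/system_config.json and updates the in-memory config module, so the property-based reads in llm_service.py and the getattr lookups in meetings.py and users.py pick most of them up without a restart, which is what the response message hedges with "重启服务后完全生效".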