ai chat init

This commit is contained in:
xie7654
2025-07-12 16:54:49 +08:00
parent 62bba7d23f
commit f5314efc81
10 changed files with 390 additions and 18 deletions

26
backend/ai/chat.py Normal file
View File

@@ -0,0 +1,26 @@
from channels.generic.websocket import AsyncWebsocketConsumer
import json
from ai.langchain_client import get_ai_reply_stream
from ai.utils import get_first_available_ai_config
class ChatConsumer(AsyncWebsocketConsumer):
    """WebSocket consumer that streams AI chat replies back to the client.

    Protocol: the client sends ``{"message": "..."}``; the server streams
    ``{"is_streaming": true, "message": <chunk>}`` frames while tokens
    arrive, then a final ``{"done": true}`` frame.
    """

    async def connect(self):
        """Accept every incoming WebSocket connection (no auth performed here)."""
        await self.accept()

    async def disconnect(self, close_code):
        """No per-connection state to release."""
        pass

    async def receive(self, text_data):
        """Handle one client frame: parse it, stream the AI reply, signal completion."""
        try:
            data = json.loads(text_data)
        except json.JSONDecodeError:
            # A malformed frame previously raised and killed the consumer;
            # report the problem to the client instead.
            await self.send(text_data=json.dumps({"error": "invalid JSON"}))
            return
        user_message = data.get("message", "")
        # (model name, API key, API base URL) of the first enabled AI config.
        model, api_key, api_base = await get_first_available_ai_config()

        async def send_chunk(chunk):
            # Forward each streamed chunk to the client as it arrives.
            await self.send(text_data=json.dumps({"is_streaming": True, "message": chunk}))

        await get_ai_reply_stream(user_message, send_chunk, model_name=model, api_key=api_key, api_base=api_base)
        # End-of-stream marker so the client knows the reply is complete.
        await self.send(text_data=json.dumps({"done": True}))

View File

@@ -0,0 +1,25 @@
from langchain.schema import HumanMessage
from langchain_core.callbacks import AsyncCallbackHandler
from langchain_community.chat_models import ChatOpenAI
class MyHandler(AsyncCallbackHandler):
    """Async LangChain callback that forwards each newly generated LLM token
    to a caller-supplied coroutine."""

    def __init__(self, send_func):
        super().__init__()
        # Coroutine invoked once per streamed token.
        self.send_func = send_func

    async def on_llm_new_token(self, token: str, **kwargs):
        """Relay one freshly generated token to the registered sender."""
        await self.send_func(token)
async def get_ai_reply_stream(message: str, send_func, api_key, api_base, model_name):
    """Stream a chat completion for *message*, pushing tokens through *send_func*.

    The streaming callback is attached at construction time, so every token
    generated by the model is forwarded as soon as it arrives.
    """
    token_handler = MyHandler(send_func)
    chat = ChatOpenAI(
        openai_api_key=api_key,
        openai_api_base=api_base,
        model_name=model_name,
        temperature=0.7,
        streaming=True,
        callbacks=[token_handler],
    )
    # ainvoke drives the generation; output reaches the client via the callback.
    await chat.ainvoke([HumanMessage(content=message)])

View File

@@ -218,14 +218,12 @@ class ChatRole(CoreModel):
blank=True,
related_name="roles",
verbose_name="关联的知识库",
db_comment="关联的知识库"
)
tools = models.ManyToManyField(
'Tool',
blank=True,
related_name="roles",
verbose_name="关联的工具",
db_comment="关联的工具"
)
class Meta:

7
backend/ai/routing.py Normal file
View File

@@ -0,0 +1,7 @@
from django.urls import re_path
from ai.chat import ChatConsumer
# WebSocket URL routes for the AI app: a single chat endpoint handled by
# ChatConsumer wrapped as an ASGI application.
_chat_route = re_path(r'ws/chat/$', ChatConsumer.as_asgi())

websocket_urlpatterns = [_chat_route]

11
backend/ai/utils.py Normal file
View File

@@ -0,0 +1,11 @@
from ai.models import AIModel
from utils.models import CommonStatus
from asgiref.sync import sync_to_async
@sync_to_async
def get_first_available_ai_config():
    """Return ``(model_name, api_key, api_base_url)`` for the first enabled AI model.

    Wrapped with ``sync_to_async`` so async consumers can ``await`` the ORM query.

    Raises:
        Exception: when no enabled ``AIModel`` row exists.
    """
    # Only the first enabled model is used; add ordering/filter rules here
    # when real selection logic is needed.
    # select_related joins the related key row into the same query instead of
    # prefetch_related's separate second query.
    # NOTE(review): assumes ``key`` is a single-valued relation (FK/OneToOne),
    # consistent with the ``ai.key.api_key`` access below — confirm on AIModel.
    ai = AIModel.objects.filter(status=CommonStatus.ENABLED).select_related('key').first()
    if not ai:
        raise Exception('没有可用的AI配置')
    return ai.model, ai.key.api_key, ai.key.url