Implement DeepSeek chat

XIE7654
2025-07-17 14:42:40 +08:00
parent 15116d761b
commit 9b30115444
6 changed files with 149 additions and 29 deletions


@@ -1,5 +1,14 @@
import os
import asyncio
from fastapi import APIRouter, Depends, Request
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationChain
# from langchain.chat_models import ChatOpenAI
from langchain_community.chat_models import ChatOpenAI
from deps.auth import get_current_user
from services.chat_service import chat_service
@@ -11,12 +20,36 @@ class ChatRequest(BaseModel):
    prompt: str


@router.post("/")
def chat_api(data: ChatRequest, user=Depends(get_current_user)):
    # return {"msg": "pong"}
    # reply = chat_service.chat(data.prompt)
    return resp_success(data="dasds")


def get_deepseek_llm(api_key: str, model: str, openai_api_base: str):
    # DeepSeek is OpenAI-API compatible; base_url must point at its endpoint
    return ChatOpenAI(
        openai_api_key=api_key,
        model_name=model,
        streaming=True,
        openai_api_base=openai_api_base,  # DeepSeek API endpoint
    )
@router.post('/stream')
async def chat_stream(request: Request):
    body = await request.json()
    content = body.get('content')
    print(content, 'content')

    # Validate the payload before creating the LLM client
    if not content or not isinstance(content, str):
        from fastapi.responses import JSONResponse
        return JSONResponse({"error": "content must not be empty"}, status_code=400)

    model = 'deepseek-chat'
    api_key = os.getenv("DEEPSEEK_API_KEY")
    openai_api_base = "https://api.deepseek.com/v1"
    llm = get_deepseek_llm(api_key, model, openai_api_base)

    async def event_generator():
        # Stream tokens from the model and forward each one as an SSE event
        async for chunk in llm.astream(content):
            # Only forward the chunk.content text
            if hasattr(chunk, 'content'):
                yield f"data: {chunk.content}\n\n"
            else:
                yield f"data: {chunk}\n\n"
            await asyncio.sleep(0.01)

    return StreamingResponse(event_generator(), media_type='text/event-stream')
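For reference, a minimal client-side sketch (not part of this commit) that consumes the new `/stream` endpoint over SSE with `httpx`. The base URL, router prefix, and port are assumptions; adjust them to match how the router is actually mounted.

```python
import asyncio
import httpx


async def consume_stream() -> None:
    # Assumed URL: router mounted at /chat, app served on localhost:8000
    url = "http://localhost:8000/chat/stream"
    async with httpx.AsyncClient(timeout=None) as client:
        async with client.stream("POST", url, json={"content": "Hello, DeepSeek"}) as resp:
            async for line in resp.aiter_lines():
                # The endpoint emits frames of the form "data: <token>" separated by blank lines
                if line.startswith("data: "):
                    print(line[len("data: "):], end="", flush=True)


if __name__ == "__main__":
    asyncio.run(consume_stream())
```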