实现deepseek 对话

This commit is contained in:
XIE7654
2025-07-17 14:42:40 +08:00
parent 15116d761b
commit 9b30115444
6 changed files with 149 additions and 29 deletions

View File

@@ -1,5 +1,14 @@
import asyncio
import os

from fastapi import APIRouter, Depends, Request
from fastapi.responses import JSONResponse, StreamingResponse
from pydantic import BaseModel

from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationChain
# from langchain.chat_models import ChatOpenAI
from langchain_community.chat_models import ChatOpenAI

from deps.auth import get_current_user
from services.chat_service import chat_service
@@ -11,12 +20,36 @@ class ChatRequest(BaseModel):
prompt: str
@router.post("/")
def chat_api(data: ChatRequest, user=Depends(get_current_user)):
    """Placeholder chat endpoint.

    Currently returns a fixed payload; the real reply logic lives in the
    streaming endpoint below.
    """
    return resp_success(data="dasds")
def get_deepseek_llm(api_key: str, model: str, openai_api_base: str):
    """Build a streaming chat client for an OpenAI-compatible API.

    DeepSeek exposes an OpenAI-compatible API, so we reuse ``ChatOpenAI``
    and only override the base URL.

    :param api_key: DeepSeek API key.
    :param model: Model name, e.g. ``deepseek-chat``.
    :param openai_api_base: Base URL of the DeepSeek API.
    :return: A ``ChatOpenAI`` instance with streaming enabled.
    """
    llm = ChatOpenAI(
        openai_api_key=api_key,
        model_name=model,
        streaming=True,
        openai_api_base=openai_api_base,  # DeepSeek API endpoint
    )
    return llm
# reply = chat_service.chat(data.prompt)
# return {"msg": "pong"}
@router.post('/stream')
async def chat_stream(request: Request):
    """Stream a DeepSeek chat completion to the client as server-sent events.

    Expects a JSON body ``{"content": "<user prompt>"}`` and returns a
    ``text/event-stream`` response where each model chunk is emitted as a
    ``data: ...`` event.
    """
    body = await request.json()
    content = body.get('content')
    # Validate input before doing any work (the original built the LLM
    # client and logged first, then validated).
    if not content or not isinstance(content, str):
        return JSONResponse({"error": "content不能为空"}, status_code=400)

    api_key = os.getenv("DEEPSEEK_API_KEY")
    if not api_key:
        # Fail fast on missing configuration instead of erroring mid-stream.
        return JSONResponse(
            {"error": "DEEPSEEK_API_KEY is not configured"}, status_code=500
        )

    model = 'deepseek-chat'
    openai_api_base = "https://api.deepseek.com/v1"
    llm = get_deepseek_llm(api_key, model, openai_api_base)

    async def event_generator():
        # Relay model output chunk-by-chunk in SSE format.
        async for chunk in llm.astream(content):
            # AIMessageChunk carries text in .content; fall back to the raw
            # chunk for plain-string outputs.
            text = chunk.content if hasattr(chunk, 'content') else chunk
            yield f"data: {text}\n\n"
            # Brief pause keeps the event loop responsive under load.
            await asyncio.sleep(0.01)

    return StreamingResponse(event_generator(), media_type='text/event-stream')

View File

@@ -0,0 +1,47 @@
import { useAccessStore } from '@vben/stores';
import { formatToken } from '#/utils/auth';
/** Parameters accepted by {@link fetchAIStream}. */
export interface FetchAIStreamParams {
  /** User prompt to send to the chat streaming endpoint. */
  content: string;
}
/**
 * POST the user's message to the chat streaming endpoint and expose the
 * SSE response as an async iterable of text chunks.
 *
 * @param content - The user prompt to send.
 * @returns An object implementing `Symbol.asyncIterator` that yields the
 *   payload of each `data:` SSE event as it arrives.
 * @throws Error when the HTTP request fails or the response has no body.
 */
export async function fetchAIStream({ content }: FetchAIStreamParams) {
  const accessStore = useAccessStore();
  const token = accessStore.accessToken;

  const headers = new Headers();
  headers.append('Content-Type', 'application/json');
  // Only attach Authorization when a token exists; the original cast the
  // possibly-null result of formatToken to string, which would send the
  // literal header value "null" for logged-out users.
  const authorization = formatToken(token);
  if (authorization) {
    headers.append('Authorization', authorization);
  }

  const response = await fetch('/chat/api/v1/stream', {
    method: 'POST',
    headers,
    body: JSON.stringify({ content }),
  });
  // Surface HTTP errors instead of silently streaming an error body as chat text.
  if (!response.ok) {
    throw new Error(`AI stream request failed: ${response.status}`);
  }
  if (!response.body) throw new Error('No stream body');

  const reader = response.body.getReader();
  const decoder = new TextDecoder('utf8');
  let buffer = '';

  return {
    async *[Symbol.asyncIterator]() {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        // SSE events are separated by a blank line; keep any trailing
        // partial event in the buffer until the next read completes it.
        const parts = buffer.split('\n\n');
        buffer = parts.pop() || '';
        for (const part of parts) {
          if (part.startsWith('data: ')) {
            // Strip only the SSE field prefix, not later occurrences.
            yield part.slice('data: '.length);
          }
        }
      }
    },
  };
}

View File

@@ -16,6 +16,7 @@ import { useAccessStore } from '@vben/stores';
import { message } from 'ant-design-vue';
import { useAuthStore } from '#/store';
import { formatToken } from '#/utils/auth';
import { refreshTokenApi } from './core';
@@ -56,10 +57,6 @@ function createRequestClient(baseURL: string, options?: RequestClientOptions) {
return newToken;
}
// Build the `Authorization` header value for a bearer token.
// Returns null when there is no token to attach.
function formatToken(token: null | string) {
  if (!token) {
    return null;
  }
  return `Bearer ${token}`;
}
// 请求头处理
client.addRequestInterceptor({
fulfilled: async (config) => {

View File

@@ -0,0 +1,3 @@
/**
 * Format an access token into an HTTP `Authorization` bearer value.
 *
 * @param token - Raw access token, or null when the user is not logged in.
 * @returns `Bearer <token>`, or null when no usable token was given.
 */
export function formatToken(token: null | string) {
  return token == null || token === '' ? null : `Bearer ${token}`;
}

View File

@@ -14,6 +14,9 @@ import {
Select,
} from 'ant-design-vue';
import { fetchAIStream } from '#/api/ai/chat';
// 移除 import typingSound from '@/assets/typing.mp3';
interface Message {
id: number;
role: 'ai' | 'user';
@@ -24,7 +27,7 @@ interface Message {
const chatList = ref([
{
id: 1,
title: '和GPT-3.5的对话',
title: '和deepseek的对话',
lastMessage: 'AI: 你好,有什么可以帮您?',
},
{ id: 2, title: '工作助理', lastMessage: 'AI: 今天的日程已为您安排。' },
@@ -44,7 +47,7 @@ const messages = ref<Record<number, Message[]>>({
// mock 模型列表
const modelOptions = [
{ label: 'GPT-3.5', value: 'gpt-3.5' },
{ label: 'deepseek', value: 'deepseek' },
{ label: 'GPT-4', value: 'gpt-4' },
];
@@ -53,6 +56,8 @@ const selectedModel = ref(modelOptions[0].value);
const search = ref('');
const input = ref('');
const messagesRef = ref<HTMLElement | null>(null);
const currentAiMessage = ref<Message | null>(null);
const isAiTyping = ref(false);
const filteredChats = computed(() => {
if (!search.value) return chatList.value;
@@ -80,22 +85,36 @@ function handleNewChat() {
nextTick(scrollToBottom);
}
function handleSend() {
async function handleSend() {
if (!input.value.trim()) return;
const msg: Message = { id: Date.now(), role: 'user', content: input.value };
if (!messages.value[selectedChatId.value]) {
messages.value[selectedChatId.value] = [];
}
messages.value[selectedChatId.value].push(msg);
// mock AI 回复
setTimeout(() => {
messages.value[selectedChatId.value]?.push({
id: Date.now() + 1,
role: 'ai',
content: 'AI回复内容mock',
});
nextTick(scrollToBottom);
}, 600);
// 预留AI消息
const aiMsgObj: Message = { id: Date.now() + 1, role: 'ai', content: '' };
messages.value[selectedChatId.value].push(aiMsgObj);
currentAiMessage.value = aiMsgObj;
isAiTyping.value = true;
const stream = await fetchAIStream({
content: input.value,
});
// 移除打字音效播放
for await (const chunk of stream) {
for (const char of chunk) {
aiMsgObj.content += char;
currentAiMessage.value = { ...aiMsgObj };
// 移除打字音效播放
await new Promise(resolve => setTimeout(resolve, 15));
nextTick(scrollToBottom);
}
}
isAiTyping.value = false;
input.value = '';
nextTick(scrollToBottom);
}
@@ -165,7 +184,15 @@ function scrollToBottom() {
>
<div class="bubble" :class="[msg.role]">
<span class="role">{{ msg.role === 'user' ? '我' : 'AI' }}</span>
<span class="bubble-content">{{ msg.content }}</span>
<span class="bubble-content">
{{ msg.content }}
<span
v-if="
msg.role === 'ai' && isAiTyping && msg === currentAiMessage
"
class="typing-cursor"
></span>
</span>
</div>
</div>
</div>
@@ -327,4 +354,18 @@ function scrollToBottom() {
padding: 8px 4px 8px 4px;
}
}
/* Blinking block caret appended to the AI bubble while it is "typing". */
.typing-cursor {
display: inline-block;
width: 8px;
height: 1.2em;
background: #1677ff;
margin-left: 2px;
animation: blink-cursor 1s steps(1) infinite;
vertical-align: bottom;
border-radius: 2px;
}
/* Hard on/off blink: visible for the first half of each second, hidden for the rest. */
@keyframes blink-cursor {
0%, 50% { opacity: 1; }
51%, 100% { opacity: 0; }
}
</style>

View File

@@ -22,15 +22,14 @@ export default defineConfig(async ({ mode }) => {
host: '0.0.0.0', // 保证 docker 内外都能访问
port: 5678,
proxy: {
'/chat': {
target: 'http://localhost:8010',
changeOrigin: true,
},
'/api': {
target: backendUrl,
changeOrigin: true,
},
'/ws': {
target: backendUrl,
changeOrigin: true,
ws: true, // 启用WebSocket代理
},
},
},
plugins: [