From 7dc527118a91a6e1e397ce4a3144d35951b99f86 Mon Sep 17 00:00:00 2001 From: kim Date: Thu, 10 Jul 2025 19:08:10 +0800 Subject: [PATCH 1/4] =?UTF-8?q?feat:=20=E5=B0=86=E4=BA=8C=E8=BF=9B?= =?UTF-8?q?=E5=88=B6=E5=85=A8=E9=87=8F=E7=9A=84=E6=95=B0=E6=8D=AE=20?= =?UTF-8?q?=E6=94=B9=E4=B8=BAsse=E5=A2=9E=E9=87=8F=E7=9A=84=E6=96=B9?= =?UTF-8?q?=E5=BC=8F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- service/src/chatgpt/index.ts | 11 +- service/src/chatgpt/types.ts | 5 + service/src/routes/chat.ts | 75 ++++++-- src/api/index.ts | 133 +++++++++++++- src/views/chat/index.vue | 344 ++++++++++++++++++++++++----------- 5 files changed, 451 insertions(+), 117 deletions(-) diff --git a/service/src/chatgpt/index.ts b/service/src/chatgpt/index.ts index f8b21e8f..8a258d09 100644 --- a/service/src/chatgpt/index.ts +++ b/service/src/chatgpt/index.ts @@ -237,13 +237,18 @@ search result: ${searchResultContent}`, const finish_reason = chunk.choices[0]?.finish_reason - // Build response object similar to the original implementation + // 构建增量响应对象 const responseChunk = { id: chunk.id, - reasoning: responseReasoning, - text: responseText, + reasoning: responseReasoning, // 累积的推理内容 + text: responseText, // 累积的文本内容 role: 'assistant', finish_reason, + // 增量数据,只包含本次新增的内容 + delta: { + reasoning: reasoningContent, // 本次新增的推理内容 + text: content, // 本次新增的文本内容 + } } // Call the process callback if provided diff --git a/service/src/chatgpt/types.ts b/service/src/chatgpt/types.ts index dc79fbc4..cb68c8af 100644 --- a/service/src/chatgpt/types.ts +++ b/service/src/chatgpt/types.ts @@ -20,6 +20,11 @@ export interface ResponseChunk { reasoning?: string role?: string finish_reason?: string + // 支持增量响应 + delta?: { + reasoning?: string + text?: string + } } export interface RequestOptions { diff --git a/service/src/routes/chat.ts b/service/src/routes/chat.ts index 09656b05..0f3e0df5 100644 --- a/service/src/routes/chat.ts +++ b/service/src/routes/chat.ts @@ -225,7 +225,12 @@ router.post('/chat-clear', auth, async (req, res) => { }) router.post('/chat-process', [auth, limiter], async (req, res) => { - res.setHeader('Content-type', 'application/octet-stream') + // 设置 SSE 响应头 + res.setHeader('Content-Type', 'text/event-stream') + res.setHeader('Cache-Control', 'no-cache') + res.setHeader('Connection', 'keep-alive') + res.setHeader('Access-Control-Allow-Origin', '*') + res.setHeader('Access-Control-Allow-Headers', 'Cache-Control') let { roomId, uuid, regenerate, prompt, uploadFileKeys, options = {}, systemMessage, temperature, top_p } = req.body as RequestProps const userId = req.headers.userId.toString() @@ -241,6 +246,22 @@ router.post('/chat-process', [auth, limiter], async (req, res) => { let result let message: ChatInfo let user = await getUserById(userId) + + // SSE 辅助函数 + const sendSSEData = (eventType: string, data: any) => { + res.write(`event: ${eventType}\n`) + res.write(`data: ${JSON.stringify(data)}\n\n`) + } + + const sendSSEError = (error: string) => { + sendSSEData('error', JSON.stringify({ message: error })) + } + + const sendSSEEnd = () => { + res.write('event: end\n') + res.write('data: [DONE]\n\n') + } + try { // If use the fixed fakeuserid(some probability of duplicated with real ones), redefine user which is send to chatReplyProcess if (userId === '6406d8c50aedd633885fa16f') { @@ -251,7 +272,9 @@ router.post('/chat-process', [auth, limiter], async (req, res) => { if (config.siteConfig?.usageCountLimit) { const useAmount = user ? (user.useAmount ?? 
0) : 0 if (Number(useAmount) <= 0 && user.limit_switch) { - res.send({ status: 'Fail', message: '提问次数用完啦 | Question limit reached', data: null }) + sendSSEError('提问次数用完啦 | Question limit reached') + sendSSEEnd() + res.end() return } } @@ -259,21 +282,45 @@ router.post('/chat-process', [auth, limiter], async (req, res) => { if (config.auditConfig.enabled || config.auditConfig.customizeEnabled) { if (!user.roles.includes(UserRole.Admin) && await containsSensitiveWords(config.auditConfig, prompt)) { - res.send({ status: 'Fail', message: '含有敏感词 | Contains sensitive words', data: null }) + sendSSEError('含有敏感词 | Contains sensitive words') + sendSSEEnd() + res.end() return } } + message = regenerate ? await getChat(roomId, uuid) : await insertChat(uuid, prompt, uploadFileKeys, roomId, model, options as ChatOptions) - let firstChunk = true + result = await chatReplyProcess({ message: prompt, uploadFileKeys, parentMessageId: options?.parentMessageId, process: (chunk: ResponseChunk) => { lastResponse = chunk - - res.write(firstChunk ? JSON.stringify(chunk) : `\n${JSON.stringify(chunk)}`) - firstChunk = false + + // 根据数据类型发送不同的 SSE 事件 + if (chunk.searchQuery) { + sendSSEData('search_query', { searchQuery: chunk.searchQuery }) + } + if (chunk.searchResults) { + sendSSEData('search_results', { + searchResults: chunk.searchResults, + searchUsageTime: chunk.searchUsageTime + }) + } + if (chunk.delta) { + // 发送增量数据 + sendSSEData('delta', {m: chunk.delta}) + } else { + // 兼容现有格式,发送完整数据但标记为增量类型 + sendSSEData('message', { + id: chunk.id, + reasoning: chunk.reasoning, + text: chunk.text, + role: chunk.role, + finish_reason: chunk.finish_reason + }) + } }, systemMessage, temperature, @@ -283,11 +330,17 @@ router.post('/chat-process', [auth, limiter], async (req, res) => { room, chatUuid: uuid, }) - // return the whole response including usage - res.write(`\n${JSON.stringify(result.data)}`) + + // 发送最终完成数据 + if (result && result.status === 'Success') { + sendSSEData('complete', result.data) + } + + sendSSEEnd() } catch (error) { - res.write(JSON.stringify({ message: error?.message })) + sendSSEError(error?.message || 'Unknown error') + sendSSEEnd() } finally { res.end() @@ -299,7 +352,7 @@ router.post('/chat-process', [auth, limiter], async (req, res) => { } if (result.data === undefined) - // eslint-disable-next-line no-unsafe-finally + // eslint-disable-next-line no-unsafe-finally return if (regenerate && message.options.messageId) { diff --git a/src/api/index.ts b/src/api/index.ts index 8a2e562f..d7f5d5df 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -1,7 +1,7 @@ import type { AxiosProgressEvent, GenericAbortSignal } from 'axios' import type { AnnounceConfig, AuditConfig, ConfigState, GiftCard, KeyConfig, MailConfig, SearchConfig, SiteConfig, Status, UserInfo, UserPassword, UserPrompt } from '@/components/common/Setting/model' import type { SettingsState } from '@/store/modules/user/helper' -import { useUserStore } from '@/store' +import { useAuthStore, useUserStore } from '@/store' import { get, post } from '@/utils/request' export function fetchAnnouncement() { @@ -16,6 +16,137 @@ export function fetchChatConfig() { }) } +// SSE 事件处理器接口 +interface SSEEventHandlers { + onMessage?: (data: any) => void + onDelta?: (delta: { reasoning?: string, text?: string }) => void + onSearchQuery?: (data: { searchQuery: string }) => void + onSearchResults?: (data: { searchResults: any[], searchUsageTime: number }) => void + onComplete?: (data: any) => void + onError?: (error: string) => void + onEnd?: () => void 
+} + +// 新的 SSE 聊天处理函数 +export function fetchChatAPIProcessSSE( + params: { + roomId: number + uuid: number + regenerate?: boolean + prompt: string + uploadFileKeys?: string[] + options?: { conversationId?: string, parentMessageId?: string } + signal?: AbortSignal + }, + handlers: SSEEventHandlers, +): Promise { + const userStore = useUserStore() + const authStore = useAuthStore() + + const data: Record = { + roomId: params.roomId, + uuid: params.uuid, + regenerate: params.regenerate || false, + prompt: params.prompt, + uploadFileKeys: params.uploadFileKeys, + options: params.options, + systemMessage: userStore.userInfo.advanced.systemMessage, + temperature: userStore.userInfo.advanced.temperature, + top_p: userStore.userInfo.advanced.top_p, + } + + return new Promise((resolve, reject) => { + const baseURL = import.meta.env.VITE_GLOB_API_URL || '' + const url = `${baseURL}/api/chat-process` + + fetch(url, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Authorization': authStore.token ? `Bearer ${authStore.token}` : '', + }, + body: JSON.stringify(data), + signal: params.signal, + }).then((response) => { + if (!response.ok) { + throw new Error(`HTTP error! status: ${response.status}`) + } + + const reader = response.body?.getReader() + if (!reader) { + throw new Error('No reader available') + } + + const decoder = new TextDecoder() + let buffer = '' + + function readStream(): void { + reader!.read().then(({ done, value }) => { + if (done) { + handlers.onEnd?.() + resolve() + return + } + + buffer += decoder.decode(value, { stream: true }) + const lines = buffer.split('\n') + buffer = lines.pop() || '' // Keep the incomplete line in buffer + + for (const line of lines) { + if (line.trim() === '') + continue + + if (line.startsWith('event: ')) { + const _eventType = line.substring(7).trim() + continue + } + + if (line.startsWith('data: ')) { + const data = line.substring(6).trim() + + if (data === '[DONE]') { + handlers.onEnd?.() + resolve() + return + } + + try { + const jsonData = JSON.parse(data) + + + // 根据前面的 event 类型分发到不同的处理器 + if (jsonData.message) { + handlers.onError?.(jsonData.message) + } + else if (jsonData.searchQuery) { + handlers.onSearchQuery?.(jsonData) + } + else if (jsonData.searchResults) { + handlers.onSearchResults?.(jsonData) + } + else if (jsonData.m) { + handlers.onDelta?.(jsonData.m) + } + else { + handlers.onMessage?.(jsonData) + } + } + catch (e) { + console.error('Failed to parse SSE data:', data, e) + } + } + } + + readStream() + }).catch(reject) + } + + readStream() + }).catch(reject) + }) +} + +// 保持向后兼容的函数(如果需要的话) export function fetchChatAPIProcess( params: { roomId: number diff --git a/src/views/chat/index.vue b/src/views/chat/index.vue index 893dffd0..2eac5288 100644 --- a/src/views/chat/index.vue +++ b/src/views/chat/index.vue @@ -3,6 +3,7 @@ import type { MessageReactive, UploadFileInfo } from 'naive-ui' import html2canvas from 'html2canvas' import { fetchChatAPIProcess, + fetchChatAPIProcessSSE, fetchChatResponseoHistory, fetchChatStopResponding, } from '@/api' @@ -132,77 +133,150 @@ async function onConversation() { try { let lastText = '' + let accumulatedReasoning = '' const fetchChatAPIOnce = async () => { let searchQuery: string let searchResults: Chat.SearchResult[] let searchUsageTime: number - await fetchChatAPIProcess({ + await fetchChatAPIProcessSSE({ roomId: currentChatRoom.value!.roomId, uuid: chatUuid, prompt: message, uploadFileKeys, options, signal: controller.signal, - onDownloadProgress: async ({ event }) => { - 
const xhr = event.target - const { responseText } = xhr - // Always process the final line - const lastIndex = responseText.lastIndexOf('\n', responseText.length - 2) - let chunk = responseText - if (lastIndex !== -1) - chunk = responseText.substring(lastIndex) - try { - const data = JSON.parse(chunk) - if (data.searchQuery) - searchQuery = data.searchQuery - if (data.searchResults) - searchResults = data.searchResults - if (data.searchUsageTime) - searchUsageTime = data.searchUsageTime - - const usage = (data.detail && data.detail.usage) - ? { - completion_tokens: data.detail.usage.completion_tokens || null, - prompt_tokens: data.detail.usage.prompt_tokens || null, - total_tokens: data.detail.usage.total_tokens || null, - estimated: data.detail.usage.estimated || null, - } - : undefined - await chatStore.updateChatMessage( - currentChatRoom.value!.roomId, - dataSources.value.length - 1, - { - dateTime: new Date().toLocaleString(), - searchQuery, - searchResults, - searchUsageTime, - reasoning: data?.reasoning, - text: lastText + (data.text ?? ''), - inversion: false, - error: false, - loading: true, - conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id }, - requestOptions: { prompt: message, options: { ...options } }, - usage, - }, - ) - - if (openLongReply && data.detail && data.detail.choices.length > 0 && data.detail.choices[0].finish_reason === 'length') { - options.parentMessageId = data.id - lastText = data.text - message = '' - return fetchChatAPIOnce() - } - - await scrollToBottomIfAtBottom() + }, { + onSearchQuery: (data) => { + searchQuery = data.searchQuery + }, + onSearchResults: (data) => { + searchResults = data.searchResults + searchUsageTime = data.searchUsageTime + }, + onDelta: async (delta) => { + // 处理增量数据 + if (delta.text) { + lastText += delta.text } - catch { - // + if (delta.reasoning) { + accumulatedReasoning += delta.reasoning + } + await chatStore.updateChatMessage( + currentChatRoom.value!.roomId, + dataSources.value.length - 1, + { + dateTime: new Date().toLocaleString(), + searchQuery, + searchResults, + searchUsageTime, + reasoning: accumulatedReasoning, + text: lastText, + inversion: false, + error: false, + loading: true, + conversationOptions: null, + requestOptions: { prompt: message, options: { ...options } }, + }, + ) + + await scrollToBottomIfAtBottom() + }, + onMessage: async (data) => { + // 处理完整消息数据(兼容模式) + if (data.searchQuery) + searchQuery = data.searchQuery + if (data.searchResults) + searchResults = data.searchResults + if (data.searchUsageTime) + searchUsageTime = data.searchUsageTime + + const usage = (data.detail && data.detail.usage) + ? { + completion_tokens: data.detail.usage.completion_tokens || null, + prompt_tokens: data.detail.usage.prompt_tokens || null, + total_tokens: data.detail.usage.total_tokens || null, + estimated: data.detail.usage.estimated || null, + } + : undefined + + await chatStore.updateChatMessage( + currentChatRoom.value!.roomId, + dataSources.value.length - 1, + { + dateTime: new Date().toLocaleString(), + searchQuery, + searchResults, + searchUsageTime, + reasoning: data?.reasoning, + text: data.text ?? 
'', + inversion: false, + error: false, + loading: true, + conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id }, + requestOptions: { prompt: message, options: { ...options } }, + usage, + }, + ) + + if (openLongReply && data.detail && data.detail.choices?.length > 0 && data.detail.choices[0].finish_reason === 'length') { + options.parentMessageId = data.id + lastText = data.text + message = '' + return fetchChatAPIOnce() } + + await scrollToBottomIfAtBottom() + }, + onComplete: async (data) => { + // 处理完成事件 + const usage = (data.detail && data.detail.usage) + ? { + completion_tokens: data.detail.usage.completion_tokens || null, + prompt_tokens: data.detail.usage.prompt_tokens || null, + total_tokens: data.detail.usage.total_tokens || null, + estimated: data.detail.usage.estimated || null, + } + : undefined + + await chatStore.updateChatMessage( + currentChatRoom.value!.roomId, + dataSources.value.length - 1, + { + dateTime: new Date().toLocaleString(), + searchQuery, + searchResults, + searchUsageTime, + reasoning: data?.reasoning || accumulatedReasoning, + text: data?.text || lastText, + inversion: false, + error: false, + loading: false, + conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id }, + requestOptions: { prompt: message, options: { ...options } }, + usage, + }, + ) + }, + onError: async (error) => { + await chatStore.updateChatMessage( + currentChatRoom.value!.roomId, + dataSources.value.length - 1, + { + dateTime: new Date().toLocaleString(), + text: error, + inversion: false, + error: true, + loading: false, + conversationOptions: null, + requestOptions: { prompt: message, options: { ...options } }, + }, + ) + }, + onEnd: () => { + updateChatSome(currentChatRoom.value!.roomId, dataSources.value.length - 1, { loading: false }) }, }) - updateChatSome(currentChatRoom.value!.roomId, dataSources.value.length - 1, { loading: false }) } await fetchChatAPIOnce() @@ -294,63 +368,129 @@ async function onRegenerate(index: number) { try { let lastText = '' + let accumulatedReasoning = '' const fetchChatAPIOnce = async () => { - await fetchChatAPIProcess({ + await fetchChatAPIProcessSSE({ roomId: currentChatRoom.value!.roomId, uuid: chatUuid || Date.now(), regenerate: true, prompt: message, options, signal: controller.signal, - onDownloadProgress: ({ event }) => { - const xhr = event.target - const { responseText } = xhr - // Always process the final line - const lastIndex = responseText.lastIndexOf('\n', responseText.length - 2) - let chunk = responseText - if (lastIndex !== -1) - chunk = responseText.substring(lastIndex) - try { - const data = JSON.parse(chunk) - const usage = (data.detail && data.detail.usage) - ? { - completion_tokens: data.detail.usage.completion_tokens || null, - prompt_tokens: data.detail.usage.prompt_tokens || null, - total_tokens: data.detail.usage.total_tokens || null, - estimated: data.detail.usage.estimated || null, - } - : undefined - updateChat( - currentChatRoom.value!.roomId, - index, - { - dateTime: new Date().toLocaleString(), - reasoning: data?.reasoning, - finish_reason: data?.finish_reason, - text: lastText + (data.text ?? 
''), - inversion: false, - responseCount, - error: false, - loading: true, - conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id }, - requestOptions: { prompt: message, options: { ...options } }, - usage, - }, - ) - - if (openLongReply && data.detail && data.detail.choices.length > 0 && data.detail.choices[0].finish_reason === 'length') { - options.parentMessageId = data.id - lastText = data.text - message = '' - return fetchChatAPIOnce() - } + }, { + onDelta: async (delta) => { + // 处理增量数据 + if (delta.text) { + lastText += delta.text } - catch { - // + if (delta.reasoning) { + accumulatedReasoning += delta.reasoning } + + + updateChat( + currentChatRoom.value!.roomId, + index, + { + dateTime: new Date().toLocaleString(), + reasoning: accumulatedReasoning, + text: lastText, + inversion: false, + responseCount, + error: false, + loading: true, + conversationOptions: null, + requestOptions: { prompt: message, options: { ...options } }, + }, + ) + + scrollToBottomIfAtBottom() + }, + onMessage: async (data) => { + // 处理完整消息数据(兼容模式) + const usage = (data.detail && data.detail.usage) + ? { + completion_tokens: data.detail.usage.completion_tokens || null, + prompt_tokens: data.detail.usage.prompt_tokens || null, + total_tokens: data.detail.usage.total_tokens || null, + estimated: data.detail.usage.estimated || null, + } + : undefined + updateChat( + currentChatRoom.value!.roomId, + index, + { + dateTime: new Date().toLocaleString(), + reasoning: data?.reasoning, + finish_reason: data?.finish_reason, + text: data.text ?? '', + inversion: false, + responseCount, + error: false, + loading: true, + conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id }, + requestOptions: { prompt: message, options: { ...options } }, + usage, + }, + ) + + if (openLongReply && data.detail && data.detail.choices?.length > 0 && data.detail.choices[0].finish_reason === 'length') { + options.parentMessageId = data.id + lastText = data.text + message = '' + return fetchChatAPIOnce() + } + + scrollToBottomIfAtBottom() + }, + onComplete: async (data) => { + // 处理完成事件 + const usage = (data.detail && data.detail.usage) + ? 
{ + completion_tokens: data.detail.usage.completion_tokens || null, + prompt_tokens: data.detail.usage.prompt_tokens || null, + total_tokens: data.detail.usage.total_tokens || null, + estimated: data.detail.usage.estimated || null, + } + : undefined + updateChat( + currentChatRoom.value!.roomId, + index, + { + dateTime: new Date().toLocaleString(), + reasoning: data?.reasoning || accumulatedReasoning, + finish_reason: data?.finish_reason, + text: data?.text || lastText, + inversion: false, + responseCount, + error: false, + loading: false, + conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id }, + requestOptions: { prompt: message, options: { ...options } }, + usage, + }, + ) + }, + onError: async (error) => { + updateChat( + currentChatRoom.value!.roomId, + index, + { + dateTime: new Date().toLocaleString(), + text: error, + inversion: false, + responseCount, + error: true, + loading: false, + conversationOptions: null, + requestOptions: { prompt: message, options: { ...options } }, + }, + ) + }, + onEnd: () => { + updateChatSome(currentChatRoom.value!.roomId, index, { loading: false }) }, }) - updateChatSome(currentChatRoom.value!.roomId, index, { loading: false }) } await fetchChatAPIOnce() } From dfd417613169964ce5af300bf33957b0c4a42f87 Mon Sep 17 00:00:00 2001 From: kim Date: Thu, 10 Jul 2025 19:12:59 +0800 Subject: [PATCH 2/4] =?UTF-8?q?feat:=20=E5=A4=84=E7=90=86lint=E6=A0=BC?= =?UTF-8?q?=E5=BC=8F=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/api/index.ts | 5 ++--- src/views/chat/index.vue | 2 -- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/src/api/index.ts b/src/api/index.ts index d7f5d5df..c12a501c 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -112,7 +112,6 @@ export function fetchChatAPIProcessSSE( try { const jsonData = JSON.parse(data) - // 根据前面的 event 类型分发到不同的处理器 if (jsonData.message) { @@ -225,7 +224,7 @@ export function fetchLogin(username: string, password: string, token?: export function fetchLogout() { return post({ url: '/user-logout', - data: { }, + data: {}, }) } @@ -440,7 +439,7 @@ export function fetchGetChatHistory(roomId: number, lastId?: number, al export function fetchClearAllChat() { return post({ url: '/chat-clear-all', - data: { }, + data: {}, }) } diff --git a/src/views/chat/index.vue b/src/views/chat/index.vue index 2eac5288..7851bb8d 100644 --- a/src/views/chat/index.vue +++ b/src/views/chat/index.vue @@ -2,7 +2,6 @@ import type { MessageReactive, UploadFileInfo } from 'naive-ui' import html2canvas from 'html2canvas' import { - fetchChatAPIProcess, fetchChatAPIProcessSSE, fetchChatResponseoHistory, fetchChatStopResponding, @@ -387,7 +386,6 @@ async function onRegenerate(index: number) { accumulatedReasoning += delta.reasoning } - updateChat( currentChatRoom.value!.roomId, index, From ae41e368e83003814a12f44bee8a76d091d84269 Mon Sep 17 00:00:00 2001 From: kim Date: Thu, 10 Jul 2025 19:19:50 +0800 Subject: [PATCH 3/4] feat: lint --- service/src/chatgpt/index.ts | 6 +++--- service/src/routes/chat.ts | 21 +++++++++++---------- src/api/index.ts | 2 +- 3 files changed, 15 insertions(+), 14 deletions(-) diff --git a/service/src/chatgpt/index.ts b/service/src/chatgpt/index.ts index 8a258d09..943d9d60 100644 --- a/service/src/chatgpt/index.ts +++ b/service/src/chatgpt/index.ts @@ -241,14 +241,14 @@ search result: ${searchResultContent}`, const responseChunk = { id: chunk.id, reasoning: responseReasoning, // 累积的推理内容 - text: responseText, 
// 累积的文本内容 + text: responseText, // 累积的文本内容 role: 'assistant', finish_reason, // 增量数据,只包含本次新增的内容 delta: { reasoning: reasoningContent, // 本次新增的推理内容 - text: content, // 本次新增的文本内容 - } + text: content, // 本次新增的文本内容 + }, } // Call the process callback if provided diff --git a/service/src/routes/chat.ts b/service/src/routes/chat.ts index 0f3e0df5..df48b7df 100644 --- a/service/src/routes/chat.ts +++ b/service/src/routes/chat.ts @@ -288,37 +288,38 @@ router.post('/chat-process', [auth, limiter], async (req, res) => { return } } - + message = regenerate ? await getChat(roomId, uuid) : await insertChat(uuid, prompt, uploadFileKeys, roomId, model, options as ChatOptions) - + result = await chatReplyProcess({ message: prompt, uploadFileKeys, parentMessageId: options?.parentMessageId, process: (chunk: ResponseChunk) => { lastResponse = chunk - + // 根据数据类型发送不同的 SSE 事件 if (chunk.searchQuery) { sendSSEData('search_query', { searchQuery: chunk.searchQuery }) } if (chunk.searchResults) { - sendSSEData('search_results', { + sendSSEData('search_results', { searchResults: chunk.searchResults, - searchUsageTime: chunk.searchUsageTime + searchUsageTime: chunk.searchUsageTime, }) } if (chunk.delta) { // 发送增量数据 - sendSSEData('delta', {m: chunk.delta}) - } else { + sendSSEData('delta', { m: chunk.delta }) + } + else { // 兼容现有格式,发送完整数据但标记为增量类型 sendSSEData('message', { id: chunk.id, reasoning: chunk.reasoning, text: chunk.text, role: chunk.role, - finish_reason: chunk.finish_reason + finish_reason: chunk.finish_reason, }) } }, @@ -330,12 +331,12 @@ router.post('/chat-process', [auth, limiter], async (req, res) => { room, chatUuid: uuid, }) - + // 发送最终完成数据 if (result && result.status === 'Success') { sendSSEData('complete', result.data) } - + sendSSEEnd() } catch (error) { diff --git a/src/api/index.ts b/src/api/index.ts index c12a501c..f51fc27d 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -97,7 +97,7 @@ export function fetchChatAPIProcessSSE( continue if (line.startsWith('event: ')) { - const _eventType = line.substring(7).trim() + // const _eventType = line.substring(7).trim() continue } From 318f8922702239ef1dea182398b4f71eaabbf337 Mon Sep 17 00:00:00 2001 From: kim Date: Fri, 11 Jul 2025 11:07:00 +0800 Subject: [PATCH 4/4] =?UTF-8?q?feat:=20=E8=8B=B1=E6=96=87=E6=B3=A8?= =?UTF-8?q?=E9=87=8A=E4=BB=A5=E5=8F=8A=E5=88=A0=E9=99=A4=E8=80=81=E6=8E=A5?= =?UTF-8?q?=E5=8F=A3=20=E6=96=B0=E9=97=BB=E5=86=85=E5=AE=B9=E9=BB=98?= =?UTF-8?q?=E8=AE=A4=E6=8A=98=E5=8F=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- service/src/chatgpt/index.ts | 12 +++--- service/src/routes/chat.ts | 10 ++--- src/api/index.ts | 40 +------------------- src/views/chat/components/Message/Search.vue | 2 +- src/views/chat/index.vue | 8 ++-- 5 files changed, 18 insertions(+), 54 deletions(-) diff --git a/service/src/chatgpt/index.ts b/service/src/chatgpt/index.ts index 943d9d60..f346a92e 100644 --- a/service/src/chatgpt/index.ts +++ b/service/src/chatgpt/index.ts @@ -237,17 +237,17 @@ search result: ${searchResultContent}`, const finish_reason = chunk.choices[0]?.finish_reason - // 构建增量响应对象 + // Build incremental response object const responseChunk = { id: chunk.id, - reasoning: responseReasoning, // 累积的推理内容 - text: responseText, // 累积的文本内容 + reasoning: responseReasoning, // Accumulated reasoning content + text: responseText, // Accumulated text content role: 'assistant', finish_reason, - // 增量数据,只包含本次新增的内容 + // Incremental data delta: { - reasoning: reasoningContent, // 本次新增的推理内容 - text: 
content, // 本次新增的文本内容 + reasoning: reasoningContent, // reasoning content in this chunk + text: content, // text content in this chunk }, } diff --git a/service/src/routes/chat.ts b/service/src/routes/chat.ts index df48b7df..d153d5e6 100644 --- a/service/src/routes/chat.ts +++ b/service/src/routes/chat.ts @@ -225,7 +225,7 @@ router.post('/chat-clear', auth, async (req, res) => { }) router.post('/chat-process', [auth, limiter], async (req, res) => { - // 设置 SSE 响应头 + // set headers for SSE res.setHeader('Content-Type', 'text/event-stream') res.setHeader('Cache-Control', 'no-cache') res.setHeader('Connection', 'keep-alive') @@ -247,7 +247,7 @@ router.post('/chat-process', [auth, limiter], async (req, res) => { let message: ChatInfo let user = await getUserById(userId) - // SSE 辅助函数 + // SSE helper functions const sendSSEData = (eventType: string, data: any) => { res.write(`event: ${eventType}\n`) res.write(`data: ${JSON.stringify(data)}\n\n`) @@ -298,7 +298,7 @@ router.post('/chat-process', [auth, limiter], async (req, res) => { process: (chunk: ResponseChunk) => { lastResponse = chunk - // 根据数据类型发送不同的 SSE 事件 + // set sse event by different data type if (chunk.searchQuery) { sendSSEData('search_query', { searchQuery: chunk.searchQuery }) } @@ -309,11 +309,11 @@ router.post('/chat-process', [auth, limiter], async (req, res) => { }) } if (chunk.delta) { - // 发送增量数据 + // send SSE event with delta type sendSSEData('delta', { m: chunk.delta }) } else { - // 兼容现有格式,发送完整数据但标记为增量类型 + // send all data sendSSEData('message', { id: chunk.id, reasoning: chunk.reasoning, diff --git a/src/api/index.ts b/src/api/index.ts index f51fc27d..c169d1e9 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -1,4 +1,3 @@ -import type { AxiosProgressEvent, GenericAbortSignal } from 'axios' import type { AnnounceConfig, AuditConfig, ConfigState, GiftCard, KeyConfig, MailConfig, SearchConfig, SiteConfig, Status, UserInfo, UserPassword, UserPrompt } from '@/components/common/Setting/model' import type { SettingsState } from '@/store/modules/user/helper' import { useAuthStore, useUserStore } from '@/store' @@ -16,7 +15,7 @@ export function fetchChatConfig() { }) } -// SSE 事件处理器接口 +// SSE event handler interface interface SSEEventHandlers { onMessage?: (data: any) => void onDelta?: (delta: { reasoning?: string, text?: string }) => void @@ -27,7 +26,7 @@ interface SSEEventHandlers { onEnd?: () => void } -// 新的 SSE 聊天处理函数 +// SSE chat processing function export function fetchChatAPIProcessSSE( params: { roomId: number @@ -145,41 +144,6 @@ export function fetchChatAPIProcessSSE( }) } -// 保持向后兼容的函数(如果需要的话) -export function fetchChatAPIProcess( - params: { - roomId: number - uuid: number - regenerate?: boolean - prompt: string - uploadFileKeys?: string[] - options?: { conversationId?: string, parentMessageId?: string } - signal?: GenericAbortSignal - onDownloadProgress?: (progressEvent: AxiosProgressEvent) => void - }, -) { - const userStore = useUserStore() - - const data: Record = { - roomId: params.roomId, - uuid: params.uuid, - regenerate: params.regenerate || false, - prompt: params.prompt, - uploadFileKeys: params.uploadFileKeys, - options: params.options, - systemMessage: userStore.userInfo.advanced.systemMessage, - temperature: userStore.userInfo.advanced.temperature, - top_p: userStore.userInfo.advanced.top_p, - } - - return post({ - url: '/chat-process', - data, - signal: params.signal, - onDownloadProgress: params.onDownloadProgress, - }) -} - export function fetchChatStopResponding(chatUuid: number) { return 
post({ url: '/chat-abort', diff --git a/src/views/chat/components/Message/Search.vue b/src/views/chat/components/Message/Search.vue index 250abfbb..c2b8d778 100644 --- a/src/views/chat/components/Message/Search.vue +++ b/src/views/chat/components/Message/Search.vue @@ -18,7 +18,7 @@ const instance = getCurrentInstance() const uid = instance?.uid || Date.now() + Math.random().toString(36).substring(2) const textRef = ref() -const isCollapsed = ref(false) +const isCollapsed = ref(true) const searchBtnTitle = computed(() => { return t('chat.expandCollapseSearchResults') diff --git a/src/views/chat/index.vue b/src/views/chat/index.vue index 7851bb8d..a5cd300c 100644 --- a/src/views/chat/index.vue +++ b/src/views/chat/index.vue @@ -154,7 +154,7 @@ async function onConversation() { searchUsageTime = data.searchUsageTime }, onDelta: async (delta) => { - // 处理增量数据 + // Handle incremental data if (delta.text) { lastText += delta.text } @@ -182,7 +182,7 @@ async function onConversation() { await scrollToBottomIfAtBottom() }, onMessage: async (data) => { - // 处理完整消息数据(兼容模式) + // Handle complete message data (compatibility mode) if (data.searchQuery) searchQuery = data.searchQuery if (data.searchResults) @@ -228,7 +228,7 @@ async function onConversation() { await scrollToBottomIfAtBottom() }, onComplete: async (data) => { - // 处理完成事件 + // Handle complete event const usage = (data.detail && data.detail.usage) ? { completion_tokens: data.detail.usage.completion_tokens || null, @@ -405,7 +405,7 @@ async function onRegenerate(index: number) { scrollToBottomIfAtBottom() }, onMessage: async (data) => { - // 处理完整消息数据(兼容模式) + // Handle complete message data (compatibility mode) const usage = (data.detail && data.detail.usage) ? { completion_tokens: data.detail.usage.completion_tokens || null,
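
Notes on the SSE wire format introduced by this series: after PATCH 1/4, /chat-process frames every payload as an SSE event -- an `event: <type>` line, a `data: <json>` line, and a blank line -- and terminates the stream with `event: end` / `data: [DONE]`. The snippet below is a minimal, self-contained sketch of that framing against a bare Express app; the standalone `app`, the hard-coded deltas, and the port are illustrative placeholders and are not part of the patches.

    import express from 'express'

    const app = express()
    app.use(express.json())

    app.post('/chat-process', (req, res) => {
      // Same headers the patch sets for the streaming response.
      res.setHeader('Content-Type', 'text/event-stream')
      res.setHeader('Cache-Control', 'no-cache')
      res.setHeader('Connection', 'keep-alive')

      // One SSE frame: "event: <type>\n" + "data: <json>\n\n".
      const sendSSEData = (eventType: string, data: unknown) => {
        res.write(`event: ${eventType}\n`)
        res.write(`data: ${JSON.stringify(data)}\n\n`)
      }

      // Illustrative stream: two incremental deltas, a final payload, then the terminator.
      sendSSEData('delta', { m: { text: 'Hello' } })
      sendSSEData('delta', { m: { text: ', world' } })
      sendSSEData('complete', { text: 'Hello, world', finish_reason: 'stop' })
      res.write('event: end\n')
      res.write('data: [DONE]\n\n')
      res.end()
    })

    app.listen(3000) // placeholder port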
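On the client, fetchChatAPIProcessSSE (PATCH 4/4) reads that stream and dispatches each frame to typed handlers, with `delta` events carrying only the newly generated fragment, so the caller accumulates text and reasoning itself. A hedged usage sketch, assuming the handler interface added in src/api/index.ts; the roomId/uuid/prompt values are placeholders and the surrounding component wiring is omitted.

    import { fetchChatAPIProcessSSE } from '@/api'

    async function runChat() {
      let text = ''
      let reasoning = ''
      const controller = new AbortController()

      await fetchChatAPIProcessSSE(
        {
          roomId: 1,          // placeholder
          uuid: Date.now(),   // placeholder
          prompt: 'Hello',    // placeholder
          signal: controller.signal,
        },
        {
          onDelta: (delta) => {
            // 'delta' events contain only the new fragment; accumulate locally.
            if (delta.text)
              text += delta.text
            if (delta.reasoning)
              reasoning += delta.reasoning
          },
          onComplete: (data) => {
            // 'complete' carries the final payload (including usage) on success.
            console.log('finished:', data?.text ?? text)
          },
          onError: error => console.error('stream error:', error),
          onEnd: () => console.log('stream closed'),
        },
      )
    }

This mirrors how src/views/chat/index.vue consumes the stream in onConversation/onRegenerate: deltas update the in-progress message, onComplete writes the final text and usage, and onEnd clears the loading flag.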