From b579d24d198c674250ae0fbda0207746899ef5a9 Mon Sep 17 00:00:00 2001
From: assassinliujie <68693675+assassinliujie@users.noreply.github.com>
Date: Tue, 28 Mar 2023 09:40:20 +0800
Subject: [PATCH] perf: message output optimization (#935)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Update index.ts

Rework the backend so that the typewriter effect is preserved while the payload transferred between the frontend and the backend is reduced, saving bandwidth and improving performance.

* Update index.vue

Rework the frontend to match the backend change above, keeping the typewriter effect while improving performance and reducing data transfer.

* chore: lint fix

---------

Co-authored-by: ChenZhaoYu <790348264@qq.com>
---
 service/src/index.ts     |  12 ++-
 src/views/chat/index.vue | 178 ++++++++++++++++++++-------------------
 2 files changed, 100 insertions(+), 90 deletions(-)

diff --git a/service/src/index.ts b/service/src/index.ts
index 28041d1..f2e0e68 100644
--- a/service/src/index.ts
+++ b/service/src/index.ts
@@ -29,8 +29,16 @@ router.post('/chat-process', [auth, limiter], async (req, res) => {
       message: prompt,
       lastContext: options,
       process: (chat: ChatMessage) => {
-        res.write(firstChunk ? JSON.stringify(chat) : `\n${JSON.stringify(chat)}`)
-        firstChunk = false
+        if (firstChunk) {
+          res.write(`${JSON.stringify(chat)}t1h1i4s5i1s4a1s9i1l9l8y1s0plit`)
+          firstChunk = false
+        }
+        else {
+          let tmp = chat.delta
+          if (!(chat.delta))
+            tmp = ''
+          res.write(tmp)
+        }
       },
       systemMessage,
     })
diff --git a/src/views/chat/index.vue b/src/views/chat/index.vue
index c37466a..c511921 100644
--- a/src/views/chat/index.vue
+++ b/src/views/chat/index.vue
@@ -107,7 +107,9 @@ async function onConversation() {
   scrollToBottom()
 
   try {
-    let lastText = ''
+    const magicSplit = 't1h1i4s5i1s4a1s9i1l9l8y1s0plit'
+    let renderText = ''
+    let firstTime = true
     const fetchChatAPIOnce = async () => {
       await fetchChatAPIProcess({
         prompt: message,
@@ -117,42 +119,49 @@
           const xhr = event.target
           const { responseText } = xhr
           // Always process the final line
-          const lastIndex = responseText.lastIndexOf('\n', responseText.length - 2)
-          let chunk = responseText
-          if (lastIndex !== -1)
-            chunk = responseText.substring(lastIndex)
-          try {
-            const data = JSON.parse(chunk)
-            updateChat(
-              +uuid,
-              dataSources.value.length - 1,
-              {
-                dateTime: new Date().toLocaleString(),
-                text: lastText + data.text ?? '',
-                inversion: false,
-                error: false,
-                loading: false,
-                conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
-                requestOptions: { prompt: message, options: { ...options } },
-              },
-            )
-
-            if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
-              options.parentMessageId = data.id
-              lastText = data.text
-              message = ''
-              return fetchChatAPIOnce()
-            }
-
-            scrollToBottomIfAtBottom()
-          }
-          catch (error) {
-            //
-          }
+          const splitIndexBegin = responseText.search(magicSplit)
+          if (splitIndexBegin !== -1) {
+            const splitIndexEnd = splitIndexBegin + magicSplit.length
+
+            const firstChunk = responseText.substring(0, splitIndexBegin)
+            const deltaText = responseText.substring(splitIndexEnd)
+            try {
+              const data = JSON.parse(firstChunk)
+              if (firstTime) {
+                firstTime = false
+                renderText = data.text ?? ''
+              }
+              else {
+                renderText = deltaText ?? ''
+              }
+              updateChat(
+                +uuid,
+                dataSources.value.length - 1,
+                {
+                  dateTime: new Date().toLocaleString(),
+                  text: renderText,
+                  inversion: false,
+                  error: false,
+                  loading: false,
+                  conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
+                  requestOptions: { prompt: message, ...options },
+                },
+              )
+
+              if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
+                options.parentMessageId = data.id
+                message = ''
+                return fetchChatAPIOnce()
+              }
+            }
+            catch (error) {
+              //
+            }
+          }
         },
       })
     }
-
     await fetchChatAPIOnce()
   }
   catch (error: any) {
@@ -237,7 +246,9 @@ async function onRegenerate(index: number) {
   )
 
   try {
-    let lastText = ''
+    const magicSplit = 't1h1i4s5i1s4a1s9i1l9l8y1s0plit'
+    let renderText = ''
+    let firstTime = true
     const fetchChatAPIOnce = async () => {
       await fetchChatAPIProcess({
         prompt: message,
@@ -247,35 +258,45 @@
           const xhr = event.target
           const { responseText } = xhr
           // Always process the final line
-          const lastIndex = responseText.lastIndexOf('\n', responseText.length - 2)
-          let chunk = responseText
-          if (lastIndex !== -1)
-            chunk = responseText.substring(lastIndex)
-          try {
-            const data = JSON.parse(chunk)
-            updateChat(
-              +uuid,
-              index,
-              {
-                dateTime: new Date().toLocaleString(),
-                text: lastText + data.text ?? '',
-                inversion: false,
-                error: false,
-                loading: false,
-                conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
-                requestOptions: { prompt: message, ...options },
-              },
-            )
-
-            if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
-              options.parentMessageId = data.id
-              lastText = data.text
-              message = ''
-              return fetchChatAPIOnce()
-            }
-          }
-          catch (error) {
-            //
-          }
+
+          const splitIndexBegin = responseText.search(magicSplit)
+          if (splitIndexBegin !== -1) {
+            const splitIndexEnd = splitIndexBegin + magicSplit.length
+
+            const firstChunk = responseText.substring(0, splitIndexBegin)
+            const deltaText = responseText.substring(splitIndexEnd)
+            try {
+              const data = JSON.parse(firstChunk)
+              if (firstTime) {
+                firstTime = false
+                renderText = data.text ?? ''
+              }
+              else {
+                renderText = deltaText ?? ''
+              }
+              updateChat(
+                +uuid,
+                index,
+                {
+                  dateTime: new Date().toLocaleString(),
+                  text: renderText,
+                  inversion: false,
+                  error: false,
+                  loading: false,
+                  conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
+                  requestOptions: { prompt: message, ...options },
+                },
+              )
+
+              if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
+                options.parentMessageId = data.id
+                message = ''
+                return fetchChatAPIOnce()
+              }
+            }
+            catch (error) {
+              //
+            }
+          }
         },
       })
@@ -467,20 +488,13 @@ onUnmounted(() => {
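
The hunks above introduce a simple framing protocol for the streamed response: the first process() callback writes the whole ChatMessage as JSON followed by the magic delimiter, every later callback writes only chat.delta, and the client splits the accumulated responseText on that delimiter. The standalone TypeScript sketch below models that framing only; it is not code from this repository, and MAGIC_SPLIT, FirstChunk, encodeChunk and parseStream are illustrative names.

// Standalone sketch of the stream framing used above -- illustrative only.
// `MAGIC_SPLIT`, `FirstChunk`, `encodeChunk` and `parseStream` are made-up names.

const MAGIC_SPLIT = 't1h1i4s5i1s4a1s9i1l9l8y1s0plit'

interface FirstChunk {
  id: string
  text: string
  conversationId?: string
}

// Server side: what the patched `process` callback effectively emits.
// First write: full JSON plus the delimiter; later writes: raw delta text only.
function encodeChunk(first: FirstChunk | null, delta: string, isFirst: boolean): string {
  return isFirst && first
    ? `${JSON.stringify(first)}${MAGIC_SPLIT}`
    : delta
}

// Client side: `responseText` keeps growing, so every progress event re-reads
// the JSON prefix for metadata and takes everything after the delimiter as the
// delta text accumulated since the first write.
function parseStream(responseText: string): { meta: FirstChunk, text: string } | null {
  const splitIndex = responseText.indexOf(MAGIC_SPLIT)
  if (splitIndex === -1)
    return null // the first chunk has not fully arrived yet
  const meta = JSON.parse(responseText.substring(0, splitIndex)) as FirstChunk
  const deltas = responseText.substring(splitIndex + MAGIC_SPLIT.length)
  return { meta, text: deltas.length > 0 ? deltas : (meta.text ?? '') }
}

// Example: simulate one first chunk and two delta writes, then parse the buffer.
let buffer = encodeChunk({ id: 'msg-1', text: 'He' }, '', true)
buffer += encodeChunk(null, 'll', false)
buffer += encodeChunk(null, 'o', false)
console.log(parseStream(buffer)) // -> { meta: { id: 'msg-1', text: 'He' }, text: 'llo' }

Using indexOf in the sketch instead of String.prototype.search (which the patch uses) avoids the delimiter being interpreted as a regular expression; the patch is unaffected only because the delimiter contains no regex metacharacters.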