From 6ecc61ac5db7564e427f3fdc57c847cc6d6e5fcf Mon Sep 17 00:00:00 2001
From: quzard <1191890118@qq.com>
Date: Wed, 22 Mar 2023 17:47:07 +0800
Subject: [PATCH] feat: add role setting and reserved API settings page (#768)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* add systemMessage

* perf: optimize code and types

* perf: fill in missing translations and prepare for future work

---------

Co-authored-by: ChenZhaoYu <790348264@qq.com>
---
 service/src/chatgpt/index.ts               | 25 ++++++------
 service/src/chatgpt/types.ts               |  8 ++++
 service/src/index.ts                       | 16 +++++---
 service/src/types.ts                       |  6 +++
 src/api/index.ts                           |  5 ++-
 src/components/common/Setting/Advanced.vue | 46 ++++++++++++++++++++++
 src/components/common/Setting/General.vue  |  1 -
 src/components/common/Setting/index.vue    | 23 +++++++++--
 src/locales/en-US.ts                       |  2 +
 src/locales/zh-CN.ts                       |  2 +
 src/locales/zh-TW.ts                       |  2 +
 src/store/modules/index.ts                 |  1 +
 src/store/modules/settings/helper.ts       | 23 +++++++++++
 src/store/modules/settings/index.ts        | 22 +++++++++++
 14 files changed, 160 insertions(+), 22 deletions(-)
 create mode 100644 service/src/chatgpt/types.ts
 create mode 100644 src/components/common/Setting/Advanced.vue
 create mode 100644 src/store/modules/settings/helper.ts
 create mode 100644 src/store/modules/settings/index.ts
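
For orientation: the patch threads an optional system prompt (the "role") from a new client-side settings store through the web API into the ChatGPT service wrapper. A minimal sketch of the POST /chat-process body after this change; field names come from the RequestProps interface added further down, and the values are illustrative only:

// Illustrative request body for POST /chat-process after this patch;
// the shape mirrors RequestProps in service/src/types.ts.
const body = {
  prompt: 'Hello there',
  options: { parentMessageId: 'chatcmpl-previous-id' }, // optional conversation context
  systemMessage: 'You are a helpful assistant.', // new: read from the settings store
}
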
diff --git a/service/src/chatgpt/index.ts b/service/src/chatgpt/index.ts
index c0994ee..772e4df 100644
--- a/service/src/chatgpt/index.ts
+++ b/service/src/chatgpt/index.ts
@@ -9,6 +9,9 @@ import axios from 'axios'
 import { sendResponse } from '../utils'
 import { isNotEmptyString } from '../utils/is'
 import type { ApiModel, ChatContext, ChatGPTUnofficialProxyAPIOptions, ModelConfig } from '../types'
+import type { RequestOptions } from './types'
+
+dotenv.config()
 
 const ErrorCodeMessage: Record<string, string> = {
   401: '[OpenAI] 提供错误的API密钥 | Incorrect API key provided',
@@ -19,13 +22,11 @@ const ErrorCodeMessage: Record<string, string> = {
   500: '[OpenAI] 服务器繁忙,请稍后再试 | Internal Server Error',
 }
 
-dotenv.config()
-
 const timeoutMs: number = !isNaN(+process.env.TIMEOUT_MS) ? +process.env.TIMEOUT_MS : 30 * 1000
 
 let apiModel: ApiModel
 
-if (!process.env.OPENAI_API_KEY && !process.env.OPENAI_ACCESS_TOKEN)
+if (!isNotEmptyString(process.env.OPENAI_API_KEY) && !isNotEmptyString(process.env.OPENAI_ACCESS_TOKEN))
   throw new Error('Missing OPENAI_API_KEY or OPENAI_ACCESS_TOKEN environment variable')
 
 let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
@@ -33,7 +34,7 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
 
 (async () => {
   // More Info: https://github.com/transitive-bullshit/chatgpt-api
 
-  if (process.env.OPENAI_API_KEY) {
+  if (isNotEmptyString(process.env.OPENAI_API_KEY)) {
     const OPENAI_API_MODEL = process.env.OPENAI_API_MODEL
     const model = isNotEmptyString(OPENAI_API_MODEL) ? OPENAI_API_MODEL : 'gpt-3.5-turbo'
@@ -67,17 +68,19 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
   }
 })()
 
-async function chatReplyProcess(
-  message: string,
-  lastContext?: { conversationId?: string; parentMessageId?: string },
-  process?: (chat: ChatMessage) => void,
-) {
+async function chatReplyProcess(options: RequestOptions) {
+  const { message, lastContext, process, systemMessage } = options
   try {
     let options: SendMessageOptions = { timeoutMs }
 
-    if (lastContext) {
+    if (apiModel === 'ChatGPTAPI') {
+      if (isNotEmptyString(systemMessage))
+        options.systemMessage = systemMessage
+    }
+
+    if (lastContext != null) {
       if (apiModel === 'ChatGPTAPI')
-        options = { parentMessageId: lastContext.parentMessageId }
+        options.parentMessageId = lastContext.parentMessageId
       else
         options = { ...lastContext }
     }
diff --git a/service/src/chatgpt/types.ts b/service/src/chatgpt/types.ts
new file mode 100644
index 0000000..1e65f4c
--- /dev/null
+++ b/service/src/chatgpt/types.ts
@@ -0,0 +1,8 @@
+import type { ChatMessage } from 'chatgpt'
+
+export interface RequestOptions {
+  message: string
+  lastContext?: { conversationId?: string; parentMessageId?: string }
+  process?: (chat: ChatMessage) => void
+  systemMessage?: string
+}
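
chatReplyProcess now takes a single RequestOptions object instead of positional arguments, and it only forwards systemMessage when the service runs in ChatGPTAPI mode. A hedged sketch of a call site under the new signature; the only real caller is the /chat-process route below, this one is illustrative:

import type { ChatMessage } from 'chatgpt'
import { chatReplyProcess } from './chatgpt' // same import path used by service/src/index.ts

// Illustrative call with the new options-object signature.
async function demo() {
  await chatReplyProcess({
    message: 'Summarize the last answer in one sentence',
    systemMessage: 'You are a concise assistant.', // applied only when apiModel === 'ChatGPTAPI'
    lastContext: { parentMessageId: 'chatcmpl-previous-id' }, // optional conversation context
    process: (chat: ChatMessage) => {
      // Streaming callback, invoked once per partial ChatMessage.
      console.log(chat.text)
    },
  })
}
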
diff --git a/service/src/index.ts b/service/src/index.ts
index a201b86..b011039 100644
--- a/service/src/index.ts
+++ b/service/src/index.ts
@@ -1,5 +1,6 @@
 import express from 'express'
-import type { ChatContext, ChatMessage } from './chatgpt'
+import type { RequestProps } from './types'
+import type { ChatMessage } from './chatgpt'
 import { chatConfig, chatReplyProcess, currentModel } from './chatgpt'
 import { auth } from './middleware/auth'
 import { limiter } from './middleware/limiter'
@@ -22,11 +23,16 @@ router.post('/chat-process', [auth, limiter], async (req, res) => {
   res.setHeader('Content-type', 'application/octet-stream')
 
   try {
-    const { prompt, options = {} } = req.body as { prompt: string; options?: ChatContext }
+    const { prompt, options = {}, systemMessage } = req.body as RequestProps
     let firstChunk = true
-    await chatReplyProcess(prompt, options, (chat: ChatMessage) => {
-      res.write(firstChunk ? JSON.stringify(chat) : `\n${JSON.stringify(chat)}`)
-      firstChunk = false
+    await chatReplyProcess({
+      message: prompt,
+      lastContext: options,
+      process: (chat: ChatMessage) => {
+        res.write(firstChunk ? JSON.stringify(chat) : `\n${JSON.stringify(chat)}`)
+        firstChunk = false
+      },
+      systemMessage,
     })
   }
   catch (error) {
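
The route writes one JSON-encoded ChatMessage per chunk and prefixes every chunk after the first with a newline, so the accumulated response body is newline-delimited JSON. A small helper a client could use to pick the latest message out of that buffer (a sketch; the actual front-end consumer is outside this diff):

import type { ChatMessage } from 'chatgpt'

// Parse the newline-delimited JSON produced by the /chat-process route and
// return the most recent ChatMessage state, or undefined for an empty buffer.
function latestChunk(responseText: string): ChatMessage | undefined {
  const lastLine = responseText.trim().split('\n').pop()
  return lastLine ? JSON.parse(lastLine) as ChatMessage : undefined
}
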
diff --git a/service/src/types.ts b/service/src/types.ts
index 12c8b04..fdbf68c 100644
--- a/service/src/types.ts
+++ b/service/src/types.ts
@@ -1,5 +1,11 @@
 import type { FetchFn } from 'chatgpt'
 
+export interface RequestProps {
+  prompt: string
+  options?: ChatContext
+  systemMessage: string
+}
+
 export interface ChatContext {
   conversationId?: string
   parentMessageId?: string
diff --git a/src/api/index.ts b/src/api/index.ts
index e576ab1..9f61fc5 100644
--- a/src/api/index.ts
+++ b/src/api/index.ts
@@ -1,5 +1,6 @@
 import type { AxiosProgressEvent, GenericAbortSignal } from 'axios'
 import { post } from '@/utils/request'
+import { useSettingStore } from '@/store'
 
 export function fetchChatAPI<T = any>(
   prompt: string,
@@ -26,9 +27,11 @@ export function fetchChatAPIProcess<T = any>(
     signal?: GenericAbortSignal
     onDownloadProgress?: (progressEvent: AxiosProgressEvent) => void },
 ) {
+  const settingStore = useSettingStore()
+
   return post<T>({
     url: '/chat-process',
-    data: { prompt: params.prompt, options: params.options },
+    data: { prompt: params.prompt, options: params.options, systemMessage: settingStore.systemMessage },
     signal: params.signal,
     onDownloadProgress: params.onDownloadProgress,
   })
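
Because fetchChatAPIProcess now reads systemMessage from the settings store itself, existing call sites pick up the configured role without changes. An illustrative call; the progress handling shown assumes axios with the browser XHR adapter and is not part of this diff:

import type { AxiosProgressEvent } from 'axios'
import { fetchChatAPIProcess } from '@/api'

// Illustrative only: systemMessage is appended inside fetchChatAPIProcess.
async function sendDemo() {
  await fetchChatAPIProcess<{ id: string; text: string }>({
    prompt: 'Hello there',
    options: { parentMessageId: 'chatcmpl-previous-id' },
    onDownloadProgress: (progressEvent: AxiosProgressEvent) => {
      // With the XHR adapter, the native event target is the XMLHttpRequest,
      // whose responseText accumulates the newline-delimited chunks.
      const xhr = progressEvent.event?.target as XMLHttpRequest | undefined
      if (xhr)
        console.log(xhr.responseText.trim().split('\n').pop())
    },
  })
}
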
diff --git a/src/components/common/Setting/Advanced.vue b/src/components/common/Setting/Advanced.vue
new file mode 100644
index 0000000..62c5851
--- /dev/null
+++ b/src/components/common/Setting/Advanced.vue
@@ -0,0 +1,46 @@
[46 added lines of Vue SFC markup not preserved in this copy of the patch]
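
The 46 added lines of Advanced.vue are not preserved in this copy of the patch. Judging from the rest of the diff, the pane edits settingStore.systemMessage; a hedged sketch of what its script section could look like, assuming a Pinia store, with every name other than useSettingStore and systemMessage being an assumption rather than the actual file:

// Hypothetical sketch, not the actual Advanced.vue shipped in this patch.
import { ref } from 'vue'
import { useSettingStore } from '@/store'

const settingStore = useSettingStore()

// Local draft of the system prompt, written back to the store on save.
const systemMessage = ref<string>(settingStore.systemMessage ?? '')

function handleSave() {
  // $patch is the generic Pinia way to write partial state back to the store.
  settingStore.$patch({ systemMessage: systemMessage.value })
}
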
diff --git a/src/components/common/Setting/General.vue b/src/components/common/Setting/General.vue
index bb9514a..d6383b9 100644
--- a/src/components/common/Setting/General.vue
+++ b/src/components/common/Setting/General.vue
@@ -150,7 +150,6 @@ function handleImportButtonClick(): void {
[hunk body largely not preserved in this copy: 6 context lines and 1 removed template line; surviving text includes {{ $t('common.save') }}]
diff --git a/src/components/common/Setting/index.vue b/src/components/common/Setting/index.vue
--- a/src/components/common/Setting/index.vue
+++ b/src/components/common/Setting/index.vue
[index line and first hunk header not preserved in this copy; surviving script changes follow]
-const props = defineProps<Props>()
-
-const emit = defineEmits<Emit>()
-
 interface Props {
   visible: boolean
 }
@@ -17,6 +15,14 @@ interface Emit {
   (e: 'update:visible', visible: boolean): void
 }
 
+const props = defineProps<Props>()
+
+const emit = defineEmits<Emit>()
+
+const authStore = useAuthStore()
+
+const isChatGPTAPI = computed(() => !!authStore.isChatGPTAPI)
+
 const active = ref('General')
 
 const show = computed({
@@ -42,6 +48,15 @@
[hunk body not preserved in this copy: 6 context lines and 9 added template lines]
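
The diffs for the locale files and for src/store/modules/settings/* continue past this excerpt. Since the code above only relies on settingStore.systemMessage, here is a hedged sketch of a Pinia settings store exposing that field; the store id, default value, and action name are assumptions, not the patch's actual content:

import { defineStore } from 'pinia'

// Hypothetical sketch of the settings store backing useSettingStore().
export interface SettingsState {
  systemMessage: string
}

export const useSettingStore = defineStore('setting-store', {
  state: (): SettingsState => ({
    systemMessage: 'You are a helpful assistant.', // assumed default
  }),

  actions: {
    updateSetting(settings: Partial<SettingsState>) {
      // Merge a partial update into the current state; persistence omitted.
      this.$patch(settings)
    },
  },
})
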