diff --git a/service/src/chatgpt.ts b/service/src/chatgpt.ts index 07afbeb..aa80913 100644 --- a/service/src/chatgpt.ts +++ b/service/src/chatgpt.ts @@ -1,22 +1,18 @@ import * as dotenv from 'dotenv' import 'isomorphic-fetch' -import type { ChatGPTAPI, ChatMessage, SendMessageOptions } from 'chatgpt' -import { ChatGPTUnofficialProxyAPI } from 'chatgpt' +import type { ChatMessage, SendMessageOptions } from 'chatgpt' +import { ChatGPTAPI, ChatGPTUnofficialProxyAPI } from 'chatgpt' import { SocksProxyAgent } from 'socks-proxy-agent' import fetch from 'node-fetch' import { sendResponse } from './utils' +import type { ApiModel, ChatContext, ChatGPTAPIOptions, ChatGPTUnofficialProxyAPIOptions, ModelConfig } from './types' dotenv.config() -let apiModel: 'ChatGPTAPI' | 'ChatGPTUnofficialProxyAPI' | undefined - -interface ChatContext { - conversationId?: string - parentMessageId?: string -} - const timeoutMs: number = !isNaN(+process.env.TIMEOUT_MS) ? +process.env.TIMEOUT_MS : 30 * 1000 +let apiModel: ApiModel + if (!process.env.OPENAI_API_KEY && !process.env.OPENAI_ACCESS_TOKEN) throw new Error('Missing OPENAI_API_KEY or OPENAI_ACCESS_TOKEN environment variable') @@ -25,15 +21,20 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI // To use ESM in CommonJS, you can use a dynamic import (async () => { // More Info: https://github.com/transitive-bullshit/chatgpt-api - const { ChatGPTAPI } = await import('chatgpt') if (process.env.OPENAI_API_KEY) { - api = new ChatGPTAPI({ apiKey: process.env.OPENAI_API_KEY }) + const options: ChatGPTAPIOptions = { + apiKey: process.env.OPENAI_API_KEY, + debug: false, + } + + api = new ChatGPTAPI({ ...options }) apiModel = 'ChatGPTAPI' } else { - const options = { - debug: true, + const options: ChatGPTUnofficialProxyAPIOptions = { + accessToken: process.env.OPENAI_ACCESS_TOKEN, + debug: false, } if (process.env.SOCKS_PROXY_HOST && process.env.SOCKS_PROXY_PORT) { @@ -41,16 +42,13 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI hostname: 
process.env.SOCKS_PROXY_HOST, port: process.env.SOCKS_PROXY_PORT, }) - globalThis.console.log(`Using socks proxy: ${process.env.SOCKS_PROXY_HOST}:${process.env.SOCKS_PROXY_PORT}`) options.fetch = (url, options) => { return fetch(url, { agent, ...options }) } } - if (process.env.API_REVERSE_PROXY) { + if (process.env.API_REVERSE_PROXY) options.apiReverseProxyUrl = process.env.API_REVERSE_PROXY - globalThis.console.log(`Using api reverse proxy: ${process.env.API_REVERSE_PROXY}`) - } api = new ChatGPTUnofficialProxyAPI({ accessToken: process.env.OPENAI_ACCESS_TOKEN, @@ -82,7 +80,6 @@ async function chatReply( } } -/** 实验性质的函数,用于处理聊天过程中的中间结果 */ async function chatReplyProcess( message: string, lastContext?: { conversationId?: string; parentMessageId?: string }, @@ -119,7 +116,7 @@ async function chatConfig() { reverseProxy: process.env.API_REVERSE_PROXY, timeoutMs, socksProxy: (process.env.SOCKS_PROXY_HOST && process.env.SOCKS_PROXY_PORT) ? (`${process.env.SOCKS_PROXY_HOST}:${process.env.SOCKS_PROXY_PORT}`) : '-', - }, + } as ModelConfig, }) } diff --git a/service/src/index.ts b/service/src/index.ts index 5aae25d..20d2c4f 100644 --- a/service/src/index.ts +++ b/service/src/index.ts @@ -26,7 +26,6 @@ router.post('/chat', async (req, res) => { } }) -/** 实验性质的函数,用于处理聊天过程中的中间结果 */ router.post('/chat-process', async (req, res) => { res.setHeader('Content-type', 'application/octet-stream') diff --git a/service/src/types.ts b/service/src/types.ts new file mode 100644 index 0000000..9e4ef41 --- /dev/null +++ b/service/src/types.ts @@ -0,0 +1,30 @@ +import type { FetchFn, openai } from 'chatgpt' + +export interface ChatContext { + conversationId?: string + parentMessageId?: string +} + +export interface ChatGPTAPIOptions { + apiKey: string + debug?: boolean + completionParams?: Partial<openai.CompletionParams> +} + +export interface ChatGPTUnofficialProxyAPIOptions { + accessToken: string + apiReverseProxyUrl?: string + model?: string + debug?: boolean + headers?: Record<string, string> + fetch?: FetchFn +} + +export
interface ModelConfig { + apiModel?: ApiModel + reverseProxy?: string + timeoutMs?: number + socksProxy?: string +} + +export type ApiModel = 'ChatGPTAPI' | 'ChatGPTUnofficialProxyAPI' | undefined diff --git a/src/api/index.ts b/src/api/index.ts index 024a9c0..7a5c419 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -19,7 +19,6 @@ export function fetchChatConfig() { }) } -/** 实验性质的函数,用于处理聊天过程中的中间结果 */ export function fetchChatAPIProcess( params: { prompt: string diff --git a/src/views/chat/components/Message/Text.vue b/src/views/chat/components/Message/Text.vue index 41b1901..c3ea7b4 100644 --- a/src/views/chat/components/Message/Text.vue +++ b/src/views/chat/components/Message/Text.vue @@ -2,9 +2,19 @@ import { computed } from 'vue' import { marked } from 'marked' import hljs from 'highlight.js' +import { useBasicLayout } from '@/hooks/useBasicLayout' + +interface Props { + inversion?: boolean + error?: boolean + text?: string + loading?: boolean +} const props = defineProps<Props>() +const { isMobile } = useBasicLayout() + marked.setOptions({ renderer: new marked.Renderer(), highlight(code) { @@ -12,19 +22,12 @@ marked.setOptions({ }, }) -interface Props { - inversion?: boolean - error?: boolean - text?: string - loading?: boolean -} - const wrapClass = computed(() => { return [ 'text-wrap', - 'p-3', 'min-w-[20px]', 'rounded-md', + isMobile.value ? 'p-2' : 'p-3', props.inversion ? 'bg-[#d2f9d1]' : 'bg-[#f4f6f8]', props.inversion ? 'dark:bg-[#a1dc95]' : 'dark:bg-[#1e1e20]', { 'text-red-500': props.error }, diff --git a/src/views/chat/components/Message/index.vue b/src/views/chat/components/Message/index.vue index bbd9a5e..1b45d97 100644 --- a/src/views/chat/components/Message/index.vue +++ b/src/views/chat/components/Message/index.vue @@ -13,7 +13,6 @@ interface Props { interface Emit { (ev: 'regenerate'): void - (ev: 'copy'): void (ev: 'delete'): void } @@ -33,15 +32,15 @@ function handleRegenerate() {