@@ -25,6 +25,7 @@ const ErrorCodeMessage: Record<string, string> = {
 }
 
 const timeoutMs: number = !isNaN(+process.env.TIMEOUT_MS) ? +process.env.TIMEOUT_MS : 30 * 1000
+const disableDebug: boolean = process.env.OPENAI_API_DISABLE_DEBUG === 'true'
 
 let apiModel: ApiModel
 
@@ -44,7 +45,7 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
     const options: ChatGPTAPIOptions = {
       apiKey: process.env.OPENAI_API_KEY,
       completionParams: { model },
-      debug: true,
+      debug: !disableDebug,
     }
 
     // increase max token limit if use gpt-4
@@ -72,7 +73,7 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
     const OPENAI_API_MODEL = process.env.OPENAI_API_MODEL
     const options: ChatGPTUnofficialProxyAPIOptions = {
       accessToken: process.env.OPENAI_ACCESS_TOKEN,
-      debug: true,
+      debug: !disableDebug,
     }
     if (isNotEmptyString(OPENAI_API_MODEL))
       options.model = OPENAI_API_MODEL
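With this change, debug logging stays on by default and is only disabled when the OPENAI_API_DISABLE_DEBUG environment variable equals the exact string 'true' (any other value, or leaving it unset, keeps debug: true). A minimal usage sketch, assuming the service reads its configuration from an .env file or the container environment; the file location is an assumption, not part of the diff:

  OPENAI_API_DISABLE_DEBUG=true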