diff --git a/.env.example b/.env.example
index 69cd3d2..4c5e848 100644
--- a/.env.example
+++ b/.env.example
@@ -19,3 +19,7 @@ OPENAI_API_KEY=sk-proj-xxxxxxxxxxxxxxx
 # 第三方 TTS(可选,用于调用第三方 TTS 服务)
 # TTS_BASE_URL=http://[你的局域网或公网地址]:[端口号]/[SECRET_PATH]/api
 # 比如:http://192.168.31.205:4321/xxxx/api(注意:不要使用 localhost 或 127.0.0.1)
+
+# 通义千问模型在生成文本时是否使用互联网搜索结果进行参考
+# qwen-vl系列、qwen开源系列与qwen-long模型暂时不支持配置该参数
+# QWEN_ENABLE_SEARCH=true
\ No newline at end of file
diff --git a/src/services/openai.ts b/src/services/openai.ts
index bbef8f9..0043c67 100644
--- a/src/services/openai.ts
+++ b/src/services/openai.ts
@@ -19,6 +19,7 @@ export interface ChatOptions {
   jsonMode?: boolean;
   requestId?: string;
   trace?: boolean;
+  enableSearch?: boolean;
 }
 
 class OpenAIClient {
@@ -115,6 +116,7 @@ class OpenAIClient {
       onStream,
       trace = false,
       model = this.deployment ?? kEnvs.OPENAI_MODEL ?? "gpt-4o",
+      enableSearch = kEnvs.QWEN_ENABLE_SEARCH,
     } = options;
     if (trace && this.traceInput) {
       this._logger.log(
@@ -130,6 +132,7 @@ class OpenAIClient {
       stream: true,
       messages: [...systemMsg, { role: "user", content: user }],
       response_format: jsonMode ? { type: "json_object" } : undefined,
+      ...(enableSearch && { enable_search: true })
     }).catch((e) => {
       this._logger.error("LLM 响应异常", e);
       return null;
diff --git a/src/utils/env.ts b/src/utils/env.ts
index 4a6af0c..a81e8bb 100644
--- a/src/utils/env.ts
+++ b/src/utils/env.ts
@@ -6,4 +6,8 @@ export const kEnvs: Partial<{
   OPENAI_API_KEY: string;
   AZURE_OPENAI_API_KEY: string;
   AZURE_OPENAI_DEPLOYMENT: string;
-}> = process.env as any;
+  QWEN_ENABLE_SEARCH: boolean;
+}> = {
+  ...process.env,
+  QWEN_ENABLE_SEARCH: process.env.QWEN_ENABLE_SEARCH === 'true'
+} as any;