From 02a3203c037f4f7457e3e666ff894339a5d496a5 Mon Sep 17 00:00:00 2001
From: WJG
Date: Sat, 24 Feb 2024 23:06:31 +0800
Subject: [PATCH] feat: add stream mode for openai chat response

---
 TODO.md                |  3 +-
 package.json           |  3 +-
 src/services/http.ts   |  5 +++
 src/services/openai.ts | 92 ++++++++++++++++++++++++++++++++++++------
 tests/index.ts         |  4 +-
 tests/openai.ts        | 31 ++++++++++++++
 yarn.lock              | 25 +++++++++---
 7 files changed, 142 insertions(+), 21 deletions(-)
 create mode 100644 tests/openai.ts

diff --git a/TODO.md b/TODO.md
index b4032c8..f2fc282 100644
--- a/TODO.md
+++ b/TODO.md
@@ -1,3 +1,4 @@
 - ❌ Auto mute XiaoAi reply
-- Stream response
+- ✅ Stream response
 - Update long/short memories
+- Deactivate Xiaoai

diff --git a/package.json b/package.json
index df14987..106a901 100644
--- a/package.json
+++ b/package.json
@@ -27,8 +27,9 @@
   "dependencies": {
     "@prisma/client": "^5.8.1",
     "axios": "^1.6.5",
+    "https-proxy-agent": "^7.0.4",
     "mi-service-lite": "^2.0.0",
-    "openai": "^4.25.0",
+    "openai": "^4.28.0",
     "prisma": "^5.8.1"
   },
   "devDependencies": {

diff --git a/src/services/http.ts b/src/services/http.ts
index bd7f99f..a265c25 100644
--- a/src/services/http.ts
+++ b/src/services/http.ts
@@ -1,6 +1,11 @@
 import axios, { AxiosRequestConfig, CreateAxiosDefaults } from "axios";
+import { HttpsProxyAgent } from "https-proxy-agent";
 import { isNotEmpty } from "../utils/is";
 
+export const kProxyAgent = new HttpsProxyAgent(
+  process.env.HTTP_PROXY ?? "http://127.0.0.1:7890"
+);
+
 const _baseConfig: CreateAxiosDefaults = {
   timeout: 10 * 1000,
   headers: {

diff --git a/src/services/openai.ts b/src/services/openai.ts
index 5a807f0..033f103 100644
--- a/src/services/openai.ts
+++ b/src/services/openai.ts
@@ -5,32 +5,98 @@ import {
 } from "openai/resources";
 
 import { kEnvs } from "../utils/env";
+import { kProxyAgent } from "./http";
 
-const client = new OpenAI({
-  apiKey: kEnvs.OPENAI_API_KEY!,
-});
+class OpenAIClient {
+  private _client = new OpenAI({
+    httpAgent: kProxyAgent,
+    apiKey: kEnvs.OPENAI_API_KEY!,
+  });
+
+  private _abortCallbacks: Record<string, () => void> = {
+    // requestId: abortStreamCallback
+  };
+
+  abort(requestId: string) {
+    if (this._abortCallbacks[requestId]) {
+      this._abortCallbacks[requestId]();
+      delete this._abortCallbacks[requestId];
+    }
+  }
 
-export const openai = {
   async chat(options: {
     user: string;
     system?: string;
     tools?: Array<ChatCompletionTool>;
     jsonMode?: boolean;
   }) {
-    const systemMsg: ChatCompletionMessageParam[] = options.system
-      ? [{ role: "system", content: options.system }]
+    const { user, system, tools, jsonMode } = options;
+    const systemMsg: ChatCompletionMessageParam[] = system
+      ? [{ role: "system", content: system }]
       : [];
-    const chatCompletion = await client.chat.completions
+    const chatCompletion = await this._client.chat.completions
       .create({
-        tools: options.tools,
-        messages: [...systemMsg, { role: "user", content: options.user }],
-        model: kEnvs.OPENAI_MODEL ?? "gpt-3.5-turbo-1106",
-        response_format: options.jsonMode ? { type: "json_object" } : undefined,
+        tools,
+        messages: [...systemMsg, { role: "user", content: user }],
+        model: kEnvs.OPENAI_MODEL ?? "gpt-3.5-turbo-0125",
+        response_format: jsonMode ? { type: "json_object" } : undefined,
       })
       .catch((e) => {
         console.error("❌ openai chat failed", e);
         return null;
       });
     return chatCompletion?.choices?.[0]?.message;
-  },
-};
+  }
+
+  async chatStream(options: {
+    user: string;
+    system?: string;
+    tools?: Array<ChatCompletionTool>;
+    jsonMode?: boolean;
+    requestId?: string;
+    onStream?: (text: string) => void;
+  }) {
+    const { user, system, tools, jsonMode, onStream, requestId } = options;
+    const systemMsg: ChatCompletionMessageParam[] = system
+      ? [{ role: "system", content: system }]
+      : [];
+    const stream = await this._client.chat.completions
+      .create({
+        tools,
+        stream: true,
+        messages: [...systemMsg, { role: "user", content: user }],
+        model: kEnvs.OPENAI_MODEL ?? "gpt-3.5-turbo-0125",
+        response_format: jsonMode ? { type: "json_object" } : undefined,
+      })
+      .catch((e) => {
+        console.error("❌ openai chat failed", e);
+        return null;
+      });
+    if (!stream) {
+      return;
+    }
+    if (requestId) {
+      this._abortCallbacks[requestId] = () => stream.controller.abort();
+    }
+    let content = "";
+    try {
+      for await (const chunk of stream) {
+        const text = chunk.choices[0]?.delta?.content || "";
+        const aborted =
+          requestId && !Object.keys(this._abortCallbacks).includes(requestId);
+        if (aborted) {
+          return undefined;
+        }
+        if (text) {
+          onStream?.(text);
+          content += text;
+        }
+      }
+    } catch {
+      return undefined;
+    }
+    return content;
+  }
+}
+
+export const openai = new OpenAIClient();

diff --git a/tests/index.ts b/tests/index.ts
index a8029e1..eaf024a 100644
--- a/tests/index.ts
+++ b/tests/index.ts
@@ -4,13 +4,15 @@ import { kBannerASCII } from "../src/utils/string";
 import { runWithDB } from "../src/services/db";
 import { testDB } from "./db";
 import { testSpeaker } from "./speaker";
+import { testOpenAI } from "./openai";
 
 dotenv.config();
 
 async function main() {
   println(kBannerASCII);
   // testDB();
-  testSpeaker();
+  // testSpeaker();
+  testOpenAI();
 }
 
 runWithDB(main);

diff --git a/tests/openai.ts b/tests/openai.ts
new file mode 100644
index 0000000..6923d7d
--- /dev/null
+++ b/tests/openai.ts
@@ -0,0 +1,31 @@
+import { randomUUID } from "crypto";
+import { openai } from "../src/services/openai";
+
+export async function testOpenAI() {
+  await testStreamChat();
+}
+
+async function testStreamChat() {
+  const requestId = randomUUID();
+  const res = await openai.chatStream({
+    requestId,
+    user: "地球为什么是圆的?",
+    onStream: (text) => {
+      console.log(text);
+    },
+  });
+  console.log("\nFinal result:\n", res);
+}
+
+async function testAbortStreamChat() {
+  const requestId = randomUUID();
+  const res = await openai.chatStream({
+    requestId,
+    user: "hello!",
+    onStream: (text) => {
+      console.log(text);
+      openai.abort(requestId);
+    },
+  });
+  console.log("xxx", res);
+}

diff --git a/yarn.lock b/yarn.lock
index b927a33..c1fbcdf 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -379,6 +379,13 @@ acorn@^8.4.1:
   resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a"
   integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==
 
+agent-base@^7.0.2:
+  version "7.1.0"
+  resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-7.1.0.tgz#536802b76bc0b34aa50195eb2442276d613e3434"
+  integrity sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg==
+  dependencies:
+    debug "^4.3.4"
+
 agentkeepalive@^4.2.1:
   version "4.5.0"
   resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.5.0.tgz#2673ad1389b3c418c5a20c5d7364f93ca04be923"
@@ -549,7 +556,7 @@ crypt@0.0.2:
   resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
   integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==
 
-debug@^4.3.1:
+debug@4, debug@^4.3.1, debug@^4.3.4:
   version "4.3.4"
   resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
   integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
@@ -750,6 +757,14 @@ globby@^11.0.3:
     merge2 "^1.4.1"
     slash "^3.0.0"
 
+https-proxy-agent@^7.0.4:
+  version "7.0.4"
+  resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz#8e97b841a029ad8ddc8731f26595bad868cb4168"
+  integrity sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==
+  dependencies:
+    agent-base "^7.0.2"
+    debug "4"
+
 human-signals@^2.1.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0"
@@ -974,10 +989,10 @@ onetime@^5.1.2:
   dependencies:
     mimic-fn "^2.1.0"
 
-openai@^4.25.0:
-  version "4.25.0"
-  resolved "https://registry.yarnpkg.com/openai/-/openai-4.25.0.tgz#b40099d625cccb19cbf1cab88915ac1699ece0ed"
-  integrity sha512-qLMFOizjxKuDfQkBrczZPYo6XVL4bdcuz9MR11Q+M91kGcs8dQw+O90nRcC+qWuhaGphQkfXQJMn4cd7Yew3Kg==
+openai@^4.28.0:
+  version "4.28.0"
+  resolved "https://registry.yarnpkg.com/openai/-/openai-4.28.0.tgz#ded00e3d98c25758b5406c9675ec27a957e00930"
+  integrity sha512-JM8fhcpmpGN0vrUwGquYIzdcEQHtFuom6sRCbbCM6CfzZXNuRk33G7KfeRAIfnaCxSpzrP5iHtwJzIm6biUZ2Q==
   dependencies:
     "@types/node" "^18.11.18"
     "@types/node-fetch" "^2.6.4"