feat: add stream mode for openai chat response

WJG 2024-02-24 23:06:31 +08:00
parent b2c96e808b
commit 02a3203c03
GPG Key ID: 258474EF8590014A
7 changed files with 142 additions and 21 deletions
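
In short: the plain openai.chat() helper becomes an OpenAIClient class that keeps chat(), adds chatStream() and abort(requestId), routes requests through kProxyAgent (new https-proxy-agent dependency), and bumps the openai SDK from 4.25.0 to 4.28.0. A minimal usage sketch of the new streaming call, mirroring tests/openai.ts below (the import path and prompt are illustrative, not part of the commit):

import { openai } from "./src/services/openai";

async function demo() {
  // onStream fires once per streamed delta; the resolved value is the full
  // concatenated text, or undefined if the request failed or was aborted.
  const reply = await openai.chatStream({
    user: "Why is the Earth round?",
    onStream: (text) => process.stdout.write(text),
  });
  console.log("\nFinal result:\n", reply);
}

demo();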


@@ -1,3 +1,4 @@
- ❌ Auto mute XiaoAi reply
- Stream response
- Stream response
- Update long/short memories
- Deactivate Xiaoai


@@ -27,8 +27,9 @@
"dependencies": {
"@prisma/client": "^5.8.1",
"axios": "^1.6.5",
"https-proxy-agent": "^7.0.4",
"mi-service-lite": "^2.0.0",
"openai": "^4.25.0",
"openai": "^4.28.0",
"prisma": "^5.8.1"
},
"devDependencies": {


@@ -1,6 +1,11 @@
import axios, { AxiosRequestConfig, CreateAxiosDefaults } from "axios";
import { HttpsProxyAgent } from "https-proxy-agent";
import { isNotEmpty } from "../utils/is";
export const kProxyAgent = new HttpsProxyAgent(
process.env.HTTP_PROXY ?? "http://127.0.0.1:7890"
);
const _baseConfig: CreateAxiosDefaults = {
timeout: 10 * 1000,
headers: {


@@ -5,32 +5,98 @@ import {
} from "openai/resources";
import { kEnvs } from "../utils/env";
import { kProxyAgent } from "./http";
const client = new OpenAI({
apiKey: kEnvs.OPENAI_API_KEY!,
});
class OpenAIClient {
private _client = new OpenAI({
httpAgent: kProxyAgent,
apiKey: kEnvs.OPENAI_API_KEY!,
});
private _abortCallbacks: Record<string, VoidFunction> = {
// requestId: abortStreamCallback
};
abort(requestId: string) {
if (this._abortCallbacks[requestId]) {
this._abortCallbacks[requestId]();
delete this._abortCallbacks[requestId];
}
}
export const openai = {
async chat(options: {
user: string;
system?: string;
tools?: Array<ChatCompletionTool>;
jsonMode?: boolean;
}) {
const systemMsg: ChatCompletionMessageParam[] = options.system
? [{ role: "system", content: options.system }]
const { user, system, tools, jsonMode } = options;
const systemMsg: ChatCompletionMessageParam[] = system
? [{ role: "system", content: system }]
: [];
const chatCompletion = await client.chat.completions
const chatCompletion = await this._client.chat.completions
.create({
tools: options.tools,
messages: [...systemMsg, { role: "user", content: options.user }],
model: kEnvs.OPENAI_MODEL ?? "gpt-3.5-turbo-1106",
response_format: options.jsonMode ? { type: "json_object" } : undefined,
tools,
messages: [...systemMsg, { role: "user", content: user }],
model: kEnvs.OPENAI_MODEL ?? "gpt-3.5-turbo-0125",
response_format: jsonMode ? { type: "json_object" } : undefined,
})
.catch((e) => {
console.error("❌ openai chat failed", e);
return null;
});
return chatCompletion?.choices?.[0]?.message;
},
};
}
async chatStream(options: {
user: string;
system?: string;
tools?: Array<ChatCompletionTool>;
jsonMode?: boolean;
requestId?: string;
onStream?: (text: string) => void;
}) {
const { user, system, tools, jsonMode, onStream, requestId } = options;
const systemMsg: ChatCompletionMessageParam[] = system
? [{ role: "system", content: system }]
: [];
const stream = await this._client.chat.completions
.create({
tools,
stream: true,
messages: [...systemMsg, { role: "user", content: user }],
model: kEnvs.OPENAI_MODEL ?? "gpt-3.5-turbo-0125",
response_format: jsonMode ? { type: "json_object" } : undefined,
})
.catch((e) => {
console.error("❌ openai chat failed", e);
return null;
});
if (!stream) {
return;
}
if (requestId) {
this._abortCallbacks[requestId] = () => stream.controller.abort();
}
let content = "";
try {
for await (const chunk of stream) {
const text = chunk.choices[0]?.delta?.content || "";
const aborted =
requestId && !Object.keys(this._abortCallbacks).includes(requestId);
if (aborted) {
return undefined;
}
if (text) {
onStream?.(text);
content += text;
}
}
} catch {
return undefined;
}
return content;
}
}
export const openai = new OpenAIClient();
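
For orientation (not part of the diff): abort(requestId) invokes the registered callback, which calls stream.controller.abort(), and removes it from _abortCallbacks, so the for-await loop above exits and chatStream resolves to undefined. A hedged sketch of driving that from the caller's side, assuming the exported openai singleton; the timeout and import path are illustrative:

import { randomUUID } from "crypto";
import { openai } from "./src/services/openai";

async function abortableAsk(question: string) {
  const requestId = randomUUID();
  // Cancel the stream if it is still running after 10 seconds.
  const timer = setTimeout(() => openai.abort(requestId), 10 * 1000);
  const reply = await openai.chatStream({
    requestId,
    user: question,
    onStream: (text) => process.stdout.write(text),
  });
  clearTimeout(timer);
  return reply; // full text, or undefined when aborted or on error
}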


@@ -4,13 +4,15 @@ import { kBannerASCII } from "../src/utils/string";
import { runWithDB } from "../src/services/db";
import { testDB } from "./db";
import { testSpeaker } from "./speaker";
import { testOpenAI } from "./openai";
dotenv.config();
async function main() {
println(kBannerASCII);
// testDB();
testSpeaker();
// testSpeaker();
testOpenAI();
}
runWithDB(main);

tests/openai.ts Normal file

@@ -0,0 +1,31 @@
import { randomUUID } from "crypto";
import { openai } from "../src/services/openai";
export async function testOpenAI() {
await testStreamChat();
}
async function testStreamChat() {
const requestId = randomUUID();
const res = await openai.chatStream({
requestId,
user: "地球为什么是圆的?",
onStream: (text) => {
console.log(text);
},
});
console.log("\nFinal result:\n", res);
}
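// Note: testAbortStreamChat below is defined but not invoked from testOpenAI();
// call it manually to exercise openai.abort() mid-stream.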
async function testAbortStreamChat() {
const requestId = randomUUID();
const res = await openai.chatStream({
requestId,
user: "hello!",
onStream: (text) => {
console.log(text);
openai.abort(requestId);
},
});
console.log("xxx", res);
}


@@ -379,6 +379,13 @@ acorn@^8.4.1:
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a"
integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==
agent-base@^7.0.2:
version "7.1.0"
resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-7.1.0.tgz#536802b76bc0b34aa50195eb2442276d613e3434"
integrity sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg==
dependencies:
debug "^4.3.4"
agentkeepalive@^4.2.1:
version "4.5.0"
resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.5.0.tgz#2673ad1389b3c418c5a20c5d7364f93ca04be923"
@@ -549,7 +556,7 @@ crypt@0.0.2:
resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==
debug@^4.3.1:
debug@4, debug@^4.3.1, debug@^4.3.4:
version "4.3.4"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
@@ -750,6 +757,14 @@ globby@^11.0.3:
merge2 "^1.4.1"
slash "^3.0.0"
https-proxy-agent@^7.0.4:
version "7.0.4"
resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz#8e97b841a029ad8ddc8731f26595bad868cb4168"
integrity sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==
dependencies:
agent-base "^7.0.2"
debug "4"
human-signals@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0"
@@ -974,10 +989,10 @@ onetime@^5.1.2:
dependencies:
mimic-fn "^2.1.0"
openai@^4.25.0:
version "4.25.0"
resolved "https://registry.yarnpkg.com/openai/-/openai-4.25.0.tgz#b40099d625cccb19cbf1cab88915ac1699ece0ed"
integrity sha512-qLMFOizjxKuDfQkBrczZPYo6XVL4bdcuz9MR11Q+M91kGcs8dQw+O90nRcC+qWuhaGphQkfXQJMn4cd7Yew3Kg==
openai@^4.28.0:
version "4.28.0"
resolved "https://registry.yarnpkg.com/openai/-/openai-4.28.0.tgz#ded00e3d98c25758b5406c9675ec27a957e00930"
integrity sha512-JM8fhcpmpGN0vrUwGquYIzdcEQHtFuom6sRCbbCM6CfzZXNuRk33G7KfeRAIfnaCxSpzrP5iHtwJzIm6biUZ2Q==
dependencies:
"@types/node" "^18.11.18"
"@types/node-fetch" "^2.6.4"