feat: add stream mode for openai chat response
commit 02a3203c03 (parent b2c96e808b)
TODO.md (3 lines changed)

@@ -1,3 +1,4 @@
 - ❌ Auto mute XiaoAi reply
-- Stream response
+- ✅ Stream response
 - Update long/short memories
+- Deactivate Xiaoai
package.json

@@ -27,8 +27,9 @@
   "dependencies": {
     "@prisma/client": "^5.8.1",
     "axios": "^1.6.5",
+    "https-proxy-agent": "^7.0.4",
     "mi-service-lite": "^2.0.0",
-    "openai": "^4.25.0",
+    "openai": "^4.28.0",
     "prisma": "^5.8.1"
   },
   "devDependencies": {
src/services/http.ts

@@ -1,6 +1,11 @@
 import axios, { AxiosRequestConfig, CreateAxiosDefaults } from "axios";
+import { HttpsProxyAgent } from "https-proxy-agent";
 import { isNotEmpty } from "../utils/is";
 
+export const kProxyAgent = new HttpsProxyAgent(
+  process.env.HTTP_PROXY ?? "http://127.0.0.1:7890"
+);
+
 const _baseConfig: CreateAxiosDefaults = {
   timeout: 10 * 1000,
   headers: {
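Note: the hunk above only creates and exports kProxyAgent; how the agent gets attached to outgoing requests is not visible in this view. Below is a minimal, illustrative sketch of the usual axios wiring under that assumption (the `http` instance name is hypothetical, not the file's actual code):

import axios, { CreateAxiosDefaults } from "axios";
import { HttpsProxyAgent } from "https-proxy-agent";

// Same construction as in the diff: honor HTTP_PROXY, fall back to a local proxy.
const kProxyAgent = new HttpsProxyAgent(
  process.env.HTTP_PROXY ?? "http://127.0.0.1:7890"
);

const _baseConfig: CreateAxiosDefaults = {
  timeout: 10 * 1000,
};

// Illustrative wiring only: hand the agent to axios and disable its built-in
// proxy resolution so the agent handles the tunneling instead.
const http = axios.create({
  ..._baseConfig,
  httpAgent: kProxyAgent,
  httpsAgent: kProxyAgent,
  proxy: false,
});

// e.g. await http.get("https://api.openai.com/v1/models");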
src/services/openai.ts

@@ -5,32 +5,98 @@ import {
 } from "openai/resources";
 
 import { kEnvs } from "../utils/env";
+import { kProxyAgent } from "./http";
 
-const client = new OpenAI({
-  apiKey: kEnvs.OPENAI_API_KEY!,
-});
+class OpenAIClient {
+  private _client = new OpenAI({
+    httpAgent: kProxyAgent,
+    apiKey: kEnvs.OPENAI_API_KEY!,
+  });
+
+  private _abortCallbacks: Record<string, VoidFunction> = {
+    // requestId: abortStreamCallback
+  };
+
+  abort(requestId: string) {
+    if (this._abortCallbacks[requestId]) {
+      this._abortCallbacks[requestId]();
+      delete this._abortCallbacks[requestId];
+    }
+  }
 
-export const openai = {
   async chat(options: {
     user: string;
     system?: string;
     tools?: Array<ChatCompletionTool>;
     jsonMode?: boolean;
   }) {
-    const systemMsg: ChatCompletionMessageParam[] = options.system
-      ? [{ role: "system", content: options.system }]
+    const { user, system, tools, jsonMode } = options;
+    const systemMsg: ChatCompletionMessageParam[] = system
+      ? [{ role: "system", content: system }]
       : [];
-    const chatCompletion = await client.chat.completions
+    const chatCompletion = await this._client.chat.completions
      .create({
-        tools: options.tools,
-        messages: [...systemMsg, { role: "user", content: options.user }],
-        model: kEnvs.OPENAI_MODEL ?? "gpt-3.5-turbo-1106",
-        response_format: options.jsonMode ? { type: "json_object" } : undefined,
+        tools,
+        messages: [...systemMsg, { role: "user", content: user }],
+        model: kEnvs.OPENAI_MODEL ?? "gpt-3.5-turbo-0125",
+        response_format: jsonMode ? { type: "json_object" } : undefined,
       })
       .catch((e) => {
         console.error("❌ openai chat failed", e);
         return null;
       });
     return chatCompletion?.choices?.[0]?.message;
-  },
-};
+  }
+
+  async chatStream(options: {
+    user: string;
+    system?: string;
+    tools?: Array<ChatCompletionTool>;
+    jsonMode?: boolean;
+    requestId?: string;
+    onStream?: (text: string) => void;
+  }) {
+    const { user, system, tools, jsonMode, onStream, requestId } = options;
+    const systemMsg: ChatCompletionMessageParam[] = system
+      ? [{ role: "system", content: system }]
+      : [];
+    const stream = await this._client.chat.completions
+      .create({
+        tools,
+        stream: true,
+        messages: [...systemMsg, { role: "user", content: user }],
+        model: kEnvs.OPENAI_MODEL ?? "gpt-3.5-turbo-0125",
+        response_format: jsonMode ? { type: "json_object" } : undefined,
+      })
+      .catch((e) => {
+        console.error("❌ openai chat failed", e);
+        return null;
+      });
+    if (!stream) {
+      return;
+    }
+    if (requestId) {
+      this._abortCallbacks[requestId] = () => stream.controller.abort();
+    }
+    let content = "";
+    try {
+      for await (const chunk of stream) {
+        const text = chunk.choices[0]?.delta?.content || "";
+        const aborted =
+          requestId && !Object.keys(this._abortCallbacks).includes(requestId);
+        if (aborted) {
+          return undefined;
+        }
+        if (text) {
+          onStream?.(text);
+          content += text;
+        }
+      }
+    } catch {
+      return undefined;
+    }
+    return content;
+  }
+}
+
+export const openai = new OpenAIClient();
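For reference, a minimal usage sketch of the new streaming API (not part of the commit; the import path and prompt below are illustrative). openai.chatStream delivers text deltas through onStream and resolves to the concatenated reply, or undefined if the request fails or is aborted via openai.abort(requestId):

import { randomUUID } from "crypto";
import { openai } from "../src/services/openai"; // path depends on the caller's location

async function demo() {
  const requestId = randomUUID();

  // Each delta is pushed to onStream as soon as it arrives.
  const full = await openai.chatStream({
    requestId,
    user: "Hello!",
    onStream: (delta) => process.stdout.write(delta),
  });

  // Calling openai.abort(requestId) elsewhere (e.g. when the speaker gets
  // interrupted) aborts the underlying stream, and chatStream then resolves
  // to undefined instead of the full text.
  console.log("\nfull reply:", full ?? "(aborted or failed)");
}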
tests entry file

@@ -4,13 +4,15 @@ import { kBannerASCII } from "../src/utils/string";
 import { runWithDB } from "../src/services/db";
 import { testDB } from "./db";
 import { testSpeaker } from "./speaker";
+import { testOpenAI } from "./openai";
 
 dotenv.config();
 
 async function main() {
   println(kBannerASCII);
   // testDB();
-  testSpeaker();
+  // testSpeaker();
+  testOpenAI();
 }
 
 runWithDB(main);
tests/openai.ts (new file, 31 lines)

@@ -0,0 +1,31 @@
+import { randomUUID } from "crypto";
+import { openai } from "../src/services/openai";
+
+export async function testOpenAI() {
+  await testStreamChat();
+}
+
+async function testStreamChat() {
+  const requestId = randomUUID();
+  const res = await openai.chatStream({
+    requestId,
+    user: "地球为什么是圆的?",
+    onStream: (text) => {
+      console.log(text);
+    },
+  });
+  console.log("\nFinal result:\n", res);
+}
+
+async function testAbortStreamChat() {
+  const requestId = randomUUID();
+  const res = await openai.chatStream({
+    requestId,
+    user: "hello!",
+    onStream: (text) => {
+      console.log(text);
+      openai.abort(requestId);
+    },
+  });
+  console.log("xxx", res);
+}
yarn.lock (25 lines changed)

@@ -379,6 +379,13 @@ acorn@^8.4.1:
   resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a"
   integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==
 
+agent-base@^7.0.2:
+  version "7.1.0"
+  resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-7.1.0.tgz#536802b76bc0b34aa50195eb2442276d613e3434"
+  integrity sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg==
+  dependencies:
+    debug "^4.3.4"
+
 agentkeepalive@^4.2.1:
   version "4.5.0"
   resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.5.0.tgz#2673ad1389b3c418c5a20c5d7364f93ca04be923"

@@ -549,7 +556,7 @@ crypt@0.0.2:
   resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
   integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==
 
-debug@^4.3.1:
+debug@4, debug@^4.3.1, debug@^4.3.4:
   version "4.3.4"
   resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
   integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==

@@ -750,6 +757,14 @@ globby@^11.0.3:
     merge2 "^1.4.1"
     slash "^3.0.0"
 
+https-proxy-agent@^7.0.4:
+  version "7.0.4"
+  resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz#8e97b841a029ad8ddc8731f26595bad868cb4168"
+  integrity sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==
+  dependencies:
+    agent-base "^7.0.2"
+    debug "4"
+
 human-signals@^2.1.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0"

@@ -974,10 +989,10 @@ onetime@^5.1.2:
   dependencies:
     mimic-fn "^2.1.0"
 
-openai@^4.25.0:
-  version "4.25.0"
-  resolved "https://registry.yarnpkg.com/openai/-/openai-4.25.0.tgz#b40099d625cccb19cbf1cab88915ac1699ece0ed"
-  integrity sha512-qLMFOizjxKuDfQkBrczZPYo6XVL4bdcuz9MR11Q+M91kGcs8dQw+O90nRcC+qWuhaGphQkfXQJMn4cd7Yew3Kg==
+openai@^4.28.0:
+  version "4.28.0"
+  resolved "https://registry.yarnpkg.com/openai/-/openai-4.28.0.tgz#ded00e3d98c25758b5406c9675ec27a957e00930"
+  integrity sha512-JM8fhcpmpGN0vrUwGquYIzdcEQHtFuom6sRCbbCM6CfzZXNuRk33G7KfeRAIfnaCxSpzrP5iHtwJzIm6biUZ2Q==
   dependencies:
     "@types/node" "^18.11.18"
     "@types/node-fetch" "^2.6.4"