fix: Resolve memory leak in openai._abortCallbacks

commit 072e205431 (parent cf8ffaebe8)
Author: WJG
Date: 2024-03-01 22:43:24 +08:00
GPG Key ID: 258474EF8590014A (no known key found for this signature in database)

4 changed files with 11 additions and 5 deletions
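The leak is straightforward: every request registers an abort callback in the client's private _abortCallbacks map under its requestId, but entries were never removed once a request finished, so the map grew with every completed call. The fix deletes the entry when the request settles and renames abort to cancel at each call site. A minimal sketch of the pattern, assuming an AbortController-based client; the class, field, and method signatures below are illustrative, not the project's actual implementation:

    class SketchClient {
      // requestId -> callback that aborts the matching in-flight request
      private _abortCallbacks: Record<string, () => void> = {};

      cancel(requestId: string) {
        const abort = this._abortCallbacks[requestId];
        if (abort) {
          abort();
          delete this._abortCallbacks[requestId];
        }
      }

      async chat(requestId: string, run: (signal: AbortSignal) => Promise<string>) {
        const controller = new AbortController();
        this._abortCallbacks[requestId] = () => controller.abort();
        try {
          return await run(controller.signal);
        } finally {
          // Drop the entry once the request settles so completed requests
          // no longer accumulate in the map.
          delete this._abortCallbacks[requestId];
        }
      }
    }

The commit itself places the deletes inline after the non-streaming and streaming branches finish rather than in a finally block; for requests that complete normally the effect is the same.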


@@ -163,7 +163,7 @@ export class MyBot {
trace: true,
onStream: (text) => {
if (stream.status === "canceled") {
- return openai.abort(requestId);
+ return openai.cancel(requestId);
}
stream.addResponse(text);
},


@@ -63,8 +63,8 @@ export class MemoryManager {
private _onMemory(ctx: MessageContext, currentMemory: Memory) {
if (this._currentMemory) {
// Cancel the previous memory-update tasks
- openai.abort(`update-short-memory-${this._currentMemory.id}`);
- openai.abort(`update-long-memory-${this._currentMemory.id}`);
+ openai.cancel(`update-short-memory-${this._currentMemory.id}`);
+ openai.cancel(`update-long-memory-${this._currentMemory.id}`);
}
this._currentMemory = currentMemory;
// Asynchronously update short-term and long-term memory
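The MemoryManager hunk also shows the caller-side convention: update tasks are keyed by stable, derived request IDs (update-short-memory-<id>, update-long-memory-<id>), so the next memory update can cancel the previous one by name. A rough sketch of that pattern; the client interface and the option names passed to chat are assumptions, not the real API:

    interface CancellableChatClient {
      cancel(requestId: string): void;
      chat(options: { requestId: string; user: string }): Promise<string | null>;
    }

    class MemoryUpdaterSketch {
      private _lastMemoryId?: number;

      constructor(private readonly openai: CancellableChatClient) {}

      onMemory(memoryId: number, prompt: string) {
        if (this._lastMemoryId != null) {
          // Cancel any still-running update tasks for the previous memory.
          this.openai.cancel(`update-short-memory-${this._lastMemoryId}`);
          this.openai.cancel(`update-long-memory-${this._lastMemoryId}`);
        }
        this._lastMemoryId = memoryId;
        // Start the new updates; each one stays cancellable by its derived requestId.
        void this.openai.chat({ requestId: `update-short-memory-${memoryId}`, user: prompt });
        void this.openai.chat({ requestId: `update-long-memory-${memoryId}`, user: prompt });
      }
    }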


@@ -39,7 +39,7 @@ class OpenAIClient {
// requestId: abortStreamCallback
};
- abort(requestId: string) {
+ cancel(requestId: string) {
this._init();
if (this._abortCallbacks[requestId]) {
this._abortCallbacks[requestId]();
@@ -84,6 +84,9 @@ class OpenAIClient {
this._logger.error("openai chat failed", e);
return null;
});
+ if (requestId) {
+ delete this._abortCallbacks[requestId];
+ }
const message = chatCompletion?.choices?.[0]?.message;
if (trace && this.traceOutput) {
this._logger.log(`✅ Answer: ${message?.content ?? "None"}`.trim());
@@ -145,6 +148,9 @@ class OpenAIClient {
content += text;
}
}
+ if (requestId) {
+ delete this._abortCallbacks[requestId];
+ }
if (trace && this.traceOutput) {
this._logger.log(`✅ Answer: ${content ?? "None"}`.trim());
}
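With the fix, both the non-streaming and streaming branches drop their _abortCallbacks entry once the response has been consumed. For a request cancelled mid-stream, the cleanup still relies on the aborted stream loop ending normally; if the abort could instead throw past this point, wrapping the loop in try/finally (as in the sketch after the commit summary) would guarantee the entry is removed.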


@@ -24,7 +24,7 @@ async function testAbortStreamChat() {
user: "hello!",
onStream: (text) => {
console.log(text);
- openai.abort(requestId);
+ openai.cancel(requestId);
},
});
console.log("xxx", res);