Skip to content

Commit 8ac4cd7

Browse files
committed
fix: 修复网页标签切换后流式输出暂停问题
移除流式输出中的人为延迟(setTimeout 10ms),解决标签页切换到后台时浏览器限制导致的输出暂停问题。

修改内容:
- streamOpenAIMessage: 移除推理和内容输出的延迟
- streamOpenAIMessageWithTools: 移除推理和内容输出的延迟
- streamGeminiMessage: 移除文本输出的延迟
- streamGeminiMessageWithTools: 移除文本输出的延迟

修复效果:
- 标签页切换到后台时流式输出继续进行
- 切回标签页时立即显示完整结果
- 提升整体性能和用户体验

Fixes #171
1 parent 692ae08 commit 8ac4cd7

File tree

1 file changed

+8
-17
lines changed

1 file changed

+8
-17
lines changed

packages/core/src/services/llm/service.ts

Lines changed: 8 additions & 17 deletions
Original file line number | Diff line number | Diff line change
@@ -548,23 +548,20 @@ export class LLMService implements ILLMService {
548548
const reasoningContent = chunk.choices[0]?.delta?.reasoning_content || '';
549549
if (reasoningContent) {
550550
accumulatedReasoning += reasoningContent;
551-
551+
552552
// 如果有推理回调,发送推理内容
553553
if (callbacks.onReasoningToken) {
554554
callbacks.onReasoningToken(reasoningContent);
555555
}
556-
await new Promise(resolve => setTimeout(resolve, 10));
557556
}
558557

559558
// 处理主要内容
560559
const content = chunk.choices[0]?.delta?.content || '';
561560
if (content) {
562561
accumulatedContent += content;
563-
562+
564563
// 使用流式think标签处理
565564
this.processStreamContentWithThinkTags(content, callbacks, thinkState);
566-
567-
await new Promise(resolve => setTimeout(resolve, 10));
568565
}
569566
}
570567

@@ -641,7 +638,6 @@ export class LLMService implements ILLMService {
641638
if (callbacks.onReasoningToken) {
642639
callbacks.onReasoningToken(reasoningContent);
643640
}
644-
await new Promise(resolve => setTimeout(resolve, 10));
645641
}
646642

647643
// 🆕 处理工具调用
@@ -652,9 +648,9 @@ export class LLMService implements ILLMService {
652648
while (toolCalls.length <= toolCallDelta.index) {
653649
toolCalls.push({ id: '', type: 'function' as const, function: { name: '', arguments: '' } });
654650
}
655-
651+
656652
const currentToolCall = toolCalls[toolCallDelta.index];
657-
653+
658654
if (toolCallDelta.id) currentToolCall.id = toolCallDelta.id;
659655
if (toolCallDelta.type) currentToolCall.type = toolCallDelta.type;
660656
if (toolCallDelta.function) {
@@ -664,9 +660,9 @@ export class LLMService implements ILLMService {
664660
if (toolCallDelta.function.arguments) {
665661
currentToolCall.function.arguments += toolCallDelta.function.arguments;
666662
}
667-
663+
668664
// 当工具调用完整时,通知回调
669-
if (currentToolCall.id && currentToolCall.function.name &&
665+
if (currentToolCall.id && currentToolCall.function.name &&
670666
toolCallDelta.function.arguments && callbacks.onToolCall) {
671667
try {
672668
JSON.parse(currentToolCall.function.arguments);
@@ -685,7 +681,6 @@ export class LLMService implements ILLMService {
685681
if (content) {
686682
accumulatedContent += content;
687683
this.processStreamContentWithThinkTags(content, callbacks, thinkState);
688-
await new Promise(resolve => setTimeout(resolve, 10));
689684
}
690685
}
691686

@@ -769,8 +764,6 @@ export class LLMService implements ILLMService {
769764
if (text) {
770765
accumulatedContent += text;
771766
callbacks.onToken(text);
772-
// 添加小延迟,让UI有时间更新
773-
await new Promise(resolve => setTimeout(resolve, 10));
774767
}
775768
}
776769

@@ -865,8 +858,6 @@ export class LLMService implements ILLMService {
865858
if (text) {
866859
accumulatedContent += text;
867860
callbacks.onToken(text);
868-
// 添加小延迟,让UI有时间更新
869-
await new Promise(resolve => setTimeout(resolve, 10));
870861
}
871862

872863
// 处理工具调用
@@ -881,9 +872,9 @@ export class LLMService implements ILLMService {
881872
arguments: JSON.stringify(functionCall.args)
882873
}
883874
};
884-
875+
885876
toolCalls.push(toolCall);
886-
877+
887878
console.log('[Gemini] Tool call received:', toolCall);
888879
if (callbacks.onToolCall) {
889880
callbacks.onToolCall(toolCall);

0 commit comments

Comments (0)