feat: support gemini 3.0 tool calling (#10301)

* fix error display

* 完整支持 gemini 的 Function calling 机制

* add fetchSSE

* fix continue mode

* improve

* refactor

* fix
This commit is contained in:
Arvin Xu
2025-11-19 17:24:46 +08:00
committed by GitHub
parent 973367c7ac
commit 7114fc10c4
22 changed files with 1917 additions and 947 deletions

View File

@@ -42,7 +42,6 @@ const nextConfig: NextConfig = {
// so we need to disable it
// refs: https://github.com/lobehub/lobe-chat/pull/7430
serverMinification: false,
turbopackFileSystemCacheForDev: true,
webVitalsAttribution: ['CLS', 'LCP'],
webpackBuildWorker: true,
webpackMemoryOptimizations: true,

View File

@@ -141,7 +141,7 @@
"@emotion/react": "^11.14.0",
"@fal-ai/client": "^1.7.2",
"@formkit/auto-animate": "^0.9.0",
"@google/genai": "^1.29.1",
"@google/genai": "^1.30.0",
"@huggingface/inference": "^4.13.3",
"@icons-pack/react-simple-icons": "^13.8.0",
"@khmyznikov/pwa-install": "0.3.9",

View File

@@ -130,6 +130,7 @@ export class ToolCallProcessor extends BaseProcessor {
: `${tool.identifier}.${tool.apiName}`,
},
id: tool.id,
thoughtSignature: tool.thoughtSignature,
type: 'function',
}),
);

View File

@@ -72,6 +72,65 @@ describe('ToolCallProcessor', () => {
]);
});
it('should pass through thoughtSignature when present', async () => {
const processor = new ToolCallProcessor(defaultConfig);
const context = createContext([
{
content: '',
id: 'msg1',
role: 'assistant',
tools: [
{
apiName: 'search',
arguments: '{"query":"test"}',
id: 'call_1',
identifier: 'web',
thoughtSignature: 'Let me search for this information',
type: 'builtin',
},
],
},
]);
const result = await processor.process(context);
expect(result.messages[0].tool_calls).toEqual([
{
function: {
arguments: '{"query":"test"}',
name: 'web.search',
},
id: 'call_1',
thoughtSignature: 'Let me search for this information',
type: 'function',
},
]);
});
it('should handle missing thoughtSignature', async () => {
const processor = new ToolCallProcessor(defaultConfig);
const context = createContext([
{
content: '',
id: 'msg1',
role: 'assistant',
tools: [
{
apiName: 'search',
arguments: '{"query":"test"}',
id: 'call_1',
identifier: 'web',
type: 'builtin',
},
],
},
]);
const result = await processor.process(context);
expect(result.messages[0].tool_calls[0].thoughtSignature).toBeUndefined();
});
it('should use custom genToolCallingName function', async () => {
const genToolCallingName = vi.fn(
(identifier, apiName, type) => `custom_${identifier}_${apiName}_${type}`,

View File

@@ -82,6 +82,7 @@ export class ToolNameResolver {
arguments: toolCall.function.arguments,
id: toolCall.id,
identifier,
thoughtSignature: toolCall.thoughtSignature,
type: (type ?? 'default') as any,
};

View File

@@ -455,6 +455,63 @@ describe('ToolNameResolver', () => {
});
});
describe('resolve - thoughtSignature', () => {
it('should pass through thoughtSignature when present', () => {
const toolCalls = [
{
function: {
arguments: '{"query": "test"}',
name: 'test-plugin____myAction____builtin',
},
id: 'call_1',
thoughtSignature: 'thinking about this...',
type: 'function',
},
];
const manifests = {
'test-plugin': {
api: [{ description: 'My action', name: 'myAction', parameters: {} }],
identifier: 'test-plugin',
meta: {},
type: 'builtin' as const,
},
};
const result = resolver.resolve(toolCalls, manifests);
expect(result).toHaveLength(1);
expect(result[0].thoughtSignature).toBe('thinking about this...');
});
it('should handle missing thoughtSignature', () => {
const toolCalls = [
{
function: {
arguments: '{"query": "test"}',
name: 'test-plugin____myAction____builtin',
},
id: 'call_1',
type: 'function',
},
];
const manifests = {
'test-plugin': {
api: [{ description: 'My action', name: 'myAction', parameters: {} }],
identifier: 'test-plugin',
meta: {},
type: 'builtin' as const,
},
};
const result = resolver.resolve(toolCalls, manifests);
expect(result).toHaveLength(1);
expect(result[0].thoughtSignature).toBeUndefined();
});
});
describe('resolve - edge cases', () => {
it('should filter out invalid tool calls with missing apiName', () => {
const toolCalls = [

View File

@@ -30,6 +30,7 @@ export interface MessageToolCall {
name: string;
};
id: string;
thoughtSignature?: string;
type: 'function';
}
export interface Message {

View File

@@ -17,7 +17,7 @@ import { nanoid } from '@lobechat/utils/uuid';
import { getMessageError } from './parseError';
type SSEFinishType = 'done' | 'error' | 'abort';
type SSEFinishType = 'done' | 'error' | 'abort' | string;
export type OnFinishHandler = (
text: string,
@@ -48,6 +48,10 @@ export interface MessageTextChunk {
text: string;
type: 'text';
}
export interface MessageStopChunk {
reason: string;
type: 'stop';
}
export interface MessageBase64ImageChunk {
id: string;
@@ -86,7 +90,8 @@ export interface FetchSSEOptions {
| MessageGroundingChunk
| MessageUsageChunk
| MessageBase64ImageChunk
| MessageSpeedChunk,
| MessageSpeedChunk
| MessageStopChunk,
) => void;
responseAnimation?: ResponseAnimation;
}
@@ -387,6 +392,11 @@ export const fetchSSE = async (url: string, options: RequestInit & FetchSSEOptio
break;
}
case 'stop': {
options.onMessageHandle?.({ reason: data, type: 'stop' });
break;
}
case 'reasoning': {
if (textSmoothing) {
thinkingController.pushToQueue(data);

View File

@@ -6,6 +6,7 @@ import { describe, expect, it, vi } from 'vitest';
import { ChatCompletionTool, OpenAIChatMessage, UserMessageContentPart } from '../../types';
import { parseDataUri } from '../../utils/uriParser';
import {
GEMINI_MAGIC_THOUGHT_SIGNATURE,
buildGoogleMessage,
buildGoogleMessages,
buildGooglePart,
@@ -232,6 +233,415 @@ describe('google contextBuilders', () => {
});
});
it('should correctly convert function call message with thoughtSignature', async () => {
const message = {
role: 'assistant',
tool_calls: [
{
function: {
arguments: JSON.stringify({
language: ['JSON'],
path: 'package.json',
query: '"version":',
repo: 'lobehub/lobe-chat',
}),
name: 'grep____searchGitHub____mcp',
},
id: 'grep____searchGitHub____mcp_0_6RnOMTF0',
thoughtSignature:
'EsUHCsIHAdHtim9/MrjP+pnhM8DVkvulyfWQVf+isXQxEAbF32gbflE1hl6Te80qtp77Ywn8opB2uhQOIH/l6SStsj3+XRy1U1DTeKtqZxDBoLP2rNK6pi3/nk0ZOQIc8f6rxB70G/zOhk7d/1XQFqhmw5H+yDVRQjGD1cNPY5ctWGxQLAIk/HMWNovUJzz2c81jGWoXu7k2vtpuur2hcAL+J79BEVUTfvU3mSiXqJFTClmFPB6Fe79i0y3TwM2XdIBxzPgVgf8B+Pnv1S6YDxHNSm46jTlXKcSw30r3ixs5xEOzerbOUW5WG9BGukw/YQVvHiuoGLIALRa2Ig7dlOMH8+o+f0mKJtyYj8yF6wyBMol+G4mhSHvQSKJLj/Z5kFHvDZKeVUEOZed6vZivYLrVezjQPXgLHJMOmbp6QrZGxqW45QxDKY5X5F8giIOM8VgsUYhDQUBown+3vvwkIBA24icDsOwdhJ/roe9GabbGfxpkSzARIFh7rSI01cRKbh6cEaVFXf2WQftPeD7dBseQLiCdUYoy4ytECrjTpknrWnVUG6Ly4SKW6uN/IJXpm9JT9GgnGLIddFtEQzm9sIKWNpGEz6++lZpiCFS6LsYSnTP3vPj/7oSABRmwWywxA8EmLh+sv+jiK5aMjFi1sTuJ0Ujsvza3/SHZKewNi9WKQUDOa9Mqtjs2YGDnJxto4l5GMUzI5vhf6/+/A5eHALfVabaFP97v8FEPrXQU94dognwx4EnNqy/KWmGIlYZYqIfjaSAy7Z74viwl+oTtL9gyyBDc/FrQvXfyrYIq8N0pkLKAEh33fa/+YVocLL1LKI9rb2bg/RRr+Ee4NyIQKhIdEJaEh74d1COd/4r06J92ThkfVo5PEVTSsr8tBKiJ5wSmX9vyhbLWzxmXoq1xfGrs8kg7NMW53XEWGlQrIVOQmUtjjjBQKj6b4rBTAO6EKk63cGFbkSPohifiUBPHbxUUPy/hf0tQpeOo3jA01AuCFLOIZ5IYJ+Rm5+aZTU3Panv+Q7Yl1w5t5swhbNZfg7MlU/sxwLijLuWDDNfw+2Zw/aa3VDPgVw6Nv2vKkHi4tUU0XlgfiQgQYUMPxpGRV837uUxvZFNep2QUlAMog5h4sMYJWIAX1kK1pzsyR/KxuCn6nUq4ovWNBQHLC4aW2ZcGgW/6CbF81F1cewUz+vWNMMkJrL0d9celGEbFuY0Q709UipaDbCg49twlnLV9XUwqC5wYTFBiJbynBDqiZAvXn2YOxNIs8CCzuu2GSCQDo09ksJy5g/o=',
type: 'function',
},
],
} as OpenAIChatMessage;
const converted = await buildGoogleMessage(message);
expect(converted).toEqual({
parts: [
{
functionCall: {
args: {
language: ['JSON'],
path: 'package.json',
query: '"version":',
repo: 'lobehub/lobe-chat',
},
name: 'grep____searchGitHub____mcp',
},
thoughtSignature:
'EsUHCsIHAdHtim9/MrjP+pnhM8DVkvulyfWQVf+isXQxEAbF32gbflE1hl6Te80qtp77Ywn8opB2uhQOIH/l6SStsj3+XRy1U1DTeKtqZxDBoLP2rNK6pi3/nk0ZOQIc8f6rxB70G/zOhk7d/1XQFqhmw5H+yDVRQjGD1cNPY5ctWGxQLAIk/HMWNovUJzz2c81jGWoXu7k2vtpuur2hcAL+J79BEVUTfvU3mSiXqJFTClmFPB6Fe79i0y3TwM2XdIBxzPgVgf8B+Pnv1S6YDxHNSm46jTlXKcSw30r3ixs5xEOzerbOUW5WG9BGukw/YQVvHiuoGLIALRa2Ig7dlOMH8+o+f0mKJtyYj8yF6wyBMol+G4mhSHvQSKJLj/Z5kFHvDZKeVUEOZed6vZivYLrVezjQPXgLHJMOmbp6QrZGxqW45QxDKY5X5F8giIOM8VgsUYhDQUBown+3vvwkIBA24icDsOwdhJ/roe9GabbGfxpkSzARIFh7rSI01cRKbh6cEaVFXf2WQftPeD7dBseQLiCdUYoy4ytECrjTpknrWnVUG6Ly4SKW6uN/IJXpm9JT9GgnGLIddFtEQzm9sIKWNpGEz6++lZpiCFS6LsYSnTP3vPj/7oSABRmwWywxA8EmLh+sv+jiK5aMjFi1sTuJ0Ujsvza3/SHZKewNi9WKQUDOa9Mqtjs2YGDnJxto4l5GMUzI5vhf6/+/A5eHALfVabaFP97v8FEPrXQU94dognwx4EnNqy/KWmGIlYZYqIfjaSAy7Z74viwl+oTtL9gyyBDc/FrQvXfyrYIq8N0pkLKAEh33fa/+YVocLL1LKI9rb2bg/RRr+Ee4NyIQKhIdEJaEh74d1COd/4r06J92ThkfVo5PEVTSsr8tBKiJ5wSmX9vyhbLWzxmXoq1xfGrs8kg7NMW53XEWGlQrIVOQmUtjjjBQKj6b4rBTAO6EKk63cGFbkSPohifiUBPHbxUUPy/hf0tQpeOo3jA01AuCFLOIZ5IYJ+Rm5+aZTU3Panv+Q7Yl1w5t5swhbNZfg7MlU/sxwLijLuWDDNfw+2Zw/aa3VDPgVw6Nv2vKkHi4tUU0XlgfiQgQYUMPxpGRV837uUxvZFNep2QUlAMog5h4sMYJWIAX1kK1pzsyR/KxuCn6nUq4ovWNBQHLC4aW2ZcGgW/6CbF81F1cewUz+vWNMMkJrL0d9celGEbFuY0Q709UipaDbCg49twlnLV9XUwqC5wYTFBiJbynBDqiZAvXn2YOxNIs8CCzuu2GSCQDo09ksJy5g/o=',
},
],
role: 'model',
});
});
describe('should correctly convert function call message without thoughtSignature', () => {
it('should add magic signature when last message is tool message', async () => {
const messages: OpenAIChatMessage[] = [
{
content: '<plugins>Web Browsing plugin available</plugins>',
role: 'system',
},
{
content: '杭州天气如何',
role: 'user',
},
{
content: '',
role: 'assistant',
tool_calls: [
{
function: {
arguments: '{"query":"杭州天气","searchEngines":["google"]}',
name: 'lobe-web-browsing____search____builtin',
},
id: 'call_001',
type: 'function',
},
],
},
{
content: 'Tool execution was aborted by user.',
name: 'lobe-web-browsing____search____builtin',
role: 'tool',
tool_call_id: 'call_001',
},
{
content: '',
role: 'assistant',
tool_calls: [
{
function: {
arguments: '{"query":"杭州 天气","searchEngines":["bing"]}',
name: 'lobe-web-browsing____search____builtin',
},
id: 'call_002',
type: 'function',
},
],
},
{
content: 'no result',
name: 'lobe-web-browsing____search____builtin',
role: 'tool',
tool_call_id: 'call_002',
},
];
const contents = await buildGoogleMessages(messages);
expect(contents).toEqual([
{
parts: [{ text: '<plugins>Web Browsing plugin available</plugins>' }],
role: 'user',
},
{ parts: [{ text: '杭州天气如何' }], role: 'user' },
{
parts: [
{
functionCall: {
args: { query: '杭州天气', searchEngines: ['google'] },
name: 'lobe-web-browsing____search____builtin',
},
thoughtSignature: GEMINI_MAGIC_THOUGHT_SIGNATURE,
},
],
role: 'model',
},
{
parts: [
{
functionResponse: {
name: 'lobe-web-browsing____search____builtin',
response: { result: 'Tool execution was aborted by user.' },
},
},
],
role: 'user',
},
{
parts: [
{
functionCall: {
args: { query: '杭州 天气', searchEngines: ['bing'] },
name: 'lobe-web-browsing____search____builtin',
},
thoughtSignature: GEMINI_MAGIC_THOUGHT_SIGNATURE,
},
],
role: 'model',
},
{
parts: [
{
functionResponse: {
name: 'lobe-web-browsing____search____builtin',
response: { result: 'no result' },
},
},
],
role: 'user',
},
]);
});
it('should NOT add magic signature when thoughtSignature already exists', async () => {
const existingSignature = 'existing_signature_from_model';
const messages: OpenAIChatMessage[] = [
{
content: '杭州天气如何',
role: 'user',
},
{
content: '',
role: 'assistant',
tool_calls: [
{
function: {
arguments: '{"query":"杭州天气","searchEngines":["google"]}',
name: 'lobe-web-browsing____search____builtin',
},
id: 'call_001',
thoughtSignature: existingSignature,
type: 'function',
},
],
},
{
content: 'Tool result',
name: 'lobe-web-browsing____search____builtin',
role: 'tool',
tool_call_id: 'call_001',
},
];
const contents = await buildGoogleMessages(messages);
expect(contents).toEqual([
{
parts: [{ text: '杭州天气如何' }],
role: 'user',
},
{
parts: [
{
functionCall: {
args: { query: '杭州天气', searchEngines: ['google'] },
name: 'lobe-web-browsing____search____builtin',
},
// Should keep existing thoughtSignature, not add magic signature
thoughtSignature: existingSignature,
},
],
role: 'model',
},
{
parts: [
{
functionResponse: {
name: 'lobe-web-browsing____search____builtin',
response: { result: 'Tool result' },
},
},
],
role: 'user',
},
]);
});
it('should add magic signature only after last user message in multi-turn scenario', async () => {
const messages: OpenAIChatMessage[] = [
{
content: 'First question',
role: 'user',
},
{
content: '',
role: 'assistant',
tool_calls: [
{
function: {
arguments: '{"query":"first"}',
name: 'search',
},
id: 'call_001',
type: 'function',
},
],
},
{
content: 'First result',
name: 'search',
role: 'tool',
tool_call_id: 'call_001',
},
{
content: 'Second question',
role: 'user',
},
{
content: '',
role: 'assistant',
tool_calls: [
{
function: {
arguments: '{"query":"second"}',
name: 'search',
},
id: 'call_002',
type: 'function',
},
],
},
{
content: 'Second result',
name: 'search',
role: 'tool',
tool_call_id: 'call_002',
},
];
const contents = await buildGoogleMessages(messages);
expect(contents).toEqual([
{
parts: [{ text: 'First question' }],
role: 'user',
},
{
parts: [
{
functionCall: {
args: { query: 'first' },
name: 'search',
},
// No magic signature for this one (before last user message)
},
],
role: 'model',
},
{
parts: [
{
functionResponse: {
name: 'search',
response: { result: 'First result' },
},
},
],
role: 'user',
},
{
parts: [{ text: 'Second question' }],
role: 'user',
},
{
parts: [
{
functionCall: {
args: { query: 'second' },
name: 'search',
},
// Magic signature added (after last user message)
thoughtSignature: GEMINI_MAGIC_THOUGHT_SIGNATURE,
},
],
role: 'model',
},
{
parts: [
{
functionResponse: {
name: 'search',
response: { result: 'Second result' },
},
},
],
role: 'user',
},
]);
});
it('should NOT add magic signature when last message is user text message', async () => {
const messages: OpenAIChatMessage[] = [
{
content: '<plugins>Web Browsing plugin available</plugins>',
role: 'system',
},
{
content: '杭州天气如何',
role: 'user',
},
{
content: '',
role: 'assistant',
tool_calls: [
{
function: {
arguments: '{"query":"杭州天气","searchEngines":["google"]}',
name: 'lobe-web-browsing____search____builtin',
},
id: 'call_001',
type: 'function',
},
],
},
{
content: 'Tool execution was aborted by user.',
name: 'lobe-web-browsing____search____builtin',
role: 'tool',
tool_call_id: 'call_001',
},
{
content: 'Please try again',
role: 'user',
},
];
const contents = await buildGoogleMessages(messages);
expect(contents).toEqual([
{
parts: [{ text: '<plugins>Web Browsing plugin available</plugins>' }],
role: 'user',
},
{
parts: [{ text: '杭州天气如何' }],
role: 'user',
},
{
parts: [
{
functionCall: {
args: { query: '杭州天气', searchEngines: ['google'] },
name: 'lobe-web-browsing____search____builtin',
},
// No thoughtSignature should be added when last message is user text
},
],
role: 'model',
},
{
parts: [
{
functionResponse: {
name: 'lobe-web-browsing____search____builtin',
response: { result: 'Tool execution was aborted by user.' },
},
},
],
role: 'user',
},
{
parts: [{ text: 'Please try again' }],
role: 'user',
},
]);
});
});
it('should correctly handle empty content', async () => {
const message: OpenAIChatMessage = {
content: '' as any, // explicitly set as empty string
@@ -361,6 +771,7 @@ describe('google contextBuilders', () => {
args: { location: 'London', unit: 'celsius' },
name: 'get_current_weather',
},
thoughtSignature: GEMINI_MAGIC_THOUGHT_SIGNATURE,
},
],
role: 'model',
@@ -410,6 +821,74 @@ describe('google contextBuilders', () => {
{ parts: [{ text: 'Hi' }], role: 'model' },
]);
});
it('should correctly convert full conversation with thoughtSignature', async () => {
const messages: OpenAIChatMessage[] = [
{ content: 'system prompt', role: 'system' },
{ content: 'LobeChat 最新版本', role: 'user' },
{
content: '',
role: 'assistant',
tool_calls: [
{
function: {
arguments: JSON.stringify({
language: ['JSON'],
path: 'package.json',
query: '"version":',
repo: 'lobehub/lobe-chat',
}),
name: 'grep____searchGitHub____mcp',
},
id: 'grep____searchGitHub____mcp_0_6RnOMTF0',
thoughtSignature: 'test-signature',
type: 'function',
},
],
},
{
content: '',
name: 'grep____searchGitHub____mcp',
role: 'tool',
tool_call_id: 'grep____searchGitHub____mcp_0_6RnOMTF0',
},
];
const contents = await buildGoogleMessages(messages);
expect(contents).toEqual([
{ parts: [{ text: 'system prompt' }], role: 'user' },
{ parts: [{ text: 'LobeChat 最新版本' }], role: 'user' },
{
parts: [
{
functionCall: {
args: {
language: ['JSON'],
path: 'package.json',
query: '"version":',
repo: 'lobehub/lobe-chat',
},
name: 'grep____searchGitHub____mcp',
},
thoughtSignature: 'test-signature',
},
],
role: 'model',
},
{
parts: [
{
functionResponse: {
name: 'grep____searchGitHub____mcp',
response: { result: '' },
},
},
],
role: 'user',
},
]);
});
});
describe('buildGoogleTool', () => {

View File

@@ -11,6 +11,12 @@ import { ChatCompletionTool, OpenAIChatMessage, UserMessageContentPart } from '.
import { safeParseJSON } from '../../utils/safeParseJSON';
import { parseDataUri } from '../../utils/uriParser';
/**
* Magic thoughtSignature
* @see https://ai.google.dev/gemini-api/docs/thought-signatures#model-behavior:~:text=context_engineering_is_the_way_to_go
*/
export const GEMINI_MAGIC_THOUGHT_SIGNATURE = 'context_engineering_is_the_way_to_go';
/**
* Convert OpenAI content part to Google Part format
*/
@@ -95,6 +101,7 @@ export const buildGoogleMessage = async (
args: safeParseJSON(tool.function.arguments)!,
name: tool.function.name,
},
thoughtSignature: tool.thoughtSignature,
})),
role: 'model',
};
@@ -155,7 +162,43 @@ export const buildGoogleMessages = async (messages: OpenAIChatMessage[]): Promis
const contents = await Promise.all(pools);
// Filter out empty messages: contents.parts must not be empty.
return contents.filter((content: Content) => content.parts && content.parts.length > 0);
const filteredContents = contents.filter(
(content: Content) => content.parts && content.parts.length > 0,
);
// Check if the last message is a tool message
const lastMessage = messages.at(-1);
const shouldAddMagicSignature = lastMessage?.role === 'tool';
if (shouldAddMagicSignature) {
// Find the last user message index in filtered contents
let lastUserIndex = -1;
for (let i = filteredContents.length - 1; i >= 0; i--) {
if (filteredContents[i].role === 'user') {
// Skip if it's a functionResponse (tool result)
const hasFunctionResponse = filteredContents[i].parts?.some((p) => p.functionResponse);
if (!hasFunctionResponse) {
lastUserIndex = i;
break;
}
}
}
// Add magic signature to all function calls after last user message that don't have thoughtSignature
for (let i = lastUserIndex + 1; i < filteredContents.length; i++) {
const content = filteredContents[i];
if (content.role === 'model' && content.parts) {
for (const part of content.parts) {
if (part.functionCall && !part.thoughtSignature) {
// Only add magic signature if thoughtSignature doesn't exist
part.thoughtSignature = GEMINI_MAGIC_THOUGHT_SIGNATURE;
}
}
}
}
}
return filteredContents;
};
/**

View File

@@ -1,4 +1,4 @@
import { GenerateContentResponse } from '@google/genai';
import { GenerateContentResponse, Part } from '@google/genai';
import { GroundingSearch } from '@lobechat/types';
import { ChatStreamCallbacks } from '../../../types';
@@ -74,19 +74,27 @@ const transformGoogleGenerativeAIStream = (
}
}
const functionCalls = chunk.functionCalls;
// Parse function calls from candidate.content.parts
const functionCalls =
candidate?.content?.parts
?.filter((part: any) => part.functionCall)
.map((part: Part) => ({
...part.functionCall,
thoughtSignature: part.thoughtSignature,
})) || [];
if (functionCalls) {
if (functionCalls.length > 0) {
return [
{
data: functionCalls.map(
(value, index): StreamToolCallChunkData => ({
(value, index: number): StreamToolCallChunkData => ({
function: {
arguments: JSON.stringify(value.args),
name: value.name,
},
id: generateToolCallId(index, value.name),
index: index,
thoughtSignature: value.thoughtSignature,
type: 'function',
}),
),
@@ -97,7 +105,13 @@ const transformGoogleGenerativeAIStream = (
];
}
const text = chunk.text;
// Parse text from candidate.content.parts
// Filter out thought content (thought: true) and thoughtSignature
const text =
candidate?.content?.parts
?.filter((part: any) => part.text && !part.thought && !part.thoughtSignature)
.map((part: any) => part.text)
.join('') || '';
if (candidate) {
// 首先检查是否为 reasoning 内容 (thought: true)

View File

@@ -98,6 +98,7 @@ export interface StreamToolCallChunkData {
};
id?: string;
index: number;
thoughtSignature?: string;
type: 'function' | string;
}

View File

@@ -1,5 +1,5 @@
// @vitest-environment node
import { GenerateContentResponse, Tool } from '@google/genai';
import { GenerateContentResponse } from '@google/genai';
import OpenAI from 'openai';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

View File

@@ -267,19 +267,21 @@ export class LobeGoogleAI implements LobeRuntimeAI {
const inputStartAt = Date.now();
const geminiStreamResponse = await this.client.models.generateContentStream({
config,
contents,
model,
});
const googleStream = this.createEnhancedStream(geminiStreamResponse, controller.signal);
const [prod, useForDebug] = googleStream.tee();
const finalPayload = { config, contents, model };
const key = this.isVertexAi
? 'DEBUG_VERTEX_AI_CHAT_COMPLETION'
: 'DEBUG_GOOGLE_CHAT_COMPLETION';
if (process.env[key] === '1') {
console.log('[requestPayload]');
console.log(JSON.stringify(finalPayload), '\n');
}
const geminiStreamResponse = await this.client.models.generateContentStream(finalPayload);
const googleStream = this.createEnhancedStream(geminiStreamResponse, controller.signal);
const [prod, useForDebug] = googleStream.tee();
if (process.env[key] === '1') {
debugStream(useForDebug).catch();
}

View File

@@ -1,5 +1,5 @@
import { z } from 'zod';
import type { PartialDeep } from 'type-fest';
import { z } from 'zod';
/**
* The function that the model called.
@@ -30,6 +30,7 @@ export interface MessageToolCall {
*/
id: string;
thoughtSignature?: string;
/**
* The type of the tool. Currently, only `function` is supported.
*/
@@ -42,6 +43,7 @@ export const MessageToolCallSchema = z.object({
name: z.string(),
}),
id: z.string(),
thoughtSignature: z.string().optional(),
type: z.string(),
});

View File

@@ -30,6 +30,7 @@ export interface ChatToolPayload {
identifier: string;
intervention?: ToolIntervention;
result_msg_id?: string;
thoughtSignature?: string;
type: LobeToolRenderType;
}
@@ -84,6 +85,7 @@ export interface MessageToolCall {
*/
id: string;
thoughtSignature?: string;
/**
* The type of the tool. Currently, only `function` is supported.
*/
@@ -108,6 +110,7 @@ export const ChatToolPayloadSchema = z.object({
identifier: z.string(),
intervention: ToolInterventionSchema.optional(),
result_msg_id: z.string().optional(),
thoughtSignature: z.string().optional(),
type: z.string(),
});

View File

@@ -15,12 +15,13 @@ export interface ErrorContentProps {
const ErrorContent = memo<ErrorContentProps>(({ error, id }) => {
const { t } = useTranslation('common');
const errorProps = useErrorContent(error);
const [deleteMessage] = useChatStore((s) => [s.deleteDBMessage]);
const message = <ErrorMessageExtra block data={{ error, id }} />;
if (!error?.message) {
const errorProps = useErrorContent(error);
if (!errorProps?.message) {
if (!message) return null;
return <Flexbox>{message}</Flexbox>;
}

View File

@@ -30,10 +30,10 @@ const GroupItem = memo<GroupItemProps>(
});
}}
>
<ContentBlock index={index} {...item} />
<ContentBlock index={index} {...item} error={error} />
</Flexbox>
) : (
<ContentBlock index={index} {...item} />
<ContentBlock index={index} {...item} error={error} />
);
},
isEqual,

View File

@@ -76,7 +76,7 @@ export interface StreamingExecutorAction {
tool_calls?: MessageToolCall[];
content: string;
traceId?: string;
finishType?: 'done' | 'error' | 'abort';
finishType?: string;
usage?: ModelUsage;
}>;
/**
@@ -283,13 +283,13 @@ export const streamingExecutor: StateCreator<
let thinkingStartAt: number;
let duration: number | undefined;
let reasoningOperationId: string | undefined;
let finishType: 'done' | 'error' | 'abort' | undefined;
let finishType: string | undefined;
// to upload image
const uploadTasks: Map<string, Promise<{ id?: string; url?: string }>> = new Map();
// Throttle tool_calls updates to prevent excessive re-renders (max once per 300ms)
const throttledUpdateToolCalls = throttle(
(toolCalls: any[]) => {
(toolCalls: MessageToolCall[]) => {
internal_dispatchMessage(
{
id: messageId,
@@ -366,7 +366,6 @@ export const streamingExecutor: StateCreator<
throttledUpdateToolCalls.flush();
internal_toggleToolCallingStreaming(messageId, undefined);
tools = get().internal_transformToolCalls(parsedToolCalls);
tool_calls = toolCalls;
parsedToolCalls = parsedToolCalls.map((item) => ({
@@ -377,6 +376,8 @@ export const streamingExecutor: StateCreator<
},
}));
tools = get().internal_transformToolCalls(parsedToolCalls);
isFunctionCall = true;
}
@@ -395,7 +396,7 @@ export const streamingExecutor: StateCreator<
messageId,
content,
{
toolCalls: parsedToolCalls,
tools,
reasoning: !!reasoning
? { ...reasoning, duration: duration && !isNaN(duration) ? duration : undefined }
: undefined,

View File

@@ -3,11 +3,11 @@ import {
ChatImageItem,
ChatMessageError,
ChatMessagePluginError,
ChatToolPayload,
CreateMessageParams,
GroundingSearch,
MessageMetadata,
MessagePluginItem,
MessageToolCall,
ModelReasoning,
UIChatMessage,
UpdateMessageRAGParams,
@@ -69,7 +69,7 @@ export interface MessageOptimisticUpdateAction {
provider?: string;
reasoning?: ModelReasoning;
search?: GroundingSearch;
toolCalls?: MessageToolCall[];
tools?: ChatToolPayload[];
},
context?: OptimisticUpdateContext,
) => Promise<void>;
@@ -204,22 +204,17 @@ export const messageOptimisticUpdate: StateCreator<
},
optimisticUpdateMessageContent: async (id, content, extra, context) => {
const {
internal_dispatchMessage,
refreshMessages,
internal_transformToolCalls,
replaceMessages,
} = get();
const { internal_dispatchMessage, refreshMessages, replaceMessages } = get();
// Due to the async update method and refresh need about 100ms
// we need to update the message content at the frontend to avoid the update flick
// refs: https://medium.com/@kyledeguzmanx/what-are-optimistic-updates-483662c3e171
if (extra?.toolCalls) {
if (extra?.tools) {
internal_dispatchMessage(
{
id,
type: 'updateMessage',
value: { tools: internal_transformToolCalls(extra?.toolCalls) },
value: { tools: extra?.tools },
},
context,
);
@@ -246,7 +241,7 @@ export const messageOptimisticUpdate: StateCreator<
provider: extra?.provider,
reasoning: extra?.reasoning,
search: extra?.search,
tools: extra?.toolCalls ? internal_transformToolCalls(extra?.toolCalls) : undefined,
tools: extra?.tools,
},
{ sessionId, topicId },
);

View File

@@ -1,6 +1,6 @@
/* eslint-disable sort-keys-fix/sort-keys-fix, typescript-sort-keys/interface */
import { ToolNameResolver } from '@lobechat/context-engine';
import { MessageToolCall, ToolsCallingContext } from '@lobechat/types';
import { ChatToolPayload, MessageToolCall, ToolsCallingContext } from '@lobechat/types';
import { LobeChatPluginManifest } from '@lobehub/chat-plugin-sdk';
import { StateCreator } from 'zustand/vanilla';
@@ -19,7 +19,7 @@ export interface PluginInternalsAction {
/**
* Transform tool calls from runtime format to storage format
*/
internal_transformToolCalls: (toolCalls: MessageToolCall[]) => any[];
internal_transformToolCalls: (toolCalls: MessageToolCall[]) => ChatToolPayload[];
/**
* Construct tools calling context for plugin invocation