Skip to content

Commit f0cdd52

Browse files
feat: 为工具调用实现隔离的聊天会话
Co-authored-by: aider (vertex_ai/gemini-2.5-pro) <aider@aider.chat>
1 parent 68966e3 commit f0cdd52

File tree

1 file changed

+49
-1
lines changed

1 file changed

+49
-1
lines changed

packages/mcp-server/src/bridge/bridge.ts

Lines changed: 49 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ import {
77
type Tool as GcliTool,
88
type ToolResult,
99
GeminiChat,
10+
getResponseText,
1011
} from '@google/gemini-cli-core';
1112
import {
1213
type CallToolResult,
@@ -150,6 +151,52 @@ export class GcliMcpBridge {
150151
extra: { signal: AbortSignal },
151152
) => {
152153
try {
154+
// --- START: Isolation logic for tools that call the LLM ---
155+
if (tool.name === 'google_web_search' || tool.name === 'web_fetch') {
156+
// Create an isolated, one-shot chat session for this call
157+
const oneShotChat = new GeminiChat(
158+
this.config,
159+
this.config.getGeminiClient().getContentGenerator(),
160+
{}, // Use default generationConfig
161+
[], // Start with a clean history
162+
);
163+
164+
// Prepare the request for the Gemini API
165+
const request = {
166+
message: [{ text: args.query as string }],
167+
config: {
168+
tools: [{ googleSearch: {} }], // For web_search
169+
},
170+
};
171+
172+
// Adjust tool config for web_fetch
173+
if (tool.name === 'web_fetch') {
174+
// web_fetch uses a different tool configuration
175+
request.config.tools = [{ urlContext: {} }];
176+
}
177+
178+
// Send the request using the one-shot session
179+
const response = await oneShotChat.sendMessage(request);
180+
const resultText = getResponseText(response) || '';
181+
182+
// Convert the result to the MCP format
183+
const mcpResult = this.convertGcliResultToMcpResult({
184+
llmContent: resultText,
185+
returnDisplay: `Search results for "${args.query}" returned.`,
186+
});
187+
188+
// Attach grounding metadata if it exists
189+
if (response.candidates?.[0]?.groundingMetadata) {
190+
(mcpResult as any)._meta = {
191+
groundingMetadata: response.candidates[0].groundingMetadata,
192+
};
193+
}
194+
195+
return mcpResult;
196+
}
197+
// --- END: Isolation logic ---
198+
199+
// For other tools that don't call the LLM, use the original execute method
153200
const result = await tool.execute(args, extra.signal);
154201
return this.convertGcliResultToMcpResult(result);
155202
} catch (e) {
@@ -158,7 +205,8 @@ export class GcliMcpBridge {
158205
`${LOG_PREFIX} Error executing tool '${tool.name}': ${errorMessage}`,
159206
);
160207

161-
// 简单地抛出一个Error,MCP SDK会自动处理为适当的JSON-RPC错误
208+
// Simply throw an Error, and the MCP SDK will automatically handle it
209+
// as an appropriate JSON-RPC error.
162210
throw new Error(
163211
`Error executing tool '${tool.name}': ${errorMessage}`,
164212
);

0 commit comments

Comments (0)