Skip to content

Commit 103e391

Browse files
committed
feat: Enhance logging capabilities with debug mode
- Introduced a debug mode to selectively log incoming requests and responses in the MCP server.
- Simplified the request logger to only log when debug mode is enabled.
- Updated the GeminiApiClient to log detailed request history and responses based on debug mode.
- Modified the OpenAI router and stream transformer to include debug logging.
- Adjusted the GcliMcpBridge to accept a debug mode parameter and conditionally log relevant information.
- Improved overall logging consistency and clarity across the application.
1 parent 33807af commit 103e391

File tree

6 files changed

+79
-1621
lines changed

6 files changed

+79
-1621
lines changed

commits.txt

Lines changed: 0 additions & 1567 deletions
This file was deleted.

packages/mcp-server/src/bridge/bridge.ts

Lines changed: 31 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -22,16 +22,10 @@ import { randomUUID } from 'node:crypto';
2222

2323
const LOG_PREFIX = '[MCP SERVER]';
2424

25-
// NEW: 日志中间件
26-
const requestLogger = (req: Request, res: Response, next: NextFunction) => {
27-
console.log(`${LOG_PREFIX} ⬇️ Incoming Request: ${req.method} ${req.url}`);
28-
console.log(`${LOG_PREFIX} Headers:`, JSON.stringify(req.headers, null, 2));
29-
if (req.body && Object.keys(req.body).length > 0) {
30-
const bodyStr = JSON.stringify(req.body);
31-
console.log(
32-
`${LOG_PREFIX} Body:`,
33-
bodyStr.length > 300 ? bodyStr.substring(0, 300) + '...' : bodyStr,
34-
);
25+
// Simplified request logger - only log on debug mode
26+
const requestLogger = (debugMode: boolean) => (req: Request, res: Response, next: NextFunction) => {
27+
if (debugMode) {
28+
console.log(`${LOG_PREFIX} ${req.method} ${req.url}`);
3529
}
3630
next();
3731
};
@@ -40,10 +34,12 @@ export class GcliMcpBridge {
4034
private readonly config: Config;
4135
private readonly cliVersion: string;
4236
private readonly mcpServer: McpServer;
37+
private readonly debugMode: boolean;
4338

44-
constructor(config: Config, cliVersion: string) {
39+
constructor(config: Config, cliVersion: string, debugMode = false) {
4540
this.config = config;
4641
this.cliVersion = cliVersion;
42+
this.debugMode = debugMode;
4743
this.mcpServer = new McpServer(
4844
{
4945
name: 'gemini-cli-mcp-server',
@@ -56,8 +52,10 @@ export class GcliMcpBridge {
5652
public async start(app: Application) {
5753
await this.registerAllGcliTools();
5854

59-
// NEW: 使用日志中间件
60-
app.use(requestLogger);
55+
// Only use request logger in debug mode
56+
if (this.debugMode) {
57+
app.use(requestLogger(this.debugMode));
58+
}
6159

6260
const transports: Record<string, StreamableHTTPServerTransport> = {};
6361

@@ -67,25 +65,31 @@ export class GcliMcpBridge {
6765

6866
if (!transport) {
6967
if (isInitializeRequest(req.body)) {
70-
console.log(
71-
`${LOG_PREFIX} creating new transport for initialize request.`,
72-
);
68+
if (this.debugMode) {
69+
console.log(
70+
`${LOG_PREFIX} Creating new transport for initialize request`,
71+
);
72+
}
7373
transport = new StreamableHTTPServerTransport({
7474
sessionIdGenerator: () => randomUUID(),
7575
onsessioninitialized: newSessionId => {
76-
console.log(
77-
`${LOG_PREFIX} ✅ Session initialized with ID: ${newSessionId}`,
78-
);
76+
if (this.debugMode) {
77+
console.log(
78+
`${LOG_PREFIX} Session initialized: ${newSessionId}`,
79+
);
80+
}
7981
transports[newSessionId] = transport!;
8082
},
8183
});
8284

8385
transport.onclose = () => {
8486
const sid = transport!.sessionId;
8587
if (sid && transports[sid]) {
86-
console.log(
87-
`${LOG_PREFIX} 🚪 Transport for session ${sid} closed.`,
88-
);
88+
if (this.debugMode) {
89+
console.log(
90+
`${LOG_PREFIX} Session ${sid} closed`,
91+
);
92+
}
8993
delete transports[sid];
9094
}
9195
};
@@ -94,7 +98,7 @@ export class GcliMcpBridge {
9498
await this.mcpServer.connect(transport);
9599
} else {
96100
console.error(
97-
`${LOG_PREFIX} Bad Request: Missing or invalid session ID for non-initialize request.`,
101+
`${LOG_PREFIX} Bad Request: Missing or invalid session ID`,
98102
);
99103
res.status(400).json({
100104
jsonrpc: '2.0',
@@ -106,16 +110,16 @@ export class GcliMcpBridge {
106110
});
107111
return;
108112
}
109-
} else {
113+
} else if (this.debugMode) {
110114
console.log(
111-
`${LOG_PREFIX} reusing transport for session: ${sessionId}`,
115+
`${LOG_PREFIX} Reusing transport for session: ${sessionId}`,
112116
);
113117
}
114118

115119
try {
116120
await transport.handleRequest(req, res, req.body);
117121
} catch (e) {
118-
console.error(`${LOG_PREFIX} 💥 Error handling request:`, e);
122+
console.error(`${LOG_PREFIX} Error handling request:`, e);
119123
if (!res.headersSent) {
120124
res.status(500).end();
121125
}
@@ -151,8 +155,7 @@ export class GcliMcpBridge {
151155
} catch (e) {
152156
const errorMessage = e instanceof Error ? e.message : String(e);
153157
console.error(
154-
`${LOG_PREFIX} 💥 Error executing tool '${tool.name}':`,
155-
errorMessage,
158+
`${LOG_PREFIX} Error executing tool '${tool.name}': ${errorMessage}`,
156159
);
157160

158161
// 简单地抛出一个Error,MCP SDK会自动处理为适当的JSON-RPC错误

packages/mcp-server/src/bridge/openai.ts

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5,16 +5,18 @@ import { WritableStream } from 'node:stream/web';
55
import { GeminiApiClient } from '../gemini-client.js'; // <-- 引入新类
66
import { type OpenAIChatCompletionRequest } from '../types.js'; // <-- 引入新类型
77

8-
export function createOpenAIRouter(config: Config): Router {
8+
export function createOpenAIRouter(config: Config, debugMode = false): Router {
99
const router = Router();
1010

1111
router.post('/chat/completions', async (req: Request, res: Response) => {
1212
try {
1313
const body = req.body as OpenAIChatCompletionRequest;
14-
console.log(
15-
'[OpenAI Bridge] Received /chat/completions request:',
16-
JSON.stringify(body, null, 2),
17-
);
14+
if (debugMode) {
15+
console.log(
16+
'[OpenAI Bridge] Received /chat/completions request:',
17+
JSON.stringify(body, null, 2),
18+
);
19+
}
1820
const stream = body.stream !== false;
1921

2022
if (!stream) {
@@ -32,7 +34,7 @@ export function createOpenAIRouter(config: Config): Router {
3234
res.flushHeaders();
3335

3436
// 1. 使用新的 GeminiApiClient
35-
const client = new GeminiApiClient(config);
37+
const client = new GeminiApiClient(config, debugMode);
3638

3739
// 2. 发起请求,传递所有相关参数
3840
const geminiStream = await client.sendMessageStream({
@@ -43,7 +45,7 @@ export function createOpenAIRouter(config: Config): Router {
4345
});
4446

4547
// 3. 创建转换器和写入器
46-
const openAIStream = createOpenAIStreamTransformer(body.model);
48+
const openAIStream = createOpenAIStreamTransformer(body.model, debugMode);
4749
const writer = new WritableStream({
4850
write(chunk) {
4951
res.write(chunk);

packages/mcp-server/src/bridge/stream-transformer.ts

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@ interface OpenAIChunk {
3131
// --- 新的、有状态的转换器 ---
3232
export function createOpenAIStreamTransformer(
3333
model: string,
34+
debugMode = false,
3435
): TransformStream<StreamChunk, Uint8Array> {
3536
const chatID = `chatcmpl-${randomUUID()}`;
3637
const creationTime = Math.floor(Date.now() / 1000);
@@ -65,10 +66,12 @@ export function createOpenAIStreamTransformer(
6566

6667
return new TransformStream({
6768
transform(chunk: StreamChunk, controller) {
68-
console.log(
69-
`[Stream Transformer] Received chunk: ${chunk.type}`,
70-
chunk.data ? JSON.stringify(chunk.data) : '',
71-
);
69+
if (debugMode) {
70+
console.log(
71+
`[Stream Transformer] Received chunk: ${chunk.type}`,
72+
chunk.data ? JSON.stringify(chunk.data) : '',
73+
);
74+
}
7275
let delta: OpenAIDelta = {};
7376

7477
if (isFirstChunk) {
@@ -123,7 +126,9 @@ export function createOpenAIStreamTransformer(
123126

124127
case 'reasoning':
125128
// 这些事件目前在 OpenAI 格式中没有直接对应项,可以选择忽略或以某种方式记录
126-
console.log(`[Stream Transformer] Ignoring chunk: ${chunk.type}`);
129+
if (debugMode) {
130+
console.log(`[Stream Transformer] Ignoring chunk: ${chunk.type}`);
131+
}
127132
break;
128133
}
129134
},

packages/mcp-server/src/gemini-client.ts

Lines changed: 23 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -60,10 +60,12 @@ function sanitizeGeminiSchema(schema: any): any {
6060
export class GeminiApiClient {
6161
private readonly config: Config;
6262
private readonly contentGenerator;
63+
private readonly debugMode: boolean;
6364

64-
constructor(config: Config) {
65+
constructor(config: Config, debugMode = false) {
6566
this.config = config;
6667
this.contentGenerator = this.config.getGeminiClient().getContentGenerator();
68+
this.debugMode = debugMode;
6769
}
6870

6971
/**
@@ -236,14 +238,22 @@ export class GeminiApiClient {
236238
}): Promise<AsyncGenerator<StreamChunk>> {
237239
const history = messages.map(msg => this.openAIMessageToGemini(msg));
238240
const lastMessage = history.pop();
239-
console.log(
240-
'[GeminiApiClient] Sending to Gemini. History:',
241-
JSON.stringify(history, null, 2),
242-
);
243-
console.log(
244-
'[GeminiApiClient] Last Message:',
245-
JSON.stringify(lastMessage, null, 2),
246-
);
241+
242+
// Always show the model being used
243+
console.log(`[GeminiApiClient] Using model: ${model}`);
244+
245+
// Only show detailed history in debug mode
246+
if (this.debugMode) {
247+
console.log(
248+
'[GeminiApiClient] History:',
249+
JSON.stringify(history, null, 2),
250+
);
251+
console.log(
252+
'[GeminiApiClient] Last Message:',
253+
JSON.stringify(lastMessage, null, 2),
254+
);
255+
}
256+
247257
if (!lastMessage) {
248258
throw new Error('No message to send.');
249259
}
@@ -281,7 +291,10 @@ export class GeminiApiClient {
281291
},
282292
});
283293

284-
console.log('[GeminiApiClient] Got stream from Gemini.');
294+
if (this.debugMode) {
295+
console.log('[GeminiApiClient] Got stream from Gemini.');
296+
}
297+
285298
// Transform the event stream to a simpler StreamChunk stream
286299
return (async function* (): AsyncGenerator<StreamChunk> {
287300
for await (const response of geminiStream) {

packages/mcp-server/src/index.ts

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ async function startMcpServer() {
6868
process.exit(1);
6969
}
7070

71-
console.log('Starting Gemini CLI in MCP Server Mode...');
71+
console.log('🚀 Starting Gemini CLI MCP Server...');
7272

7373
// 2. 复用配置加载的核心部分,但手动构造 Config
7474
const workspaceRoot = process.cwd();
@@ -93,7 +93,9 @@ async function startMcpServer() {
9393
}
9494
selectedAuthType = selectedAuthType || AuthType.USE_GEMINI;
9595
await config.refreshAuth(selectedAuthType);
96-
console.log(`Using authentication method: ${selectedAuthType}`);
96+
if (debugMode) {
97+
console.log(`Using authentication method: ${selectedAuthType}`);
98+
}
9799

98100
// Check for the custom tools model environment variable
99101
const toolsDefaultModel = process.env.GEMINI_TOOLS_DEFAULT_MODEL;
@@ -106,7 +108,7 @@ async function startMcpServer() {
106108
}
107109

108110
// 4. 初始化并启动 MCP 桥接服务 和 OpenAI 服务
109-
const mcpBridge = new GcliMcpBridge(config, cliVersion);
111+
const mcpBridge = new GcliMcpBridge(config, cliVersion, debugMode);
110112

111113
const app = express();
112114
app.use(express.json());
@@ -115,7 +117,7 @@ async function startMcpServer() {
115117
await mcpBridge.start(app); // 修改 start 方法以接收 express app 实例
116118

117119
// 启动 OpenAI 兼容端点
118-
const openAIRouter = createOpenAIRouter(config);
120+
const openAIRouter = createOpenAIRouter(config, debugMode);
119121
app.use('/v1', openAIRouter);
120122

121123
app.listen(port, () => {

0 commit comments

Comments
 (0)