-import { GenerateContentResponse, FinishReason } from '@google/genai';
 import { randomUUID } from 'node:crypto';
+import {
+  GeminiEventType,
+  ServerGeminiStreamEvent,
+} from '@google/gemini-cli-core';
 
-// --- Updated OpenAI response structure interfaces ---
+// --- OpenAI response structure interfaces ---
 interface OpenAIDelta {
   role?: 'assistant';
   content?: string | null;
@@ -28,21 +31,15 @@ interface OpenAIChunk {
   }[];
 }
 
-type ToolCallState = {
-  id: string;
-  name: string;
-  arguments: string;
-};
-
-// --- Updated, stateful transformer ---
+// --- New, stateful transformer ---
 export function createOpenAIStreamTransformer(
   model: string,
-): TransformStream<GenerateContentResponse, Uint8Array> {
+): TransformStream<ServerGeminiStreamEvent, Uint8Array> {
   const chatID = `chatcmpl-${randomUUID()}`;
   const creationTime = Math.floor(Date.now() / 1000);
   const encoder = new TextEncoder();
   let isFirstChunk = true;
-  const toolCallStates: ToolCallState[] = [];
+  let toolCallIndex = 0;
 
   const createChunk = (
     delta: OpenAIDelta,
@@ -70,76 +67,78 @@ export function createOpenAIStreamTransformer(
   };
 
   return new TransformStream({
-    transform(geminiChunk, controller) {
-      const parts = geminiChunk.candidates?.[0]?.content?.parts || [];
-      const finishReason = geminiChunk.candidates?.[0]?.finishReason;
-
-      for (const part of parts) {
-        let delta: OpenAIDelta = {};
+    transform(event: ServerGeminiStreamEvent, controller) {
+      let delta: OpenAIDelta = {};
 
-        if (isFirstChunk) {
-          delta.role = 'assistant';
-          isFirstChunk = false;
-        }
+      if (isFirstChunk) {
+        delta.role = 'assistant';
+        isFirstChunk = false;
+      }
 
-        if (part.text) {
-          delta.content = part.text;
-          enqueueChunk(controller, createChunk(delta));
-        }
+      switch (event.type) {
+        case GeminiEventType.Content:
+          if (event.value) {
+            delta.content = event.value;
+            enqueueChunk(controller, createChunk(delta));
+          }
+          break;
 
-        if (part.functionCall && part.functionCall.name) {
-          const callId = `call_${randomUUID()}`;
+        case GeminiEventType.ToolCallRequest: {
+          const { name, args } = event.value;
+          // **Important**: embed the function name in the ID so it can be recovered when the tool response comes back
+          const toolCallId = `call_${name}_${randomUUID()}`;
 
-          // Simulate sending tool_calls in chunks
-          // 1. Send the chunk carrying the name
+          // OpenAI streaming tool calls must be sent as multiple chunks
+          // 1. Send the chunk containing the function name
           const nameDelta: OpenAIDelta = {
+            ...delta, // carries role (if this is the first chunk)
             tool_calls: [
               {
-                index: toolCallStates.length,
-                id: callId,
+                index: toolCallIndex,
+                id: toolCallId,
                 type: 'function',
-                function: { name: part.functionCall.name, arguments: '' },
+                function: { name: name, arguments: '' },
               },
             ],
           };
-          if (isFirstChunk) {
-            nameDelta.role = 'assistant';
-            isFirstChunk = false;
-          }
           enqueueChunk(controller, createChunk(nameDelta));
 
-          // 2. Send the chunk carrying the arguments
+          // 2. Send the chunk containing the arguments
           const argsDelta: OpenAIDelta = {
             tool_calls: [
               {
-                index: toolCallStates.length,
-                id: callId,
+                index: toolCallIndex,
+                id: toolCallId,
                 type: 'function',
-                function: { arguments: JSON.stringify(part.functionCall.args) },
+                function: { arguments: JSON.stringify(args) },
               },
             ],
           };
           enqueueChunk(controller, createChunk(argsDelta));
 
-          toolCallStates.push({
-            id: callId,
-            name: part.functionCall.name,
-            arguments: JSON.stringify(part.functionCall.args),
-          });
+          toolCallIndex++;
+          break;
         }
-      }
 
-      if (finishReason && finishReason !== 'FINISH_REASON_UNSPECIFIED') {
-        const reason =
-          finishReason === FinishReason.STOP
-            ? toolCallStates.length > 0
-              ? 'tool_calls'
-              : 'stop'
-            : finishReason.toLowerCase();
-        enqueueChunk(controller, createChunk({}, reason));
+        case GeminiEventType.ChatCompressed:
+        case GeminiEventType.Thought:
+          // These events have no direct OpenAI equivalent for now; ignore them or log them as needed
+          console.log(`[Stream Transformer] Ignoring event: ${event.type}`);
+          break;
+
+        // Error and cancellation events should be handled at a higher level, but are listed here for completeness
+        case GeminiEventType.Error:
+        case GeminiEventType.UserCancelled:
+          // If needed, a data chunk carrying the error message could be emitted here
+          break;
       }
     },
+
     flush(controller) {
+      // At the end of the stream, emit a finish_reason of `tool_calls` or `stop`
+      const finish_reason = toolCallIndex > 0 ? 'tool_calls' : 'stop';
+      enqueueChunk(controller, createChunk({}, finish_reason));
+
       const doneString = `data: [DONE]\n\n`;
       controller.enqueue(encoder.encode(doneString));
     },
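
For context, here is a minimal sketch of how the transformer could be wired into an OpenAI-compatible SSE endpoint. This is not part of the commit: the import path, the `events` iterable, and the use of the web-standard `ReadableStream`/`Response` APIs (Node 18+) are illustrative assumptions.

```ts
import type { ServerGeminiStreamEvent } from '@google/gemini-cli-core';
// Hypothetical path; adjust to wherever createOpenAIStreamTransformer lives.
import { createOpenAIStreamTransformer } from './openaiStreamTransformer.js';

// Turn any source of Gemini stream events into an OpenAI-style
// text/event-stream HTTP response.
export function toChatCompletionsResponse(
  events: AsyncIterable<ServerGeminiStreamEvent>,
  model: string,
): Response {
  // Adapt the async iterable into a web ReadableStream of events...
  const source = new ReadableStream<ServerGeminiStreamEvent>({
    async start(controller) {
      for await (const event of events) {
        controller.enqueue(event);
      }
      controller.close();
    },
  });

  // ...then pipe it through the transformer to get SSE-encoded bytes.
  const sse = source.pipeThrough(createOpenAIStreamTransformer(model));

  return new Response(sse, {
    headers: {
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
    },
  });
}
```

Because `flush` always emits a final `finish_reason` chunk followed by `data: [DONE]`, the consumer sees a well-formed OpenAI stream even when the Gemini side ends without an explicit stop event.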