Skip to content

Commit 7e3cae5

Browse files
committed
more work to normalize chunk format
1 parent fd9457c commit 7e3cae5

File tree

6 files changed

+102
-139
lines changed

6 files changed

+102
-139
lines changed

README.md

Lines changed: 55 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -124,8 +124,9 @@ client:create_response({
124124
}, {
125125
stream = true
126126
}, function(chunk)
127-
if chunk.text_delta then
128-
io.stdout:write(chunk.text_delta)
127+
-- Raw event object from API: check type and access delta directly
128+
if chunk.type == "response.output_text.delta" then
129+
io.stdout:write(chunk.delta)
129130
io.stdout:flush()
130131
end
131132
end)
@@ -246,7 +247,7 @@ Sends a request to the `/responses` endpoint (Responses API).
246247

247248
- `input`: A string or array of message objects (with `role` and `content` fields)
248249
- `opts`: Additional options passed directly to the API (e.g. model, temperature, instructions, tools, previous_response_id, etc.) https://platform.openai.com/docs/api-reference/responses
249-
- `stream_callback`: Optional function called for each parsed chunk when `stream = true` is passed in opts
250+
- `stream_callback`: Optional function called for each raw event object when `stream = true` is passed in opts (e.g. `{type = "response.output_text.delta", delta = "Hello"}`)
250251

251252
Returns HTTP status, response object, and output headers. The response object
252253
will be decoded from JSON if possible, otherwise the raw string is returned.
@@ -320,6 +321,57 @@ Sends a request to the `/images/generations` endpoint to generate images.
320321

321322
Returns HTTP status, response object, and output headers.
322323

324+
#### ResponsesChatSession
325+
326+
This class manages chat sessions using OpenAI's Responses API. Unlike
327+
ChatSession, conversation state is maintained server-side via
328+
`previous_response_id`. Typically created with `new_responses_chat_session`.
329+
330+
The field `response_history` stores an array of response objects from past
331+
interactions. The field `current_response_id` holds the ID of the most recent
332+
response, used to maintain conversation continuity.
333+
334+
##### `new(client, opts)`
335+
336+
Constructor for the ResponsesChatSession.
337+
338+
- `client`: An instance of the OpenAI client.
339+
- `opts`: An optional table of options.
340+
- `model`: Model to use (defaults to client's default_model)
341+
- `instructions`: System instructions for the conversation
342+
- `tools`: Array of tool definitions
343+
- `previous_response_id`: Resume from a previous response
344+
345+
##### `session:send(input, stream_callback)`
346+
347+
Sends input and returns the response, maintaining conversation state
348+
automatically.
349+
350+
- `input`: A string or array of message objects.
351+
- `stream_callback`: Optional function for streaming responses.
352+
353+
Returns a response object on success (or accumulated text string when
354+
streaming). On failure, returns `nil`, an error message, and the raw response.
355+
356+
Response objects have helper methods:
357+
- `response:get_output_text()`: Extract all text content as a string
358+
- `response:get_images()`: Extract generated images (when using image_generation tool)
359+
- `tostring(response)`: Converts to text string
360+
361+
The `stream_callback` receives two arguments: the delta text string and the raw
362+
event object. Each call provides an incremental piece of the response text.
363+
364+
##### `session:create_response(input, opts, stream_callback)`
365+
366+
Lower-level method to create a response with additional options.
367+
368+
- `input`: A string or array of message objects.
369+
- `opts`: Additional options (model, temperature, tools, previous_response_id, etc.)
370+
- `stream_callback`: Optional function for streaming responses.
371+
372+
Returns a response object on success. On failure, returns `nil`, an error
373+
message, and the raw response.
374+
323375
#### ChatSession
324376

325377
This class manages chat sessions and history with the OpenAI API. Typically

examples/chat_completions/streaming.lua

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,12 @@ client:create_chat_completion({
77
}, {
88
stream = true
99
}, function(chunk)
10-
io.stdout:write(chunk.content)
11-
io.stdout:flush()
10+
-- Raw event object from API: access content via choices[1].delta.content
11+
local delta = chunk.choices and chunk.choices[1] and chunk.choices[1].delta
12+
if delta and delta.content then
13+
io.stdout:write(delta.content)
14+
io.stdout:flush()
15+
end
1216
end)
1317

1418
print() -- print a newline

examples/responses/basic.lua

Lines changed: 6 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -27,17 +27,14 @@ print(response.output[1].content[1].text)
2727
print("\n== Streaming example (via session) ==")
2828
local stream_session = client:new_responses_chat_session()
2929

30-
local streamed_text, err, raw = stream_session:send("Stream a brief greeting.", function(chunk)
31-
if chunk.text_delta then
32-
io.write(chunk.text_delta)
33-
io.flush()
34-
end
35-
36-
if chunk.response then
37-
print("\n\nCompleted response ID: " .. chunk.response.id)
38-
end
30+
local streamed_text, err, raw = stream_session:send("Stream a brief greeting.", function(delta)
31+
-- delta is the text string, second arg (raw) is the raw event object
32+
io.write(delta)
33+
io.flush()
3934
end)
4035

36+
print() -- newline after streaming
37+
4138
if not streamed_text then
4239
io.stderr:write("Streaming request failed: " .. err .. "\n")
4340
if raw then

openai/responses.lua

Lines changed: 15 additions & 54 deletions
Original file line numberDiff line numberDiff line change
@@ -124,45 +124,6 @@ local parse_responses_response = types.partial({
124124
usage = empty + types.table:tag("usage"),
125125
status = empty + types.string:tag("status")
126126
})
127-
local parse_response_stream_chunk
128-
parse_response_stream_chunk = function(chunk)
129-
if not (type(chunk) == "table") then
130-
return
131-
end
132-
if not (chunk.type) then
133-
return
134-
end
135-
if chunk.type == "response.output_text.delta" and type(chunk.delta) == "string" then
136-
return {
137-
type = chunk.type,
138-
text_delta = chunk.delta,
139-
raw = chunk
140-
}
141-
end
142-
if chunk.type == "response.completed" and type(chunk.response) == "table" then
143-
local parsed, err = parse_responses_response(chunk.response)
144-
if parsed then
145-
chunk.response = parsed
146-
return chunk
147-
else
148-
return nil, err
149-
end
150-
end
151-
if chunk.delta and type(chunk.delta.text) == "string" then
152-
return {
153-
type = chunk.type,
154-
text_delta = chunk.delta.text,
155-
raw = chunk
156-
}
157-
end
158-
if chunk.content_block_delta and type(chunk.content_block_delta.text) == "string" then
159-
return {
160-
type = chunk.type,
161-
text_delta = chunk.content_block_delta.text,
162-
raw = chunk
163-
}
164-
end
165-
end
166127
local create_response_stream_filter
167128
create_response_stream_filter = function(chunk_callback)
168129
assert(types["function"](chunk_callback), "Must provide chunk_callback function when streaming response")
@@ -186,12 +147,7 @@ create_response_stream_filter = function(chunk_callback)
186147
return cjson.decode(json_data)
187148
end)
188149
if success then
189-
do
190-
local chunk_data = parse_response_stream_chunk(parsed)
191-
if chunk_data then
192-
chunk_callback(chunk_data)
193-
end
194-
end
150+
chunk_callback(parsed)
195151
end
196152
end
197153
end
@@ -245,15 +201,20 @@ do
245201
local wrapped_callback
246202
if merged_opts.stream then
247203
wrapped_callback = function(chunk)
248-
if chunk.text_delta then
249-
table.insert(accumulated_text, chunk.text_delta)
250-
end
251-
if chunk.response then
252-
add_response_helpers(chunk.response)
253-
final_response = chunk.response
254-
end
255-
if stream_callback then
256-
return stream_callback(chunk)
204+
local _exp_0 = chunk.type
205+
if "response.output_text.delta" == _exp_0 then
206+
table.insert(accumulated_text, chunk.delta)
207+
if stream_callback then
208+
return stream_callback(chunk.delta, chunk)
209+
end
210+
elseif "response.completed" == _exp_0 then
211+
if type(chunk.response) == "table" then
212+
local parsed = parse_responses_response(chunk.response)
213+
if parsed then
214+
add_response_helpers(parsed)
215+
final_response = parsed
216+
end
217+
end
257218
end
258219
end
259220
end

openai/responses.moon

Lines changed: 12 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -95,42 +95,6 @@ parse_responses_response = types.partial {
9595
status: empty + types.string\tag "status"
9696
}
9797

98-
parse_response_stream_chunk = (chunk) ->
99-
return unless type(chunk) == "table"
100-
return unless chunk.type
101-
102-
-- New Responses streaming format
103-
if chunk.type == "response.output_text.delta" and type(chunk.delta) == "string"
104-
return {
105-
type: chunk.type
106-
text_delta: chunk.delta
107-
raw: chunk
108-
}
109-
110-
if chunk.type == "response.completed" and type(chunk.response) == "table"
111-
parsed, err = parse_responses_response chunk.response
112-
if parsed
113-
chunk.response = parsed
114-
return chunk
115-
else
116-
return nil, err
117-
118-
-- Support older/alternate streaming formats
119-
if chunk.delta and type(chunk.delta.text) == "string"
120-
return {
121-
type: chunk.type
122-
text_delta: chunk.delta.text
123-
raw: chunk
124-
}
125-
126-
if chunk.content_block_delta and type(chunk.content_block_delta.text) == "string"
127-
return {
128-
type: chunk.type
129-
text_delta: chunk.content_block_delta.text
130-
raw: chunk
131-
}
132-
133-
13498
-- creates a ltn12 compatible filter function that will call chunk_callback
13599
-- for each parsed json chunk from the server-sent events api response
136100
create_response_stream_filter = (chunk_callback) ->
@@ -159,8 +123,7 @@ create_response_stream_filter = (chunk_callback) ->
159123
if json_data != "[DONE]"
160124
success, parsed = pcall -> cjson.decode json_data
161125
if success
162-
if chunk_data = parse_response_stream_chunk parsed
163-
chunk_callback chunk_data
126+
chunk_callback parsed
164127

165128
...
166129

@@ -212,15 +175,17 @@ class ResponsesChatSession
212175

213176
wrapped_callback = if merged_opts.stream
214177
(chunk) ->
215-
if chunk.text_delta
216-
table.insert accumulated_text, chunk.text_delta
217-
218-
if chunk.response
219-
add_response_helpers chunk.response
220-
final_response = chunk.response
221-
222-
if stream_callback
223-
stream_callback chunk
178+
switch chunk.type
179+
when "response.output_text.delta"
180+
table.insert accumulated_text, chunk.delta
181+
if stream_callback
182+
stream_callback chunk.delta, chunk
183+
when "response.completed"
184+
if type(chunk.response) == "table"
185+
parsed = parse_responses_response chunk.response
186+
if parsed
187+
add_response_helpers parsed
188+
final_response = parsed
224189

225190
status, response = @client\create_response input, merged_opts, wrapped_callback
226191

spec/openai_spec.moon

Lines changed: 8 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -588,10 +588,10 @@ describe "OpenAI API Client", ->
588588
-- Raw API returns the concatenated SSE data
589589
assert.truthy response
590590

591-
-- Stream callback received parsed chunks
591+
-- Stream callback received raw event objects
592592
assert.same 3, #received
593-
assert.same "Hello", received[1].text_delta
594-
assert.same " world", received[2].text_delta
593+
assert.same {type: "response.output_text.delta", delta: "Hello"}, received[1]
594+
assert.same {type: "response.output_text.delta", delta: " world"}, received[2]
595595
assert.same "response.completed", received[3].type
596596

597597
it "streams response deltas via session", ->
@@ -612,33 +612,17 @@ describe "OpenAI API Client", ->
612612
200, table.concat chunks
613613

614614
received = {}
615-
stream_callback = (chunk) ->
616-
table.insert received, chunk
617-
618-
if chunk.response
619-
assert.same "resp_stream", chunk.response.id
620-
assert.same "Hello world", chunk.response\get_output_text!
615+
stream_callback = (delta, raw) ->
616+
table.insert received, {delta, raw}
621617

622618
session = client\new_responses_chat_session { model: "gpt-4.1-mini" }
623619
out = assert session\send "Say hello back", stream_callback
624620

625621
-- Session returns accumulated text
626622
assert.same "Hello world", out
627623

624+
-- Callback receives delta string and raw event object
628625
assert.same {
629-
{ type: "response.output_text.delta", text_delta: "Hello", raw: { type: "response.output_text.delta", delta: "Hello" } }
630-
{ type: "response.output_text.delta", text_delta: " world", raw: { type: "response.output_text.delta", delta: " world" } }
631-
{ type: "response.completed", response: {
632-
id: "resp_stream"
633-
object: "response"
634-
output: {
635-
{
636-
type: "message"
637-
role: "assistant"
638-
content: {
639-
{ type: "output_text", text: "Hello world" }
640-
}
641-
}
642-
}
643-
}}
626+
{"Hello", {type: "response.output_text.delta", delta: "Hello"}}
627+
{" world", {type: "response.output_text.delta", delta: " world"}}
644628
}, received

0 commit comments

Comments
 (0)