
Commit 3f9b8f5

dont use nonstandard property on responses object, add metatable with method
1 parent cec62a5 commit 3f9b8f5
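
In short, the commit replaces the synthetic output_text field that was previously written onto the decoded response table with a get_output_text() method supplied through a shared metatable (which also defines __tostring). A minimal caller-side sketch of the difference, assuming response is a value returned by this client (the variable name is illustrative, not part of the library):

-- Before this commit: a nonstandard field was injected into the decoded table.
-- print(response.output_text)

-- After this commit: the table holds only API fields; the metatable provides the method.
print(response:get_output_text())

-- __tostring delegates to the same method, so this prints the same text.
print(tostring(response))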

8 files changed: +124 -171 lines


examples/responses/basic.lua

Lines changed: 42 additions & 29 deletions
@@ -1,64 +1,77 @@
+-- Simple demo script for the Responses API.
+-- Run with: lua examples/responses/basic.lua
+
 local openai = require("openai")
 local cjson = require("cjson")
+
 local api_key = os.getenv("OPENAI_API_KEY")
 assert(api_key, "Set OPENAI_API_KEY in your environment before running.")
+
 local client = openai.new(api_key)
-print("== One-off response ==")
-local response, err, raw = client:create_response("Write one short sentence about Lua.")
-if not (response) then
-  io.stderr:write("Request failed: " .. tostring(err) .. "\n")
-  if raw then
-    io.stderr:write(cjson.encode(raw), "\n")
+
+print("== One-off response (raw API) ==")
+local status, response = client:create_response("Write one short sentence about Lua.")
+
+if status ~= 200 then
+  io.stderr:write("Request failed with status: " .. status .. "\n")
+  if response then
+    io.stderr:write(cjson.encode(response) .. "\n")
   end
   os.exit(1)
 end
-print("Response ID: " .. tostring(response.id))
-print("Output text: " .. tostring(response.output_text))
-print("Stop reason: " .. tostring(response.stop_reason or 'unknown'))
-print("\n== Streaming example ==")
-local streamed_text = nil
-response, err, raw = client:create_response("Stream a brief greeting.", {
-  stream = true
-}, function(chunk)
+
+print("Response ID: " .. response.id)
+print("Status: " .. (response.status or "unknown"))
+
+print("\n== Streaming example (via session) ==")
+local stream_session = client:new_responses_chat_session()
+
+local streamed_text, err, raw = stream_session:send("Stream a brief greeting.", function(chunk)
   if chunk.text_delta then
     io.write(chunk.text_delta)
     io.flush()
   end
+
   if chunk.response then
-    return print("\n\nCompleted response ID: " .. tostring(chunk.response.id))
+    print("\n\nCompleted response ID: " .. chunk.response.id)
   end
 end)
-streamed_text = response
-if not (streamed_text) then
-  io.stderr:write("Streaming request failed: " .. tostring(err) .. "\n")
+
+if not streamed_text then
+  io.stderr:write("Streaming request failed: " .. err .. "\n")
   if raw then
-    io.stderr:write(cjson.encode(raw), "\n")
+    io.stderr:write(cjson.encode(raw) .. "\n")
   end
   os.exit(1)
 end
-print("Full streamed text: " .. tostring(streamed_text))
+
+print("Full streamed text: " .. streamed_text)
+
 print("\n== Stateful session ==")
 local session = client:new_responses_chat_session({
-  model = "gpt-4.1-mini",
   instructions = "Keep answers concise."
 })
+
 local first
 first, err, raw = session:send("Introduce yourself in 5 words.")
-if not (first) then
-  io.stderr:write("Session request failed: " .. tostring(err) .. "\n")
+if not first then
+  io.stderr:write("Session request failed: " .. err .. "\n")
   if raw then
-    io.stderr:write(cjson.encode(raw), "\n")
+    io.stderr:write(cjson.encode(raw) .. "\n")
   end
   os.exit(1)
 end
-print("First reply (" .. tostring(first.id) .. "): " .. tostring(first.output_text))
+
+print("First reply (" .. first.id .. "): " .. first:get_output_text())
+
 local second
 second, err, raw = session:send("Now say goodbye in the same style.")
-if not (second) then
-  io.stderr:write("Second session request failed: " .. tostring(err) .. "\n")
+if not second then
+  io.stderr:write("Second session request failed: " .. err .. "\n")
   if raw then
-    io.stderr:write(cjson.encode(raw), "\n")
+    io.stderr:write(cjson.encode(raw) .. "\n")
   end
   os.exit(1)
 end
-return print("Second reply (" .. tostring(second.id) .. "): " .. tostring(second.output_text))
+
+print("Second reply (" .. second.id .. "): " .. second:get_output_text())

examples/responses/basic.moon

Lines changed: 0 additions & 64 deletions
This file was deleted.

examples/responses/image.lua

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ local response, err = session:send({

 if response then
   print("Response ID:", response.id)
-  print("Output:", response.output_text)
+  print("Output:", response:get_output_text())
 else
   print("Error:", err)
 end

examples/responses/structured_output.lua

Lines changed: 2 additions & 2 deletions
@@ -113,9 +113,9 @@ local recipe_response, err = session:create_response("Give me a recipe for choco
 })

 if recipe_response then
-  print(recipe_response.output_text)
+  print(recipe_response:get_output_text())
   -- Verify it parses as valid JSON
-  assert(cjson.decode(recipe_response.output_text), "Failed to parse JSON")
+  assert(cjson.decode(recipe_response:get_output_text()), "Failed to parse JSON")
 else
   io.stderr:write("Error: " .. tostring(err) .. "\n")
   os.exit(1)

examples/responses/tools.lua

Lines changed: 2 additions & 2 deletions
@@ -86,13 +86,13 @@ if #tool_calls > 0 then
       })

       if follow_up then
-        print("\nFinal response:", follow_up.output_text)
+        print("\nFinal response:", follow_up:get_output_text())
       else
         print("Error sending tool result:", err2)
       end
     end
   end
 else
   -- No tool calls, just print the response
-  print("Response:", response.output_text)
+  print("Response:", response:get_output_text())
 end

openai/responses.lua

Lines changed: 44 additions & 42 deletions
@@ -1,6 +1,47 @@
 local cjson = require("cjson")
 local types
 types = require("tableshape").types
+local response_mt = {
+  __index = {
+    get_output_text = function(self)
+      local parts = { }
+      if self.output then
+        local _list_0 = self.output
+        for _index_0 = 1, #_list_0 do
+          local block = _list_0[_index_0]
+          if block.content then
+            local _list_1 = block.content
+            for _index_1 = 1, #_list_1 do
+              local item = _list_1[_index_1]
+              if item.type == "output_text" and item.text then
+                table.insert(parts, item.text)
+              end
+            end
+          end
+        end
+      elseif self.content then
+        local _list_0 = self.content
+        for _index_0 = 1, #_list_0 do
+          local item = _list_0[_index_0]
+          if item.type == "output_text" and item.text then
+            table.insert(parts, item.text)
+          end
+        end
+      end
+      return table.concat(parts)
+    end
+  },
+  __tostring = function(self)
+    return self:get_output_text()
+  end
+}
+local add_response_helpers
+add_response_helpers = function(response)
+  if response then
+    setmetatable(response, response_mt)
+  end
+  return response
+end
 local empty = (types["nil"] + types.literal(cjson.null)):describe("nullable")
 local input_content_item = types.one_of({
   types.partial({
@@ -107,44 +148,6 @@ parse_response_stream_chunk = function(chunk)
     }
   end
 end
-local extract_output_text
-extract_output_text = function(response)
-  if not (response) then
-    return ""
-  end
-  local parts = { }
-  if response.output then
-    local _list_0 = response.output
-    for _index_0 = 1, #_list_0 do
-      local block = _list_0[_index_0]
-      if block.content then
-        local _list_1 = block.content
-        for _index_1 = 1, #_list_1 do
-          local item = _list_1[_index_1]
-          if item.type == "output_text" and item.text then
-            table.insert(parts, item.text)
-          end
-        end
-      end
-    end
-  elseif response.content then
-    local _list_0 = response.content
-    for _index_0 = 1, #_list_0 do
-      local item = _list_0[_index_0]
-      if item.type == "output_text" and item.text then
-        table.insert(parts, item.text)
-      end
-    end
-  end
-  return table.concat(parts)
-end
-local add_response_helpers
-add_response_helpers = function(response)
-  if response then
-    response.output_text = extract_output_text(response)
-  end
-  return response
-end
 local create_response_stream_filter
 create_response_stream_filter = function(chunk_callback)
   assert(types["function"](chunk_callback), "Must provide chunk_callback function when streaming response")
@@ -228,8 +231,8 @@ do
       table.insert(accumulated_text, chunk.text_delta)
     end
    if chunk.response then
-      final_response = add_response_helpers(chunk.response)
-      chunk.response = final_response
+      add_response_helpers(chunk.response)
+      final_response = chunk.response
    end
    if stream_callback then
      return stream_callback(chunk)
@@ -284,6 +287,5 @@ return {
   ResponsesChatSession = ResponsesChatSession,
   parse_responses_response = parse_responses_response,
   parse_response_stream_chunk = parse_response_stream_chunk,
-  create_response_stream_filter = create_response_stream_filter,
-  extract_output_text = extract_output_text
+  create_response_stream_filter = create_response_stream_filter
 }
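
A side effect worth noting: because the helper now lives on a metatable instead of being stored as a field, serializing a response with cjson emits only the API payload. The module does not export response_mt or add_response_helpers, so the sketch below redefines a simplified copy of the metatable and uses a made-up minimal response table purely for illustration:

local cjson = require("cjson")

-- Simplified copy of the __index/__tostring metatable added above (output branch only).
local response_mt = {
  __index = {
    get_output_text = function(self)
      local parts = {}
      for _, block in ipairs(self.output or {}) do
        for _, item in ipairs(block.content or {}) do
          if item.type == "output_text" and item.text then
            table.insert(parts, item.text)
          end
        end
      end
      return table.concat(parts)
    end
  },
  __tostring = function(self)
    return self:get_output_text()
  end
}

-- Hypothetical decoded response payload, shaped like what add_response_helpers receives.
local response = setmetatable({
  id = "resp_123",
  output = {
    { content = { { type = "output_text", text = "Hello from Lua." } } }
  }
}, response_mt)

print(response:get_output_text())  --> Hello from Lua.
print(tostring(response))          --> Hello from Lua.
print(cjson.encode(response))      --> only id/output; no synthetic output_text field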
