Skip to content

Commit c29caa1

Browse files
committed
initial responses implementation
1 parent 31885ee commit c29caa1

File tree

5 files changed

+767
-3
lines changed

5 files changed

+767
-3
lines changed

openai/init.lua

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -427,9 +427,18 @@ do
427427
_base_0.__class = _class_0
428428
OpenAI = _class_0
429429
end
-- Load the Responses API module and mix its methods (create_response,
-- new_response_session, ...) into the OpenAI client's method table.
local responses_methods, ResponseSession
do
  local _responses = require("openai.responses")
  responses_methods, ResponseSession = _responses.responses_methods, _responses.ResponseSession
end
for name, fn in pairs(responses_methods) do
  OpenAI.__base[name] = fn
end
-- Public module interface; `new` aliases the OpenAI constructor so callers
-- can write `require("openai")(...)`-style construction via `.new`.
return {
  OpenAI = OpenAI,
  ChatSession = ChatSession,
  ResponseSession = ResponseSession,
  VERSION = VERSION,
  new = OpenAI
}

openai/init.moon

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -364,6 +364,7 @@ class OpenAI
364364
image_generation: (params) =>
365365
@_request "POST", "/images/generations", params
366366

367+
-- Responses API methods are mixed in from openai.responses (see end of file)
367368
_request: (method, path, payload, more_headers, stream_fn) =>
368369
assert path, "missing path"
369370
assert method, "missing method"
@@ -419,4 +420,9 @@ class OpenAI
419420

420421
require @config.http_provider
421422

-- Mix the Responses API methods into the OpenAI client and re-export
-- ResponseSession alongside the existing public interface.
import responses_methods, ResponseSession from require "openai.responses"

for k, v in pairs responses_methods
  OpenAI.__base[k] = v

{:OpenAI, :ChatSession, :ResponseSession, :VERSION, new: OpenAI}

openai/responses.lua

Lines changed: 325 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,325 @@
-- Responses API support: request/response validation schemas built with
-- tableshape. The locals defined here (empty, input_format, content_item,
-- response_message, parse_responses_response) are shared by the rest of
-- this module.
local cjson = require("cjson")
local types = require("tableshape").types

-- Matches an absent value: Lua nil or the cjson null sentinel.
local empty = (types["nil"] + types.literal(cjson.null)):describe("nullable")

-- A request `input`: either a bare prompt string or an array of
-- chat-style messages.
local input_format = types.string + types.array_of(types.partial({
  role = types.one_of({ "system", "user", "assistant" }),
  content = types.string
}))

-- output_text and input_text items share the same shape apart from `type`.
local function text_item(kind)
  return types.partial({
    type = kind,
    text = types.string,
    annotations = empty + types.array_of(types.table),
    logprobs = empty + types.table
  })
end

-- One entry of a message's `content` array.
local content_item = types.one_of({
  text_item("output_text"),
  text_item("input_text"),
  types.partial({
    type = "tool_use",
    id = types.string,
    name = types.string,
    input = types.table
  })
})

-- A single assistant message inside `response.output`.
local response_message = types.partial({
  id = empty + types.string,
  type = types.literal("message"),
  role = types.literal("assistant") + types.string,
  content = types.array_of(content_item),
  status = empty + types.string
})

-- Validates a full /responses payload and tags the fields we care about.
-- Returns the tagged table on success, or nil plus an error message.
local parse_responses_response = types.partial({
  id = types.string:tag("id"),
  object = empty + types.literal("response"):tag("object"),
  output = types.array_of(response_message):tag("output"),
  model = empty + types.string:tag("model"),
  usage = empty + types.table:tag("usage"),
  stop_reason = empty + types.string:tag("stop_reason"),
  stop_sequence = empty + (types.string + empty):tag("stop_sequence")
})
-- Normalize one decoded SSE event from the Responses API.
-- Text deltas are mapped to { type=, text_delta=, raw= }; a
-- `response.completed` event has its payload validated and substituted
-- in place. Unrecognized chunks yield no result.
local function parse_response_stream_chunk(chunk)
  if type(chunk) ~= "table" or not chunk.type then
    return
  end

  -- Canonical incremental-text event: delta is the raw string fragment.
  if chunk.type == "response.output_text.delta" and type(chunk.delta) == "string" then
    return { type = chunk.type, text_delta = chunk.delta, raw = chunk }
  end

  -- Terminal event: validate the embedded response before passing it on.
  if chunk.type == "response.completed" and type(chunk.response) == "table" then
    local parsed, err = parse_responses_response(chunk.response)
    if not parsed then
      return nil, err
    end
    chunk.response = parsed
    return chunk
  end

  -- Fallback shapes where the text fragment is nested one level deeper.
  local delta = chunk.delta
  if delta and type(delta.text) == "string" then
    return { type = chunk.type, text_delta = delta.text, raw = chunk }
  end

  local block_delta = chunk.content_block_delta
  if block_delta and type(block_delta.text) == "string" then
    return { type = chunk.type, text_delta = block_delta.text, raw = chunk }
  end
end
-- Concatenate every `output_text` fragment found in `response`.
-- Supports both the Responses API shape (response.output -> messages,
-- each with a content array) and a flat response.content array.
-- Returns "" when response is nil or contains no text.
local function extract_output_text(response)
  if not response then
    return ""
  end

  local parts = {}

  -- Append the text of each output_text item in `items` to parts.
  local function collect(items)
    for _, item in ipairs(items) do
      if item.type == "output_text" and item.text then
        parts[#parts + 1] = item.text
      end
    end
  end

  if response.output then
    for _, message in ipairs(response.output) do
      if message.content then
        collect(message.content)
      end
    end
  elseif response.content then
    collect(response.content)
  end

  return table.concat(parts)
end
-- Attach convenience fields to a parsed response — currently just
-- `output_text`, the concatenation of all output_text fragments.
-- Falsy inputs are returned untouched.
local function add_response_helpers(response)
  if not response then
    return response
  end
  response.output_text = extract_output_text(response)
  return response
end
-- Build a pass-through stream filter for the SSE body of a /responses
-- request. Raw byte chunks are buffered, split on newlines, and each
-- `data: ...` line is JSON-decoded; recognized events are handed to
-- chunk_callback. Arguments are returned unchanged so the filter can sit
-- inside the client's request pipeline.
-- NOTE(review): bytes after the final newline stay in the buffer and are
-- never flushed; SSE events are newline-terminated so this should be
-- harmless — confirm against the HTTP provider's chunking behavior.
local function create_response_stream_filter(chunk_callback)
  assert(types["function"](chunk_callback), "Must provide chunk_callback function when streaming response")

  local buffer = ""

  -- Pop one complete line (trailing whitespace stripped, so \r\n is fine)
  -- off the buffer, or return nil when no full line is available yet.
  local function next_line()
    local pos = buffer:find("\n")
    if not pos then
      return nil
    end
    local line = buffer:sub(1, pos - 1)
    buffer = buffer:sub(pos + 1)
    return (line:gsub("%s*$", ""))
  end

  return function(...)
    local chunk = ...
    if type(chunk) == "string" then
      buffer = buffer .. chunk
      local line = next_line()
      while line do
        if line:match("^data: ") then
          local json_data = line:sub(7)
          if json_data ~= "[DONE]" then
            -- Tolerate malformed JSON lines: skip instead of raising.
            local ok, parsed = pcall(cjson.decode, json_data)
            if ok then
              local chunk_data = parse_response_stream_chunk(parsed)
              if chunk_data then
                chunk_callback(chunk_data)
              end
            end
          end
        end
        line = next_line()
      end
    end
    return ...
  end
end
-- A conversational session over the Responses API (compiled MoonScript
-- class). Tracks the id of the most recent response so successive turns
-- chain via `previous_response_id`, and records every parsed response in
-- `response_history`.
local ResponseSession
do
  local _class_0
  local _base_0 = {
    -- Send `input` as the next turn of the session, chaining off the
    -- current response id. Streams when `stream_callback` is provided.
    send = function(self, input, stream_callback)
      if stream_callback == nil then
        stream_callback = nil
      end
      return self:create_response(input, {
        previous_response_id = self.current_response_id,
        stream = stream_callback and true or nil
      }, stream_callback)
    end,
    -- POST /responses with `input` (string or message array; validated by
    -- input_format). Keys in `opts` are copied into the request payload.
    -- Non-streaming: returns the parsed response (nil, err, raw on
    -- failure) and appends it to response_history. Streaming
    -- (opts.stream): returns only the accumulated text.
    create_response = function(self, input, opts, stream_callback)
      if opts == nil then
        opts = { }
      end
      if stream_callback == nil then
        stream_callback = nil
      end
      assert(input, "input must be provided")
      assert(input_format(input))
      local payload = {
        -- session-level model override, else a hard-coded default
        model = self.opts.model or "gpt-4.1-mini",
        input = input
      }
      if self.opts.instructions then
        payload.instructions = self.opts.instructions
      end
      if opts then
        for k, v in pairs(opts) do
          payload[k] = v
        end
      end
      local accumulated_text = { }
      local final_response = nil
      local stream_filter
      if payload.stream then
        -- Collect text deltas and capture the final parsed response while
        -- forwarding each event to the caller's stream_callback.
        stream_filter = create_response_stream_filter(function(chunk)
          if chunk.text_delta then
            table.insert(accumulated_text, chunk.text_delta)
          end
          if chunk.response then
            final_response = add_response_helpers(chunk.response)
            chunk.response = final_response
          end
          if stream_callback then
            return stream_callback(chunk)
          end
        end)
      end
      local status, response = self.client:_request("POST", "/responses", payload, nil, stream_filter)
      if status ~= 200 then
        return nil, "Request failed with status: " .. tostring(status), response
      end
      if payload.stream then
        local text_out = table.concat(accumulated_text)
        if final_response then
          self.current_response_id = final_response.id
          table.insert(self.response_history, final_response)
        elseif text_out ~= "" then
          -- NOTE(review): synthesizes a fake id when the stream produced
          -- text but no response.completed event; this id is later sent as
          -- previous_response_id, which the server may reject — confirm
          -- this fallback is intended.
          self.current_response_id = "stream_" .. tostring(os.time())
        end
        return text_out
      end
      local parsed_response, err = parse_responses_response(response)
      if not (parsed_response) then
        return nil, "Failed to parse response: " .. tostring(err), response
      end
      add_response_helpers(parsed_response)
      self.current_response_id = parsed_response.id
      table.insert(self.response_history, parsed_response)
      return parsed_response
    end
  }
  _base_0.__index = _base_0
  _class_0 = setmetatable({
    -- opts may carry: model, instructions, previous_response_id (resumes
    -- an existing chain).
    __init = function(self, client, opts)
      if opts == nil then
        opts = { }
      end
      self.client, self.opts = client, opts
      self.response_history = { }
      self.current_response_id = self.opts.previous_response_id
    end,
    __base = _base_0,
    __name = "ResponseSession"
  }, {
    __index = _base_0,
    -- Calling the class constructs an instance (MoonScript class protocol).
    __call = function(cls, ...)
      local _self_0 = setmetatable({}, _base_0)
      cls.__init(_self_0, ...)
      return _self_0
    end
  })
  _base_0.__class = _class_0
  ResponseSession = _class_0
end
-- Methods mixed into the OpenAI client class (see openai/init.lua).
local responses_methods = {
  -- Start a ResponseSession bound to this client; extra arguments are the
  -- session opts table.
  new_response_session = function(self, ...)
    return ResponseSession(self, ...)
  end,

  -- One-shot POST /responses without session bookkeeping. `input` is a
  -- string or message array (validated by input_format); `opts` keys are
  -- copied into the payload. Non-streaming: returns the parsed response,
  -- or nil, err, raw-response on failure. Streaming (opts.stream):
  -- returns only the accumulated text, forwarding events to
  -- stream_callback as they arrive.
  create_response = function(self, input, opts, stream_callback)
    opts = opts or { }
    assert(input, "input must be provided")
    assert(input_format(input))

    local payload = { model = "gpt-4.1-mini", input = input }
    for k, v in pairs(opts) do
      payload[k] = v
    end

    local accumulated_text = { }
    local final_response = nil
    local stream_filter
    if payload.stream then
      -- Accumulate text deltas and capture the final parsed response
      -- while forwarding every event to the caller.
      stream_filter = create_response_stream_filter(function(chunk)
        if chunk.text_delta then
          accumulated_text[#accumulated_text + 1] = chunk.text_delta
        end
        if chunk.response then
          final_response = add_response_helpers(chunk.response)
          chunk.response = final_response
        end
        if stream_callback then
          return stream_callback(chunk)
        end
      end)
    end

    local status, response = self:_request("POST", "/responses", payload, nil, stream_filter)
    if status ~= 200 then
      return nil, "Request failed with status: " .. tostring(status), response
    end

    if payload.stream then
      if final_response then
        add_response_helpers(final_response)
      end
      return table.concat(accumulated_text)
    end

    local parsed_response, err = parse_responses_response(response)
    if not parsed_response then
      return nil, "Failed to parse response: " .. tostring(err), response
    end
    return add_response_helpers(parsed_response)
  end
}

-- Module interface: the session class, the client mixin, and the parsing
-- helpers (exported mainly for testing).
return {
  ResponseSession = ResponseSession,
  responses_methods = responses_methods,
  parse_responses_response = parse_responses_response,
  parse_response_stream_chunk = parse_response_stream_chunk,
  add_response_helpers = add_response_helpers,
  extract_output_text = extract_output_text
}

0 commit comments

Comments
 (0)