Skip to content

Commit 165cb5f

Browse files
committed
fix up the responses implementation
1 parent 2d80e98 commit 165cb5f

File tree

5 files changed

+217
-185
lines changed

5 files changed

+217
-185
lines changed

openai/init.lua

Lines changed: 40 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,11 @@ do
5252
new_chat_session = function(self, ...)
5353
return ChatSession(self, ...)
5454
end,
55+
new_response_chat_session = function(self, ...)
56+
local ResponsesChatSession
57+
ResponsesChatSession = require("openai.responses").ResponsesChatSession
58+
return ResponsesChatSession(self, ...)
59+
end,
5560
create_stream_filter = function(self, chunk_callback)
5661
assert(types["function"](chunk_callback), "Must provide chunk_callback function when streaming response")
5762
local accumulation_buffer = ""
@@ -171,6 +176,41 @@ do
171176
image_generation = function(self, params)
172177
return self:_request("POST", "/images/generations", params)
173178
end,
179+
response = function(self, response_id)
180+
assert(response_id, "response_id is required")
181+
return self:_request("GET", "/responses/" .. tostring(response_id))
182+
end,
183+
delete_response = function(self, response_id)
184+
assert(response_id, "response_id is required")
185+
return self:_request("DELETE", "/responses/" .. tostring(response_id))
186+
end,
187+
cancel_response = function(self, response_id)
188+
assert(response_id, "response_id is required")
189+
return self:_request("POST", "/responses/" .. tostring(response_id) .. "/cancel")
190+
end,
191+
create_response = function(self, input, opts, stream_callback)
192+
if opts == nil then
193+
opts = { }
194+
end
195+
if stream_callback == nil then
196+
stream_callback = nil
197+
end
198+
local create_response_stream_filter
199+
create_response_stream_filter = require("openai.responses").create_response_stream_filter
200+
local payload = {
201+
input = input
202+
}
203+
if opts then
204+
for k, v in pairs(opts) do
205+
payload[k] = v
206+
end
207+
end
208+
local stream_filter
209+
if payload.stream and stream_callback then
210+
stream_filter = create_response_stream_filter(stream_callback)
211+
end
212+
return self:_request("POST", "/responses", payload, nil, stream_filter)
213+
end,
174214
_request = function(self, method, path, payload, more_headers, stream_fn)
175215
assert(path, "missing path")
176216
assert(method, "missing method")
@@ -250,18 +290,9 @@ do
250290
_base_0.__class = _class_0
251291
OpenAI = _class_0
252292
end
253-
local responses_methods, ResponseSession
254-
do
255-
local _obj_0 = require("openai.responses")
256-
responses_methods, ResponseSession = _obj_0.responses_methods, _obj_0.ResponseSession
257-
end
258-
for k, v in pairs(responses_methods) do
259-
OpenAI.__base[k] = v
260-
end
261293
return {
262294
OpenAI = OpenAI,
263295
ChatSession = ChatSession,
264-
ResponseSession = ResponseSession,
265296
VERSION = VERSION,
266297
new = OpenAI
267298
}

openai/init.moon

Lines changed: 40 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -79,6 +79,10 @@ class OpenAI
7979
new_chat_session: (...) =>
8080
ChatSession @, ...
8181

82+
new_response_chat_session: (...) =>
83+
import ResponsesChatSession from require "openai.responses"
84+
ResponsesChatSession @, ...
85+
8286
-- creates a ltn12 compatible filter function that will call chunk_callback
8387
-- for each parsed json chunk from the server-sent events api response
8488
create_stream_filter: (chunk_callback) =>
@@ -209,6 +213,41 @@ class OpenAI
209213
image_generation: (params) =>
210214
@_request "POST", "/images/generations", params
211215

216+
-- Get a stored response by ID
217+
-- Returns: status, response, headers (raw result from _request)
218+
response: (response_id) =>
219+
assert response_id, "response_id is required"
220+
@_request "GET", "/responses/#{response_id}"
221+
222+
-- Delete a stored response
223+
delete_response: (response_id) =>
224+
assert response_id, "response_id is required"
225+
@_request "DELETE", "/responses/#{response_id}"
226+
227+
-- Cancel an in-progress streaming response
228+
cancel_response: (response_id) =>
229+
assert response_id, "response_id is required"
230+
@_request "POST", "/responses/#{response_id}/cancel"
231+
232+
-- Create a single response (stateless)
233+
-- input: string or array of message objects
234+
-- opts: options like model, temperature, instructions, tools, etc.
235+
-- stream_callback: optional function for streaming responses
236+
-- Returns: status, response, headers (raw result from _request)
237+
create_response: (input, opts={}, stream_callback=nil) =>
238+
import create_response_stream_filter from require "openai.responses"
239+
240+
payload = { :input }
241+
242+
if opts
243+
for k, v in pairs opts
244+
payload[k] = v
245+
246+
stream_filter = if payload.stream and stream_callback
247+
create_response_stream_filter stream_callback
248+
249+
@_request "POST", "/responses", payload, nil, stream_filter
250+
212251
-- Responses API methods
213252
_request: (method, path, payload, more_headers, stream_fn) =>
214253
assert path, "missing path"
@@ -265,9 +304,5 @@ class OpenAI
265304

266305
require @config.http_provider
267306

268-
import responses_methods, ResponseSession from require "openai.responses"
269-
270-
for k, v in pairs responses_methods
271-
OpenAI.__base[k] = v
272307

273-
{:OpenAI, :ChatSession, :ResponseSession, :VERSION, new: OpenAI}
308+
{:OpenAI, :ChatSession, :VERSION, new: OpenAI}

openai/responses.lua

Lines changed: 22 additions & 79 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
local cjson = require("cjson")
22
local types
33
types = require("tableshape").types
4+
local DEFAULT_RESPONSES_MODEL = "gpt-4.1-mini"
45
local empty = (types["nil"] + types.literal(cjson.null)):describe("nullable")
56
local input_format = types.string + types.array_of(types.partial({
67
role = types.one_of({
@@ -43,8 +44,7 @@ local parse_responses_response = types.partial({
4344
output = types.array_of(response_message):tag("output"),
4445
model = empty + types.string:tag("model"),
4546
usage = empty + types.table:tag("usage"),
46-
stop_reason = empty + types.string:tag("stop_reason"),
47-
stop_sequence = empty + (types.string + empty):tag("stop_sequence")
47+
status = empty + types.string:tag("status")
4848
})
4949
local parse_response_stream_chunk
5050
parse_response_stream_chunk = function(chunk)
@@ -160,7 +160,7 @@ create_response_stream_filter = function(chunk_callback)
160160
return ...
161161
end
162162
end
163-
local ResponseSession
163+
local ResponsesChatSession
164164
do
165165
local _class_0
166166
local _base_0 = {
@@ -182,23 +182,26 @@ do
182182
end
183183
assert(input, "input must be provided")
184184
assert(input_format(input))
185-
local payload = {
186-
model = self.opts.model or "gpt-4.1-mini",
187-
input = input
185+
local merged_opts = {
186+
model = self.opts.model,
187+
previous_response_id = self.current_response_id
188188
}
189189
if self.opts.instructions then
190-
payload.instructions = self.opts.instructions
190+
merged_opts.instructions = self.opts.instructions
191191
end
192192
if opts then
193193
for k, v in pairs(opts) do
194-
payload[k] = v
194+
merged_opts[k] = v
195195
end
196196
end
197+
if stream_callback then
198+
merged_opts.stream = merged_opts.stream or true
199+
end
197200
local accumulated_text = { }
198201
local final_response = nil
199-
local stream_filter
200-
if payload.stream then
201-
stream_filter = create_response_stream_filter(function(chunk)
202+
local wrapped_callback
203+
if merged_opts.stream then
204+
wrapped_callback = function(chunk)
202205
if chunk.text_delta then
203206
table.insert(accumulated_text, chunk.text_delta)
204207
end
@@ -209,21 +212,18 @@ do
209212
if stream_callback then
210213
return stream_callback(chunk)
211214
end
212-
end)
215+
end
213216
end
214-
local status, response = self.client:_request("POST", "/responses", payload, nil, stream_filter)
217+
local status, response = self.client:create_response(input, merged_opts, wrapped_callback)
215218
if status ~= 200 then
216219
return nil, "Request failed with status: " .. tostring(status), response
217220
end
218-
if payload.stream then
219-
local text_out = table.concat(accumulated_text)
221+
if merged_opts.stream then
220222
if final_response then
221223
self.current_response_id = final_response.id
222224
table.insert(self.response_history, final_response)
223-
elseif text_out ~= "" then
224-
self.current_response_id = "stream_" .. tostring(os.time())
225225
end
226-
return text_out
226+
return table.concat(accumulated_text)
227227
end
228228
local parsed_response, err = parse_responses_response(response)
229229
if not (parsed_response) then
@@ -246,7 +246,7 @@ do
246246
self.current_response_id = self.opts.previous_response_id
247247
end,
248248
__base = _base_0,
249-
__name = "ResponseSession"
249+
__name = "ResponsesChatSession"
250250
}, {
251251
__index = _base_0,
252252
__call = function(cls, ...)
@@ -256,70 +256,13 @@ do
256256
end
257257
})
258258
_base_0.__class = _class_0
259-
ResponseSession = _class_0
259+
ResponsesChatSession = _class_0
260260
end
261-
local responses_methods = {
262-
new_response_session = function(self, ...)
263-
return ResponseSession(self, ...)
264-
end,
265-
create_response = function(self, input, opts, stream_callback)
266-
if opts == nil then
267-
opts = { }
268-
end
269-
if stream_callback == nil then
270-
stream_callback = nil
271-
end
272-
assert(input, "input must be provided")
273-
assert(input_format(input))
274-
local payload = {
275-
model = "gpt-4.1-mini",
276-
input = input
277-
}
278-
if opts then
279-
for k, v in pairs(opts) do
280-
payload[k] = v
281-
end
282-
end
283-
local accumulated_text = { }
284-
local final_response = nil
285-
local stream_filter
286-
if payload.stream then
287-
stream_filter = create_response_stream_filter(function(chunk)
288-
if chunk.text_delta then
289-
table.insert(accumulated_text, chunk.text_delta)
290-
end
291-
if chunk.response then
292-
final_response = add_response_helpers(chunk.response)
293-
chunk.response = final_response
294-
end
295-
if stream_callback then
296-
return stream_callback(chunk)
297-
end
298-
end)
299-
end
300-
local status, response = self:_request("POST", "/responses", payload, nil, stream_filter)
301-
if status ~= 200 then
302-
return nil, "Request failed with status: " .. tostring(status), response
303-
end
304-
if payload.stream then
305-
if final_response then
306-
add_response_helpers(final_response)
307-
end
308-
return table.concat(accumulated_text)
309-
end
310-
local parsed_response, err = parse_responses_response(response)
311-
if not (parsed_response) then
312-
return nil, "Failed to parse response: " .. tostring(err), response
313-
end
314-
add_response_helpers(parsed_response)
315-
return parsed_response
316-
end
317-
}
318261
return {
319-
ResponseSession = ResponseSession,
320-
responses_methods = responses_methods,
262+
ResponsesChatSession = ResponsesChatSession,
321263
parse_responses_response = parse_responses_response,
322264
parse_response_stream_chunk = parse_response_stream_chunk,
265+
create_response_stream_filter = create_response_stream_filter,
323266
add_response_helpers = add_response_helpers,
324267
extract_output_text = extract_output_text
325268
}

0 commit comments

Comments (0)