Commit cec62a5

rename responses chat session function
1 parent 7f16ec2 commit cec62a5

File tree

10 files changed (+14, -16 lines)

examples/responses/basic.lua

Lines changed: 1 addition & 1 deletion
@@ -38,7 +38,7 @@ if not (streamed_text) then
 end
 print("Full streamed text: " .. tostring(streamed_text))
 print("\n== Stateful session ==")
-local session = client:new_response_session({
+local session = client:new_responses_chat_session({
   model = "gpt-4.1-mini",
   instructions = "Keep answers concise."
 })

examples/responses/basic.moon

Lines changed: 2 additions & 2 deletions
@@ -22,7 +22,7 @@ print "Response ID: #{response.id}"
 print "Status: #{response.status or 'unknown'}"
 
 print "\n== Streaming example (via session) =="
-stream_session = client\new_response_chat_session!
+stream_session = client\new_responses_chat_session!
 
 streamed_text, err, raw = stream_session\send "Stream a brief greeting.", (chunk) ->
   if chunk.text_delta
@@ -41,7 +41,7 @@ unless streamed_text
 print "Full streamed text: #{streamed_text}"
 
 print "\n== Stateful session =="
-session = client\new_response_chat_session {
+session = client\new_responses_chat_session {
   instructions: "Keep answers concise."
 }
examples/responses/image.lua

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@ local cjson = require("cjson")
 
 local client = openai.new(os.getenv("OPENAI_API_KEY"))
 
-local session = client:new_response_chat_session()
+local session = client:new_responses_chat_session()
 
 local response, err = session:send({
   {

examples/responses/structured_output.lua

Lines changed: 1 addition & 1 deletion
@@ -73,7 +73,7 @@ end
 io.stderr:write("\n--- Using ResponsesChatSession with structured output ---\n\n")
 
 -- You can also use structured outputs with the chat session
-local session = client:new_response_chat_session()
+local session = client:new_responses_chat_session()
 
 local recipe_schema = {
   type = "json_schema",

examples/responses/tools.lua

Lines changed: 1 addition & 1 deletion
@@ -41,7 +41,7 @@ local function get_weather(location, unit)
   }
 end
 
-local session = client:new_response_chat_session({
+local session = client:new_responses_chat_session({
   tools = tools
 })
openai/init.lua

Lines changed: 2 additions & 2 deletions
@@ -1,4 +1,4 @@
-local VERSION = "1.4.3"
+local VERSION = "1.5.0"
 local ltn12 = require("ltn12")
 local cjson = require("cjson")
 local unpack = table.unpack or unpack
@@ -19,7 +19,7 @@ do
     ChatSession = require("openai.chat_completions").ChatSession
     return ChatSession(self, ...)
   end,
-  new_response_chat_session = function(self, ...)
+  new_responses_chat_session = function(self, ...)
     local ResponsesChatSession
     ResponsesChatSession = require("openai.responses").ResponsesChatSession
     return ResponsesChatSession(self, ...)

openai/init.moon

Lines changed: 2 additions & 2 deletions
@@ -1,4 +1,4 @@
-VERSION = "1.4.3"
+VERSION = "1.5.0"
 
 ltn12 = require "ltn12"
 cjson = require "cjson"
@@ -30,7 +30,7 @@ class OpenAI
     import ChatSession from require "openai.chat_completions"
     ChatSession @, ...
 
-  new_response_chat_session: (...) =>
+  new_responses_chat_session: (...) =>
     import ResponsesChatSession from require "openai.responses"
     ResponsesChatSession @, ...

openai/responses.lua

Lines changed: 0 additions & 1 deletion
@@ -285,6 +285,5 @@ return {
   parse_responses_response = parse_responses_response,
   parse_response_stream_chunk = parse_response_stream_chunk,
   create_response_stream_filter = create_response_stream_filter,
-  add_response_helpers = add_response_helpers,
   extract_output_text = extract_output_text
 }

openai/responses.moon

Lines changed: 1 addition & 2 deletions
@@ -111,7 +111,7 @@ extract_output_text = (response) ->
 
   table.concat parts
 
--- Add helper method to response objects
+-- add helper fields to the response output to make it easier to access text output
 add_response_helpers = (response) ->
   if response
     response.output_text = extract_output_text response
@@ -231,6 +231,5 @@ class ResponsesChatSession
   :parse_responses_response
   :parse_response_stream_chunk
   :create_response_stream_filter
-  :add_response_helpers
   :extract_output_text
 }

spec/openai_spec.moon

Lines changed: 3 additions & 3 deletions
@@ -447,7 +447,7 @@ describe "OpenAI API Client", ->
 assert.same "resp_first", payload.previous_response_id
 return 200, build_response "resp_second", "Second reply"
 
-session = client\new_response_chat_session { model: "gpt-4.1-mini" }
+session = client\new_responses_chat_session { model: "gpt-4.1-mini" }
 
 first = assert session\send "Hello"
 assert.same "resp_first", first.id
@@ -487,7 +487,7 @@ describe "OpenAI API Client", ->
 status: "completed"
 }
 
-session = client\new_response_chat_session { model: "my-custom-model" }
+session = client\new_responses_chat_session { model: "my-custom-model" }
 response = assert session\send "Hello"
 
 assert.same "resp_custom", response.id
@@ -600,7 +600,7 @@ describe "OpenAI API Client", ->
 assert.same "resp_stream", chunk.response.id
 assert.same "Hello world", chunk.response.output_text
 
-session = client\new_response_chat_session { model: "gpt-4.1-mini" }
+session = client\new_responses_chat_session { model: "gpt-4.1-mini" }
 out = assert session\send "Say hello back", stream_callback
 
 -- Session returns accumulated text
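
For reference, a minimal sketch of how the renamed method is called after this commit, assembled from the examples above (the prompt text is illustrative):

local openai = require("openai")

local client = openai.new(os.getenv("OPENAI_API_KEY"))

-- stateful session backed by the Responses API (method renamed in this commit)
local session = client:new_responses_chat_session({
  model = "gpt-4.1-mini",
  instructions = "Keep answers concise."
})

-- send() returns the parsed response; on failure err describes the problem
local response, err = session:send("Say hello back.")
if response then
  print(response.output_text)
else
  print("error: " .. tostring(err))
end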
