Skip to content

Commit 99d9281

Browse files
committed
rename the chat method to create_chat_completion (keep alias method)
1 parent 62a7cf1 commit 99d9281

File tree

6 files changed

+24
-18
lines changed

6 files changed

+24
-18
lines changed

README.md

Lines changed: 5 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -52,11 +52,11 @@ Using the Chat Completions API:
5252
local openai = require("openai")
5353
local client = openai.new(os.getenv("OPENAI_API_KEY"))
5454

55-
local status, response = client:chat({
55+
local status, response = client:create_chat_completion({
5656
{role = "system", content = "You are a Lua programmer"},
5757
{role = "user", content = "Write a 'Hello world' program in Lua"}
5858
}, {
59-
model = "gpt-3.5-turbo", -- this is the default model
59+
model = "gpt-3.5-turbo",
6060
temperature = 0.5
6161
})
6262

@@ -140,7 +140,7 @@ Using the Chat Completions API:
140140
local openai = require("openai")
141141
local client = openai.new(os.getenv("OPENAI_API_KEY"))
142142

143-
client:chat({
143+
client:create_chat_completion({
144144
{role = "system", content = "You work for Streak.Club, a website to track daily creative habits"},
145145
{role = "user", content = "Who do you work for?"}
146146
}, {
@@ -201,9 +201,9 @@ server-side via `previous_response_id`.
201201
- `tools`: Array of tool definitions
202202
- `previous_response_id`: Resume from a previous response
203203

204-
##### `client:chat(messages, opts, chunk_callback)`
204+
##### `client:create_chat_completion(messages, opts, chunk_callback)`
205205

206-
Sends a request to the `/chat/completions` endpoint.
206+
Sends a request to the `/chat/completions` endpoint. Also available as `client:chat(...)` for backward compatibility.
207207

208208
- `messages`: An array of message objects.
209209
- `opts`: Additional options for the chat, passed directly to the API (eg. model, temperature, etc.) https://platform.openai.com/docs/api-reference/chat

openai/chat_completions.lua

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -159,7 +159,7 @@ do
159159
if stream_callback == nil then
160160
stream_callback = nil
161161
end
162-
local status, response = self.client:chat(self.messages, {
162+
local status, response = self.client:create_chat_completion(self.messages, {
163163
function_call = self.opts.function_call,
164164
functions = self.functions,
165165
model = self.opts.model,

openai/chat_completions.moon

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -186,7 +186,7 @@ class ChatSession
186186
-- append_response: should the response be appended to the chat history
187187
-- stream_callback: provide a function to enable streaming output. function will receive each chunk as it's generated
188188
generate_response: (append_response=true, stream_callback=nil) =>
189-
status, response = @client\chat @messages, {
189+
status, response = @client\create_chat_completion @messages, {
190190
function_call: @opts.function_call -- override the default function call behavior
191191
functions: @functions
192192
model: @opts.model

openai/init.lua

Lines changed: 4 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -24,7 +24,7 @@ do
2424
ResponsesChatSession = require("openai.responses").ResponsesChatSession
2525
return ResponsesChatSession(self, ...)
2626
end,
27-
chat = function(self, messages, opts, chunk_callback)
27+
create_chat_completion = function(self, messages, opts, chunk_callback)
2828
if chunk_callback == nil then
2929
chunk_callback = nil
3030
end
@@ -50,6 +50,9 @@ do
5050
end
5151
return self:_request("POST", "/chat/completions", payload, nil, stream_filter)
5252
end,
53+
chat = function(self, ...)
54+
return self:create_chat_completion(...)
55+
end,
5356
completion = function(self, prompt, opts)
5457
local payload = {
5558
model = "text-davinci-003",

openai/init.moon

Lines changed: 4 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -37,7 +37,7 @@ class OpenAI
3737
-- call /chat/completions
3838
-- opts: additional parameters as described in https://platform.openai.com/docs/api-reference/chat, eg. model, temperature, etc.
3939
-- completion_callback: function to be called for parsed streaming output when stream = true is passed to opts
40-
chat: (messages, opts, chunk_callback=nil) =>
40+
create_chat_completion: (messages, opts, chunk_callback=nil) =>
4141
import test_message, create_chat_stream_filter from require "openai.chat_completions"
4242

4343
test_messages = types.array_of test_message
@@ -57,6 +57,9 @@ class OpenAI
5757

5858
@_request "POST", "/chat/completions", payload, nil, stream_filter
5959

60+
-- legacy alias for create_chat_completion (for backward compatibility)
61+
chat: (...) => @create_chat_completion ...
62+
6063
-- call /completions
6164
-- opts: additional parameters as described in https://platform.openai.com/docs/api-reference/completions
6265
completion: (prompt, opts) =>

spec/openai_spec.moon

Lines changed: 9 additions & 9 deletions
Original file line number · Diff line number · Diff line change
@@ -69,7 +69,7 @@ describe "OpenAI API Client", ->
6969

7070
it "generates chat response", ->
7171
client = OpenAI "test-api-key"
72-
status, response = assert client\chat {
72+
status, response = assert client\create_chat_completion {
7373
{role: "system", content: "You are an assistant that speaks like Shakespeare."}
7474
{role: "user", content: "tell me a joke"}
7575
}
@@ -94,7 +94,7 @@ describe "OpenAI API Client", ->
9494
client = OpenAI "test-api-key"
9595
chat = client\new_chat_session { temperature: .75 }
9696

97-
stub(client, "chat").invokes (c, messages, params) ->
97+
stub(client, "create_chat_completion").invokes (c, messages, params) ->
9898
assert.same {
9999
{
100100
role: "user"
@@ -132,7 +132,7 @@ describe "OpenAI API Client", ->
132132
}
133133
}, chat.messages
134134

135-
stub(client, "chat").invokes (c, messages, params) ->
135+
stub(client, "create_chat_completion").invokes (c, messages, params) ->
136136
assert.same {
137137
{
138138
role: "user"
@@ -170,13 +170,13 @@ describe "OpenAI API Client", ->
170170
chat = client\new_chat_session { model: "gpt-4" }
171171

172172
-- bad status
173-
stub(client, "chat").invokes (c, messages, params) ->
173+
stub(client, "create_chat_completion").invokes (c, messages, params) ->
174174
400, {}
175175

176176
assert.same {nil, "Bad status: 400", {}}, {chat\send "Hello"}
177177

178178
-- bad status with error
179-
stub(client, "chat").invokes (c, messages, params) ->
179+
stub(client, "create_chat_completion").invokes (c, messages, params) ->
180180
400, {
181181
error: {
182182
message: "Not valid thing"
@@ -190,7 +190,7 @@ describe "OpenAI API Client", ->
190190
}}, {chat\send "Hello"}
191191

192192
-- bad status with error message and code
193-
stub(client, "chat").invokes (c, messages, params) ->
193+
stub(client, "create_chat_completion").invokes (c, messages, params) ->
194194
400, {
195195
error: {
196196
message: "Not valid thing"
@@ -206,7 +206,7 @@ describe "OpenAI API Client", ->
206206
}}, {chat\send "Hello"}
207207

208208
-- malformed output
209-
stub(client, "chat").invokes (c, messages, params) ->
209+
stub(client, "create_chat_completion").invokes (c, messages, params) ->
210210
200, { usage: {} }
211211

212212
assert.same {
@@ -218,7 +218,7 @@ describe "OpenAI API Client", ->
218218
it "with functions", ->
219219
client = OpenAI "test-api-key"
220220

221-
stub(client, "chat").invokes (c, args, params) ->
221+
stub(client, "create_chat_completion").invokes (c, args, params) ->
222222
assert.same {
223223
{
224224
role: "system"
@@ -298,7 +298,7 @@ describe "OpenAI API Client", ->
298298
}, res
299299

300300

301-
stub(client, "chat").invokes (c, args, params) ->
301+
stub(client, "create_chat_completion").invokes (c, args, params) ->
302302
assert.same {
303303
{
304304
role: "system"

0 commit comments

Comments (0)