Skip to content

Commit 3ebca20

Browse files
committed
reorganize examples
1 parent 3e9454e commit 3ebca20

File tree

15 files changed

+507
-0
lines changed

15 files changed

+507
-0
lines changed

examples/example5.lua renamed to examples/chat_completions/function_calling.lua

File renamed without changes.

examples/gemini/chat_session.lua

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
-- Example: Using Gemini with a chat session
-- Requires GEMINI_API_KEY environment variable

local Gemini = require("openai.compat.gemini")
local client = Gemini.new(os.getenv("GEMINI_API_KEY"))

-- Create a chat session seeded with a system message
local chat = client:new_chat_session({
  messages = {
    {role = "system", content = "You are a helpful assistant who gives concise answers."}
  }
})

-- Send a message and print the response
print("User: What is the capital of France?")
local response = chat:send("What is the capital of France?")
print("Assistant:", response)
print()

-- The chat history is maintained, so follow-up questions have context
print("User: What is its population?")
response = chat:send("What is its population?")
print("Assistant:", response)
print()

-- You can stream responses with a callback.
-- Write the label without a trailing newline so the streamed chunks
-- appear on the same line; print("Assistant: ", "") emitted a stray
-- tab and newline, pushing the streamed text onto the next line.
print("User: Tell me a fun fact about that city.")
io.stdout:write("Assistant: ")
chat:send("Tell me a fun fact about that city.", function(chunk)
  io.stdout:write(chunk.content or "")
  io.stdout:flush()
end)
print()
print()

-- View the full conversation history
print("--- Full conversation history ---")
for _, message in ipairs(chat.messages) do
  -- Truncate each message to 80 characters for display
  print(string.format("[%s]: %s", message.role, message.content:sub(1, 80) .. (message.content:len() > 80 and "..." or "")))
end

examples/gemini/embedding.lua

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
-- Example: Using Gemini embeddings
-- Requires GEMINI_API_KEY environment variable

local Gemini = require("openai.compat.gemini")
local client = Gemini.new(os.getenv("GEMINI_API_KEY"))

-- Request an embedding for a single piece of text
local status, response = client:embedding("Lua is the best programming language ever", {
  model = "text-embedding-004"
})

if status ~= 200 then
  print("Error:", status)
  if response and response.error then
    print(response.error.message)
  end
else
  local vector = response.data[1].embedding
  print("Embedding dimensions:", #vector)
  -- Show only the first few components of the vector
  local head = {}
  for i = 1, 5 do
    head[i] = vector[i]
  end
  print("First 5 values:", table.concat(head, ", "))
end

-- Embed several texts in a single request
print()
print("--- Batch embeddings ---")
local batch_texts = {
  "The quick brown fox jumps over the lazy dog",
  "A fast auburn canine leaps above a sleepy hound",
  "Hello world in Lua"
}
local status2, response2 = client:embedding(batch_texts, {
  model = "text-embedding-004"
})

if status2 ~= 200 then
  print("Error:", status2)
else
  for i = 1, #response2.data do
    print(string.format("Text %d: %d dimensions", i, #response2.data[i].embedding))
  end
end

examples/gemini/list_models.lua

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
-- Example: List available Gemini models
-- Requires GEMINI_API_KEY environment variable

local Gemini = require("openai.compat.gemini")
local client = Gemini.new(os.getenv("GEMINI_API_KEY"))

local status, response = client:models()

if status ~= 200 then
  -- Non-200: report the status and any error message the API returned
  print("Error:", status)
  if response and response.error then
    print(response.error.message)
  end
else
  print("Available Gemini models:")
  print()
  for i = 1, #response.data do
    print(string.format("  %s", response.data[i].id))
  end
end

examples/responses/basic.lua

Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
-- Examples for the OpenAI Responses API: a one-off request, a streamed
-- request, and a stateful session that carries conversation state forward.
local openai = require("openai")
local cjson = require("cjson")

local api_key = os.getenv("OPENAI_API_KEY")
assert(api_key, "Set OPENAI_API_KEY in your environment before running.")

local client = openai.new(api_key)

-- Report a failed request on stderr (plus the raw payload, when one is
-- available) and terminate with a non-zero exit status.
local function die(prefix, err, raw)
  io.stderr:write(prefix .. tostring(err) .. "\n")
  if raw then
    io.stderr:write(cjson.encode(raw), "\n")
  end
  os.exit(1)
end

print("== One-off response ==")
local response, err, raw = client:create_response("Write one short sentence about Lua.")
if not response then
  die("Request failed: ", err, raw)
end
print("Response ID: " .. tostring(response.id))
print("Output text: " .. tostring(response.output_text))
print("Stop reason: " .. tostring(response.stop_reason or 'unknown'))

print("\n== Streaming example ==")
local streamed_text
streamed_text, err, raw = client:create_response("Stream a brief greeting.", {
  stream = true
}, function(chunk)
  -- Echo incremental text as it arrives, then announce completion
  if chunk.text_delta then
    io.write(chunk.text_delta)
    io.flush()
  end
  if chunk.response then
    print("\n\nCompleted response ID: " .. tostring(chunk.response.id))
  end
end)
if not streamed_text then
  die("Streaming request failed: ", err, raw)
end
print("Full streamed text: " .. tostring(streamed_text))

print("\n== Stateful session ==")
local session = client:new_response_session({
  model = "gpt-4.1-mini",
  instructions = "Keep answers concise."
})

local first
first, err, raw = session:send("Introduce yourself in 5 words.")
if not first then
  die("Session request failed: ", err, raw)
end
print("First reply (" .. tostring(first.id) .. "): " .. tostring(first.output_text))

local second
second, err, raw = session:send("Now say goodbye in the same style.")
if not second then
  die("Second session request failed: ", err, raw)
end
print("Second reply (" .. tostring(second.id) .. "): " .. tostring(second.output_text))

0 commit comments

Comments (0)