Skip to content

Commit afe9082

Browse files
committed
always return full message object if tool calls are in response
1 parent 7e492d7 commit afe9082

4 files changed

Lines changed: 61 additions & 7 deletions

File tree

README.md

Lines changed: 11 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -349,16 +349,21 @@ Constructor for the ResponsesChatSession.
349349
- `tools`: Array of tool definitions
350350
- `previous_response_id`: Resume from a previous response
351351

352-
##### `session:send(input, stream_callback)`
352+
##### `session:send(input, opts={})`
353353

354354
Sends input and returns the response, maintaining conversation state
355355
automatically.
356356

357357
- `input`: A string or array of message objects.
358-
- `stream_callback`: Optional function for streaming responses.
358+
- `opts`: (optional) A table of per-request overrides. For backward
359+
compatibility, a function can be passed instead and will be treated as
360+
`{stream_callback = fn}`.
361+
- `stream_callback`: Optional function for streaming responses.
362+
- Any other Responses API parameter (e.g. `tool_choice`, `model`) to
363+
override the session default for this request.
359364

360-
Returns a response object on success (or accumulated text string when
361-
streaming). On failure, returns `nil`, an error message, and the raw response.
365+
Returns a response object on success, or accumulated text when streaming. On
366+
failure, returns `nil`, an error message, and the raw response.
362367

363368
Response objects have helper methods:
364369
- `response:get_output_text()`: Extract all text content as a string
@@ -385,8 +390,8 @@ Lower-level method to create a response with additional options.
385390
- `opts`: Additional options (model, temperature, tools, previous_response_id, etc.)
386391
- `stream_callback`: Optional function for streaming responses.
387392

388-
Returns a response object on success. On failure, returns `nil`, an error
389-
message, and the raw response.
393+
Returns a response object on success, or accumulated text when streaming. On
394+
failure, returns `nil`, an error message, and the raw response.
390395

391396
#### ChatSession
392397

@@ -724,4 +729,3 @@ else
724729
error("Unknown function: " .. name)
725730
end
726731
```
727-

openai/chat_completions.lua

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -279,6 +279,9 @@ do
279279
if append_response then
280280
self:append_message(message)
281281
end
282+
if message.tool_calls then
283+
return message
284+
end
282285
return message.content or message
283286
end
284287
local out, err = parse_chat_response(response)

openai/chat_completions.moon

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -294,6 +294,9 @@ class ChatSession
294294
if append_response
295295
@append_message message
296296

297+
if message.tool_calls
298+
return message
299+
297300
return message.content or message
298301

299302
out, err = parse_chat_response response

spec/openai_spec.moon

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -688,6 +688,50 @@ describe "OpenAI API Client", ->
688688
-- Verify response is appended to chat history
689689
assert.same response, chat.messages[#chat.messages]
690690

691+
it "returns the full assistant message when streaming includes text and tool calls", ->
692+
package.loaded["socket.http"] = {
693+
request: (opts={}) ->
694+
chunks = {
695+
"data: {\"object\": \"chat.completion.chunk\", \"choices\": [{\"delta\": {\"content\": \"Let me check.\"}, \"index\": 0}]}\n"
696+
"data: {\"object\": \"chat.completion.chunk\", \"choices\": [{\"delta\": {\"tool_calls\": [{\"index\": 0, \"id\": \"call_tool\", \"type\": \"function\", \"function\": {\"name\": \"lookup_weather\"}}]}, \"index\": 0}]}\n"
697+
"data: {\"object\": \"chat.completion.chunk\", \"choices\": [{\"delta\": {\"tool_calls\": [{\"index\": 0, \"function\": {\"arguments\": \"{\\\"location\\\":\\\"Paris\\\"}\"}}]}, \"index\": 0}]}\n"
698+
"data: [DONE]"
699+
}
700+
for chunk in *chunks
701+
opts.sink chunk
702+
true, 200, {}
703+
}
704+
705+
client = OpenAI "test-api-key"
706+
chat = client\new_chat_session {
707+
tools: {
708+
{
709+
type: "function"
710+
["function"]: {
711+
name: "lookup_weather"
712+
parameters: {}
713+
}
714+
}
715+
}
716+
}
717+
718+
response = assert chat\send "What's the weather in Paris?", (chunk) -> nil
719+
720+
assert.same {
721+
role: "assistant"
722+
content: "Let me check."
723+
tool_calls: {
724+
{
725+
id: "call_tool"
726+
type: "function"
727+
["function"]: {
728+
name: "lookup_weather"
729+
arguments: "{\"location\":\"Paris\"}"
730+
}
731+
}
732+
}
733+
}, response
734+
691735
describe "responses", ->
692736
it "creates a response (raw API)", ->
693737
client = OpenAI "test-api-key"

0 commit comments

Comments (0)