Skip to content

Commit 731e78c

Browse files
committed
fix: respect tool_call: false capability flag at runtime
Gate tool resolution, LiteLLM noop injection, activeTools, tools, and toolChoice behind input.model.capabilities.toolcall. Without this fix, tool_call: false in opencode.json is parsed and stored but never checked, causing Bedrock models that don't support streaming + tool use to fail. Adds a regression test verifying that body.tools is not sent when toolcall: false. Cherry-picked from lgarceau768#1 and rebased onto v1.4.0.
1 parent 23b54d3 commit 731e78c

2 files changed

Lines changed: 103 additions & 5 deletions

File tree

packages/opencode/src/session/llm.ts

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -196,7 +196,8 @@ export namespace LLM {
196196
},
197197
)
198198

199-
const tools = await resolveTools(input)
199+
const canTool = input.model.capabilities.toolcall
200+
const tools = canTool ? await resolveTools(input) : {}
200201

201202
// LiteLLM and some Anthropic proxies require the tools parameter to be present
202203
// when message history contains tool calls, even if no tools are being used.
@@ -213,7 +214,7 @@ export namespace LLM {
213214
// calls but no tools param is present. When there are no active tools (e.g.
214215
// during compaction), inject a stub tool to satisfy the validation requirement.
215216
// The stub description explicitly tells the model not to call it.
216-
if (isLiteLLMProxy && Object.keys(tools).length === 0 && hasToolCalls(input.messages)) {
217+
if (canTool && isLiteLLMProxy && Object.keys(tools).length === 0 && hasToolCalls(input.messages)) {
217218
tools["_noop"] = tool({
218219
description: "Do not call this tool. It exists only for API compatibility and must never be invoked.",
219220
inputSchema: jsonSchema({
@@ -286,9 +287,13 @@ export namespace LLM {
286287
topP: params.topP,
287288
topK: params.topK,
288289
providerOptions: ProviderTransform.providerOptions(input.model, params.options),
289-
activeTools: Object.keys(tools).filter((x) => x !== "invalid"),
290-
tools,
291-
toolChoice: input.toolChoice,
290+
...(canTool
291+
? {
292+
activeTools: Object.keys(tools).filter((x) => x !== "invalid"),
293+
tools,
294+
}
295+
: {}),
296+
...(canTool ? { toolChoice: input.toolChoice } : {}),
292297
maxOutputTokens: params.maxOutputTokens,
293298
abortSignal: input.abort,
294299
headers: {

packages/opencode/test/session/llm.test.ts

Lines changed: 93 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -628,6 +628,99 @@ describe("session.llm.stream", () => {
628628
})
629629
})
630630

631+
test("does not send tools when model toolcall is disabled", async () => {
632+
const server = state.server
633+
if (!server) {
634+
throw new Error("Server not initialized")
635+
}
636+
637+
const providerID = "alibaba"
638+
const modelID = "qwen-plus"
639+
const fixture = await loadFixture(providerID, modelID)
640+
const model = fixture.model
641+
642+
const request = waitRequest(
643+
"/chat/completions",
644+
new Response(createChatStream("Hello"), {
645+
status: 200,
646+
headers: { "Content-Type": "text/event-stream" },
647+
}),
648+
)
649+
650+
await using tmp = await tmpdir({
651+
init: async (dir) => {
652+
await Bun.write(
653+
path.join(dir, "opencode.json"),
654+
JSON.stringify({
655+
$schema: "https://opencode.ai/config.json",
656+
enabled_providers: [providerID],
657+
provider: {
658+
[providerID]: {
659+
options: {
660+
apiKey: "test-key",
661+
baseURL: `${server.url.origin}/v1`,
662+
},
663+
},
664+
},
665+
}),
666+
)
667+
},
668+
})
669+
670+
await Instance.provide({
671+
directory: tmp.path,
672+
fn: async () => {
673+
const resolved = await Provider.getModel(ProviderID.make(providerID), ModelID.make(model.id))
674+
const sessionID = SessionID.make("session-test-no-tools")
675+
const agent = {
676+
name: "test",
677+
mode: "primary",
678+
options: {},
679+
permission: [{ permission: "*", pattern: "*", action: "allow" }],
680+
} satisfies Agent.Info
681+
682+
const user = {
683+
id: MessageID.make("user-no-tools"),
684+
sessionID,
685+
role: "user",
686+
time: { created: Date.now() },
687+
agent: agent.name,
688+
model: { providerID: ProviderID.make(providerID), modelID: resolved.id },
689+
} satisfies MessageV2.User
690+
691+
const stream = await LLM.stream({
692+
user,
693+
sessionID,
694+
model: {
695+
...resolved,
696+
capabilities: {
697+
...resolved.capabilities,
698+
toolcall: false,
699+
},
700+
},
701+
agent,
702+
system: ["You are a helpful assistant."],
703+
abort: new AbortController().signal,
704+
messages: [{ role: "user", content: "Hello" }],
705+
tools: {
706+
question: tool({
707+
description: "Ask a question",
708+
inputSchema: z.object({}),
709+
execute: async () => ({ output: "" }),
710+
}),
711+
},
712+
})
713+
714+
for await (const _ of stream.fullStream) {
715+
}
716+
717+
const capture = await request
718+
expect(capture.body.tools).toBeUndefined()
719+
expect(capture.body.tool_choice).toBeUndefined()
720+
},
721+
})
722+
})
723+
631724
test("sends responses API payload for OpenAI models", async () => {
632725
const server = state.server
633726
if (!server) {

0 commit comments

Comments (0)