Skip to content

Commit 8fc831e

Browse files
committed
fix: respect tool_call: false capability flag at runtime
Gate tool resolution, LiteLLM noop injection, activeTools, tools, and toolChoice behind input.model.capabilities.toolcall. Without this fix, tool_call: false in opencode.json is parsed and stored but never checked, causing Bedrock models that don't support streaming + tool use to fail. Adds a regression test verifying that body.tools is not sent when toolcall is false. Cherry-picked from lgarceau768#1 and rebased onto v1.4.0.
1 parent 9043d92 commit 8fc831e

2 files changed

Lines changed: 99 additions & 4 deletions

File tree

packages/opencode/src/session/llm.ts

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -193,7 +193,8 @@ export namespace LLM {
193193
},
194194
)
195195

196-
const tools = resolveTools(input)
196+
const canTool = input.model.capabilities.toolcall
197+
const tools = canTool ? resolveTools(input) : {}
197198

198199
// LiteLLM and some Anthropic proxies require the tools parameter to be present
199200
// when message history contains tool calls, even if no tools are being used.
@@ -211,6 +212,7 @@ export namespace LLM {
211212
// during compaction), inject a stub tool to satisfy the validation requirement.
212213
// The stub description explicitly tells the model not to call it.
213214
if (
215+
canTool &&
214216
(isLiteLLMProxy || input.model.providerID.includes("github-copilot")) &&
215217
Object.keys(tools).length === 0 &&
216218
hasToolCalls(input.messages)
@@ -350,9 +352,13 @@ export namespace LLM {
350352
topP: params.topP,
351353
topK: params.topK,
352354
providerOptions: ProviderTransform.providerOptions(input.model, params.options),
353-
activeTools: Object.keys(tools).filter((x) => x !== "invalid"),
354-
tools,
355-
toolChoice: input.toolChoice,
355+
...(canTool
356+
? {
357+
activeTools: Object.keys(tools).filter((x) => x !== "invalid"),
358+
tools,
359+
}
360+
: {}),
361+
...(canTool ? { toolChoice: input.toolChoice } : {}),
356362
maxOutputTokens: params.maxOutputTokens,
357363
abortSignal: input.abort,
358364
headers: {

packages/opencode/test/session/llm.test.ts

Lines changed: 89 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -561,6 +561,95 @@ describe("session.llm.stream", () => {
561561
})
562562
})
563563

564+
test("does not send tools when model toolcall is disabled", async () => {
565+
const server = state.server
566+
if (!server) {
567+
throw new Error("Server not initialized")
568+
}
569+
570+
const providerID = "alibaba"
571+
const modelID = "qwen-plus"
572+
const fixture = await loadFixture(providerID, modelID)
573+
const model = fixture.model
574+
575+
const request = waitRequest(
576+
"/chat/completions",
577+
new Response(createChatStream("Hello"), {
578+
status: 200,
579+
headers: { "Content-Type": "text/event-stream" },
580+
}),
581+
)
582+
583+
await using tmp = await tmpdir({
584+
init: async (dir) => {
585+
await Bun.write(
586+
path.join(dir, "opencode.json"),
587+
JSON.stringify({
588+
$schema: "https://opencode.ai/config.json",
589+
enabled_providers: [providerID],
590+
provider: {
591+
[providerID]: {
592+
options: {
593+
apiKey: "test-key",
594+
baseURL: `${server.url.origin}/v1`,
595+
},
596+
},
597+
},
598+
}),
599+
)
600+
},
601+
})
602+
603+
await Instance.provide({
604+
directory: tmp.path,
605+
fn: async () => {
606+
const resolved = await getModel(ProviderID.make(providerID), ModelID.make(model.id))
607+
const sessionID = SessionID.make("session-test-no-tools")
608+
const agent = {
609+
name: "test",
610+
mode: "primary",
611+
options: {},
612+
permission: [{ permission: "*", pattern: "*", action: "allow" }],
613+
} satisfies Agent.Info
614+
615+
const user = {
616+
id: MessageID.make("user-no-tools"),
617+
sessionID,
618+
role: "user",
619+
time: { created: Date.now() },
620+
agent: agent.name,
621+
model: { providerID: ProviderID.make(providerID), modelID: resolved.id },
622+
} satisfies MessageV2.User
623+
624+
await drain({
625+
user,
626+
sessionID,
627+
model: {
628+
...resolved,
629+
capabilities: {
630+
...resolved.capabilities,
631+
toolcall: false,
632+
},
633+
},
634+
agent,
635+
system: ["You are a helpful assistant."],
636+
messages: [{ role: "user", content: "Hello" }],
637+
tools: {
638+
question: tool({
639+
description: "Ask a question",
640+
inputSchema: z.object({}),
641+
execute: async () => ({ output: "" }),
642+
}),
643+
},
644+
})
645+
646+
const capture = await request
647+
expect(capture.body.tools).toBeUndefined()
648+
expect(capture.body.tool_choice).toBeUndefined()
649+
},
650+
})
651+
})
652+
564653
test("sends responses API payload for OpenAI models", async () => {
565654
const server = state.server
566655
if (!server) {

0 commit comments

Comments (0)