Skip to content

Commit c2ca149

Browse files
jquense
and authored
fix(opencode): preserve prompt tool enables with empty agent permissions (anomalyco#17064)
Co-authored-by: jquense <jquense@ramp.com>
1 parent 4ee426b commit c2ca149

3 files changed

Lines changed: 98 additions & 3 deletions

File tree

packages/opencode/src/session/llm.ts

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,7 @@ export namespace LLM {
3232
sessionID: string
3333
model: Provider.Model
3434
agent: Agent.Info
35+
permission?: PermissionNext.Ruleset
3536
system: string[]
3637
abort: AbortSignal
3738
messages: ModelMessage[]
@@ -255,8 +256,11 @@ export namespace LLM {
255256
})
256257
}
257258

258-
async function resolveTools(input: Pick<StreamInput, "tools" | "agent" | "user">) {
259-
const disabled = PermissionNext.disabled(Object.keys(input.tools), input.agent.permission)
259+
async function resolveTools(input: Pick<StreamInput, "tools" | "agent" | "permission" | "user">) {
260+
const disabled = PermissionNext.disabled(
261+
Object.keys(input.tools),
262+
PermissionNext.merge(input.agent.permission, input.permission ?? []),
263+
)
260264
for (const tool of Object.keys(input.tools)) {
261265
if (input.user.tools?.[tool] === false || disabled.has(tool)) {
262266
delete input.tools[tool]

packages/opencode/src/session/prompt.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -666,6 +666,7 @@ export namespace SessionPrompt {
666666
const result = await processor.process({
667667
user: lastUser,
668668
agent,
669+
permission: session.permission,
669670
abort,
670671
sessionID,
671672
system,

packages/opencode/test/session/llm.test.ts

Lines changed: 91 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import { afterAll, beforeAll, beforeEach, describe, expect, test } from "bun:test"
22
import path from "path"
3-
import type { ModelMessage } from "ai"
3+
import { tool, type ModelMessage } from "ai"
4+
import z from "zod"
45
import { LLM } from "../../src/session/llm"
56
import { Global } from "../../src/global"
67
import { Instance } from "../../src/project/instance"
@@ -325,6 +326,95 @@ describe("session.llm.stream", () => {
325326
})
326327
})
327328

329+
test("keeps tools enabled by prompt permissions", async () => {
330+
const server = state.server
331+
if (!server) {
332+
throw new Error("Server not initialized")
333+
}
334+
335+
const providerID = "alibaba"
336+
const modelID = "qwen-plus"
337+
const fixture = await loadFixture(providerID, modelID)
338+
const model = fixture.model
339+
340+
const request = waitRequest(
341+
"/chat/completions",
342+
new Response(createChatStream("Hello"), {
343+
status: 200,
344+
headers: { "Content-Type": "text/event-stream" },
345+
}),
346+
)
347+
348+
await using tmp = await tmpdir({
349+
init: async (dir) => {
350+
await Bun.write(
351+
path.join(dir, "opencode.json"),
352+
JSON.stringify({
353+
$schema: "https://opencode.ai/config.json",
354+
enabled_providers: [providerID],
355+
provider: {
356+
[providerID]: {
357+
options: {
358+
apiKey: "test-key",
359+
baseURL: `${server.url.origin}/v1`,
360+
},
361+
},
362+
},
363+
}),
364+
)
365+
},
366+
})
367+
368+
await Instance.provide({
369+
directory: tmp.path,
370+
fn: async () => {
371+
const resolved = await Provider.getModel(providerID, model.id)
372+
const sessionID = "session-test-tools"
373+
const agent = {
374+
name: "test",
375+
mode: "primary",
376+
options: {},
377+
permission: [{ permission: "question", pattern: "*", action: "deny" }],
378+
} satisfies Agent.Info
379+
380+
const user = {
381+
id: "user-tools",
382+
sessionID,
383+
role: "user",
384+
time: { created: Date.now() },
385+
agent: agent.name,
386+
model: { providerID, modelID: resolved.id },
387+
tools: { question: true },
388+
} satisfies MessageV2.User
389+
390+
const stream = await LLM.stream({
391+
user,
392+
sessionID,
393+
model: resolved,
394+
agent,
395+
permission: [{ permission: "question", pattern: "*", action: "allow" }],
396+
system: ["You are a helpful assistant."],
397+
abort: new AbortController().signal,
398+
messages: [{ role: "user", content: "Hello" }],
399+
tools: {
400+
question: tool({
401+
description: "Ask a question",
402+
inputSchema: z.object({}),
403+
execute: async () => ({ output: "" }),
404+
}),
405+
},
406+
})
407+
408+
for await (const _ of stream.fullStream) {
409+
}
410+
411+
const capture = await request
412+
const tools = capture.body.tools as Array<{ function?: { name?: string } }> | undefined
413+
expect(tools?.some((item) => item.function?.name === "question")).toBe(true)
414+
},
415+
})
416+
})
417+
328418
test("sends responses API payload for OpenAI models", async () => {
329419
const server = state.server
330420
if (!server) {

0 commit comments

Comments (0)