feat: add litellmProxy provider option for explicit LiteLLM compatibility (#8658)

Co-authored-by: Mark Henderson <Mark.Henderson99@hotmail.com>
Co-authored-by: Aiden Cline <63023139+rekram1-node@users.noreply.github.com>
Authored by seilk on 2026-01-16 07:00:36 +09:00, committed by opencode
parent df8e6e6014
commit 9b57db30d1
4 changed files with 226 additions and 1 deletions

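The commit title is the only description of the new litellmProxy option visible here, so the following is a hypothetical sketch of where such a flag might sit in an opencode provider configuration. The litellm provider id, the @ai-sdk/openai-compatible package, the baseURL, and the placement of litellmProxy under options are assumptions for illustration, not taken from this diff.

// Hypothetical opencode.json shape, expressed as a TypeScript literal.
// Every name below other than "litellmProxy" (from the commit title)
// is an assumption about how a custom provider is typically wired up.
const config = {
  provider: {
    litellm: {
      npm: "@ai-sdk/openai-compatible", // assumed provider package
      options: {
        baseURL: "http://localhost:4000/v1", // assumed local LiteLLM proxy endpoint
        litellmProxy: true, // the new option this commit introduces (placement assumed)
      },
    },
  },
}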

@@ -569,4 +569,94 @@ describe("session.message-v2.toModelMessage", () => {
    expect(MessageV2.toModelMessage(input)).toStrictEqual([])
  })

  test("converts pending/running tool calls to error results to prevent dangling tool_use", () => {
    const userID = "m-user"
    const assistantID = "m-assistant"
    const input: MessageV2.WithParts[] = [
      {
        info: userInfo(userID),
        parts: [
          {
            ...basePart(userID, "u1"),
            type: "text",
            text: "run tool",
          },
        ] as MessageV2.Part[],
      },
      {
        info: assistantInfo(assistantID, userID),
        parts: [
          {
            ...basePart(assistantID, "a1"),
            type: "tool",
            callID: "call-pending",
            tool: "bash",
            state: {
              status: "pending",
              input: { cmd: "ls" },
              raw: "",
            },
          },
          {
            ...basePart(assistantID, "a2"),
            type: "tool",
            callID: "call-running",
            tool: "read",
            state: {
              status: "running",
              input: { path: "/tmp" },
              time: { start: 0 },
            },
          },
        ] as MessageV2.Part[],
      },
    ]
    const result = MessageV2.toModelMessage(input)
    expect(result).toStrictEqual([
      {
        role: "user",
        content: [{ type: "text", text: "run tool" }],
      },
      {
        role: "assistant",
        content: [
          {
            type: "tool-call",
            toolCallId: "call-pending",
            toolName: "bash",
            input: { cmd: "ls" },
            providerExecuted: undefined,
          },
          {
            type: "tool-call",
            toolCallId: "call-running",
            toolName: "read",
            input: { path: "/tmp" },
            providerExecuted: undefined,
          },
        ],
      },
      {
        role: "tool",
        content: [
          {
            type: "tool-result",
            toolCallId: "call-pending",
            toolName: "bash",
            output: { type: "error-text", value: "[Tool execution was interrupted]" },
          },
          {
            type: "tool-result",
            toolCallId: "call-running",
            toolName: "read",
            output: { type: "error-text", value: "[Tool execution was interrupted]" },
          },
        ],
      },
    ])
  })
})
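The expected output above pins down the behavior: every tool call whose state is still pending or running is emitted as a normal assistant tool-call, then paired with a synthetic error tool-result so the provider never receives a dangling tool_use block. Below is a minimal standalone sketch of that pairing, assuming simplified types. ToolPart, ToolState, toToolMessages, and INTERRUPTED are illustrative names, not the actual MessageV2 implementation.

// Sketch of the interruption handling the test above exercises.
// Types are deliberately simplified; the real toModelMessage handles
// many more part kinds and states.
type ToolState =
  | { status: "pending"; input: unknown; raw?: string }
  | { status: "running"; input: unknown; time: { start: number } }
  | { status: "completed"; input: unknown; output: string }

interface ToolPart {
  type: "tool"
  callID: string
  tool: string
  state: ToolState
}

const INTERRUPTED = "[Tool execution was interrupted]"

// For tool calls that never finished, emit the call plus a synthetic
// error result so the model never sees a tool_use without a matching result.
function toToolMessages(parts: ToolPart[]) {
  const calls = parts.map((p) => ({
    type: "tool-call" as const,
    toolCallId: p.callID,
    toolName: p.tool,
    input: p.state.input,
    providerExecuted: undefined,
  }))
  const results = parts.map((p) => ({
    type: "tool-result" as const,
    toolCallId: p.callID,
    toolName: p.tool,
    output:
      p.state.status === "completed"
        ? { type: "text" as const, value: p.state.output }
        : { type: "error-text" as const, value: INTERRUPTED },
  }))
  return [
    { role: "assistant" as const, content: calls },
    { role: "tool" as const, content: results },
  ]
}

Feeding the two unfinished parts from the test into this sketch yields the same assistant/tool message pair the test asserts: two tool-calls followed by two error-text results with the "[Tool execution was interrupted]" value.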