feat: agents integration

This commit is contained in:
Gab 2026-03-27 08:52:47 +11:00
parent b9ced47bf8
commit 16bdb0707f
4 changed files with 125 additions and 67 deletions

View File

@ -286,18 +286,18 @@ export namespace Agent {
return {
name: t.name,
description: t.description,
description: t.description ?? undefined,
mode: "primary" as const,
permission: Permission.fromConfig({ "*": "allow" }),
native: false,
prompt: t.interpolation_string,
goals: t.goals,
temperature: t.temperature,
prompt: t.interpolation_string ?? undefined,
goals: t.goals ?? undefined,
temperature: t.temperature ?? undefined,
model,
options: {
tf_agent_id: t.id,
tf_auth_via: t.auth_via,
tf_max_tokens: t.max_tokens,
tf_max_tokens: t.max_tokens ?? undefined,
},
}
})

View File

@ -34,6 +34,12 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
}
}
// Memoized ToothFairyAI fallback model; yields it only when the
// provider/model pair is actually available, otherwise undefined.
const tfFallbackModel = createMemo(() => {
  const candidate = { providerID: "toothfairyai", modelID: "mystica-15" }
  return isModelValid(candidate) ? candidate : undefined
})
const agent = iife(() => {
const agents = createMemo(() => sync.data.agent.filter((x) => x.mode !== "subagent" && !x.hidden))
const visibleAgents = createMemo(() => sync.data.agent.filter((x) => !x.hidden))
@ -192,10 +198,12 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
const currentModel = createMemo(() => {
const a = agent.current()
const isTFAgent = !!a.options?.tf_agent_id
return (
getFirstValidModel(
() => modelStore.model[a.name],
() => a.model,
() => (isTFAgent ? tfFallbackModel() : undefined),
fallbackModel,
) ?? undefined
)
@ -297,6 +305,9 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
}
})
},
// Persist `model` as the stored default for whichever agent is currently selected.
setDefault(model: { providerID: string; modelID: string }) {
  const agentName = agent.current().name
  setModelStore("model", agentName, model)
},
toggleFavorite(model: { providerID: string; modelID: string }) {
batch(() => {
if (!isModelValid(model)) {
@ -381,18 +392,23 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
// Automatically update model when agent changes
createEffect(() => {
const value = agent.current()
const isTFAgent = !!value.options?.tf_agent_id
if (value.model) {
if (isModelValid(value.model))
if (isModelValid(value.model)) {
model.set({
providerID: value.model.providerID,
modelID: value.model.modelID,
})
else
} else if (isTFAgent) {
// For TF agents with invalid model, force default to toothfairyai/mystica-15
model.setDefault({ providerID: "toothfairyai", modelID: "mystica-15" })
} else {
toast.show({
variant: "warning",
message: `Agent ${value.name}'s configured model ${value.model.providerID}/${value.model.modelID} is not valid`,
duration: 3000,
})
}
}
})

View File

@ -116,11 +116,10 @@ export namespace LLM {
mergeDeep(variant),
)
// Remove TF-specific options for non-ToothFairyAI providers
if (input.model.providerID !== "toothfairyai") {
delete options.tf_agent_id
delete options.tf_auth_via
}
// Remove TF-specific internal tracking fields (never passed to APIs)
delete options.tf_agent_id
delete options.tf_auth_via
delete options.tf_max_tokens
if (isOpenaiOauth) {
options.instructions = system.join("\n")

View File

@ -1,6 +1,7 @@
import { afterAll, beforeAll, beforeEach, afterEach, test, expect, describe } from "bun:test"
import path from "path"
import fs from "fs"
import os from "os"
import { tmpdir } from "../fixture/fixture"
import { Instance } from "../../src/project/instance"
import { Agent } from "../../src/agent/agent"
@ -13,6 +14,8 @@ import { ProviderID, ModelID } from "../../src/provider/schema"
import { SessionID, MessageID } from "../../src/session/schema"
import type { MessageV2 } from "../../src/session/message-v2"
const TF_TOOLS_PATH = path.join(os.homedir(), ".tfcode", "tools.json")
// Server for capturing LLM requests
const state = {
server: null as ReturnType<typeof Bun.serve> | null,
@ -75,17 +78,34 @@ async function loadFixture(providerID: string, modelID: string) {
describe("ToothFairyAI Agent Loading", () => {
let originalDataPath: string
let originalToolsContent: string | null = null
beforeEach(async () => {
  // Redirect the global data path into an isolated per-suite sandbox.
  originalDataPath = Global.Path.data
  const sandboxDir = path.join(path.dirname(originalDataPath), "tf-agent-test-data")
  ;(Global.Path as { data: string }).data = sandboxDir
  await fs.promises.mkdir(path.join(sandboxDir, ".tfcode"), { recursive: true })
  // Snapshot the real ~/.tfcode/tools.json (null when absent) so afterEach can restore it.
  originalToolsContent = await Bun.file(TF_TOOLS_PATH)
    .text()
    .catch(() => null)
  await fs.promises.mkdir(path.dirname(TF_TOOLS_PATH), { recursive: true })
})
afterEach(async () => {
  await Instance.disposeAll()
  // Undo the data-path redirection done in beforeEach.
  ;(Global.Path as { data: string }).data = originalDataPath
  // Put back whatever tools.json existed before the test, or remove ours.
  if (originalToolsContent === null) {
    await fs.promises.unlink(TF_TOOLS_PATH).catch(() => {})
  } else {
    await fs.promises.writeFile(TF_TOOLS_PATH, originalToolsContent)
  }
})
describe("loadTFCoderAgents", () => {
@ -126,8 +146,8 @@ describe("ToothFairyAI Agent Loading", () => {
],
by_type: { coder_agent: 2 },
}
const toolsPath = path.join(Global.Path.data, ".tfcode", "tools.json")
const toolsPath = TF_TOOLS_PATH
await fs.promises.writeFile(toolsPath, JSON.stringify(toolsData, null, 2))
await using tmp = await tmpdir()
@ -138,7 +158,7 @@ describe("ToothFairyAI Agent Loading", () => {
const agents = await Agent.list()
const codeReviewer = agents.find((a) => a.name === "Code Reviewer")
const testWriter = agents.find((a) => a.name === "Test Writer")
expect(codeReviewer).toBeDefined()
expect(codeReviewer?.description).toBe("Reviews code for quality")
expect(codeReviewer?.prompt).toBe("You are a code reviewer. Review code thoroughly.")
@ -150,7 +170,7 @@ describe("ToothFairyAI Agent Loading", () => {
expect(codeReviewer?.options?.tf_max_tokens).toBe(4096)
expect(String(codeReviewer?.model?.providerID)).toBe("toothfairyai")
expect(String(codeReviewer?.model?.modelID)).toBe("claude-3-5-sonnet")
expect(testWriter).toBeDefined()
expect(String(testWriter?.model?.providerID)).toBe("toothfairyai")
expect(String(testWriter?.model?.modelID)).toBe("gpt-4")
@ -176,8 +196,8 @@ describe("ToothFairyAI Agent Loading", () => {
},
],
}
const toolsPath = path.join(Global.Path.data, ".tfcode", "tools.json")
const toolsPath = TF_TOOLS_PATH
await fs.promises.writeFile(toolsPath, JSON.stringify(toolsData))
await using tmp = await tmpdir()
@ -210,8 +230,8 @@ describe("ToothFairyAI Agent Loading", () => {
},
],
}
const toolsPath = path.join(Global.Path.data, ".tfcode", "tools.json")
const toolsPath = TF_TOOLS_PATH
await fs.promises.writeFile(toolsPath, JSON.stringify(toolsData))
await using tmp = await tmpdir()
@ -243,8 +263,8 @@ describe("ToothFairyAI Agent Loading", () => {
},
],
}
const toolsPath = path.join(Global.Path.data, ".tfcode", "tools.json")
const toolsPath = TF_TOOLS_PATH
await fs.promises.writeFile(toolsPath, JSON.stringify(toolsData))
await using tmp = await tmpdir()
@ -254,8 +274,8 @@ describe("ToothFairyAI Agent Loading", () => {
fn: async () => {
const agent = await Agent.get("Minimal Agent")
expect(agent).toBeDefined()
expect(agent?.prompt).toBeNull()
expect(agent?.goals).toBeNull()
expect(agent?.prompt).toBeUndefined()
expect(agent?.goals).toBeUndefined()
expect(agent?.model).toBeUndefined()
},
})
@ -266,7 +286,8 @@ describe("ToothFairyAI Agent Loading", () => {
// Separate describe block for LLM stream tests
describe("ToothFairyAI Agent Instructions in LLM Stream", () => {
let originalDataPath: string
let originalToolsContent: string | null = null
beforeAll(() => {
state.server = Bun.serve({
port: 0,
@ -281,32 +302,48 @@ describe("ToothFairyAI Agent Instructions in LLM Stream", () => {
},
})
})
afterAll(() => {
  // Shut down the capture server if it was started.
  const server = state.server
  if (server) server.stop()
})
beforeEach(async () => {
  // Drop any requests captured by the previous test.
  state.queue.length = 0
  // Redirect the global data path into an isolated per-suite sandbox.
  originalDataPath = Global.Path.data
  const sandboxDir = path.join(path.dirname(originalDataPath), "tf-agent-test-data")
  ;(Global.Path as { data: string }).data = sandboxDir
  await fs.promises.mkdir(path.join(sandboxDir, ".tfcode"), { recursive: true })
  // Snapshot the real ~/.tfcode/tools.json (null when absent) so afterEach can restore it.
  originalToolsContent = await Bun.file(TF_TOOLS_PATH)
    .text()
    .catch(() => null)
  await fs.promises.mkdir(path.dirname(TF_TOOLS_PATH), { recursive: true })
})
afterEach(async () => {
  await Instance.disposeAll()
  // Undo the data-path redirection done in beforeEach.
  ;(Global.Path as { data: string }).data = originalDataPath
  // Put back whatever tools.json existed before the test, or remove ours.
  if (originalToolsContent === null) {
    await fs.promises.unlink(TF_TOOLS_PATH).catch(() => {})
  } else {
    await fs.promises.writeFile(TF_TOOLS_PATH, originalToolsContent)
  }
})
test("includes highlighted TF agent instructions in system prompt", async () => {
const server = state.server
if (!server) throw new Error("Server not initialized")
const providerID = "alibaba"
const modelID = "qwen-plus"
const fixture = await loadFixture(providerID, modelID)
// Setup TF agent with this model
const toolsData = {
success: true,
@ -317,7 +354,8 @@ describe("ToothFairyAI Agent Instructions in LLM Stream", () => {
description: "Reviews code for quality and best practices",
tool_type: "coder_agent",
auth_via: "tf_agent",
interpolation_string: "You are a code reviewer. Always check for bugs, security issues, and suggest improvements.",
interpolation_string:
"You are a code reviewer. Always check for bugs, security issues, and suggest improvements.",
goals: "Review all code thoroughly. Provide actionable feedback. Ensure code quality standards.",
temperature: 0.3,
max_tokens: 4096,
@ -326,10 +364,9 @@ describe("ToothFairyAI Agent Instructions in LLM Stream", () => {
},
],
}
const toolsPath = path.join(Global.Path.data, ".tfcode", "tools.json")
await fs.promises.writeFile(toolsPath, JSON.stringify(toolsData, null, 2))
await fs.promises.writeFile(TF_TOOLS_PATH, JSON.stringify(toolsData, null, 2))
const request = waitRequest(
"/chat/completions",
new Response(createChatStream("I'll review your code."), {
@ -337,7 +374,7 @@ describe("ToothFairyAI Agent Instructions in LLM Stream", () => {
headers: { "Content-Type": "text/event-stream" },
}),
)
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
@ -357,13 +394,13 @@ describe("ToothFairyAI Agent Instructions in LLM Stream", () => {
)
},
})
await Instance.provide({
directory: tmp.path,
fn: async () => {
const agent = await Agent.get("Code Reviewer")
expect(agent).toBeDefined()
const resolved = await Provider.getModel(ProviderID.make(providerID), ModelID.make(modelID))
const sessionID = SessionID.make("test-session")
const user: MessageV2.User = {
@ -374,7 +411,7 @@ describe("ToothFairyAI Agent Instructions in LLM Stream", () => {
agent: "Code Reviewer",
model: { providerID: ProviderID.make(providerID), modelID: ModelID.make(modelID) },
}
const stream = await LLM.stream({
user,
sessionID,
@ -385,37 +422,42 @@ describe("ToothFairyAI Agent Instructions in LLM Stream", () => {
messages: [{ role: "user", content: "Hello" }],
tools: {},
})
for await (const _ of stream.fullStream) {}
for await (const _ of stream.fullStream) {
}
const capture = await request
const body = capture.body
const messages = body.messages as Array<{ role: string; content: string }>
const systemMessage = messages.find((m) => m.role === "system")
expect(systemMessage).toBeDefined()
const systemContent = systemMessage!.content
expect(systemContent).toContain("ULTRA IMPORTANT - AGENT CONFIGURATION")
expect(systemContent).toContain('You are acting as the agent: "Code Reviewer"')
expect(systemContent).toContain("Reviews code for quality and best practices")
expect(systemContent).toContain("AGENT \"Code Reviewer\" INSTRUCTIONS")
expect(systemContent).toContain("You are a code reviewer. Always check for bugs, security issues, and suggest improvements.")
expect(systemContent).toContain("AGENT \"Code Reviewer\" GOALS")
expect(systemContent).toContain("Review all code thoroughly. Provide actionable feedback. Ensure code quality standards.")
expect(systemContent).toContain('AGENT "Code Reviewer" INSTRUCTIONS')
expect(systemContent).toContain(
"You are a code reviewer. Always check for bugs, security issues, and suggest improvements.",
)
expect(systemContent).toContain('AGENT "Code Reviewer" GOALS')
expect(systemContent).toContain(
"Review all code thoroughly. Provide actionable feedback. Ensure code quality standards.",
)
},
})
})
test("does NOT include highlighted instructions for native agents", async () => {
const server = state.server
if (!server) throw new Error("Server not initialized")
const providerID = "alibaba"
const modelID = "qwen-plus"
const fixture = await loadFixture(providerID, modelID)
const request = waitRequest(
"/chat/completions",
new Response(createChatStream("Hello"), {
@ -423,7 +465,7 @@ describe("ToothFairyAI Agent Instructions in LLM Stream", () => {
headers: { "Content-Type": "text/event-stream" },
}),
)
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
@ -443,14 +485,14 @@ describe("ToothFairyAI Agent Instructions in LLM Stream", () => {
)
},
})
await Instance.provide({
directory: tmp.path,
fn: async () => {
const agent = await Agent.get("build")
expect(agent).toBeDefined()
expect(agent?.native).toBe(true)
const resolved = await Provider.getModel(ProviderID.make(providerID), ModelID.make(modelID))
const sessionID = SessionID.make("test-session")
const user: MessageV2.User = {
@ -461,7 +503,7 @@ describe("ToothFairyAI Agent Instructions in LLM Stream", () => {
agent: "build",
model: { providerID: ProviderID.make(providerID), modelID: ModelID.make(modelID) },
}
const stream = await LLM.stream({
user,
sessionID,
@ -472,18 +514,19 @@ describe("ToothFairyAI Agent Instructions in LLM Stream", () => {
messages: [{ role: "user", content: "Hello" }],
tools: {},
})
for await (const _ of stream.fullStream) {}
for await (const _ of stream.fullStream) {
}
const capture = await request
const body = capture.body
const messages = body.messages as Array<{ role: string; content: string }>
const systemMessage = messages.find((m) => m.role === "system")
expect(systemMessage).toBeDefined()
const systemContent = systemMessage!.content
expect(systemContent).not.toContain("ULTRA IMPORTANT - AGENT CONFIGURATION")
expect(systemContent).not.toContain("You are acting as the agent:")
},