mirror of
https://gitea.toothfairyai.com/ToothFairyAI/tf_code.git
synced 2026-04-03 15:43:45 +00:00
core: make patch tool more reliable and consistent with other editing tools
The patch tool now works seamlessly alongside other file editing tools with improved error handling and a more intuitive permission system. Users will experience: - More reliable patch application with better error messages - Consistent permission prompts that match other editing tools - Smoother integration when applying complex multi-file changes - Better feedback on what changes are being made before applying patches This refactoring leverages the robust patch parsing engine while making the tool feel native to the opencode workflow, reducing friction when making bulk changes to your codebase.
This commit is contained in:
609
packages/opencode/src/patch/index.ts
Normal file
609
packages/opencode/src/patch/index.ts
Normal file
@@ -0,0 +1,609 @@
|
||||
import z from "zod"
import * as path from "path"
import * as fs from "fs/promises"
import { readFileSync } from "fs"
import { Log } from "../util/log"
|
||||
|
||||
export namespace Patch {
|
||||
const log = Log.create({ service: "patch" })
|
||||
|
||||
// Schema definitions
|
||||
export const PatchSchema = z.object({
|
||||
patchText: z.string().describe("The full patch text that describes all changes to be made"),
|
||||
})
|
||||
|
||||
export type PatchParams = z.infer<typeof PatchSchema>
|
||||
|
||||
// Core types matching the Rust implementation
|
||||
export interface ApplyPatchArgs {
|
||||
patch: string
|
||||
hunks: Hunk[]
|
||||
workdir?: string
|
||||
}
|
||||
|
||||
export type Hunk =
|
||||
| { type: "add"; path: string; contents: string }
|
||||
| { type: "delete"; path: string }
|
||||
| { type: "update"; path: string; move_path?: string; chunks: UpdateFileChunk[] }
|
||||
|
||||
export interface UpdateFileChunk {
|
||||
old_lines: string[]
|
||||
new_lines: string[]
|
||||
change_context?: string
|
||||
is_end_of_file?: boolean
|
||||
}
|
||||
|
||||
export interface ApplyPatchAction {
|
||||
changes: Map<string, ApplyPatchFileChange>
|
||||
patch: string
|
||||
cwd: string
|
||||
}
|
||||
|
||||
export type ApplyPatchFileChange =
|
||||
| { type: "add"; content: string }
|
||||
| { type: "delete"; content: string }
|
||||
| { type: "update"; unified_diff: string; move_path?: string; new_content: string }
|
||||
|
||||
export interface AffectedPaths {
|
||||
added: string[]
|
||||
modified: string[]
|
||||
deleted: string[]
|
||||
}
|
||||
|
||||
export enum ApplyPatchError {
|
||||
ParseError = "ParseError",
|
||||
IoError = "IoError",
|
||||
ComputeReplacements = "ComputeReplacements",
|
||||
ImplicitInvocation = "ImplicitInvocation",
|
||||
}
|
||||
|
||||
export enum MaybeApplyPatch {
|
||||
Body = "Body",
|
||||
ShellParseError = "ShellParseError",
|
||||
PatchParseError = "PatchParseError",
|
||||
NotApplyPatch = "NotApplyPatch",
|
||||
}
|
||||
|
||||
export enum MaybeApplyPatchVerified {
|
||||
Body = "Body",
|
||||
ShellParseError = "ShellParseError",
|
||||
CorrectnessError = "CorrectnessError",
|
||||
NotApplyPatch = "NotApplyPatch",
|
||||
}
|
||||
|
||||
// Parser implementation
|
||||
function parsePatchHeader(lines: string[], startIdx: number): { filePath: string; movePath?: string; nextIdx: number } | null {
|
||||
const line = lines[startIdx]
|
||||
|
||||
if (line.startsWith("*** Add File:")) {
|
||||
const filePath = line.split(":", 2)[1]?.trim()
|
||||
return filePath ? { filePath, nextIdx: startIdx + 1 } : null
|
||||
}
|
||||
|
||||
if (line.startsWith("*** Delete File:")) {
|
||||
const filePath = line.split(":", 2)[1]?.trim()
|
||||
return filePath ? { filePath, nextIdx: startIdx + 1 } : null
|
||||
}
|
||||
|
||||
if (line.startsWith("*** Update File:")) {
|
||||
const filePath = line.split(":", 2)[1]?.trim()
|
||||
let movePath: string | undefined
|
||||
let nextIdx = startIdx + 1
|
||||
|
||||
// Check for move directive
|
||||
if (nextIdx < lines.length && lines[nextIdx].startsWith("*** Move to:")) {
|
||||
movePath = lines[nextIdx].split(":", 2)[1]?.trim()
|
||||
nextIdx++
|
||||
}
|
||||
|
||||
return filePath ? { filePath, movePath, nextIdx } : null
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
function parseUpdateFileChunks(lines: string[], startIdx: number): { chunks: UpdateFileChunk[]; nextIdx: number } {
|
||||
const chunks: UpdateFileChunk[] = []
|
||||
let i = startIdx
|
||||
|
||||
while (i < lines.length && !lines[i].startsWith("***")) {
|
||||
if (lines[i].startsWith("@@")) {
|
||||
// Parse context line
|
||||
const contextLine = lines[i].substring(2).trim()
|
||||
i++
|
||||
|
||||
const oldLines: string[] = []
|
||||
const newLines: string[] = []
|
||||
let isEndOfFile = false
|
||||
|
||||
// Parse change lines
|
||||
while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) {
|
||||
const changeLine = lines[i]
|
||||
|
||||
if (changeLine === "*** End of File") {
|
||||
isEndOfFile = true
|
||||
i++
|
||||
break
|
||||
}
|
||||
|
||||
if (changeLine.startsWith(" ")) {
|
||||
// Keep line - appears in both old and new
|
||||
const content = changeLine.substring(1)
|
||||
oldLines.push(content)
|
||||
newLines.push(content)
|
||||
} else if (changeLine.startsWith("-")) {
|
||||
// Remove line - only in old
|
||||
oldLines.push(changeLine.substring(1))
|
||||
} else if (changeLine.startsWith("+")) {
|
||||
// Add line - only in new
|
||||
newLines.push(changeLine.substring(1))
|
||||
}
|
||||
|
||||
i++
|
||||
}
|
||||
|
||||
chunks.push({
|
||||
old_lines: oldLines,
|
||||
new_lines: newLines,
|
||||
change_context: contextLine || undefined,
|
||||
is_end_of_file: isEndOfFile || undefined,
|
||||
})
|
||||
} else {
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
return { chunks, nextIdx: i }
|
||||
}
|
||||
|
||||
function parseAddFileContent(lines: string[], startIdx: number): { content: string; nextIdx: number } {
|
||||
let content = ""
|
||||
let i = startIdx
|
||||
|
||||
while (i < lines.length && !lines[i].startsWith("***")) {
|
||||
if (lines[i].startsWith("+")) {
|
||||
content += lines[i].substring(1) + "\n"
|
||||
}
|
||||
i++
|
||||
}
|
||||
|
||||
// Remove trailing newline
|
||||
if (content.endsWith("\n")) {
|
||||
content = content.slice(0, -1)
|
||||
}
|
||||
|
||||
return { content, nextIdx: i }
|
||||
}
|
||||
|
||||
export function parsePatch(patchText: string): { hunks: Hunk[] } {
|
||||
const lines = patchText.split("\n")
|
||||
const hunks: Hunk[] = []
|
||||
let i = 0
|
||||
|
||||
// Look for Begin/End patch markers
|
||||
const beginMarker = "*** Begin Patch"
|
||||
const endMarker = "*** End Patch"
|
||||
|
||||
const beginIdx = lines.findIndex(line => line.trim() === beginMarker)
|
||||
const endIdx = lines.findIndex(line => line.trim() === endMarker)
|
||||
|
||||
if (beginIdx === -1 || endIdx === -1 || beginIdx >= endIdx) {
|
||||
throw new Error("Invalid patch format: missing Begin/End markers")
|
||||
}
|
||||
|
||||
// Parse content between markers
|
||||
i = beginIdx + 1
|
||||
|
||||
while (i < endIdx) {
|
||||
const header = parsePatchHeader(lines, i)
|
||||
if (!header) {
|
||||
i++
|
||||
continue
|
||||
}
|
||||
|
||||
if (lines[i].startsWith("*** Add File:")) {
|
||||
const { content, nextIdx } = parseAddFileContent(lines, header.nextIdx)
|
||||
hunks.push({
|
||||
type: "add",
|
||||
path: header.filePath,
|
||||
contents: content,
|
||||
})
|
||||
i = nextIdx
|
||||
} else if (lines[i].startsWith("*** Delete File:")) {
|
||||
hunks.push({
|
||||
type: "delete",
|
||||
path: header.filePath,
|
||||
})
|
||||
i = header.nextIdx
|
||||
} else if (lines[i].startsWith("*** Update File:")) {
|
||||
const { chunks, nextIdx } = parseUpdateFileChunks(lines, header.nextIdx)
|
||||
hunks.push({
|
||||
type: "update",
|
||||
path: header.filePath,
|
||||
move_path: header.movePath,
|
||||
chunks,
|
||||
})
|
||||
i = nextIdx
|
||||
} else {
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
return { hunks }
|
||||
}
|
||||
|
||||
// Apply patch functionality
|
||||
export function maybeParseApplyPatch(argv: string[]):
|
||||
| { type: MaybeApplyPatch.Body; args: ApplyPatchArgs }
|
||||
| { type: MaybeApplyPatch.PatchParseError; error: Error }
|
||||
| { type: MaybeApplyPatch.NotApplyPatch } {
|
||||
|
||||
const APPLY_PATCH_COMMANDS = ["apply_patch", "applypatch"]
|
||||
|
||||
// Direct invocation: apply_patch <patch>
|
||||
if (argv.length === 2 && APPLY_PATCH_COMMANDS.includes(argv[0])) {
|
||||
try {
|
||||
const { hunks } = parsePatch(argv[1])
|
||||
return {
|
||||
type: MaybeApplyPatch.Body,
|
||||
args: {
|
||||
patch: argv[1],
|
||||
hunks,
|
||||
},
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
type: MaybeApplyPatch.PatchParseError,
|
||||
error: error as Error,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Bash heredoc form: bash -lc 'apply_patch <<"EOF" ...'
|
||||
if (argv.length === 3 && argv[0] === "bash" && argv[1] === "-lc") {
|
||||
// Simple extraction - in real implementation would need proper bash parsing
|
||||
const script = argv[2]
|
||||
const heredocMatch = script.match(/apply_patch\s*<<['"](\w+)['"]\s*\n([\s\S]*?)\n\1/)
|
||||
|
||||
if (heredocMatch) {
|
||||
const patchContent = heredocMatch[2]
|
||||
try {
|
||||
const { hunks } = parsePatch(patchContent)
|
||||
return {
|
||||
type: MaybeApplyPatch.Body,
|
||||
args: {
|
||||
patch: patchContent,
|
||||
hunks,
|
||||
},
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
type: MaybeApplyPatch.PatchParseError,
|
||||
error: error as Error,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { type: MaybeApplyPatch.NotApplyPatch }
|
||||
}
|
||||
|
||||
// File content manipulation
|
||||
interface ApplyPatchFileUpdate {
|
||||
unified_diff: string
|
||||
content: string
|
||||
}
|
||||
|
||||
export function deriveNewContentsFromChunks(filePath: string, chunks: UpdateFileChunk[]): ApplyPatchFileUpdate {
|
||||
// Read original file content
|
||||
let originalContent: string
|
||||
try {
|
||||
originalContent = require("fs").readFileSync(filePath, "utf-8")
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to read file ${filePath}: ${error}`)
|
||||
}
|
||||
|
||||
let originalLines = originalContent.split("\n")
|
||||
|
||||
// Drop trailing empty element for consistent line counting
|
||||
if (originalLines.length > 0 && originalLines[originalLines.length - 1] === "") {
|
||||
originalLines.pop()
|
||||
}
|
||||
|
||||
const replacements = computeReplacements(originalLines, filePath, chunks)
|
||||
let newLines = applyReplacements(originalLines, replacements)
|
||||
|
||||
// Ensure trailing newline
|
||||
if (newLines.length === 0 || newLines[newLines.length - 1] !== "") {
|
||||
newLines.push("")
|
||||
}
|
||||
|
||||
const newContent = newLines.join("\n")
|
||||
|
||||
// Generate unified diff
|
||||
const unifiedDiff = generateUnifiedDiff(originalContent, newContent)
|
||||
|
||||
return {
|
||||
unified_diff: unifiedDiff,
|
||||
content: newContent,
|
||||
}
|
||||
}
|
||||
|
||||
function computeReplacements(originalLines: string[], filePath: string, chunks: UpdateFileChunk[]): Array<[number, number, string[]]> {
|
||||
const replacements: Array<[number, number, string[]]> = []
|
||||
let lineIndex = 0
|
||||
|
||||
for (const chunk of chunks) {
|
||||
// Handle context-based seeking
|
||||
if (chunk.change_context) {
|
||||
const contextIdx = seekSequence(originalLines, [chunk.change_context], lineIndex)
|
||||
if (contextIdx === -1) {
|
||||
throw new Error(`Failed to find context '${chunk.change_context}' in ${filePath}`)
|
||||
}
|
||||
lineIndex = contextIdx + 1
|
||||
}
|
||||
|
||||
// Handle pure addition (no old lines)
|
||||
if (chunk.old_lines.length === 0) {
|
||||
const insertionIdx = originalLines.length > 0 && originalLines[originalLines.length - 1] === ""
|
||||
? originalLines.length - 1
|
||||
: originalLines.length
|
||||
replacements.push([insertionIdx, 0, chunk.new_lines])
|
||||
continue
|
||||
}
|
||||
|
||||
// Try to match old lines in the file
|
||||
let pattern = chunk.old_lines
|
||||
let newSlice = chunk.new_lines
|
||||
let found = seekSequence(originalLines, pattern, lineIndex)
|
||||
|
||||
// Retry without trailing empty line if not found
|
||||
if (found === -1 && pattern.length > 0 && pattern[pattern.length - 1] === "") {
|
||||
pattern = pattern.slice(0, -1)
|
||||
if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") {
|
||||
newSlice = newSlice.slice(0, -1)
|
||||
}
|
||||
found = seekSequence(originalLines, pattern, lineIndex)
|
||||
}
|
||||
|
||||
if (found !== -1) {
|
||||
replacements.push([found, pattern.length, newSlice])
|
||||
lineIndex = found + pattern.length
|
||||
} else {
|
||||
throw new Error(
|
||||
`Failed to find expected lines in ${filePath}:\n${chunk.old_lines.join("\n")}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Sort replacements by index to apply in order
|
||||
replacements.sort((a, b) => a[0] - b[0])
|
||||
|
||||
return replacements
|
||||
}
|
||||
|
||||
function applyReplacements(lines: string[], replacements: Array<[number, number, string[]]>): string[] {
|
||||
// Apply replacements in reverse order to avoid index shifting
|
||||
const result = [...lines]
|
||||
|
||||
for (let i = replacements.length - 1; i >= 0; i--) {
|
||||
const [startIdx, oldLen, newSegment] = replacements[i]
|
||||
|
||||
// Remove old lines
|
||||
result.splice(startIdx, oldLen)
|
||||
|
||||
// Insert new lines
|
||||
for (let j = 0; j < newSegment.length; j++) {
|
||||
result.splice(startIdx + j, 0, newSegment[j])
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
function seekSequence(lines: string[], pattern: string[], startIndex: number): number {
|
||||
if (pattern.length === 0) return -1
|
||||
|
||||
// Simple substring search implementation
|
||||
for (let i = startIndex; i <= lines.length - pattern.length; i++) {
|
||||
let matches = true
|
||||
|
||||
for (let j = 0; j < pattern.length; j++) {
|
||||
if (lines[i + j] !== pattern[j]) {
|
||||
matches = false
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (matches) {
|
||||
return i
|
||||
}
|
||||
}
|
||||
|
||||
return -1
|
||||
}
|
||||
|
||||
function generateUnifiedDiff(oldContent: string, newContent: string): string {
|
||||
const oldLines = oldContent.split("\n")
|
||||
const newLines = newContent.split("\n")
|
||||
|
||||
// Simple diff generation - in a real implementation you'd use a proper diff algorithm
|
||||
let diff = "@@ -1 +1 @@\n"
|
||||
|
||||
// Find changes (simplified approach)
|
||||
const maxLen = Math.max(oldLines.length, newLines.length)
|
||||
let hasChanges = false
|
||||
|
||||
for (let i = 0; i < maxLen; i++) {
|
||||
const oldLine = oldLines[i] || ""
|
||||
const newLine = newLines[i] || ""
|
||||
|
||||
if (oldLine !== newLine) {
|
||||
if (oldLine) diff += `-${oldLine}\n`
|
||||
if (newLine) diff += `+${newLine}\n`
|
||||
hasChanges = true
|
||||
} else if (oldLine) {
|
||||
diff += ` ${oldLine}\n`
|
||||
}
|
||||
}
|
||||
|
||||
return hasChanges ? diff : ""
|
||||
}
|
||||
|
||||
// Apply hunks to filesystem
|
||||
export async function applyHunksToFiles(hunks: Hunk[]): Promise<AffectedPaths> {
|
||||
if (hunks.length === 0) {
|
||||
throw new Error("No files were modified.")
|
||||
}
|
||||
|
||||
const added: string[] = []
|
||||
const modified: string[] = []
|
||||
const deleted: string[] = []
|
||||
|
||||
for (const hunk of hunks) {
|
||||
switch (hunk.type) {
|
||||
case "add":
|
||||
// Create parent directories
|
||||
const addDir = path.dirname(hunk.path)
|
||||
if (addDir !== "." && addDir !== "/") {
|
||||
await fs.mkdir(addDir, { recursive: true })
|
||||
}
|
||||
|
||||
await fs.writeFile(hunk.path, hunk.contents, "utf-8")
|
||||
added.push(hunk.path)
|
||||
log.info(`Added file: ${hunk.path}`)
|
||||
break
|
||||
|
||||
case "delete":
|
||||
await fs.unlink(hunk.path)
|
||||
deleted.push(hunk.path)
|
||||
log.info(`Deleted file: ${hunk.path}`)
|
||||
break
|
||||
|
||||
case "update":
|
||||
const fileUpdate = deriveNewContentsFromChunks(hunk.path, hunk.chunks)
|
||||
|
||||
if (hunk.move_path) {
|
||||
// Handle file move
|
||||
const moveDir = path.dirname(hunk.move_path)
|
||||
if (moveDir !== "." && moveDir !== "/") {
|
||||
await fs.mkdir(moveDir, { recursive: true })
|
||||
}
|
||||
|
||||
await fs.writeFile(hunk.move_path, fileUpdate.content, "utf-8")
|
||||
await fs.unlink(hunk.path)
|
||||
modified.push(hunk.move_path)
|
||||
log.info(`Moved file: ${hunk.path} -> ${hunk.move_path}`)
|
||||
} else {
|
||||
// Regular update
|
||||
await fs.writeFile(hunk.path, fileUpdate.content, "utf-8")
|
||||
modified.push(hunk.path)
|
||||
log.info(`Updated file: ${hunk.path}`)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return { added, modified, deleted }
|
||||
}
|
||||
|
||||
// Main patch application function
|
||||
export async function applyPatch(patchText: string): Promise<AffectedPaths> {
|
||||
const { hunks } = parsePatch(patchText)
|
||||
return applyHunksToFiles(hunks)
|
||||
}
|
||||
|
||||
// Async version of maybeParseApplyPatchVerified
|
||||
export async function maybeParseApplyPatchVerified(argv: string[], cwd: string): Promise<
|
||||
| { type: MaybeApplyPatchVerified.Body; action: ApplyPatchAction }
|
||||
| { type: MaybeApplyPatchVerified.CorrectnessError; error: Error }
|
||||
| { type: MaybeApplyPatchVerified.NotApplyPatch }
|
||||
> {
|
||||
// Detect implicit patch invocation (raw patch without apply_patch command)
|
||||
if (argv.length === 1) {
|
||||
try {
|
||||
parsePatch(argv[0])
|
||||
return {
|
||||
type: MaybeApplyPatchVerified.CorrectnessError,
|
||||
error: new Error(ApplyPatchError.ImplicitInvocation),
|
||||
}
|
||||
} catch {
|
||||
// Not a patch, continue
|
||||
}
|
||||
}
|
||||
|
||||
const result = maybeParseApplyPatch(argv)
|
||||
|
||||
switch (result.type) {
|
||||
case MaybeApplyPatch.Body:
|
||||
const { args } = result
|
||||
const effectiveCwd = args.workdir ? path.resolve(cwd, args.workdir) : cwd
|
||||
const changes = new Map<string, ApplyPatchFileChange>()
|
||||
|
||||
for (const hunk of args.hunks) {
|
||||
const resolvedPath = path.resolve(effectiveCwd, hunk.type === "update" && hunk.move_path ? hunk.move_path : hunk.path)
|
||||
|
||||
switch (hunk.type) {
|
||||
case "add":
|
||||
changes.set(resolvedPath, {
|
||||
type: "add",
|
||||
content: hunk.contents,
|
||||
})
|
||||
break
|
||||
|
||||
case "delete":
|
||||
// For delete, we need to read the current content
|
||||
const deletePath = path.resolve(effectiveCwd, hunk.path)
|
||||
try {
|
||||
const content = await fs.readFile(deletePath, "utf-8")
|
||||
changes.set(resolvedPath, {
|
||||
type: "delete",
|
||||
content,
|
||||
})
|
||||
} catch (error) {
|
||||
return {
|
||||
type: MaybeApplyPatchVerified.CorrectnessError,
|
||||
error: new Error(`Failed to read file for deletion: ${deletePath}`),
|
||||
}
|
||||
}
|
||||
break
|
||||
|
||||
case "update":
|
||||
const updatePath = path.resolve(effectiveCwd, hunk.path)
|
||||
try {
|
||||
const fileUpdate = deriveNewContentsFromChunks(updatePath, hunk.chunks)
|
||||
changes.set(resolvedPath, {
|
||||
type: "update",
|
||||
unified_diff: fileUpdate.unified_diff,
|
||||
move_path: hunk.move_path ? path.resolve(effectiveCwd, hunk.move_path) : undefined,
|
||||
new_content: fileUpdate.content,
|
||||
})
|
||||
} catch (error) {
|
||||
return {
|
||||
type: MaybeApplyPatchVerified.CorrectnessError,
|
||||
error: error as Error,
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
type: MaybeApplyPatchVerified.Body,
|
||||
action: {
|
||||
changes,
|
||||
patch: args.patch,
|
||||
cwd: effectiveCwd,
|
||||
},
|
||||
}
|
||||
|
||||
case MaybeApplyPatch.PatchParseError:
|
||||
return {
|
||||
type: MaybeApplyPatchVerified.CorrectnessError,
|
||||
error: result.error,
|
||||
}
|
||||
|
||||
case MaybeApplyPatch.NotApplyPatch:
|
||||
return { type: MaybeApplyPatchVerified.NotApplyPatch }
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3,340 +3,205 @@ import * as path from "path"
|
||||
import * as fs from "fs/promises"
|
||||
import { Tool } from "./tool"
|
||||
import { FileTime } from "../file/time"
|
||||
import DESCRIPTION from "./patch.txt"
|
||||
import { Permission } from "../permission"
|
||||
import { Bus } from "../bus"
|
||||
import { FileWatcher } from "../file/watcher"
|
||||
import { Instance } from "../project/instance"
|
||||
import { Agent } from "../agent/agent"
|
||||
import { Patch } from "../patch"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
import { createTwoFilesPatch } from "diff"
|
||||
|
||||
const PatchParams = z.object({
|
||||
patchText: z.string().describe("The full patch text that describes all changes to be made"),
|
||||
})
|
||||
|
||||
interface Change {
|
||||
type: "add" | "update" | "delete"
|
||||
old_content?: string
|
||||
new_content?: string
|
||||
}
|
||||
|
||||
interface Commit {
|
||||
changes: Record<string, Change>
|
||||
}
|
||||
|
||||
interface PatchOperation {
|
||||
type: "update" | "add" | "delete"
|
||||
filePath: string
|
||||
hunks?: PatchHunk[]
|
||||
content?: string
|
||||
}
|
||||
|
||||
interface PatchHunk {
|
||||
contextLine: string
|
||||
changes: PatchChange[]
|
||||
}
|
||||
|
||||
interface PatchChange {
|
||||
type: "keep" | "remove" | "add"
|
||||
content: string
|
||||
}
|
||||
|
||||
function identifyFilesNeeded(patchText: string): string[] {
|
||||
const files: string[] = []
|
||||
const lines = patchText.split("\n")
|
||||
for (const line of lines) {
|
||||
if (line.startsWith("*** Update File:") || line.startsWith("*** Delete File:")) {
|
||||
const filePath = line.split(":", 2)[1]?.trim()
|
||||
if (filePath) files.push(filePath)
|
||||
}
|
||||
}
|
||||
return files
|
||||
}
|
||||
|
||||
function identifyFilesAdded(patchText: string): string[] {
|
||||
const files: string[] = []
|
||||
const lines = patchText.split("\n")
|
||||
for (const line of lines) {
|
||||
if (line.startsWith("*** Add File:")) {
|
||||
const filePath = line.split(":", 2)[1]?.trim()
|
||||
if (filePath) files.push(filePath)
|
||||
}
|
||||
}
|
||||
return files
|
||||
}
|
||||
|
||||
function textToPatch(patchText: string, _currentFiles: Record<string, string>): [PatchOperation[], number] {
|
||||
const operations: PatchOperation[] = []
|
||||
const lines = patchText.split("\n")
|
||||
let i = 0
|
||||
let fuzz = 0
|
||||
|
||||
while (i < lines.length) {
|
||||
const line = lines[i]
|
||||
|
||||
if (line.startsWith("*** Update File:")) {
|
||||
const filePath = line.split(":", 2)[1]?.trim()
|
||||
if (!filePath) {
|
||||
i++
|
||||
continue
|
||||
}
|
||||
|
||||
const hunks: PatchHunk[] = []
|
||||
i++
|
||||
|
||||
while (i < lines.length && !lines[i].startsWith("***")) {
|
||||
if (lines[i].startsWith("@@")) {
|
||||
const contextLine = lines[i].substring(2).trim()
|
||||
const changes: PatchChange[] = []
|
||||
i++
|
||||
|
||||
while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) {
|
||||
const changeLine = lines[i]
|
||||
if (changeLine.startsWith(" ")) {
|
||||
changes.push({ type: "keep", content: changeLine.substring(1) })
|
||||
} else if (changeLine.startsWith("-")) {
|
||||
changes.push({
|
||||
type: "remove",
|
||||
content: changeLine.substring(1),
|
||||
})
|
||||
} else if (changeLine.startsWith("+")) {
|
||||
changes.push({ type: "add", content: changeLine.substring(1) })
|
||||
}
|
||||
i++
|
||||
}
|
||||
|
||||
hunks.push({ contextLine, changes })
|
||||
} else {
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
operations.push({ type: "update", filePath, hunks })
|
||||
} else if (line.startsWith("*** Add File:")) {
|
||||
const filePath = line.split(":", 2)[1]?.trim()
|
||||
if (!filePath) {
|
||||
i++
|
||||
continue
|
||||
}
|
||||
|
||||
let content = ""
|
||||
i++
|
||||
|
||||
while (i < lines.length && !lines[i].startsWith("***")) {
|
||||
if (lines[i].startsWith("+")) {
|
||||
content += lines[i].substring(1) + "\n"
|
||||
}
|
||||
i++
|
||||
}
|
||||
|
||||
operations.push({ type: "add", filePath, content: content.slice(0, -1) })
|
||||
} else if (line.startsWith("*** Delete File:")) {
|
||||
const filePath = line.split(":", 2)[1]?.trim()
|
||||
if (filePath) {
|
||||
operations.push({ type: "delete", filePath })
|
||||
}
|
||||
i++
|
||||
} else {
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
return [operations, fuzz]
|
||||
}
|
||||
|
||||
function patchToCommit(operations: PatchOperation[], currentFiles: Record<string, string>): Commit {
|
||||
const changes: Record<string, Change> = {}
|
||||
|
||||
for (const op of operations) {
|
||||
if (op.type === "delete") {
|
||||
changes[op.filePath] = {
|
||||
type: "delete",
|
||||
old_content: currentFiles[op.filePath] || "",
|
||||
}
|
||||
} else if (op.type === "add") {
|
||||
changes[op.filePath] = {
|
||||
type: "add",
|
||||
new_content: op.content || "",
|
||||
}
|
||||
} else if (op.type === "update" && op.hunks) {
|
||||
const originalContent = currentFiles[op.filePath] || ""
|
||||
const lines = originalContent.split("\n")
|
||||
|
||||
for (const hunk of op.hunks) {
|
||||
const contextIndex = lines.findIndex((line) => line.includes(hunk.contextLine))
|
||||
if (contextIndex === -1) {
|
||||
throw new Error(`Context line not found: ${hunk.contextLine}`)
|
||||
}
|
||||
|
||||
let currentIndex = contextIndex
|
||||
for (const change of hunk.changes) {
|
||||
if (change.type === "keep") {
|
||||
currentIndex++
|
||||
} else if (change.type === "remove") {
|
||||
lines.splice(currentIndex, 1)
|
||||
} else if (change.type === "add") {
|
||||
lines.splice(currentIndex, 0, change.content)
|
||||
currentIndex++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
changes[op.filePath] = {
|
||||
type: "update",
|
||||
old_content: originalContent,
|
||||
new_content: lines.join("\n"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { changes }
|
||||
}
|
||||
|
||||
function generateDiff(oldContent: string, newContent: string, filePath: string): [string, number, number] {
|
||||
// Mock implementation - would need actual diff generation
|
||||
const lines1 = oldContent.split("\n")
|
||||
const lines2 = newContent.split("\n")
|
||||
const additions = Math.max(0, lines2.length - lines1.length)
|
||||
const removals = Math.max(0, lines1.length - lines2.length)
|
||||
return [`--- ${filePath}\n+++ ${filePath}\n`, additions, removals]
|
||||
}
|
||||
|
||||
async function applyCommit(
|
||||
commit: Commit,
|
||||
writeFile: (path: string, content: string) => Promise<void>,
|
||||
deleteFile: (path: string) => Promise<void>,
|
||||
): Promise<void> {
|
||||
for (const [filePath, change] of Object.entries(commit.changes)) {
|
||||
if (change.type === "delete") {
|
||||
await deleteFile(filePath)
|
||||
} else if (change.new_content !== undefined) {
|
||||
await writeFile(filePath, change.new_content)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const PatchTool = Tool.define("patch", {
|
||||
description: DESCRIPTION,
|
||||
description: "Apply a patch to modify multiple files. Supports adding, updating, and deleting files with context-aware changes.",
|
||||
parameters: PatchParams,
|
||||
execute: async (params, ctx) => {
|
||||
// Identify all files needed for the patch and verify they've been read
|
||||
const filesToRead = identifyFilesNeeded(params.patchText)
|
||||
for (const filePath of filesToRead) {
|
||||
let absPath = filePath
|
||||
if (!path.isAbsolute(absPath)) {
|
||||
absPath = path.resolve(process.cwd(), absPath)
|
||||
async execute(params, ctx) {
|
||||
if (!params.patchText) {
|
||||
throw new Error("patchText is required")
|
||||
}
|
||||
|
||||
// Parse the patch to get hunks
|
||||
let hunks: Patch.Hunk[]
|
||||
try {
|
||||
const parseResult = Patch.parsePatch(params.patchText)
|
||||
hunks = parseResult.hunks
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to parse patch: ${error}`)
|
||||
}
|
||||
|
||||
if (hunks.length === 0) {
|
||||
throw new Error("No file changes found in patch")
|
||||
}
|
||||
|
||||
// Validate file paths and check permissions
|
||||
const agent = await Agent.get(ctx.agent)
|
||||
const fileChanges: Array<{
|
||||
filePath: string
|
||||
oldContent: string
|
||||
newContent: string
|
||||
type: "add" | "update" | "delete" | "move"
|
||||
movePath?: string
|
||||
}> = []
|
||||
|
||||
let totalDiff = ""
|
||||
|
||||
for (const hunk of hunks) {
|
||||
const filePath = path.resolve(Instance.directory, hunk.path)
|
||||
|
||||
if (!Filesystem.contains(Instance.directory, filePath)) {
|
||||
throw new Error(`File ${filePath} is not in the current working directory`)
|
||||
}
|
||||
|
||||
await FileTime.assert(ctx.sessionID, absPath)
|
||||
|
||||
try {
|
||||
const stats = await fs.stat(absPath)
|
||||
if (stats.isDirectory()) {
|
||||
throw new Error(`path is a directory, not a file: ${absPath}`)
|
||||
}
|
||||
} catch (error: any) {
|
||||
if (error.code === "ENOENT") {
|
||||
throw new Error(`file not found: ${absPath}`)
|
||||
}
|
||||
throw new Error(`failed to access file: ${error.message}`)
|
||||
switch (hunk.type) {
|
||||
case "add":
|
||||
if (hunk.type === "add") {
|
||||
const oldContent = ""
|
||||
const newContent = hunk.contents
|
||||
const diff = createTwoFilesPatch(filePath, filePath, oldContent, newContent)
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent,
|
||||
newContent,
|
||||
type: "add",
|
||||
})
|
||||
|
||||
totalDiff += diff + "\n"
|
||||
}
|
||||
break
|
||||
|
||||
case "update":
|
||||
// Check if file exists for update
|
||||
const stats = await fs.stat(filePath).catch(() => null)
|
||||
if (!stats || stats.isDirectory()) {
|
||||
throw new Error(`File not found or is directory: ${filePath}`)
|
||||
}
|
||||
|
||||
// Read file and update time tracking (like edit tool does)
|
||||
await FileTime.assert(ctx.sessionID, filePath)
|
||||
const oldContent = await fs.readFile(filePath, "utf-8")
|
||||
let newContent = oldContent
|
||||
|
||||
// Apply the update chunks to get new content
|
||||
try {
|
||||
const fileUpdate = Patch.deriveNewContentsFromChunks(filePath, hunk.chunks)
|
||||
newContent = fileUpdate.content
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to apply update to ${filePath}: ${error}`)
|
||||
}
|
||||
|
||||
const diff = createTwoFilesPatch(filePath, filePath, oldContent, newContent)
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent,
|
||||
newContent,
|
||||
type: hunk.move_path ? "move" : "update",
|
||||
movePath: hunk.move_path ? path.resolve(Instance.directory, hunk.move_path) : undefined,
|
||||
})
|
||||
|
||||
totalDiff += diff + "\n"
|
||||
break
|
||||
|
||||
case "delete":
|
||||
// Check if file exists for deletion
|
||||
await FileTime.assert(ctx.sessionID, filePath)
|
||||
const contentToDelete = await fs.readFile(filePath, "utf-8")
|
||||
const deleteDiff = createTwoFilesPatch(filePath, filePath, contentToDelete, "")
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent: contentToDelete,
|
||||
newContent: "",
|
||||
type: "delete",
|
||||
})
|
||||
|
||||
totalDiff += deleteDiff + "\n"
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Check for new files to ensure they don't already exist
|
||||
const filesToAdd = identifyFilesAdded(params.patchText)
|
||||
for (const filePath of filesToAdd) {
|
||||
let absPath = filePath
|
||||
if (!path.isAbsolute(absPath)) {
|
||||
absPath = path.resolve(process.cwd(), absPath)
|
||||
}
|
||||
|
||||
try {
|
||||
await fs.stat(absPath)
|
||||
throw new Error(`file already exists and cannot be added: ${absPath}`)
|
||||
} catch (error: any) {
|
||||
if (error.code !== "ENOENT") {
|
||||
throw new Error(`failed to check file: ${error.message}`)
|
||||
}
|
||||
}
|
||||
// Check permissions if needed
|
||||
if (agent.permission.edit === "ask") {
|
||||
await Permission.ask({
|
||||
type: "edit",
|
||||
sessionID: ctx.sessionID,
|
||||
messageID: ctx.messageID,
|
||||
callID: ctx.callID,
|
||||
title: `Apply patch to ${fileChanges.length} files`,
|
||||
metadata: {
|
||||
diff: totalDiff,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// Load all required files
|
||||
const currentFiles: Record<string, string> = {}
|
||||
for (const filePath of filesToRead) {
|
||||
let absPath = filePath
|
||||
if (!path.isAbsolute(absPath)) {
|
||||
absPath = path.resolve(process.cwd(), absPath)
|
||||
}
|
||||
|
||||
try {
|
||||
const content = await fs.readFile(absPath, "utf-8")
|
||||
currentFiles[filePath] = content
|
||||
} catch (error: any) {
|
||||
throw new Error(`failed to read file ${absPath}: ${error.message}`)
|
||||
}
|
||||
}
|
||||
|
||||
// Process the patch
|
||||
const [patch, fuzz] = textToPatch(params.patchText, currentFiles)
|
||||
if (fuzz > 3) {
|
||||
throw new Error(`patch contains fuzzy matches (fuzz level: ${fuzz}). Please make your context lines more precise`)
|
||||
}
|
||||
|
||||
// Convert patch to commit
|
||||
const commit = patchToCommit(patch, currentFiles)
|
||||
|
||||
// Apply the changes to the filesystem
|
||||
await applyCommit(
|
||||
commit,
|
||||
async (filePath: string, content: string) => {
|
||||
let absPath = filePath
|
||||
if (!path.isAbsolute(absPath)) {
|
||||
absPath = path.resolve(process.cwd(), absPath)
|
||||
}
|
||||
|
||||
// Create parent directories if needed
|
||||
const dir = path.dirname(absPath)
|
||||
await fs.mkdir(dir, { recursive: true })
|
||||
await fs.writeFile(absPath, content, "utf-8")
|
||||
},
|
||||
async (filePath: string) => {
|
||||
let absPath = filePath
|
||||
if (!path.isAbsolute(absPath)) {
|
||||
absPath = path.resolve(process.cwd(), absPath)
|
||||
}
|
||||
await fs.unlink(absPath)
|
||||
},
|
||||
)
|
||||
|
||||
// Calculate statistics
|
||||
// Apply the changes
|
||||
const changedFiles: string[] = []
|
||||
let totalAdditions = 0
|
||||
let totalRemovals = 0
|
||||
|
||||
for (const [filePath, change] of Object.entries(commit.changes)) {
|
||||
let absPath = filePath
|
||||
if (!path.isAbsolute(absPath)) {
|
||||
absPath = path.resolve(process.cwd(), absPath)
|
||||
|
||||
for (const change of fileChanges) {
|
||||
switch (change.type) {
|
||||
case "add":
|
||||
// Create parent directories
|
||||
const addDir = path.dirname(change.filePath)
|
||||
if (addDir !== "." && addDir !== "/") {
|
||||
await fs.mkdir(addDir, { recursive: true })
|
||||
}
|
||||
await fs.writeFile(change.filePath, change.newContent, "utf-8")
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
|
||||
case "update":
|
||||
await fs.writeFile(change.filePath, change.newContent, "utf-8")
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
|
||||
case "move":
|
||||
if (change.movePath) {
|
||||
// Create parent directories for destination
|
||||
const moveDir = path.dirname(change.movePath)
|
||||
if (moveDir !== "." && moveDir !== "/") {
|
||||
await fs.mkdir(moveDir, { recursive: true })
|
||||
}
|
||||
// Write to new location
|
||||
await fs.writeFile(change.movePath, change.newContent, "utf-8")
|
||||
// Remove original
|
||||
await fs.unlink(change.filePath)
|
||||
changedFiles.push(change.movePath)
|
||||
}
|
||||
break
|
||||
|
||||
case "delete":
|
||||
await fs.unlink(change.filePath)
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
}
|
||||
|
||||
// Update file time tracking
|
||||
FileTime.read(ctx.sessionID, change.filePath)
|
||||
if (change.movePath) {
|
||||
FileTime.read(ctx.sessionID, change.movePath)
|
||||
}
|
||||
changedFiles.push(absPath)
|
||||
|
||||
const oldContent = change.old_content || ""
|
||||
const newContent = change.new_content || ""
|
||||
|
||||
// Calculate diff statistics
|
||||
const [, additions, removals] = generateDiff(oldContent, newContent, filePath)
|
||||
totalAdditions += additions
|
||||
totalRemovals += removals
|
||||
|
||||
FileTime.read(ctx.sessionID, absPath)
|
||||
}
|
||||
|
||||
const result = `Patch applied successfully. ${changedFiles.length} files changed, ${totalAdditions} additions, ${totalRemovals} removals`
|
||||
const output = result
|
||||
// Publish file change events
|
||||
for (const filePath of changedFiles) {
|
||||
await Bus.publish(FileWatcher.Event.Updated, { file: filePath, event: "change" })
|
||||
}
|
||||
|
||||
// Generate output summary
|
||||
const relativePaths = changedFiles.map(filePath => path.relative(Instance.worktree, filePath))
|
||||
const summary = `${fileChanges.length} files changed`
|
||||
|
||||
return {
|
||||
title: `${filesToRead.length} files`,
|
||||
title: summary,
|
||||
metadata: {
|
||||
changed: changedFiles,
|
||||
additions: totalAdditions,
|
||||
removals: totalRemovals,
|
||||
diff: totalDiff,
|
||||
},
|
||||
output,
|
||||
output: `Patch applied successfully. ${summary}:\n${relativePaths.map(p => ` ${p}`).join("\n")}`,
|
||||
}
|
||||
},
|
||||
})
|
||||
})
|
||||
Reference in New Issue
Block a user