Browse Source

feat(hooks): add apply_patch stale-patch rescue (#268)

* feat(hooks): add apply_patch stale-patch rescue

* fix(hooks): harden apply_patch EOF and validation flow

* fix(hooks): reject absolute patch paths
Raxxoor 1 day ago
parent
commit
71a99193af

File diff suppressed because it is too large
+ 2 - 0
README.md


+ 1 - 0
codemap.md

@@ -118,6 +118,7 @@ Return plugin object with:
 
 
 6. **Hooks** (`src/hooks/`)
 6. **Hooks** (`src/hooks/`)
    - Auto-update checking
    - Auto-update checking
+   - `apply_patch` stale-patch rescue with strict parsing, bounded LCS fallback, stateful same-path helper updates, and safe canonical rewrites only inside root/worktree
    - Phase reminders for workflow compliance
    - Phase reminders for workflow compliance
    - Post-read nudges for delegation
    - Post-read nudges for delegation
 
 

File diff suppressed because it is too large
+ 4 - 0
docs/tools.md


+ 159 - 0
src/hooks/apply-patch/codec.test.ts

@@ -0,0 +1,159 @@
+import { describe, expect, test } from 'bun:test';
+
+import {
+  formatPatch,
+  normalizeUnicode,
+  parsePatch,
+  parsePatchStrict,
+  stripHeredoc,
+} from './codec';
+import type { ParsedPatch } from './types';
+
+// Unit tests for the patch-text codec: heredoc stripping, permissive and
+// strict parsing, round-trip formatting, and Unicode normalization.
+describe('apply-patch/codec', () => {
+  test('stripHeredoc extrae el contenido real del patch', () => {
+    expect(
+      stripHeredoc(`cat <<'PATCH'
+*** Begin Patch
+*** End Patch
+PATCH`),
+    ).toBe('*** Begin Patch\n*** End Patch');
+  });
+
+  test('parsePatch reconoce add delete update y move', () => {
+    const parsed = parsePatch(`*** Begin Patch
+*** Add File: added.txt
++alpha
+*** Delete File: removed.txt
+*** Update File: before.txt
+*** Move to: after.txt
+@@ ctx
+ line-a
+-line-b
++line-c
+*** End of File
+*** End Patch`);
+
+    expect(parsed.hunks).toHaveLength(3);
+    expect(parsed.hunks[0]).toEqual({
+      type: 'add',
+      path: 'added.txt',
+      contents: 'alpha',
+    });
+    expect(parsed.hunks[1]).toEqual({ type: 'delete', path: 'removed.txt' });
+    expect(parsed.hunks[2]).toEqual({
+      type: 'update',
+      path: 'before.txt',
+      move_path: 'after.txt',
+      chunks: [
+        {
+          old_lines: ['line-a', 'line-b'],
+          new_lines: ['line-a', 'line-c'],
+          change_context: 'ctx',
+          is_end_of_file: true,
+        },
+      ],
+    });
+  });
+
+  // CRLF-terminated heredocs appear when the model emits Windows-style EOLs;
+  // the parser must normalize them and still honor the EOF marker.
+  test('parsePatch tolera heredoc con CRLF agresivo y conserva EOF', () => {
+    const parsed = parsePatch(`cat <<'PATCH'\r
+*** Begin Patch\r
+*** Update File: sample.txt\r
+@@\r
+-alpha\r
++beta\r
+*** End of File\r
+*** End Patch\r
+PATCH`);
+
+    expect(parsed.hunks).toEqual([
+      {
+        type: 'update',
+        path: 'sample.txt',
+        chunks: [
+          {
+            old_lines: ['alpha'],
+            new_lines: ['beta'],
+            change_context: undefined,
+            is_end_of_file: true,
+          },
+        ],
+      },
+    ]);
+  });
+
+  test('parsePatchStrict falla con basura dentro de @@', () => {
+    expect(() =>
+      parsePatchStrict(`*** Begin Patch
+*** Update File: sample.txt
+@@
+-alpha
+garbage
++beta
+*** End Patch`),
+    ).toThrow('unexpected line in patch chunk');
+  });
+
+  test('parsePatchStrict falla con basura dentro de Add File', () => {
+    expect(() =>
+      parsePatchStrict(`*** Begin Patch
+*** Add File: sample.txt
++alpha
+garbage
+*** End Patch`),
+    ).toThrow('unexpected line in Add File body');
+  });
+
+  test('parsePatchStrict falla con Delete File mal formado', () => {
+    expect(() =>
+      parsePatchStrict(`*** Begin Patch
+*** Delete File: sample.txt
++ghost
+*** End Patch`),
+    ).toThrow('unexpected line between hunks');
+  });
+
+  test('parsePatchStrict falla con basura después de End Patch', () => {
+    expect(() =>
+      parsePatchStrict(`*** Begin Patch
+*** Delete File: sample.txt
+*** End Patch
+garbage`),
+    ).toThrow('unexpected line after End Patch');
+  });
+
+  test('parsePatchStrict falla si Update File no trae chunks @@', () => {
+    expect(() =>
+      parsePatchStrict(`*** Begin Patch
+*** Update File: sample.txt
+*** End Patch`),
+    ).toThrow('missing @@ chunk body');
+  });
+
+  // Round-trip stability guarantees rewritten patches re-parse identically.
+  test('formatPatch permite roundtrip estable parse -> format -> parse', () => {
+    const parsed: ParsedPatch = {
+      hunks: [
+        {
+          type: 'update',
+          path: 'sample.txt',
+          chunks: [
+            {
+              old_lines: ['alpha', 'beta'],
+              new_lines: ['alpha', 'BETA'],
+            },
+          ],
+        },
+      ],
+    };
+
+    expect(parsePatch(formatPatch(parsed))).toEqual(parsed);
+  });
+
+  test('normalizeUnicode unifica variantes tipográficas esperadas', () => {
+    expect(normalizeUnicode('“uno”…\u00A0dos—tres')).toBe('"uno"... dos-tres');
+  });
+
+  test('normalizeUnicode cubre variantes tipográficas menos comunes', () => {
+    expect(normalizeUnicode('‛uno‟―dos')).toBe(`'uno"-dos`);
+  });
+});

+ 339 - 0
src/hooks/apply-patch/codec.ts

@@ -0,0 +1,339 @@
+import type { ParsedPatch, PatchChunk, PatchHunk } from './types';
+
+type ParseMode = 'permissive' | 'strict';
+
+function normalizeLineEndings(text: string): string {
+  return text.replace(/\r\n/g, '\n').replace(/\r/g, '\n');
+}
+
+export function normalizeUnicode(text: string): string {
+  return text
+    .replace(/[\u2018\u2019\u201A\u201B]/g, "'")
+    .replace(/[\u201C\u201D\u201E\u201F]/g, '"')
+    .replace(/[\u2010\u2011\u2012\u2013\u2014\u2015]/g, '-')
+    .replace(/\u2026/g, '...')
+    .replace(/\u00A0/g, ' ');
+}
+
+export function stripHeredoc(input: string): string {
+  const normalized = normalizeLineEndings(input);
+  const match = normalized.match(
+    /^(?:cat\s+)?<<['"]?(\w+)['"]?\s*\n([\s\S]*?)\n\1\s*$/,
+  );
+  return match ? match[2] : normalized;
+}
+
+export function normalizePatchText(patchText: string): string {
+  return stripHeredoc(normalizeLineEndings(patchText).trim());
+}
+
+function parseHeader(lines: string[], index: number) {
+  const line = lines[index];
+
+  if (line.startsWith('*** Add File:')) {
+    const file = line.slice('*** Add File:'.length).trim();
+    return file ? { file, next: index + 1 } : null;
+  }
+
+  if (line.startsWith('*** Delete File:')) {
+    const file = line.slice('*** Delete File:'.length).trim();
+    return file ? { file, next: index + 1 } : null;
+  }
+
+  if (line.startsWith('*** Update File:')) {
+    const file = line.slice('*** Update File:'.length).trim();
+    let move: string | undefined;
+    let next = index + 1;
+
+    if (next < lines.length && lines[next].startsWith('*** Move to:')) {
+      const moveTarget = lines[next].slice('*** Move to:'.length).trim();
+      if (!moveTarget) {
+        return null;
+      }
+
+      move = moveTarget;
+      next += 1;
+    }
+
+    return file ? { file, move, next } : null;
+  }
+
+  return null;
+}
+
+function unexpectedPatchLine(context: string, line: string): never {
+  const rendered = line.length === 0 ? '<empty>' : line;
+  throw new Error(
+    `Invalid patch format: unexpected line ${context}: ${rendered}`,
+  );
+}
+
+// Parse the `@@` chunk bodies of an Update File hunk starting at `index`.
+// Permissive mode skips lines it does not understand; strict mode throws.
+// Returns the collected chunks plus the index of the first unconsumed line.
+function parseChunks(lines: string[], index: number, mode: ParseMode) {
+  const chunks: PatchChunk[] = [];
+  let at = index;
+
+  // Any `***` line ends the update body, except `*** End of File`, which is
+  // consumed inside the chunk loop below.
+  while (at < lines.length && !lines[at].startsWith('***')) {
+    if (!lines[at].startsWith('@@')) {
+      if (mode === 'strict') {
+        unexpectedPatchLine('in update body', lines[at]);
+      }
+      at += 1;
+      continue;
+    }
+
+    // Optional anchor text after `@@` is kept to help locate the chunk later.
+    const context = lines[at].slice(2).trim() || undefined;
+    at += 1;
+
+    const old_lines: string[] = [];
+    const new_lines: string[] = [];
+    let eof = false;
+
+    while (
+      at < lines.length &&
+      !lines[at].startsWith('@@') &&
+      (!lines[at].startsWith('***') || lines[at] === '*** End of File')
+    ) {
+      const line = lines[at];
+
+      if (line === '*** End of File') {
+        eof = true;
+        at += 1;
+        break;
+      }
+
+      // ' ' = context (present on both sides), '-' = removed, '+' = added.
+      if (line.startsWith(' ')) {
+        old_lines.push(line.slice(1));
+        new_lines.push(line.slice(1));
+        at += 1;
+        continue;
+      }
+
+      if (line.startsWith('-')) {
+        old_lines.push(line.slice(1));
+        at += 1;
+        continue;
+      }
+
+      if (line.startsWith('+')) {
+        new_lines.push(line.slice(1));
+        at += 1;
+        continue;
+      }
+
+      if (mode === 'strict') {
+        unexpectedPatchLine('in patch chunk', line);
+      }
+
+      // Permissive mode: skip unprefixed garbage inside a chunk.
+      at += 1;
+    }
+
+    chunks.push({
+      old_lines,
+      new_lines,
+      change_context: context,
+      // Collapse `false` to undefined so the flag only appears when marked.
+      is_end_of_file: eof || undefined,
+    });
+  }
+
+  return { chunks, next: at };
+}
+
+function parseAdd(lines: string[], index: number, mode: ParseMode) {
+  let contents = '';
+  let at = index;
+
+  while (at < lines.length && !lines[at].startsWith('***')) {
+    if (lines[at].startsWith('+')) {
+      contents += `${lines[at].slice(1)}\n`;
+      at += 1;
+      continue;
+    }
+
+    if (mode === 'strict') {
+      unexpectedPatchLine('in Add File body', lines[at]);
+    }
+
+    at += 1;
+  }
+
+  if (contents.endsWith('\n')) {
+    contents = contents.slice(0, -1);
+  }
+
+  return { content: contents, next: at };
+}
+
+// Parse a full `*** Begin Patch` envelope into hunks. `mode` controls how
+// unrecognized lines are treated: 'permissive' skips them, 'strict' throws a
+// descriptive error (the variant used by rewrite/prepare validation).
+function parsePatchInternal(patchText: string, mode: ParseMode): ParsedPatch {
+  const clean = normalizePatchText(patchText);
+  const lines = clean.split('\n');
+  const begin = lines.findIndex((line) => line.trim() === '*** Begin Patch');
+  const end = lines.findIndex((line) => line.trim() === '*** End Patch');
+
+  if (begin === -1 || end === -1 || begin >= end) {
+    throw new Error('Invalid patch format: missing Begin/End markers');
+  }
+
+  if (mode === 'strict') {
+    // Strict mode rejects any stray text outside the Begin/End envelope.
+    for (const line of lines.slice(0, begin)) {
+      unexpectedPatchLine('before Begin Patch', line);
+    }
+
+    for (const line of lines.slice(end + 1)) {
+      unexpectedPatchLine('after End Patch', line);
+    }
+  }
+
+  const hunks: PatchHunk[] = [];
+  let index = begin + 1;
+
+  while (index < end) {
+    const header = parseHeader(lines, index);
+
+    if (!header) {
+      if (mode === 'strict') {
+        unexpectedPatchLine('between hunks', lines[index]);
+      }
+      index += 1;
+      continue;
+    }
+
+    if (lines[index].startsWith('*** Add File:')) {
+      const next = parseAdd(lines, header.next, mode);
+      hunks.push({
+        type: 'add',
+        path: header.file,
+        contents: next.content,
+      });
+      index = next.next;
+      continue;
+    }
+
+    if (lines[index].startsWith('*** Delete File:')) {
+      // Delete hunks have no body; the header line is the whole hunk.
+      hunks.push({ type: 'delete', path: header.file });
+      index = header.next;
+      continue;
+    }
+
+    // Anything else parseHeader accepted is an Update File hunk.
+    const next = parseChunks(lines, header.next, mode);
+    if (mode === 'strict' && next.chunks.length === 0) {
+      throw new Error(
+        `Invalid patch format: Update File is missing @@ chunk body: ${header.file}`,
+      );
+    }
+
+    hunks.push({
+      type: 'update',
+      path: header.file,
+      move_path: header.move,
+      chunks: next.chunks,
+    });
+    index = next.next;
+  }
+
+  return { hunks };
+}
+
+/** Parse patch text, silently skipping lines that are not understood. */
+export function parsePatch(patchText: string): ParsedPatch {
+  return parsePatchInternal(patchText, 'permissive');
+}
+
+/** Parse patch text, throwing on any line that is not valid patch syntax. */
+export function parsePatchStrict(patchText: string): ParsedPatch {
+  return parsePatchInternal(patchText, 'strict');
+}
+
+function diffMatrix(old_lines: string[], new_lines: string[]): number[][] {
+  const dp = Array.from({ length: old_lines.length + 1 }, () =>
+    Array<number>(new_lines.length + 1).fill(0),
+  );
+
+  for (let oldIndex = 1; oldIndex <= old_lines.length; oldIndex += 1) {
+    for (let newIndex = 1; newIndex <= new_lines.length; newIndex += 1) {
+      dp[oldIndex][newIndex] =
+        old_lines[oldIndex - 1] === new_lines[newIndex - 1]
+          ? dp[oldIndex - 1][newIndex - 1] + 1
+          : Math.max(dp[oldIndex - 1][newIndex], dp[oldIndex][newIndex - 1]);
+    }
+  }
+
+  return dp;
+}
+
+// Render one chunk back into patch-body lines. The diff body is rebuilt via
+// an LCS backtrack over old_lines/new_lines so shared lines become context
+// (' ') and only genuine differences appear as '-'/'+' lines.
+function renderChunk(chunk: PatchChunk): string[] {
+  const lines = [chunk.change_context ? `@@ ${chunk.change_context}` : '@@'];
+  const dp = diffMatrix(chunk.old_lines, chunk.new_lines);
+  const body: string[] = [];
+  let oldIndex = chunk.old_lines.length;
+  let newIndex = chunk.new_lines.length;
+
+  // Walk the DP table from the bottom-right corner; lines are collected in
+  // reverse order and flipped once at the end.
+  while (oldIndex > 0 && newIndex > 0) {
+    if (chunk.old_lines[oldIndex - 1] === chunk.new_lines[newIndex - 1]) {
+      body.push(` ${chunk.old_lines[oldIndex - 1]}`);
+      oldIndex -= 1;
+      newIndex -= 1;
+      continue;
+    }
+
+    // `>=` tie-breaks toward deletions, keeping the output deterministic.
+    if (dp[oldIndex - 1][newIndex] >= dp[oldIndex][newIndex - 1]) {
+      body.push(`-${chunk.old_lines[oldIndex - 1]}`);
+      oldIndex -= 1;
+      continue;
+    }
+
+    body.push(`+${chunk.new_lines[newIndex - 1]}`);
+    newIndex -= 1;
+  }
+
+  // Drain whatever remains on either side once the other is exhausted.
+  while (oldIndex > 0) {
+    body.push(`-${chunk.old_lines[oldIndex - 1]}`);
+    oldIndex -= 1;
+  }
+
+  while (newIndex > 0) {
+    body.push(`+${chunk.new_lines[newIndex - 1]}`);
+    newIndex -= 1;
+  }
+
+  lines.push(...body.reverse());
+
+  if (chunk.is_end_of_file) {
+    lines.push('*** End of File');
+  }
+
+  return lines;
+}
+
+function renderAddContents(contents: string): string[] {
+  if (contents.length === 0) {
+    return [];
+  }
+
+  return contents.split('\n').map((line) => `+${line}`);
+}
+
+export function formatPatch(patch: ParsedPatch): string {
+  const lines = ['*** Begin Patch'];
+
+  for (const hunk of patch.hunks) {
+    if (hunk.type === 'add') {
+      lines.push(`*** Add File: ${hunk.path}`);
+      lines.push(...renderAddContents(hunk.contents));
+      continue;
+    }
+
+    if (hunk.type === 'delete') {
+      lines.push(`*** Delete File: ${hunk.path}`);
+      continue;
+    }
+
+    lines.push(`*** Update File: ${hunk.path}`);
+    if (hunk.move_path) {
+      lines.push(`*** Move to: ${hunk.move_path}`);
+    }
+    for (const chunk of hunk.chunks) {
+      lines.push(...renderChunk(chunk));
+    }
+  }
+
+  lines.push('*** End Patch');
+  return lines.join('\n');
+}

+ 82 - 0
src/hooks/apply-patch/codemap.md

@@ -0,0 +1,82 @@
+# apply-patch codemap
+
+## Hook responsibility
+
+`src/hooks/apply-patch/` intercepts only the `apply_patch` tool before OpenCode executes it and, when it detects a stale but recoverable patch, rewrites only the canonical old lines so the native runtime can apply it without inventing new changes. If any patch path falls outside `root`/`worktree`, the hook blocks `apply_patch` before native execution.
+
+## Flow
+
+1. `index.ts` keeps the hook always active and calls `rewritePatchText(...)`.
+2. `operations.ts` remains a thin internal barrel over the concrete modules.
+3. `codec.ts` parses the `*** Begin Patch` / `*** End Patch` format and can serialize it again.
+4. `resolution.ts` resolves each chunk against the real file and canonicalizes safe tolerant matches.
+5. `matching.ts` implements exact matching and tolerant rescue paths (unicode, trim, prefix/suffix, and a bounded conservative LCS).
+6. `patch.ts` remains a compatibility shim; internal code imports the concrete modules.
+
+## Modules
+
+### `types.ts`
+- Shared patch-domain types.
+- Separates public contracts (`PatchChunk`, `PreparedChange`, etc.) from the logic.
+
+### `codec.ts`
+- `normalizeUnicode()` and `stripHeredoc()`.
+- `parsePatch()` converts text into hunks, and `parsePatchStrict()` provides the strict validation used by rewrite/prepare.
+- `formatPatch()` rebuilds the patch by reusing the lines from `new_lines` byte-for-byte; if an insertion is re-anchored, it also adds the required intact anchor line.
+
+### `matching.ts`
+- Exact/unicode/trim comparators.
+- `seek()` / `seekMatch()` and `list()` search for sequences.
+- `prefix()` / `suffix()` provide edge-based rescue.
+- `rescueByPrefixSuffix()` and `rescueByLcs()` recover stale chunks deterministically.
+
+### `resolution.ts`
+- `readFileLines()` reads the file as logical lines; `deriveNewContent()` rebuilds it while preserving the detected physical EOL (`LF` or `CRLF`).
+- `resolveChunkStart()` uses `change_context` as the initial anchor.
+- `locateChunk()` chooses between exact match, canonicalized tolerant match, edge rescue, or LCS rescue.
+- `resolveUpdateChunks()` detects overlaps, prepares ordered hits, and handles the special anchored-insertion path for chunks without `old_lines`, including safe EOF canonicalization when the anchor only resolved through tolerant matching.
+- `deriveNewContent()` / `deriveNewContentFromText()` and `applyHits()` produce the final content while preserving `LF`/`CRLF` and the physical final-newline state in updates.
+
+### `execution-context.ts`
+- `parseValidatedPatch()` centralizes upfront validation.
+- Owns the path guard, realpath/stat caches, and the staged file-state machine.
+- `createPatchExecutionContext()` is the shared entry point for rewrite/prepare.
+
+### `rewrite.ts`
+- `rewritePatchText()` rewrites update chunks when rescue or safe canonicalization happened.
+- It performs a global pre-scan of `add`/`delete`/`update`/`move` before rewriting anything.
+- `rewritePatch()` validates `Delete File` with the same staged state machine as `preparePatchChanges()`; this makes it fail if the file no longer exists in the prepared context (real missing file, double delete, or delete after a previous move/delete) before delegating to the native runtime.
+- `rewritePatch()` also detects when an `Update File` is no longer self-contained because it consumes staged state from an earlier hunk (for example `add -> update`, `move -> update`, or `update -> update`) and collapses that chain into a canonical form that is safe for the native runtime.
+- It keeps the merge/minimize/collapse helpers used for dependent update groups.
+
+### `prepared-changes.ts`
+- `preparePatchChanges()` converts hunks into filesystem changes while accumulating state per path to support multiple sequential `Update File` hunks on the same file.
+- `applyPreparedChanges()` is documented as an internal best-effort rollback helper that consumes the output of `preparePatchChanges()`, not as a universal transactional engine; it also revalidates the basic shape of the legacy array (types/text/normalized absolute paths) and filesystem invariants before touching disk.
+
+### `operations.ts`
+- Thin internal facade/barrel that preserves existing imports for the hook, tests, and compatibility shim.
+
+### `patch.ts`
+- Thin facade/barrel.
+- Re-exports only the stable public API used by the hook and the tests.
+
+## Invariants
+
+- The hook remains always active and has no public config.
+- Content provided through `new_lines` is neither normalized nor rewritten; it is only reused byte-for-byte, except for the intact anchor line that may be added for re-anchored insertions.
+- Updates preserve the detected physical `EOL` and whether the original file ended with a newline.
+- If a path falls outside `root`/`worktree`, the hook blocks `apply_patch` before native execution.
+- No new limits, flags, or runtime settings are introduced.
+- The scope of this rescue remains limited to `apply_patch`; it does not rewrite `edit` or `write`.
+- Errors remain descriptive and keep the `apply_patch verification failed` prefix where it already existed.
+- Normal rewriting remains limited to `update` chunks; only dependent chains between hunks may collapse a previous `add` into the equivalent final state to make the patch self-contained again.
+- If an exact resolution depends on the staged result of earlier hunks, the patch is no longer considered "intact" and is collapsed into a self-contained form before handoff to native.
+- `Delete File` shares the same staged semantics in both rewrite and prepare; an already invalid delete must not reach native.
+- Resolved chunks cannot overlap.
+
+## Quick maintenance guide
+
+- Parsing or rendering issue for patches? → `codec.ts`
+- Issue locating stale lines? → `matching.ts` and `resolution.ts`
+- Issue writing files or moving paths? → `prepared-changes.ts`
+- Need to know what the real hook consumes? → `index.ts` and `patch.ts`

+ 117 - 0
src/hooks/apply-patch/errors.ts

@@ -0,0 +1,117 @@
+import type { ApplyPatchErrorCode, ApplyPatchErrorKind } from './types';
+
+// Human-readable message prefix per error kind. Callers and tests match on
+// these exact strings, so changing any of them is a breaking change.
+const APPLY_PATCH_ERROR_PREFIX: Record<ApplyPatchErrorKind, string> = {
+  blocked: 'apply_patch blocked',
+  validation: 'apply_patch validation failed',
+  verification: 'apply_patch verification failed',
+  internal: 'apply_patch internal error',
+};
+
+/**
+ * Error type shared across the apply_patch hook. `kind` selects the message
+ * prefix and drives the is*Error guards; `code` is a stable machine-readable
+ * discriminator; `cause` preserves the originally thrown value, if any.
+ */
+export class ApplyPatchError extends Error {
+  override readonly cause?: unknown;
+
+  constructor(
+    readonly kind: ApplyPatchErrorKind,
+    readonly code: ApplyPatchErrorCode,
+    message: string,
+    options?: {
+      cause?: unknown;
+    },
+  ) {
+    super(`${APPLY_PATCH_ERROR_PREFIX[kind]}: ${message}`);
+    this.name = 'ApplyPatchError';
+    this.cause = options?.cause;
+  }
+}
+
+export function getErrorMessage(error: unknown): string {
+  return error instanceof Error ? error.message : String(error);
+}
+
+export function createApplyPatchBlockedError(
+  message: string,
+  cause?: unknown,
+): ApplyPatchError {
+  return new ApplyPatchError('blocked', 'outside_workspace', message, {
+    cause,
+  });
+}
+
+export function createApplyPatchValidationError(
+  message: string,
+  cause?: unknown,
+): ApplyPatchError {
+  return new ApplyPatchError('validation', 'malformed_patch', message, {
+    cause,
+  });
+}
+
+export function createApplyPatchVerificationError(
+  message: string,
+  cause?: unknown,
+): ApplyPatchError {
+  return new ApplyPatchError('verification', 'verification_failed', message, {
+    cause,
+  });
+}
+
+export function createApplyPatchInternalError(
+  message: string,
+  cause?: unknown,
+): ApplyPatchError {
+  return new ApplyPatchError('internal', 'internal_unexpected', message, {
+    cause,
+  });
+}
+
+export function isApplyPatchError(error: unknown): error is ApplyPatchError {
+  return error instanceof ApplyPatchError;
+}
+
+export function isApplyPatchBlockedError(error: unknown): boolean {
+  return isApplyPatchError(error) && error.kind === 'blocked';
+}
+
+export function isApplyPatchValidationError(error: unknown): boolean {
+  return isApplyPatchError(error) && error.kind === 'validation';
+}
+
+export function isApplyPatchVerificationError(error: unknown): boolean {
+  return isApplyPatchError(error) && error.kind === 'verification';
+}
+
+export function isApplyPatchInternalError(error: unknown): boolean {
+  return isApplyPatchError(error) && error.kind === 'internal';
+}
+
+export function getApplyPatchErrorDetails(error: unknown):
+  | {
+      kind: ApplyPatchErrorKind;
+      code: ApplyPatchErrorCode;
+      message: string;
+    }
+  | undefined {
+  if (!isApplyPatchError(error)) {
+    return undefined;
+  }
+
+  return {
+    kind: error.kind,
+    code: error.code,
+    message: error.message,
+  };
+}
+
+export function ensureApplyPatchError(
+  error: unknown,
+  context: string,
+): ApplyPatchError {
+  if (isApplyPatchError(error)) {
+    return error;
+  }
+
+  return createApplyPatchInternalError(
+    `${context}: ${getErrorMessage(error)}`,
+    error,
+  );
+}

+ 389 - 0
src/hooks/apply-patch/execution-context.ts

@@ -0,0 +1,389 @@
+import type { Stats } from 'node:fs';
+import * as fs from 'node:fs/promises';
+import path from 'node:path';
+
+import { parsePatchStrict } from './codec';
+import {
+  createApplyPatchBlockedError,
+  createApplyPatchInternalError,
+  createApplyPatchValidationError,
+  createApplyPatchVerificationError,
+  getErrorMessage,
+} from './errors';
+import { applyHits, resolveUpdateChunksFromText } from './resolution';
+import type {
+  ApplyPatchRuntimeOptions,
+  PatchHunk,
+  UpdatePatchHunk,
+} from './types';
+
+// Lazily-resolved realpaths for root/worktree plus a per-target memo,
+// shared across every guard check of a single patch.
+type PathGuardContext = {
+  rootReal: Promise<string>;
+  worktreeReal?: Promise<string>;
+  realCache: Map<string, Promise<string>>;
+};
+
+// Deduplicates fs.stat calls per file path for one patch execution.
+type FileCacheContext = {
+  stats: Map<string, Promise<Stats | null>>;
+};
+
+// Staged view of a file as earlier hunks would leave it. `derived` marks
+// state produced by a previous hunk rather than read straight from disk.
+export type PreparedFileState =
+  | {
+      exists: false;
+      derived: boolean;
+    }
+  | {
+      exists: true;
+      text: string;
+      mode?: number;
+      derived: boolean;
+    };
+
+// Shared entry point for rewrite/prepare: the validated hunks plus the
+// staged file-state machine tracking per-path effects of earlier hunks.
+export type PatchExecutionContext = {
+  hunks: PatchHunk[];
+  staged: Map<string, PreparedFileState>;
+  getPreparedFileState: (
+    filePath: string,
+    verb: 'update' | 'delete',
+  ) => Promise<PreparedFileState>;
+  assertPreparedPathMissing: (
+    filePath: string,
+    verb: 'add' | 'move',
+  ) => Promise<void>;
+};
+
+// Result of resolving one update hunk against the current staged text.
+export type ResolvedPreparedUpdate = {
+  resolved: Awaited<ReturnType<typeof resolveUpdateChunksFromText>>['resolved'];
+  nextText: string;
+};
+
+export function isMissingPathError(error: unknown): boolean {
+  return (
+    !!error &&
+    typeof error === 'object' &&
+    'code' in error &&
+    (error.code === 'ENOENT' || error.code === 'ENOTDIR')
+  );
+}
+
+// Resolve `target` to a real (symlink-free) absolute path, tolerating
+// missing suffixes: the deepest existing ancestor is realpath'ed and the
+// non-existent tail is re-joined verbatim (realpath "missing-ok" semantics).
+async function real(target: string): Promise<string> {
+  const parts: string[] = [];
+  let current = path.resolve(target);
+
+  while (true) {
+    const exact = await fs.realpath(current).catch((error: unknown) => {
+      if (isMissingPathError(error)) {
+        return null;
+      }
+
+      throw createApplyPatchInternalError(
+        `Failed to resolve real path: ${current}`,
+        error,
+      );
+    });
+    if (exact) {
+      // `parts` was collected leaf-first; reversing restores path order.
+      return parts.length === 0 ? exact : path.join(exact, ...parts.reverse());
+    }
+
+    const parent = path.dirname(current);
+    if (parent === current) {
+      // Reached the filesystem root without finding an existing ancestor.
+      return parts.length === 0
+        ? current
+        : path.join(current, ...parts.reverse());
+    }
+
+    parts.push(path.basename(current));
+    current = parent;
+  }
+}
+
+function inside(root: string, target: string): boolean {
+  const rel = path.relative(root, target);
+  return rel === '' || (!rel.startsWith('..') && !path.isAbsolute(rel));
+}
+
+// Build the guard context once per patch. Realpath resolution starts
+// eagerly but is awaited lazily; a worktree of '/' is ignored as meaningless.
+function createPathGuardContext(
+  root: string,
+  worktree: string | undefined,
+): PathGuardContext {
+  return {
+    rootReal: real(root),
+    worktreeReal: worktree && worktree !== '/' ? real(worktree) : undefined,
+    realCache: new Map(),
+  };
+}
+
+// Memoized real() keyed by the resolved target path.
+async function realCached(
+  ctx: PathGuardContext,
+  target: string,
+): Promise<string> {
+  const resolvedTarget = path.resolve(target);
+  let pending = ctx.realCache.get(resolvedTarget);
+  if (!pending) {
+    pending = real(resolvedTarget);
+    ctx.realCache.set(resolvedTarget, pending);
+  }
+
+  return await pending;
+}
+
+// Throw a blocked error unless `target` (after symlink resolution) lives
+// inside the workspace root or, when configured, the worktree.
+async function guard(ctx: PathGuardContext, target: string): Promise<void> {
+  const [targetReal, rootReal] = await Promise.all([
+    realCached(ctx, target),
+    ctx.rootReal,
+  ]);
+  if (inside(rootReal, targetReal)) {
+    return;
+  }
+
+  if (!ctx.worktreeReal) {
+    throw createApplyPatchBlockedError(
+      `patch contains path outside workspace root: ${target}`,
+    );
+  }
+
+  const treeReal = await ctx.worktreeReal;
+  if (inside(treeReal, targetReal)) {
+    return;
+  }
+
+  throw createApplyPatchBlockedError(
+    `patch contains path outside workspace root: ${target}`,
+  );
+}
+
+// Fresh stat cache for a single patch execution.
+function createFileCacheContext(): FileCacheContext {
+  return { stats: new Map() };
+}
+
+// Memoized fs.stat keyed by file path; a missing path resolves to null,
+// any other failure becomes an internal error.
+async function statCached(
+  ctx: FileCacheContext,
+  filePath: string,
+): Promise<Stats | null> {
+  let pending = ctx.stats.get(filePath);
+  if (!pending) {
+    const nextPending = fs.stat(filePath).catch((error: unknown) => {
+      if (isMissingPathError(error)) {
+        return null;
+      }
+
+      throw createApplyPatchInternalError(
+        `Failed to stat file for patch verification: ${filePath}`,
+        error,
+      );
+    });
+    ctx.stats.set(filePath, nextPending);
+    pending = nextPending;
+  }
+
+  return await pending;
+}
+
+// Verify the target exists and is not a directory before an update/delete.
+async function assertRegularFile(
+  ctx: FileCacheContext,
+  filePath: string,
+  verb: 'update' | 'delete',
+): Promise<void> {
+  const stat = await statCached(ctx, filePath);
+  if (!stat || stat.isDirectory()) {
+    throw createApplyPatchVerificationError(
+      `Failed to read file to ${verb}: ${filePath}`,
+    );
+  }
+}
+
+function collectPatchTargets(root: string, hunks: PatchHunk[]): string[] {
+  const targets = new Set<string>();
+
+  for (const hunk of hunks) {
+    targets.add(path.resolve(root, hunk.path));
+
+    if (hunk.type === 'update' && hunk.move_path) {
+      targets.add(path.resolve(root, hunk.move_path));
+    }
+  }
+
+  return [...targets];
+}
+
+function validatePatchPaths(hunks: PatchHunk[]): void {
+  for (const hunk of hunks) {
+    if (path.isAbsolute(hunk.path)) {
+      throw createApplyPatchValidationError(
+        `absolute patch paths are not allowed: ${hunk.path}`,
+      );
+    }
+
+    if (
+      hunk.type === 'update' &&
+      hunk.move_path &&
+      path.isAbsolute(hunk.move_path)
+    ) {
+      throw createApplyPatchValidationError(
+        `absolute patch paths are not allowed: ${hunk.move_path}`,
+      );
+    }
+  }
+}
+
+async function guardPatchTargets(
+  root: string,
+  worktree: string | undefined,
+  targets: string[],
+): Promise<number> {
+  const guardContext = createPathGuardContext(root, worktree);
+
+  for (const target of targets) {
+    await guard(guardContext, target);
+  }
+
+  return targets.length;
+}
+
+// Strictly parse patch text and run the cheap structural validations that
+// need no filesystem access; all failures become validation errors.
+export function parseValidatedPatch(patchText: string): PatchHunk[] {
+  let hunks: PatchHunk[];
+
+  try {
+    hunks = parsePatchStrict(patchText).hunks;
+  } catch (error) {
+    throw createApplyPatchValidationError(getErrorMessage(error));
+  }
+
+  if (hunks.length === 0) {
+    // Distinguish a literally empty envelope from input whose hunks simply
+    // failed to parse; both are rejected, with different messages.
+    const clean = patchText.replace(/\r\n/g, '\n').replace(/\r/g, '\n').trim();
+    if (clean === '*** Begin Patch\n*** End Patch') {
+      throw createApplyPatchValidationError('empty patch');
+    }
+
+    throw createApplyPatchValidationError('no hunks found');
+  }
+
+  validatePatchPaths(hunks);
+
+  return hunks;
+}
+
+async function readPreparedFileText(
+  filePath: string,
+  verb: 'update' | 'delete',
+): Promise<string> {
+  try {
+    return await fs.readFile(filePath, 'utf-8');
+  } catch (error) {
+    if (isMissingPathError(error)) {
+      throw createApplyPatchVerificationError(
+        `Failed to read file to ${verb}: ${filePath}`,
+      );
+    }
+
+    throw createApplyPatchInternalError(
+      `Failed to read file for patch verification: ${filePath}`,
+      error,
+    );
+  }
+}
+
+// Build the shared execution context: strict-parse and validate the patch,
+// guard every target path, then expose the staged file-state machine used by
+// both rewrite and prepare.
+export async function createPatchExecutionContext(
+  root: string,
+  patchText: string,
+  worktree?: string,
+): Promise<PatchExecutionContext> {
+  const hunks = parseValidatedPatch(patchText);
+  await guardPatchTargets(root, worktree, collectPatchTargets(root, hunks));
+  const files = createFileCacheContext();
+  const staged = new Map<string, PreparedFileState>();
+
+  // Fail when an add/move would land on a path that already exists,
+  // preferring staged knowledge from earlier hunks over the filesystem.
+  async function assertPreparedPathMissing(
+    filePath: string,
+    verb: 'add' | 'move',
+  ): Promise<void> {
+    const existing = staged.get(filePath);
+    if (existing) {
+      if (!existing.exists) {
+        return;
+      }
+
+      throw createApplyPatchVerificationError(
+        verb === 'add'
+          ? `Add File target already exists: ${filePath}`
+          : `Move destination already exists: ${filePath}`,
+      );
+    }
+
+    const stat = await statCached(files, filePath);
+    if (!stat) {
+      return;
+    }
+
+    throw createApplyPatchVerificationError(
+      verb === 'add'
+        ? `Add File target already exists: ${filePath}`
+        : `Move destination already exists: ${filePath}`,
+    );
+  }
+
+  // Return the staged state for an update/delete target, reading (and
+  // caching) it from disk on first access. A path staged as non-existent
+  // (deleted/moved away earlier) fails like a genuinely missing file.
+  async function getPreparedFileState(
+    filePath: string,
+    verb: 'update' | 'delete',
+  ): Promise<PreparedFileState> {
+    const existing = staged.get(filePath);
+    if (existing) {
+      if (!existing.exists) {
+        throw createApplyPatchVerificationError(
+          `Failed to read file to ${verb}: ${filePath}`,
+        );
+      }
+
+      return existing;
+    }
+
+    await assertRegularFile(files, filePath, verb);
+    const stat = await statCached(files, filePath);
+    const text = await readPreparedFileText(filePath, verb);
+    const state: PreparedFileState = {
+      exists: true,
+      text,
+      // Keep only the permission bits of st_mode.
+      mode: stat ? stat.mode & 0o7777 : undefined,
+      derived: false,
+    };
+    staged.set(filePath, state);
+    return state;
+  }
+
+  return {
+    hunks,
+    staged,
+    getPreparedFileState,
+    assertPreparedPathMissing,
+  };
+}
+
+// Resolve an update hunk's chunks against `currentText` and compute the text
+// the file would contain afterwards. Any resolution failure is surfaced as a
+// verification error, preserving the original cause.
+export function resolvePreparedUpdate(
+  filePath: string,
+  currentText: string,
+  hunk: UpdatePatchHunk,
+  cfg: ApplyPatchRuntimeOptions,
+): ResolvedPreparedUpdate {
+  try {
+    const { lines, resolved, eol, hasFinalNewline } =
+      resolveUpdateChunksFromText(filePath, currentText, hunk.chunks, cfg);
+
+    return {
+      resolved,
+      nextText: applyHits(
+        lines,
+        resolved.map((chunk) => chunk.hit),
+        eol,
+        hasFinalNewline,
+      ),
+    };
+  } catch (error) {
+    throw createApplyPatchVerificationError(getErrorMessage(error), error);
+  }
+}
+
+export function stageAddedText(contents: string): string {
+  return contents.length === 0 || contents.endsWith('\n')
+    ? contents
+    : `${contents}\n`;
+}

+ 648 - 0
src/hooks/apply-patch/hook.test.ts

@@ -0,0 +1,648 @@
+import { describe, expect, test } from 'bun:test';
+import { chmod, mkdir, readFile, stat, writeFile } from 'node:fs/promises';
+import path from 'node:path';
+
+import { parsePatch } from './codec';
+import { createApplyPatchHook } from './index';
+import { applyPreparedChanges, preparePatchChanges } from './operations';
+import { createTempDir, DEFAULT_OPTIONS, writeFixture } from './test-helpers';
+
+function createHook() {
+  return createApplyPatchHook({
+    client: {} as never,
+    directory: '/tmp/hook-root',
+    worktree: '/tmp/hook-root',
+  } as never);
+}
+
+describe('apply-patch/hook', () => {
+  test('ignora tools distintos de apply_patch', async () => {
+    const hook = createHook();
+    const patchText = '*** Begin Patch\n*** End Patch';
+    const output = { args: { patchText } };
+
+    await hook['tool.execute.before']({ tool: 'read' }, output);
+
+    expect(output.args.patchText).toBe(patchText);
+  });
+
+  test('bloquea un patch no rescatable como verification antes del nativo', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(root, 'sample.txt', 'alpha\nbeta\ngamma\n');
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: sample.txt
+@@
+-missing
++omega
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await expect(
+      hook['tool.execute.before'](
+        { tool: 'apply_patch', directory: root },
+        output,
+      ),
+    ).rejects.toThrow(
+      'apply_patch verification failed: Failed to find expected lines',
+    );
+
+    expect(output.args.patchText).toBe(patchText);
+  });
+
+  test('normaliza un patch exacto envuelto en heredoc antes del nativo', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(
+      root,
+      'sample.txt',
+      'line-01\nexact-top\nexact-old\nexact-bottom\nline-05\n',
+    );
+    const hook = createHook();
+    const cleanPatchText = `*** Begin Patch
+*** Update File: sample.txt
+@@ exact-top
+-exact-old
++exact-new
+ exact-bottom
+*** End Patch`;
+    const output = {
+      args: {
+        patchText: `cat <<'PATCH'
+${cleanPatchText}
+PATCH`,
+      },
+    };
+
+    await hook['tool.execute.before'](
+      { tool: 'apply_patch', directory: root },
+      output,
+    );
+
+    expect(output.args.patchText).toBe(cleanPatchText);
+
+    const changes = await preparePatchChanges(
+      root,
+      output.args.patchText as string,
+      DEFAULT_OPTIONS,
+    );
+    await applyPreparedChanges(changes);
+    expect(await readFile(path.join(root, 'sample.txt'), 'utf-8')).toBe(
+      'line-01\nexact-top\nexact-new\nexact-bottom\nline-05\n',
+    );
+  });
+
+  test('reescribe stale patch de prefijo y sigue siendo aplicable', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(
+      root,
+      'sample.txt',
+      'top\nA\nB-stale\nC\nD\nE\nbottom\n',
+    );
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: sample.txt
+@@ top
+ A
+-B
+-C
+-D
+-E
++B
++C
++D
++X
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await hook['tool.execute.before'](
+      { tool: 'apply_patch', directory: root },
+      output,
+    );
+
+    const rewritten = parsePatch(output.args.patchText as string).hunks[0];
+    expect(rewritten.type).toBe('update');
+    expect(
+      rewritten.type === 'update' && rewritten.chunks[0]?.old_lines,
+    ).toEqual(['A', 'B-stale', 'C', 'D', 'E']);
+
+    const changes = await preparePatchChanges(
+      root,
+      output.args.patchText as string,
+      DEFAULT_OPTIONS,
+    );
+    await applyPreparedChanges(changes);
+    expect(await readFile(path.join(root, 'sample.txt'), 'utf-8')).toBe(
+      'top\nA\nB\nC\nD\nX\nbottom\n',
+    );
+  });
+
+  test('no altera new_lines durante la reescritura', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(
+      root,
+      'sample.txt',
+      'top\nprefix\nstale-value\nsuffix\nbottom\n',
+    );
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: sample.txt
+@@ top
+ prefix
+-old-value
++ \tverbatim  ""  Ω  
+ suffix
+*** End Patch`;
+    const expected = parsePatch(patchText).hunks[0];
+    const output = { args: { patchText } };
+
+    await hook['tool.execute.before'](
+      { tool: 'apply_patch', directory: root },
+      output,
+    );
+
+    const rewritten = parsePatch(output.args.patchText as string).hunks[0];
+    expect(expected.type).toBe('update');
+    expect(rewritten.type).toBe('update');
+    expect(
+      expected.type === 'update' && rewritten.type === 'update'
+        ? rewritten.chunks[0]?.new_lines
+        : undefined,
+    ).toEqual(expected.type === 'update' ? expected.chunks[0]?.new_lines : []);
+  });
+
+  test('reescribe stale unicode-only y sigue siendo aplicable', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(root, 'sample.txt', 'const title = “Hola”;\n');
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: sample.txt
+@@
+-const title = "Hola";
++const title = "Hola mundo";
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await hook['tool.execute.before'](
+      { tool: 'apply_patch', directory: root },
+      output,
+    );
+
+    const rewritten = parsePatch(output.args.patchText as string).hunks[0];
+    expect(rewritten.type).toBe('update');
+    expect(
+      rewritten.type === 'update' ? rewritten.chunks[0]?.old_lines : undefined,
+    ).toEqual(['const title = “Hola”;']);
+
+    const changes = await preparePatchChanges(
+      root,
+      output.args.patchText as string,
+      DEFAULT_OPTIONS,
+    );
+    await applyPreparedChanges(changes);
+    expect(await readFile(path.join(root, 'sample.txt'), 'utf-8')).toBe(
+      'const title = "Hola mundo";\n',
+    );
+  });
+
+  test('reescribe stale trim-end y sigue siendo aplicable', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(root, 'sample.txt', 'alpha  \n');
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: sample.txt
+@@
+-alpha
++omega
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await hook['tool.execute.before'](
+      { tool: 'apply_patch', directory: root },
+      output,
+    );
+
+    const rewritten = parsePatch(output.args.patchText as string).hunks[0];
+    expect(rewritten.type).toBe('update');
+    expect(
+      rewritten.type === 'update' ? rewritten.chunks[0]?.old_lines : undefined,
+    ).toEqual(['alpha  ']);
+
+    const changes = await preparePatchChanges(
+      root,
+      output.args.patchText as string,
+      DEFAULT_OPTIONS,
+    );
+    await applyPreparedChanges(changes);
+    expect(await readFile(path.join(root, 'sample.txt'), 'utf-8')).toBe(
+      'omega\n',
+    );
+  });
+
+  test('bloquea un stale trim-only como verification', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(root, 'sample.txt', '  alpha  \n');
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: sample.txt
+@@
+-alpha
++omega
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await expect(
+      hook['tool.execute.before'](
+        { tool: 'apply_patch', directory: root },
+        output,
+      ),
+    ).rejects.toThrow(
+      'apply_patch verification failed: Failed to find expected lines',
+    );
+
+    expect(output.args.patchText).toBe(patchText);
+  });
+
+  test('bloquea en runtime un @@ mal formado antes del nativo', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(root, 'sample.txt', 'alpha\nbeta\n');
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: sample.txt
+@@
+ alpha
+garbage
+-beta
++BETA
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await expect(
+      hook['tool.execute.before'](
+        { tool: 'apply_patch', directory: root },
+        output,
+      ),
+    ).rejects.toThrow(
+      'apply_patch validation failed: Invalid patch format: unexpected line in patch chunk: garbage',
+    );
+
+    expect(output.args.patchText).toBe(patchText);
+  });
+
+  test('bloquea en runtime un Add File mal formado antes del nativo', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Add File: added.txt
++fresh
+garbage
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await expect(
+      hook['tool.execute.before'](
+        { tool: 'apply_patch', directory: root },
+        output,
+      ),
+    ).rejects.toThrow(
+      'apply_patch validation failed: Invalid patch format: unexpected line in Add File body: garbage',
+    );
+
+    expect(output.args.patchText).toBe(patchText);
+  });
+
+  test('bloquea errores internos del guard antes del nativo', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    const lockedDir = path.join(root, 'locked');
+    await mkdir(lockedDir, { recursive: true });
+    await chmod(lockedDir, 0o000);
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Add File: locked/child.txt
++fresh
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    try {
+      await expect(
+        hook['tool.execute.before'](
+          { tool: 'apply_patch', directory: root },
+          output,
+        ),
+      ).rejects.toThrow('apply_patch internal error:');
+
+      expect(output.args.patchText).toBe(patchText);
+    } finally {
+      await chmod(lockedDir, 0o755);
+    }
+  });
+
+  test('bloquea un caso indentado peligroso como verification', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(
+      root,
+      'sample.yml',
+      'root:\n  child:\n    enabled: false\nnext: true\n',
+    );
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: sample.yml
+@@
+-enabled: false
++enabled: true
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await expect(
+      hook['tool.execute.before'](
+        { tool: 'apply_patch', directory: root },
+        output,
+      ),
+    ).rejects.toThrow(
+      'apply_patch verification failed: Failed to find expected lines',
+    );
+
+    expect(output.args.patchText).toBe(patchText);
+  });
+
+  test('reescribe inserción anclada para evitar EOF del nativo', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(
+      root,
+      'sample.txt',
+      'top\nanchor-insert\nafter-anchor\nend\n',
+    );
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: sample.txt
+@@ anchor-insert
++middle-inserted
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await hook['tool.execute.before'](
+      { tool: 'apply_patch', directory: root },
+      output,
+    );
+
+    const changes = await preparePatchChanges(
+      root,
+      output.args.patchText as string,
+      DEFAULT_OPTIONS,
+    );
+    await applyPreparedChanges(changes);
+    expect(await readFile(path.join(root, 'sample.txt'), 'utf-8')).toBe(
+      'top\nanchor-insert\nmiddle-inserted\nafter-anchor\nend\n',
+    );
+  });
+
+  test('bloquea una inserción pura si falta el anchor', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(root, 'sample.txt', 'top\nafter-anchor\nend\n');
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: sample.txt
+@@ anchor-insert
++middle-inserted
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await expect(
+      hook['tool.execute.before'](
+        { tool: 'apply_patch', directory: root },
+        output,
+      ),
+    ).rejects.toThrow(
+      'apply_patch verification failed: Failed to find insertion anchor',
+    );
+
+    expect(output.args.patchText).toBe(patchText);
+  });
+
+  test('bloquea una inserción pura si el anchor es ambiguo', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(
+      root,
+      'sample.txt',
+      'top\nanchor-insert\nafter-first\nsplit\nanchor-insert\nafter-second\nend\n',
+    );
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: sample.txt
+@@ anchor-insert
++middle-inserted
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await expect(
+      hook['tool.execute.before'](
+        { tool: 'apply_patch', directory: root },
+        output,
+      ),
+    ).rejects.toThrow(
+      'apply_patch verification failed: Insertion anchor was ambiguous',
+    );
+
+    expect(output.args.patchText).toBe(patchText);
+  });
+
+  test('bloquea ambigüedad real del patch antes del nativo', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(
+      root,
+      'sample.txt',
+      'left\nstale-one\nright\nseparator\nleft\nstale-two\nright\n',
+    );
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: sample.txt
+@@
+ left
+-old
++new
+ right
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await expect(
+      hook['tool.execute.before'](
+        { tool: 'apply_patch', directory: root },
+        output,
+      ),
+    ).rejects.toThrow('apply_patch verification failed:');
+
+    expect(output.args.patchText).toBe(patchText);
+  });
+
+  test('reescribe solo el hunk update en un patch con add + update', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(
+      root,
+      'sample.txt',
+      'top\nprefix\nstale-value\nsuffix\n',
+    );
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Add File: added.txt
++fresh
+*** Update File: sample.txt
+@@ top
+ prefix
+-old-value
++new-value
+ suffix
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await hook['tool.execute.before'](
+      { tool: 'apply_patch', directory: root },
+      output,
+    );
+
+    const rewritten = parsePatch(output.args.patchText as string);
+    expect(rewritten.hunks[0]).toEqual({
+      type: 'add',
+      path: 'added.txt',
+      contents: 'fresh',
+    });
+    expect(rewritten.hunks[1]).toEqual({
+      type: 'update',
+      path: 'sample.txt',
+      chunks: [
+        {
+          old_lines: ['prefix', 'stale-value', 'suffix'],
+          new_lines: ['prefix', 'new-value', 'suffix'],
+          change_context: 'top',
+          is_end_of_file: undefined,
+        },
+      ],
+    });
+
+    const changes = await preparePatchChanges(
+      root,
+      output.args.patchText as string,
+      DEFAULT_OPTIONS,
+    );
+    await applyPreparedChanges(changes);
+    expect(await readFile(path.join(root, 'sample.txt'), 'utf-8')).toBe(
+      'top\nprefix\nnew-value\nsuffix\n',
+    );
+    expect(await readFile(path.join(root, 'added.txt'), 'utf-8')).toBe(
+      'fresh\n',
+    );
+  });
+
+  test('aborta temprano si el patch solo apunta fuera del root/worktree', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    const outside = path.join(path.dirname(root), 'outside.txt');
+    await writeFile(outside, 'outside\n', 'utf-8');
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: ../outside.txt
+@@
+-outside
++changed
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await expect(
+      hook['tool.execute.before'](
+        { tool: 'apply_patch', directory: root },
+        output,
+      ),
+    ).rejects.toThrow(
+      `apply_patch blocked: patch contains path outside workspace root: ${outside}`,
+    );
+
+    expect(output.args.patchText).toBe(patchText);
+    expect(await readFile(outside, 'utf-8')).toBe('outside\n');
+  });
+
+  test('aborta temprano y no aplica nada si un patch mixto tiene rutas fuera', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    const outsideDir = await createTempDir('apply-patch-hook-outside-');
+    await writeFixture(root, 'sample.txt', 'prefix\nstale-value\nsuffix\n');
+    await writeFixture(outsideDir, 'outside.txt', 'legacy\n');
+    const hook = createHook();
+    const outsideAdded = path.join(path.dirname(root), 'outside-added.txt');
+    const patchText = `*** Begin Patch
+*** Add File: ../outside-added.txt
++fresh
+*** Update File: sample.txt
+@@
+ prefix
+-old-value
++new-value
+ suffix
+*** Delete File: ../${path.basename(outsideDir)}/outside.txt
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await expect(
+      hook['tool.execute.before'](
+        { tool: 'apply_patch', directory: root },
+        output,
+      ),
+    ).rejects.toThrow(
+      `apply_patch blocked: patch contains path outside workspace root: ${outsideAdded}`,
+    );
+
+    expect(output.args.patchText).toBe(patchText);
+    expect(await readFile(path.join(root, 'sample.txt'), 'utf-8')).toBe(
+      'prefix\nstale-value\nsuffix\n',
+    );
+    expect(await stat(outsideAdded).catch(() => null)).toBeNull();
+    expect(await readFile(path.join(outsideDir, 'outside.txt'), 'utf-8')).toBe(
+      'legacy\n',
+    );
+  });
+
+  test('mantiene el comportamiento normal para patches íntegramente dentro', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(root, 'sample.txt', 'alpha\nbeta\n');
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: sample.txt
+@@
+-alpha
++omega
+ beta
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await expect(
+      hook['tool.execute.before'](
+        { tool: 'apply_patch', directory: root },
+        output,
+      ),
+    ).resolves.toBeUndefined();
+
+    expect(output.args.patchText).toBe(patchText);
+  });
+
+  test('no expone hook tool.execute.after', () => {
+    const hook = createHook() as Record<string, unknown>;
+
+    expect(hook['tool.execute.after']).toBeUndefined();
+  });
+
+  test('no altera un patch exacto', async () => {
+    const root = await createTempDir('apply-patch-hook-');
+    await writeFixture(root, 'sample.txt', 'alpha\nbeta\n');
+    const hook = createHook();
+    const patchText = `*** Begin Patch
+*** Update File: sample.txt
+@@
+-alpha
++omega
+ beta
+*** End Patch`;
+    const output = { args: { patchText } };
+
+    await hook['tool.execute.before'](
+      { tool: 'apply_patch', directory: root },
+      output,
+    );
+
+    expect(output.args.patchText).toBe(patchText);
+  });
+});

+ 113 - 0
src/hooks/apply-patch/index.ts

@@ -0,0 +1,113 @@
+import type { PluginInput } from '@opencode-ai/plugin';
+
+import { log } from '../../utils/logger';
+import {
+  createApplyPatchInternalError,
+  getApplyPatchErrorDetails,
+  isApplyPatchError,
+  isApplyPatchVerificationError,
+} from './errors';
+import { rewritePatch } from './operations';
+import type { ApplyPatchRuntimeOptions } from './types';
+
+const APPLY_PATCH_RESCUE_OPTIONS: ApplyPatchRuntimeOptions = {
+  prefixSuffix: true,
+  lcsRescue: true,
+};
+
+interface ToolExecuteBeforeInput {
+  tool: string;
+  directory?: string;
+}
+
+interface ToolExecuteBeforeOutput {
+  args?: {
+    patchText?: unknown;
+    [key: string]: unknown;
+  };
+}
+
+export function createApplyPatchHook(ctx: PluginInput) {
+  function logHookStatus(
+    state:
+      | 'rewrite'
+      | 'unchanged'
+      | 'blocked'
+      | 'validation'
+      | 'verification'
+      | 'internal',
+    data?: Record<string, unknown>,
+  ) {
+    log(`apply-patch hook ${state}`, data);
+  }
+
+  return {
+    'tool.execute.before': async (
+      input: ToolExecuteBeforeInput,
+      output: ToolExecuteBeforeOutput,
+    ): Promise<void> => {
+      if (input.tool !== 'apply_patch') {
+        return;
+      }
+
+      if (typeof output.args?.patchText !== 'string') {
+        return;
+      }
+
+      const root = input.directory || ctx.directory || process.cwd();
+      const worktree = ctx.worktree || root;
+
+      try {
+        const result = await rewritePatch(
+          root,
+          output.args.patchText,
+          APPLY_PATCH_RESCUE_OPTIONS,
+          worktree,
+        );
+
+        if (result.changed) {
+          output.args.patchText = result.patchText;
+          logHookStatus('rewrite', {
+            rewrittenChunks: result.rewrittenChunks,
+            totalChunks: result.totalChunks,
+            strategies: result.rewriteModes,
+          });
+          return;
+        }
+
+        logHookStatus('unchanged', {
+          rewrittenChunks: 0,
+          totalChunks: result.totalChunks,
+        });
+        return;
+      } catch (error) {
+        const normalizedError = isApplyPatchError(error)
+          ? error
+          : createApplyPatchInternalError(
+              `Unexpected hook failure before native apply: ${error instanceof Error ? error.message : String(error)}`,
+              error,
+            );
+        const details = getApplyPatchErrorDetails(normalizedError);
+
+        logHookStatus(
+          isApplyPatchVerificationError(normalizedError)
+            ? 'verification'
+            : normalizedError.kind === 'validation'
+              ? 'validation'
+              : normalizedError.kind === 'internal'
+                ? 'internal'
+                : 'blocked',
+          {
+            kind: details?.kind ?? 'internal',
+            code: details?.code ?? 'internal_unexpected',
+            reason: normalizedError.message,
+            failOpen: false,
+            rescueOptions: APPLY_PATCH_RESCUE_OPTIONS,
+            rewriteStage: 'before-native',
+          },
+        );
+        throw normalizedError;
+      }
+    },
+  };
+}

+ 175 - 0
src/hooks/apply-patch/matching.test.ts

@@ -0,0 +1,175 @@
+import { describe, expect, test } from 'bun:test';
+
+import {
+  autoRescueComparators,
+  permissiveComparators,
+  prefix,
+  rescueByLcs,
+  rescueByPrefixSuffix,
+  seek,
+  seekMatch,
+  suffix,
+} from './matching';
+
+describe('apply-patch/matching', () => {
+  test('seek encuentra coincidencias con unicode y trim-end', () => {
+    expect(seek(['console.log(“hola”);  '], ['console.log("hola");'], 0)).toBe(
+      0,
+    );
+  });
+
+  test('seek no rescata coincidencias trim-only con indentación distinta', () => {
+    expect(seek(['  console.log("hola");'], ['console.log("hola");'], 0)).toBe(
+      -1,
+    );
+  });
+
+  test('prefix y suffix detectan bordes comunes', () => {
+    const oldLines = [
+      'const title = "Hola";',
+      'old-value',
+      'const footer = "Fin";',
+    ];
+    const newLines = [
+      'const title = “Hola”;',
+      'new-value',
+      'const footer = “Fin”;',
+    ];
+
+    expect(prefix(oldLines, newLines)).toBe(1);
+    expect(suffix(oldLines, newLines, 1)).toBe(1);
+  });
+
+  test('rescueByPrefixSuffix rescata un bloque stale único', () => {
+    const result = rescueByPrefixSuffix(
+      ['top', 'const title = “Hola”;', 'stale-value', 'const footer = “Fin”;'],
+      ['const title = "Hola";', 'old-value', 'const footer = "Fin";'],
+      ['const title = “Hola”;', 'new-value', 'const footer = “Fin”;'],
+      0,
+    );
+
+    expect(result).toEqual({
+      kind: 'match',
+      hit: {
+        start: 2,
+        del: 1,
+        add: ['new-value'],
+      },
+    });
+  });
+
+  test('rescueByPrefixSuffix marca ambigüedad cuando hay varias ubicaciones', () => {
+    expect(
+      rescueByPrefixSuffix(
+        ['left', 'stale-one', 'right', 'gap', 'left', 'stale-two', 'right'],
+        ['left', 'old', 'right'],
+        ['left', 'new', 'right'],
+        0,
+      ),
+    ).toEqual({ kind: 'ambiguous', phase: 'prefix_suffix' });
+  });
+
+  test('rescueByLcs respeta el start y encuentra un candidato único', () => {
+    const result = rescueByLcs(
+      [
+        'head',
+        'left',
+        'stable-old',
+        'keep',
+        'right',
+        'gap',
+        'anchor',
+        'left',
+        'stale-old',
+        'keep',
+        'right',
+        'tail',
+      ],
+      ['left', 'old', 'keep', 'right'],
+      ['left', 'new', 'keep', 'right'],
+      5,
+    );
+
+    expect(result).toEqual({
+      kind: 'match',
+      hit: {
+        start: 7,
+        del: 4,
+        add: ['left', 'new', 'keep', 'right'],
+      },
+    });
+  });
+
+  test('rescueByLcs marca ambigüedad cuando dos ventanas empatan sin bordes comunes', () => {
+    expect(
+      rescueByLcs(
+        ['head', 'alpha', 'beta', 'mid', 'alpha', 'beta', 'tail'],
+        ['alpha', 'beta'],
+        ['ALPHA', 'BETA'],
+        0,
+      ),
+    ).toEqual({ kind: 'ambiguous', phase: 'lcs' });
+  });
+
+  test('rescueByLcs rechaza ventanas con un solo borde coincidente aunque el score sea alto', () => {
+    expect(
+      rescueByLcs(
+        ['a', 'a', 'a', 'a', 'b', 'c'],
+        ['a', 'b', 'c', 'd'],
+        ['A', 'B', 'C', 'D'],
+        0,
+      ),
+    ).toEqual({ kind: 'miss' });
+  });
+
+  test('rescueByLcs poda un chunk desproporcionado aunque tenga bordes compatibles', () => {
+    const oldLines = Array.from({ length: 49 }, (_, index) => `line-${index}`);
+    const lines = [...oldLines];
+    lines[24] = 'line-24-stale';
+
+    expect(
+      rescueByLcs(
+        lines,
+        oldLines,
+        oldLines.map((line, index) => (index === 24 ? 'line-24-new' : line)),
+        0,
+      ),
+    ).toEqual({ kind: 'miss' });
+  });
+
+  test('rescueByLcs descarta una ventana poco plausible antes del scoring caro', () => {
+    expect(
+      rescueByLcs(
+        ['left', 'noise-a', 'keep', 'noise-b', 'right'],
+        ['left', 'old-a', 'old-b', 'old-c', 'right'],
+        ['left', 'new-a', 'new-b', 'new-c', 'right'],
+        0,
+      ),
+    ).toEqual({ kind: 'miss' });
+  });
+
+  test('seek empareja comillas curly y straight mezcladas', () => {
+    expect(
+      seek(
+        ['const title = “it’s ready”;'],
+        ['const title = "it\'s ready";'],
+        0,
+      ),
+    ).toBe(0);
+  });
+
+  test('seekMatch informa cuando el match solo fue tolerante y seguro', () => {
+    expect(
+      seekMatch(['console.log(“hola”);  '], ['console.log("hola");'], 0),
+    ).toEqual({
+      index: 0,
+      comparator: 'unicode-trim-end',
+      exact: false,
+    });
+  });
+
+  test('separación de comparadores distingue rescate seguro y comparadores permisivos', () => {
+    expect(autoRescueComparators).toHaveLength(4);
+    expect(permissiveComparators).toHaveLength(6);
+  });
+});

+ 455 - 0
src/hooks/apply-patch/matching.ts

@@ -0,0 +1,455 @@
+import { normalizeUnicode } from './codec';
+import type {
+  LineComparator,
+  MatchComparatorName,
+  MatchHit,
+  RescueResult,
+  SeekHit,
+} from './types';
+
+type NamedComparator = {
+  name: MatchComparatorName;
+  exact: boolean;
+  same: LineComparator;
+};
+
+const AUTO_RESCUE_COMPARATOR_NAMES = new Set<MatchComparatorName>([
+  'exact',
+  'unicode',
+  'trim-end',
+  'unicode-trim-end',
+]);
+
+export function equalExact(a: string, b: string): boolean {
+  return a === b;
+}
+
+export function equalUnicodeExact(a: string, b: string): boolean {
+  return normalizeUnicode(a) === normalizeUnicode(b);
+}
+
+export function equalTrimEnd(a: string, b: string): boolean {
+  return a.trimEnd() === b.trimEnd();
+}
+
+export function equalUnicodeTrimEnd(a: string, b: string): boolean {
+  return normalizeUnicode(a.trimEnd()) === normalizeUnicode(b.trimEnd());
+}
+
+export function equalTrim(a: string, b: string): boolean {
+  return a.trim() === b.trim();
+}
+
+export function equalUnicodeTrim(a: string, b: string): boolean {
+  return normalizeUnicode(a.trim()) === normalizeUnicode(b.trim());
+}
+
+const comparatorEntries: NamedComparator[] = [
+  { name: 'exact', exact: true, same: equalExact },
+  { name: 'unicode', exact: false, same: equalUnicodeExact },
+  { name: 'trim-end', exact: false, same: equalTrimEnd },
+  {
+    name: 'unicode-trim-end',
+    exact: false,
+    same: equalUnicodeTrimEnd,
+  },
+  { name: 'trim', exact: false, same: equalTrim },
+  { name: 'unicode-trim', exact: false, same: equalUnicodeTrim },
+];
+
+const autoRescueComparatorEntries = comparatorEntries.filter((entry) =>
+  AUTO_RESCUE_COMPARATOR_NAMES.has(entry.name),
+);
+
+const MAX_LCS_CHUNK_LINES = 48;
+const MAX_LCS_CANDIDATES = 64;
+
+export const autoRescueComparators: LineComparator[] =
+  autoRescueComparatorEntries.map((entry) => entry.same);
+
+// Full-trim comparators remain available as explicit utilities, but stay out
+// of automatic canonicalization because they can cross indentation levels and
+// rescue semantically unsafe patches.
+export const permissiveComparators: LineComparator[] = comparatorEntries.map(
+  (entry) => entry.same,
+);
+
+function tryMatch(
+  lines: string[],
+  pattern: string[],
+  start: number,
+  comparator: NamedComparator,
+  eof: boolean,
+): SeekHit | undefined {
+  if (eof) {
+    const at = lines.length - pattern.length;
+    if (at >= start) {
+      let ok = true;
+      for (let index = 0; index < pattern.length; index += 1) {
+        if (!comparator.same(lines[at + index], pattern[index])) {
+          ok = false;
+          break;
+        }
+      }
+
+      if (ok) {
+        return {
+          index: at,
+          comparator: comparator.name,
+          exact: comparator.exact,
+        };
+      }
+    }
+  }
+
+  for (let index = start; index <= lines.length - pattern.length; index += 1) {
+    let ok = true;
+
+    for (let inner = 0; inner < pattern.length; inner += 1) {
+      if (!comparator.same(lines[index + inner], pattern[inner])) {
+        ok = false;
+        break;
+      }
+    }
+
+    if (ok) {
+      return {
+        index,
+        comparator: comparator.name,
+        exact: comparator.exact,
+      };
+    }
+  }
+
+  return undefined;
+}
+
+export function seekMatch(
+  lines: string[],
+  pattern: string[],
+  start: number,
+  eof = false,
+): SeekHit | undefined {
+  if (pattern.length === 0) {
+    return undefined;
+  }
+
+  for (const comparator of autoRescueComparatorEntries) {
+    const hit = tryMatch(lines, pattern, start, comparator, eof);
+    if (hit) {
+      return hit;
+    }
+  }
+
+  return undefined;
+}
+
+export function seek(
+  lines: string[],
+  pattern: string[],
+  start: number,
+  eof = false,
+): number {
+  return seekMatch(lines, pattern, start, eof)?.index ?? -1;
+}
+
+export function list(
+  lines: string[],
+  pattern: string[],
+  start: number,
+  same: LineComparator,
+): number[] {
+  if (pattern.length === 0) {
+    return [];
+  }
+
+  const out: number[] = [];
+
+  for (let index = start; index <= lines.length - pattern.length; index += 1) {
+    let ok = true;
+
+    for (let inner = 0; inner < pattern.length; inner += 1) {
+      if (!same(lines[index + inner], pattern[inner])) {
+        ok = false;
+        break;
+      }
+    }
+
+    if (ok) {
+      out.push(index);
+    }
+  }
+
+  return out;
+}
+
+export function sameRescueLine(a: string, b: string): boolean {
+  return equalExact(a, b) || equalUnicodeExact(a, b);
+}
+
+export function prefix(old_lines: string[], new_lines: string[]): number {
+  let index = 0;
+
+  while (
+    index < old_lines.length &&
+    index < new_lines.length &&
+    sameRescueLine(old_lines[index], new_lines[index])
+  ) {
+    index += 1;
+  }
+
+  return index;
+}
+
+export function suffix(
+  old_lines: string[],
+  new_lines: string[],
+  prefixLength: number,
+): number {
+  let index = 0;
+
+  while (
+    old_lines.length - index - 1 >= prefixLength &&
+    new_lines.length - index - 1 >= prefixLength &&
+    sameRescueLine(
+      old_lines[old_lines.length - index - 1],
+      new_lines[new_lines.length - index - 1],
+    )
+  ) {
+    index += 1;
+  }
+
+  return index;
+}
+
+export function rescueByPrefixSuffix(
+  lines: string[],
+  old_lines: string[],
+  new_lines: string[],
+  start: number,
+): RescueResult {
+  const prefixLength = prefix(old_lines, new_lines);
+  const suffixLength = suffix(old_lines, new_lines, prefixLength);
+
+  if (prefixLength === 0 || suffixLength === 0) {
+    return { kind: 'miss' };
+  }
+
+  const left = old_lines.slice(0, prefixLength);
+  const right = old_lines.slice(old_lines.length - suffixLength);
+  const middle = new_lines.slice(prefixLength, new_lines.length - suffixLength);
+  const hits = new Map<string, MatchHit>();
+
+  for (const same of autoRescueComparators) {
+    for (const leftIndex of list(lines, left, start, same)) {
+      const from = leftIndex + left.length;
+
+      for (const rightIndex of list(lines, right, from, same)) {
+        const key = `${from}:${rightIndex}`;
+        hits.set(key, {
+          start: from,
+          del: rightIndex - from,
+          add: [...middle],
+        });
+      }
+    }
+  }
+
+  if (hits.size === 0) {
+    return { kind: 'miss' };
+  }
+
+  if (hits.size > 1) {
+    return { kind: 'ambiguous', phase: 'prefix_suffix' };
+  }
+
+  return { kind: 'match', hit: [...hits.values()][0] };
+}
+
+export function score(a: string[], b: string[]): number {
+  const dp = Array.from({ length: a.length + 1 }, () =>
+    Array<number>(b.length + 1).fill(0),
+  );
+
+  for (let i = 1; i <= a.length; i += 1) {
+    for (let j = 1; j <= b.length; j += 1) {
+      dp[i][j] =
+        normalizeUnicode(a[i - 1].trim()) === normalizeUnicode(b[j - 1].trim())
+          ? dp[i - 1][j - 1] + 1
+          : Math.max(dp[i - 1][j], dp[i][j - 1]);
+    }
+  }
+
+  return dp[a.length][b.length];
+}
+
+function normalizeLcsLine(line: string): string {
+  return normalizeUnicode(line).trim();
+}
+
+function countLcsUpperBound(a: string[], b: string[]): number {
+  const counts = new Map<string, number>();
+
+  for (const line of a) {
+    const key = normalizeLcsLine(line);
+    counts.set(key, (counts.get(key) ?? 0) + 1);
+  }
+
+  let shared = 0;
+  for (const line of b) {
+    const key = normalizeLcsLine(line);
+    const available = counts.get(key) ?? 0;
+    if (available === 0) {
+      continue;
+    }
+
+    shared += 1;
+    if (available === 1) {
+      counts.delete(key);
+      continue;
+    }
+
+    counts.set(key, available - 1);
+  }
+
+  return shared;
+}
+
+function hasStableBorders(oldLines: string[], candidate: string[]): boolean {
+  if (oldLines.length === 0 || candidate.length !== oldLines.length) {
+    return false;
+  }
+
+  // LCS keeps its current scoring, but only competes across windows whose
+  // edges pass safe comparators. Ignoring full-trim here prevents automatic
+  // rescue from changing indentation depth in format-sensitive files.
+  const same = autoRescueComparators.some((compare) =>
+    compare(oldLines[0], candidate[0]),
+  );
+  if (!same) {
+    return false;
+  }
+
+  if (oldLines.length === 1) {
+    return true;
+  }
+
+  return autoRescueComparators.some((compare) =>
+    compare(oldLines[oldLines.length - 1], candidate[candidate.length - 1]),
+  );
+}
+
+function collectBorderAnchoredStarts(
+  lines: string[],
+  oldLines: string[],
+  start: number,
+): number[] {
+  if (oldLines.length === 0) {
+    return [];
+  }
+
+  const firstHits = new Set<number>();
+  const lastHits = new Set<number>();
+  const lastLine = oldLines[oldLines.length - 1];
+
+  for (const same of autoRescueComparators) {
+    for (const index of list(lines, [oldLines[0]], start, same)) {
+      firstHits.add(index);
+    }
+
+    for (const index of list(lines, [lastLine], start, same)) {
+      lastHits.add(index);
+    }
+  }
+
+  const candidates: number[] = [];
+  for (const index of [...firstHits].sort((a, b) => a - b)) {
+    const end = index + oldLines.length - 1;
+    if (end >= lines.length || !lastHits.has(end)) {
+      continue;
+    }
+
+    const candidate = lines.slice(index, index + oldLines.length);
+    if (!hasStableBorders(oldLines, candidate)) {
+      continue;
+    }
+
+    candidates.push(index);
+  }
+
+  return candidates;
+}
+
+export function rescueByLcs(
+  lines: string[],
+  old_lines: string[],
+  new_lines: string[],
+  start: number,
+): RescueResult {
+  if (old_lines.length === 0 || lines.length === 0) {
+    return { kind: 'miss' };
+  }
+
+  const from = start;
+  const to = lines.length - old_lines.length;
+
+  if (to < from) {
+    return { kind: 'miss' };
+  }
+
+  if (old_lines.length > MAX_LCS_CHUNK_LINES) {
+    return { kind: 'miss' };
+  }
+
+  const needed =
+    old_lines.length <= 2
+      ? old_lines.length
+      : Math.max(2, Math.ceil(old_lines.length * 0.7));
+  const candidates = collectBorderAnchoredStarts(lines, old_lines, start);
+
+  if (candidates.length === 0 || candidates.length > MAX_LCS_CANDIDATES) {
+    return { kind: 'miss' };
+  }
+
+  let best: MatchHit | undefined;
+  let bestScore = 0;
+  let ties = 0;
+
+  for (const index of candidates) {
+    if (index < from || index > to) {
+      continue;
+    }
+
+    const window = lines.slice(index, index + old_lines.length);
+    if (countLcsUpperBound(old_lines, window) < needed) {
+      continue;
+    }
+
+    const current = score(old_lines, window);
+
+    if (current > bestScore) {
+      bestScore = current;
+      ties = 1;
+      best = {
+        start: index,
+        del: old_lines.length,
+        add: [...new_lines],
+      };
+      continue;
+    }
+
+    if (current === bestScore && current > 0) {
+      ties += 1;
+    }
+  }
+
+  if (!best || bestScore < needed) {
+    return { kind: 'miss' };
+  }
+
+  if (ties > 1) {
+    return { kind: 'ambiguous', phase: 'lcs' };
+  }
+
+  return { kind: 'match', hit: best };
+}

File diff suppressed because it is too large
+ 1397 - 0
src/hooks/apply-patch/operations.test.ts


+ 3 - 0
src/hooks/apply-patch/operations.ts

@@ -0,0 +1,3 @@
+export { parseValidatedPatch } from './execution-context';
+export { applyPreparedChanges, preparePatchChanges } from './prepared-changes';
+export { rewritePatch, rewritePatchText } from './rewrite';

+ 9 - 0
src/hooks/apply-patch/patch.ts

@@ -0,0 +1,9 @@
+// Compatibility shim for local deep imports; keep only the stable runtime
+// surface here. `applyPreparedChanges()` remains in operations.ts as an
+// internal best-effort helper for local tests/helpers.
+export { parsePatch } from './codec';
+export {
+  preparePatchChanges,
+  rewritePatch,
+  rewritePatchText,
+} from './operations';

+ 400 - 0
src/hooks/apply-patch/prepared-changes.ts

@@ -0,0 +1,400 @@
+import { randomUUID } from 'node:crypto';
+import * as fs from 'node:fs/promises';
+import path from 'node:path';
+
+import {
+  createApplyPatchInternalError,
+  createApplyPatchValidationError,
+  createApplyPatchVerificationError,
+  ensureApplyPatchError,
+  getErrorMessage,
+} from './errors';
+import {
+  createPatchExecutionContext,
+  isMissingPathError,
+  resolvePreparedUpdate,
+  stageAddedText,
+} from './execution-context';
+import type { ApplyPatchRuntimeOptions, PreparedChange } from './types';
+
+function isNormalizedAbsolutePath(filePath: string): boolean {
+  return path.isAbsolute(filePath) && path.normalize(filePath) === filePath;
+}
+
+function assertPreparedChangePath(
+  value: unknown,
+  field: 'file' | 'move',
+  index: number,
+): asserts value is string {
+  if (typeof value !== 'string' || value.length === 0) {
+    throw createApplyPatchValidationError(
+      `Prepared changes require a non-empty string ${field} at index ${index}`,
+    );
+  }
+
+  if (!isNormalizedAbsolutePath(value)) {
+    throw createApplyPatchValidationError(
+      `Prepared changes require absolute normalized ${field} paths at index ${index}: ${value}`,
+    );
+  }
+}
+
+function assertPreparedChangesContract(
+  changes: readonly PreparedChange[],
+): void {
+  for (const [index, change] of changes.entries()) {
+    if (!change || typeof change !== 'object') {
+      throw createApplyPatchValidationError(
+        `Prepared change at index ${index} must be an object`,
+      );
+    }
+
+    if (!('type' in change)) {
+      throw createApplyPatchValidationError(
+        `Prepared change at index ${index} is missing type`,
+      );
+    }
+
+    assertPreparedChangePath(change.file, 'file', index);
+
+    if (change.type === 'add') {
+      if (typeof change.text !== 'string') {
+        throw createApplyPatchValidationError(
+          `Prepared add at index ${index} is missing text`,
+        );
+      }
+      continue;
+    }
+
+    if (change.type === 'delete') {
+      continue;
+    }
+
+    if (change.type === 'update') {
+      if (typeof change.text !== 'string') {
+        throw createApplyPatchValidationError(
+          `Prepared update at index ${index} is missing text`,
+        );
+      }
+
+      if (change.move !== undefined) {
+        assertPreparedChangePath(change.move, 'move', index);
+      }
+
+      continue;
+    }
+
+    throw createApplyPatchValidationError(
+      `Prepared change at index ${index} has unsupported type`,
+    );
+  }
+}
+
+export async function preparePatchChanges(
+  root: string,
+  patchText: string,
+  cfg: ApplyPatchRuntimeOptions,
+  worktree?: string,
+): Promise<PreparedChange[]> {
+  try {
+    const { hunks, staged, getPreparedFileState, assertPreparedPathMissing } =
+      await createPatchExecutionContext(root, patchText, worktree);
+    const changes: PreparedChange[] = [];
+
+    for (const hunk of hunks) {
+      const filePath = path.resolve(root, hunk.path);
+
+      if (hunk.type === 'add') {
+        await assertPreparedPathMissing(filePath, 'add');
+        const text = stageAddedText(hunk.contents);
+        changes.push({
+          type: 'add',
+          file: filePath,
+          text,
+        });
+        staged.set(filePath, { exists: true, text, derived: true });
+        continue;
+      }
+
+      if (hunk.type === 'delete') {
+        await getPreparedFileState(filePath, 'delete');
+
+        changes.push({ type: 'delete', file: filePath });
+        staged.set(filePath, { exists: false, derived: true });
+        continue;
+      }
+
+      const current = await getPreparedFileState(filePath, 'update');
+      if (!current.exists) {
+        throw createApplyPatchVerificationError(
+          `Failed to read file to update: ${filePath}`,
+        );
+      }
+
+      const move = hunk.move_path
+        ? path.resolve(root, hunk.move_path)
+        : undefined;
+      if (move && move !== filePath) {
+        await assertPreparedPathMissing(move, 'move');
+      }
+      const { nextText } = resolvePreparedUpdate(
+        filePath,
+        current.text,
+        hunk,
+        cfg,
+      );
+
+      changes.push({
+        type: 'update',
+        file: filePath,
+        move,
+        text: nextText,
+      });
+
+      if (move && move !== filePath) {
+        staged.set(filePath, { exists: false, derived: true });
+        staged.set(move, {
+          exists: true,
+          text: nextText,
+          mode: current.mode,
+          derived: true,
+        });
+        continue;
+      }
+
+      staged.set(filePath, {
+        exists: true,
+        text: nextText,
+        mode: current.mode,
+        derived: true,
+      });
+    }
+
+    return changes;
+  } catch (error) {
+    throw ensureApplyPatchError(error, 'Unexpected prepare failure');
+  }
+}
+
+type FileSnapshot =
+  | { type: 'missing' }
+  | {
+      type: 'file';
+      text: string;
+      mode: number;
+    };
+
+async function readSnapshot(filePath: string): Promise<FileSnapshot> {
+  try {
+    const stat = await fs.stat(filePath);
+    if (stat.isDirectory()) {
+      throw createApplyPatchInternalError(
+        `Refusing to overwrite directory while applying prepared changes: ${filePath}`,
+      );
+    }
+
+    return {
+      type: 'file',
+      text: await fs.readFile(filePath, 'utf-8'),
+      mode: stat.mode & 0o7777,
+    };
+  } catch (error) {
+    if (isMissingPathError(error)) {
+      return { type: 'missing' };
+    }
+
+    throw createApplyPatchInternalError(
+      `Failed to snapshot file before apply: ${filePath}`,
+      error,
+    );
+  }
+}
+
+async function restoreSnapshot(
+  filePath: string,
+  snapshot: FileSnapshot,
+): Promise<void> {
+  if (snapshot.type === 'missing') {
+    await fs.rm(filePath, { force: true });
+    return;
+  }
+
+  await fs.mkdir(path.dirname(filePath), { recursive: true });
+  await writeFileAtomically(filePath, snapshot.text, snapshot.mode);
+}
+
+function createTempSiblingPath(target: string): string {
+  return path.join(
+    path.dirname(target),
+    `.${path.basename(target)}.apply-patch-${randomUUID()}.tmp`,
+  );
+}
+
+async function writeFileAtomically(
+  target: string,
+  text: string,
+  mode?: number,
+): Promise<void> {
+  const tempPath = createTempSiblingPath(target);
+
+  try {
+    await fs.mkdir(path.dirname(target), { recursive: true });
+    await fs.writeFile(tempPath, text, 'utf-8');
+    if (mode !== undefined) {
+      await fs.chmod(tempPath, mode);
+    }
+    await fs.rename(tempPath, target);
+  } finally {
+    await fs.rm(tempPath, { force: true }).catch(() => undefined);
+  }
+}
+
+function getSnapshotMode(snapshot: FileSnapshot): number | undefined {
+  return snapshot.type === 'file' ? snapshot.mode : undefined;
+}
+
+function assertPreparedApplyPreconditions(
+  changes: PreparedChange[],
+  snapshots: Map<string, FileSnapshot>,
+): void {
+  const staged = new Map<string, FileSnapshot['type']>();
+
+  function pathState(filePath: string): FileSnapshot['type'] {
+    if (staged.has(filePath)) {
+      return staged.get(filePath) ?? 'missing';
+    }
+
+    return snapshots.get(filePath)?.type ?? 'missing';
+  }
+
+  for (const change of changes) {
+    if (change.type === 'add') {
+      if (pathState(change.file) !== 'missing') {
+        throw createApplyPatchVerificationError(
+          `Prepared add target already exists: ${change.file}`,
+        );
+      }
+
+      staged.set(change.file, 'file');
+
+      continue;
+    }
+
+    if (change.type === 'delete') {
+      if (pathState(change.file) !== 'file') {
+        throw createApplyPatchVerificationError(
+          `Prepared delete source does not exist: ${change.file}`,
+        );
+      }
+
+      staged.set(change.file, 'missing');
+      continue;
+    }
+
+    if (pathState(change.file) !== 'file') {
+      throw createApplyPatchVerificationError(
+        change.move && change.move !== change.file
+          ? `Prepared move source does not exist: ${change.file}`
+          : `Prepared update source does not exist: ${change.file}`,
+      );
+    }
+
+    if (change.move && change.move !== change.file) {
+      if (pathState(change.move) !== 'missing') {
+        throw createApplyPatchVerificationError(
+          `Prepared move destination already exists: ${change.move}`,
+        );
+      }
+
+      staged.set(change.file, 'missing');
+      staged.set(change.move, 'file');
+      continue;
+    }
+
+    staged.set(change.file, 'file');
+  }
+}
+
+/**
+ * Internal best-effort helper that applies the output of
+ * `preparePatchChanges()`: it snapshots all touched paths first and uses
+ * temp + rename for writes to regular files. It is not a universal multi-file
+ * transaction and is not perfect against concurrent external interference,
+ * but it avoids leaving silent partial states on normal apply failures.
+ *
+ * Contract: although it is exported for local tests/helpers, its expected
+ * input is the already prepared output of `preparePatchChanges()`. If it
+ * receives manual arrays, it revalidates the basic shape
+ * (types/text/normalized absolute paths) and filesystem invariants: it
+ * rejects updates/deletes/moves whose source does not exist, and add/move
+ * operations whose destination is already occupied.
+ */
+export async function applyPreparedChanges(
+  changes: PreparedChange[],
+): Promise<void> {
+  assertPreparedChangesContract(changes);
+
+  const snapshots = new Map<string, FileSnapshot>();
+
+  for (const change of changes) {
+    if (!snapshots.has(change.file)) {
+      snapshots.set(change.file, await readSnapshot(change.file));
+    }
+
+    if (
+      change.type === 'update' &&
+      change.move &&
+      !snapshots.has(change.move)
+    ) {
+      snapshots.set(change.move, await readSnapshot(change.move));
+    }
+  }
+
+  assertPreparedApplyPreconditions(changes, snapshots);
+
+  try {
+    for (const change of changes) {
+      if (change.type === 'add') {
+        await writeFileAtomically(change.file, change.text);
+        continue;
+      }
+
+      if (change.type === 'delete') {
+        await fs.unlink(change.file);
+        continue;
+      }
+
+      if (change.move && change.move !== change.file) {
+        await writeFileAtomically(
+          change.move,
+          change.text,
+          getSnapshotMode(snapshots.get(change.file) ?? { type: 'missing' }),
+        );
+        await fs.unlink(change.file);
+        continue;
+      }
+
+      await writeFileAtomically(
+        change.file,
+        change.text,
+        getSnapshotMode(snapshots.get(change.file) ?? { type: 'missing' }),
+      );
+    }
+  } catch (error) {
+    const rollbackFailures: string[] = [];
+
+    for (const [filePath, snapshot] of [...snapshots.entries()].reverse()) {
+      try {
+        await restoreSnapshot(filePath, snapshot);
+      } catch (rollbackError) {
+        rollbackFailures.push(`${filePath}: ${getErrorMessage(rollbackError)}`);
+      }
+    }
+
+    const message = rollbackFailures.length
+      ? `Failed to apply prepared changes and rollback was incomplete: ${getErrorMessage(error)}; rollback issues: ${rollbackFailures.join('; ')}`
+      : `Failed to apply prepared changes; rolled back touched files: ${getErrorMessage(error)}`;
+
+    throw createApplyPatchInternalError(message, error);
+  }
+}

+ 375 - 0
src/hooks/apply-patch/resolution.test.ts

@@ -0,0 +1,375 @@
+import { describe, expect, test } from 'bun:test';
+import path from 'node:path';
+
+import {
+  applyHits,
+  deriveNewContent,
+  locateChunk,
+  readFileLines,
+  resolveChunkStart,
+  resolveUpdateChunks,
+} from './resolution';
+import { createTempDir, DEFAULT_OPTIONS, writeFixture } from './test-helpers';
+import type { PatchChunk } from './types';
+
+describe('apply-patch/resolution', () => {
+  test('readFileLines elimina la línea vacía sintética final', async () => {
+    const root = await createTempDir();
+    const file = path.join(root, 'sample.txt');
+    await writeFixture(root, 'sample.txt', 'alpha\nbeta\n');
+
+    expect(await readFileLines(file)).toEqual(['alpha', 'beta']);
+  });
+
+  test('resolveChunkStart usa change_context como ancla cuando existe', () => {
+    const chunk: PatchChunk = {
+      old_lines: [],
+      new_lines: ['middle'],
+      change_context: 'anchor',
+    };
+
+    expect(resolveChunkStart(['top', 'anchor', 'bottom'], chunk, 0)).toBe(2);
+  });
+
+  test('locateChunk rescata prefijo/sufijo y conserva new_lines', () => {
+    const chunk: PatchChunk = {
+      old_lines: [
+        'const title = "Hola";',
+        'old-value',
+        'const footer = "Fin";',
+      ],
+      new_lines: [
+        'const title = “Hola”;',
+        'new-value',
+        'const footer = “Fin”;',
+      ],
+    };
+
+    const resolved = locateChunk(
+      ['top', 'const title = “Hola”;', 'stale-value', 'const footer = “Fin”;'],
+      'sample.txt',
+      chunk,
+      0,
+      DEFAULT_OPTIONS,
+    );
+
+    expect(resolved.rewritten).toBe(true);
+    expect(resolved.canonical_old_lines).toEqual([
+      'const title = “Hola”;',
+      'stale-value',
+      'const footer = “Fin”;',
+    ]);
+    expect(resolved.canonical_new_lines).toEqual(chunk.new_lines);
+  });
+
+  test('locateChunk canoniza un match unicode tolerante', () => {
+    const chunk: PatchChunk = {
+      old_lines: ['const title = "Hola";'],
+      new_lines: ['const title = "Hola mundo";'],
+    };
+
+    const resolved = locateChunk(
+      ['const title = “Hola”;'],
+      'sample.txt',
+      chunk,
+      0,
+      DEFAULT_OPTIONS,
+    );
+
+    expect(resolved.rewritten).toBe(true);
+    expect(resolved.matchComparator).toBe('unicode');
+    expect(resolved.canonical_old_lines).toEqual(['const title = “Hola”;']);
+    expect(resolved.canonical_new_lines).toEqual([
+      'const title = "Hola mundo";',
+    ]);
+  });
+
+  test('locateChunk canoniza un match trim-end tolerante', () => {
+    const chunk: PatchChunk = {
+      old_lines: ['alpha'],
+      new_lines: ['omega'],
+    };
+
+    const resolved = locateChunk(
+      ['alpha  '],
+      'sample.txt',
+      chunk,
+      0,
+      DEFAULT_OPTIONS,
+    );
+
+    expect(resolved.rewritten).toBe(true);
+    expect(resolved.matchComparator).toBe('trim-end');
+    expect(resolved.canonical_old_lines).toEqual(['alpha  ']);
+    expect(resolved.canonical_new_lines).toEqual(['omega']);
+  });
+
+  test('locateChunk ya no rescata un stale trim-only', () => {
+    const chunk: PatchChunk = {
+      old_lines: ['alpha'],
+      new_lines: ['omega'],
+    };
+
+    expect(() =>
+      locateChunk([' alpha  '], 'sample.txt', chunk, 0, DEFAULT_OPTIONS),
+    ).toThrow('Failed to find expected lines');
+  });
+
+  test('locateChunk ya no canoniza un caso indentado peligroso', () => {
+    const chunk: PatchChunk = {
+      old_lines: ['enabled: false'],
+      new_lines: ['enabled: true'],
+    };
+
+    expect(() =>
+      locateChunk(
+        ['root:', '  child:', '    enabled: false', 'done: true'],
+        'sample.yml',
+        chunk,
+        0,
+        DEFAULT_OPTIONS,
+      ),
+    ).toThrow('Failed to find expected lines');
+  });
+
+  test('locateChunk conserva una blank line final real cuando existe en el archivo', () => {
+    const chunk: PatchChunk = {
+      old_lines: ['alpha', ''],
+      new_lines: ['omega', ''],
+    };
+
+    const resolved = locateChunk(
+      ['alpha', ''],
+      'sample.txt',
+      chunk,
+      0,
+      DEFAULT_OPTIONS,
+    );
+
+    expect(resolved.canonical_old_lines).toEqual(['alpha', '']);
+    expect(resolved.canonical_new_lines).toEqual(['omega', '']);
+  });
+
+  test('locateChunk falla si el patch agrega una blank line final inexistente', () => {
+    const chunk: PatchChunk = {
+      old_lines: ['alpha', ''],
+      new_lines: ['omega', ''],
+    };
+
+    expect(() =>
+      locateChunk(['alpha'], 'sample.txt', chunk, 0, DEFAULT_OPTIONS),
+    ).toThrow('Failed to find expected lines');
+  });
+
+  test('deriveNewContent resuelve actualizaciones EOF', async () => {
+    const root = await createTempDir();
+    const file = path.join(root, 'sample.txt');
+    await writeFixture(root, 'sample.txt', 'alpha\nbeta');
+
+    expect(
+      await deriveNewContent(
+        file,
+        [
+          {
+            old_lines: ['beta'],
+            new_lines: ['omega'],
+            is_end_of_file: true,
+          },
+        ],
+        DEFAULT_OPTIONS,
+      ),
+    ).toBe('alpha\nomega');
+  });
+
+  test('deriveNewContent preserva CRLF al recomponer contenido', async () => {
+    const root = await createTempDir();
+    const file = path.join(root, 'sample.txt');
+    await writeFixture(root, 'sample.txt', 'alpha\r\nbeta\r\ngamma\r\n');
+
+    expect(
+      await deriveNewContent(
+        file,
+        [
+          {
+            old_lines: ['alpha', 'beta', 'gamma'],
+            new_lines: ['alpha', 'BETA', 'gamma'],
+          },
+        ],
+        DEFAULT_OPTIONS,
+      ),
+    ).toBe('alpha\r\nBETA\r\ngamma\r\n');
+  });
+
+  test('deriveNewContent inserta bloque anclado sin desplazarlo a EOF', async () => {
+    const root = await createTempDir();
+    const file = path.join(root, 'sample.txt');
+    await writeFixture(root, 'sample.txt', 'top\nanchor\nbottom\n');
+
+    expect(
+      await deriveNewContent(
+        file,
+        [
+          {
+            old_lines: [],
+            new_lines: ['middle'],
+            change_context: 'anchor',
+          },
+        ],
+        DEFAULT_OPTIONS,
+      ),
+    ).toBe('top\nanchor\nmiddle\nbottom\n');
+  });
+
+  test('deriveNewContent soporta inserción pura al EOF con anchor único', async () => {
+    const root = await createTempDir();
+    const file = path.join(root, 'sample.txt');
+    await writeFixture(root, 'sample.txt', 'top\nanchor\n');
+
+    expect(
+      await deriveNewContent(
+        file,
+        [
+          {
+            old_lines: [],
+            new_lines: ['middle'],
+            change_context: 'anchor',
+          },
+        ],
+        DEFAULT_OPTIONS,
+      ),
+    ).toBe('top\nanchor\nmiddle\n');
+  });
+
+  test('resolveUpdateChunks canoniza inserción EOF con anchor tolerante', async () => {
+    const root = await createTempDir();
+    const file = path.join(root, 'sample.txt');
+    await writeFixture(root, 'sample.txt', 'top\n“anchor”\n');
+
+    const { resolved } = await resolveUpdateChunks(
+      file,
+      [
+        {
+          old_lines: [],
+          new_lines: ['middle'],
+          change_context: '"anchor"',
+        },
+      ],
+      DEFAULT_OPTIONS,
+    );
+
+    expect(resolved[0]).toMatchObject({
+      canonical_change_context: '“anchor”',
+      rewritten: true,
+      strategy: 'anchor',
+      matchComparator: 'unicode',
+    });
+  });
+
+  test('deriveNewContent falla si una inserción pura no encuentra su anchor', async () => {
+    const root = await createTempDir();
+    const file = path.join(root, 'sample.txt');
+    await writeFixture(root, 'sample.txt', 'top\nbottom\n');
+
+    await expect(
+      deriveNewContent(
+        file,
+        [
+          {
+            old_lines: [],
+            new_lines: ['middle'],
+            change_context: 'anchor',
+          },
+        ],
+        DEFAULT_OPTIONS,
+      ),
+    ).rejects.toThrow('Failed to find insertion anchor');
+  });
+
+  test('deriveNewContent falla si una inserción pura tiene anchor ambiguo', async () => {
+    const root = await createTempDir();
+    const file = path.join(root, 'sample.txt');
+    await writeFixture(
+      root,
+      'sample.txt',
+      'top\nanchor\none\nsplit\nanchor\ntwo\n',
+    );
+
+    await expect(
+      deriveNewContent(
+        file,
+        [
+          {
+            old_lines: [],
+            new_lines: ['middle'],
+            change_context: 'anchor',
+          },
+        ],
+        DEFAULT_OPTIONS,
+      ),
+    ).rejects.toThrow('Insertion anchor was ambiguous');
+  });
+
+  test('deriveNewContent falla si un chunk posterior queda ambiguo', async () => {
+    const root = await createTempDir();
+    const file = path.join(root, 'sample.txt');
+    await writeFixture(
+      root,
+      'sample.txt',
+      'alpha\none\nomega\nsplit\nleft\nstale-one\nright\ngap\nleft\nstale-two\nright\n',
+    );
+
+    await expect(
+      deriveNewContent(
+        file,
+        [
+          {
+            old_lines: ['one'],
+            new_lines: ['ONE'],
+          },
+          {
+            old_lines: ['left', 'old', 'right'],
+            new_lines: ['left', 'new', 'right'],
+          },
+        ],
+        DEFAULT_OPTIONS,
+      ),
+    ).rejects.toThrow('ambiguous');
+  });
+
+  test('deriveNewContent rescata un EOF stale y conserva el update final', async () => {
+    const root = await createTempDir();
+    const file = path.join(root, 'sample.txt');
+    await writeFixture(root, 'sample.txt', 'alpha\nstale\nomega');
+
+    expect(
+      await deriveNewContent(
+        file,
+        [
+          {
+            old_lines: ['alpha', 'old', 'omega'],
+            new_lines: ['alpha', 'new', 'omega'],
+            is_end_of_file: true,
+          },
+        ],
+        DEFAULT_OPTIONS,
+      ),
+    ).toBe('alpha\nnew\nomega');
+  });
+
+  test('applyHits preserva el salto de línea final', () => {
+    expect(
+      applyHits(['start', 'end'], [{ start: 0, del: 1, add: ['next'] }]),
+    ).toBe('next\nend\n');
+  });
+
+  test('applyHits puede preservar un archivo sin newline final', () => {
+    expect(
+      applyHits(
+        ['start', 'end'],
+        [{ start: 0, del: 1, add: ['next'] }],
+        '\n',
+        false,
+      ),
+    ).toBe('next\nend');
+  });
+});

+ 427 - 0
src/hooks/apply-patch/resolution.ts

@@ -0,0 +1,427 @@
+import * as fs from 'node:fs/promises';
+
+import {
+  autoRescueComparators,
+  list,
+  prefix,
+  rescueByLcs,
+  rescueByPrefixSuffix,
+  seek,
+  seekMatch,
+  suffix,
+} from './matching';
+import type {
+  ApplyPatchRescueStrategy,
+  ApplyPatchRuntimeOptions,
+  MatchComparatorName,
+  MatchHit,
+  PatchChunk,
+  ResolvedChunk,
+} from './types';
+
// Decomposed file text: the content lines plus the EOL flavor and
// trailing-newline flag needed to re-render the file faithfully after edits.
type FileLines = {
  lines: string[];
  eol: '\n' | '\r\n';
  hasFinalNewline: boolean;
};
+
+function splitFileLines(text: string): FileLines {
+  const eol = text.match(/\r\n|\n|\r/)?.[0] === '\r\n' ? '\r\n' : '\n';
+  const normalized = text.replace(/\r\n/g, '\n').replace(/\r/g, '\n');
+  const hasFinalNewline = normalized.endsWith('\n');
+  const lines = normalized.split('\n');
+  if (hasFinalNewline) {
+    lines.pop();
+  }
+
+  return { lines, eol, hasFinalNewline };
+}
+
+async function readFileLinesWithEol(file: string): Promise<FileLines> {
+  let text: string;
+
+  try {
+    text = await fs.readFile(file, 'utf-8');
+  } catch (error) {
+    throw new Error(`Failed to read file ${file}: ${error}`);
+  }
+
+  return splitFileLines(text);
+}
+
+export async function readFileLines(file: string): Promise<string[]> {
+  return (await readFileLinesWithEol(file)).lines;
+}
+
+export function resolveChunkStart(
+  lines: string[],
+  chunk: PatchChunk,
+  start: number,
+): number {
+  if (!chunk.change_context) {
+    return start;
+  }
+
+  const at = seek(lines, [chunk.change_context], start);
+  return at === -1 ? start : at + 1;
+}
+
+function resolveUniqueAnchor(
+  lines: string[],
+  changeContext: string,
+  start: number,
+):
+  | { kind: 'missing' }
+  | { kind: 'ambiguous' }
+  | {
+      kind: 'match';
+      index: number;
+      exact: boolean;
+      comparator: MatchComparatorName;
+      canonicalLine: string;
+    } {
+  const hits = new Set<number>();
+
+  for (const same of autoRescueComparators) {
+    for (const index of list(lines, [changeContext], start, same)) {
+      hits.add(index);
+    }
+  }
+
+  if (hits.size === 0) {
+    return { kind: 'missing' };
+  }
+
+  if (hits.size > 1) {
+    return { kind: 'ambiguous' };
+  }
+
+  const index = [...hits][0];
+  const canonicalLine = lines[index];
+  const comparator = seekMatch(lines, [changeContext], index)?.comparator;
+
+  return {
+    kind: 'match',
+    index,
+    exact: canonicalLine === changeContext,
+    comparator: comparator ?? 'exact',
+    canonicalLine,
+  };
+}
+
/**
 * Locate where `chunk` applies within `lines`, starting the search at
 * `start`. Tries a direct (possibly fuzzy-comparator) match first, then the
 * optional prefix/suffix and LCS rescue strategies in that order. Returns a
 * ResolvedChunk describing the splice to perform plus the canonical
 * (on-disk) old lines so a stale patch can be rewritten. Throws when a
 * rescue is ambiguous or nothing matches at all.
 */
export function locateChunk(
  lines: string[],
  file: string,
  chunk: PatchChunk,
  start: number,
  cfg: ApplyPatchRuntimeOptions,
): ResolvedChunk {
  const old_lines = chunk.old_lines;
  const new_lines = chunk.new_lines;
  const match = seekMatch(
    lines,
    old_lines,
    start,
    chunk.is_end_of_file ?? false,
  );

  if (match) {
    // Canonicalize against what is actually in the file so whitespace or
    // unicode variants are reported via the on-disk text.
    const canonical_old_lines = lines.slice(
      match.index,
      match.index + old_lines.length,
    );
    // A non-exact comparator hit means the patch text needs rewriting.
    const rewritten = !match.exact;

    return {
      hit: { start: match.index, del: old_lines.length, add: [...new_lines] },
      old_lines,
      canonical_old_lines,
      canonical_new_lines: [...chunk.new_lines],
      resolved_is_end_of_file:
        match.index + canonical_old_lines.length === lines.length,
      rewritten,
      strategy: undefined,
      matchComparator: match.comparator,
    };
  }

  if (cfg.prefixSuffix) {
    const rescued = rescueByPrefixSuffix(lines, old_lines, new_lines, start);

    if (rescued.kind === 'ambiguous') {
      throw new Error(
        `Prefix/suffix rescue was ambiguous in ${file}:\n${chunk.old_lines.join(
          '\n',
        )}`,
      );
    }

    if (rescued.kind === 'match') {
      // The rescue hit covers only the changed middle; extend the canonical
      // span to include the shared prefix/suffix context around it.
      const prefixLength = prefix(old_lines, new_lines);
      const suffixLength = suffix(old_lines, new_lines, prefixLength);
      const canonicalStart = rescued.hit.start - prefixLength;
      const canonicalEnd = rescued.hit.start + rescued.hit.del + suffixLength;

      return {
        hit: rescued.hit,
        old_lines,
        canonical_old_lines: lines.slice(canonicalStart, canonicalEnd),
        canonical_new_lines: [...chunk.new_lines],
        resolved_is_end_of_file: canonicalEnd === lines.length,
        rewritten: true,
        strategy: 'prefix/suffix',
        matchComparator: 'exact',
      };
    }
  }

  if (cfg.lcsRescue) {
    const rescued = rescueByLcs(lines, old_lines, new_lines, start);

    if (rescued.kind === 'ambiguous') {
      throw new Error(
        `LCS rescue was ambiguous in ${file}:\n${chunk.old_lines.join('\n')}`,
      );
    }

    if (rescued.kind === 'match') {
      return {
        hit: rescued.hit,
        old_lines,
        canonical_old_lines: lines.slice(
          rescued.hit.start,
          rescued.hit.start + rescued.hit.del,
        ),
        canonical_new_lines: [...chunk.new_lines],
        resolved_is_end_of_file:
          rescued.hit.start + rescued.hit.del === lines.length,
        rewritten: true,
        strategy: 'lcs',
        matchComparator: 'exact',
      };
    }
  }

  // Direct match and every enabled rescue strategy failed.
  throw new Error(
    `Failed to find expected lines in ${file}:\n${chunk.old_lines.join('\n')}`,
  );
}
+
+export function applyHits(
+  lines: string[],
+  hits: MatchHit[],
+  eol: '\n' | '\r\n' = '\n',
+  hasFinalNewline = true,
+): string {
+  const out = [...lines];
+
+  for (let index = hits.length - 1; index >= 0; index -= 1) {
+    out.splice(hits[index].start, hits[index].del, ...hits[index].add);
+  }
+
+  if (out.length === 0) {
+    return '';
+  }
+
+  const rendered = out.join(eol);
+  return hasFinalNewline ? `${rendered}${eol}` : rendered;
+}
+
/**
 * Resolve every chunk of an update hunk against the file's current lines.
 * Pure insertions (empty old_lines) are anchored either at EOF or after a
 * uniquely matching change_context line; all other chunks go through
 * locateChunk and its rescue strategies. The resolved hits are sorted and
 * checked for overlap before being returned together with the EOL metadata
 * needed to re-render the file.
 */
function resolveUpdateChunksFromFileLines(
  file: string,
  state: FileLines,
  chunks: PatchChunk[],
  cfg: ApplyPatchRuntimeOptions,
): {
  lines: string[];
  resolved: ResolvedChunk[];
  eol: '\n' | '\r\n';
  hasFinalNewline: boolean;
} {
  const lines = [...state.lines];
  const resolved: ResolvedChunk[] = [];
  // Each chunk must resolve at or after the previous chunk's end.
  let start = 0;

  for (const chunk of chunks) {
    const chunkStart = resolveChunkStart(lines, chunk, start);
    let strategy: ApplyPatchRescueStrategy | undefined;

    if (chunk.old_lines.length === 0) {
      // Pure insertion at end-of-file: append after the last line.
      if (chunk.is_end_of_file) {
        resolved.push({
          hit: {
            start: lines.length,
            del: 0,
            add: [...chunk.new_lines],
          },
          old_lines: [],
          canonical_old_lines: [],
          canonical_new_lines: [...chunk.new_lines],
          resolved_is_end_of_file: true,
          rewritten: false,
          strategy,
          matchComparator: 'exact',
        });
        start = lines.length;
        continue;
      }

      // A mid-file insertion needs a context line to anchor against.
      if (!chunk.change_context) {
        throw new Error(`Missing insertion anchor in ${file}`);
      }

      const anchorMatch = resolveUniqueAnchor(
        lines,
        chunk.change_context,
        start,
      );
      if (anchorMatch.kind === 'missing') {
        throw new Error(
          `Failed to find insertion anchor in ${file}:\n${chunk.change_context}`,
        );
      }

      if (anchorMatch.kind === 'ambiguous') {
        throw new Error(
          `Insertion anchor was ambiguous in ${file}:\n${chunk.change_context}`,
        );
      }

      const insertAt = anchorMatch.index + 1;
      // Insertion lands exactly at EOF: no following line to fold into the
      // canonical rewrite, so report the (possibly canonicalized) context.
      if (insertAt === lines.length) {
        resolved.push({
          hit: {
            start: insertAt,
            del: 0,
            add: [...chunk.new_lines],
          },
          old_lines: [],
          canonical_old_lines: [],
          canonical_new_lines: [...chunk.new_lines],
          canonical_change_context: anchorMatch.exact
            ? undefined
            : anchorMatch.canonicalLine,
          resolved_is_end_of_file: insertAt === lines.length,
          rewritten: !anchorMatch.exact,
          strategy: anchorMatch.exact ? strategy : 'anchor',
          matchComparator: anchorMatch.comparator,
        });
        start = insertAt;
        continue;
      }

      // Mid-file insertion: rewrite it as "replace the following line with
      // new lines + that same line" so it round-trips as a normal chunk.
      const anchor = lines[insertAt];

      strategy = 'anchor';
      resolved.push({
        hit: {
          start: insertAt,
          del: 0,
          add: [...chunk.new_lines],
        },
        old_lines: [],
        canonical_old_lines: [anchor],
        canonical_new_lines: [...chunk.new_lines, anchor],
        resolved_is_end_of_file: insertAt + 1 === lines.length,
        rewritten: true,
        strategy,
        matchComparator: 'exact',
      });
      start = insertAt;
      continue;
    }

    const found = locateChunk(lines, file, chunk, chunkStart, cfg);
    resolved.push(found);
    start = found.hit.start + found.hit.del;
  }

  resolved.sort((a, b) => a.hit.start - b.hit.start);

  // Reject chunk pairs whose deletion ranges intersect; zero-length
  // insertions at the same start are allowed.
  for (let index = 1; index < resolved.length; index += 1) {
    const previous = resolved[index - 1].hit;
    const current = resolved[index].hit;
    if (previous.start + previous.del > current.start) {
      throw new Error(`Overlapping patch chunks in ${file}`);
    }
  }

  return {
    lines,
    resolved,
    eol: state.eol,
    hasFinalNewline: state.hasFinalNewline,
  };
}
+
+export async function resolveUpdateChunks(
+  file: string,
+  chunks: PatchChunk[],
+  cfg: ApplyPatchRuntimeOptions,
+): Promise<{
+  lines: string[];
+  resolved: ResolvedChunk[];
+  eol: '\n' | '\r\n';
+  hasFinalNewline: boolean;
+}> {
+  return resolveUpdateChunksFromFileLines(
+    file,
+    await readFileLinesWithEol(file),
+    chunks,
+    cfg,
+  );
+}
+
+export function deriveNewContentFromText(
+  file: string,
+  text: string,
+  chunks: PatchChunk[],
+  cfg: ApplyPatchRuntimeOptions,
+): string {
+  const { lines, resolved, eol, hasFinalNewline } =
+    resolveUpdateChunksFromFileLines(file, splitFileLines(text), chunks, cfg);
+
+  return applyHits(
+    lines,
+    resolved.map((chunk) => chunk.hit),
+    eol,
+    hasFinalNewline,
+  );
+}
+
+export function resolveUpdateChunksFromText(
+  file: string,
+  text: string,
+  chunks: PatchChunk[],
+  cfg: ApplyPatchRuntimeOptions,
+): {
+  lines: string[];
+  resolved: ResolvedChunk[];
+  eol: '\n' | '\r\n';
+  hasFinalNewline: boolean;
+} {
+  return resolveUpdateChunksFromFileLines(
+    file,
+    splitFileLines(text),
+    chunks,
+    cfg,
+  );
+}
+
+export async function deriveNewContent(
+  file: string,
+  chunks: PatchChunk[],
+  cfg: ApplyPatchRuntimeOptions,
+): Promise<string> {
+  const { lines, resolved, eol, hasFinalNewline } = await resolveUpdateChunks(
+    file,
+    chunks,
+    cfg,
+  );
+  return applyHits(
+    lines,
+    resolved.map((chunk) => chunk.hit),
+    eol,
+    hasFinalNewline,
+  );
+}

+ 526 - 0
src/hooks/apply-patch/rewrite.ts

@@ -0,0 +1,526 @@
+import path from 'node:path';
+
+import { formatPatch, normalizePatchText } from './codec';
+import {
+  createApplyPatchVerificationError,
+  ensureApplyPatchError,
+} from './errors';
+import {
+  createPatchExecutionContext,
+  resolvePreparedUpdate,
+  stageAddedText,
+} from './execution-context';
+import { deriveNewContentFromText } from './resolution';
+import type {
+  ApplyPatchRuntimeOptions,
+  PatchHunk,
+  UpdatePatchHunk,
+} from './types';
+
// Outcome of rewritePatch: the (possibly rewritten) patch text plus counters
// describing how many chunks were touched and which rescue modes fired.
export type RewritePatchResult = {
  patchText: string;
  changed: boolean;
  rewrittenChunks: number;
  totalChunks: number;
  rewriteModes: string[];
};

// Pending update hunk that a later update on the same output path may still
// be merged or collapsed into.
type RewriteUpdateGroup = {
  index: number; // slot in the rewritten hunk list to overwrite on merge
  sourcePath: string;
  outputPath: string;
  sourceFilePath: string; // absolute counterpart of sourcePath
  outputFilePath: string; // absolute counterpart of outputPath
  baseText: string; // file content before this group's hunks
  finalText: string; // file content after this group's hunks
  chunks?: UpdatePatchHunk['chunks']; // unset => collapse to whole-file chunk
};

// Pending "Add File" hunk whose contents may absorb a follow-up update.
type RewriteAddGroup = {
  index: number; // slot in the rewritten hunk list to overwrite on merge
  outputPath: string;
  outputFilePath: string;
  finalText: string;
};

// Discriminated union over the two collapsible dependency kinds.
type RewriteDependencyGroup =
  | { kind: 'add'; group: RewriteAddGroup }
  | { kind: 'update'; group: RewriteUpdateGroup };
+
+function normalizeTextLineEndings(text: string): string {
+  return text.replace(/\r\n/g, '\n').replace(/\r/g, '\n');
+}
+
+function splitPatchTextLines(text: string): string[] {
+  const normalized = normalizeTextLineEndings(text);
+  const lines = normalized.split('\n');
+  if (normalized.endsWith('\n')) {
+    lines.pop();
+  }
+  return lines;
+}
+
/**
 * Build a single update hunk that turns `baseText` into `finalText`. Starts
 * from a whole-file replacement chunk, then tries to shrink it by trimming
 * shared leading/trailing context; the trimmed chunk is only kept when
 * replaying it against `baseText` reproduces `finalText` exactly.
 */
function createCollapsedUpdateHunk(
  pathValue: string,
  filePath: string,
  baseText: string,
  finalText: string,
  cfg: ApplyPatchRuntimeOptions,
  movePath?: string,
): UpdatePatchHunk {
  // Whole-file fallback: replace every old line with every new line.
  const collapsedChunk = {
    old_lines: splitPatchTextLines(baseText),
    new_lines: splitPatchTextLines(finalText),
    change_context: undefined,
    is_end_of_file: true,
  } satisfies UpdatePatchHunk['chunks'][number];

  const minimizedChunk = minimizeMergedChunk(collapsedChunk);
  // If trimming changed nothing, keep the original chunk object; otherwise
  // verify the minimized chunk still derives the same final text first.
  const chunk =
    minimizedChunk.old_lines.length === collapsedChunk.old_lines.length &&
    minimizedChunk.new_lines.length === collapsedChunk.new_lines.length &&
    minimizedChunk.change_context === collapsedChunk.change_context &&
    minimizedChunk.is_end_of_file === collapsedChunk.is_end_of_file
      ? collapsedChunk
      : (() => {
          try {
            return deriveNewContentFromText(
              filePath,
              baseText,
              [minimizedChunk],
              cfg,
            ) === finalText
              ? minimizedChunk
              : collapsedChunk;
          } catch {
            // Keep the whole-file chunk when trimming shared context would make
            // the fallback ambiguous or no longer reproduce the same result.
            return collapsedChunk;
          }
        })();

  return {
    type: 'update',
    path: pathValue,
    move_path: movePath,
    chunks: [chunk],
  };
}
+
+function clonePatchChunks(
+  chunks: UpdatePatchHunk['chunks'],
+): UpdatePatchHunk['chunks'] {
+  return chunks.map((chunk) => ({
+    old_lines: [...chunk.old_lines],
+    new_lines: [...chunk.new_lines],
+    change_context: chunk.change_context,
+    is_end_of_file: chunk.is_end_of_file,
+  }));
+}
+
+function minimizeMergedChunk(chunk: UpdatePatchHunk['chunks'][number]) {
+  if (chunk.old_lines.length === 0 && chunk.new_lines.length === 0) {
+    return {
+      old_lines: [],
+      new_lines: [],
+      change_context: chunk.change_context,
+      is_end_of_file: chunk.is_end_of_file,
+    };
+  }
+
+  let prefixLength = 0;
+  while (
+    prefixLength < chunk.old_lines.length &&
+    prefixLength < chunk.new_lines.length &&
+    chunk.old_lines[prefixLength] === chunk.new_lines[prefixLength]
+  ) {
+    prefixLength += 1;
+  }
+
+  let suffixLength = 0;
+  while (
+    chunk.old_lines.length - suffixLength - 1 >= prefixLength &&
+    chunk.new_lines.length - suffixLength - 1 >= prefixLength &&
+    chunk.old_lines[chunk.old_lines.length - suffixLength - 1] ===
+      chunk.new_lines[chunk.new_lines.length - suffixLength - 1]
+  ) {
+    suffixLength += 1;
+  }
+
+  if (prefixLength === 0 && suffixLength === 0) {
+    return {
+      old_lines: [...chunk.old_lines],
+      new_lines: [...chunk.new_lines],
+      change_context: chunk.change_context,
+      is_end_of_file: chunk.is_end_of_file,
+    };
+  }
+
+  return {
+    old_lines: chunk.old_lines.slice(
+      prefixLength,
+      chunk.old_lines.length - suffixLength,
+    ),
+    new_lines: chunk.new_lines.slice(
+      prefixLength,
+      chunk.new_lines.length - suffixLength,
+    ),
+    change_context:
+      prefixLength > 0
+        ? chunk.old_lines[prefixLength - 1]
+        : chunk.change_context,
+    is_end_of_file:
+      chunk.is_end_of_file && suffixLength === 0 ? true : undefined,
+  };
+}
+
+function createUpdateHunk(
+  pathValue: string,
+  chunks: UpdatePatchHunk['chunks'],
+  movePath?: string,
+): UpdatePatchHunk {
+  return {
+    type: 'update',
+    path: pathValue,
+    move_path: movePath,
+    chunks: clonePatchChunks(chunks),
+  };
+}
+
+function mergeSameFileUpdateGroupChunks(
+  filePath: string,
+  group: RewriteUpdateGroup,
+  nextChunks: UpdatePatchHunk['chunks'],
+  finalText: string,
+  cfg: ApplyPatchRuntimeOptions,
+): UpdatePatchHunk['chunks'] | undefined {
+  if (!group.chunks) {
+    return undefined;
+  }
+
+  const mergedChunks = [
+    ...clonePatchChunks(group.chunks).map(minimizeMergedChunk),
+    ...clonePatchChunks(nextChunks).map(minimizeMergedChunk),
+  ];
+
+  try {
+    const mergedText = deriveNewContentFromText(
+      filePath,
+      group.baseText,
+      mergedChunks,
+      cfg,
+    );
+
+    return mergedText === finalText ? mergedChunks : undefined;
+  } catch {
+    return undefined;
+  }
+}
+
+function addContentsFromFinalText(text: string): string {
+  return text.endsWith('\n') ? text.slice(0, -1) : text;
+}
+
+function renderRewriteDependencyGroup(
+  group: RewriteDependencyGroup,
+  cfg: ApplyPatchRuntimeOptions,
+): PatchHunk {
+  if (group.kind === 'add') {
+    return {
+      type: 'add',
+      path: group.group.outputPath,
+      contents: addContentsFromFinalText(group.group.finalText),
+    };
+  }
+
+  return group.group.chunks
+    ? createUpdateHunk(
+        group.group.sourcePath,
+        group.group.chunks,
+        group.group.outputPath !== group.group.sourcePath
+          ? group.group.outputPath
+          : undefined,
+      )
+    : createCollapsedUpdateHunk(
+        group.group.sourcePath,
+        group.group.sourceFilePath,
+        group.group.baseText,
+        group.group.finalText,
+        cfg,
+        group.group.outputPath !== group.group.sourcePath
+          ? group.group.outputPath
+          : undefined,
+      );
+}
+
+function rewriteModeForDependentUpdate(group: RewriteDependencyGroup): string {
+  if (group.kind === 'add') {
+    return 'collapse:add-followed-by-update';
+  }
+
+  if (group.group.outputPath !== group.group.sourcePath) {
+    return 'collapse:move-followed-by-update';
+  }
+
+  return 'merge:same-file-updates';
+}
+
+function combineDependentUpdateGroup(
+  filePath: string,
+  group: RewriteDependencyGroup,
+  nextChunks: UpdatePatchHunk['chunks'],
+  finalText: string,
+  nextOutputPath: string,
+  nextOutputFilePath: string,
+  cfg: ApplyPatchRuntimeOptions,
+): RewriteDependencyGroup {
+  if (group.kind === 'add') {
+    return {
+      kind: 'add',
+      group: {
+        ...group.group,
+        outputPath: nextOutputPath,
+        outputFilePath: nextOutputFilePath,
+        finalText,
+      },
+    };
+  }
+
+  const mergedChunks =
+    group.group.outputFilePath === filePath &&
+    group.group.sourceFilePath === filePath &&
+    nextOutputFilePath === filePath
+      ? mergeSameFileUpdateGroupChunks(
+          filePath,
+          group.group,
+          nextChunks,
+          finalText,
+          cfg,
+        )
+      : undefined;
+
+  return {
+    kind: 'update',
+    group: {
+      ...group.group,
+      outputPath: nextOutputPath,
+      outputFilePath: nextOutputFilePath,
+      finalText,
+      chunks: mergedChunks,
+    },
+  };
+}
+
/**
 * Re-resolve every hunk of `patchText` against the prepared file state and
 * return a canonical patch whose chunks match the files exactly, along with
 * counters describing how much was rewritten. Consecutive hunks touching the
 * same file are merged or collapsed via dependency groups. Any error is
 * normalized through ensureApplyPatchError.
 */
export async function rewritePatch(
  root: string,
  patchText: string,
  cfg: ApplyPatchRuntimeOptions,
  worktree?: string,
): Promise<RewritePatchResult> {
  try {
    const { hunks, staged, getPreparedFileState, assertPreparedPathMissing } =
      await createPatchExecutionContext(root, patchText, worktree);
    const normalizedPatchText = normalizePatchText(patchText);
    const rewritten: PatchHunk[] = [];
    let changed = false;
    let rewrittenChunks = 0;
    const rewriteModes = new Set<string>();
    // Only update hunks contribute countable chunks; adds/deletes are atomic.
    const totalChunks = hunks.reduce(
      (count, hunk) =>
        count + (hunk.type === 'update' ? hunk.chunks.length : 0),
      0,
    );

    // Per absolute file path: the most recent hunk a later update on the
    // same path could be merged/collapsed into.
    const dependencyGroups = new Map<string, RewriteDependencyGroup>();

    function clearDependencyGroup(filePath: string) {
      dependencyGroups.delete(filePath);
    }

    for (const hunk of hunks) {
      if (hunk.type === 'add') {
        const filePath = path.resolve(root, hunk.path);
        await assertPreparedPathMissing(filePath, 'add');
        rewritten.push(hunk);
        clearDependencyGroup(filePath);
        // Stage the added text so later hunks in this patch see it.
        const finalText = stageAddedText(hunk.contents);
        staged.set(filePath, {
          exists: true,
          text: finalText,
          derived: true,
        });
        dependencyGroups.set(filePath, {
          kind: 'add',
          group: {
            index: rewritten.length - 1,
            outputPath: hunk.path,
            outputFilePath: filePath,
            finalText,
          },
        });
        continue;
      }

      if (hunk.type === 'delete') {
        const filePath = path.resolve(root, hunk.path);
        await getPreparedFileState(filePath, 'delete');
        clearDependencyGroup(filePath);
        rewritten.push(hunk);
        staged.set(filePath, { exists: false, derived: true });
        continue;
      }

      // Update hunk: resolve its chunks against the staged/on-disk text.
      const filePath = path.resolve(root, hunk.path);
      const currentDependency = dependencyGroups.get(filePath);
      const current = await getPreparedFileState(filePath, 'update');
      if (!current.exists) {
        throw createApplyPatchVerificationError(
          `Failed to read file to update: ${filePath}`,
        );
      }

      const movePath = hunk.move_path
        ? path.resolve(root, hunk.move_path)
        : undefined;
      if (movePath && movePath !== filePath) {
        await assertPreparedPathMissing(movePath, 'move');
      }

      const { resolved, nextText } = resolvePreparedUpdate(
        filePath,
        current.text,
        hunk,
        cfg,
      );

      // Canonicalized chunk list: old/new lines exactly as found on disk;
      // is_end_of_file only survives when the resolved span still ends there.
      const next = resolved.map((chunk, index) => ({
        old_lines: [...chunk.canonical_old_lines],
        new_lines: [...chunk.canonical_new_lines],
        change_context:
          chunk.canonical_change_context ?? hunk.chunks[index].change_context,
        is_end_of_file:
          hunk.chunks[index].is_end_of_file && chunk.resolved_is_end_of_file
            ? true
            : undefined,
      }));

      // Tally rewritten chunks and record which rescue/comparator fired.
      for (const chunk of resolved) {
        if (!chunk.rewritten) {
          continue;
        }

        changed = true;
        rewrittenChunks += 1;
        if (chunk.strategy) {
          rewriteModes.add(chunk.strategy);
          continue;
        }

        if (chunk.matchComparator && chunk.matchComparator !== 'exact') {
          rewriteModes.add(`match:${chunk.matchComparator}`);
        }
      }

      const nextOutputPath = hunk.move_path ?? hunk.path;
      const nextOutputFilePath = movePath ?? filePath;

      if (current.derived && currentDependency) {
        // This update reads text produced by an earlier hunk of the same
        // patch: fold both into one canonical hunk at the earlier slot.
        const nextGroup = combineDependentUpdateGroup(
          filePath,
          currentDependency,
          next,
          nextText,
          nextOutputPath,
          nextOutputFilePath,
          cfg,
        );
        rewritten[currentDependency.group.index] = renderRewriteDependencyGroup(
          nextGroup,
          cfg,
        );
        changed = true;
        rewriteModes.add(rewriteModeForDependentUpdate(currentDependency));
        clearDependencyGroup(filePath);
        if (movePath && movePath !== filePath) {
          clearDependencyGroup(movePath);
        }
        dependencyGroups.set(nextOutputFilePath, nextGroup);
      } else {
        rewritten.push(createUpdateHunk(hunk.path, next, hunk.move_path));
        clearDependencyGroup(filePath);
        if (movePath && movePath !== filePath) {
          clearDependencyGroup(movePath);
        }
        dependencyGroups.set(nextOutputFilePath, {
          kind: 'update',
          group: {
            index: rewritten.length - 1,
            sourcePath: hunk.path,
            outputPath: nextOutputPath,
            sourceFilePath: filePath,
            outputFilePath: nextOutputFilePath,
            baseText: current.text,
            finalText: nextText,
            chunks: clonePatchChunks(next),
          },
        });
      }

      // Stage the post-update text (and tombstone the source on a move) so
      // later hunks observe this file's new state.
      if (movePath && movePath !== filePath) {
        staged.set(filePath, { exists: false, derived: true });
        staged.set(movePath, {
          exists: true,
          text: nextText,
          mode: current.mode,
          derived: true,
        });
      } else {
        staged.set(filePath, {
          exists: true,
          text: nextText,
          mode: current.mode,
          derived: true,
        });
      }
    }

    if (!changed) {
      // No chunk needed rewriting; still report a change when
      // normalizePatchText altered the raw patch text.
      if (normalizedPatchText !== patchText) {
        return {
          patchText: normalizedPatchText,
          changed: true,
          rewrittenChunks: 0,
          totalChunks,
          rewriteModes: ['normalize:patch-text'],
        };
      }

      return {
        patchText,
        changed: false,
        rewrittenChunks: 0,
        totalChunks,
        rewriteModes: [],
      };
    }

    return {
      patchText: formatPatch({ hunks: rewritten }),
      changed: true,
      rewrittenChunks,
      totalChunks,
      rewriteModes: [...rewriteModes].sort(),
    };
  } catch (error) {
    throw ensureApplyPatchError(error, 'Unexpected rewrite failure');
  }
}
+
+export async function rewritePatchText(
+  root: string,
+  patchText: string,
+  cfg: ApplyPatchRuntimeOptions,
+  worktree?: string,
+): Promise<string> {
+  return (await rewritePatch(root, patchText, cfg, worktree)).patchText;
+}

+ 52 - 0
src/hooks/apply-patch/test-helpers.ts

@@ -0,0 +1,52 @@
+import { afterEach } from 'bun:test';
+import { mkdir, mkdtemp, readFile, rm, writeFile } from 'node:fs/promises';
+import os from 'node:os';
+import path from 'node:path';
+
+import { applyPreparedChanges, preparePatchChanges } from './operations';
+import type { ApplyPatchRuntimeOptions } from './types';
+
// Temp directories handed out by createTempDir; removed after every test so
// fixtures never leak between cases.
const tempDirs: string[] = [];

afterEach(async () => {
  // splice(0) empties the registry up front so a failed rm cannot leave a
  // stale entry behind for the next test's cleanup pass.
  await Promise.all(
    tempDirs.splice(0).map((dir) => rm(dir, { recursive: true, force: true })),
  );
});
+
// Runtime options used by most tests: both rescue strategies enabled.
export const DEFAULT_OPTIONS: ApplyPatchRuntimeOptions = {
  prefixSuffix: true,
  lcsRescue: true,
};
+
+export async function createTempDir(prefix = 'apply-patch-'): Promise<string> {
+  const dir = await mkdtemp(path.join(os.tmpdir(), prefix));
+  tempDirs.push(dir);
+  return dir;
+}
+
+export async function writeFixture(
+  root: string,
+  relativePath: string,
+  contents: string,
+): Promise<void> {
+  const target = path.join(root, relativePath);
+  await mkdir(path.dirname(target), { recursive: true });
+  await writeFile(target, contents, 'utf-8');
+}
+
+export async function readText(
+  root: string,
+  relativePath: string,
+): Promise<string> {
+  return await readFile(path.join(root, relativePath), 'utf-8');
+}
+
+export async function applyPatch(
+  root: string,
+  patchText: string,
+  cfg: ApplyPatchRuntimeOptions = DEFAULT_OPTIONS,
+): Promise<void> {
+  const changes = await preparePatchChanges(root, patchText, cfg);
+  await applyPreparedChanges(changes);
+}

+ 111 - 0
src/hooks/apply-patch/types.ts

@@ -0,0 +1,111 @@
// Feature switches for the stale-patch rescue pipeline.
export type ApplyPatchRuntimeOptions = {
  prefixSuffix: boolean;
  lcsRescue: boolean;
};

// Broad failure category carried by apply-patch errors.
export type ApplyPatchErrorKind =
  | 'blocked'
  | 'validation'
  | 'verification'
  | 'internal';

// Machine-readable error code paired with the kind above.
export type ApplyPatchErrorCode =
  | 'malformed_patch'
  | 'outside_workspace'
  | 'verification_failed'
  | 'internal_unexpected';

// Which fallback located a chunk when the exact match failed.
export type ApplyPatchRescueStrategy = 'prefix/suffix' | 'lcs' | 'anchor';

// Line comparators tried when matching patch lines against file lines.
export type MatchComparatorName =
  | 'exact'
  | 'unicode'
  | 'trim-end'
  | 'unicode-trim-end'
  | 'trim'
  | 'unicode-trim';

// One change within an update hunk: lines to remove, lines to insert, an
// optional anchor line, and an optional end-of-file marker.
export type PatchChunk = {
  old_lines: string[];
  new_lines: string[];
  change_context?: string;
  is_end_of_file?: boolean;
};

// "*** Add File" hunk.
export type AddPatchHunk = {
  type: 'add';
  path: string;
  contents: string;
};

// "*** Delete File" hunk.
export type DeletePatchHunk = {
  type: 'delete';
  path: string;
};

// "*** Update File" hunk, optionally moving the file to move_path.
export type UpdatePatchHunk = {
  type: 'update';
  path: string;
  move_path?: string;
  chunks: PatchChunk[];
};

export type PatchHunk = AddPatchHunk | DeletePatchHunk | UpdatePatchHunk;

// Result of parsing a full "*** Begin Patch … *** End Patch" envelope.
export type ParsedPatch = {
  hunks: PatchHunk[];
};

// Filesystem operations staged before being applied atomically.
export type AddPreparedChange = {
  type: 'add';
  file: string;
  text: string;
};

export type DeletePreparedChange = {
  type: 'delete';
  file: string;
};

export type UpdatePreparedChange = {
  type: 'update';
  file: string;
  move?: string;
  text: string;
};

export type PreparedChange =
  | AddPreparedChange
  | DeletePreparedChange
  | UpdatePreparedChange;

// A splice against a line array: delete `del` lines at `start`, insert `add`.
export type MatchHit = {
  start: number;
  del: number;
  add: string[];
};

// A located line match plus the comparator that accepted it.
export type SeekHit = {
  index: number;
  comparator: MatchComparatorName;
  exact: boolean;
};

// A chunk resolved against the file: the splice to perform, the canonical
// on-disk line content, and how the match was obtained.
export type ResolvedChunk = {
  hit: MatchHit;
  old_lines: string[];
  canonical_old_lines: string[];
  canonical_new_lines: string[];
  canonical_change_context?: string;
  resolved_is_end_of_file: boolean;
  rewritten: boolean;
  strategy?: ApplyPatchRescueStrategy;
  matchComparator?: MatchComparatorName;
};

// Outcome of a rescue attempt: no match, multiple candidates, or one hit.
export type RescueResult =
  | { kind: 'miss' }
  | { kind: 'ambiguous'; phase: 'prefix_suffix' | 'lcs' }
  | { kind: 'match'; hit: MatchHit };

export type LineComparator = (a: string, b: string) => boolean;

+ 1 - 0
src/hooks/index.ts

@@ -1,3 +1,4 @@
+export { createApplyPatchHook } from './apply-patch';
 export type { AutoUpdateCheckerOptions } from './auto-update-checker';
 export type { AutoUpdateCheckerOptions } from './auto-update-checker';
 export { createAutoUpdateCheckerHook } from './auto-update-checker';
 export { createAutoUpdateCheckerHook } from './auto-update-checker';
 export { createChatHeadersHook } from './chat-headers';
 export { createChatHeadersHook } from './chat-headers';

+ 14 - 0
src/index.ts

@@ -5,6 +5,7 @@ import { loadPluginConfig, type MultiplexerConfig } from './config';
 import { parseList } from './config/agent-mcps';
 import { parseList } from './config/agent-mcps';
 import { CouncilManager } from './council';
 import { CouncilManager } from './council';
 import {
 import {
+  createApplyPatchHook,
   createAutoUpdateCheckerHook,
   createAutoUpdateCheckerHook,
   createChatHeadersHook,
   createChatHeadersHook,
   createDelegateTaskRetryHook,
   createDelegateTaskRetryHook,
@@ -161,6 +162,8 @@ const OhMyOpenCodeLite: Plugin = async (ctx) => {
   // Initialize delegate-task retry guidance hook
   // Initialize delegate-task retry guidance hook
   const delegateTaskRetryHook = createDelegateTaskRetryHook(ctx);
   const delegateTaskRetryHook = createDelegateTaskRetryHook(ctx);
 
 
+  const applyPatchHook = createApplyPatchHook(ctx);
+
   // Initialize JSON parse error recovery hook
   // Initialize JSON parse error recovery hook
   const jsonErrorRecoveryHook = createJsonErrorRecoveryHook(ctx);
   const jsonErrorRecoveryHook = createJsonErrorRecoveryHook(ctx);
 
 
@@ -464,6 +467,17 @@ const OhMyOpenCodeLite: Plugin = async (ctx) => {
       );
       );
     },
     },
 
 
+    // Best-effort rescue only for stale apply_patch input before native execution
+    'tool.execute.before': async (input, output) => {
+      await applyPatchHook['tool.execute.before'](
+        input as {
+          tool: string;
+          directory?: string;
+        },
+        output as { args?: { patchText?: unknown; [key: string]: unknown } },
+      );
+    },
+
     // Direct interception of /auto-continue command — bypasses LLM round-trip
     // Direct interception of /auto-continue command — bypasses LLM round-trip
     'command.execute.before': async (input, output) => {
     'command.execute.before': async (input, output) => {
       await todoContinuationHook.handleCommandExecuteBefore(
       await todoContinuationHook.handleCommandExecuteBefore(