Compare commits

..

8 Commits

Author SHA1 Message Date
Cole Leavitt
f92d2e8f7f fix: add proc.kill fallback when process group kill fails 2026-02-22 15:06:09 +09:00
Cole Leavitt
7f0950230c fix: kill process group on timeout and handle stdin EPIPE
- Use detached process group (non-Windows) + process.kill(-pid) to kill
  the entire process tree, not just the outer shell wrapper
- Add proc.stdin error listener to absorb EPIPE when child exits before
  stdin write completes
2026-02-22 15:06:09 +09:00
Cole Leavitt
e1568a4705 fix: handle signal-killed exit code and guard SIGTERM kill
- code ?? 0 → code ?? 1: signal-terminated processes return null exit code,
  which was incorrectly coerced to 0 (success) instead of 1 (failure)
- wrap proc.kill(SIGTERM) in try/catch to match SIGKILL guard and prevent
  EPERM/ESRCH from crashing on already-dead processes
2026-02-22 15:06:09 +09:00
Cole Leavitt
b666ab24df fix: plug resource leaks and add hook command timeout
- LSP signal handlers: store refs, return unregister handle, call in stopAll()
- session-tools-store: add per-session deleteSessionTools(), wire into session.deleted
- executeHookCommand: add 30s timeout with SIGTERM→SIGKILL escalation
2026-02-22 15:06:09 +09:00
YeonGyu-Kim
ac81e1d7cd fix(hashline-edit): correct offset advancement and fuzzy index mapping in merge expand
- Track matchedLen separately for stripped continuation token matches
- Map fuzzy index back to original string position via character-by-character
  scan that skips operator chars, fixing positional correctness
2026-02-22 14:50:59 +09:00
YeonGyu-Kim
9390f98f01 fix(hashline-edit): integrate continuation/merge helpers into expand logic and strengthen tool description
- maybeExpandSingleLineMerge now uses stripTrailingContinuationTokens and
  stripMergeOperatorChars as fallback matching strategies
- Add 'refs interpreted against last read' atomicity clause to tool description
- Add 'output tool calls only; no prose' rule to tool description
2026-02-22 14:46:59 +09:00
YeonGyu-Kim
e6868e9112 fix(hashline-edit): align autocorrect, BOM/CRLF, and tool description with oh-my-pi
- Rewrite restoreOldWrappedLines to use oh-my-pi's span-scanning algorithm
- Add stripTrailingContinuationTokens and stripMergeOperatorChars helpers
- Fix detectLineEnding to use first-occurrence logic instead of any-match
- Fix applyAppend/applyPrepend to replace empty-line placeholder in empty files
- Enhance tool description with 7 critical rules, tag guidance, and anti-patterns
2026-02-22 14:40:18 +09:00
YeonGyu-Kim
5d1d87cc10 feat(hashline-edit): add autocorrect, BOM/CRLF normalization, and file creation support
Implements key features from oh-my-pi to improve agent editing success rates:

- Autocorrect v1: single-line merge expansion, wrapped line restoration,
  paired indent restoration (autocorrect-replacement-lines.ts)
- BOM/CRLF normalization: canonicalize on read, restore on write
  (file-text-canonicalization.ts)
- Pre-validate all hashes before mutation (edit-ordering.ts)
- File creation via append/prepend operations (new types + executor logic)
- Modular refactoring: split edit-operations.ts into focused modules
  (primitives, ordering, deduplication, diff, executor)
- Enhanced tool description with operation choice guide and recovery hints

All 50 tests pass. TypeScript clean. Build successful.
2026-02-22 14:13:59 +09:00
19 changed files with 1478 additions and 761 deletions

View File

@@ -1,53 +1,58 @@
import type { OhMyOpenCodeConfig } from "../config"
import type { PluginContext } from "./types"
import type { OhMyOpenCodeConfig } from "../config";
import type { PluginContext } from "./types";
import {
clearSessionAgent,
getMainSessionID,
getSessionAgent,
setMainSession,
subagentSessions,
syncSubagentSessions,
setMainSession,
updateSessionAgent,
} from "../features/claude-code-session-state"
import { resetMessageCursor } from "../shared"
import { lspManager } from "../tools"
import { shouldRetryError } from "../shared/model-error-classifier"
import { clearPendingModelFallback, clearSessionFallbackChain, setPendingModelFallback } from "../hooks/model-fallback/hook"
import { log } from "../shared/logger"
import { clearSessionModel, setSessionModel } from "../shared/session-model-state"
} from "../features/claude-code-session-state";
import {
clearPendingModelFallback,
clearSessionFallbackChain,
setPendingModelFallback,
} from "../hooks/model-fallback/hook";
import { resetMessageCursor } from "../shared";
import { log } from "../shared/logger";
import { shouldRetryError } from "../shared/model-error-classifier";
import { clearSessionModel, setSessionModel } from "../shared/session-model-state";
import { deleteSessionTools } from "../shared/session-tools-store";
import { lspManager } from "../tools";
import type { CreatedHooks } from "../create-hooks"
import type { Managers } from "../create-managers"
import { normalizeSessionStatusToIdle } from "./session-status-normalizer"
import { pruneRecentSyntheticIdles } from "./recent-synthetic-idles"
import type { CreatedHooks } from "../create-hooks";
import type { Managers } from "../create-managers";
import { pruneRecentSyntheticIdles } from "./recent-synthetic-idles";
import { normalizeSessionStatusToIdle } from "./session-status-normalizer";
type FirstMessageVariantGate = {
markSessionCreated: (sessionInfo: { id?: string; title?: string; parentID?: string } | undefined) => void
clear: (sessionID: string) => void
}
markSessionCreated: (sessionInfo: { id?: string; title?: string; parentID?: string } | undefined) => void;
clear: (sessionID: string) => void;
};
/** Type guard: true when `value` is a non-null object (arrays included). */
function isRecord(value: unknown): value is Record<string, unknown> {
  return typeof value === "object" && value !== null;
}
/**
 * Strips trailing variant suffixes from a model ID so fallback lookups match
 * the base model. Removes -thinking, then -max, then -high (case-insensitive,
 * end-of-string only), so chained suffixes like "-max-thinking" also collapse.
 */
function normalizeFallbackModelID(modelID: string): string {
  return modelID
    .replace(/-thinking$/i, "")
    .replace(/-max$/i, "")
    .replace(/-high$/i, "");
}
/**
 * Best-effort extraction of an error's `name`.
 * Checks a plain-object string `name` field first (covers serialized errors
 * from event payloads), then Error instances; undefined otherwise.
 */
function extractErrorName(error: unknown): string | undefined {
  if (isRecord(error) && typeof error.name === "string") return error.name;
  if (error instanceof Error) return error.name;
  return undefined;
}
function extractErrorMessage(error: unknown): string {
if (!error) return ""
if (typeof error === "string") return error
if (error instanceof Error) return error.message
if (!error) return "";
if (typeof error === "string") return error;
if (error instanceof Error) return error.message;
if (isRecord(error)) {
const candidates: unknown[] = [
@@ -56,116 +61,112 @@ function extractErrorMessage(error: unknown): string {
error.error,
isRecord(error.data) ? error.data.error : undefined,
error.cause,
]
];
for (const candidate of candidates) {
if (isRecord(candidate) && typeof candidate.message === "string" && candidate.message.length > 0) {
return candidate.message
return candidate.message;
}
}
}
try {
return JSON.stringify(error)
return JSON.stringify(error);
} catch {
return String(error)
return String(error);
}
}
function extractProviderModelFromErrorMessage(
message: string,
): { providerID?: string; modelID?: string } {
const lower = message.toLowerCase()
function extractProviderModelFromErrorMessage(message: string): { providerID?: string; modelID?: string } {
const lower = message.toLowerCase();
const providerModel = lower.match(/model\s+not\s+found:\s*([a-z0-9_-]+)\s*\/\s*([a-z0-9._-]+)/i)
const providerModel = lower.match(/model\s+not\s+found:\s*([a-z0-9_-]+)\s*\/\s*([a-z0-9._-]+)/i);
if (providerModel) {
return {
providerID: providerModel[1],
modelID: providerModel[2],
}
};
}
const modelOnly = lower.match(/unknown\s+provider\s+for\s+model\s+([a-z0-9._-]+)/i)
const modelOnly = lower.match(/unknown\s+provider\s+for\s+model\s+([a-z0-9._-]+)/i);
if (modelOnly) {
return {
modelID: modelOnly[1],
}
};
}
return {}
return {};
}
/**
 * Input type of a plugin event hook, derived from the writeExistingFileGuard
 * hook's `event` signature so every dispatched hook shares the same shape.
 */
type EventInput = Parameters<NonNullable<NonNullable<CreatedHooks["writeExistingFileGuard"]>["event"]>>[0];
export function createEventHandler(args: {
ctx: PluginContext
pluginConfig: OhMyOpenCodeConfig
firstMessageVariantGate: FirstMessageVariantGate
managers: Managers
hooks: CreatedHooks
ctx: PluginContext;
pluginConfig: OhMyOpenCodeConfig;
firstMessageVariantGate: FirstMessageVariantGate;
managers: Managers;
hooks: CreatedHooks;
}): (input: EventInput) => Promise<void> {
const { ctx, firstMessageVariantGate, managers, hooks } = args
const { ctx, firstMessageVariantGate, managers, hooks } = args;
const pluginContext = ctx as {
directory: string
directory: string;
client: {
session: {
abort: (input: { path: { id: string } }) => Promise<unknown>
abort: (input: { path: { id: string } }) => Promise<unknown>;
prompt: (input: {
path: { id: string }
body: { parts: Array<{ type: "text"; text: string }> }
query: { directory: string }
}) => Promise<unknown>
}
}
}
path: { id: string };
body: { parts: Array<{ type: "text"; text: string }> };
query: { directory: string };
}) => Promise<unknown>;
};
};
};
const isRuntimeFallbackEnabled =
hooks.runtimeFallback !== null &&
hooks.runtimeFallback !== undefined &&
(typeof args.pluginConfig.runtime_fallback === "boolean"
? args.pluginConfig.runtime_fallback
: (args.pluginConfig.runtime_fallback?.enabled ?? false))
: (args.pluginConfig.runtime_fallback?.enabled ?? false));
// Avoid triggering multiple abort+continue cycles for the same failing assistant message.
const lastHandledModelErrorMessageID = new Map<string, string>()
const lastHandledRetryStatusKey = new Map<string, string>()
const lastKnownModelBySession = new Map<string, { providerID: string; modelID: string }>()
const lastHandledModelErrorMessageID = new Map<string, string>();
const lastHandledRetryStatusKey = new Map<string, string>();
const lastKnownModelBySession = new Map<string, { providerID: string; modelID: string }>();
const dispatchToHooks = async (input: EventInput): Promise<void> => {
await Promise.resolve(hooks.autoUpdateChecker?.event?.(input))
await Promise.resolve(hooks.claudeCodeHooks?.event?.(input))
await Promise.resolve(hooks.backgroundNotificationHook?.event?.(input))
await Promise.resolve(hooks.sessionNotification?.(input))
await Promise.resolve(hooks.todoContinuationEnforcer?.handler?.(input))
await Promise.resolve(hooks.unstableAgentBabysitter?.event?.(input))
await Promise.resolve(hooks.contextWindowMonitor?.event?.(input))
await Promise.resolve(hooks.directoryAgentsInjector?.event?.(input))
await Promise.resolve(hooks.directoryReadmeInjector?.event?.(input))
await Promise.resolve(hooks.rulesInjector?.event?.(input))
await Promise.resolve(hooks.thinkMode?.event?.(input))
await Promise.resolve(hooks.anthropicContextWindowLimitRecovery?.event?.(input))
await Promise.resolve(hooks.runtimeFallback?.event?.(input))
await Promise.resolve(hooks.agentUsageReminder?.event?.(input))
await Promise.resolve(hooks.categorySkillReminder?.event?.(input))
await Promise.resolve(hooks.interactiveBashSession?.event?.(input as EventInput))
await Promise.resolve(hooks.ralphLoop?.event?.(input))
await Promise.resolve(hooks.stopContinuationGuard?.event?.(input))
await Promise.resolve(hooks.compactionTodoPreserver?.event?.(input))
await Promise.resolve(hooks.writeExistingFileGuard?.event?.(input))
await Promise.resolve(hooks.atlasHook?.handler?.(input))
}
await Promise.resolve(hooks.autoUpdateChecker?.event?.(input));
await Promise.resolve(hooks.claudeCodeHooks?.event?.(input));
await Promise.resolve(hooks.backgroundNotificationHook?.event?.(input));
await Promise.resolve(hooks.sessionNotification?.(input));
await Promise.resolve(hooks.todoContinuationEnforcer?.handler?.(input));
await Promise.resolve(hooks.unstableAgentBabysitter?.event?.(input));
await Promise.resolve(hooks.contextWindowMonitor?.event?.(input));
await Promise.resolve(hooks.directoryAgentsInjector?.event?.(input));
await Promise.resolve(hooks.directoryReadmeInjector?.event?.(input));
await Promise.resolve(hooks.rulesInjector?.event?.(input));
await Promise.resolve(hooks.thinkMode?.event?.(input));
await Promise.resolve(hooks.anthropicContextWindowLimitRecovery?.event?.(input));
await Promise.resolve(hooks.runtimeFallback?.event?.(input));
await Promise.resolve(hooks.agentUsageReminder?.event?.(input));
await Promise.resolve(hooks.categorySkillReminder?.event?.(input));
await Promise.resolve(hooks.interactiveBashSession?.event?.(input as EventInput));
await Promise.resolve(hooks.ralphLoop?.event?.(input));
await Promise.resolve(hooks.stopContinuationGuard?.event?.(input));
await Promise.resolve(hooks.compactionTodoPreserver?.event?.(input));
await Promise.resolve(hooks.writeExistingFileGuard?.event?.(input));
await Promise.resolve(hooks.atlasHook?.handler?.(input));
};
const recentSyntheticIdles = new Map<string, number>()
const recentRealIdles = new Map<string, number>()
const DEDUP_WINDOW_MS = 500
const recentSyntheticIdles = new Map<string, number>();
const recentRealIdles = new Map<string, number>();
const DEDUP_WINDOW_MS = 500;
const shouldAutoRetrySession = (sessionID: string): boolean => {
if (syncSubagentSessions.has(sessionID)) return true
const mainSessionID = getMainSessionID()
if (mainSessionID) return sessionID === mainSessionID
if (syncSubagentSessions.has(sessionID)) return true;
const mainSessionID = getMainSessionID();
if (mainSessionID) return sessionID === mainSessionID;
// Headless runs (or resumed sessions) may not emit session.created, so mainSessionID can be unset.
// In that case, treat any non-subagent session as the "main" interactive session.
return !subagentSessions.has(sessionID)
}
return !subagentSessions.has(sessionID);
};
return async (input): Promise<void> => {
pruneRecentSyntheticIdles({
@@ -173,97 +174,98 @@ export function createEventHandler(args: {
recentRealIdles,
now: Date.now(),
dedupWindowMs: DEDUP_WINDOW_MS,
})
});
if (input.event.type === "session.idle") {
const sessionID = (input.event.properties as Record<string, unknown> | undefined)?.sessionID as string | undefined
const sessionID = (input.event.properties as Record<string, unknown> | undefined)?.sessionID as
| string
| undefined;
if (sessionID) {
const emittedAt = recentSyntheticIdles.get(sessionID)
const emittedAt = recentSyntheticIdles.get(sessionID);
if (emittedAt && Date.now() - emittedAt < DEDUP_WINDOW_MS) {
recentSyntheticIdles.delete(sessionID)
return
recentSyntheticIdles.delete(sessionID);
return;
}
recentRealIdles.set(sessionID, Date.now())
recentRealIdles.set(sessionID, Date.now());
}
}
await dispatchToHooks(input)
await dispatchToHooks(input);
const syntheticIdle = normalizeSessionStatusToIdle(input)
const syntheticIdle = normalizeSessionStatusToIdle(input);
if (syntheticIdle) {
const sessionID = (syntheticIdle.event.properties as Record<string, unknown>)?.sessionID as string
const emittedAt = recentRealIdles.get(sessionID)
const sessionID = (syntheticIdle.event.properties as Record<string, unknown>)?.sessionID as string;
const emittedAt = recentRealIdles.get(sessionID);
if (emittedAt && Date.now() - emittedAt < DEDUP_WINDOW_MS) {
recentRealIdles.delete(sessionID)
return
recentRealIdles.delete(sessionID);
return;
}
recentSyntheticIdles.set(sessionID, Date.now())
await dispatchToHooks(syntheticIdle as EventInput)
recentSyntheticIdles.set(sessionID, Date.now());
await dispatchToHooks(syntheticIdle as EventInput);
}
const { event } = input
const props = event.properties as Record<string, unknown> | undefined
const { event } = input;
const props = event.properties as Record<string, unknown> | undefined;
if (event.type === "session.created") {
const sessionInfo = props?.info as
| { id?: string; title?: string; parentID?: string }
| undefined
const sessionInfo = props?.info as { id?: string; title?: string; parentID?: string } | undefined;
if (!sessionInfo?.parentID) {
setMainSession(sessionInfo?.id)
setMainSession(sessionInfo?.id);
}
firstMessageVariantGate.markSessionCreated(sessionInfo)
firstMessageVariantGate.markSessionCreated(sessionInfo);
await managers.tmuxSessionManager.onSessionCreated(
event as {
type: string
type: string;
properties?: {
info?: { id?: string; parentID?: string; title?: string }
}
info?: { id?: string; parentID?: string; title?: string };
};
},
)
);
}
if (event.type === "session.deleted") {
const sessionInfo = props?.info as { id?: string } | undefined
const sessionInfo = props?.info as { id?: string } | undefined;
if (sessionInfo?.id === getMainSessionID()) {
setMainSession(undefined)
setMainSession(undefined);
}
if (sessionInfo?.id) {
clearSessionAgent(sessionInfo.id)
lastHandledModelErrorMessageID.delete(sessionInfo.id)
lastHandledRetryStatusKey.delete(sessionInfo.id)
lastKnownModelBySession.delete(sessionInfo.id)
clearPendingModelFallback(sessionInfo.id)
clearSessionFallbackChain(sessionInfo.id)
resetMessageCursor(sessionInfo.id)
firstMessageVariantGate.clear(sessionInfo.id)
clearSessionModel(sessionInfo.id)
syncSubagentSessions.delete(sessionInfo.id)
await managers.skillMcpManager.disconnectSession(sessionInfo.id)
await lspManager.cleanupTempDirectoryClients()
clearSessionAgent(sessionInfo.id);
lastHandledModelErrorMessageID.delete(sessionInfo.id);
lastHandledRetryStatusKey.delete(sessionInfo.id);
lastKnownModelBySession.delete(sessionInfo.id);
clearPendingModelFallback(sessionInfo.id);
clearSessionFallbackChain(sessionInfo.id);
resetMessageCursor(sessionInfo.id);
firstMessageVariantGate.clear(sessionInfo.id);
clearSessionModel(sessionInfo.id);
syncSubagentSessions.delete(sessionInfo.id);
deleteSessionTools(sessionInfo.id);
await managers.skillMcpManager.disconnectSession(sessionInfo.id);
await lspManager.cleanupTempDirectoryClients();
await managers.tmuxSessionManager.onSessionDeleted({
sessionID: sessionInfo.id,
})
});
}
}
if (event.type === "message.updated") {
const info = props?.info as Record<string, unknown> | undefined
const sessionID = info?.sessionID as string | undefined
const agent = info?.agent as string | undefined
const role = info?.role as string | undefined
const info = props?.info as Record<string, unknown> | undefined;
const sessionID = info?.sessionID as string | undefined;
const agent = info?.agent as string | undefined;
const role = info?.role as string | undefined;
if (sessionID && role === "user") {
if (agent) {
updateSessionAgent(sessionID, agent)
updateSessionAgent(sessionID, agent);
}
const providerID = info?.providerID as string | undefined
const modelID = info?.modelID as string | undefined
const providerID = info?.providerID as string | undefined;
const modelID = info?.modelID as string | undefined;
if (providerID && modelID) {
lastKnownModelBySession.set(sessionID, { providerID, modelID })
setSessionModel(sessionID, { providerID, modelID })
lastKnownModelBySession.set(sessionID, { providerID, modelID });
setSessionModel(sessionID, { providerID, modelID });
}
}
@@ -271,132 +273,128 @@ export function createEventHandler(args: {
// session.error events are not guaranteed for all providers, so we also observe message.updated.
if (sessionID && role === "assistant" && !isRuntimeFallbackEnabled) {
try {
const assistantMessageID = info?.id as string | undefined
const assistantError = info?.error
const assistantMessageID = info?.id as string | undefined;
const assistantError = info?.error;
if (assistantMessageID && assistantError) {
const lastHandled = lastHandledModelErrorMessageID.get(sessionID)
const lastHandled = lastHandledModelErrorMessageID.get(sessionID);
if (lastHandled === assistantMessageID) {
return
return;
}
const errorName = extractErrorName(assistantError)
const errorMessage = extractErrorMessage(assistantError)
const errorInfo = { name: errorName, message: errorMessage }
const errorName = extractErrorName(assistantError);
const errorMessage = extractErrorMessage(assistantError);
const errorInfo = { name: errorName, message: errorMessage };
if (shouldRetryError(errorInfo)) {
// Prefer the agent/model/provider from the assistant message payload.
let agentName = agent ?? getSessionAgent(sessionID)
let agentName = agent ?? getSessionAgent(sessionID);
if (!agentName && sessionID === getMainSessionID()) {
if (errorMessage.includes("claude-opus") || errorMessage.includes("opus")) {
agentName = "sisyphus"
agentName = "sisyphus";
} else if (errorMessage.includes("gpt-5")) {
agentName = "hephaestus"
agentName = "hephaestus";
} else {
agentName = "sisyphus"
agentName = "sisyphus";
}
}
if (agentName) {
const currentProvider = (info?.providerID as string | undefined) ?? "opencode"
const rawModel = (info?.modelID as string | undefined) ?? "claude-opus-4-6"
const currentModel = normalizeFallbackModelID(rawModel)
const currentProvider = (info?.providerID as string | undefined) ?? "opencode";
const rawModel = (info?.modelID as string | undefined) ?? "claude-opus-4-6";
const currentModel = normalizeFallbackModelID(rawModel);
const setFallback = setPendingModelFallback(
sessionID,
agentName,
currentProvider,
currentModel,
)
const setFallback = setPendingModelFallback(sessionID, agentName, currentProvider, currentModel);
if (setFallback && shouldAutoRetrySession(sessionID) && !hooks.stopContinuationGuard?.isStopped(sessionID)) {
lastHandledModelErrorMessageID.set(sessionID, assistantMessageID)
if (
setFallback &&
shouldAutoRetrySession(sessionID) &&
!hooks.stopContinuationGuard?.isStopped(sessionID)
) {
lastHandledModelErrorMessageID.set(sessionID, assistantMessageID);
await pluginContext.client.session.abort({ path: { id: sessionID } }).catch(() => {})
await pluginContext.client.session.abort({ path: { id: sessionID } }).catch(() => {});
await pluginContext.client.session
.prompt({
path: { id: sessionID },
body: { parts: [{ type: "text", text: "continue" }] },
query: { directory: pluginContext.directory },
})
.catch(() => {})
.catch(() => {});
}
}
}
}
} catch (err) {
log("[event] model-fallback error in message.updated:", { sessionID, error: err })
log("[event] model-fallback error in message.updated:", { sessionID, error: err });
}
}
}
if (event.type === "session.status") {
const sessionID = props?.sessionID as string | undefined
const status = props?.status as
| { type?: string; attempt?: number; message?: string; next?: number }
| undefined
const sessionID = props?.sessionID as string | undefined;
const status = props?.status as { type?: string; attempt?: number; message?: string; next?: number } | undefined;
if (sessionID && status?.type === "retry") {
try {
const retryMessage = typeof status.message === "string" ? status.message : ""
const retryKey = `${status.attempt ?? "?"}:${status.next ?? "?"}:${retryMessage}`
const retryMessage = typeof status.message === "string" ? status.message : "";
const retryKey = `${status.attempt ?? "?"}:${status.next ?? "?"}:${retryMessage}`;
if (lastHandledRetryStatusKey.get(sessionID) === retryKey) {
return
return;
}
lastHandledRetryStatusKey.set(sessionID, retryKey)
lastHandledRetryStatusKey.set(sessionID, retryKey);
const errorInfo = { name: undefined as string | undefined, message: retryMessage }
const errorInfo = { name: undefined as string | undefined, message: retryMessage };
if (shouldRetryError(errorInfo)) {
let agentName = getSessionAgent(sessionID)
let agentName = getSessionAgent(sessionID);
if (!agentName && sessionID === getMainSessionID()) {
if (retryMessage.includes("claude-opus") || retryMessage.includes("opus")) {
agentName = "sisyphus"
agentName = "sisyphus";
} else if (retryMessage.includes("gpt-5")) {
agentName = "hephaestus"
agentName = "hephaestus";
} else {
agentName = "sisyphus"
agentName = "sisyphus";
}
}
if (agentName) {
const parsed = extractProviderModelFromErrorMessage(retryMessage)
const lastKnown = lastKnownModelBySession.get(sessionID)
const currentProvider = parsed.providerID ?? lastKnown?.providerID ?? "opencode"
let currentModel = parsed.modelID ?? lastKnown?.modelID ?? "claude-opus-4-6"
currentModel = normalizeFallbackModelID(currentModel)
const parsed = extractProviderModelFromErrorMessage(retryMessage);
const lastKnown = lastKnownModelBySession.get(sessionID);
const currentProvider = parsed.providerID ?? lastKnown?.providerID ?? "opencode";
let currentModel = parsed.modelID ?? lastKnown?.modelID ?? "claude-opus-4-6";
currentModel = normalizeFallbackModelID(currentModel);
const setFallback = setPendingModelFallback(
sessionID,
agentName,
currentProvider,
currentModel,
)
const setFallback = setPendingModelFallback(sessionID, agentName, currentProvider, currentModel);
if (setFallback && shouldAutoRetrySession(sessionID) && !hooks.stopContinuationGuard?.isStopped(sessionID)) {
await pluginContext.client.session.abort({ path: { id: sessionID } }).catch(() => {})
if (
setFallback &&
shouldAutoRetrySession(sessionID) &&
!hooks.stopContinuationGuard?.isStopped(sessionID)
) {
await pluginContext.client.session.abort({ path: { id: sessionID } }).catch(() => {});
await pluginContext.client.session
.prompt({
path: { id: sessionID },
body: { parts: [{ type: "text", text: "continue" }] },
query: { directory: pluginContext.directory },
})
.catch(() => {})
.catch(() => {});
}
}
}
} catch (err) {
log("[event] model-fallback error in session.status:", { sessionID, error: err })
log("[event] model-fallback error in session.status:", { sessionID, error: err });
}
}
}
if (event.type === "session.error") {
try {
const sessionID = props?.sessionID as string | undefined
const error = props?.error
const sessionID = props?.sessionID as string | undefined;
const error = props?.error;
const errorName = extractErrorName(error)
const errorMessage = extractErrorMessage(error)
const errorInfo = { name: errorName, message: errorMessage }
const errorName = extractErrorName(error);
const errorMessage = extractErrorMessage(error);
const errorInfo = { name: errorName, message: errorMessage };
// First, try session recovery for internal errors (thinking blocks, tool results, etc.)
if (hooks.sessionRecovery?.isRecoverableError(error)) {
@@ -405,8 +403,8 @@ export function createEventHandler(args: {
role: "assistant" as const,
sessionID,
error,
}
const recovered = await hooks.sessionRecovery.handleSessionRecovery(messageInfo)
};
const recovered = await hooks.sessionRecovery.handleSessionRecovery(messageInfo);
if (
recovered &&
@@ -420,53 +418,52 @@ export function createEventHandler(args: {
body: { parts: [{ type: "text", text: "continue" }] },
query: { directory: pluginContext.directory },
})
.catch(() => {})
.catch(() => {});
}
}
}
// Second, try model fallback for model errors (rate limit, quota, provider issues, etc.)
else if (sessionID && shouldRetryError(errorInfo) && !isRuntimeFallbackEnabled) {
let agentName = getSessionAgent(sessionID)
let agentName = getSessionAgent(sessionID);
if (!agentName && sessionID === getMainSessionID()) {
if (errorMessage.includes("claude-opus") || errorMessage.includes("opus")) {
agentName = "sisyphus"
agentName = "sisyphus";
} else if (errorMessage.includes("gpt-5")) {
agentName = "hephaestus"
agentName = "hephaestus";
} else {
agentName = "sisyphus"
agentName = "sisyphus";
}
}
if (agentName) {
const parsed = extractProviderModelFromErrorMessage(errorMessage)
const currentProvider = props?.providerID as string || parsed.providerID || "opencode"
let currentModel = props?.modelID as string || parsed.modelID || "claude-opus-4-6"
currentModel = normalizeFallbackModelID(currentModel)
const setFallback = setPendingModelFallback(
sessionID,
agentName,
currentProvider,
currentModel,
)
if (setFallback && shouldAutoRetrySession(sessionID) && !hooks.stopContinuationGuard?.isStopped(sessionID)) {
await pluginContext.client.session.abort({ path: { id: sessionID } }).catch(() => {})
await pluginContext.client.session
.prompt({
path: { id: sessionID },
body: { parts: [{ type: "text", text: "continue" }] },
query: { directory: pluginContext.directory },
})
.catch(() => {})
if (agentName) {
const parsed = extractProviderModelFromErrorMessage(errorMessage);
const currentProvider = (props?.providerID as string) || parsed.providerID || "opencode";
let currentModel = (props?.modelID as string) || parsed.modelID || "claude-opus-4-6";
currentModel = normalizeFallbackModelID(currentModel);
const setFallback = setPendingModelFallback(sessionID, agentName, currentProvider, currentModel);
if (
setFallback &&
shouldAutoRetrySession(sessionID) &&
!hooks.stopContinuationGuard?.isStopped(sessionID)
) {
await pluginContext.client.session.abort({ path: { id: sessionID } }).catch(() => {});
await pluginContext.client.session
.prompt({
path: { id: sessionID },
body: { parts: [{ type: "text", text: "continue" }] },
query: { directory: pluginContext.directory },
})
.catch(() => {});
}
}
}
} catch (err) {
const sessionID = props?.sessionID as string | undefined
log("[event] model-fallback error in session.error:", { sessionID, error: err })
const sessionID = props?.sessionID as string | undefined;
log("[event] model-fallback error in session.error:", { sessionID, error: err });
}
}
}
};
}

View File

@@ -1,78 +1,129 @@
import { spawn } from "node:child_process"
import { getHomeDirectory } from "./home-directory"
import { findBashPath, findZshPath } from "./shell-path"
import { spawn } from "node:child_process";
import { getHomeDirectory } from "./home-directory";
import { findBashPath, findZshPath } from "./shell-path";
/** Result of running a hook command: shell exit code plus trimmed output streams. */
export interface CommandResult {
  /** Process exit code; 1 when the process was signal-killed or failed to spawn. */
  exitCode: number;
  stdout?: string;
  stderr?: string;
}
/** Default hook-command timeout applied when options.timeoutMs is not given. */
const DEFAULT_HOOK_TIMEOUT_MS = 30_000;
/** Grace period after SIGTERM before escalating to SIGKILL. */
const SIGKILL_GRACE_MS = 5_000;

/** Options controlling shell selection and timeout for executeHookCommand. */
export interface ExecuteHookOptions {
  /** Run the command through a zsh login shell (bash -lc as fallback). */
  forceZsh?: boolean;
  /** Explicit zsh binary path to prefer when forceZsh is set. */
  zshPath?: string;
  /** Timeout in milliseconds. Process is killed after this. Default: 30000 */
  timeoutMs?: number;
}
/**
 * Runs a hook command through the system shell and captures its output.
 *
 * - Expands `~` and both `$CLAUDE_PROJECT_DIR` spellings in the command.
 * - With options.forceZsh, wraps the command in `zsh -lc` (or `bash -lc`
 *   when zsh is unavailable).
 * - Enforces a timeout (default 30s): SIGTERM first, SIGKILL after a 5s
 *   grace period. On non-Windows the child is spawned detached in its own
 *   process group so the whole tree is killed, not just the shell wrapper.
 *
 * Never rejects: spawn errors resolve with exitCode 1 and the error message
 * in stderr; a null exit code (signal-terminated) is reported as exitCode 1.
 *
 * @param command Shell command to run.
 * @param stdin   Data written to the child's stdin before it is closed.
 * @param cwd     Working directory; also exported as CLAUDE_PROJECT_DIR.
 * @param options Optional shell/timeout configuration.
 */
export async function executeHookCommand(
  command: string,
  stdin: string,
  cwd: string,
  options?: ExecuteHookOptions,
): Promise<CommandResult> {
  const home = getHomeDirectory();
  const timeoutMs = options?.timeoutMs ?? DEFAULT_HOOK_TIMEOUT_MS;

  // Expand leading / space-separated `~` and the project-dir placeholders.
  const expandedCommand = command
    .replace(/^~(?=\/|$)/g, home)
    .replace(/\s~(?=\/)/g, ` ${home}`)
    .replace(/\$CLAUDE_PROJECT_DIR/g, cwd)
    .replace(/\$\{CLAUDE_PROJECT_DIR\}/g, cwd);

  let finalCommand = expandedCommand;
  if (options?.forceZsh) {
    const zshPath = findZshPath(options.zshPath);
    // Single-quote the command for -lc; escape embedded single quotes.
    const escapedCommand = expandedCommand.replace(/'/g, "'\\''");
    if (zshPath) {
      finalCommand = `${zshPath} -lc '${escapedCommand}'`;
    } else {
      const bashPath = findBashPath();
      if (bashPath) {
        finalCommand = `${bashPath} -lc '${escapedCommand}'`;
      }
    }
  }

  return new Promise(resolve => {
    let settled = false;
    let killTimer: ReturnType<typeof setTimeout> | null = null;
    // Declared up front so `settle` (defined below) can clear it safely.
    let timeoutTimer: ReturnType<typeof setTimeout> | null = null;
    const isWin32 = process.platform === "win32";

    // detached (non-Windows) puts the child in its own process group so the
    // timeout can kill the entire tree, not just the outer shell wrapper.
    const proc = spawn(finalCommand, {
      cwd,
      shell: true,
      detached: !isWin32,
      env: { ...process.env, HOME: home, CLAUDE_PROJECT_DIR: cwd },
    });

    let stdout = "";
    let stderr = "";

    proc.stdout?.on("data", (data: Buffer) => {
      stdout += data.toString();
    });

    proc.stderr?.on("data", (data: Buffer) => {
      stderr += data.toString();
    });

    // Absorb EPIPE when the child exits before the stdin write completes.
    proc.stdin?.on("error", () => {});
    proc.stdin?.write(stdin);
    proc.stdin?.end();

    // Resolve exactly once and cancel any pending timers.
    const settle = (result: CommandResult) => {
      if (settled) return;
      settled = true;
      if (killTimer) clearTimeout(killTimer);
      if (timeoutTimer) clearTimeout(timeoutTimer);
      resolve(result);
    };

    proc.on("close", code => {
      settle({
        // code is null when the process was terminated by a signal; report failure.
        exitCode: code ?? 1,
        stdout: stdout.trim(),
        stderr: stderr.trim(),
      });
    });

    proc.on("error", err => {
      settle({ exitCode: 1, stderr: err.message });
    });

    // Kill the whole process group where possible, falling back to the child
    // itself; swallow EPERM/ESRCH from already-dead processes.
    const killProcessGroup = (signal: NodeJS.Signals) => {
      try {
        if (!isWin32 && proc.pid) {
          try {
            process.kill(-proc.pid, signal);
          } catch {
            proc.kill(signal);
          }
        } else {
          proc.kill(signal);
        }
      } catch {}
    };

    timeoutTimer = setTimeout(() => {
      if (settled) return;
      // Kill entire process group to avoid orphaned children
      killProcessGroup("SIGTERM");
      killTimer = setTimeout(() => {
        if (settled) return;
        killProcessGroup("SIGKILL");
      }, SIGKILL_GRACE_MS);
      // Mirror the unref below so the escalation timer cannot keep the host alive either.
      if (killTimer && typeof killTimer === "object" && "unref" in killTimer) {
        killTimer.unref();
      }
      // Append timeout notice to stderr
      stderr += `\nHook command timed out after ${timeoutMs}ms`;
    }, timeoutMs);

    // Don't let the timeout timer keep the process alive
    if (timeoutTimer && typeof timeoutTimer === "object" && "unref" in timeoutTimer) {
      timeoutTimer.unref();
    }
  });
}

View File

@@ -1,14 +1,18 @@
const store = new Map<string, Record<string, boolean>>()
const store = new Map<string, Record<string, boolean>>();
export function setSessionTools(sessionID: string, tools: Record<string, boolean>): void {
store.set(sessionID, { ...tools })
store.set(sessionID, { ...tools });
}
export function getSessionTools(sessionID: string): Record<string, boolean> | undefined {
const tools = store.get(sessionID)
return tools ? { ...tools } : undefined
const tools = store.get(sessionID);
return tools ? { ...tools } : undefined;
}
export function deleteSessionTools(sessionID: string): void {
store.delete(sessionID);
}
export function clearSessionTools(): void {
store.clear()
store.clear();
}

View File

@@ -0,0 +1,175 @@
// Shared regexes, hoisted so the strip helpers do not rebuild them on each call.
const ANY_WHITESPACE = /\s+/g
// One trailing continuation operator (&&, ||, ??, ?, :, =, ",", +, -, *, /, ., "(") plus any trailing spaces.
const TRAILING_CONTINUATION = /(?:&&|\|\||\?\?|\?|:|=|,|\+|-|\*|\/|\.|\()\s*$/u
const MERGE_OPERATORS = /[|&?]/g
const LEADING_WHITESPACE = /^\s*/

/** Removes every whitespace character, producing a canonical comparison form. */
function normalizeTokens(text: string): string {
  return text.replace(ANY_WHITESPACE, "")
}

/** Alias of normalizeTokens kept for readability at call sites. */
function stripAllWhitespace(text: string): string {
  return normalizeTokens(text)
}

/** Drops a single trailing continuation token (e.g. `&&`, `+`, `(`) from a line. */
export function stripTrailingContinuationTokens(text: string): string {
  return text.replace(TRAILING_CONTINUATION, "")
}

/** Removes merge-operator noise characters (`|`, `&`, `?`) anywhere in the text. */
export function stripMergeOperatorChars(text: string): string {
  return text.replace(MERGE_OPERATORS, "")
}

/** Returns the run of leading whitespace (possibly empty). */
function leadingWhitespace(text: string): string {
  return LEADING_WHITESPACE.exec(text)?.[0] ?? ""
}
/**
 * Collapses replacement spans that are whitespace-rewrapped copies of a single
 * original line back into that original line.
 *
 * A "candidate" is a run of 2-10 replacement lines whose concatenation, with
 * all whitespace removed, exactly matches one original line's whitespace-free
 * form. A candidate is applied only when unambiguous: the canonical text must
 * occur exactly once among the original lines, be at least 6 characters long,
 * and match exactly one span in the replacement.
 */
export function restoreOldWrappedLines(originalLines: string[], replacementLines: string[]): string[] {
  // Nothing to collapse when there is no original or the replacement is a single line.
  if (originalLines.length === 0 || replacementLines.length < 2) return replacementLines
  // Map whitespace-free original line -> { line, count } so duplicates can be rejected.
  const canonicalToOriginal = new Map<string, { line: string; count: number }>()
  for (const line of originalLines) {
    const canonical = stripAllWhitespace(line)
    const existing = canonicalToOriginal.get(canonical)
    if (existing) {
      existing.count += 1
    } else {
      canonicalToOriginal.set(canonical, { line, count: 1 })
    }
  }
  // Collect every replacement span (length 2-10) whose canonical form matches a
  // unique original line; the 6-char floor avoids collapsing trivial fragments.
  const candidates: { start: number; len: number; replacement: string; canonical: string }[] = []
  for (let start = 0; start < replacementLines.length; start += 1) {
    for (let len = 2; len <= 10 && start + len <= replacementLines.length; len += 1) {
      const canonicalSpan = stripAllWhitespace(replacementLines.slice(start, start + len).join(""))
      const original = canonicalToOriginal.get(canonicalSpan)
      if (original && original.count === 1 && canonicalSpan.length >= 6) {
        candidates.push({ start, len, replacement: original.line, canonical: canonicalSpan })
      }
    }
  }
  if (candidates.length === 0) return replacementLines
  // Reject canonical forms that matched more than one replacement span (ambiguous).
  const canonicalCounts = new Map<string, number>()
  for (const candidate of candidates) {
    canonicalCounts.set(candidate.canonical, (canonicalCounts.get(candidate.canonical) ?? 0) + 1)
  }
  const uniqueCandidates = candidates.filter((candidate) => (canonicalCounts.get(candidate.canonical) ?? 0) === 1)
  if (uniqueCandidates.length === 0) return replacementLines
  // Splice from the highest start downward so earlier candidates' indices stay valid.
  // NOTE(review): distinct candidates with overlapping spans are not excluded here;
  // confirm overlap cannot survive the uniqueness filters, or a later splice could
  // consume lines an earlier candidate still expects.
  uniqueCandidates.sort((a, b) => b.start - a.start)
  const correctedLines = [...replacementLines]
  for (const candidate of uniqueCandidates) {
    correctedLines.splice(candidate.start, candidate.len, candidate.replacement)
  }
  return correctedLines
}
/**
 * Undoes an accidental merge where a multi-line span was replaced by a single
 * line containing all the original parts joined together.
 *
 * Strategies, in order:
 * 1. Ordered substring matching: locate each trimmed original line inside the
 *    merged line (left to right), with fallbacks for a stripped trailing
 *    continuation token and for merge-operator noise, then slice the merged
 *    line at the match starts.
 * 2. Splitting on `;` followed by whitespace.
 * Returns the replacement unchanged when neither strategy reproduces the
 * original line count.
 */
export function maybeExpandSingleLineMerge(
  originalLines: string[],
  replacementLines: string[]
): string[] {
  // Only applies when several original lines collapsed into exactly one replacement line.
  if (replacementLines.length !== 1 || originalLines.length <= 1) {
    return replacementLines
  }
  const merged = replacementLines[0]
  // Bail out if any original line is blank: blank parts cannot be located in the merge.
  const parts = originalLines.map((line) => line.trim()).filter((line) => line.length > 0)
  if (parts.length !== originalLines.length) return replacementLines
  const indices: number[] = []
  let offset = 0
  let orderedMatch = true
  for (const part of parts) {
    // Strategy 1a: exact substring match at/after the previous match position.
    let idx = merged.indexOf(part, offset)
    // matchedLen tracks how many characters the match actually consumed, which
    // may be shorter than `part` when a trailing continuation token was stripped.
    let matchedLen = part.length
    if (idx === -1) {
      // Strategy 1b: retry without a trailing continuation token (&&, +, etc.)
      // that may have been absorbed during the merge.
      const stripped = stripTrailingContinuationTokens(part)
      if (stripped !== part) {
        idx = merged.indexOf(stripped, offset)
        if (idx !== -1) matchedLen = stripped.length
      }
    }
    if (idx === -1) {
      // Strategy 1c: fuzzy match ignoring merge-operator chars (| & ?), then map
      // the fuzzy index back to a position in the unstripped segment by walking
      // characters and skipping operator chars.
      const segment = merged.slice(offset)
      const segmentStripped = stripMergeOperatorChars(segment)
      const partStripped = stripMergeOperatorChars(part)
      const fuzzyIdx = segmentStripped.indexOf(partStripped)
      if (fuzzyIdx !== -1) {
        let strippedPos = 0
        let originalPos = 0
        while (strippedPos < fuzzyIdx && originalPos < segment.length) {
          if (!/[|&?]/.test(segment[originalPos])) strippedPos += 1
          originalPos += 1
        }
        idx = offset + originalPos
        matchedLen = part.length
      }
    }
    if (idx === -1) {
      orderedMatch = false
      break
    }
    indices.push(idx)
    offset = idx + matchedLen
  }
  // Slice the merged line at each match start; the last slice runs to the end.
  const expanded: string[] = []
  if (orderedMatch) {
    for (let i = 0; i < indices.length; i += 1) {
      const start = indices[i]
      const end = i + 1 < indices.length ? indices[i + 1] : merged.length
      const candidate = merged.slice(start, end).trim()
      if (candidate.length === 0) {
        orderedMatch = false
        break
      }
      expanded.push(candidate)
    }
  }
  if (orderedMatch && expanded.length === originalLines.length) {
    return expanded
  }
  // Strategy 2: split on `;` + whitespace, re-appending the `;` each split consumed.
  const semicolonSplit = merged
    .split(/;\s+/)
    .map((line, idx, arr) => {
      if (idx < arr.length - 1 && !line.endsWith(";")) {
        return `${line};`
      }
      return line
    })
    .map((line) => line.trim())
    .filter((line) => line.length > 0)
  if (semicolonSplit.length === originalLines.length) {
    return semicolonSplit
  }
  return replacementLines
}
/**
 * When a replacement has the same number of lines as the span it replaces,
 * copies each original line's leading indentation onto replacement lines that
 * arrived with none. Empty lines and lines that already carry indentation are
 * left untouched; mismatched lengths return the replacement unchanged.
 */
export function restoreIndentForPairedReplacement(
  originalLines: string[],
  replacementLines: string[]
): string[] {
  if (originalLines.length !== replacementLines.length) {
    return replacementLines
  }
  const indentOf = (text: string): string => /^\s*/.exec(text)?.[0] ?? ""
  return replacementLines.map((line, idx) => {
    // Skip empty lines and lines that already start with whitespace.
    if (line.length === 0 || indentOf(line).length > 0) return line
    const indent = indentOf(originalLines[idx])
    return indent.length > 0 ? `${indent}${line}` : line
  })
}
/**
 * Runs the full replacement-autocorrection pipeline against the original span.
 * Order matters: merge expansion first, then wrapped-line restoration, then
 * indent repair (which relies on matching line counts).
 */
export function autocorrectReplacementLines(
  originalLines: string[],
  replacementLines: string[]
): string[] {
  const passes = [
    maybeExpandSingleLineMerge,
    restoreOldWrappedLines,
    restoreIndentForPairedReplacement,
  ]
  return passes.reduce((current, pass) => pass(originalLines, current), replacementLines)
}

View File

@@ -0,0 +1,47 @@
import type { HashlineEdit } from "./types"
import { toNewLines } from "./edit-text-normalization"
/** Canonical newline-joined form of an edit payload, used for stable keying. */
function normalizeEditPayload(payload: string | string[]): string {
  return toNewLines(payload).join("\n")
}

/**
 * Builds a stable identity key for an edit so exact duplicates can be dropped.
 * Keys have the shape `type|anchor(s)|payload`; unknown edit types fall back
 * to their JSON representation.
 */
function buildDedupeKey(edit: HashlineEdit): string {
  const join = (...fields: string[]): string => fields.join("|")
  switch (edit.type) {
    case "set_line":
      return join("set_line", edit.line, normalizeEditPayload(edit.text))
    case "replace_lines":
      return join("replace_lines", edit.start_line, edit.end_line, normalizeEditPayload(edit.text))
    case "insert_after":
      return join("insert_after", edit.line, normalizeEditPayload(edit.text))
    case "insert_before":
      return join("insert_before", edit.line, normalizeEditPayload(edit.text))
    case "insert_between":
      return join("insert_between", edit.after_line, edit.before_line, normalizeEditPayload(edit.text))
    case "replace":
      return join("replace", edit.old_text, normalizeEditPayload(edit.new_text))
    case "append":
      return join("append", normalizeEditPayload(edit.text))
    case "prepend":
      return join("prepend", normalizeEditPayload(edit.text))
    default:
      return JSON.stringify(edit)
  }
}
/**
 * Drops edits whose dedupe key has already been seen, preserving first-seen
 * order, and reports how many were removed.
 */
export function dedupeEdits(edits: HashlineEdit[]): { edits: HashlineEdit[]; deduplicatedEdits: number } {
  const seen = new Set<string>()
  const unique = edits.filter((edit) => {
    const key = buildDedupeKey(edit)
    if (seen.has(key)) return false
    seen.add(key)
    return true
  })
  return { edits: unique, deduplicatedEdits: edits.length - unique.length }
}

View File

@@ -0,0 +1,160 @@
import { autocorrectReplacementLines } from "./autocorrect-replacement-lines"
import {
restoreLeadingIndent,
stripInsertAnchorEcho,
stripInsertBeforeEcho,
stripInsertBoundaryEcho,
stripRangeBoundaryEcho,
toNewLines,
} from "./edit-text-normalization"
import { parseLineRef, validateLineRef } from "./validation"
/** Options accepted by the primitive edit appliers. */
interface EditApplyOptions {
  // When true, per-edit anchor validation is skipped (caller batch-validated already).
  skipValidation?: boolean
}
/** Validation runs unless the caller explicitly opted out. */
function shouldValidate(options?: EditApplyOptions): boolean {
  return (options?.skipValidation ?? false) === false
}
/**
 * Replaces the single line addressed by `anchor` with `newText`, which may
 * expand to several lines after autocorrection.
 * @throws when validation is enabled and the anchor's hash does not match.
 */
export function applySetLine(
  lines: string[],
  anchor: string,
  newText: string | string[],
  options?: EditApplyOptions
): string[] {
  // Validate the anchor hash unless the caller already batch-validated.
  if (shouldValidate(options)) validateLineRef(lines, anchor)
  const { line } = parseLineRef(anchor)
  const result = [...lines]
  const originalLine = lines[line - 1] ?? ""
  // Repair merged/wrapped/unindented replacement text against the original line.
  const corrected = autocorrectReplacementLines([originalLine], toNewLines(newText))
  // Reapply the original line's leading indent to the first replacement line only.
  const replacement = corrected.map((entry, idx) => {
    if (idx !== 0) return entry
    return restoreLeadingIndent(originalLine, entry)
  })
  result.splice(line - 1, 1, ...replacement)
  return result
}
/**
 * Replaces the inclusive 1-based line range [startAnchor, endAnchor] with
 * `newText`, applying boundary-echo stripping and autocorrection first.
 * @throws when the range is inverted, or when validation is enabled and an
 *         anchor's hash does not match.
 */
export function applyReplaceLines(
  lines: string[],
  startAnchor: string,
  endAnchor: string,
  newText: string | string[],
  options?: EditApplyOptions
): string[] {
  if (shouldValidate(options)) {
    validateLineRef(lines, startAnchor)
    validateLineRef(lines, endAnchor)
  }
  const { line: startLine } = parseLineRef(startAnchor)
  const { line: endLine } = parseLineRef(endAnchor)
  if (startLine > endLine) {
    throw new Error(
      `Invalid range: start line ${startLine} cannot be greater than end line ${endLine}`
    )
  }
  const result = [...lines]
  const originalRange = lines.slice(startLine - 1, endLine)
  // Drop replacement lines that merely echo the lines just outside the range.
  const stripped = stripRangeBoundaryEcho(lines, startLine, endLine, toNewLines(newText))
  // Repair merged/wrapped/unindented replacement lines against the original range.
  const corrected = autocorrectReplacementLines(originalRange, stripped)
  // Restore the first line's leading indent from the first original line.
  const restored = corrected.map((entry, idx) => {
    if (idx !== 0) return entry
    return restoreLeadingIndent(lines[startLine - 1], entry)
  })
  result.splice(startLine - 1, endLine - startLine + 1, ...restored)
  return result
}
export function applyInsertAfter(
lines: string[],
anchor: string,
text: string | string[],
options?: EditApplyOptions
): string[] {
if (shouldValidate(options)) validateLineRef(lines, anchor)
const { line } = parseLineRef(anchor)
const result = [...lines]
const newLines = stripInsertAnchorEcho(lines[line - 1], toNewLines(text))
if (newLines.length === 0) {
throw new Error(`insert_after requires non-empty text for ${anchor}`)
}
result.splice(line, 0, ...newLines)
return result
}
export function applyInsertBefore(
lines: string[],
anchor: string,
text: string | string[],
options?: EditApplyOptions
): string[] {
if (shouldValidate(options)) validateLineRef(lines, anchor)
const { line } = parseLineRef(anchor)
const result = [...lines]
const newLines = stripInsertBeforeEcho(lines[line - 1], toNewLines(text))
if (newLines.length === 0) {
throw new Error(`insert_before requires non-empty text for ${anchor}`)
}
result.splice(line - 1, 0, ...newLines)
return result
}
/**
 * Inserts `text` strictly between two anchored lines (after `afterAnchor`,
 * before `beforeAnchor`). Lines that merely echo either boundary are stripped.
 * @throws when the anchors are not in increasing order, when stripping leaves
 *         nothing to insert, or (with validation on) when a hash mismatches.
 */
export function applyInsertBetween(
  lines: string[],
  afterAnchor: string,
  beforeAnchor: string,
  text: string | string[],
  options?: EditApplyOptions
): string[] {
  if (shouldValidate(options)) {
    validateLineRef(lines, afterAnchor)
    validateLineRef(lines, beforeAnchor)
  }
  const { line: afterLine } = parseLineRef(afterAnchor)
  const { line: beforeLine } = parseLineRef(beforeAnchor)
  if (beforeLine <= afterLine) {
    throw new Error(`insert_between requires after_line (${afterLine}) < before_line (${beforeLine})`)
  }
  const result = [...lines]
  // Drop leading/trailing replacement lines that echo the boundary lines.
  const newLines = stripInsertBoundaryEcho(lines[afterLine - 1], lines[beforeLine - 1], toNewLines(text))
  if (newLines.length === 0) {
    throw new Error(`insert_between requires non-empty text for ${afterAnchor}..${beforeAnchor}`)
  }
  // Insert just before the "before" anchor; anything between the anchors stays.
  result.splice(beforeLine - 1, 0, ...newLines)
  return result
}
/**
 * Appends `text` lines at the end of the file. A file represented as a single
 * empty line (the empty-file form) is replaced outright so no leading blank
 * line survives.
 * @throws when the normalized text is empty.
 */
export function applyAppend(lines: string[], text: string | string[]): string[] {
  const added = toNewLines(text)
  if (added.length === 0) {
    throw new Error("append requires non-empty text")
  }
  const isEmptyFile = lines.length === 1 && lines[0] === ""
  return isEmptyFile ? [...added] : [...lines, ...added]
}
/**
 * Prepends `text` lines at the start of the file. A file represented as a
 * single empty line (the empty-file form) is replaced outright so no trailing
 * blank line survives.
 * @throws when the normalized text is empty.
 */
export function applyPrepend(lines: string[], text: string | string[]): string[] {
  const added = toNewLines(text)
  if (added.length === 0) {
    throw new Error("prepend requires non-empty text")
  }
  const isEmptyFile = lines.length === 1 && lines[0] === ""
  return isEmptyFile ? [...added] : [...added, ...lines]
}
/**
 * Replaces every occurrence of `oldText` in `content`. Array `newText` is
 * joined with newlines first.
 * @throws when `oldText` does not occur at all.
 */
export function applyReplace(content: string, oldText: string, newText: string | string[]): string {
  const replacement = Array.isArray(newText) ? newText.join("\n") : newText
  if (content.includes(oldText)) {
    return content.replaceAll(oldText, replacement)
  }
  throw new Error(`Text not found: "${oldText}"`)
}

View File

@@ -1,5 +1,6 @@
import { describe, expect, it } from "bun:test"
import { applyHashlineEdits, applyInsertAfter, applyReplace, applyReplaceLines, applySetLine } from "./edit-operations"
import { applyAppend, applyPrepend } from "./edit-operation-primitives"
import { computeLineHash } from "./hash-computation"
import type { HashlineEdit } from "./types"
@@ -246,6 +247,124 @@ describe("hashline edit operations", () => {
const result = applyReplaceLines(lines, anchorFor(lines, 2), anchorFor(lines, 3), ["return 3", "return 4"])
//#then
expect(result).toEqual(["if (x) {", " return 3", "return 4", "}"])
expect(result).toEqual(["if (x) {", " return 3", " return 4", "}"])
})
it("collapses wrapped replacement span back to unique original single line", () => {
//#given
const lines = [
"const request = buildRequest({ method: \"GET\", retries: 3 })",
"const done = true",
]
//#when
const result = applyReplaceLines(
lines,
anchorFor(lines, 1),
anchorFor(lines, 1),
["const request = buildRequest({", "method: \"GET\", retries: 3 })"]
)
//#then
expect(result).toEqual([
"const request = buildRequest({ method: \"GET\", retries: 3 })",
"const done = true",
])
})
it("keeps wrapped replacement when canonical match is not unique in original lines", () => {
//#given
const lines = ["const query = a + b", "const query = a+b", "const done = true"]
//#when
const result = applyReplaceLines(lines, anchorFor(lines, 1), anchorFor(lines, 2), ["const query = a +", "b"])
//#then
expect(result).toEqual(["const query = a +", "b", "const done = true"])
})
it("keeps wrapped replacement when same canonical candidate appears multiple times", () => {
//#given
const lines = ["const expression = alpha + beta + gamma", "const done = true"]
//#when
const result = applyReplaceLines(lines, anchorFor(lines, 1), anchorFor(lines, 1), [
"const expression = alpha +",
"beta + gamma",
"const expression = alpha +",
"beta + gamma",
])
//#then
expect(result).toEqual([
"const expression = alpha +",
"beta + gamma",
"const expression = alpha +",
"beta + gamma",
"const done = true",
])
})
it("keeps wrapped replacement when canonical match is shorter than threshold", () => {
//#given
const lines = ["a + b", "const done = true"]
//#when
const result = applyReplaceLines(lines, anchorFor(lines, 1), anchorFor(lines, 1), ["a +", "b"])
//#then
expect(result).toEqual(["a +", "b", "const done = true"])
})
it("applies append and prepend operations", () => {
//#given
const content = "line 1\nline 2"
//#when
const result = applyHashlineEdits(content, [
{ type: "append", text: ["line 3"] },
{ type: "prepend", text: ["line 0"] },
])
//#then
expect(result).toEqual("line 0\nline 1\nline 2\nline 3")
})
it("appends to empty file without extra blank line", () => {
//#given
const lines = [""]
//#when
const result = applyAppend(lines, ["line1"])
//#then
expect(result).toEqual(["line1"])
})
it("prepends to empty file without extra blank line", () => {
//#given
const lines = [""]
//#when
const result = applyPrepend(lines, ["line1"])
//#then
expect(result).toEqual(["line1"])
})
it("autocorrects single-line merged replacement into original line count", () => {
//#given
const lines = ["const a = 1;", "const b = 2;"]
//#when
const result = applyReplaceLines(
lines,
anchorFor(lines, 1),
anchorFor(lines, 2),
"const a = 10; const b = 20;"
)
//#then
expect(result).toEqual(["const a = 10;", "const b = 20;"])
})
})

View File

@@ -1,13 +1,17 @@
import { parseLineRef, validateLineRef, validateLineRefs } from "./validation"
import { dedupeEdits } from "./edit-deduplication"
import { collectLineRefs, getEditLineNumber } from "./edit-ordering"
import type { HashlineEdit } from "./types"
import {
restoreLeadingIndent,
stripInsertAnchorEcho,
stripInsertBeforeEcho,
stripInsertBoundaryEcho,
stripRangeBoundaryEcho,
toNewLines,
} from "./edit-text-normalization"
applyAppend,
applyInsertAfter,
applyInsertBefore,
applyInsertBetween,
applyPrepend,
applyReplace,
applyReplaceLines,
applySetLine,
} from "./edit-operation-primitives"
import { validateLineRefs } from "./validation"
export interface HashlineApplyReport {
content: string
@@ -15,158 +19,6 @@ export interface HashlineApplyReport {
deduplicatedEdits: number
}
export function applySetLine(lines: string[], anchor: string, newText: string | string[]): string[] {
validateLineRef(lines, anchor)
const { line } = parseLineRef(anchor)
const result = [...lines]
const replacement = toNewLines(newText).map((entry, idx) => {
if (idx !== 0) return entry
return restoreLeadingIndent(lines[line - 1], entry)
})
result.splice(line - 1, 1, ...replacement)
return result
}
export function applyReplaceLines(
lines: string[],
startAnchor: string,
endAnchor: string,
newText: string | string[]
): string[] {
validateLineRef(lines, startAnchor)
validateLineRef(lines, endAnchor)
const { line: startLine } = parseLineRef(startAnchor)
const { line: endLine } = parseLineRef(endAnchor)
if (startLine > endLine) {
throw new Error(
`Invalid range: start line ${startLine} cannot be greater than end line ${endLine}`
)
}
const result = [...lines]
const stripped = stripRangeBoundaryEcho(lines, startLine, endLine, toNewLines(newText))
const restored = stripped.map((entry, idx) => {
if (idx !== 0) return entry
return restoreLeadingIndent(lines[startLine - 1], entry)
})
result.splice(startLine - 1, endLine - startLine + 1, ...restored)
return result
}
export function applyInsertAfter(lines: string[], anchor: string, text: string | string[]): string[] {
validateLineRef(lines, anchor)
const { line } = parseLineRef(anchor)
const result = [...lines]
const newLines = stripInsertAnchorEcho(lines[line - 1], toNewLines(text))
if (newLines.length === 0) {
throw new Error(`insert_after requires non-empty text for ${anchor}`)
}
result.splice(line, 0, ...newLines)
return result
}
export function applyInsertBefore(lines: string[], anchor: string, text: string | string[]): string[] {
validateLineRef(lines, anchor)
const { line } = parseLineRef(anchor)
const result = [...lines]
const newLines = stripInsertBeforeEcho(lines[line - 1], toNewLines(text))
if (newLines.length === 0) {
throw new Error(`insert_before requires non-empty text for ${anchor}`)
}
result.splice(line - 1, 0, ...newLines)
return result
}
export function applyInsertBetween(
lines: string[],
afterAnchor: string,
beforeAnchor: string,
text: string | string[]
): string[] {
validateLineRef(lines, afterAnchor)
validateLineRef(lines, beforeAnchor)
const { line: afterLine } = parseLineRef(afterAnchor)
const { line: beforeLine } = parseLineRef(beforeAnchor)
if (beforeLine <= afterLine) {
throw new Error(`insert_between requires after_line (${afterLine}) < before_line (${beforeLine})`)
}
const result = [...lines]
const newLines = stripInsertBoundaryEcho(lines[afterLine - 1], lines[beforeLine - 1], toNewLines(text))
if (newLines.length === 0) {
throw new Error(`insert_between requires non-empty text for ${afterAnchor}..${beforeAnchor}`)
}
result.splice(beforeLine - 1, 0, ...newLines)
return result
}
export function applyReplace(content: string, oldText: string, newText: string | string[]): string {
if (!content.includes(oldText)) {
throw new Error(`Text not found: "${oldText}"`)
}
const replacement = Array.isArray(newText) ? newText.join("\n") : newText
return content.replaceAll(oldText, replacement)
}
function getEditLineNumber(edit: HashlineEdit): number {
switch (edit.type) {
case "set_line":
return parseLineRef(edit.line).line
case "replace_lines":
return parseLineRef(edit.end_line).line
case "insert_after":
return parseLineRef(edit.line).line
case "insert_before":
return parseLineRef(edit.line).line
case "insert_between":
return parseLineRef(edit.before_line).line
case "replace":
return Number.NEGATIVE_INFINITY
default:
return Number.POSITIVE_INFINITY
}
}
function normalizeEditPayload(payload: string | string[]): string {
return toNewLines(payload).join("\n")
}
function dedupeEdits(edits: HashlineEdit[]): { edits: HashlineEdit[]; deduplicatedEdits: number } {
const seen = new Set<string>()
const deduped: HashlineEdit[] = []
let deduplicatedEdits = 0
for (const edit of edits) {
const key = (() => {
switch (edit.type) {
case "set_line":
return `set_line|${edit.line}|${normalizeEditPayload(edit.text)}`
case "replace_lines":
return `replace_lines|${edit.start_line}|${edit.end_line}|${normalizeEditPayload(edit.text)}`
case "insert_after":
return `insert_after|${edit.line}|${normalizeEditPayload(edit.text)}`
case "insert_before":
return `insert_before|${edit.line}|${normalizeEditPayload(edit.text)}`
case "insert_between":
return `insert_between|${edit.after_line}|${edit.before_line}|${normalizeEditPayload(edit.text)}`
case "replace":
return `replace|${edit.old_text}|${normalizeEditPayload(edit.new_text)}`
}
})()
if (seen.has(key)) {
deduplicatedEdits += 1
continue
}
seen.add(key)
deduped.push(edit)
}
return { edits: deduped, deduplicatedEdits }
}
export function applyHashlineEditsWithReport(content: string, edits: HashlineEdit[]): HashlineApplyReport {
if (edits.length === 0) {
return {
@@ -182,40 +34,23 @@ export function applyHashlineEditsWithReport(content: string, edits: HashlineEdi
let noopEdits = 0
let result = content
let lines = result.split("\n")
let lines = result.length === 0 ? [] : result.split("\n")
const refs = sortedEdits.flatMap((edit) => {
switch (edit.type) {
case "set_line":
return [edit.line]
case "replace_lines":
return [edit.start_line, edit.end_line]
case "insert_after":
return [edit.line]
case "insert_before":
return [edit.line]
case "insert_between":
return [edit.after_line, edit.before_line]
case "replace":
return []
default:
return []
}
})
const refs = collectLineRefs(sortedEdits)
validateLineRefs(lines, refs)
for (const edit of sortedEdits) {
switch (edit.type) {
case "set_line": {
lines = applySetLine(lines, edit.line, edit.text)
lines = applySetLine(lines, edit.line, edit.text, { skipValidation: true })
break
}
case "replace_lines": {
lines = applyReplaceLines(lines, edit.start_line, edit.end_line, edit.text)
lines = applyReplaceLines(lines, edit.start_line, edit.end_line, edit.text, { skipValidation: true })
break
}
case "insert_after": {
const next = applyInsertAfter(lines, edit.line, edit.text)
const next = applyInsertAfter(lines, edit.line, edit.text, { skipValidation: true })
if (next.join("\n") === lines.join("\n")) {
noopEdits += 1
break
@@ -224,7 +59,7 @@ export function applyHashlineEditsWithReport(content: string, edits: HashlineEdi
break
}
case "insert_before": {
const next = applyInsertBefore(lines, edit.line, edit.text)
const next = applyInsertBefore(lines, edit.line, edit.text, { skipValidation: true })
if (next.join("\n") === lines.join("\n")) {
noopEdits += 1
break
@@ -233,7 +68,25 @@ export function applyHashlineEditsWithReport(content: string, edits: HashlineEdi
break
}
case "insert_between": {
const next = applyInsertBetween(lines, edit.after_line, edit.before_line, edit.text)
const next = applyInsertBetween(lines, edit.after_line, edit.before_line, edit.text, { skipValidation: true })
if (next.join("\n") === lines.join("\n")) {
noopEdits += 1
break
}
lines = next
break
}
case "append": {
const next = applyAppend(lines, edit.text)
if (next.join("\n") === lines.join("\n")) {
noopEdits += 1
break
}
lines = next
break
}
case "prepend": {
const next = applyPrepend(lines, edit.text)
if (next.join("\n") === lines.join("\n")) {
noopEdits += 1
break
@@ -243,11 +96,7 @@ export function applyHashlineEditsWithReport(content: string, edits: HashlineEdi
}
case "replace": {
result = lines.join("\n")
if (!result.includes(edit.old_text)) {
throw new Error(`Text not found: "${edit.old_text}"`)
}
const replacement = Array.isArray(edit.new_text) ? edit.new_text.join("\n") : edit.new_text
const replaced = result.replaceAll(edit.old_text, replacement)
const replaced = applyReplace(result, edit.old_text, edit.new_text)
if (replaced === result) {
noopEdits += 1
break
@@ -269,3 +118,12 @@ export function applyHashlineEditsWithReport(content: string, edits: HashlineEdi
export function applyHashlineEdits(content: string, edits: HashlineEdit[]): string {
return applyHashlineEditsWithReport(content, edits).content
}
export {
applySetLine,
applyReplaceLines,
applyInsertAfter,
applyInsertBefore,
applyInsertBetween,
applyReplace,
} from "./edit-operation-primitives"

View File

@@ -0,0 +1,48 @@
import { parseLineRef } from "./validation"
import type { HashlineEdit } from "./types"
/**
 * Returns the 1-based anchor line used to order edits before application.
 * Position-independent edits (append/prepend/replace) get -Infinity and
 * unknown types +Infinity so they group at opposite ends of the sort.
 * NOTE(review): the sort direction lives at the caller — confirm whether
 * -Infinity means "applied first" or "applied last" there.
 */
export function getEditLineNumber(edit: HashlineEdit): number {
  switch (edit.type) {
    case "set_line":
      return parseLineRef(edit.line).line
    case "replace_lines":
      // Ranges are ordered by their end line.
      return parseLineRef(edit.end_line).line
    case "insert_after":
      return parseLineRef(edit.line).line
    case "insert_before":
      return parseLineRef(edit.line).line
    case "insert_between":
      // Ordered by the lower ("before") boundary of the insertion gap.
      return parseLineRef(edit.before_line).line
    case "append":
      return Number.NEGATIVE_INFINITY
    case "prepend":
      return Number.NEGATIVE_INFINITY
    case "replace":
      return Number.NEGATIVE_INFINITY
    default:
      return Number.POSITIVE_INFINITY
  }
}
/**
 * Gathers every line anchor referenced by the given edits, in edit order.
 * Edits that carry no anchors (append/prepend/replace, unknown types)
 * contribute nothing.
 */
export function collectLineRefs(edits: HashlineEdit[]): string[] {
  const refs: string[] = []
  for (const edit of edits) {
    if (edit.type === "set_line" || edit.type === "insert_after" || edit.type === "insert_before") {
      refs.push(edit.line)
    } else if (edit.type === "replace_lines") {
      refs.push(edit.start_line, edit.end_line)
    } else if (edit.type === "insert_between") {
      refs.push(edit.after_line, edit.before_line)
    }
  }
  return refs
}

View File

@@ -0,0 +1,44 @@
/**
 * Round-trippable representation of a file's text: LF-normalized content plus
 * the original BOM presence and dominant line ending, so the exact byte form
 * can be reconstructed after editing.
 */
export interface FileTextEnvelope {
  content: string
  hadBom: boolean
  lineEnding: "\n" | "\r\n"
}

const BOM = "\uFEFF"

/** Picks the dominant line ending: CRLF only when a "\r\n" appears before any bare "\n". */
function detectLineEnding(content: string): "\n" | "\r\n" {
  const crlf = content.indexOf("\r\n")
  const lf = content.indexOf("\n")
  if (lf === -1 || crlf === -1) return "\n"
  return crlf < lf ? "\r\n" : "\n"
}

/** Splits a leading UTF-8 BOM off the content, remembering whether one was present. */
function stripBom(content: string): { content: string; hadBom: boolean } {
  return content.startsWith(BOM)
    ? { content: content.slice(1), hadBom: true }
    : { content, hadBom: false }
}

/** Converts CRLF and lone CR line endings to LF. */
function normalizeToLf(content: string): string {
  return content.replace(/\r\n?/g, "\n")
}

/** Re-expands LF to the recorded line ending (no-op for LF files). */
function restoreLineEndings(content: string, lineEnding: "\n" | "\r\n"): string {
  return lineEnding === "\n" ? content : content.replace(/\n/g, "\r\n")
}

/** Strips the BOM, normalizes to LF, and records what was stripped/detected. */
export function canonicalizeFileText(content: string): FileTextEnvelope {
  const { content: body, hadBom } = stripBom(content)
  return {
    content: normalizeToLf(body),
    hadBom,
    lineEnding: detectLineEnding(body),
  }
}

/** Reapplies the envelope's line ending and BOM to canonical LF content. */
export function restoreFileText(content: string, envelope: FileTextEnvelope): string {
  const restored = restoreLineEndings(content, envelope.lineEnding)
  return envelope.hadBom ? `${BOM}${restored}` : restored
}

View File

@@ -0,0 +1,31 @@
import { computeLineHash } from "./hash-computation"
/**
 * Builds a positional (line-by-line) diff in hashline format.
 * Lines are compared by index only — no LCS alignment — so a single inserted
 * line reports every subsequent line as changed. Added/changed lines carry the
 * new line's hash (`+ N#HASH:text`); removed/old lines carry a blank hash
 * field (`- N# :text`).
 */
export function generateHashlineDiff(oldContent: string, newContent: string, filePath: string): string {
  const oldLines = oldContent.split("\n")
  const newLines = newContent.split("\n")
  let diff = `--- ${filePath}\n+++ ${filePath}\n`
  const maxLines = Math.max(oldLines.length, newLines.length)
  for (let i = 0; i < maxLines; i += 1) {
    const oldLine = oldLines[i] ?? ""
    const newLine = newLines[i] ?? ""
    const lineNum = i + 1
    // Hash is computed from the NEW line; removed-only lines never use it.
    const hash = computeLineHash(lineNum, newLine)
    if (i >= oldLines.length) {
      // Line exists only in the new content.
      diff += `+ ${lineNum}#${hash}:${newLine}\n`
      continue
    }
    if (i >= newLines.length) {
      // Line exists only in the old content.
      diff += `- ${lineNum}# :${oldLine}\n`
      continue
    }
    if (oldLine !== newLine) {
      diff += `- ${lineNum}# :${oldLine}\n`
      diff += `+ ${lineNum}#${hash}:${newLine}\n`
    }
  }
  return diff
}

View File

@@ -0,0 +1,146 @@
import type { ToolContext } from "@opencode-ai/plugin/tool"
import { storeToolMetadata } from "../../features/tool-metadata-store"
import { applyHashlineEditsWithReport } from "./edit-operations"
import { countLineDiffs, generateUnifiedDiff, toHashlineContent } from "./diff-utils"
import { canonicalizeFileText, restoreFileText } from "./file-text-canonicalization"
import { generateHashlineDiff } from "./hashline-edit-diff"
import type { HashlineEdit } from "./types"
interface HashlineEditArgs {
filePath: string
edits: HashlineEdit[]
delete?: boolean
rename?: string
}
// Hosts deliver the tool-call id under different property casings; widen the
// context type to cover the variants observed.
type ToolContextWithCallID = ToolContext & {
  callID?: string
  callId?: string
  call_id?: string
}
type ToolContextWithMetadata = ToolContextWithCallID & {
  metadata?: (value: unknown) => void
}
/** Returns the first non-blank call id among the casing variants, if any. */
function resolveToolCallID(ctx: ToolContextWithCallID): string | undefined {
  // Order matters: prefer callID, then callId, then call_id.
  for (const candidate of [ctx.callID, ctx.callId, ctx.call_id]) {
    if (typeof candidate === "string" && candidate.trim() !== "") return candidate
  }
  return undefined
}
/** A missing file may be created only by edits that need no existing lines (append/prepend). */
function canCreateFromMissingFile(edits: HashlineEdit[]): boolean {
  if (edits.length === 0) return false
  return !edits.some((edit) => edit.type !== "append" && edit.type !== "prepend")
}
/**
 * Assembles the success metadata payload for a completed edit, including a
 * unified diff and add/delete counts.
 * The file path is duplicated under filePath/path/file (and again inside
 * `filediff`) — presumably different host UIs read different keys; confirm
 * before consolidating.
 */
function buildSuccessMeta(
  effectivePath: string,
  beforeContent: string,
  afterContent: string,
  noopEdits: number,
  deduplicatedEdits: number
) {
  const unifiedDiff = generateUnifiedDiff(beforeContent, afterContent, effectivePath)
  const { additions, deletions } = countLineDiffs(beforeContent, afterContent)
  return {
    title: effectivePath,
    metadata: {
      filePath: effectivePath,
      path: effectivePath,
      file: effectivePath,
      diff: unifiedDiff,
      noopEdits,
      deduplicatedEdits,
      filediff: {
        file: effectivePath,
        path: effectivePath,
        filePath: effectivePath,
        before: beforeContent,
        after: afterContent,
        additions,
        deletions,
      },
    },
  }
}
/**
 * Entry point for the hashline edit tool: applies a batch of edits to a file,
 * or deletes/renames it. All failures are reported as "Error: ..." strings in
 * the return value rather than thrown — presumably the tool protocol expects
 * string results; confirm before converting to exceptions.
 */
export async function executeHashlineEditTool(args: HashlineEditArgs, context: ToolContext): Promise<string> {
  try {
    const metadataContext = context as ToolContextWithMetadata
    const filePath = args.filePath
    const { edits, delete: deleteMode, rename } = args
    // Argument validation: delete/rename are mutually exclusive; delete takes
    // no edits, everything else requires at least one.
    if (deleteMode && rename) {
      return "Error: delete and rename cannot be used together"
    }
    if (!deleteMode && (!edits || !Array.isArray(edits) || edits.length === 0)) {
      return "Error: edits parameter must be a non-empty array"
    }
    if (deleteMode && edits.length > 0) {
      return "Error: delete mode requires edits to be an empty array"
    }
    const file = Bun.file(filePath)
    const exists = await file.exists()
    // A missing file is only acceptable when the edits can create it from scratch.
    if (!exists && !deleteMode && !canCreateFromMissingFile(edits)) {
      return `Error: File not found: ${filePath}`
    }
    if (deleteMode) {
      if (!exists) return `Error: File not found: ${filePath}`
      await Bun.file(filePath).delete()
      return `Successfully deleted ${filePath}`
    }
    // Read raw bytes, strip BOM and normalize line endings; the envelope lets
    // the original byte form be restored on write.
    const rawOldContent = exists ? Buffer.from(await file.arrayBuffer()).toString("utf8") : ""
    const oldEnvelope = canonicalizeFileText(rawOldContent)
    const applyResult = applyHashlineEditsWithReport(oldEnvelope.content, edits)
    const canonicalNewContent = applyResult.content
    const writeContent = restoreFileText(canonicalNewContent, oldEnvelope)
    await Bun.write(filePath, writeContent)
    // NOTE(review): rename is implemented as copy-then-delete (and the original
    // path is written first, then deleted) — this loses file mode/metadata and
    // does an extra write; confirm a true rename isn't required.
    if (rename && rename !== filePath) {
      await Bun.write(rename, writeContent)
      await Bun.file(filePath).delete()
    }
    const effectivePath = rename && rename !== filePath ? rename : filePath
    const diff = generateHashlineDiff(oldEnvelope.content, canonicalNewContent, effectivePath)
    const newHashlined = toHashlineContent(canonicalNewContent)
    const meta = buildSuccessMeta(
      effectivePath,
      oldEnvelope.content,
      canonicalNewContent,
      applyResult.noopEdits,
      applyResult.deduplicatedEdits
    )
    // Publish metadata through the context callback when the host provides one...
    if (typeof metadataContext.metadata === "function") {
      metadataContext.metadata(meta)
    }
    // ...and also persist it keyed by tool-call id when one can be resolved.
    const callID = resolveToolCallID(metadataContext)
    if (callID) {
      storeToolMetadata(context.sessionID, callID, meta)
    }
    return `Successfully applied ${edits.length} edit(s) to ${effectivePath}
No-op edits: ${applyResult.noopEdits}, deduplicated edits: ${applyResult.deduplicatedEdits}
${diff}
Updated file (LINE#ID:content):
${newHashlined}`
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error)
    // Hash-mismatch failures get a recovery tip; everything else is passed through.
    if (message.toLowerCase().includes("hash")) {
      return `Error: hash mismatch - ${message}\nTip: reuse LINE#ID entries from the latest read/edit output, or batch related edits in one call.`
    }
    return `Error: ${message}`
  }
}

View File

@@ -7,7 +7,17 @@ export {
} from "./hash-computation"
export { parseLineRef, validateLineRef } from "./validation"
export type { LineRef } from "./validation"
export type { SetLine, ReplaceLines, InsertAfter, InsertBefore, InsertBetween, Replace, HashlineEdit } from "./types"
export type {
SetLine,
ReplaceLines,
InsertAfter,
InsertBefore,
InsertBetween,
Replace,
Append,
Prepend,
HashlineEdit,
} from "./types"
export { NIBBLE_STR, HASHLINE_DICT, HASHLINE_REF_PATTERN, HASHLINE_OUTPUT_PATTERN } from "./constants"
export {
applyHashlineEdits,

View File

@@ -1,34 +1,70 @@
export const HASHLINE_EDIT_DESCRIPTION = `Edit files using LINE#ID format for precise, safe modifications.
WORKFLOW:
1. Read the file and copy exact LINE#ID anchors.
2. Submit one edit call with all related operations for that file.
3. If more edits are needed after success, use the latest anchors from read/edit output.
4. Use anchors as "LINE#ID" only (never include trailing ":content").
1. Read target file/range and copy exact LINE#ID tags.
2. Pick the smallest operation per logical mutation site.
3. Submit one edit call per file with all related operations.
4. If same file needs another call, re-read first.
5. Use anchors as "LINE#ID" only (never include trailing ":content").
VALIDATION:
- Payload shape: { "filePath": string, "edits": [...], "delete"?: boolean, "rename"?: string }
- Each edit must be one of: set_line, replace_lines, insert_after, insert_before, insert_between, replace
- text/new_text must contain plain replacement text only (no LINE#ID prefixes, no diff + markers)
Payload shape: { "filePath": string, "edits": [...], "delete"?: boolean, "rename"?: string }
Each edit must be one of: set_line, replace_lines, insert_after, insert_before, insert_between, replace, append, prepend
text/new_text must contain plain replacement text only (no LINE#ID prefixes, no diff + markers)
CRITICAL: all operations validate against the same pre-edit file snapshot and apply bottom-up. Refs/tags are interpreted against the last-read version of the file.
LINE#ID FORMAT (CRITICAL - READ CAREFULLY):
Each line reference must be in "LINE#ID" format where:
- LINE: 1-based line number
- ID: Two CID letters from the set ZPMQVRWSNKTXJBYH
OPERATION TYPES:
1. set_line
2. replace_lines
3. insert_after
4. insert_before
5. insert_between
6. replace
LINE#ID FORMAT (CRITICAL):
Each line reference must be in "LINE#ID" format where:
LINE: 1-based line number
ID: Two CID letters from the set ZPMQVRWSNKTXJBYH
FILE MODES:
- delete=true deletes file and requires edits=[] with no rename
- rename moves final content to a new path and removes old path
delete=true deletes file and requires edits=[] with no rename
rename moves final content to a new path and removes old path
CONTENT FORMAT:
- text/new_text can be a string (single line) or string[] (multi-line, preferred).
- If you pass a multi-line string, it is split by real newline characters.
- Literal "\\n" is preserved as text.`
text/new_text can be a string (single line) or string[] (multi-line, preferred).
If you pass a multi-line string, it is split by real newline characters.
Literal "\\n" is preserved as text.
FILE CREATION:
append: adds content at EOF. If file does not exist, creates it.
prepend: adds content at BOF. If file does not exist, creates it.
CRITICAL: append/prepend are the only operations that work without an existing file.
OPERATION CHOICE:
One line wrong -> set_line
Adjacent block rewrite or swap/move -> replace_lines (prefer one range op over many single-line ops)
Both boundaries known -> insert_between (ALWAYS prefer over insert_after/insert_before)
One boundary known -> insert_after or insert_before
New file or EOF/BOF addition -> append or prepend
No LINE#ID available -> replace (last resort)
RULES (CRITICAL):
1. Minimize scope: one logical mutation site per operation.
2. Preserve formatting: keep indentation, punctuation, line breaks, trailing commas, brace style.
3. Prefer insertion over neighbor rewrites: anchor to structural boundaries (}, ], },), not interior property lines.
4. No no-ops: replacement content must differ from current content.
5. Touch only requested code: avoid incidental edits.
6. Use exact current tokens: NEVER rewrite approximately.
7. For swaps/moves: prefer one range operation over multiple single-line operations.
8. Output tool calls only; no prose or commentary between them.
TAG CHOICE (ALWAYS):
- Copy tags exactly from read output or >>> mismatch output.
- NEVER guess tags.
- Prefer insert_between over insert_after/insert_before when both boundaries are known.
- Anchor to structural lines (function/class/brace), NEVER blank lines.
- Anti-pattern warning: blank/whitespace anchors are fragile.
- Re-read after each successful edit call before issuing another on the same file.
AUTOCORRECT (built-in - you do NOT need to handle these):
Merged lines are auto-expanded back to original line count.
Indentation is auto-restored from original lines.
BOM and CRLF line endings are preserved automatically.
Hashline prefixes and diff markers in text are auto-stripped.
RECOVERY (when >>> mismatch error appears):
Copy the updated LINE#ID tags shown in the error output directly.
Re-read only if the needed tags are missing from the error snippet.
ALWAYS batch all edits for one file in a single call.`

View File

@@ -2,6 +2,7 @@ import { describe, it, expect, beforeEach, afterEach, mock } from "bun:test"
import type { ToolContext } from "@opencode-ai/plugin/tool"
import { createHashlineEditTool } from "./tools"
import { computeLineHash } from "./hash-computation"
import { canonicalizeFileText } from "./file-text-canonicalization"
import * as fs from "node:fs"
import * as os from "node:os"
import * as path from "node:path"
@@ -216,4 +217,72 @@ describe("createHashlineEditTool", () => {
expect(fs.existsSync(filePath)).toBe(false)
expect(result).toContain("Successfully deleted")
})
// Append/prepend are the only operations allowed to create a missing file;
// final content shows prepend lands before append regardless of edit order.
it("creates missing file with append and prepend", async () => {
//#given
const filePath = path.join(tempDir, "created.txt")
//#when
const result = await tool.execute(
{
filePath,
edits: [
{ type: "append", text: ["line2"] },
{ type: "prepend", text: ["line1"] },
],
},
createMockContext(),
)
//#then
expect(fs.existsSync(filePath)).toBe(true)
expect(fs.readFileSync(filePath, "utf-8")).toBe("line1\nline2")
expect(result).toContain("Successfully applied 2 edit(s)")
})
// Round-trip check: a UTF-8 BOM and CRLF line endings must survive an edit
// (byte-level assertions on the EF BB BF prefix plus full-text comparison).
it("preserves BOM and CRLF through hashline_edit", async () => {
//#given
const filePath = path.join(tempDir, "crlf-bom.txt")
const bomCrLf = "\uFEFFline1\r\nline2\r\n"
fs.writeFileSync(filePath, bomCrLf)
const line2Hash = computeLineHash(2, "line2")
//#when
await tool.execute(
{
filePath,
edits: [{ type: "set_line", line: `2#${line2Hash}`, text: "line2-updated" }],
},
createMockContext(),
)
//#then
const bytes = fs.readFileSync(filePath)
expect(bytes[0]).toBe(0xef)
expect(bytes[1]).toBe(0xbb)
expect(bytes[2]).toBe(0xbf)
expect(bytes.toString("utf-8")).toBe("\uFEFFline1\r\nline2-updated\r\n")
})
// Mixed-ending input: the first terminator encountered decides the envelope.
it("detects LF as line ending when LF appears before CRLF", () => {
//#given
const content = "line1\nline2\r\nline3"
//#when
const envelope = canonicalizeFileText(content)
//#then
expect(envelope.lineEnding).toBe("\n")
})
// Mirror case: CRLF first wins over a later bare LF.
it("detects CRLF as line ending when CRLF appears before LF", () => {
//#given
const content = "line1\r\nline2\nline3"
//#when
const envelope = canonicalizeFileText(content)
//#then
expect(envelope.lineEnding).toBe("\r\n")
})
})

View File

@@ -1,9 +1,6 @@
import { tool, type ToolContext, type ToolDefinition } from "@opencode-ai/plugin/tool"
import { storeToolMetadata } from "../../features/tool-metadata-store"
import type { HashlineEdit } from "./types"
import { applyHashlineEditsWithReport } from "./edit-operations"
import { computeLineHash } from "./hash-computation"
import { toHashlineContent, generateUnifiedDiff, countLineDiffs } from "./diff-utils"
import { executeHashlineEditTool } from "./hashline-edit-executor"
import { HASHLINE_EDIT_DESCRIPTION } from "./tool-description"
interface HashlineEditArgs {
@@ -13,49 +10,6 @@ interface HashlineEditArgs {
rename?: string
}
type ToolContextWithCallID = ToolContext & {
callID?: string
callId?: string
call_id?: string
}
type ToolContextWithMetadata = ToolContextWithCallID & {
metadata?: (value: unknown) => void
}
function resolveToolCallID(ctx: ToolContextWithCallID): string | undefined {
if (typeof ctx.callID === "string" && ctx.callID.trim() !== "") return ctx.callID
if (typeof ctx.callId === "string" && ctx.callId.trim() !== "") return ctx.callId
if (typeof ctx.call_id === "string" && ctx.call_id.trim() !== "") return ctx.call_id
return undefined
}
/**
 * Render a line-by-line hashline diff between two file snapshots.
 * Changed/added lines carry the fresh LINE#ID hash; old/removed lines get a
 * blank hash slot so stale tags are never copied forward. Unchanged lines
 * are omitted.
 */
function generateDiff(oldContent: string, newContent: string, filePath: string): string {
  const before = oldContent.split("\n")
  const after = newContent.split("\n")
  const parts: string[] = [`--- ${filePath}\n+++ ${filePath}\n`]
  const total = Math.max(before.length, after.length)
  for (let index = 0; index < total; index++) {
    const inBefore = index < before.length
    const inAfter = index < after.length
    const oldLine = before[index] ?? ""
    const newLine = after[index] ?? ""
    // Skip lines present and identical on both sides.
    if (inBefore && inAfter && oldLine === newLine) continue
    const lineNum = index + 1
    const hash = computeLineHash(lineNum, newLine)
    if (!inBefore) {
      parts.push(`+ ${lineNum}#${hash}:${newLine}\n`)
    } else if (!inAfter) {
      parts.push(`- ${lineNum}# :${oldLine}\n`)
    } else {
      parts.push(`- ${lineNum}# :${oldLine}\n`)
      parts.push(`+ ${lineNum}#${hash}:${newLine}\n`)
    }
  }
  return parts.join("")
}
export function createHashlineEditTool(): ToolDefinition {
return tool({
description: HASHLINE_EDIT_DESCRIPTION,
@@ -110,101 +64,22 @@ export function createHashlineEditTool(): ToolDefinition {
.union([tool.schema.string(), tool.schema.array(tool.schema.string())])
.describe("Replacement text (string or string[] for multiline)"),
}),
tool.schema.object({
type: tool.schema.literal("append"),
text: tool.schema
.union([tool.schema.string(), tool.schema.array(tool.schema.string())])
.describe("Content to append at EOF; also creates missing file"),
}),
tool.schema.object({
type: tool.schema.literal("prepend"),
text: tool.schema
.union([tool.schema.string(), tool.schema.array(tool.schema.string())])
.describe("Content to prepend at BOF; also creates missing file"),
}),
])
)
.describe("Array of edit operations to apply (empty when delete=true)"),
},
execute: async (args: HashlineEditArgs, context: ToolContext) => {
try {
const metadataContext = context as ToolContextWithMetadata
const filePath = args.filePath
const { edits, delete: deleteMode, rename } = args
if (deleteMode && rename) {
return "Error: delete and rename cannot be used together"
}
if (!deleteMode && (!edits || !Array.isArray(edits) || edits.length === 0)) {
return "Error: edits parameter must be a non-empty array"
}
if (deleteMode && edits.length > 0) {
return "Error: delete mode requires edits to be an empty array"
}
const file = Bun.file(filePath)
const exists = await file.exists()
if (!exists) {
return `Error: File not found: ${filePath}`
}
if (deleteMode) {
await Bun.file(filePath).delete()
return `Successfully deleted ${filePath}`
}
const oldContent = await file.text()
const applyResult = applyHashlineEditsWithReport(oldContent, edits)
const newContent = applyResult.content
await Bun.write(filePath, newContent)
if (rename && rename !== filePath) {
await Bun.write(rename, newContent)
await Bun.file(filePath).delete()
}
const effectivePath = rename && rename !== filePath ? rename : filePath
const diff = generateDiff(oldContent, newContent, effectivePath)
const newHashlined = toHashlineContent(newContent)
const unifiedDiff = generateUnifiedDiff(oldContent, newContent, effectivePath)
const { additions, deletions } = countLineDiffs(oldContent, newContent)
const meta = {
title: effectivePath,
metadata: {
filePath: effectivePath,
path: effectivePath,
file: effectivePath,
diff: unifiedDiff,
noopEdits: applyResult.noopEdits,
deduplicatedEdits: applyResult.deduplicatedEdits,
filediff: {
file: effectivePath,
path: effectivePath,
filePath: effectivePath,
before: oldContent,
after: newContent,
additions,
deletions,
},
},
}
if (typeof metadataContext.metadata === "function") {
metadataContext.metadata(meta)
}
const callID = resolveToolCallID(metadataContext)
if (callID) {
storeToolMetadata(context.sessionID, callID, meta)
}
return `Successfully applied ${edits.length} edit(s) to ${effectivePath}
No-op edits: ${applyResult.noopEdits}, deduplicated edits: ${applyResult.deduplicatedEdits}
${diff}
Updated file (LINE#ID:content):
${newHashlined}`
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
if (message.toLowerCase().includes("hash")) {
return `Error: hash mismatch - ${message}\nTip: reuse LINE#ID entries from the latest read/edit output, or batch related edits in one call.`
}
return `Error: ${message}`
}
},
execute: async (args: HashlineEditArgs, context: ToolContext) => executeHashlineEditTool(args, context),
})
}

View File

@@ -36,4 +36,22 @@ export interface Replace {
new_text: string | string[]
}
export type HashlineEdit = SetLine | ReplaceLines | InsertAfter | InsertBefore | InsertBetween | Replace
// Edit operation: add content at end of file (discriminant: "append").
export interface Append {
type: "append"
text: string | string[]
}
// Edit operation: add content at beginning of file (discriminant: "prepend").
export interface Prepend {
type: "prepend"
text: string | string[]
}
// Discriminated union (on `type`) of every supported hashline edit operation.
export type HashlineEdit =
| SetLine
| ReplaceLines
| InsertAfter
| InsertBefore
| InsertBetween
| Replace
| Append
| Prepend

View File

@@ -1,45 +1,71 @@
type ManagedClientForCleanup = {
client: {
stop: () => Promise<void>
}
}
stop: () => Promise<void>;
};
};
type ProcessCleanupOptions = {
getClients: () => IterableIterator<[string, ManagedClientForCleanup]>
clearClients: () => void
clearCleanupInterval: () => void
}
getClients: () => IterableIterator<[string, ManagedClientForCleanup]>;
clearClients: () => void;
clearCleanupInterval: () => void;
};
type RegisteredHandler = {
event: string;
listener: (...args: unknown[]) => void;
};
export type LspProcessCleanupHandle = {
unregister: () => void;
};
export function registerLspManagerProcessCleanup(options: ProcessCleanupOptions): LspProcessCleanupHandle {
const handlers: RegisteredHandler[] = [];
export function registerLspManagerProcessCleanup(options: ProcessCleanupOptions): void {
// Synchronous cleanup for 'exit' event (cannot await)
const syncCleanup = () => {
for (const [, managed] of options.getClients()) {
try {
// Fire-and-forget during sync exit - process is terminating
void managed.client.stop().catch(() => {})
void managed.client.stop().catch(() => {});
} catch {}
}
options.clearClients()
options.clearCleanupInterval()
}
options.clearClients();
options.clearCleanupInterval();
};
// Async cleanup for signal handlers - properly await all stops
const asyncCleanup = async () => {
const stopPromises: Promise<void>[] = []
const stopPromises: Promise<void>[] = [];
for (const [, managed] of options.getClients()) {
stopPromises.push(managed.client.stop().catch(() => {}))
stopPromises.push(managed.client.stop().catch(() => {}));
}
await Promise.allSettled(stopPromises)
options.clearClients()
options.clearCleanupInterval()
}
await Promise.allSettled(stopPromises);
options.clearClients();
options.clearCleanupInterval();
};
process.on("exit", syncCleanup)
const registerHandler = (event: string, listener: (...args: unknown[]) => void) => {
handlers.push({ event, listener });
process.on(event, listener);
};
registerHandler("exit", syncCleanup);
// Don't call process.exit() here; other handlers (background-agent manager) handle final exit.
process.on("SIGINT", () => void asyncCleanup().catch(() => {}))
process.on("SIGTERM", () => void asyncCleanup().catch(() => {}))
const signalCleanup = () => void asyncCleanup().catch(() => {});
registerHandler("SIGINT", signalCleanup);
registerHandler("SIGTERM", signalCleanup);
if (process.platform === "win32") {
process.on("SIGBREAK", () => void asyncCleanup().catch(() => {}))
registerHandler("SIGBREAK", signalCleanup);
}
return {
unregister: () => {
for (const { event, listener } of handlers) {
process.off(event, listener);
}
handlers.length = 0;
},
};
}

View File

@@ -1,73 +1,74 @@
import type { ResolvedServer } from "./types"
import { registerLspManagerProcessCleanup } from "./lsp-manager-process-cleanup"
import { cleanupTempDirectoryLspClients } from "./lsp-manager-temp-directory-cleanup"
import { LSPClient } from "./lsp-client"
import { LSPClient } from "./lsp-client";
import { registerLspManagerProcessCleanup, type LspProcessCleanupHandle } from "./lsp-manager-process-cleanup";
import { cleanupTempDirectoryLspClients } from "./lsp-manager-temp-directory-cleanup";
import type { ResolvedServer } from "./types";
interface ManagedClient {
client: LSPClient
lastUsedAt: number
refCount: number
initPromise?: Promise<void>
isInitializing: boolean
initializingSince?: number
client: LSPClient;
lastUsedAt: number;
refCount: number;
initPromise?: Promise<void>;
isInitializing: boolean;
initializingSince?: number;
}
class LSPServerManager {
private static instance: LSPServerManager
private clients = new Map<string, ManagedClient>()
private cleanupInterval: ReturnType<typeof setInterval> | null = null
private readonly IDLE_TIMEOUT = 5 * 60 * 1000
private readonly INIT_TIMEOUT = 60 * 1000
private static instance: LSPServerManager;
private clients = new Map<string, ManagedClient>();
private cleanupInterval: ReturnType<typeof setInterval> | null = null;
private readonly IDLE_TIMEOUT = 5 * 60 * 1000;
private readonly INIT_TIMEOUT = 60 * 1000;
private cleanupHandle: LspProcessCleanupHandle | null = null;
private constructor() {
this.startCleanupTimer()
this.registerProcessCleanup()
this.startCleanupTimer();
this.registerProcessCleanup();
}
private registerProcessCleanup(): void {
registerLspManagerProcessCleanup({
this.cleanupHandle = registerLspManagerProcessCleanup({
getClients: () => this.clients.entries(),
clearClients: () => {
this.clients.clear()
this.clients.clear();
},
clearCleanupInterval: () => {
if (this.cleanupInterval) {
clearInterval(this.cleanupInterval)
this.cleanupInterval = null
clearInterval(this.cleanupInterval);
this.cleanupInterval = null;
}
},
})
});
}
static getInstance(): LSPServerManager {
if (!LSPServerManager.instance) {
LSPServerManager.instance = new LSPServerManager()
LSPServerManager.instance = new LSPServerManager();
}
return LSPServerManager.instance
return LSPServerManager.instance;
}
private getKey(root: string, serverId: string): string {
return `${root}::${serverId}`
return `${root}::${serverId}`;
}
private startCleanupTimer(): void {
if (this.cleanupInterval) return
if (this.cleanupInterval) return;
this.cleanupInterval = setInterval(() => {
this.cleanupIdleClients()
}, 60000)
this.cleanupIdleClients();
}, 60000);
}
private cleanupIdleClients(): void {
const now = Date.now()
const now = Date.now();
for (const [key, managed] of this.clients) {
if (managed.refCount === 0 && now - managed.lastUsedAt > this.IDLE_TIMEOUT) {
managed.client.stop()
this.clients.delete(key)
managed.client.stop();
this.clients.delete(key);
}
}
}
async getClient(root: string, server: ResolvedServer): Promise<LSPClient> {
const key = this.getKey(root, server.id)
let managed = this.clients.get(key)
const key = this.getKey(root, server.id);
let managed = this.clients.get(key);
if (managed) {
const now = Date.now()
const now = Date.now();
if (
managed.isInitializing &&
managed.initializingSince !== undefined &&
@@ -75,45 +76,45 @@ class LSPServerManager {
) {
// Stale init can permanently block subsequent calls (e.g., LSP process hang)
try {
await managed.client.stop()
await managed.client.stop();
} catch {}
this.clients.delete(key)
managed = undefined
this.clients.delete(key);
managed = undefined;
}
}
if (managed) {
if (managed.initPromise) {
try {
await managed.initPromise
await managed.initPromise;
} catch {
// Failed init should not keep the key blocked forever.
try {
await managed.client.stop()
await managed.client.stop();
} catch {}
this.clients.delete(key)
managed = undefined
this.clients.delete(key);
managed = undefined;
}
}
if (managed) {
if (managed.client.isAlive()) {
managed.refCount++
managed.lastUsedAt = Date.now()
return managed.client
managed.refCount++;
managed.lastUsedAt = Date.now();
return managed.client;
}
try {
await managed.client.stop()
await managed.client.stop();
} catch {}
this.clients.delete(key)
this.clients.delete(key);
}
}
const client = new LSPClient(root, server)
const client = new LSPClient(root, server);
const initPromise = (async () => {
await client.start()
await client.initialize()
})()
const initStartedAt = Date.now()
await client.start();
await client.initialize();
})();
const initStartedAt = Date.now();
this.clients.set(key, {
client,
lastUsedAt: initStartedAt,
@@ -121,37 +122,37 @@ class LSPServerManager {
initPromise,
isInitializing: true,
initializingSince: initStartedAt,
})
});
try {
await initPromise
await initPromise;
} catch (error) {
this.clients.delete(key)
this.clients.delete(key);
try {
await client.stop()
await client.stop();
} catch {}
throw error
throw error;
}
const m = this.clients.get(key)
const m = this.clients.get(key);
if (m) {
m.initPromise = undefined
m.isInitializing = false
m.initializingSince = undefined
m.initPromise = undefined;
m.isInitializing = false;
m.initializingSince = undefined;
}
return client
return client;
}
warmupClient(root: string, server: ResolvedServer): void {
const key = this.getKey(root, server.id)
if (this.clients.has(key)) return
const client = new LSPClient(root, server)
const key = this.getKey(root, server.id);
if (this.clients.has(key)) return;
const client = new LSPClient(root, server);
const initPromise = (async () => {
await client.start()
await client.initialize()
})()
await client.start();
await client.initialize();
})();
const initStartedAt = Date.now()
const initStartedAt = Date.now();
this.clients.set(key, {
client,
lastUsedAt: initStartedAt,
@@ -159,53 +160,55 @@ class LSPServerManager {
initPromise,
isInitializing: true,
initializingSince: initStartedAt,
})
});
initPromise
.then(() => {
const m = this.clients.get(key)
const m = this.clients.get(key);
if (m) {
m.initPromise = undefined
m.isInitializing = false
m.initializingSince = undefined
m.initPromise = undefined;
m.isInitializing = false;
m.initializingSince = undefined;
}
})
.catch(() => {
// Warmup failures must not permanently block future initialization.
this.clients.delete(key)
void client.stop().catch(() => {})
})
this.clients.delete(key);
void client.stop().catch(() => {});
});
}
releaseClient(root: string, serverId: string): void {
const key = this.getKey(root, serverId)
const managed = this.clients.get(key)
const key = this.getKey(root, serverId);
const managed = this.clients.get(key);
if (managed && managed.refCount > 0) {
managed.refCount--
managed.lastUsedAt = Date.now()
managed.refCount--;
managed.lastUsedAt = Date.now();
}
}
isServerInitializing(root: string, serverId: string): boolean {
const key = this.getKey(root, serverId)
const managed = this.clients.get(key)
return managed?.isInitializing ?? false
const key = this.getKey(root, serverId);
const managed = this.clients.get(key);
return managed?.isInitializing ?? false;
}
async stopAll(): Promise<void> {
this.cleanupHandle?.unregister();
this.cleanupHandle = null;
for (const [, managed] of this.clients) {
await managed.client.stop()
await managed.client.stop();
}
this.clients.clear()
this.clients.clear();
if (this.cleanupInterval) {
clearInterval(this.cleanupInterval)
this.cleanupInterval = null
clearInterval(this.cleanupInterval);
this.cleanupInterval = null;
}
}
async cleanupTempDirectoryClients(): Promise<void> {
await cleanupTempDirectoryLspClients(this.clients)
await cleanupTempDirectoryLspClients(this.clients);
}
}
export const lspManager = LSPServerManager.getInstance()
export const lspManager = LSPServerManager.getInstance();