Compare commits

...

29 Commits

Author SHA1 Message Date
YeonGyu-Kim
deef9d864b fix(slashcommand): use "commands" (plural) for OpenCode command directories
Closes #1918
2026-02-24 21:37:02 +09:00
github-actions[bot]
55b9ad60d8 release: v3.8.5 2026-02-24 09:45:36 +00:00
YeonGyu-Kim
e997e0071c Merge pull request #2088 from minpeter/feat/hashline-edit-error-hints
fix(hashline-edit): improve error messages for invalid LINE#ID references
2026-02-24 18:36:04 +09:00
YeonGyu-Kim
b8257dc59c fix(hashline-edit): tolerate >>> prefix and spaces around # in line refs 2026-02-24 18:21:05 +09:00
YeonGyu-Kim
365d863e3a fix(hashline-edit): use instanceof for hash mismatch error detection 2026-02-24 18:21:05 +09:00
YeonGyu-Kim
1785313f3b fix(hashline-read-enhancer): skip hashifying OpenCode-truncated lines 2026-02-24 18:21:05 +09:00
YeonGyu-Kim
ac962d62ab fix(hashline-edit): add same-line operation precedence ordering 2026-02-24 18:21:05 +09:00
YeonGyu-Kim
d61c0f8cb5 fix(hashline-read-enhancer): guard against overwriting error output with success message 2026-02-24 17:52:04 +09:00
YeonGyu-Kim
a567cd0d68 fix(hashline-edit): address Oracle review feedback
- Extract WRITE_SUCCESS_MARKER constant to couple guard and output string
- Remove double blank line after parseLineRefWithHint
- Add comment clarifying normalized equals ref.trim() in error paths
2026-02-24 17:41:30 +09:00
YeonGyu-Kim
55ad4297d4 fix(hashline-edit): widen non-numeric prefix detection and remove duplicate try-catch
- Replace regex /^([A-Za-z_]+)#.../ with an indexOf-based prefix check to catch
  line-ref#VK and line.ref#VK style inputs that previously produced generic errors
- Extract parseLineRefWithHint helper to eliminate duplicated try-catch in
  validateLineRef and validateLineRefs
- Restore idempotency guard in appendWriteHashlineOutput using new output format
- Add tests for LINE42 extraction, line-ref hint, line.ref hint, and guard behavior

Ultraworked with [Sisyphus](https://github.com/code-yeongyu/oh-my-opencode)

Co-authored-by: Sisyphus <clio-agent@sisyphuslabs.ai>
2026-02-24 17:32:44 +09:00
minpeter
c6a69899d8 fix(hashline-read-enhancer): simplify write tool output to line count summary
Replace the full hashlined file content in the write tool response with a
simple 'File written successfully. N lines written.' summary to reduce
context bloat.
2026-02-24 16:00:23 +09:00
minpeter
2aeb96c3f6 fix(hashline-edit): improve error messages for invalid LINE#ID references
- Detect non-numeric prefixes (e.g., "LINE#HK", "POS#VK") and explain
  that the prefix must be an actual line number, not literal text
- Add suggestLineForHash() that performs a reverse lookup of a hash in the
  file's lines to suggest the correct reference (e.g., Did you mean "1#HK"?)
- Unify error message format from "LINE#ID" to "{line_number}#{hash_id}"
  matching the tool description convention
- Add 3 tests covering non-numeric prefix detection and hash suggestion
2026-02-24 16:00:23 +09:00
YeonGyu-Kim
5fd65f2935 Merge pull request #2086 from code-yeongyu/refactor/hashline-legacy-cleanup
refactor(hashline-edit): clean up legacy code and dead exports
2026-02-24 15:44:32 +09:00
YeonGyu-Kim
b03aae57f3 fix: remove accidentally committed node_modules symlink 2026-02-24 15:39:31 +09:00
YeonGyu-Kim
8c3a0ca2fe refactor(hashline-edit): rename legacy operation names in error messages
Update error messages to match current op schema:
- insert_after → append (anchored)
- insert_before → prepend (anchored)
2026-02-24 15:33:48 +09:00
YeonGyu-Kim
9a2e0f1add refactor(hashline-edit): remove unnecessary barrel re-exports of internal primitives
applySetLine, applyReplaceLines, applyInsertAfter, applyInsertBefore
were re-exported from both edit-operations.ts and index.ts but have no
external consumers — they are only used internally within the module.
Only applyHashlineEdits (the public API) remains exported.
2026-02-24 15:33:17 +09:00
YeonGyu-Kim
d28ebd10c1 refactor(hashline-edit): remove HASHLINE_LEGACY_REF_PATTERN and legacy ref compat
Remove the old LINE:HEX (e.g. "42:ab") reference format support. All
refs now use LINE#ID format exclusively (e.g. "42#VK"). Also fixes
HASHLINE_OUTPUT_PATTERN to use | separator (was missed in PR #2079).
2026-02-24 15:32:24 +09:00
YeonGyu-Kim
fb92babee7 refactor(hashline-edit): remove dead applyInsertBetween function
This function is no longer called from edit-operations.ts after the
op/pos/end/lines schema refactor in PR #2079. Remove the function
definition and its 3 dedicated test cases.
2026-02-24 15:31:43 +09:00
YeonGyu-Kim
5d30ec80df Merge pull request #2079 from minpeter/feat/hashline-edit-op-schema
refactor(hashline-edit): align tool payload to op/pos/end/lines
2026-02-24 15:13:45 +09:00
YeonGyu-Kim
f50f3d3c37 fix(hashline-edit): clarify LINE#ID placeholder to prevent literal interpretation 2026-02-24 15:00:06 +09:00
YeonGyu-Kim
833c26ae5c sisyphus waits for oracle 2026-02-24 14:50:00 +09:00
minpeter
60cf2de16f fix(hashline-edit): detect overlapping ranges and prevent false unwrap of blank-line spans
- Add detectOverlappingRanges() to reject edits with overlapping pos..end ranges
  instead of crashing with undefined.match()
- Add bounds guard (?? "") in edit-operation-primitives for out-of-range line access
- Add null guard in leadingWhitespace() for undefined/empty input
- Fix restoreOldWrappedLines false unwrap: skip candidate spans containing
  blank/whitespace-only lines, preventing incorrect collapse of structural
  blank lines and indentation (the "애국가 bug")
- Improve tool description for range replace clarity
- Add tests: overlapping range detection, false unwrap prevention
2026-02-24 14:46:17 +09:00
minpeter
c7efe8f002 fix(hashline-edit): preserve intentional whitespace removal in autocorrect
restoreIndentForPairedReplacement() and restoreLeadingIndent() unconditionally
restored original indentation when replacement had none, preventing intentional
indentation changes (e.g. removing a tab from '\t1절' to '1절'). Skip indent
restoration when trimmed content is identical, indicating a whitespace-only edit.
2026-02-24 14:07:21 +09:00
minpeter
54b756c145 refactor(hashline): change content separator from colon to pipe
Change LINE#HASH:content format to LINE#HASH|content across the entire
codebase. The pipe separator is more visually distinct and avoids
conflicts with TypeScript colons in code content.

15 files updated: implementation, prompts, tests, and READMEs.
2026-02-24 06:01:24 +09:00
minpeter
1cb362773b fix(hashline-read-enhancer): handle inline <content> tag from updated OpenCode read tool
OpenCode updated its read tool output format — the <content> tag now shares
a line with the first content line (<content>1: content) with no newline.

The hook's exact indexOf('<content>') detection returned -1, causing all
read output to pass through unmodified (no hash anchors). This silently
disabled the entire hashline-edit workflow.

Fixes:
- Sub-bug 1: Use findIndex + startsWith instead of exact indexOf match
- Sub-bug 2: Extract inline content after <content> prefix as first line
- Sub-bug 3: Normalize open-tag line to bare tag in output (no duplicate)

Also adds backward compat for legacy <file> + 00001| pipe format.
2026-02-24 05:47:05 +09:00
minpeter
08b663df86 refactor(hashline-edit): enforce three-op edit model
Unify internal hashline edit handling around replace/append/prepend to remove legacy operation shapes. This keeps normalization, ordering, deduplication, execution, and tests aligned with the new op/pos/end/lines contract.
2026-02-24 05:06:41 +09:00
github-actions[bot]
fddd6f1306 @Firstbober has signed the CLA in code-yeongyu/oh-my-opencode#2080 2026-02-23 19:28:23 +00:00
YeonGyu-Kim
e11c217d15 fix(tools/background-task): respect block=true even when full_session=true
Move blocking/polling logic before full_session branch so that
block=true waits for task completion regardless of output format.

🤖 Generated with assistance of oh-my-opencode
2026-02-24 03:52:20 +09:00
minpeter
6ec0ff732b refactor(hashline-edit): align tool payload to op/pos/end/lines
Unify hashline_edit input with replace/append/prepend + pos/end/lines semantics so callers use a single stable shape. Add normalization coverage and refresh tool guidance/tests to reduce schema confusion and stale legacy payload usage.
2026-02-24 03:00:38 +09:00
46 changed files with 1072 additions and 657 deletions

View File

@@ -217,9 +217,9 @@ MCPサーバーがあなたのコンテキスト予算を食いつぶしてい
[oh-my-pi](https://github.com/can1357/oh-my-pi) に触発され、**Hashline**を実装しました。エージェントが読むすべての行にコンテンツハッシュがタグ付けされて返されます:
```
11#VK: function hello() {
22#XJ: return "world";
33#MB: }
11#VK| function hello() {
22#XJ| return "world";
33#MB| }
```
エージェントはこのタグを参照して編集します。最後に読んだ後でファイルが変更されていた場合、ハッシュが一致せず、コードが壊れる前に編集が拒否されます。空白を正確に再現する必要もなく、間違った行を編集するエラー (stale-line) もありません。

View File

@@ -216,9 +216,9 @@ MCP 서버들이 당신의 컨텍스트 예산을 다 잡아먹죠. 우리가
[oh-my-pi](https://github.com/can1357/oh-my-pi)에서 영감을 받아, **Hashline**을 구현했습니다. 에이전트가 읽는 모든 줄에는 콘텐츠 해시 태그가 붙어 나옵니다:
```
11#VK: function hello() {
22#XJ: return "world";
33#MB: }
11#VK| function hello() {
22#XJ| return "world";
33#MB| }
```
에이전트는 이 태그를 참조해서 편집합니다. 마지막으로 읽은 후 파일이 변경되었다면 해시가 일치하지 않아 코드가 망가지기 전에 편집이 거부됩니다. 공백을 똑같이 재현할 필요도 없고, 엉뚱한 줄을 수정하는 에러(stale-line)도 없습니다.

View File

@@ -220,9 +220,9 @@ The harness problem is real. Most agent failures aren't the model. It's the edit
Inspired by [oh-my-pi](https://github.com/can1357/oh-my-pi), we implemented **Hashline**. Every line the agent reads comes back tagged with a content hash:
```
11#VK: function hello() {
22#XJ: return "world";
33#MB: }
11#VK| function hello() {
22#XJ| return "world";
33#MB| }
```
The agent edits by referencing those tags. If the file changed since the last read, the hash won't match and the edit is rejected before corruption. No whitespace reproduction. No stale-line errors.

View File

@@ -218,9 +218,9 @@ Harness 问题是真的。绝大多数所谓的 Agent 故障,其实并不是
受 [oh-my-pi](https://github.com/can1357/oh-my-pi) 的启发,我们实现了 **Hashline** 技术。Agent 读到的每一行代码,末尾都会打上一个强绑定的内容哈希值:
```
11#VK: function hello() {
22#XJ: return "world";
33#MB: }
11#VK| function hello() {
22#XJ| return "world";
33#MB| }
```
Agent 发起修改时,必须通过这些标签引用目标行。如果在此期间文件发生过变化,哈希验证就会失败,从而在代码被污染前直接驳回。不再有缩进空格错乱,彻底告别改错行的惨剧。

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode",
"version": "3.8.4",
"version": "3.8.5",
"description": "The Best AI Agent Harness - Batteries-Included OpenCode Plugin with Multi-Model Orchestration, Parallel Background Agents, and Crafted LSP/AST Tools",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -74,13 +74,13 @@
"typescript": "^5.7.3"
},
"optionalDependencies": {
"oh-my-opencode-darwin-arm64": "3.8.4",
"oh-my-opencode-darwin-x64": "3.8.4",
"oh-my-opencode-linux-arm64": "3.8.4",
"oh-my-opencode-linux-arm64-musl": "3.8.4",
"oh-my-opencode-linux-x64": "3.8.4",
"oh-my-opencode-linux-x64-musl": "3.8.4",
"oh-my-opencode-windows-x64": "3.8.4"
"oh-my-opencode-darwin-arm64": "3.8.5",
"oh-my-opencode-darwin-x64": "3.8.5",
"oh-my-opencode-linux-arm64": "3.8.5",
"oh-my-opencode-linux-arm64-musl": "3.8.5",
"oh-my-opencode-linux-x64": "3.8.5",
"oh-my-opencode-linux-x64-musl": "3.8.5",
"oh-my-opencode-windows-x64": "3.8.5"
},
"trustedDependencies": [
"@ast-grep/cli",

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-darwin-arm64",
"version": "3.8.4",
"version": "3.8.5",
"description": "Platform-specific binary for oh-my-opencode (darwin-arm64)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-darwin-x64",
"version": "3.8.4",
"version": "3.8.5",
"description": "Platform-specific binary for oh-my-opencode (darwin-x64)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-linux-arm64-musl",
"version": "3.8.4",
"version": "3.8.5",
"description": "Platform-specific binary for oh-my-opencode (linux-arm64-musl)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-linux-arm64",
"version": "3.8.4",
"version": "3.8.5",
"description": "Platform-specific binary for oh-my-opencode (linux-arm64)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-linux-x64-musl",
"version": "3.8.4",
"version": "3.8.5",
"description": "Platform-specific binary for oh-my-opencode (linux-x64-musl)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-linux-x64",
"version": "3.8.4",
"version": "3.8.5",
"description": "Platform-specific binary for oh-my-opencode (linux-x64)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-windows-x64",
"version": "3.8.4",
"version": "3.8.5",
"description": "Platform-specific binary for oh-my-opencode (windows-x64)",
"license": "MIT",
"repository": {

View File

@@ -1695,6 +1695,14 @@
"created_at": "2026-02-23T07:06:14Z",
"repoId": 1108837393,
"pullRequestNo": 2068
},
{
"name": "Firstbober",
"id": 22197465,
"comment_id": 3946848526,
"created_at": "2026-02-23T19:27:59Z",
"repoId": 1108837393,
"pullRequestNo": 2080
}
]
}

View File

@@ -336,12 +336,11 @@ result = task(..., run_in_background=false) // Never wait synchronously for exp
\`\`\`
### Background Result Collection:
1. Launch parallel agents receive task_ids
2. Continue immediate work
1. Launch parallel agents \u2192 receive task_ids
2. Continue immediate work (explore, librarian results)
3. When results needed: \`background_output(task_id="...")\`
4. Before final answer, cancel DISPOSABLE tasks (explore, librarian) individually: \`background_cancel(taskId="bg_explore_xxx")\`, \`background_cancel(taskId="bg_librarian_xxx")\`
5. **NEVER cancel Oracle.** ALWAYS collect Oracle result via \`background_output(task_id="bg_oracle_xxx")\` before answering — even if you already have enough context.
6. **NEVER use \`background_cancel(all=true)\`** — it kills Oracle. Cancel each disposable task by its specific taskId.
4. **If Oracle is running**: STOP all other output. Follow Oracle Completion Protocol in <Oracle_Usage>.
5. Cleanup: Cancel disposable tasks (explore, librarian) individually via \`background_cancel(taskId="...")\`. Never use \`background_cancel(all=true)\`.
### Search Stop Conditions
@@ -478,9 +477,9 @@ If verification fails:
3. Report: "Done. Note: found N pre-existing lint errors unrelated to my changes."
### Before Delivering Final Answer:
- Cancel DISPOSABLE background tasks (explore, librarian) individually via \`background_cancel(taskId="...")\`
- **NEVER use \`background_cancel(all=true)\`.** Always cancel individually by taskId.
- **Always wait for Oracle**: When Oracle is running and you have gathered enough context from your own exploration, your next action is \`background_output\` on Oracle — NOT delivering a final answer. Oracle's value is highest when you think you don't need it.
- **If Oracle is running**: STOP. Follow Oracle Completion Protocol in <Oracle_Usage>. Do NOT deliver any answer.
- Cancel disposable background tasks (explore, librarian) individually via \`background_cancel(taskId="...")\`.
- **Never use \`background_cancel(all=true)\`.**
</Behavior_Instructions>
${oracleSection}

View File

@@ -0,0 +1,59 @@
/// <reference types="bun-types" />
import { afterEach, describe, expect, it } from "bun:test"
import { mkdirSync, rmSync, writeFileSync } from "node:fs"
import { tmpdir } from "node:os"
import { join } from "node:path"
import { loadOpencodeGlobalCommands, loadOpencodeProjectCommands } from "./loader"
const testRoots: string[] = []
function createTempRoot(): string {
const root = join(tmpdir(), `command-loader-${Date.now()}-${Math.random().toString(16).slice(2)}`)
mkdirSync(root, { recursive: true })
testRoots.push(root)
return root
}
function writeCommand(dir: string, name: string): void {
mkdirSync(dir, { recursive: true })
writeFileSync(
join(dir, `${name}.md`),
"---\ndescription: command from test\n---\nUse this command"
)
}
afterEach(() => {
for (const root of testRoots.splice(0)) {
rmSync(root, { recursive: true, force: true })
}
delete process.env.OPENCODE_CONFIG_DIR
})
describe("claude-code-command-loader OpenCode paths", () => {
it("loads commands from global OpenCode commands directory", async () => {
// given
const root = createTempRoot()
const opencodeConfigDir = join(root, "config")
writeCommand(join(opencodeConfigDir, "commands"), "global-opencode")
process.env.OPENCODE_CONFIG_DIR = opencodeConfigDir
// when
const commands = await loadOpencodeGlobalCommands()
// then
expect(commands["global-opencode"]).toBeDefined()
})
it("loads commands from project OpenCode commands directory", async () => {
// given
const root = createTempRoot()
writeCommand(join(root, ".opencode", "commands"), "project-opencode")
// when
const commands = await loadOpencodeProjectCommands(root)
// then
expect(commands["project-opencode"]).toBeDefined()
})
})

View File

@@ -122,13 +122,13 @@ export async function loadProjectCommands(directory?: string): Promise<Record<st
export async function loadOpencodeGlobalCommands(): Promise<Record<string, CommandDefinition>> {
const configDir = getOpenCodeConfigDir({ binary: "opencode" })
const opencodeCommandsDir = join(configDir, "command")
const opencodeCommandsDir = join(configDir, "commands")
const commands = await loadCommandsFromDir(opencodeCommandsDir, "opencode")
return commandsToRecord(commands)
}
export async function loadOpencodeProjectCommands(directory?: string): Promise<Record<string, CommandDefinition>> {
const opencodeProjectDir = join(directory ?? process.cwd(), ".opencode", "command")
const opencodeProjectDir = join(directory ?? process.cwd(), ".opencode", "commands")
const commands = await loadCommandsFromDir(opencodeProjectDir, "opencode-project")
return commandsToRecord(commands)
}

View File

@@ -0,0 +1,63 @@
/// <reference types="bun-types" />
import { afterEach, describe, expect, it } from "bun:test"
import { mkdirSync, rmSync, writeFileSync } from "node:fs"
import { tmpdir } from "node:os"
import { join } from "node:path"
import { executeSlashCommand } from "./executor"
const testRoots: string[] = []
function createTempRoot(): string {
const root = join(tmpdir(), `auto-slash-executor-${Date.now()}-${Math.random().toString(16).slice(2)}`)
mkdirSync(root, { recursive: true })
testRoots.push(root)
return root
}
function writeCommand(dir: string, name: string): void {
mkdirSync(dir, { recursive: true })
writeFileSync(
join(dir, `${name}.md`),
"---\ndescription: command from test\n---\nRun from OpenCode command directory"
)
}
afterEach(() => {
for (const root of testRoots.splice(0)) {
rmSync(root, { recursive: true, force: true })
}
delete process.env.OPENCODE_CONFIG_DIR
})
describe("auto-slash-command executor OpenCode paths", () => {
it("resolves commands from OpenCode global and project plural directories", async () => {
// given
const root = createTempRoot()
const opencodeConfigDir = join(root, "config")
writeCommand(join(opencodeConfigDir, "commands"), "global-cmd")
writeCommand(join(root, ".opencode", "commands"), "project-cmd")
process.env.OPENCODE_CONFIG_DIR = opencodeConfigDir
const originalCwd = process.cwd()
process.chdir(root)
try {
// when
const globalResult = await executeSlashCommand(
{ command: "global-cmd", args: "", raw: "/global-cmd" },
{ skills: [] }
)
const projectResult = await executeSlashCommand(
{ command: "project-cmd", args: "", raw: "/project-cmd" },
{ skills: [] }
)
// then
expect(globalResult.success).toBe(true)
expect(projectResult.success).toBe(true)
} finally {
process.chdir(originalCwd)
}
})
})

View File

@@ -105,8 +105,8 @@ async function discoverAllCommands(options?: ExecutorOptions): Promise<CommandIn
const configDir = getOpenCodeConfigDir({ binary: "opencode" })
const userCommandsDir = join(getClaudeConfigDir(), "commands")
const projectCommandsDir = join(process.cwd(), ".claude", "commands")
const opencodeGlobalDir = join(configDir, "command")
const opencodeProjectDir = join(process.cwd(), ".opencode", "command")
const opencodeGlobalDir = join(configDir, "commands")
const opencodeProjectDir = join(process.cwd(), ".opencode", "commands")
const userCommands = discoverCommandsFromDir(userCommandsDir, "user")
const opencodeGlobalCommands = discoverCommandsFromDir(opencodeGlobalDir, "opencode")

View File

@@ -1,14 +1,19 @@
import type { PluginInput } from "@opencode-ai/plugin"
import { computeLineHash } from "../../tools/hashline-edit/hash-computation"
import { toHashlineContent } from "../../tools/hashline-edit/diff-utils"
const WRITE_SUCCESS_MARKER = "File written successfully."
interface HashlineReadEnhancerConfig {
hashline_edit?: { enabled: boolean }
}
const READ_LINE_PATTERN = /^(\d+): ?(.*)$/
const COLON_READ_LINE_PATTERN = /^\s*(\d+): ?(.*)$/
const PIPE_READ_LINE_PATTERN = /^\s*(\d+)\| ?(.*)$/
const CONTENT_OPEN_TAG = "<content>"
const CONTENT_CLOSE_TAG = "</content>"
const FILE_OPEN_TAG = "<file>"
const FILE_CLOSE_TAG = "</file>"
const OPENCODE_LINE_TRUNCATION_SUFFIX = "... (line truncated to 2000 chars)"
function isReadTool(toolName: string): boolean {
return toolName.toLowerCase() === "read"
@@ -24,18 +29,39 @@ function shouldProcess(config: HashlineReadEnhancerConfig): boolean {
function isTextFile(output: string): boolean {
const firstLine = output.split("\n")[0] ?? ""
return READ_LINE_PATTERN.test(firstLine)
return COLON_READ_LINE_PATTERN.test(firstLine) || PIPE_READ_LINE_PATTERN.test(firstLine)
}
function parseReadLine(line: string): { lineNumber: number; content: string } | null {
const colonMatch = COLON_READ_LINE_PATTERN.exec(line)
if (colonMatch) {
return {
lineNumber: Number.parseInt(colonMatch[1], 10),
content: colonMatch[2],
}
}
const pipeMatch = PIPE_READ_LINE_PATTERN.exec(line)
if (pipeMatch) {
return {
lineNumber: Number.parseInt(pipeMatch[1], 10),
content: pipeMatch[2],
}
}
return null
}
function transformLine(line: string): string {
const match = READ_LINE_PATTERN.exec(line)
if (!match) {
const parsed = parseReadLine(line)
if (!parsed) {
return line
}
const lineNumber = parseInt(match[1], 10)
const content = match[2]
const hash = computeLineHash(lineNumber, content)
return `${lineNumber}#${hash}:${content}`
if (parsed.content.endsWith(OPENCODE_LINE_TRUNCATION_SUFFIX)) {
return line
}
const hash = computeLineHash(parsed.lineNumber, parsed.content)
return `${parsed.lineNumber}#${hash}|${parsed.content}`
}
function transformOutput(output: string): string {
@@ -44,25 +70,43 @@ function transformOutput(output: string): string {
}
const lines = output.split("\n")
const contentStart = lines.indexOf(CONTENT_OPEN_TAG)
const contentStart = lines.findIndex(
(line) => line === CONTENT_OPEN_TAG || line.startsWith(CONTENT_OPEN_TAG)
)
const contentEnd = lines.indexOf(CONTENT_CLOSE_TAG)
const fileStart = lines.findIndex((line) => line === FILE_OPEN_TAG || line.startsWith(FILE_OPEN_TAG))
const fileEnd = lines.indexOf(FILE_CLOSE_TAG)
if (contentStart !== -1 && contentEnd !== -1 && contentEnd > contentStart + 1) {
const fileLines = lines.slice(contentStart + 1, contentEnd)
const blockStart = contentStart !== -1 ? contentStart : fileStart
const blockEnd = contentStart !== -1 ? contentEnd : fileEnd
const openTag = contentStart !== -1 ? CONTENT_OPEN_TAG : FILE_OPEN_TAG
if (blockStart !== -1 && blockEnd !== -1 && blockEnd > blockStart) {
const openLine = lines[blockStart] ?? ""
const inlineFirst = openLine.startsWith(openTag) && openLine !== openTag
? openLine.slice(openTag.length)
: null
const fileLines = inlineFirst !== null
? [inlineFirst, ...lines.slice(blockStart + 1, blockEnd)]
: lines.slice(blockStart + 1, blockEnd)
if (!isTextFile(fileLines[0] ?? "")) {
return output
}
const result: string[] = []
for (const line of fileLines) {
if (!READ_LINE_PATTERN.test(line)) {
if (!parseReadLine(line)) {
result.push(...fileLines.slice(result.length))
break
}
result.push(transformLine(line))
}
return [...lines.slice(0, contentStart + 1), ...result, ...lines.slice(contentEnd)].join("\n")
const prefixLines = inlineFirst !== null
? [...lines.slice(0, blockStart), openTag]
: lines.slice(0, blockStart + 1)
return [...prefixLines, ...result, ...lines.slice(blockEnd)].join("\n")
}
if (!isTextFile(lines[0] ?? "")) {
@@ -71,7 +115,7 @@ function transformOutput(output: string): string {
const result: string[] = []
for (const line of lines) {
if (!READ_LINE_PATTERN.test(line)) {
if (!parseReadLine(line)) {
result.push(...lines.slice(result.length))
break
}
@@ -98,7 +142,12 @@ function extractFilePath(metadata: unknown): string | undefined {
}
async function appendWriteHashlineOutput(output: { output: string; metadata: unknown }): Promise<void> {
if (output.output.includes("Updated file (LINE#ID:content):")) {
if (output.output.startsWith(WRITE_SUCCESS_MARKER)) {
return
}
const outputLower = output.output.toLowerCase()
if (outputLower.startsWith("error") || outputLower.includes("failed")) {
return
}
@@ -113,8 +162,8 @@ async function appendWriteHashlineOutput(output: { output: string; metadata: unk
}
const content = await file.text()
const hashlined = toHashlineContent(content)
output.output = `${output.output}\n\nUpdated file (LINE#ID:content):\n${hashlined}`
const lineCount = content === "" ? 0 : content.split("\n").length
output.output = `${WRITE_SUCCESS_MARKER} ${lineCount} lines written.`
}
export function createHashlineReadEnhancerHook(

View File

@@ -1,3 +1,5 @@
/// <reference types="bun-types" />
import { describe, it, expect } from "bun:test"
import type { PluginInput } from "@opencode-ai/plugin"
import { createHashlineReadEnhancerHook } from "./hook"
@@ -45,11 +47,70 @@ describe("hashline-read-enhancer", () => {
//#then
const lines = output.output.split("\n")
expect(lines[3]).toMatch(/^1#[ZPMQVRWSNKTXJBYH]{2}:const x = 1$/)
expect(lines[4]).toMatch(/^2#[ZPMQVRWSNKTXJBYH]{2}:const y = 2$/)
expect(lines[3]).toMatch(/^1#[ZPMQVRWSNKTXJBYH]{2}\|const x = 1$/)
expect(lines[4]).toMatch(/^2#[ZPMQVRWSNKTXJBYH]{2}\|const y = 2$/)
expect(lines[10]).toBe("1: keep this unchanged")
})
it("hashifies inline <content> format from updated OpenCode read tool", async () => {
//#given
const hook = createHashlineReadEnhancerHook(mockCtx(), { hashline_edit: { enabled: true } })
const input = { tool: "read", sessionID: "s", callID: "c" }
const output = {
title: "demo.ts",
output: [
"<path>/tmp/demo.ts</path>",
"<type>file</type>",
"<content>1: const x = 1",
"2: const y = 2",
"",
"(End of file - total 2 lines)",
"</content>",
].join("\n"),
metadata: {},
}
//#when
await hook["tool.execute.after"](input, output)
//#then
const lines = output.output.split("\n")
expect(lines[0]).toBe("<path>/tmp/demo.ts</path>")
expect(lines[1]).toBe("<type>file</type>")
expect(lines[2]).toBe("<content>")
expect(lines[3]).toMatch(/^1#[ZPMQVRWSNKTXJBYH]{2}\|const x = 1$/)
expect(lines[4]).toMatch(/^2#[ZPMQVRWSNKTXJBYH]{2}\|const y = 2$/)
expect(lines[6]).toBe("(End of file - total 2 lines)")
expect(lines[7]).toBe("</content>")
})
it("keeps OpenCode-truncated lines unhashed while hashifying normal lines", async () => {
//#given
const hook = createHashlineReadEnhancerHook(mockCtx(), { hashline_edit: { enabled: true } })
const input = { tool: "read", sessionID: "s", callID: "c" }
const truncatedLine = `${"x".repeat(60)}... (line truncated to 2000 chars)`
const output = {
title: "demo.ts",
output: [
"<path>/tmp/demo.ts</path>",
"<type>file</type>",
"<content>",
`1: ${truncatedLine}`,
"2: normal line",
"</content>",
].join("\n"),
metadata: {},
}
//#when
await hook["tool.execute.after"](input, output)
//#then
const lines = output.output.split("\n")
expect(lines[3]).toBe(`1: ${truncatedLine}`)
expect(lines[4]).toMatch(/^2#[ZPMQVRWSNKTXJBYH]{2}\|normal line$/)
})
it("hashifies plain read output without content tags", async () => {
//#given
const hook = createHashlineReadEnhancerHook(mockCtx(), { hashline_edit: { enabled: true } })
@@ -71,13 +132,66 @@ describe("hashline-read-enhancer", () => {
//#then
const lines = output.output.split("\n")
expect(lines[0]).toMatch(/^1#[ZPMQVRWSNKTXJBYH]{2}:# Oh-My-OpenCode Features$/)
expect(lines[1]).toMatch(/^2#[ZPMQVRWSNKTXJBYH]{2}:$/)
expect(lines[2]).toMatch(/^3#[ZPMQVRWSNKTXJBYH]{2}:Hashline test$/)
expect(lines[0]).toMatch(/^1#[ZPMQVRWSNKTXJBYH]{2}\|# Oh-My-OpenCode Features$/)
expect(lines[1]).toMatch(/^2#[ZPMQVRWSNKTXJBYH]{2}\|$/)
expect(lines[2]).toMatch(/^3#[ZPMQVRWSNKTXJBYH]{2}\|Hashline test$/)
expect(lines[4]).toBe("(End of file - total 3 lines)")
})
it("appends LINE#ID output for write tool using metadata filepath", async () => {
it("hashifies read output with <file> and zero-padded pipe format", async () => {
//#given
const hook = createHashlineReadEnhancerHook(mockCtx(), { hashline_edit: { enabled: true } })
const input = { tool: "read", sessionID: "s", callID: "c" }
const output = {
title: "demo.ts",
output: [
"<file>",
"00001| const x = 1",
"00002| const y = 2",
"",
"(End of file - total 2 lines)",
"</file>",
].join("\n"),
metadata: {},
}
//#when
await hook["tool.execute.after"](input, output)
//#then
const lines = output.output.split("\n")
expect(lines[1]).toMatch(/^1#[ZPMQVRWSNKTXJBYH]{2}\|const x = 1$/)
expect(lines[2]).toMatch(/^2#[ZPMQVRWSNKTXJBYH]{2}\|const y = 2$/)
expect(lines[5]).toBe("</file>")
})
it("hashifies pipe format even with leading spaces", async () => {
//#given
const hook = createHashlineReadEnhancerHook(mockCtx(), { hashline_edit: { enabled: true } })
const input = { tool: "read", sessionID: "s", callID: "c" }
const output = {
title: "demo.ts",
output: [
"<file>",
" 00001| const x = 1",
" 00002| const y = 2",
"",
"(End of file - total 2 lines)",
"</file>",
].join("\n"),
metadata: {},
}
//#when
await hook["tool.execute.after"](input, output)
//#then
const lines = output.output.split("\n")
expect(lines[1]).toMatch(/^1#[ZPMQVRWSNKTXJBYH]{2}\|const x = 1$/)
expect(lines[2]).toMatch(/^2#[ZPMQVRWSNKTXJBYH]{2}\|const y = 2$/)
})
it("appends simple summary for write tool instead of full hashlined content", async () => {
//#given
const hook = createHashlineReadEnhancerHook(mockCtx(), { hashline_edit: { enabled: true } })
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hashline-write-"))
@@ -94,9 +208,55 @@ describe("hashline-read-enhancer", () => {
await hook["tool.execute.after"](input, output)
//#then
expect(output.output).toContain("Updated file (LINE#ID:content):")
expect(output.output).toMatch(/1#[ZPMQVRWSNKTXJBYH]{2}:const x = 1/)
expect(output.output).toMatch(/2#[ZPMQVRWSNKTXJBYH]{2}:const y = 2/)
expect(output.output).toContain("File written successfully.")
expect(output.output).toContain("2 lines written.")
expect(output.output).not.toContain("Updated file (LINE#ID|content):")
expect(output.output).not.toContain("const x = 1")
fs.rmSync(tempDir, { recursive: true, force: true })
})
it("does not re-process write output that already contains the success marker", async () => {
//#given
const hook = createHashlineReadEnhancerHook(mockCtx(), { hashline_edit: { enabled: true } })
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hashline-idem-"))
const filePath = path.join(tempDir, "demo.ts")
fs.writeFileSync(filePath, "a\nb\nc\nd\ne")
const input = { tool: "write", sessionID: "s", callID: "c" }
const output = {
title: "write",
output: "File written successfully. 99 lines written.",
metadata: { filepath: filePath },
}
//#when
await hook["tool.execute.after"](input, output)
//#then — guard should prevent re-reading the file and updating the count
expect(output.output).toBe("File written successfully. 99 lines written.")
fs.rmSync(tempDir, { recursive: true, force: true })
})
it("does not overwrite write tool error output with success message", async () => {
//#given — write tool failed, but stale file exists from previous write
const hook = createHashlineReadEnhancerHook(mockCtx(), { hashline_edit: { enabled: true } })
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hashline-err-"))
const filePath = path.join(tempDir, "demo.ts")
fs.writeFileSync(filePath, "const x = 1")
const input = { tool: "write", sessionID: "s", callID: "c" }
const output = {
title: "write",
output: "Error: EACCES: permission denied, open '" + filePath + "'",
metadata: { filepath: filePath },
}
//#when
await hook["tool.execute.after"](input, output)
//#then — error output must be preserved, not overwritten with success message
expect(output.output).toContain("Error: EACCES")
expect(output.output).not.toContain("File written successfully.")
fs.rmSync(tempDir, { recursive: true, force: true })
})

View File

@@ -77,13 +77,40 @@ export function createBackgroundOutput(manager: BackgroundOutputManager, client:
storeToolMetadata(ctx.sessionID, callID, meta)
}
const isActive = task.status === "pending" || task.status === "running"
const shouldBlock = args.block === true
const timeoutMs = Math.min(args.timeout ?? 60000, 600000)
const fullSession = args.full_session ?? true
let resolvedTask = task
if (shouldBlock && (task.status === "pending" || task.status === "running")) {
const startTime = Date.now()
while (Date.now() - startTime < timeoutMs) {
await delay(1000)
const currentTask = manager.getTask(args.task_id)
if (!currentTask) {
return `Task was deleted: ${args.task_id}`
}
if (currentTask.status !== "pending" && currentTask.status !== "running") {
resolvedTask = currentTask
break
}
}
const finalCheck = manager.getTask(args.task_id)
if (finalCheck) {
resolvedTask = finalCheck
}
}
const isActive = resolvedTask.status === "pending" || resolvedTask.status === "running"
const includeThinking = isActive || (args.include_thinking ?? false)
const includeToolResults = isActive || (args.include_tool_results ?? false)
if (fullSession) {
return await formatFullSession(task, client, {
return await formatFullSession(resolvedTask, client, {
includeThinking,
messageLimit: args.message_limit,
sinceMessageId: args.since_message_id,
@@ -92,44 +119,15 @@ export function createBackgroundOutput(manager: BackgroundOutputManager, client:
})
}
const shouldBlock = args.block === true
const timeoutMs = Math.min(args.timeout ?? 60000, 600000)
if (task.status === "completed") {
return await formatTaskResult(task, client)
if (resolvedTask.status === "completed") {
return await formatTaskResult(resolvedTask, client)
}
if (task.status === "error" || task.status === "cancelled" || task.status === "interrupt") {
return formatTaskStatus(task)
if (resolvedTask.status === "error" || resolvedTask.status === "cancelled" || resolvedTask.status === "interrupt") {
return formatTaskStatus(resolvedTask)
}
if (!shouldBlock) {
return formatTaskStatus(task)
}
const startTime = Date.now()
while (Date.now() - startTime < timeoutMs) {
await delay(1000)
const currentTask = manager.getTask(args.task_id)
if (!currentTask) {
return `Task was deleted: ${args.task_id}`
}
if (currentTask.status === "completed") {
return await formatTaskResult(currentTask, client)
}
if (currentTask.status === "error" || currentTask.status === "cancelled" || currentTask.status === "interrupt") {
return formatTaskStatus(currentTask)
}
}
const finalTask = manager.getTask(args.task_id)
if (!finalTask) {
return `Task was deleted: ${args.task_id}`
}
return `Timeout exceeded (${timeoutMs}ms). Task still ${finalTask.status}.\n\n${formatTaskStatus(finalTask)}`
return formatTaskStatus(resolvedTask)
} catch (error) {
return `Error getting output: ${error instanceof Error ? error.message : String(error)}`
}

View File

@@ -339,6 +339,48 @@ describe("background_output full_session", () => {
})
})
describe("background_output blocking", () => {
test("block=true waits for task completion even with default full_session=true", async () => {
// #given a task that transitions running → completed after 2 polls
let pollCount = 0
const task = createTask({ status: "running" })
const manager: BackgroundOutputManager = {
getTask: (id: string) => {
if (id !== task.id) return undefined
pollCount++
if (pollCount >= 3) {
task.status = "completed"
}
return task
},
}
const client = createMockClient({
"ses-1": [
{
id: "m1",
info: { role: "assistant", time: "2026-01-01T00:00:00Z" },
parts: [{ type: "text", text: "completed result" }],
},
],
})
const tool = createBackgroundOutput(manager, client)
// #when block=true, full_session not specified (defaults to true)
const output = await tool.execute({
task_id: "task-1",
block: true,
timeout: 10000,
}, mockContext)
// #then should have waited and returned full session output
expect(task.status).toBe("completed")
expect(pollCount).toBeGreaterThanOrEqual(3)
expect(output).toContain("# Full Session Output")
expect(output).toContain("completed result")
})
})
describe("background_cancel", () => {
test("cancels a running task via manager", async () => {
// #given

View File

@@ -15,6 +15,7 @@ export function stripMergeOperatorChars(text: string): string {
}
// Returns the run of leading whitespace characters at the start of `text`;
// empty string for empty input or when the line begins with a non-space.
function leadingWhitespace(text: string): string {
  if (!text) {
    return ""
  }
  const prefix = /^\s*/.exec(text)
  return prefix === null ? "" : prefix[0]
}
@@ -36,7 +37,9 @@ export function restoreOldWrappedLines(originalLines: string[], replacementLines
const candidates: { start: number; len: number; replacement: string; canonical: string }[] = []
for (let start = 0; start < replacementLines.length; start += 1) {
for (let len = 2; len <= 10 && start + len <= replacementLines.length; len += 1) {
const canonicalSpan = stripAllWhitespace(replacementLines.slice(start, start + len).join(""))
const span = replacementLines.slice(start, start + len)
if (span.some((line) => line.trim().length === 0)) continue
const canonicalSpan = stripAllWhitespace(span.join(""))
const original = canonicalToOriginal.get(canonicalSpan)
if (original && original.count === 1 && canonicalSpan.length >= 6) {
candidates.push({ start, len, replacement: original.line, canonical: canonicalSpan })
@@ -159,6 +162,7 @@ export function restoreIndentForPairedReplacement(
if (leadingWhitespace(line).length > 0) return line
const indent = leadingWhitespace(originalLines[idx])
if (indent.length === 0) return line
if (originalLines[idx].trim() === line.trim()) return line
return `${indent}${line}`
})
}

View File

@@ -7,5 +7,4 @@ export const HASHLINE_DICT = Array.from({ length: 256 }, (_, i) => {
})
export const HASHLINE_REF_PATTERN = /^([0-9]+)#([ZPMQVRWSNKTXJBYH]{2})$/
export const HASHLINE_OUTPUT_PATTERN = /^([0-9]+)#([ZPMQVRWSNKTXJBYH]{2}):(.*)$/
export const HASHLINE_LEGACY_REF_PATTERN = /^([0-9]+):([0-9a-fA-F]{2,})$/
export const HASHLINE_OUTPUT_PATTERN = /^([0-9]+)#([ZPMQVRWSNKTXJBYH]{2})\|(.*)$/

View File

@@ -9,7 +9,7 @@ export function toHashlineContent(content: string): string {
const hashlined = contentLines.map((line, i) => {
const lineNum = i + 1
const hash = computeLineHash(lineNum, line)
return `${lineNum}#${hash}:${line}`
return `${lineNum}#${hash}|${line}`
})
return hasTrailingNewline ? hashlined.join("\n") + "\n" : hashlined.join("\n")
}

View File

@@ -6,23 +6,13 @@ function normalizeEditPayload(payload: string | string[]): string {
}
function buildDedupeKey(edit: HashlineEdit): string {
switch (edit.type) {
case "set_line":
return `set_line|${edit.line}|${normalizeEditPayload(edit.text)}`
case "replace_lines":
return `replace_lines|${edit.start_line}|${edit.end_line}|${normalizeEditPayload(edit.text)}`
case "insert_after":
return `insert_after|${edit.line}|${normalizeEditPayload(edit.text)}`
case "insert_before":
return `insert_before|${edit.line}|${normalizeEditPayload(edit.text)}`
case "insert_between":
return `insert_between|${edit.after_line}|${edit.before_line}|${normalizeEditPayload(edit.text)}`
switch (edit.op) {
case "replace":
return `replace|${edit.old_text}|${normalizeEditPayload(edit.new_text)}`
return `replace|${edit.pos}|${edit.end ?? ""}|${normalizeEditPayload(edit.lines)}`
case "append":
return `append|${normalizeEditPayload(edit.text)}`
return `append|${edit.pos ?? ""}|${normalizeEditPayload(edit.lines)}`
case "prepend":
return `prepend|${normalizeEditPayload(edit.text)}`
return `prepend|${edit.pos ?? ""}|${normalizeEditPayload(edit.lines)}`
default:
return JSON.stringify(edit)
}

View File

@@ -63,7 +63,7 @@ export function applyReplaceLines(
const corrected = autocorrectReplacementLines(originalRange, stripped)
const restored = corrected.map((entry, idx) => {
if (idx !== 0) return entry
return restoreLeadingIndent(lines[startLine - 1], entry)
return restoreLeadingIndent(lines[startLine - 1] ?? "", entry)
})
result.splice(startLine - 1, endLine - startLine + 1, ...restored)
return result
@@ -80,7 +80,7 @@ export function applyInsertAfter(
const result = [...lines]
const newLines = stripInsertAnchorEcho(lines[line - 1], toNewLines(text))
if (newLines.length === 0) {
throw new Error(`insert_after requires non-empty text for ${anchor}`)
throw new Error(`append (anchored) requires non-empty text for ${anchor}`)
}
result.splice(line, 0, ...newLines)
return result
@@ -97,38 +97,12 @@ export function applyInsertBefore(
const result = [...lines]
const newLines = stripInsertBeforeEcho(lines[line - 1], toNewLines(text))
if (newLines.length === 0) {
throw new Error(`insert_before requires non-empty text for ${anchor}`)
throw new Error(`prepend (anchored) requires non-empty text for ${anchor}`)
}
result.splice(line - 1, 0, ...newLines)
return result
}
// Inserts `text` between two anchored lines. `afterAnchor` must resolve to a
// strictly smaller line number than `beforeAnchor`; throws otherwise, and
// also throws when, after stripping boundary echoes, no lines remain.
// Validation of the anchors against `lines` is skipped when the caller passes
// options that disable it (see shouldValidate).
export function applyInsertBetween(
lines: string[],
afterAnchor: string,
beforeAnchor: string,
text: string | string[],
options?: EditApplyOptions
): string[] {
if (shouldValidate(options)) {
validateLineRef(lines, afterAnchor)
validateLineRef(lines, beforeAnchor)
}
const { line: afterLine } = parseLineRef(afterAnchor)
const { line: beforeLine } = parseLineRef(beforeAnchor)
if (beforeLine <= afterLine) {
throw new Error(`insert_between requires after_line (${afterLine}) < before_line (${beforeLine})`)
}
const result = [...lines]
// Drop any echo of the boundary lines that was duplicated into the payload.
const newLines = stripInsertBoundaryEcho(lines[afterLine - 1], lines[beforeLine - 1], toNewLines(text))
if (newLines.length === 0) {
throw new Error(`insert_between requires non-empty text for ${afterAnchor}..${beforeAnchor}`)
}
// Insert just before the `beforeLine` anchor (1-based line → 0-based index).
result.splice(beforeLine - 1, 0, ...newLines)
return result
}
export function applyAppend(lines: string[], text: string | string[]): string[] {
const normalized = toNewLines(text)
if (normalized.length === 0) {
@@ -150,11 +124,3 @@ export function applyPrepend(lines: string[], text: string | string[]): string[]
}
return [...normalized, ...lines]
}
export function applyReplace(content: string, oldText: string, newText: string | string[]): string {
if (!content.includes(oldText)) {
throw new Error(`Text not found: "${oldText}"`)
}
const replacement = Array.isArray(newText) ? newText.join("\n") : newText
return content.replaceAll(oldText, replacement)
}

View File

@@ -1,6 +1,6 @@
import { describe, expect, it } from "bun:test"
import { applyHashlineEdits, applyInsertAfter, applyReplace, applyReplaceLines, applySetLine } from "./edit-operations"
import { applyAppend, applyPrepend } from "./edit-operation-primitives"
import { applyHashlineEdits } from "./edit-operations"
import { applyAppend, applyInsertAfter, applyPrepend, applyReplaceLines, applySetLine } from "./edit-operation-primitives"
import { computeLineHash } from "./hash-computation"
import type { HashlineEdit } from "./types"
@@ -49,31 +49,13 @@ describe("hashline edit operations", () => {
//#when
const result = applyHashlineEdits(
lines.join("\n"),
[{ type: "insert_before", line: anchorFor(lines, 2), text: "before 2" }]
[{ op: "prepend", pos: anchorFor(lines, 2), lines: "before 2" }]
)
//#then
expect(result).toEqual("line 1\nbefore 2\nline 2\nline 3")
})
it("applies insert_between with dual anchors", () => {
//#given
const lines = ["line 1", "line 2", "line 3"]
//#when
const result = applyHashlineEdits(
lines.join("\n"),
[{
type: "insert_between",
after_line: anchorFor(lines, 1),
before_line: anchorFor(lines, 2),
text: ["between"],
}]
)
//#then
expect(result).toEqual("line 1\nbetween\nline 2\nline 3")
})
it("throws when insert_after receives empty text array", () => {
//#given
@@ -89,46 +71,18 @@ describe("hashline edit operations", () => {
//#when / #then
expect(() =>
applyHashlineEdits(lines.join("\n"), [{ type: "insert_before", line: anchorFor(lines, 1), text: [] }])
applyHashlineEdits(lines.join("\n"), [{ op: "prepend", pos: anchorFor(lines, 1), lines: [] }])
).toThrow(/non-empty/i)
})
it("throws when insert_between receives empty text array", () => {
//#given
const lines = ["line 1", "line 2"]
//#when / #then
expect(() =>
applyHashlineEdits(
lines.join("\n"),
[{
type: "insert_between",
after_line: anchorFor(lines, 1),
before_line: anchorFor(lines, 2),
text: [],
}]
)
).toThrow(/non-empty/i)
})
it("applies replace operation", () => {
//#given
const content = "hello world foo"
//#when
const result = applyReplace(content, "world", "universe")
//#then
expect(result).toEqual("hello universe foo")
})
it("applies mixed edits in one pass", () => {
//#given
const content = "line 1\nline 2\nline 3"
const lines = content.split("\n")
const edits: HashlineEdit[] = [
{ type: "insert_after", line: anchorFor(lines, 1), text: "inserted" },
{ type: "set_line", line: anchorFor(lines, 3), text: "modified" },
{ op: "append", pos: anchorFor(lines, 1), lines: "inserted" },
{ op: "replace", pos: anchorFor(lines, 3), lines: "modified" },
]
//#when
@@ -138,13 +92,29 @@ describe("hashline edit operations", () => {
expect(result).toEqual("line 1\ninserted\nline 2\nmodified")
})
it("applies replace before prepend when both target same line", () => {
//#given
const content = "line 1\nline 2\nline 3"
const lines = content.split("\n")
const edits: HashlineEdit[] = [
{ op: "prepend", pos: anchorFor(lines, 2), lines: "before line 2" },
{ op: "replace", pos: anchorFor(lines, 2), lines: "modified line 2" },
]
//#when
const result = applyHashlineEdits(content, edits)
//#then
expect(result).toEqual("line 1\nbefore line 2\nmodified line 2\nline 3")
})
it("deduplicates identical insert edits in one pass", () => {
//#given
const content = "line 1\nline 2"
const lines = content.split("\n")
const edits: HashlineEdit[] = [
{ type: "insert_after", line: anchorFor(lines, 1), text: "inserted" },
{ type: "insert_after", line: anchorFor(lines, 1), text: "inserted" },
{ op: "append", pos: anchorFor(lines, 1), lines: "inserted" },
{ op: "append", pos: anchorFor(lines, 1), lines: "inserted" },
]
//#when
@@ -170,7 +140,7 @@ describe("hashline edit operations", () => {
const lines = ["line 1", "line 2", "line 3"]
//#when
const result = applySetLine(lines, anchorFor(lines, 2), "1#VK:first\n2#NP:second")
const result = applySetLine(lines, anchorFor(lines, 2), "1#VK|first\n2#NP|second")
//#then
expect(result).toEqual(["line 1", "first", "second", "line 3"])
@@ -206,6 +176,28 @@ describe("hashline edit operations", () => {
expect(result).toEqual(["if (x) {", " return 2", "}"])
})
it("preserves intentional indentation removal (tab to no-tab)", () => {
//#given
const lines = ["# Title", "\t1절", "content"]
//#when
const result = applySetLine(lines, anchorFor(lines, 2), "1절")
//#then
expect(result).toEqual(["# Title", "1절", "content"])
})
it("preserves intentional indentation removal (spaces to no-spaces)", () => {
//#given
const lines = ["function foo() {", " indented", "}"]
//#when
const result = applySetLine(lines, anchorFor(lines, 2), "indented")
//#then
expect(result).toEqual(["function foo() {", "indented", "}"])
})
it("strips boundary echo around replace_lines content", () => {
//#given
const lines = ["before", "old 1", "old 2", "after"]
@@ -222,22 +214,6 @@ describe("hashline edit operations", () => {
expect(result).toEqual(["before", "new 1", "new 2", "after"])
})
it("throws when insert_between payload contains only boundary echoes", () => {
//#given
const lines = ["line 1", "line 2", "line 3"]
//#when / #then
expect(() =>
applyHashlineEdits(lines.join("\n"), [
{
type: "insert_between",
after_line: anchorFor(lines, 1),
before_line: anchorFor(lines, 2),
text: ["line 1", "line 2"],
},
])
).toThrow(/non-empty/i)
})
it("restores indentation for first replace_lines entry", () => {
//#given
@@ -250,6 +226,22 @@ describe("hashline edit operations", () => {
expect(result).toEqual(["if (x) {", " return 3", " return 4", "}"])
})
it("preserves blank lines and indentation in range replace (no false unwrap)", () => {
//#given — reproduces the 애국가 bug where blank+indented lines collapse
const lines = ["", "동해물과 백두산이 마르고 닳도록", "하느님이 보우하사 우리나라 만세", "", "무궁화 삼천리 화려강산", "대한사람 대한으로 길이 보전하세", ""]
//#when — replace the range with indented version (blank lines preserved)
const result = applyReplaceLines(
lines,
anchorFor(lines, 1),
anchorFor(lines, 7),
["", " 동해물과 백두산이 마르고 닳도록", " 하느님이 보우하사 우리나라 만세", "", " 무궁화 삼천리 화려강산", " 대한사람 대한으로 길이 보전하세", ""]
)
//#then — all 7 lines preserved with indentation, not collapsed to 3
expect(result).toEqual(["", " 동해물과 백두산이 마르고 닳도록", " 하느님이 보우하사 우리나라 만세", "", " 무궁화 삼천리 화려강산", " 대한사람 대한으로 길이 보전하세", ""])
})
it("collapses wrapped replacement span back to unique original single line", () => {
//#given
const lines = [
@@ -322,8 +314,8 @@ describe("hashline edit operations", () => {
//#when
const result = applyHashlineEdits(content, [
{ type: "append", text: ["line 3"] },
{ type: "prepend", text: ["line 0"] },
{ op: "append", lines: ["line 3"] },
{ op: "prepend", lines: ["line 0"] },
])
//#then
@@ -367,4 +359,33 @@ describe("hashline edit operations", () => {
//#then
expect(result).toEqual(["const a = 10;", "const b = 20;"])
})
it("throws on overlapping range edits", () => {
//#given
const content = "line 1\nline 2\nline 3\nline 4\nline 5"
const lines = content.split("\n")
const edits: HashlineEdit[] = [
{ op: "replace", pos: anchorFor(lines, 1), end: anchorFor(lines, 3), lines: "replaced A" },
{ op: "replace", pos: anchorFor(lines, 2), end: anchorFor(lines, 4), lines: "replaced B" },
]
//#when / #then
expect(() => applyHashlineEdits(content, edits)).toThrow(/overlapping/i)
})
it("allows non-overlapping range edits", () => {
//#given
const content = "line 1\nline 2\nline 3\nline 4\nline 5"
const lines = content.split("\n")
const edits: HashlineEdit[] = [
{ op: "replace", pos: anchorFor(lines, 1), end: anchorFor(lines, 2), lines: "replaced A" },
{ op: "replace", pos: anchorFor(lines, 4), end: anchorFor(lines, 5), lines: "replaced B" },
]
//#when
const result = applyHashlineEdits(content, edits)
//#then
expect(result).toEqual("replaced A\nline 3\nreplaced B")
})
})

View File

@@ -1,13 +1,11 @@
import { dedupeEdits } from "./edit-deduplication"
import { collectLineRefs, getEditLineNumber } from "./edit-ordering"
import { collectLineRefs, detectOverlappingRanges, getEditLineNumber } from "./edit-ordering"
import type { HashlineEdit } from "./types"
import {
applyAppend,
applyInsertAfter,
applyInsertBefore,
applyInsertBetween,
applyPrepend,
applyReplace,
applyReplaceLines,
applySetLine,
} from "./edit-operation-primitives"
@@ -29,46 +27,30 @@ export function applyHashlineEditsWithReport(content: string, edits: HashlineEdi
}
const dedupeResult = dedupeEdits(edits)
const sortedEdits = [...dedupeResult.edits].sort((a, b) => getEditLineNumber(b) - getEditLineNumber(a))
const EDIT_PRECEDENCE: Record<string, number> = { replace: 0, append: 1, prepend: 2 }
const sortedEdits = [...dedupeResult.edits].sort((a, b) => {
const lineA = getEditLineNumber(a)
const lineB = getEditLineNumber(b)
if (lineB !== lineA) return lineB - lineA
return (EDIT_PRECEDENCE[a.op] ?? 3) - (EDIT_PRECEDENCE[b.op] ?? 3)
})
let noopEdits = 0
let result = content
let lines = result.length === 0 ? [] : result.split("\n")
let lines = content.length === 0 ? [] : content.split("\n")
const refs = collectLineRefs(sortedEdits)
validateLineRefs(lines, refs)
const overlapError = detectOverlappingRanges(sortedEdits)
if (overlapError) throw new Error(overlapError)
for (const edit of sortedEdits) {
switch (edit.type) {
case "set_line": {
lines = applySetLine(lines, edit.line, edit.text, { skipValidation: true })
break
}
case "replace_lines": {
lines = applyReplaceLines(lines, edit.start_line, edit.end_line, edit.text, { skipValidation: true })
break
}
case "insert_after": {
const next = applyInsertAfter(lines, edit.line, edit.text, { skipValidation: true })
if (next.join("\n") === lines.join("\n")) {
noopEdits += 1
break
}
lines = next
break
}
case "insert_before": {
const next = applyInsertBefore(lines, edit.line, edit.text, { skipValidation: true })
if (next.join("\n") === lines.join("\n")) {
noopEdits += 1
break
}
lines = next
break
}
case "insert_between": {
const next = applyInsertBetween(lines, edit.after_line, edit.before_line, edit.text, { skipValidation: true })
switch (edit.op) {
case "replace": {
const next = edit.end
? applyReplaceLines(lines, edit.pos, edit.end, edit.lines, { skipValidation: true })
: applySetLine(lines, edit.pos, edit.lines, { skipValidation: true })
if (next.join("\n") === lines.join("\n")) {
noopEdits += 1
break
@@ -77,7 +59,9 @@ export function applyHashlineEditsWithReport(content: string, edits: HashlineEdi
break
}
case "append": {
const next = applyAppend(lines, edit.text)
const next = edit.pos
? applyInsertAfter(lines, edit.pos, edit.lines, { skipValidation: true })
: applyAppend(lines, edit.lines)
if (next.join("\n") === lines.join("\n")) {
noopEdits += 1
break
@@ -86,7 +70,9 @@ export function applyHashlineEditsWithReport(content: string, edits: HashlineEdi
break
}
case "prepend": {
const next = applyPrepend(lines, edit.text)
const next = edit.pos
? applyInsertBefore(lines, edit.pos, edit.lines, { skipValidation: true })
: applyPrepend(lines, edit.lines)
if (next.join("\n") === lines.join("\n")) {
noopEdits += 1
break
@@ -94,17 +80,6 @@ export function applyHashlineEditsWithReport(content: string, edits: HashlineEdi
lines = next
break
}
case "replace": {
result = lines.join("\n")
const replaced = applyReplace(result, edit.old_text, edit.new_text)
if (replaced === result) {
noopEdits += 1
break
}
result = replaced
lines = result.split("\n")
break
}
}
}
@@ -118,12 +93,3 @@ export function applyHashlineEditsWithReport(content: string, edits: HashlineEdi
export function applyHashlineEdits(content: string, edits: HashlineEdit[]): string {
return applyHashlineEditsWithReport(content, edits).content
}
export {
applySetLine,
applyReplaceLines,
applyInsertAfter,
applyInsertBefore,
applyInsertBetween,
applyReplace,
} from "./edit-operation-primitives"

View File

@@ -2,23 +2,13 @@ import { parseLineRef } from "./validation"
import type { HashlineEdit } from "./types"
export function getEditLineNumber(edit: HashlineEdit): number {
switch (edit.type) {
case "set_line":
return parseLineRef(edit.line).line
case "replace_lines":
return parseLineRef(edit.end_line).line
case "insert_after":
return parseLineRef(edit.line).line
case "insert_before":
return parseLineRef(edit.line).line
case "insert_between":
return parseLineRef(edit.before_line).line
case "append":
return Number.NEGATIVE_INFINITY
case "prepend":
return Number.NEGATIVE_INFINITY
switch (edit.op) {
case "replace":
return Number.NEGATIVE_INFINITY
return parseLineRef(edit.end ?? edit.pos).line
case "append":
return edit.pos ? parseLineRef(edit.pos).line : Number.NEGATIVE_INFINITY
case "prepend":
return edit.pos ? parseLineRef(edit.pos).line : Number.NEGATIVE_INFINITY
default:
return Number.POSITIVE_INFINITY
}
@@ -26,23 +16,41 @@ export function getEditLineNumber(edit: HashlineEdit): number {
export function collectLineRefs(edits: HashlineEdit[]): string[] {
return edits.flatMap((edit) => {
switch (edit.type) {
case "set_line":
return [edit.line]
case "replace_lines":
return [edit.start_line, edit.end_line]
case "insert_after":
return [edit.line]
case "insert_before":
return [edit.line]
case "insert_between":
return [edit.after_line, edit.before_line]
switch (edit.op) {
case "replace":
return edit.end ? [edit.pos, edit.end] : [edit.pos]
case "append":
case "prepend":
case "replace":
return []
return edit.pos ? [edit.pos] : []
default:
return []
}
})
}
// Scans `edits` for range-replace operations (op === "replace" with an `end`
// anchor) whose line ranges overlap each other. Returns a human-readable
// error message describing the first overlapping pair, or null when all
// ranges are pairwise disjoint.
// NOTE(review): assumes anchors were already validated upstream so
// parseLineRef does not throw here — confirm against callers.
export function detectOverlappingRanges(edits: HashlineEdit[]): string | null {
const ranges: { start: number; end: number; idx: number }[] = []
for (let i = 0; i < edits.length; i++) {
const edit = edits[i]
// Only multi-line replaces define a range; pos-only edits touch one line.
if (edit.op !== "replace" || !edit.end) continue
const start = parseLineRef(edit.pos).line
const end = parseLineRef(edit.end).line
ranges.push({ start, end, idx: i })
}
if (ranges.length < 2) return null
// Sort by start (then end). With starts sorted ascending, any overlap must
// appear between adjacent entries, so one linear pass over neighbors suffices.
ranges.sort((a, b) => a.start - b.start || a.end - b.end)
for (let i = 1; i < ranges.length; i++) {
const prev = ranges[i - 1]
const curr = ranges[i]
if (curr.start <= prev.end) {
// idx is the position in the caller-supplied edits array; report 1-based.
return (
`Overlapping range edits detected: ` +
`edit ${prev.idx + 1} (lines ${prev.start}-${prev.end}) overlaps with ` +
`edit ${curr.idx + 1} (lines ${curr.start}-${curr.end}). ` +
`Use pos-only replace for single-line edits.`
)
}
}
return null
}

View File

@@ -1,4 +1,4 @@
const HASHLINE_PREFIX_RE = /^\s*(?:>>>|>>)?\s*\d+\s*#\s*[ZPMQVRWSNKTXJBYH]{2}:/
const HASHLINE_PREFIX_RE = /^\s*(?:>>>|>>)?\s*\d+\s*#\s*[ZPMQVRWSNKTXJBYH]{2}\|/
const DIFF_PLUS_RE = /^[+](?![+])/
function equalsIgnoringWhitespace(a: string, b: string): boolean {
@@ -7,6 +7,7 @@ function equalsIgnoringWhitespace(a: string, b: string): boolean {
}
// Extracts the leading-whitespace prefix of `text` ("" when text is empty).
function leadingWhitespace(text: string): string {
  if (text.length === 0) return ""
  const m = /^\s*/.exec(text)
  return m?.[0] ?? ""
}
@@ -53,6 +54,7 @@ export function restoreLeadingIndent(templateLine: string, line: string): string
const templateIndent = leadingWhitespace(templateLine)
if (templateIndent.length === 0) return line
if (leadingWhitespace(line).length > 0) return line
if (templateLine.trim() === line.trim()) return line
return `${templateIndent}${line}`
}

View File

@@ -60,7 +60,7 @@ describe("computeLineHash", () => {
})
describe("formatHashLine", () => {
it("formats single line as LINE#ID:content", () => {
it("formats single line as LINE#ID|content", () => {
//#given
const lineNumber = 42
const content = "const x = 42"
@@ -69,12 +69,12 @@ describe("formatHashLine", () => {
const result = formatHashLine(lineNumber, content)
//#then
expect(result).toMatch(/^42#[ZPMQVRWSNKTXJBYH]{2}:const x = 42$/)
expect(result).toMatch(/^42#[ZPMQVRWSNKTXJBYH]{2}\|const x = 42$/)
})
})
describe("formatHashLines", () => {
it("formats all lines as LINE#ID:content", () => {
it("formats all lines as LINE#ID|content", () => {
//#given
const content = "a\nb\nc"
@@ -84,9 +84,9 @@ describe("formatHashLines", () => {
//#then
const lines = result.split("\n")
expect(lines).toHaveLength(3)
expect(lines[0]).toMatch(/^1#[ZPMQVRWSNKTXJBYH]{2}:a$/)
expect(lines[1]).toMatch(/^2#[ZPMQVRWSNKTXJBYH]{2}:b$/)
expect(lines[2]).toMatch(/^3#[ZPMQVRWSNKTXJBYH]{2}:c$/)
expect(lines[0]).toMatch(/^1#[ZPMQVRWSNKTXJBYH]{2}\|a$/)
expect(lines[1]).toMatch(/^2#[ZPMQVRWSNKTXJBYH]{2}\|b$/)
expect(lines[2]).toMatch(/^3#[ZPMQVRWSNKTXJBYH]{2}\|c$/)
})
})

View File

@@ -13,7 +13,7 @@ export function computeLineHash(lineNumber: number, content: string): string {
export function formatHashLine(lineNumber: number, content: string): string {
const hash = computeLineHash(lineNumber, content)
return `${lineNumber}#${hash}:${content}`
return `${lineNumber}#${hash}|${content}`
}
export function formatHashLines(content: string): string {

View File

@@ -14,16 +14,16 @@ export function generateHashlineDiff(oldContent: string, newContent: string, fil
const hash = computeLineHash(lineNum, newLine)
if (i >= oldLines.length) {
diff += `+ ${lineNum}#${hash}:${newLine}\n`
diff += `+ ${lineNum}#${hash}|${newLine}\n`
continue
}
if (i >= newLines.length) {
diff += `- ${lineNum}# :${oldLine}\n`
diff += `- ${lineNum}# |${oldLine}\n`
continue
}
if (oldLine !== newLine) {
diff += `- ${lineNum}# :${oldLine}\n`
diff += `+ ${lineNum}#${hash}:${newLine}\n`
diff += `- ${lineNum}# |${oldLine}\n`
diff += `+ ${lineNum}#${hash}|${newLine}\n`
}
}

View File

@@ -5,6 +5,7 @@ import { countLineDiffs, generateUnifiedDiff } from "./diff-utils"
import { canonicalizeFileText, restoreFileText } from "./file-text-canonicalization"
import { normalizeHashlineEdits, type RawHashlineEdit } from "./normalize-edits"
import type { HashlineEdit } from "./types"
import { HashlineMismatchError } from "./validation"
interface HashlineEditArgs {
filePath: string
@@ -32,7 +33,7 @@ function resolveToolCallID(ctx: ToolContextWithCallID): string | undefined {
function canCreateFromMissingFile(edits: HashlineEdit[]): boolean {
if (edits.length === 0) return false
return edits.every((edit) => edit.type === "append" || edit.type === "prepend")
return edits.every((edit) => edit.op === "append" || edit.op === "prepend")
}
function buildSuccessMeta(
@@ -158,7 +159,7 @@ export async function executeHashlineEditTool(args: HashlineEditArgs, context: T
return `Updated ${effectivePath}`
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
if (message.toLowerCase().includes("hash")) {
if (error instanceof HashlineMismatchError) {
return `Error: hash mismatch - ${message}\nTip: reuse LINE#ID entries from the latest read/edit output, or batch related edits in one call.`
}
return `Error: ${message}`

View File

@@ -8,24 +8,13 @@ export {
export { parseLineRef, validateLineRef } from "./validation"
export type { LineRef } from "./validation"
export type {
SetLine,
ReplaceLines,
InsertAfter,
InsertBefore,
InsertBetween,
Replace,
Append,
Prepend,
ReplaceEdit,
AppendEdit,
PrependEdit,
HashlineEdit,
} from "./types"
export { NIBBLE_STR, HASHLINE_DICT, HASHLINE_REF_PATTERN, HASHLINE_OUTPUT_PATTERN } from "./constants"
export {
applyHashlineEdits,
applyInsertAfter,
applyInsertBefore,
applyInsertBetween,
applyReplace,
applyReplaceLines,
applySetLine,
} from "./edit-operations"
export { createHashlineEditTool } from "./tools"

View File

@@ -0,0 +1,61 @@
import { describe, expect, it } from "bun:test"
import { normalizeHashlineEdits, type RawHashlineEdit } from "./normalize-edits"
describe("normalizeHashlineEdits", () => {
it("maps replace with pos to replace", () => {
//#given
const input: RawHashlineEdit[] = [{ op: "replace", pos: "2#VK", lines: "updated" }]
//#when
const result = normalizeHashlineEdits(input)
//#then
expect(result).toEqual([{ op: "replace", pos: "2#VK", lines: "updated" }])
})
it("maps replace with pos and end to replace", () => {
//#given
const input: RawHashlineEdit[] = [{ op: "replace", pos: "2#VK", end: "4#MB", lines: ["a", "b"] }]
//#when
const result = normalizeHashlineEdits(input)
//#then
expect(result).toEqual([{ op: "replace", pos: "2#VK", end: "4#MB", lines: ["a", "b"] }])
})
it("maps anchored append and prepend preserving op", () => {
//#given
const input: RawHashlineEdit[] = [
{ op: "append", pos: "2#VK", lines: ["after"] },
{ op: "prepend", pos: "4#MB", lines: ["before"] },
]
//#when
const result = normalizeHashlineEdits(input)
//#then
expect(result).toEqual([{ op: "append", pos: "2#VK", lines: ["after"] }, { op: "prepend", pos: "4#MB", lines: ["before"] }])
})
it("prefers pos over end for prepend anchors", () => {
//#given
const input: RawHashlineEdit[] = [{ op: "prepend", pos: "3#AA", end: "7#BB", lines: ["before"] }]
//#when
const result = normalizeHashlineEdits(input)
//#then
expect(result).toEqual([{ op: "prepend", pos: "3#AA", lines: ["before"] }])
})
it("rejects legacy payload without op", () => {
//#given
const input = [{ type: "set_line", line: "2#VK", text: "updated" }] as unknown as Parameters<
typeof normalizeHashlineEdits
>[0]
//#when / #then
expect(() => normalizeHashlineEdits(input)).toThrow(/legacy format was removed/i)
})
})

View File

@@ -1,142 +1,95 @@
import type { HashlineEdit } from "./types"
import type { AppendEdit, HashlineEdit, PrependEdit, ReplaceEdit } from "./types"
type HashlineToolOp = "replace" | "append" | "prepend"
export interface RawHashlineEdit {
type?:
| "set_line"
| "replace_lines"
| "insert_after"
| "insert_before"
| "insert_between"
| "replace"
| "append"
| "prepend"
line?: string
start_line?: string
end_line?: string
after_line?: string
before_line?: string
text?: string | string[]
old_text?: string
new_text?: string | string[]
op?: HashlineToolOp
pos?: string
end?: string
lines?: string | string[] | null
}
function firstDefined(...values: Array<string | undefined>): string | undefined {
for (const value of values) {
if (typeof value === "string" && value.trim() !== "") return value
function normalizeAnchor(value: string | undefined): string | undefined {
if (typeof value !== "string") return undefined
const trimmed = value.trim()
return trimmed === "" ? undefined : trimmed
}
function requireLines(edit: RawHashlineEdit, index: number): string | string[] {
if (edit.lines === undefined) {
throw new Error(`Edit ${index}: lines is required for ${edit.op ?? "unknown"}`)
}
return undefined
}
function requireText(edit: RawHashlineEdit, index: number): string | string[] {
const text = edit.text ?? edit.new_text
if (text === undefined) {
throw new Error(`Edit ${index}: text is required for ${edit.type ?? "unknown"}`)
if (edit.lines === null) {
return []
}
return text
return edit.lines
}
function requireLine(anchor: string | undefined, index: number, op: string): string {
function requireLine(anchor: string | undefined, index: number, op: HashlineToolOp): string {
if (!anchor) {
throw new Error(`Edit ${index}: ${op} requires at least one anchor line reference`)
throw new Error(`Edit ${index}: ${op} requires at least one anchor line reference (pos or end)`)
}
return anchor
}
export function normalizeHashlineEdits(rawEdits: RawHashlineEdit[]): HashlineEdit[] {
const normalized: HashlineEdit[] = []
function normalizeReplaceEdit(edit: RawHashlineEdit, index: number): HashlineEdit {
const pos = normalizeAnchor(edit.pos)
const end = normalizeAnchor(edit.end)
const anchor = requireLine(pos ?? end, index, "replace")
const lines = requireLines(edit, index)
for (let index = 0; index < rawEdits.length; index += 1) {
const edit = rawEdits[index] ?? {}
const type = edit.type
switch (type) {
case "set_line": {
const anchor = firstDefined(edit.line, edit.start_line, edit.end_line, edit.after_line, edit.before_line)
normalized.push({
type: "set_line",
line: requireLine(anchor, index, "set_line"),
text: requireText(edit, index),
})
break
}
case "replace_lines": {
const startAnchor = firstDefined(edit.start_line, edit.line, edit.after_line)
const endAnchor = firstDefined(edit.end_line, edit.line, edit.before_line)
if (!startAnchor && !endAnchor) {
throw new Error(`Edit ${index}: replace_lines requires start_line or end_line`)
}
if (startAnchor && endAnchor) {
normalized.push({
type: "replace_lines",
start_line: startAnchor,
end_line: endAnchor,
text: requireText(edit, index),
})
} else {
normalized.push({
type: "set_line",
line: requireLine(startAnchor ?? endAnchor, index, "replace_lines"),
text: requireText(edit, index),
})
}
break
}
case "insert_after": {
const anchor = firstDefined(edit.line, edit.after_line, edit.end_line, edit.start_line)
normalized.push({
type: "insert_after",
line: requireLine(anchor, index, "insert_after"),
text: requireText(edit, index),
})
break
}
case "insert_before": {
const anchor = firstDefined(edit.line, edit.before_line, edit.start_line, edit.end_line)
normalized.push({
type: "insert_before",
line: requireLine(anchor, index, "insert_before"),
text: requireText(edit, index),
})
break
}
case "insert_between": {
const afterLine = firstDefined(edit.after_line, edit.line, edit.start_line)
const beforeLine = firstDefined(edit.before_line, edit.end_line, edit.line)
normalized.push({
type: "insert_between",
after_line: requireLine(afterLine, index, "insert_between.after_line"),
before_line: requireLine(beforeLine, index, "insert_between.before_line"),
text: requireText(edit, index),
})
break
}
case "replace": {
const oldText = edit.old_text
const newText = edit.new_text ?? edit.text
if (!oldText) {
throw new Error(`Edit ${index}: replace requires old_text`)
}
if (newText === undefined) {
throw new Error(`Edit ${index}: replace requires new_text or text`)
}
normalized.push({ type: "replace", old_text: oldText, new_text: newText })
break
}
case "append": {
normalized.push({ type: "append", text: requireText(edit, index) })
break
}
case "prepend": {
normalized.push({ type: "prepend", text: requireText(edit, index) })
break
}
default: {
throw new Error(`Edit ${index}: unsupported type "${String(type)}"`)
}
}
const normalized: ReplaceEdit = {
op: "replace",
pos: anchor,
lines,
}
if (end) normalized.end = end
return normalized
}
/**
 * Normalizes a raw append edit. The anchor is optional (unanchored append
 * targets EOF); when both pos and end are given, pos wins.
 */
function normalizeAppendEdit(edit: RawHashlineEdit, index: number): HashlineEdit {
  const anchorCandidate = normalizeAnchor(edit.pos) ?? normalizeAnchor(edit.end)
  const content = requireLines(edit, index)
  const result: AppendEdit = { op: "append", lines: content }
  if (anchorCandidate !== undefined) result.pos = anchorCandidate
  return result
}
/**
 * Normalizes a raw prepend edit. The anchor is optional (unanchored prepend
 * targets BOF); when both pos and end are given, pos wins.
 */
function normalizePrependEdit(edit: RawHashlineEdit, index: number): HashlineEdit {
  const anchorCandidate = normalizeAnchor(edit.pos) ?? normalizeAnchor(edit.end)
  const content = requireLines(edit, index)
  const result: PrependEdit = { op: "prepend", lines: content }
  if (anchorCandidate !== undefined) result.pos = anchorCandidate
  return result
}
/**
 * Maps each raw edit to its normalized form, dispatching on the "op"
 * discriminator. Any other (or missing) op — including the removed legacy
 * type-based format — is rejected with a descriptive error.
 */
export function normalizeHashlineEdits(rawEdits: RawHashlineEdit[]): HashlineEdit[] {
  const normalizeOne = (rawEdit: RawHashlineEdit, index: number): HashlineEdit => {
    const edit = rawEdit ?? {}
    if (edit.op === "replace") return normalizeReplaceEdit(edit, index)
    if (edit.op === "append") return normalizeAppendEdit(edit, index)
    if (edit.op === "prepend") return normalizePrependEdit(edit, index)
    throw new Error(
      `Edit ${index}: unsupported op "${String(edit.op)}". Legacy format was removed; use op/pos/end/lines.`
    )
  }
  return rawEdits.map(normalizeOne)
}

View File

@@ -5,40 +5,40 @@ WORKFLOW:
2. Pick the smallest operation per logical mutation site.
3. Submit one edit call per file with all related operations.
4. If same file needs another call, re-read first.
5. Use anchors as "LINE#ID" only (never include trailing ":content").
5. Use anchors as "LINE#ID" only (never include trailing "|content").
VALIDATION:
Payload shape: { "filePath": string, "edits": [...], "delete"?: boolean, "rename"?: string }
Each edit must be one of: set_line, replace_lines, insert_after, insert_before, insert_between, replace, append, prepend
text/new_text must contain plain replacement text only (no LINE#ID prefixes, no diff + markers)
CRITICAL: all operations validate against the same pre-edit file snapshot and apply bottom-up. Refs/tags are interpreted against the last-read version of the file.
Payload shape: { "filePath": string, "edits": [...], "delete"?: boolean, "rename"?: string }
Each edit must be one of: replace, append, prepend
Edit shape: { "op": "replace"|"append"|"prepend", "pos"?: "LINE#ID", "end"?: "LINE#ID", "lines"?: string|string[]|null }
lines must contain plain replacement text only (no LINE#ID prefixes, no diff + markers)
CRITICAL: all operations validate against the same pre-edit file snapshot and apply bottom-up. Refs/tags are interpreted against the last-read version of the file.
LINE#ID FORMAT (CRITICAL):
Each line reference must be in "LINE#ID" format where:
LINE: 1-based line number
ID: Two CID letters from the set ZPMQVRWSNKTXJBYH
Each line reference must be in "{line_number}#{hash_id}" format where:
{line_number}: 1-based line number
{hash_id}: Two CID letters from the set ZPMQVRWSNKTXJBYH
FILE MODES:
delete=true deletes file and requires edits=[] with no rename
rename moves final content to a new path and removes old path
CONTENT FORMAT:
text/new_text can be a string (single line) or string[] (multi-line, preferred).
If you pass a multi-line string, it is split by real newline characters.
Literal "\\n" is preserved as text.
lines can be a string (single line) or string[] (multi-line, preferred).
If you pass a multi-line string, it is split by real newline characters.
Literal "\\n" is preserved as text.
FILE CREATION:
append: adds content at EOF. If file does not exist, creates it.
prepend: adds content at BOF. If file does not exist, creates it.
CRITICAL: append/prepend are the only operations that work without an existing file.
append without anchors adds content at EOF. If file does not exist, creates it.
prepend without anchors adds content at BOF. If file does not exist, creates it.
CRITICAL: only unanchored append/prepend can create a missing file.
OPERATION CHOICE:
One line wrong -> set_line
Adjacent block rewrite or swap/move -> replace_lines (prefer one range op over many single-line ops)
Both boundaries known -> insert_between (ALWAYS prefer over insert_after/insert_before)
One boundary known -> insert_after or insert_before
New file or EOF/BOF addition -> append or prepend
No LINE#ID available -> replace (last resort)
replace with pos only -> replace one line at pos (MOST COMMON for single-line edits)
replace with pos+end -> replace ENTIRE range pos..end as a block (ranges MUST NOT overlap across edits)
append with pos/end anchor -> insert after that anchor
prepend with pos/end anchor -> insert before that anchor
append/prepend without anchors -> EOF/BOF insertion
RULES (CRITICAL):
1. Minimize scope: one logical mutation site per operation.
@@ -53,10 +53,9 @@ RULES (CRITICAL):
TAG CHOICE (ALWAYS):
- Copy tags exactly from read output or >>> mismatch output.
- NEVER guess tags.
- Prefer insert_between over insert_after/insert_before when both boundaries are known.
- Anchor to structural lines (function/class/brace), NEVER blank lines.
- Anti-pattern warning: blank/whitespace anchors are fragile.
- Re-read after each successful edit call before issuing another on the same file.
- Anchor to structural lines (function/class/brace), NEVER blank lines.
- Anti-pattern warning: blank/whitespace anchors are fragile.
- Re-read after each successful edit call before issuing another on the same file.
AUTOCORRECT (built-in - you do NOT need to handle these):
Merged lines are auto-expanded back to original line count.

View File

@@ -31,7 +31,7 @@ describe("createHashlineEditTool", () => {
fs.rmSync(tempDir, { recursive: true, force: true })
})
it("applies set_line with LINE#ID anchor", async () => {
it("applies replace with single LINE#ID anchor", async () => {
//#given
const filePath = path.join(tempDir, "test.txt")
fs.writeFileSync(filePath, "line1\nline2\nline3")
@@ -41,7 +41,7 @@ describe("createHashlineEditTool", () => {
const result = await tool.execute(
{
filePath,
edits: [{ type: "set_line", line: `2#${hash}`, text: "modified line2" }],
edits: [{ op: "replace", pos: `2#${hash}`, lines: "modified line2" }],
},
createMockContext(),
)
@@ -51,7 +51,7 @@ describe("createHashlineEditTool", () => {
expect(result).toBe(`Updated ${filePath}`)
})
it("applies replace_lines and insert_after", async () => {
it("applies ranged replace and anchored append", async () => {
//#given
const filePath = path.join(tempDir, "test.txt")
fs.writeFileSync(filePath, "line1\nline2\nline3\nline4")
@@ -65,15 +65,15 @@ describe("createHashlineEditTool", () => {
filePath,
edits: [
{
type: "replace_lines",
start_line: `2#${line2Hash}`,
end_line: `3#${line3Hash}`,
text: "replaced",
op: "replace",
pos: `2#${line2Hash}`,
end: `3#${line3Hash}`,
lines: "replaced",
},
{
type: "insert_after",
line: `4#${line4Hash}`,
text: "inserted",
op: "append",
pos: `4#${line4Hash}`,
lines: "inserted",
},
],
},
@@ -93,7 +93,7 @@ describe("createHashlineEditTool", () => {
const result = await tool.execute(
{
filePath,
edits: [{ type: "set_line", line: "1#ZZ", text: "new" }],
edits: [{ op: "replace", pos: "1#ZZ", lines: "new" }],
},
createMockContext(),
)
@@ -103,6 +103,25 @@ describe("createHashlineEditTool", () => {
expect(result).toContain(">>>")
})
it("does not classify invalid pos format as hash mismatch", () => {
  //#given — "42" has no #ID suffix, so it is a format error, not a stale hash
  const filePath = path.join(tempDir, "invalid-format.txt")
  fs.writeFileSync(filePath, "line1\nline2")
  //#when
  const edits = [{ op: "replace", pos: "42", lines: "updated" }]
  return tool
    .execute({ filePath, edits }, createMockContext())
    .then((output) => {
      //#then
      expect(output).toContain("Error")
      expect(output.toLowerCase()).not.toContain("hash mismatch")
    })
})
it("preserves literal backslash-n and supports string[] payload", async () => {
//#given
const filePath = path.join(tempDir, "test.txt")
@@ -113,7 +132,7 @@ describe("createHashlineEditTool", () => {
await tool.execute(
{
filePath,
edits: [{ type: "set_line", line: `1#${line1Hash}`, text: "join(\\n)" }],
edits: [{ op: "replace", pos: `1#${line1Hash}`, lines: "join(\\n)" }],
},
createMockContext(),
)
@@ -121,7 +140,7 @@ describe("createHashlineEditTool", () => {
await tool.execute(
{
filePath,
edits: [{ type: "insert_after", line: `1#${computeLineHash(1, "join(\\n)")}`, text: ["a", "b"] }],
edits: [{ op: "append", pos: `1#${computeLineHash(1, "join(\\n)")}`, lines: ["a", "b"] }],
},
createMockContext(),
)
@@ -130,12 +149,11 @@ describe("createHashlineEditTool", () => {
expect(fs.readFileSync(filePath, "utf-8")).toBe("join(\\n)\na\nb\nline2")
})
it("supports insert_before and insert_between", async () => {
it("supports anchored prepend and anchored append", async () => {
//#given
const filePath = path.join(tempDir, "test.txt")
fs.writeFileSync(filePath, "line1\nline2\nline3")
const line1 = computeLineHash(1, "line1")
const line2 = computeLineHash(2, "line2")
const line3 = computeLineHash(3, "line3")
//#when
@@ -143,8 +161,8 @@ describe("createHashlineEditTool", () => {
{
filePath,
edits: [
{ type: "insert_before", line: `3#${line3}`, text: ["before3"] },
{ type: "insert_between", after_line: `1#${line1}`, before_line: `2#${line2}`, text: ["between"] },
{ op: "prepend", pos: `3#${line3}`, lines: ["before3"] },
{ op: "append", pos: `1#${line1}`, lines: ["between"] },
],
},
createMockContext(),
@@ -164,7 +182,7 @@ describe("createHashlineEditTool", () => {
const result = await tool.execute(
{
filePath,
edits: [{ type: "insert_after", line: `1#${line1}`, text: [] }],
edits: [{ op: "append", pos: `1#${line1}`, lines: [] }],
},
createMockContext(),
)
@@ -186,7 +204,7 @@ describe("createHashlineEditTool", () => {
{
filePath,
rename: renamedPath,
edits: [{ type: "set_line", line: `2#${line2}`, text: "line2-updated" }],
edits: [{ op: "replace", pos: `2#${line2}`, lines: "line2-updated" }],
},
createMockContext(),
)
@@ -226,8 +244,8 @@ describe("createHashlineEditTool", () => {
{
filePath,
edits: [
{ type: "append", text: ["line2"] },
{ type: "prepend", text: ["line1"] },
{ op: "append", lines: ["line2"] },
{ op: "prepend", lines: ["line1"] },
],
},
createMockContext(),
@@ -239,7 +257,7 @@ describe("createHashlineEditTool", () => {
expect(result).toBe(`Updated ${filePath}`)
})
it("accepts replace_lines with one anchor and downgrades to set_line", async () => {
it("accepts replace with one anchor", async () => {
//#given
const filePath = path.join(tempDir, "degrade.txt")
fs.writeFileSync(filePath, "line1\nline2\nline3")
@@ -249,7 +267,7 @@ describe("createHashlineEditTool", () => {
const result = await tool.execute(
{
filePath,
edits: [{ type: "replace_lines", start_line: `2#${line2Hash}`, text: ["line2-updated"] }],
edits: [{ op: "replace", pos: `2#${line2Hash}`, lines: ["line2-updated"] }],
},
createMockContext(),
)
@@ -259,7 +277,7 @@ describe("createHashlineEditTool", () => {
expect(result).toBe(`Updated ${filePath}`)
})
it("accepts insert_after using after_line alias", async () => {
it("accepts anchored append using end alias", async () => {
//#given
const filePath = path.join(tempDir, "alias.txt")
fs.writeFileSync(filePath, "line1\nline2")
@@ -269,7 +287,7 @@ describe("createHashlineEditTool", () => {
await tool.execute(
{
filePath,
edits: [{ type: "insert_after", after_line: `1#${line1Hash}`, text: ["inserted"] }],
edits: [{ op: "append", end: `1#${line1Hash}`, lines: ["inserted"] }],
},
createMockContext(),
)
@@ -289,7 +307,7 @@ describe("createHashlineEditTool", () => {
await tool.execute(
{
filePath,
edits: [{ type: "set_line", line: `2#${line2Hash}`, text: "line2-updated" }],
edits: [{ op: "replace", pos: `2#${line2Hash}`, lines: "line2-updated" }],
},
createMockContext(),
)

View File

@@ -20,32 +20,19 @@ export function createHashlineEditTool(): ToolDefinition {
edits: tool.schema
.array(
tool.schema.object({
type: tool.schema
op: tool.schema
.union([
tool.schema.literal("set_line"),
tool.schema.literal("replace_lines"),
tool.schema.literal("insert_after"),
tool.schema.literal("insert_before"),
tool.schema.literal("insert_between"),
tool.schema.literal("replace"),
tool.schema.literal("append"),
tool.schema.literal("prepend"),
])
.describe("Edit operation type"),
line: tool.schema.string().optional().describe("Anchor line in LINE#ID format"),
start_line: tool.schema.string().optional().describe("Range start in LINE#ID format"),
end_line: tool.schema.string().optional().describe("Range end in LINE#ID format"),
after_line: tool.schema.string().optional().describe("Insert boundary (after) in LINE#ID format"),
before_line: tool.schema.string().optional().describe("Insert boundary (before) in LINE#ID format"),
text: tool.schema
.union([tool.schema.string(), tool.schema.array(tool.schema.string())])
.describe("Hashline edit operation mode"),
pos: tool.schema.string().optional().describe("Primary anchor in LINE#ID format"),
end: tool.schema.string().optional().describe("Range end anchor in LINE#ID format"),
lines: tool.schema
.union([tool.schema.string(), tool.schema.array(tool.schema.string()), tool.schema.null()])
.optional()
.describe("Operation content"),
old_text: tool.schema.string().optional().describe("Legacy text replacement source"),
new_text: tool.schema
.union([tool.schema.string(), tool.schema.array(tool.schema.string())])
.optional()
.describe("Legacy text replacement target"),
.describe("Replacement or inserted lines. null/[] deletes with replace"),
})
)
.describe("Array of edit operations to apply (empty when delete=true)"),

View File

@@ -1,57 +1,20 @@
export interface SetLine {
type: "set_line"
line: string
text: string | string[]
export interface ReplaceEdit {
op: "replace"
pos: string
end?: string
lines: string | string[]
}
export interface ReplaceLines {
type: "replace_lines"
start_line: string
end_line: string
text: string | string[]
export interface AppendEdit {
op: "append"
pos?: string
lines: string | string[]
}
export interface InsertAfter {
type: "insert_after"
line: string
text: string | string[]
export interface PrependEdit {
op: "prepend"
pos?: string
lines: string | string[]
}
export interface InsertBefore {
type: "insert_before"
line: string
text: string | string[]
}
export interface InsertBetween {
type: "insert_between"
after_line: string
before_line: string
text: string | string[]
}
export interface Replace {
type: "replace"
old_text: string
new_text: string | string[]
}
export interface Append {
type: "append"
text: string | string[]
}
export interface Prepend {
type: "prepend"
text: string | string[]
}
export type HashlineEdit =
| SetLine
| ReplaceLines
| InsertAfter
| InsertBefore
| InsertBetween
| Replace
| Append
| Prepend
export type HashlineEdit = ReplaceEdit | AppendEdit | PrependEdit

View File

@@ -19,12 +19,76 @@ describe("parseLineRef", () => {
const ref = "42:VK"
//#when / #then
expect(() => parseLineRef(ref)).toThrow("LINE#ID")
expect(() => parseLineRef(ref)).toThrow("{line_number}#{hash_id}")
})
it("gives specific hint when literal text is used instead of line number", () => {
//#given — model sends "LINE#HK" instead of "1#HK"
const ref = "LINE#HK"
//#when / #then — error should mention that LINE is not a valid number
expect(() => parseLineRef(ref)).toThrow(/not a line number/i)
})
it("gives specific hint for other non-numeric prefixes like POS#VK", () => {
//#given
const ref = "POS#VK"
//#when / #then
expect(() => parseLineRef(ref)).toThrow(/not a line number/i)
})
it("extracts valid line number from mixed prefix like LINE42 without throwing", () => {
  //#given — normalizeLineRef salvages the embedded 42#VK from LINE42#VK
  const ref = "LINE42#VK"
  //#when
  const parsed = parseLineRef(ref)
  //#then — parses as line 42 with hash VK
  expect(parsed.line).toBe(42)
  expect(parsed.hash).toBe("VK")
})
it("gives specific hint when hyphenated prefix like line-ref is used", () => {
//#given
const ref = "line-ref#VK"
//#when / #then
expect(() => parseLineRef(ref)).toThrow(/not a line number/i)
})
it("gives specific hint when prefix contains a period like line.ref", () => {
//#given
const ref = "line.ref#VK"
//#when / #then
expect(() => parseLineRef(ref)).toThrow(/not a line number/i)
})
it("accepts refs copied with markers and trailing content", () => {
//#given
const ref = ">>> 42#VK:const value = 1"
const ref = ">>> 42#VK|const value = 1"
//#when
const result = parseLineRef(ref)
//#then
expect(result).toEqual({ line: 42, hash: "VK" })
})
it("accepts refs copied with >>> marker only", () => {
  //#given — ref copied straight from mismatch output, no trailing content
  const ref = ">>> 42#VK"
  //#when
  const parsed = parseLineRef(ref)
  //#then
  expect(parsed).toEqual({ line: 42, hash: "VK" })
})
it("accepts refs with spaces around hash separator", () => {
//#given
const ref = "42 # VK"
//#when
const result = parseLineRef(ref)
@@ -49,7 +113,7 @@ describe("validateLineRef", () => {
const lines = ["function hello() {"]
//#when / #then
expect(() => validateLineRef(lines, "1#ZZ")).toThrow(/>>>\s+1#[ZPMQVRWSNKTXJBYH]{2}:/)
expect(() => validateLineRef(lines, "1#ZZ")).toThrow(/>>>\s+1#[ZPMQVRWSNKTXJBYH]{2}\|/)
})
it("shows >>> mismatch context in batched validation", () => {
@@ -58,49 +122,15 @@ describe("validateLineRef", () => {
//#when / #then
expect(() => validateLineRefs(lines, ["2#ZZ"]))
.toThrow(/>>>\s+2#[ZPMQVRWSNKTXJBYH]{2}:two/)
})
})
describe("legacy LINE:HEX backward compatibility", () => {
it("parses legacy LINE:HEX ref", () => {
//#given
const ref = "42:ab"
//#when
const result = parseLineRef(ref)
//#then
expect(result).toEqual({ line: 42, hash: "ab" })
})
it("parses legacy LINE:HEX ref with uppercase hex", () => {
//#given
const ref = "10:FF"
//#when
const result = parseLineRef(ref)
//#then
expect(result).toEqual({ line: 10, hash: "FF" })
})
it("legacy ref fails validation with hash mismatch, not parse error", () => {
//#given
const lines = ["function hello() {"]
//#when / #then
expect(() => validateLineRef(lines, "1:ab")).toThrow(/>>>\s+1#[ZPMQVRWSNKTXJBYH]{2}:/)
})
it("extracts legacy ref from content with markers", () => {
//#given
const ref = ">>> 42:ab|const x = 1"
//#when
const result = parseLineRef(ref)
//#then
expect(result).toEqual({ line: 42, hash: "ab" })
.toThrow(/>>>\s+2#[ZPMQVRWSNKTXJBYH]{2}\|two/)
})
it("suggests correct line number when hash matches a file line", () => {
//#given — model sends LINE#XX where XX is the actual hash for line 1
const lines = ["function hello() {", " return 42", "}"]
const hash = computeLineHash(1, lines[0])
//#when / #then — error should suggest the correct reference
expect(() => validateLineRefs(lines, [`LINE#${hash}`])).toThrow(new RegExp(`1#${hash}`))
})
})

View File

@@ -1,5 +1,5 @@
import { computeLineHash } from "./hash-computation"
import { HASHLINE_REF_PATTERN, HASHLINE_LEGACY_REF_PATTERN } from "./constants"
import { HASHLINE_REF_PATTERN } from "./constants"
export interface LineRef {
line: number
@@ -13,23 +13,26 @@ interface HashMismatch {
const MISMATCH_CONTEXT = 2
const LINE_REF_EXTRACT_PATTERN = /([0-9]+#[ZPMQVRWSNKTXJBYH]{2}|[0-9]+:[0-9a-fA-F]{2,})/
const LINE_REF_EXTRACT_PATTERN = /([0-9]+#[ZPMQVRWSNKTXJBYH]{2})/
function normalizeLineRef(ref: string): string {
const trimmed = ref.trim()
const originalTrimmed = ref.trim()
let trimmed = originalTrimmed
trimmed = trimmed.replace(/^(?:>>>|[+-])\s*/, "")
trimmed = trimmed.replace(/\s*#\s*/, "#")
trimmed = trimmed.replace(/\|.*$/, "")
trimmed = trimmed.trim()
if (HASHLINE_REF_PATTERN.test(trimmed)) {
return trimmed
}
if (HASHLINE_LEGACY_REF_PATTERN.test(trimmed)) {
return trimmed
}
const extracted = trimmed.match(LINE_REF_EXTRACT_PATTERN)
if (extracted) {
return extracted[1]
}
return trimmed
return originalTrimmed
}
export function parseLineRef(ref: string): LineRef {
@@ -41,20 +44,25 @@ export function parseLineRef(ref: string): LineRef {
hash: match[2],
}
}
const legacyMatch = normalized.match(HASHLINE_LEGACY_REF_PATTERN)
if (legacyMatch) {
return {
line: Number.parseInt(legacyMatch[1], 10),
hash: legacyMatch[2],
// normalized equals ref.trim() in all error paths — extraction only succeeds for valid refs
const hashIdx = normalized.indexOf('#')
if (hashIdx > 0) {
const prefix = normalized.slice(0, hashIdx)
const suffix = normalized.slice(hashIdx + 1)
if (!/^\d+$/.test(prefix) && /^[ZPMQVRWSNKTXJBYH]{2}$/.test(suffix)) {
throw new Error(
`Invalid line reference: "${ref}". "${prefix}" is not a line number. ` +
`Use the actual line number from the read output.`
)
}
}
throw new Error(
`Invalid line reference format: "${ref}". Expected format: "LINE#ID" (e.g., "42#VK")`
`Invalid line reference format: "${ref}". Expected format: "{line_number}#{hash_id}"`
)
}
export function validateLineRef(lines: string[], ref: string): void {
const { line, hash } = parseLineRef(ref)
const { line, hash } = parseLineRefWithHint(ref, lines)
if (line < 1 || line > lines.length) {
throw new Error(
@@ -102,7 +110,7 @@ export class HashlineMismatchError extends Error {
const output: string[] = []
output.push(
`${mismatches.length} line${mismatches.length > 1 ? "s have" : " has"} changed since last read. ` +
"Use updated LINE#ID references below (>>> marks changed lines)."
"Use updated {line_number}#{hash_id} references below (>>> marks changed lines)."
)
output.push("")
@@ -115,7 +123,7 @@ export class HashlineMismatchError extends Error {
const content = fileLines[line - 1] ?? ""
const hash = computeLineHash(line, content)
const prefix = `${line}#${hash}:${content}`
const prefix = `${line}#${hash}|${content}`
if (mismatchByLine.has(line)) {
output.push(`>>> ${prefix}`)
} else {
@@ -127,11 +135,34 @@ export class HashlineMismatchError extends Error {
}
}
/**
 * If the ref ends in a syntactically valid #ID, scans the file for a line
 * whose computed hash matches and returns a "Did you mean …" hint, else null.
 */
function suggestLineForHash(ref: string, lines: string[]): string | null {
  const tailMatch = /#([ZPMQVRWSNKTXJBYH]{2})$/.exec(ref.trim())
  if (tailMatch === null) return null
  const wanted = tailMatch[1]
  for (const [index, content] of lines.entries()) {
    const lineNumber = index + 1
    if (computeLineHash(lineNumber, content) === wanted) {
      return `Did you mean "${lineNumber}#${wanted}"?`
    }
  }
  return null
}
/**
 * parseLineRef wrapper that, on parse failure, augments the error message
 * with a "Did you mean" suggestion when the ref's hash matches a file line.
 */
function parseLineRefWithHint(ref: string, lines: string[]): LineRef {
  try {
    return parseLineRef(ref)
  } catch (parseError) {
    if (!(parseError instanceof Error)) throw parseError
    const hint = suggestLineForHash(ref, lines)
    if (hint === null) throw parseError
    throw new Error(`${parseError.message} ${hint}`)
  }
}
export function validateLineRefs(lines: string[], refs: string[]): void {
const mismatches: HashMismatch[] = []
for (const ref of refs) {
const { line, hash } = parseLineRef(ref)
const { line, hash } = parseLineRefWithHint(ref, lines)
if (line < 1 || line > lines.length) {
throw new Error(`Line number ${line} out of bounds (file has ${lines.length} lines)`)

View File

@@ -52,8 +52,8 @@ export function discoverCommandsSync(directory?: string): CommandInfo[] {
const configDir = getOpenCodeConfigDir({ binary: "opencode" })
const userCommandsDir = join(getClaudeConfigDir(), "commands")
const projectCommandsDir = join(directory ?? process.cwd(), ".claude", "commands")
const opencodeGlobalDir = join(configDir, "command")
const opencodeProjectDir = join(directory ?? process.cwd(), ".opencode", "command")
const opencodeGlobalDir = join(configDir, "commands")
const opencodeProjectDir = join(directory ?? process.cwd(), ".opencode", "commands")
const userCommands = discoverCommandsFromDir(userCommandsDir, "user")
const opencodeGlobalCommands = discoverCommandsFromDir(opencodeGlobalDir, "opencode")

View File

@@ -1,6 +1,27 @@
import { describe, expect, it } from "bun:test"
/// <reference types="bun-types" />
import { afterEach, describe, expect, it } from "bun:test"
import { mkdirSync, rmSync, writeFileSync } from "node:fs"
import { tmpdir } from "node:os"
import { join } from "node:path"
import * as slashcommand from "./index"
const testRoots: string[] = []
/** Creates a unique temp directory, registers it for afterEach cleanup, and returns its path. */
function createTempRoot(): string {
  const uniqueName = `slashcommand-discovery-${Date.now()}-${Math.random().toString(16).slice(2)}`
  const root = join(tmpdir(), uniqueName)
  mkdirSync(root, { recursive: true })
  testRoots.push(root)
  return root
}
afterEach(() => {
  // Drain the registry so each test starts from a clean slate.
  const roots = testRoots.splice(0)
  roots.forEach((root) => rmSync(root, { recursive: true, force: true }))
  delete process.env.OPENCODE_CONFIG_DIR
})
describe("slashcommand module exports", () => {
it("exports discovery API only", () => {
// given
@@ -14,4 +35,32 @@ describe("slashcommand module exports", () => {
expect(exportNames).not.toContain("createSlashcommandTool")
expect(exportNames).not.toContain("slashcommand")
})
it("discovers commands from OpenCode plural command directories", () => {
  // given — one command in the global "commands" dir, one in the project's
  const tempRoot = createTempRoot()
  const configDir = join(tempRoot, "config")
  const globalDir = join(configDir, "commands")
  const projectDir = join(tempRoot, ".opencode", "commands")
  for (const dir of [globalDir, projectDir]) {
    mkdirSync(dir, { recursive: true })
  }
  writeFileSync(
    join(globalDir, "global-cmd.md"),
    "---\ndescription: global command\n---\nGlobal command body"
  )
  writeFileSync(
    join(projectDir, "project-cmd.md"),
    "---\ndescription: project command\n---\nProject command body"
  )
  process.env.OPENCODE_CONFIG_DIR = configDir
  // when
  const discovered = slashcommand.discoverCommandsSync(tempRoot)
  // then — both scopes are found under the plural directory names
  expect(discovered.some((cmd) => cmd.name === "global-cmd" && cmd.scope === "opencode")).toBe(true)
  expect(discovered.some((cmd) => cmd.name === "project-cmd" && cmd.scope === "opencode-project")).toBe(true)
})
})