Compare commits
15 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
57757a345d | ||
|
|
3caae14192 | ||
|
|
55ac653eaa | ||
|
|
1d5652dfa9 | ||
|
|
76c460536d | ||
|
|
b067d4a284 | ||
|
|
94838ec039 | ||
|
|
224ecea8c7 | ||
|
|
5d5755f29d | ||
|
|
1fdce01fd2 | ||
|
|
c8213c970e | ||
|
|
576ff453e5 | ||
|
|
9b8aca45f9 | ||
|
|
f1f20f5a79 | ||
|
|
de40caf76d |
98
.github/workflows/publish-platform.yml
vendored
98
.github/workflows/publish-platform.yml
vendored
@@ -59,20 +59,39 @@ jobs:
|
||||
- name: Check if already published
|
||||
id: check
|
||||
run: |
|
||||
PKG_NAME="oh-my-opencode-${{ matrix.platform }}"
|
||||
VERSION="${{ inputs.version }}"
|
||||
STATUS=$(curl -s -o /dev/null -w "%{http_code}" "https://registry.npmjs.org/${PKG_NAME}/${VERSION}")
|
||||
# Convert platform name for output (replace - with _)
|
||||
PLATFORM_KEY="${{ matrix.platform }}"
|
||||
PLATFORM_KEY="${PLATFORM_KEY//-/_}"
|
||||
if [ "$STATUS" = "200" ]; then
|
||||
|
||||
# Check oh-my-opencode
|
||||
OC_STATUS=$(curl -s -o /dev/null -w "%{http_code}" "https://registry.npmjs.org/oh-my-opencode-${{ matrix.platform }}/${VERSION}")
|
||||
# Check oh-my-openagent
|
||||
OA_STATUS=$(curl -s -o /dev/null -w "%{http_code}" "https://registry.npmjs.org/oh-my-openagent-${{ matrix.platform }}/${VERSION}")
|
||||
|
||||
echo "oh-my-opencode-${{ matrix.platform }}@${VERSION}: ${OC_STATUS}"
|
||||
echo "oh-my-openagent-${{ matrix.platform }}@${VERSION}: ${OA_STATUS}"
|
||||
|
||||
if [ "$OC_STATUS" = "200" ]; then
|
||||
echo "skip_opencode=true" >> $GITHUB_OUTPUT
|
||||
echo "✓ oh-my-opencode-${{ matrix.platform }}@${VERSION} already published"
|
||||
else
|
||||
echo "skip_opencode=false" >> $GITHUB_OUTPUT
|
||||
echo "→ oh-my-opencode-${{ matrix.platform }}@${VERSION} needs publishing"
|
||||
fi
|
||||
|
||||
if [ "$OA_STATUS" = "200" ]; then
|
||||
echo "skip_openagent=true" >> $GITHUB_OUTPUT
|
||||
echo "✓ oh-my-openagent-${{ matrix.platform }}@${VERSION} already published"
|
||||
else
|
||||
echo "skip_openagent=false" >> $GITHUB_OUTPUT
|
||||
echo "→ oh-my-openagent-${{ matrix.platform }}@${VERSION} needs publishing"
|
||||
fi
|
||||
|
||||
# Skip build only if BOTH are already published
|
||||
if [ "$OC_STATUS" = "200" ] && [ "$OA_STATUS" = "200" ]; then
|
||||
echo "skip=true" >> $GITHUB_OUTPUT
|
||||
echo "skip_${PLATFORM_KEY}=true" >> $GITHUB_OUTPUT
|
||||
echo "✓ ${PKG_NAME}@${VERSION} already published"
|
||||
else
|
||||
echo "skip=false" >> $GITHUB_OUTPUT
|
||||
echo "skip_${PLATFORM_KEY}=false" >> $GITHUB_OUTPUT
|
||||
echo "→ ${PKG_NAME}@${VERSION} needs publishing"
|
||||
fi
|
||||
|
||||
- name: Update version in package.json
|
||||
@@ -207,23 +226,38 @@ jobs:
|
||||
matrix:
|
||||
platform: [darwin-arm64, darwin-x64, darwin-x64-baseline, linux-x64, linux-x64-baseline, linux-arm64, linux-x64-musl, linux-x64-musl-baseline, linux-arm64-musl, windows-x64, windows-x64-baseline]
|
||||
steps:
|
||||
- name: Check if oh-my-opencode already published
|
||||
- name: Check if already published
|
||||
id: check
|
||||
run: |
|
||||
PKG_NAME="oh-my-opencode-${{ matrix.platform }}"
|
||||
VERSION="${{ inputs.version }}"
|
||||
STATUS=$(curl -s -o /dev/null -w "%{http_code}" "https://registry.npmjs.org/${PKG_NAME}/${VERSION}")
|
||||
if [ "$STATUS" = "200" ]; then
|
||||
echo "skip=true" >> $GITHUB_OUTPUT
|
||||
echo "✓ ${PKG_NAME}@${VERSION} already published, skipping"
|
||||
|
||||
OC_STATUS=$(curl -s -o /dev/null -w "%{http_code}" "https://registry.npmjs.org/oh-my-opencode-${{ matrix.platform }}/${VERSION}")
|
||||
OA_STATUS=$(curl -s -o /dev/null -w "%{http_code}" "https://registry.npmjs.org/oh-my-openagent-${{ matrix.platform }}/${VERSION}")
|
||||
|
||||
if [ "$OC_STATUS" = "200" ]; then
|
||||
echo "skip_opencode=true" >> $GITHUB_OUTPUT
|
||||
echo "✓ oh-my-opencode-${{ matrix.platform }}@${VERSION} already published"
|
||||
else
|
||||
echo "skip=false" >> $GITHUB_OUTPUT
|
||||
echo "→ ${PKG_NAME}@${VERSION} will be published"
|
||||
echo "skip_opencode=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [ "$OA_STATUS" = "200" ]; then
|
||||
echo "skip_openagent=true" >> $GITHUB_OUTPUT
|
||||
echo "✓ oh-my-openagent-${{ matrix.platform }}@${VERSION} already published"
|
||||
else
|
||||
echo "skip_openagent=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Need artifact if either package needs publishing
|
||||
if [ "$OC_STATUS" = "200" ] && [ "$OA_STATUS" = "200" ]; then
|
||||
echo "skip_all=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "skip_all=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Download artifact
|
||||
id: download
|
||||
if: steps.check.outputs.skip != 'true'
|
||||
if: steps.check.outputs.skip_all != 'true'
|
||||
continue-on-error: true
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
@@ -231,7 +265,7 @@ jobs:
|
||||
path: .
|
||||
|
||||
- name: Extract artifact
|
||||
if: steps.check.outputs.skip != 'true' && steps.download.outcome == 'success'
|
||||
if: steps.check.outputs.skip_all != 'true' && steps.download.outcome == 'success'
|
||||
run: |
|
||||
PLATFORM="${{ matrix.platform }}"
|
||||
mkdir -p packages/${PLATFORM}
|
||||
@@ -247,13 +281,13 @@ jobs:
|
||||
ls -la packages/${PLATFORM}/bin/
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
if: steps.check.outputs.skip != 'true' && steps.download.outcome == 'success'
|
||||
if: steps.check.outputs.skip_all != 'true' && steps.download.outcome == 'success'
|
||||
with:
|
||||
node-version: "24"
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
|
||||
- name: Publish ${{ matrix.platform }}
|
||||
if: steps.check.outputs.skip != 'true' && steps.download.outcome == 'success'
|
||||
- name: Publish oh-my-opencode-${{ matrix.platform }}
|
||||
if: steps.check.outputs.skip_opencode != 'true' && steps.download.outcome == 'success'
|
||||
run: |
|
||||
cd packages/${{ matrix.platform }}
|
||||
|
||||
@@ -267,3 +301,25 @@ jobs:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NODE_AUTH_TOKEN }}
|
||||
NPM_CONFIG_PROVENANCE: true
|
||||
timeout-minutes: 15
|
||||
|
||||
- name: Publish oh-my-openagent-${{ matrix.platform }}
|
||||
if: steps.check.outputs.skip_openagent != 'true' && steps.download.outcome == 'success'
|
||||
run: |
|
||||
cd packages/${{ matrix.platform }}
|
||||
|
||||
# Rename package for oh-my-openagent
|
||||
jq --arg name "oh-my-openagent-${{ matrix.platform }}" \
|
||||
--arg desc "Platform-specific binary for oh-my-openagent (${{ matrix.platform }})" \
|
||||
'.name = $name | .description = $desc | .bin = {"oh-my-openagent": (.bin | to_entries | .[0].value)}' \
|
||||
package.json > tmp.json && mv tmp.json package.json
|
||||
|
||||
TAG_ARG=""
|
||||
if [ -n "${{ inputs.dist_tag }}" ]; then
|
||||
TAG_ARG="--tag ${{ inputs.dist_tag }}"
|
||||
fi
|
||||
|
||||
npm publish --access public --provenance $TAG_ARG
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NODE_AUTH_TOKEN }}
|
||||
NPM_CONFIG_PROVENANCE: true
|
||||
timeout-minutes: 15
|
||||
|
||||
42
.github/workflows/publish.yml
vendored
42
.github/workflows/publish.yml
vendored
@@ -216,6 +216,48 @@ jobs:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NODE_AUTH_TOKEN }}
|
||||
NPM_CONFIG_PROVENANCE: true
|
||||
|
||||
- name: Check if oh-my-openagent already published
|
||||
id: check-openagent
|
||||
run: |
|
||||
VERSION="${{ steps.version.outputs.version }}"
|
||||
STATUS=$(curl -s -o /dev/null -w "%{http_code}" "https://registry.npmjs.org/oh-my-openagent/${VERSION}")
|
||||
if [ "$STATUS" = "200" ]; then
|
||||
echo "skip=true" >> $GITHUB_OUTPUT
|
||||
echo "✓ oh-my-openagent@${VERSION} already published"
|
||||
else
|
||||
echo "skip=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Publish oh-my-openagent
|
||||
if: steps.check-openagent.outputs.skip != 'true'
|
||||
run: |
|
||||
VERSION="${{ steps.version.outputs.version }}"
|
||||
|
||||
# Update package name, version, and optionalDependencies for oh-my-openagent
|
||||
jq --arg v "$VERSION" '
|
||||
.name = "oh-my-openagent" |
|
||||
.version = $v |
|
||||
.optionalDependencies = (
|
||||
.optionalDependencies | to_entries |
|
||||
map(.key = (.key | sub("^oh-my-opencode-"; "oh-my-openagent-")) | .value = $v) |
|
||||
from_entries
|
||||
)
|
||||
' package.json > tmp.json && mv tmp.json package.json
|
||||
|
||||
TAG_ARG=""
|
||||
if [ -n "${{ steps.version.outputs.dist_tag }}" ]; then
|
||||
TAG_ARG="--tag ${{ steps.version.outputs.dist_tag }}"
|
||||
fi
|
||||
npm publish --access public --provenance $TAG_ARG || echo "::warning::oh-my-openagent publish failed"
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NODE_AUTH_TOKEN }}
|
||||
NPM_CONFIG_PROVENANCE: true
|
||||
|
||||
- name: Restore package.json
|
||||
if: steps.check-openagent.outputs.skip != 'true'
|
||||
run: |
|
||||
git checkout -- package.json
|
||||
|
||||
trigger-platform:
|
||||
runs-on: ubuntu-latest
|
||||
needs: publish-main
|
||||
|
||||
@@ -136,7 +136,36 @@ fi
|
||||
|
||||
---
|
||||
|
||||
## Phase 3: Spawn Subagents
|
||||
## Phase 3: Spawn Subagents (Individual Tool Calls)
|
||||
|
||||
**CRITICAL: Create tasks ONE BY ONE using individual `task_create` tool calls. NEVER batch or script.**
|
||||
|
||||
For each item, execute these steps sequentially:
|
||||
|
||||
### Step 3.1: Create Task Record
|
||||
```typescript
|
||||
task_create(
|
||||
subject="Triage: #{number} {title}",
|
||||
description="GitHub {issue|PR} triage analysis - {type}",
|
||||
metadata={"type": "{ISSUE_QUESTION|ISSUE_BUG|ISSUE_FEATURE|ISSUE_OTHER|PR_BUGFIX|PR_OTHER}", "number": {number}}
|
||||
)
|
||||
```
|
||||
|
||||
### Step 3.2: Spawn Analysis Subagent (Background)
|
||||
```typescript
|
||||
task(
|
||||
category="quick",
|
||||
run_in_background=true,
|
||||
load_skills=[],
|
||||
prompt=SUBAGENT_PROMPT
|
||||
)
|
||||
```
|
||||
|
||||
**ABSOLUTE RULES for Subagents:**
|
||||
- **ONLY ANALYZE** - Never take action on GitHub (no comments, merges, closes)
|
||||
- **READ-ONLY** - Use tools only for reading code/GitHub data
|
||||
- **WRITE REPORT ONLY** - Output goes to `{REPORT_DIR}/{issue|pr}-{number}.md` via Write tool
|
||||
- **EVIDENCE REQUIRED** - Every claim must have GitHub permalink as proof
|
||||
|
||||
```
|
||||
For each item:
|
||||
@@ -170,6 +199,7 @@ ABSOLUTE RULES (violating ANY = critical failure):
|
||||
- Your ONLY writable output: {REPORT_DIR}/{issue|pr}-{number}.md via the Write tool
|
||||
```
|
||||
|
||||
|
||||
---
|
||||
|
||||
### ISSUE_QUESTION
|
||||
|
||||
@@ -3708,6 +3708,9 @@
|
||||
"circuitBreaker": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"enabled": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"maxToolCalls": {
|
||||
"type": "integer",
|
||||
"minimum": 10,
|
||||
|
||||
@@ -2223,6 +2223,22 @@
|
||||
"created_at": "2026-03-17T08:27:45Z",
|
||||
"repoId": 1108837393,
|
||||
"pullRequestNo": 2640
|
||||
},
|
||||
{
|
||||
"name": "tad-hq",
|
||||
"id": 213478119,
|
||||
"comment_id": 4077697128,
|
||||
"created_at": "2026-03-17T20:07:09Z",
|
||||
"repoId": 1108837393,
|
||||
"pullRequestNo": 2655
|
||||
},
|
||||
{
|
||||
"name": "ogormans-deptstack",
|
||||
"id": 208788555,
|
||||
"comment_id": 4077893096,
|
||||
"created_at": "2026-03-17T20:42:42Z",
|
||||
"repoId": 1108837393,
|
||||
"pullRequestNo": 2656
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,20 +1,32 @@
|
||||
import { afterAll, beforeAll, describe, expect, test } from "bun:test"
|
||||
import { afterAll, beforeAll, describe, expect, mock, test } from "bun:test"
|
||||
import { mkdirSync, rmSync, writeFileSync } from "node:fs"
|
||||
import { homedir, tmpdir } from "node:os"
|
||||
import * as os from "node:os"
|
||||
import { tmpdir } from "node:os"
|
||||
import { join } from "node:path"
|
||||
import { resolvePromptAppend } from "./resolve-file-uri"
|
||||
|
||||
const originalHomedir = os.homedir.bind(os)
|
||||
let mockedHomeDir = ""
|
||||
let moduleImportCounter = 0
|
||||
let resolvePromptAppend: typeof import("./resolve-file-uri").resolvePromptAppend
|
||||
|
||||
mock.module("node:os", () => ({
|
||||
...os,
|
||||
homedir: () => mockedHomeDir || originalHomedir(),
|
||||
}))
|
||||
|
||||
describe("resolvePromptAppend", () => {
|
||||
const fixtureRoot = join(tmpdir(), `resolve-file-uri-${Date.now()}`)
|
||||
const configDir = join(fixtureRoot, "config")
|
||||
const homeFixtureDir = join(homedir(), `.resolve-file-uri-home-${Date.now()}`)
|
||||
const homeFixtureRoot = join(fixtureRoot, "home")
|
||||
const homeFixtureDir = join(homeFixtureRoot, "fixture-home")
|
||||
|
||||
const absoluteFilePath = join(fixtureRoot, "absolute.txt")
|
||||
const relativeFilePath = join(configDir, "relative.txt")
|
||||
const spacedFilePath = join(fixtureRoot, "with space.txt")
|
||||
const homeFilePath = join(homeFixtureDir, "home.txt")
|
||||
|
||||
beforeAll(() => {
|
||||
beforeAll(async () => {
|
||||
mockedHomeDir = homeFixtureRoot
|
||||
mkdirSync(fixtureRoot, { recursive: true })
|
||||
mkdirSync(configDir, { recursive: true })
|
||||
mkdirSync(homeFixtureDir, { recursive: true })
|
||||
@@ -23,11 +35,14 @@ describe("resolvePromptAppend", () => {
|
||||
writeFileSync(relativeFilePath, "relative-content", "utf8")
|
||||
writeFileSync(spacedFilePath, "encoded-content", "utf8")
|
||||
writeFileSync(homeFilePath, "home-content", "utf8")
|
||||
|
||||
moduleImportCounter += 1
|
||||
;({ resolvePromptAppend } = await import(`./resolve-file-uri?test=${moduleImportCounter}`))
|
||||
})
|
||||
|
||||
afterAll(() => {
|
||||
rmSync(fixtureRoot, { recursive: true, force: true })
|
||||
rmSync(homeFixtureDir, { recursive: true, force: true })
|
||||
mock.restore()
|
||||
})
|
||||
|
||||
test("returns non-file URI strings unchanged", () => {
|
||||
@@ -65,7 +80,7 @@ describe("resolvePromptAppend", () => {
|
||||
|
||||
test("resolves home directory URI path", () => {
|
||||
//#given
|
||||
const input = `file://~/${homeFixtureDir.split("/").pop()}/home.txt`
|
||||
const input = "file://~/fixture-home/home.txt"
|
||||
|
||||
//#when
|
||||
const resolved = resolvePromptAppend(input)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { z } from "zod"
|
||||
|
||||
const CircuitBreakerConfigSchema = z.object({
|
||||
enabled: z.boolean().optional(),
|
||||
maxToolCalls: z.number().int().min(10).optional(),
|
||||
windowSize: z.number().int().min(5).optional(),
|
||||
repetitionThresholdPercent: z.number().gt(0).max(100).optional(),
|
||||
|
||||
@@ -51,6 +51,7 @@ export const HookNameSchema = z.enum([
|
||||
"anthropic-effort",
|
||||
"hashline-read-enhancer",
|
||||
"read-image-resizer",
|
||||
"todo-description-override",
|
||||
])
|
||||
|
||||
export type HookName = z.infer<typeof HookNameSchema>
|
||||
|
||||
@@ -9,6 +9,7 @@ export const DEFAULT_MESSAGE_STALENESS_TIMEOUT_MS = 1_800_000
|
||||
export const DEFAULT_MAX_TOOL_CALLS = 200
|
||||
export const DEFAULT_CIRCUIT_BREAKER_WINDOW_SIZE = 20
|
||||
export const DEFAULT_CIRCUIT_BREAKER_REPETITION_THRESHOLD_PERCENT = 80
|
||||
export const DEFAULT_CIRCUIT_BREAKER_ENABLED = true
|
||||
export const MIN_RUNTIME_BEFORE_STALE_MS = 30_000
|
||||
export const MIN_IDLE_TIME_MS = 5000
|
||||
export const POLLING_INTERVAL_MS = 3000
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
import {
|
||||
createToolCallSignature,
|
||||
detectRepetitiveToolUse,
|
||||
recordToolCall,
|
||||
resolveCircuitBreakerSettings,
|
||||
@@ -17,6 +18,17 @@ function buildWindow(
|
||||
)
|
||||
}
|
||||
|
||||
function buildWindowWithInputs(
|
||||
calls: Array<{ tool: string; input?: Record<string, unknown> }>,
|
||||
override?: Parameters<typeof resolveCircuitBreakerSettings>[0]
|
||||
) {
|
||||
const settings = resolveCircuitBreakerSettings(override)
|
||||
return calls.reduce(
|
||||
(window, { tool, input }) => recordToolCall(window, tool, settings, input),
|
||||
undefined as ReturnType<typeof recordToolCall> | undefined
|
||||
)
|
||||
}
|
||||
|
||||
describe("loop-detector", () => {
|
||||
describe("resolveCircuitBreakerSettings", () => {
|
||||
describe("#given nested circuit breaker config", () => {
|
||||
@@ -31,12 +43,90 @@ describe("loop-detector", () => {
|
||||
})
|
||||
|
||||
expect(result).toEqual({
|
||||
enabled: true,
|
||||
maxToolCalls: 120,
|
||||
windowSize: 10,
|
||||
repetitionThresholdPercent: 70,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("#given no enabled config", () => {
|
||||
test("#when resolved #then enabled defaults to true", () => {
|
||||
const result = resolveCircuitBreakerSettings({
|
||||
circuitBreaker: {
|
||||
maxToolCalls: 100,
|
||||
windowSize: 5,
|
||||
repetitionThresholdPercent: 60,
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.enabled).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe("#given enabled is false in config", () => {
|
||||
test("#when resolved #then enabled is false", () => {
|
||||
const result = resolveCircuitBreakerSettings({
|
||||
circuitBreaker: {
|
||||
enabled: false,
|
||||
maxToolCalls: 100,
|
||||
windowSize: 5,
|
||||
repetitionThresholdPercent: 60,
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.enabled).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("#given enabled is true in config", () => {
|
||||
test("#when resolved #then enabled is true", () => {
|
||||
const result = resolveCircuitBreakerSettings({
|
||||
circuitBreaker: {
|
||||
enabled: true,
|
||||
maxToolCalls: 100,
|
||||
windowSize: 5,
|
||||
repetitionThresholdPercent: 60,
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.enabled).toBe(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("createToolCallSignature", () => {
|
||||
test("#given tool with input #when signature created #then includes tool and sorted input", () => {
|
||||
const result = createToolCallSignature("read", { filePath: "/a.ts" })
|
||||
|
||||
expect(result).toBe('read::{"filePath":"/a.ts"}')
|
||||
})
|
||||
|
||||
test("#given tool with undefined input #when signature created #then returns bare tool name", () => {
|
||||
const result = createToolCallSignature("read", undefined)
|
||||
|
||||
expect(result).toBe("read")
|
||||
})
|
||||
|
||||
test("#given tool with null input #when signature created #then returns bare tool name", () => {
|
||||
const result = createToolCallSignature("read", null)
|
||||
|
||||
expect(result).toBe("read")
|
||||
})
|
||||
|
||||
test("#given tool with empty object input #when signature created #then returns bare tool name", () => {
|
||||
const result = createToolCallSignature("read", {})
|
||||
|
||||
expect(result).toBe("read")
|
||||
})
|
||||
|
||||
test("#given same input different key order #when signatures compared #then they are equal", () => {
|
||||
const first = createToolCallSignature("read", { filePath: "/a.ts", offset: 0 })
|
||||
const second = createToolCallSignature("read", { offset: 0, filePath: "/a.ts" })
|
||||
|
||||
expect(first).toBe(second)
|
||||
})
|
||||
})
|
||||
|
||||
describe("detectRepetitiveToolUse", () => {
|
||||
@@ -113,5 +203,56 @@ describe("loop-detector", () => {
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("#given same tool with different file inputs", () => {
|
||||
test("#when evaluated #then it does not trigger", () => {
|
||||
const calls = Array.from({ length: 20 }, (_, i) => ({
|
||||
tool: "read",
|
||||
input: { filePath: `/src/file-${i}.ts` },
|
||||
}))
|
||||
const window = buildWindowWithInputs(calls, {
|
||||
circuitBreaker: { windowSize: 20, repetitionThresholdPercent: 80 },
|
||||
})
|
||||
const result = detectRepetitiveToolUse(window)
|
||||
expect(result.triggered).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("#given same tool with identical file inputs", () => {
|
||||
test("#when evaluated #then it triggers with bare tool name", () => {
|
||||
const calls = [
|
||||
...Array.from({ length: 16 }, () => ({ tool: "read", input: { filePath: "/src/same.ts" } })),
|
||||
{ tool: "grep", input: { pattern: "foo" } },
|
||||
{ tool: "edit", input: { filePath: "/src/other.ts" } },
|
||||
{ tool: "bash", input: { command: "ls" } },
|
||||
{ tool: "glob", input: { pattern: "**/*.ts" } },
|
||||
]
|
||||
const window = buildWindowWithInputs(calls, {
|
||||
circuitBreaker: { windowSize: 20, repetitionThresholdPercent: 80 },
|
||||
})
|
||||
const result = detectRepetitiveToolUse(window)
|
||||
expect(result.triggered).toBe(true)
|
||||
expect(result.toolName).toBe("read")
|
||||
expect(result.repeatedCount).toBe(16)
|
||||
})
|
||||
})
|
||||
|
||||
describe("#given tool calls with no input", () => {
|
||||
test("#when the same tool dominates #then falls back to name-only detection", () => {
|
||||
const calls = [
|
||||
...Array.from({ length: 16 }, () => ({ tool: "read" })),
|
||||
{ tool: "grep" },
|
||||
{ tool: "edit" },
|
||||
{ tool: "bash" },
|
||||
{ tool: "glob" },
|
||||
]
|
||||
const window = buildWindowWithInputs(calls, {
|
||||
circuitBreaker: { windowSize: 20, repetitionThresholdPercent: 80 },
|
||||
})
|
||||
const result = detectRepetitiveToolUse(window)
|
||||
expect(result.triggered).toBe(true)
|
||||
expect(result.toolName).toBe("read")
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import type { BackgroundTaskConfig } from "../../config/schema"
|
||||
import {
|
||||
DEFAULT_CIRCUIT_BREAKER_ENABLED,
|
||||
DEFAULT_CIRCUIT_BREAKER_REPETITION_THRESHOLD_PERCENT,
|
||||
DEFAULT_CIRCUIT_BREAKER_WINDOW_SIZE,
|
||||
DEFAULT_MAX_TOOL_CALLS,
|
||||
@@ -7,6 +8,7 @@ import {
|
||||
import type { ToolCallWindow } from "./types"
|
||||
|
||||
export interface CircuitBreakerSettings {
|
||||
enabled: boolean
|
||||
maxToolCalls: number
|
||||
windowSize: number
|
||||
repetitionThresholdPercent: number
|
||||
@@ -24,6 +26,7 @@ export function resolveCircuitBreakerSettings(
|
||||
config?: BackgroundTaskConfig
|
||||
): CircuitBreakerSettings {
|
||||
return {
|
||||
enabled: config?.circuitBreaker?.enabled ?? DEFAULT_CIRCUIT_BREAKER_ENABLED,
|
||||
maxToolCalls:
|
||||
config?.circuitBreaker?.maxToolCalls ?? config?.maxToolCalls ?? DEFAULT_MAX_TOOL_CALLS,
|
||||
windowSize: config?.circuitBreaker?.windowSize ?? DEFAULT_CIRCUIT_BREAKER_WINDOW_SIZE,
|
||||
@@ -36,28 +39,56 @@ export function resolveCircuitBreakerSettings(
|
||||
export function recordToolCall(
|
||||
window: ToolCallWindow | undefined,
|
||||
toolName: string,
|
||||
settings: CircuitBreakerSettings
|
||||
settings: CircuitBreakerSettings,
|
||||
toolInput?: Record<string, unknown> | null
|
||||
): ToolCallWindow {
|
||||
const previous = window?.toolNames ?? []
|
||||
const toolNames = [...previous, toolName].slice(-settings.windowSize)
|
||||
const previous = window?.toolSignatures ?? []
|
||||
const signature = createToolCallSignature(toolName, toolInput)
|
||||
const toolSignatures = [...previous, signature].slice(-settings.windowSize)
|
||||
|
||||
return {
|
||||
toolNames,
|
||||
toolSignatures,
|
||||
windowSize: settings.windowSize,
|
||||
thresholdPercent: settings.repetitionThresholdPercent,
|
||||
}
|
||||
}
|
||||
|
||||
function sortObject(obj: unknown): unknown {
|
||||
if (obj === null || obj === undefined) return obj
|
||||
if (typeof obj !== "object") return obj
|
||||
if (Array.isArray(obj)) return obj.map(sortObject)
|
||||
|
||||
const sorted: Record<string, unknown> = {}
|
||||
const keys = Object.keys(obj as Record<string, unknown>).sort()
|
||||
for (const key of keys) {
|
||||
sorted[key] = sortObject((obj as Record<string, unknown>)[key])
|
||||
}
|
||||
return sorted
|
||||
}
|
||||
|
||||
export function createToolCallSignature(
|
||||
toolName: string,
|
||||
toolInput?: Record<string, unknown> | null
|
||||
): string {
|
||||
if (toolInput === undefined || toolInput === null) {
|
||||
return toolName
|
||||
}
|
||||
if (Object.keys(toolInput).length === 0) {
|
||||
return toolName
|
||||
}
|
||||
return `${toolName}::${JSON.stringify(sortObject(toolInput))}`
|
||||
}
|
||||
|
||||
export function detectRepetitiveToolUse(
|
||||
window: ToolCallWindow | undefined
|
||||
): ToolLoopDetectionResult {
|
||||
if (!window || window.toolNames.length === 0) {
|
||||
if (!window || window.toolSignatures.length === 0) {
|
||||
return { triggered: false }
|
||||
}
|
||||
|
||||
const counts = new Map<string, number>()
|
||||
for (const toolName of window.toolNames) {
|
||||
counts.set(toolName, (counts.get(toolName) ?? 0) + 1)
|
||||
for (const signature of window.toolSignatures) {
|
||||
counts.set(signature, (counts.get(signature) ?? 0) + 1)
|
||||
}
|
||||
|
||||
let repeatedTool: string | undefined
|
||||
@@ -70,7 +101,7 @@ export function detectRepetitiveToolUse(
|
||||
}
|
||||
}
|
||||
|
||||
const sampleSize = window.toolNames.length
|
||||
const sampleSize = window.toolSignatures.length
|
||||
const minimumSampleSize = Math.min(
|
||||
window.windowSize,
|
||||
Math.ceil((window.windowSize * window.thresholdPercent) / 100)
|
||||
@@ -88,7 +119,7 @@ export function detectRepetitiveToolUse(
|
||||
|
||||
return {
|
||||
triggered: true,
|
||||
toolName: repeatedTool,
|
||||
toolName: repeatedTool.split("::")[0],
|
||||
repeatedCount,
|
||||
sampleSize,
|
||||
thresholdPercent: window.thresholdPercent,
|
||||
|
||||
@@ -236,4 +236,181 @@ describe("BackgroundManager circuit breaker", () => {
|
||||
expect(task.progress?.countedToolPartIDs).toEqual(["tool-1"])
|
||||
})
|
||||
})
|
||||
|
||||
describe("#given same tool reading different files", () => {
|
||||
test("#when tool events arrive with state.input #then task keeps running", async () => {
|
||||
const manager = createManager({
|
||||
circuitBreaker: {
|
||||
windowSize: 20,
|
||||
repetitionThresholdPercent: 80,
|
||||
},
|
||||
})
|
||||
const task: BackgroundTask = {
|
||||
id: "task-diff-files-1",
|
||||
sessionID: "session-diff-files-1",
|
||||
parentSessionID: "parent-1",
|
||||
parentMessageID: "msg-1",
|
||||
description: "Reading different files",
|
||||
prompt: "work",
|
||||
agent: "explore",
|
||||
status: "running",
|
||||
startedAt: new Date(Date.now() - 60_000),
|
||||
progress: {
|
||||
toolCalls: 0,
|
||||
lastUpdate: new Date(Date.now() - 60_000),
|
||||
},
|
||||
}
|
||||
getTaskMap(manager).set(task.id, task)
|
||||
|
||||
for (let i = 0; i < 20; i++) {
|
||||
manager.handleEvent({
|
||||
type: "message.part.updated",
|
||||
properties: {
|
||||
part: {
|
||||
sessionID: task.sessionID,
|
||||
type: "tool",
|
||||
tool: "read",
|
||||
state: { status: "running", input: { filePath: `/src/file-${i}.ts` } },
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
await flushAsyncWork()
|
||||
|
||||
expect(task.status).toBe("running")
|
||||
expect(task.progress?.toolCalls).toBe(20)
|
||||
})
|
||||
})
|
||||
|
||||
describe("#given same tool reading same file repeatedly", () => {
|
||||
test("#when tool events arrive with state.input #then task is cancelled with bare tool name in error", async () => {
|
||||
const manager = createManager({
|
||||
circuitBreaker: {
|
||||
windowSize: 20,
|
||||
repetitionThresholdPercent: 80,
|
||||
},
|
||||
})
|
||||
const task: BackgroundTask = {
|
||||
id: "task-same-file-1",
|
||||
sessionID: "session-same-file-1",
|
||||
parentSessionID: "parent-1",
|
||||
parentMessageID: "msg-1",
|
||||
description: "Reading same file repeatedly",
|
||||
prompt: "work",
|
||||
agent: "explore",
|
||||
status: "running",
|
||||
startedAt: new Date(Date.now() - 60_000),
|
||||
progress: {
|
||||
toolCalls: 0,
|
||||
lastUpdate: new Date(Date.now() - 60_000),
|
||||
},
|
||||
}
|
||||
getTaskMap(manager).set(task.id, task)
|
||||
|
||||
for (let i = 0; i < 20; i++) {
|
||||
manager.handleEvent({
|
||||
type: "message.part.updated",
|
||||
properties: {
|
||||
part: {
|
||||
sessionID: task.sessionID,
|
||||
type: "tool",
|
||||
tool: "read",
|
||||
state: { status: "running", input: { filePath: "/src/same.ts" } },
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
await flushAsyncWork()
|
||||
|
||||
expect(task.status).toBe("cancelled")
|
||||
expect(task.error).toContain("repeatedly called read")
|
||||
expect(task.error).not.toContain("::")
|
||||
})
|
||||
})
|
||||
|
||||
describe("#given circuit breaker enabled is false", () => {
|
||||
test("#when repetitive tools arrive #then task keeps running", async () => {
|
||||
const manager = createManager({
|
||||
circuitBreaker: {
|
||||
enabled: false,
|
||||
windowSize: 20,
|
||||
repetitionThresholdPercent: 80,
|
||||
},
|
||||
})
|
||||
const task: BackgroundTask = {
|
||||
id: "task-disabled-1",
|
||||
sessionID: "session-disabled-1",
|
||||
parentSessionID: "parent-1",
|
||||
parentMessageID: "msg-1",
|
||||
description: "Disabled circuit breaker task",
|
||||
prompt: "work",
|
||||
agent: "explore",
|
||||
status: "running",
|
||||
startedAt: new Date(Date.now() - 60_000),
|
||||
progress: {
|
||||
toolCalls: 0,
|
||||
lastUpdate: new Date(Date.now() - 60_000),
|
||||
},
|
||||
}
|
||||
getTaskMap(manager).set(task.id, task)
|
||||
|
||||
for (let i = 0; i < 20; i++) {
|
||||
manager.handleEvent({
|
||||
type: "message.part.updated",
|
||||
properties: {
|
||||
sessionID: task.sessionID,
|
||||
type: "tool",
|
||||
tool: "read",
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
await flushAsyncWork()
|
||||
|
||||
expect(task.status).toBe("running")
|
||||
})
|
||||
})
|
||||
|
||||
describe("#given circuit breaker enabled is false but absolute cap is low", () => {
|
||||
test("#when max tool calls exceeded #then task is still cancelled by absolute cap", async () => {
|
||||
const manager = createManager({
|
||||
maxToolCalls: 3,
|
||||
circuitBreaker: {
|
||||
enabled: false,
|
||||
windowSize: 10,
|
||||
repetitionThresholdPercent: 95,
|
||||
},
|
||||
})
|
||||
const task: BackgroundTask = {
|
||||
id: "task-cap-disabled-1",
|
||||
sessionID: "session-cap-disabled-1",
|
||||
parentSessionID: "parent-1",
|
||||
parentMessageID: "msg-1",
|
||||
description: "Backstop task with disabled circuit breaker",
|
||||
prompt: "work",
|
||||
agent: "explore",
|
||||
status: "running",
|
||||
startedAt: new Date(Date.now() - 60_000),
|
||||
progress: {
|
||||
toolCalls: 0,
|
||||
lastUpdate: new Date(Date.now() - 60_000),
|
||||
},
|
||||
}
|
||||
getTaskMap(manager).set(task.id, task)
|
||||
|
||||
for (const toolName of ["read", "grep", "edit"]) {
|
||||
manager.handleEvent({
|
||||
type: "message.part.updated",
|
||||
properties: { sessionID: task.sessionID, type: "tool", tool: toolName },
|
||||
})
|
||||
}
|
||||
|
||||
await flushAsyncWork()
|
||||
|
||||
expect(task.status).toBe("cancelled")
|
||||
expect(task.error).toContain("maximum tool call limit (3)")
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -74,7 +74,7 @@ interface MessagePartInfo {
|
||||
sessionID?: string
|
||||
type?: string
|
||||
tool?: string
|
||||
state?: { status?: string }
|
||||
state?: { status?: string; input?: Record<string, unknown> }
|
||||
}
|
||||
|
||||
interface EventProperties {
|
||||
@@ -918,29 +918,32 @@ export class BackgroundManager {
|
||||
task.progress.lastTool = partInfo.tool
|
||||
const circuitBreaker = resolveCircuitBreakerSettings(this.config)
|
||||
if (partInfo.tool) {
|
||||
task.progress.toolCallWindow = recordToolCall(
|
||||
task.progress.toolCallWindow,
|
||||
partInfo.tool,
|
||||
circuitBreaker
|
||||
)
|
||||
task.progress.toolCallWindow = recordToolCall(
|
||||
task.progress.toolCallWindow,
|
||||
partInfo.tool,
|
||||
circuitBreaker,
|
||||
partInfo.state?.input
|
||||
)
|
||||
|
||||
const loopDetection = detectRepetitiveToolUse(task.progress.toolCallWindow)
|
||||
if (loopDetection.triggered) {
|
||||
log("[background-agent] Circuit breaker: repetitive tool usage detected", {
|
||||
taskId: task.id,
|
||||
agent: task.agent,
|
||||
sessionID,
|
||||
toolName: loopDetection.toolName,
|
||||
repeatedCount: loopDetection.repeatedCount,
|
||||
sampleSize: loopDetection.sampleSize,
|
||||
thresholdPercent: loopDetection.thresholdPercent,
|
||||
})
|
||||
void this.cancelTask(task.id, {
|
||||
source: "circuit-breaker",
|
||||
reason: `Subagent repeatedly called ${loopDetection.toolName} ${loopDetection.repeatedCount}/${loopDetection.sampleSize} times in the recent tool-call window (${loopDetection.thresholdPercent}% threshold). This usually indicates an infinite loop. The task was automatically cancelled to prevent excessive token usage.`,
|
||||
})
|
||||
return
|
||||
}
|
||||
if (circuitBreaker.enabled) {
|
||||
const loopDetection = detectRepetitiveToolUse(task.progress.toolCallWindow)
|
||||
if (loopDetection.triggered) {
|
||||
log("[background-agent] Circuit breaker: repetitive tool usage detected", {
|
||||
taskId: task.id,
|
||||
agent: task.agent,
|
||||
sessionID,
|
||||
toolName: loopDetection.toolName,
|
||||
repeatedCount: loopDetection.repeatedCount,
|
||||
sampleSize: loopDetection.sampleSize,
|
||||
thresholdPercent: loopDetection.thresholdPercent,
|
||||
})
|
||||
void this.cancelTask(task.id, {
|
||||
source: "circuit-breaker",
|
||||
reason: `Subagent repeatedly called ${loopDetection.toolName} ${loopDetection.repeatedCount}/${loopDetection.sampleSize} times in the recent tool-call window (${loopDetection.thresholdPercent}% threshold). This usually indicates an infinite loop. The task was automatically cancelled to prevent excessive token usage.`,
|
||||
})
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const maxToolCalls = circuitBreaker.maxToolCalls
|
||||
|
||||
@@ -10,7 +10,7 @@ export type BackgroundTaskStatus =
|
||||
| "interrupt"
|
||||
|
||||
export interface ToolCallWindow {
|
||||
toolNames: string[]
|
||||
toolSignatures: string[]
|
||||
windowSize: number
|
||||
thresholdPercent: number
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ export const START_WORK_TEMPLATE = `You are starting a Sisyphus work session.
|
||||
- \`--worktree <path>\` (optional): absolute path to an existing git worktree to work in
|
||||
- If specified and valid: hook pre-sets worktree_path in boulder.json
|
||||
- If specified but invalid: you must run \`git worktree add <path> <branch>\` first
|
||||
- If omitted: you MUST choose or create a worktree (see Worktree Setup below)
|
||||
- If omitted: work directly in the current project directory (no worktree)
|
||||
|
||||
## WHAT TO DO
|
||||
|
||||
@@ -24,7 +24,7 @@ export const START_WORK_TEMPLATE = `You are starting a Sisyphus work session.
|
||||
- If ONE plan: auto-select it
|
||||
- If MULTIPLE plans: show list with timestamps, ask user to select
|
||||
|
||||
4. **Worktree Setup** (when \`worktree_path\` not already set in boulder.json):
|
||||
4. **Worktree Setup** (ONLY when \`--worktree\` was explicitly specified and \`worktree_path\` not already set in boulder.json):
|
||||
1. \`git worktree list --porcelain\` — see available worktrees
|
||||
2. Create: \`git worktree add <absolute-path> <branch-or-HEAD>\`
|
||||
3. Update boulder.json to add \`"worktree_path": "<absolute-path>"\`
|
||||
@@ -86,6 +86,38 @@ Reading plan and beginning execution...
|
||||
|
||||
- The session_id is injected by the hook - use it directly
|
||||
- Always update boulder.json BEFORE starting work
|
||||
- Always set worktree_path in boulder.json before executing any tasks
|
||||
- If worktree_path is set in boulder.json, all work happens inside that worktree directory
|
||||
- Read the FULL plan file before delegating any tasks
|
||||
- Follow atlas delegation protocols (7-section format)`
|
||||
- Follow atlas delegation protocols (7-section format)
|
||||
|
||||
## TASK BREAKDOWN (MANDATORY)
|
||||
|
||||
After reading the plan file, you MUST decompose every plan task into granular, implementation-level sub-steps and register ALL of them as task/todo items BEFORE starting any work.
|
||||
|
||||
**How to break down**:
|
||||
- Each plan checkbox item (e.g., \`- [ ] Add user authentication\`) must be split into concrete, actionable sub-tasks
|
||||
- Sub-tasks should be specific enough that each one touches a clear set of files/functions
|
||||
- Include: file to modify, what to change, expected behavior, and how to verify
|
||||
- Do NOT leave any task vague — "implement feature X" is NOT acceptable; "add validateToken() to src/auth/middleware.ts that checks JWT expiry and returns 401" IS acceptable
|
||||
|
||||
**Example breakdown**:
|
||||
Plan task: \`- [ ] Add rate limiting to API\`
|
||||
→ Todo items:
|
||||
1. Create \`src/middleware/rate-limiter.ts\` with sliding window algorithm (max 100 req/min per IP)
|
||||
2. Add RateLimiter middleware to \`src/app.ts\` router chain, before auth middleware
|
||||
3. Add rate limit headers (X-RateLimit-Limit, X-RateLimit-Remaining) to response in \`rate-limiter.ts\`
|
||||
4. Add test: verify 429 response after exceeding limit in \`src/middleware/rate-limiter.test.ts\`
|
||||
5. Add test: verify headers are present on normal responses
|
||||
|
||||
Register these as task/todo items so progress is tracked and visible throughout the session.
|
||||
|
||||
## WORKTREE COMPLETION
|
||||
|
||||
When working in a worktree (\`worktree_path\` is set in boulder.json) and ALL plan tasks are complete:
|
||||
1. Commit all remaining changes in the worktree
|
||||
2. Switch to the main working directory (the original repo, NOT the worktree)
|
||||
3. Merge the worktree branch into the current branch: \`git merge <worktree-branch>\`
|
||||
4. If merge succeeds, clean up: \`git worktree remove <worktree-path>\`
|
||||
5. Remove the boulder.json state
|
||||
|
||||
This is the DEFAULT behavior when \`--worktree\` was used. Skip merge only if the user explicitly instructs otherwise (e.g., asks to create a PR instead).`
|
||||
|
||||
@@ -52,3 +52,4 @@ export { createWriteExistingFileGuardHook } from "./write-existing-file-guard";
|
||||
export { createHashlineReadEnhancerHook } from "./hashline-read-enhancer";
|
||||
export { createJsonErrorRecoveryHook, JSON_ERROR_TOOL_EXCLUDE_LIST, JSON_ERROR_PATTERNS, JSON_ERROR_REMINDER } from "./json-error-recovery";
|
||||
export { createReadImageResizerHook } from "./read-image-resizer"
|
||||
export { createTodoDescriptionOverrideHook } from "./todo-description-override"
|
||||
|
||||
@@ -23,6 +23,10 @@ export async function handleDetectedCompletion(
|
||||
const { sessionID, state, loopState, directory, apiTimeoutMs } = input
|
||||
|
||||
if (state.ultrawork && !state.verification_pending) {
|
||||
if (state.verification_session_id) {
|
||||
ctx.client.session.abort({ path: { id: state.verification_session_id } }).catch(() => {})
|
||||
}
|
||||
|
||||
const verificationState = loopState.markVerificationPending(sessionID)
|
||||
if (!verificationState) {
|
||||
log(`[${HOOK_NAME}] Failed to transition ultrawork loop to verification`, {
|
||||
|
||||
@@ -10,6 +10,7 @@ describe("ulw-loop verification", () => {
|
||||
const testDir = join(tmpdir(), `ulw-loop-verification-${Date.now()}`)
|
||||
let promptCalls: Array<{ sessionID: string; text: string }>
|
||||
let toastCalls: Array<{ title: string; message: string; variant: string }>
|
||||
let abortCalls: Array<{ id: string }>
|
||||
let parentTranscriptPath: string
|
||||
let oracleTranscriptPath: string
|
||||
|
||||
@@ -25,6 +26,10 @@ describe("ulw-loop verification", () => {
|
||||
return {}
|
||||
},
|
||||
messages: async () => ({ data: [] }),
|
||||
abort: async (opts: { path: { id: string } }) => {
|
||||
abortCalls.push({ id: opts.path.id })
|
||||
return {}
|
||||
},
|
||||
},
|
||||
tui: {
|
||||
showToast: async (opts: { body: { title: string; message: string; variant: string } }) => {
|
||||
@@ -40,6 +45,7 @@ describe("ulw-loop verification", () => {
|
||||
beforeEach(() => {
|
||||
promptCalls = []
|
||||
toastCalls = []
|
||||
abortCalls = []
|
||||
parentTranscriptPath = join(testDir, "transcript-parent.jsonl")
|
||||
oracleTranscriptPath = join(testDir, "transcript-oracle.jsonl")
|
||||
|
||||
@@ -385,4 +391,96 @@ describe("ulw-loop verification", () => {
|
||||
expect(promptCalls).toHaveLength(2)
|
||||
expect(promptCalls[1]?.text).toContain("Verification failed")
|
||||
})
|
||||
|
||||
test("#given oracle verification fails #when loop restarts #then old oracle session is aborted", async () => {
|
||||
const sessionMessages: Record<string, unknown[]> = {
|
||||
"session-123": [{}, {}, {}],
|
||||
}
|
||||
const hook = createRalphLoopHook({
|
||||
...createMockPluginInput(),
|
||||
client: {
|
||||
...createMockPluginInput().client,
|
||||
session: {
|
||||
...createMockPluginInput().client.session,
|
||||
messages: async (opts: { path: { id: string } }) => ({
|
||||
data: sessionMessages[opts.path.id] ?? [],
|
||||
}),
|
||||
},
|
||||
},
|
||||
} as Parameters<typeof createRalphLoopHook>[0], {
|
||||
getTranscriptPath: (sessionID) => sessionID === "ses-oracle" ? oracleTranscriptPath : parentTranscriptPath,
|
||||
})
|
||||
hook.startLoop("session-123", "Build API", { ultrawork: true })
|
||||
writeFileSync(
|
||||
parentTranscriptPath,
|
||||
`${JSON.stringify({ type: "tool_result", timestamp: new Date().toISOString(), tool_output: { output: "done <promise>DONE</promise>" } })}\n`,
|
||||
)
|
||||
|
||||
await hook.event({ event: { type: "session.idle", properties: { sessionID: "session-123" } } })
|
||||
writeState(testDir, {
|
||||
...hook.getState()!,
|
||||
verification_session_id: "ses-oracle",
|
||||
})
|
||||
writeFileSync(
|
||||
oracleTranscriptPath,
|
||||
`${JSON.stringify({ type: "tool_result", timestamp: new Date().toISOString(), tool_output: { output: "verification failed: missing tests" } })}\n`,
|
||||
)
|
||||
|
||||
await hook.event({ event: { type: "session.idle", properties: { sessionID: "ses-oracle" } } })
|
||||
|
||||
expect(abortCalls).toHaveLength(1)
|
||||
expect(abortCalls[0].id).toBe("ses-oracle")
|
||||
})
|
||||
|
||||
test("#given ulw loop re-enters verification #when DONE detected again after failed verification #then previous verification session is aborted", async () => {
|
||||
const sessionMessages: Record<string, unknown[]> = {
|
||||
"session-123": [{}, {}, {}],
|
||||
}
|
||||
const hook = createRalphLoopHook({
|
||||
...createMockPluginInput(),
|
||||
client: {
|
||||
...createMockPluginInput().client,
|
||||
session: {
|
||||
...createMockPluginInput().client.session,
|
||||
messages: async (opts: { path: { id: string } }) => ({
|
||||
data: sessionMessages[opts.path.id] ?? [],
|
||||
}),
|
||||
},
|
||||
},
|
||||
} as Parameters<typeof createRalphLoopHook>[0], {
|
||||
getTranscriptPath: (sessionID) => sessionID === "ses-oracle" ? oracleTranscriptPath : parentTranscriptPath,
|
||||
})
|
||||
hook.startLoop("session-123", "Build API", { ultrawork: true })
|
||||
writeFileSync(
|
||||
parentTranscriptPath,
|
||||
`${JSON.stringify({ type: "tool_result", timestamp: new Date().toISOString(), tool_output: { output: "done <promise>DONE</promise>" } })}\n`,
|
||||
)
|
||||
|
||||
await hook.event({ event: { type: "session.idle", properties: { sessionID: "session-123" } } })
|
||||
writeState(testDir, {
|
||||
...hook.getState()!,
|
||||
verification_session_id: "ses-oracle",
|
||||
})
|
||||
writeFileSync(
|
||||
oracleTranscriptPath,
|
||||
`${JSON.stringify({ type: "tool_result", timestamp: new Date().toISOString(), tool_output: { output: "failed" } })}\n`,
|
||||
)
|
||||
|
||||
await hook.event({ event: { type: "session.idle", properties: { sessionID: "ses-oracle" } } })
|
||||
abortCalls.length = 0
|
||||
|
||||
writeFileSync(
|
||||
parentTranscriptPath,
|
||||
`${JSON.stringify({ type: "tool_result", timestamp: new Date().toISOString(), tool_output: { output: "fixed it <promise>DONE</promise>" } })}\n`,
|
||||
)
|
||||
writeState(testDir, {
|
||||
...hook.getState()!,
|
||||
verification_session_id: "ses-oracle-old",
|
||||
})
|
||||
|
||||
await hook.event({ event: { type: "session.idle", properties: { sessionID: "session-123" } } })
|
||||
|
||||
expect(abortCalls).toHaveLength(1)
|
||||
expect(abortCalls[0].id).toBe("ses-oracle-old")
|
||||
})
|
||||
})
|
||||
|
||||
@@ -68,6 +68,10 @@ export async function handleFailedVerification(
|
||||
return false
|
||||
}
|
||||
|
||||
if (state.verification_session_id) {
|
||||
ctx.client.session.abort({ path: { id: state.verification_session_id } }).catch(() => {})
|
||||
}
|
||||
|
||||
const resumedState = loopState.restartAfterFailedVerification(
|
||||
parentSessionID,
|
||||
messageCountAtStart,
|
||||
|
||||
28
src/hooks/todo-description-override/description.ts
Normal file
28
src/hooks/todo-description-override/description.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
export const TODOWRITE_DESCRIPTION = `Use this tool to create and manage a structured task list for tracking progress on multi-step work.
|
||||
|
||||
## Todo Format (MANDATORY)
|
||||
|
||||
Each todo title MUST encode four elements: WHERE, WHY, HOW, and EXPECTED RESULT.
|
||||
|
||||
Format: "[WHERE] [HOW] to [WHY] — expect [RESULT]"
|
||||
|
||||
GOOD:
|
||||
- "src/utils/validation.ts: Add validateEmail() for input sanitization — returns boolean"
|
||||
- "UserService.create(): Call validateEmail() before DB insert — rejects invalid emails with 400"
|
||||
- "validation.test.ts: Add test for missing @ sign — expect validateEmail('foo') to return false"
|
||||
|
||||
BAD:
|
||||
- "Implement email validation" (where? how? what result?)
|
||||
- "Add dark mode" (this is a feature, not a todo)
|
||||
- "Fix auth" (what file? what changes? what's expected?)
|
||||
|
||||
## Granularity Rules
|
||||
|
||||
Each todo MUST be a single atomic action completable in 1-3 tool calls. If it needs more, split it.
|
||||
|
||||
**Size test**: Can you complete this todo by editing one file or running one command? If not, it's too big.
|
||||
|
||||
## Task Management
|
||||
- One in_progress at a time. Complete it before starting the next.
|
||||
- Mark completed immediately after finishing each item.
|
||||
- Skip this tool for single trivial tasks (one-step, obvious action).`
|
||||
14
src/hooks/todo-description-override/hook.ts
Normal file
14
src/hooks/todo-description-override/hook.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { TODOWRITE_DESCRIPTION } from "./description"
|
||||
|
||||
export function createTodoDescriptionOverrideHook() {
|
||||
return {
|
||||
"tool.definition": async (
|
||||
input: { toolID: string },
|
||||
output: { description: string; parameters: unknown },
|
||||
) => {
|
||||
if (input.toolID === "todowrite") {
|
||||
output.description = TODOWRITE_DESCRIPTION
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
40
src/hooks/todo-description-override/index.test.ts
Normal file
40
src/hooks/todo-description-override/index.test.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import { describe, it, expect } from "bun:test"
|
||||
import { createTodoDescriptionOverrideHook } from "./hook"
|
||||
import { TODOWRITE_DESCRIPTION } from "./description"
|
||||
|
||||
describe("createTodoDescriptionOverrideHook", () => {
|
||||
describe("#given hook is created", () => {
|
||||
describe("#when tool.definition is called with todowrite", () => {
|
||||
it("#then should override the description", async () => {
|
||||
const hook = createTodoDescriptionOverrideHook()
|
||||
const output = { description: "original description", parameters: {} }
|
||||
|
||||
await hook["tool.definition"]({ toolID: "todowrite" }, output)
|
||||
|
||||
expect(output.description).toBe(TODOWRITE_DESCRIPTION)
|
||||
})
|
||||
})
|
||||
|
||||
describe("#when tool.definition is called with non-todowrite tool", () => {
|
||||
it("#then should not modify the description", async () => {
|
||||
const hook = createTodoDescriptionOverrideHook()
|
||||
const output = { description: "original description", parameters: {} }
|
||||
|
||||
await hook["tool.definition"]({ toolID: "bash" }, output)
|
||||
|
||||
expect(output.description).toBe("original description")
|
||||
})
|
||||
})
|
||||
|
||||
describe("#when tool.definition is called with TodoWrite (case-insensitive)", () => {
|
||||
it("#then should not override for different casing since OpenCode sends lowercase", async () => {
|
||||
const hook = createTodoDescriptionOverrideHook()
|
||||
const output = { description: "original description", parameters: {} }
|
||||
|
||||
await hook["tool.definition"]({ toolID: "TodoWrite" }, output)
|
||||
|
||||
expect(output.description).toBe("original description")
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
1
src/hooks/todo-description-override/index.ts
Normal file
1
src/hooks/todo-description-override/index.ts
Normal file
@@ -0,0 +1 @@
|
||||
export { createTodoDescriptionOverrideHook } from "./hook"
|
||||
@@ -71,5 +71,9 @@ export function createPluginInterface(args: {
|
||||
ctx,
|
||||
hooks,
|
||||
}),
|
||||
|
||||
"tool.definition": async (input, output) => {
|
||||
await hooks.todoDescriptionOverride?.["tool.definition"]?.(input, output)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,6 +14,7 @@ import {
|
||||
createHashlineReadEnhancerHook,
|
||||
createReadImageResizerHook,
|
||||
createJsonErrorRecoveryHook,
|
||||
createTodoDescriptionOverrideHook,
|
||||
} from "../../hooks"
|
||||
import {
|
||||
getOpenCodeVersion,
|
||||
@@ -35,6 +36,7 @@ export type ToolGuardHooks = {
|
||||
hashlineReadEnhancer: ReturnType<typeof createHashlineReadEnhancerHook> | null
|
||||
jsonErrorRecovery: ReturnType<typeof createJsonErrorRecoveryHook> | null
|
||||
readImageResizer: ReturnType<typeof createReadImageResizerHook> | null
|
||||
todoDescriptionOverride: ReturnType<typeof createTodoDescriptionOverrideHook> | null
|
||||
}
|
||||
|
||||
export function createToolGuardHooks(args: {
|
||||
@@ -111,6 +113,10 @@ export function createToolGuardHooks(args: {
|
||||
? safeHook("read-image-resizer", () => createReadImageResizerHook(ctx))
|
||||
: null
|
||||
|
||||
const todoDescriptionOverride = isHookEnabled("todo-description-override")
|
||||
? safeHook("todo-description-override", () => createTodoDescriptionOverrideHook())
|
||||
: null
|
||||
|
||||
return {
|
||||
commentChecker,
|
||||
toolOutputTruncator,
|
||||
@@ -123,5 +129,6 @@ export function createToolGuardHooks(args: {
|
||||
hashlineReadEnhancer,
|
||||
jsonErrorRecovery,
|
||||
readImageResizer,
|
||||
todoDescriptionOverride,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,45 +1,30 @@
|
||||
/// <reference types="bun-types" />
|
||||
|
||||
import { beforeAll, beforeEach, afterEach, describe, expect, mock, test } from "bun:test"
|
||||
import { beforeEach, afterEach, describe, expect, test } from "bun:test"
|
||||
|
||||
import { existsSync, mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from "node:fs"
|
||||
import { tmpdir } from "node:os"
|
||||
import { join } from "node:path"
|
||||
import * as dataPath from "./data-path"
|
||||
import {
|
||||
createConnectedProvidersCacheStore,
|
||||
} from "./connected-providers-cache"
|
||||
|
||||
let fakeUserCacheRoot = ""
|
||||
let testCacheDir = ""
|
||||
let moduleImportCounter = 0
|
||||
|
||||
const getOmoOpenCodeCacheDirMock = mock(() => testCacheDir)
|
||||
|
||||
let updateConnectedProvidersCache: typeof import("./connected-providers-cache").updateConnectedProvidersCache
|
||||
let readProviderModelsCache: typeof import("./connected-providers-cache").readProviderModelsCache
|
||||
|
||||
async function prepareConnectedProvidersCacheTestModule(): Promise<void> {
|
||||
testCacheDir = mkdtempSync(join(tmpdir(), "connected-providers-cache-test-"))
|
||||
getOmoOpenCodeCacheDirMock.mockClear()
|
||||
mock.module("./data-path", () => ({
|
||||
getOmoOpenCodeCacheDir: getOmoOpenCodeCacheDirMock,
|
||||
}))
|
||||
moduleImportCounter += 1
|
||||
;({ updateConnectedProvidersCache, readProviderModelsCache } = await import(`./connected-providers-cache?test=${moduleImportCounter}`))
|
||||
}
|
||||
let testCacheStore: ReturnType<typeof createConnectedProvidersCacheStore>
|
||||
|
||||
describe("updateConnectedProvidersCache", () => {
|
||||
beforeAll(() => {
|
||||
mock.restore()
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
mock.restore()
|
||||
await prepareConnectedProvidersCacheTestModule()
|
||||
beforeEach(() => {
|
||||
fakeUserCacheRoot = mkdtempSync(join(tmpdir(), "connected-providers-user-cache-"))
|
||||
testCacheDir = join(fakeUserCacheRoot, "oh-my-opencode")
|
||||
testCacheStore = createConnectedProvidersCacheStore(() => testCacheDir)
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
mock.restore()
|
||||
if (existsSync(testCacheDir)) {
|
||||
rmSync(testCacheDir, { recursive: true, force: true })
|
||||
if (existsSync(fakeUserCacheRoot)) {
|
||||
rmSync(fakeUserCacheRoot, { recursive: true, force: true })
|
||||
}
|
||||
fakeUserCacheRoot = ""
|
||||
testCacheDir = ""
|
||||
})
|
||||
|
||||
@@ -76,10 +61,10 @@ describe("updateConnectedProvidersCache", () => {
|
||||
}
|
||||
|
||||
//#when
|
||||
await updateConnectedProvidersCache(mockClient)
|
||||
await testCacheStore.updateConnectedProvidersCache(mockClient)
|
||||
|
||||
//#then
|
||||
const cache = readProviderModelsCache()
|
||||
const cache = testCacheStore.readProviderModelsCache()
|
||||
expect(cache).not.toBeNull()
|
||||
expect(cache!.connected).toEqual(["openai", "anthropic"])
|
||||
expect(cache!.models).toEqual({
|
||||
@@ -109,10 +94,10 @@ describe("updateConnectedProvidersCache", () => {
|
||||
}
|
||||
|
||||
//#when
|
||||
await updateConnectedProvidersCache(mockClient)
|
||||
await testCacheStore.updateConnectedProvidersCache(mockClient)
|
||||
|
||||
//#then
|
||||
const cache = readProviderModelsCache()
|
||||
const cache = testCacheStore.readProviderModelsCache()
|
||||
expect(cache).not.toBeNull()
|
||||
expect(cache!.models).toEqual({})
|
||||
})
|
||||
@@ -130,10 +115,10 @@ describe("updateConnectedProvidersCache", () => {
|
||||
}
|
||||
|
||||
//#when
|
||||
await updateConnectedProvidersCache(mockClient)
|
||||
await testCacheStore.updateConnectedProvidersCache(mockClient)
|
||||
|
||||
//#then
|
||||
const cache = readProviderModelsCache()
|
||||
const cache = testCacheStore.readProviderModelsCache()
|
||||
expect(cache).not.toBeNull()
|
||||
expect(cache!.models).toEqual({})
|
||||
})
|
||||
@@ -143,25 +128,44 @@ describe("updateConnectedProvidersCache", () => {
|
||||
const mockClient = {}
|
||||
|
||||
//#when
|
||||
await updateConnectedProvidersCache(mockClient)
|
||||
await testCacheStore.updateConnectedProvidersCache(mockClient)
|
||||
|
||||
//#then
|
||||
const cache = readProviderModelsCache()
|
||||
const cache = testCacheStore.readProviderModelsCache()
|
||||
expect(cache).toBeNull()
|
||||
})
|
||||
|
||||
test("does not remove the user's real cache directory during test setup", async () => {
|
||||
test("does not remove unrelated files in the cache directory", async () => {
|
||||
//#given
|
||||
const realCacheDir = join(dataPath.getCacheDir(), "oh-my-opencode")
|
||||
const realCacheDir = join(fakeUserCacheRoot, "oh-my-opencode")
|
||||
const sentinelPath = join(realCacheDir, "connected-providers-cache.test-sentinel.json")
|
||||
mkdirSync(realCacheDir, { recursive: true })
|
||||
writeFileSync(sentinelPath, JSON.stringify({ keep: true }))
|
||||
|
||||
const mockClient = {
|
||||
provider: {
|
||||
list: async () => ({
|
||||
data: {
|
||||
connected: ["openai"],
|
||||
all: [
|
||||
{
|
||||
id: "openai",
|
||||
models: {
|
||||
"gpt-5.4": { id: "gpt-5.4" },
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
},
|
||||
}
|
||||
|
||||
try {
|
||||
//#when
|
||||
await prepareConnectedProvidersCacheTestModule()
|
||||
await testCacheStore.updateConnectedProvidersCache(mockClient)
|
||||
|
||||
//#then
|
||||
expect(testCacheStore.readConnectedProvidersCache()).toEqual(["openai"])
|
||||
expect(existsSync(sentinelPath)).toBe(true)
|
||||
expect(readFileSync(sentinelPath, "utf-8")).toBe(JSON.stringify({ keep: true }))
|
||||
} finally {
|
||||
|
||||
@@ -25,172 +25,177 @@ interface ProviderModelsCache {
|
||||
updatedAt: string
|
||||
}
|
||||
|
||||
function getCacheFilePath(filename: string): string {
|
||||
return join(dataPath.getOmoOpenCodeCacheDir(), filename)
|
||||
}
|
||||
|
||||
function ensureCacheDir(): void {
|
||||
const cacheDir = dataPath.getOmoOpenCodeCacheDir()
|
||||
if (!existsSync(cacheDir)) {
|
||||
mkdirSync(cacheDir, { recursive: true })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the connected providers cache.
|
||||
* Returns the list of connected provider IDs, or null if cache doesn't exist.
|
||||
*/
|
||||
export function readConnectedProvidersCache(): string[] | null {
|
||||
const cacheFile = getCacheFilePath(CONNECTED_PROVIDERS_CACHE_FILE)
|
||||
|
||||
if (!existsSync(cacheFile)) {
|
||||
log("[connected-providers-cache] Cache file not found", { cacheFile })
|
||||
return null
|
||||
export function createConnectedProvidersCacheStore(
|
||||
getCacheDir: () => string = dataPath.getOmoOpenCodeCacheDir
|
||||
) {
|
||||
function getCacheFilePath(filename: string): string {
|
||||
return join(getCacheDir(), filename)
|
||||
}
|
||||
|
||||
try {
|
||||
const content = readFileSync(cacheFile, "utf-8")
|
||||
const data = JSON.parse(content) as ConnectedProvidersCache
|
||||
log("[connected-providers-cache] Read cache", { count: data.connected.length, updatedAt: data.updatedAt })
|
||||
return data.connected
|
||||
} catch (err) {
|
||||
log("[connected-providers-cache] Error reading cache", { error: String(err) })
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if connected providers cache exists.
|
||||
*/
|
||||
export function hasConnectedProvidersCache(): boolean {
|
||||
const cacheFile = getCacheFilePath(CONNECTED_PROVIDERS_CACHE_FILE)
|
||||
return existsSync(cacheFile)
|
||||
}
|
||||
|
||||
/**
|
||||
* Write the connected providers cache.
|
||||
*/
|
||||
function writeConnectedProvidersCache(connected: string[]): void {
|
||||
ensureCacheDir()
|
||||
const cacheFile = getCacheFilePath(CONNECTED_PROVIDERS_CACHE_FILE)
|
||||
|
||||
const data: ConnectedProvidersCache = {
|
||||
connected,
|
||||
updatedAt: new Date().toISOString(),
|
||||
function ensureCacheDir(): void {
|
||||
const cacheDir = getCacheDir()
|
||||
if (!existsSync(cacheDir)) {
|
||||
mkdirSync(cacheDir, { recursive: true })
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
writeFileSync(cacheFile, JSON.stringify(data, null, 2))
|
||||
log("[connected-providers-cache] Cache written", { count: connected.length })
|
||||
} catch (err) {
|
||||
log("[connected-providers-cache] Error writing cache", { error: String(err) })
|
||||
}
|
||||
}
|
||||
function readConnectedProvidersCache(): string[] | null {
|
||||
const cacheFile = getCacheFilePath(CONNECTED_PROVIDERS_CACHE_FILE)
|
||||
|
||||
/**
|
||||
* Read the provider-models cache.
|
||||
* Returns the cache data, or null if cache doesn't exist.
|
||||
*/
|
||||
export function readProviderModelsCache(): ProviderModelsCache | null {
|
||||
const cacheFile = getCacheFilePath(PROVIDER_MODELS_CACHE_FILE)
|
||||
|
||||
if (!existsSync(cacheFile)) {
|
||||
log("[connected-providers-cache] Provider-models cache file not found", { cacheFile })
|
||||
return null
|
||||
}
|
||||
|
||||
try {
|
||||
const content = readFileSync(cacheFile, "utf-8")
|
||||
const data = JSON.parse(content) as ProviderModelsCache
|
||||
log("[connected-providers-cache] Read provider-models cache", {
|
||||
providerCount: Object.keys(data.models).length,
|
||||
updatedAt: data.updatedAt
|
||||
})
|
||||
return data
|
||||
} catch (err) {
|
||||
log("[connected-providers-cache] Error reading provider-models cache", { error: String(err) })
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if provider-models cache exists.
|
||||
*/
|
||||
export function hasProviderModelsCache(): boolean {
|
||||
const cacheFile = getCacheFilePath(PROVIDER_MODELS_CACHE_FILE)
|
||||
return existsSync(cacheFile)
|
||||
}
|
||||
|
||||
/**
|
||||
* Write the provider-models cache.
|
||||
*/
|
||||
export function writeProviderModelsCache(data: { models: Record<string, string[]>; connected: string[] }): void {
|
||||
ensureCacheDir()
|
||||
const cacheFile = getCacheFilePath(PROVIDER_MODELS_CACHE_FILE)
|
||||
|
||||
const cacheData: ProviderModelsCache = {
|
||||
...data,
|
||||
updatedAt: new Date().toISOString(),
|
||||
}
|
||||
|
||||
try {
|
||||
writeFileSync(cacheFile, JSON.stringify(cacheData, null, 2))
|
||||
log("[connected-providers-cache] Provider-models cache written", {
|
||||
providerCount: Object.keys(data.models).length
|
||||
})
|
||||
} catch (err) {
|
||||
log("[connected-providers-cache] Error writing provider-models cache", { error: String(err) })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the connected providers cache by fetching from the client.
|
||||
* Also updates the provider-models cache with model lists per provider.
|
||||
*/
|
||||
export async function updateConnectedProvidersCache(client: {
|
||||
provider?: {
|
||||
list?: () => Promise<{
|
||||
data?: {
|
||||
connected?: string[]
|
||||
all?: Array<{ id: string; models?: Record<string, unknown> }>
|
||||
}
|
||||
}>
|
||||
}
|
||||
}): Promise<void> {
|
||||
if (!client?.provider?.list) {
|
||||
log("[connected-providers-cache] client.provider.list not available")
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await client.provider.list()
|
||||
const connected = result.data?.connected ?? []
|
||||
log("[connected-providers-cache] Fetched connected providers", { count: connected.length, providers: connected })
|
||||
|
||||
writeConnectedProvidersCache(connected)
|
||||
|
||||
const modelsByProvider: Record<string, string[]> = {}
|
||||
const allProviders = result.data?.all ?? []
|
||||
|
||||
for (const provider of allProviders) {
|
||||
if (provider.models) {
|
||||
const modelIds = Object.keys(provider.models)
|
||||
if (modelIds.length > 0) {
|
||||
modelsByProvider[provider.id] = modelIds
|
||||
}
|
||||
}
|
||||
if (!existsSync(cacheFile)) {
|
||||
log("[connected-providers-cache] Cache file not found", { cacheFile })
|
||||
return null
|
||||
}
|
||||
|
||||
log("[connected-providers-cache] Extracted models from provider list", {
|
||||
providerCount: Object.keys(modelsByProvider).length,
|
||||
totalModels: Object.values(modelsByProvider).reduce((sum, ids) => sum + ids.length, 0),
|
||||
})
|
||||
try {
|
||||
const content = readFileSync(cacheFile, "utf-8")
|
||||
const data = JSON.parse(content) as ConnectedProvidersCache
|
||||
log("[connected-providers-cache] Read cache", { count: data.connected.length, updatedAt: data.updatedAt })
|
||||
return data.connected
|
||||
} catch (err) {
|
||||
log("[connected-providers-cache] Error reading cache", { error: String(err) })
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
writeProviderModelsCache({
|
||||
models: modelsByProvider,
|
||||
function hasConnectedProvidersCache(): boolean {
|
||||
const cacheFile = getCacheFilePath(CONNECTED_PROVIDERS_CACHE_FILE)
|
||||
return existsSync(cacheFile)
|
||||
}
|
||||
|
||||
function writeConnectedProvidersCache(connected: string[]): void {
|
||||
ensureCacheDir()
|
||||
const cacheFile = getCacheFilePath(CONNECTED_PROVIDERS_CACHE_FILE)
|
||||
|
||||
const data: ConnectedProvidersCache = {
|
||||
connected,
|
||||
})
|
||||
} catch (err) {
|
||||
log("[connected-providers-cache] Error updating cache", { error: String(err) })
|
||||
updatedAt: new Date().toISOString(),
|
||||
}
|
||||
|
||||
try {
|
||||
writeFileSync(cacheFile, JSON.stringify(data, null, 2))
|
||||
log("[connected-providers-cache] Cache written", { count: connected.length })
|
||||
} catch (err) {
|
||||
log("[connected-providers-cache] Error writing cache", { error: String(err) })
|
||||
}
|
||||
}
|
||||
|
||||
function readProviderModelsCache(): ProviderModelsCache | null {
|
||||
const cacheFile = getCacheFilePath(PROVIDER_MODELS_CACHE_FILE)
|
||||
|
||||
if (!existsSync(cacheFile)) {
|
||||
log("[connected-providers-cache] Provider-models cache file not found", { cacheFile })
|
||||
return null
|
||||
}
|
||||
|
||||
try {
|
||||
const content = readFileSync(cacheFile, "utf-8")
|
||||
const data = JSON.parse(content) as ProviderModelsCache
|
||||
log("[connected-providers-cache] Read provider-models cache", {
|
||||
providerCount: Object.keys(data.models).length,
|
||||
updatedAt: data.updatedAt,
|
||||
})
|
||||
return data
|
||||
} catch (err) {
|
||||
log("[connected-providers-cache] Error reading provider-models cache", { error: String(err) })
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
function hasProviderModelsCache(): boolean {
|
||||
const cacheFile = getCacheFilePath(PROVIDER_MODELS_CACHE_FILE)
|
||||
return existsSync(cacheFile)
|
||||
}
|
||||
|
||||
function writeProviderModelsCache(data: { models: Record<string, string[]>; connected: string[] }): void {
|
||||
ensureCacheDir()
|
||||
const cacheFile = getCacheFilePath(PROVIDER_MODELS_CACHE_FILE)
|
||||
|
||||
const cacheData: ProviderModelsCache = {
|
||||
...data,
|
||||
updatedAt: new Date().toISOString(),
|
||||
}
|
||||
|
||||
try {
|
||||
writeFileSync(cacheFile, JSON.stringify(cacheData, null, 2))
|
||||
log("[connected-providers-cache] Provider-models cache written", {
|
||||
providerCount: Object.keys(data.models).length,
|
||||
})
|
||||
} catch (err) {
|
||||
log("[connected-providers-cache] Error writing provider-models cache", { error: String(err) })
|
||||
}
|
||||
}
|
||||
|
||||
async function updateConnectedProvidersCache(client: {
|
||||
provider?: {
|
||||
list?: () => Promise<{
|
||||
data?: {
|
||||
connected?: string[]
|
||||
all?: Array<{ id: string; models?: Record<string, unknown> }>
|
||||
}
|
||||
}>
|
||||
}
|
||||
}): Promise<void> {
|
||||
if (!client?.provider?.list) {
|
||||
log("[connected-providers-cache] client.provider.list not available")
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await client.provider.list()
|
||||
const connected = result.data?.connected ?? []
|
||||
log("[connected-providers-cache] Fetched connected providers", {
|
||||
count: connected.length,
|
||||
providers: connected,
|
||||
})
|
||||
|
||||
writeConnectedProvidersCache(connected)
|
||||
|
||||
const modelsByProvider: Record<string, string[]> = {}
|
||||
const allProviders = result.data?.all ?? []
|
||||
|
||||
for (const provider of allProviders) {
|
||||
if (provider.models) {
|
||||
const modelIds = Object.keys(provider.models)
|
||||
if (modelIds.length > 0) {
|
||||
modelsByProvider[provider.id] = modelIds
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log("[connected-providers-cache] Extracted models from provider list", {
|
||||
providerCount: Object.keys(modelsByProvider).length,
|
||||
totalModels: Object.values(modelsByProvider).reduce((sum, ids) => sum + ids.length, 0),
|
||||
})
|
||||
|
||||
writeProviderModelsCache({
|
||||
models: modelsByProvider,
|
||||
connected,
|
||||
})
|
||||
} catch (err) {
|
||||
log("[connected-providers-cache] Error updating cache", { error: String(err) })
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
readConnectedProvidersCache,
|
||||
hasConnectedProvidersCache,
|
||||
readProviderModelsCache,
|
||||
hasProviderModelsCache,
|
||||
writeProviderModelsCache,
|
||||
updateConnectedProvidersCache,
|
||||
}
|
||||
}
|
||||
|
||||
// Default store instance, backed by the oh-my-opencode cache directory.
const defaultConnectedProvidersCacheStore = createConnectedProvidersCacheStore(
  () => dataPath.getOmoOpenCodeCacheDir()
)

// Module public API: re-export the default store's functions so callers can
// import them directly without constructing a store themselves.
export const {
  readConnectedProvidersCache,
  hasConnectedProvidersCache,
  readProviderModelsCache,
  hasProviderModelsCache,
  writeProviderModelsCache,
  updateConnectedProvidersCache,
} = defaultConnectedProvidersCacheStore
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { describe, it, expect, beforeAll, afterAll } from "bun:test"
|
||||
import { afterEach, beforeEach, describe, expect, it, spyOn } from "bun:test"
|
||||
import {
|
||||
isPortAvailable,
|
||||
findAvailablePort,
|
||||
@@ -6,96 +6,283 @@ import {
|
||||
DEFAULT_SERVER_PORT,
|
||||
} from "./port-utils"
|
||||
|
||||
const HOSTNAME = "127.0.0.1"
|
||||
const REAL_PORT_SEARCH_WINDOW = 200
|
||||
|
||||
function supportsRealSocketBinding(): boolean {
|
||||
try {
|
||||
const server = Bun.serve({
|
||||
port: 0,
|
||||
hostname: HOSTNAME,
|
||||
fetch: () => new Response("probe"),
|
||||
})
|
||||
server.stop(true)
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
const canBindRealSockets = supportsRealSocketBinding()
|
||||
|
||||
describe("port-utils", () => {
|
||||
describe("isPortAvailable", () => {
|
||||
it("#given unused port #when checking availability #then returns true", async () => {
|
||||
const port = 59999
|
||||
const result = await isPortAvailable(port)
|
||||
expect(result).toBe(true)
|
||||
})
|
||||
|
||||
it("#given port in use #when checking availability #then returns false", async () => {
|
||||
const port = 59998
|
||||
const blocker = Bun.serve({
|
||||
if (canBindRealSockets) {
|
||||
function startRealBlocker(port: number = 0) {
|
||||
return Bun.serve({
|
||||
port,
|
||||
hostname: "127.0.0.1",
|
||||
hostname: HOSTNAME,
|
||||
fetch: () => new Response("blocked"),
|
||||
})
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await isPortAvailable(port)
|
||||
expect(result).toBe(false)
|
||||
} finally {
|
||||
blocker.stop(true)
|
||||
async function findContiguousAvailableStart(length: number): Promise<number> {
|
||||
const probe = startRealBlocker()
|
||||
const seedPort = probe.port
|
||||
probe.stop(true)
|
||||
|
||||
for (let candidate = seedPort; candidate < seedPort + REAL_PORT_SEARCH_WINDOW; candidate++) {
|
||||
const checks = await Promise.all(
|
||||
Array.from({ length }, async (_, offset) => isPortAvailable(candidate + offset, HOSTNAME))
|
||||
)
|
||||
if (checks.every(Boolean)) {
|
||||
return candidate
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe("findAvailablePort", () => {
|
||||
it("#given start port available #when finding port #then returns start port", async () => {
|
||||
const startPort = 59997
|
||||
const result = await findAvailablePort(startPort)
|
||||
expect(result).toBe(startPort)
|
||||
})
|
||||
throw new Error(`Could not find ${length} contiguous available ports`)
|
||||
}
|
||||
|
||||
it("#given start port blocked #when finding port #then returns next available", async () => {
|
||||
const startPort = 59996
|
||||
const blocker = Bun.serve({
|
||||
port: startPort,
|
||||
hostname: "127.0.0.1",
|
||||
fetch: () => new Response("blocked"),
|
||||
describe("with real sockets", () => {
|
||||
describe("isPortAvailable", () => {
|
||||
it("#given unused port #when checking availability #then returns true", async () => {
|
||||
const blocker = startRealBlocker()
|
||||
const port = blocker.port
|
||||
blocker.stop(true)
|
||||
|
||||
const result = await isPortAvailable(port)
|
||||
expect(result).toBe(true)
|
||||
})
|
||||
|
||||
it("#given port in use #when checking availability #then returns false", async () => {
|
||||
const blocker = startRealBlocker()
|
||||
const port = blocker.port
|
||||
|
||||
try {
|
||||
const result = await isPortAvailable(port)
|
||||
expect(result).toBe(false)
|
||||
} finally {
|
||||
blocker.stop(true)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await findAvailablePort(startPort)
|
||||
expect(result).toBe(startPort + 1)
|
||||
} finally {
|
||||
blocker.stop(true)
|
||||
}
|
||||
})
|
||||
describe("findAvailablePort", () => {
|
||||
it("#given start port available #when finding port #then returns start port", async () => {
|
||||
const startPort = await findContiguousAvailableStart(1)
|
||||
const result = await findAvailablePort(startPort)
|
||||
expect(result).toBe(startPort)
|
||||
})
|
||||
|
||||
it("#given multiple ports blocked #when finding port #then skips all blocked", async () => {
|
||||
const startPort = 59993
|
||||
const blockers = [
|
||||
Bun.serve({ port: startPort, hostname: "127.0.0.1", fetch: () => new Response() }),
|
||||
Bun.serve({ port: startPort + 1, hostname: "127.0.0.1", fetch: () => new Response() }),
|
||||
Bun.serve({ port: startPort + 2, hostname: "127.0.0.1", fetch: () => new Response() }),
|
||||
]
|
||||
it("#given start port blocked #when finding port #then returns next available", async () => {
|
||||
const startPort = await findContiguousAvailableStart(2)
|
||||
const blocker = startRealBlocker(startPort)
|
||||
|
||||
try {
|
||||
const result = await findAvailablePort(startPort)
|
||||
expect(result).toBe(startPort + 3)
|
||||
} finally {
|
||||
blockers.forEach((b) => b.stop(true))
|
||||
}
|
||||
})
|
||||
})
|
||||
try {
|
||||
const result = await findAvailablePort(startPort)
|
||||
expect(result).toBe(startPort + 1)
|
||||
} finally {
|
||||
blocker.stop(true)
|
||||
}
|
||||
})
|
||||
|
||||
describe("getAvailableServerPort", () => {
|
||||
it("#given preferred port available #when getting port #then returns preferred with wasAutoSelected=false", async () => {
|
||||
const preferredPort = 59990
|
||||
const result = await getAvailableServerPort(preferredPort)
|
||||
expect(result.port).toBe(preferredPort)
|
||||
expect(result.wasAutoSelected).toBe(false)
|
||||
})
|
||||
it("#given multiple ports blocked #when finding port #then skips all blocked", async () => {
|
||||
const startPort = await findContiguousAvailableStart(4)
|
||||
const blockers = [
|
||||
startRealBlocker(startPort),
|
||||
startRealBlocker(startPort + 1),
|
||||
startRealBlocker(startPort + 2),
|
||||
]
|
||||
|
||||
it("#given preferred port blocked #when getting port #then returns alternative with wasAutoSelected=true", async () => {
|
||||
const preferredPort = 59989
|
||||
const blocker = Bun.serve({
|
||||
port: preferredPort,
|
||||
hostname: "127.0.0.1",
|
||||
fetch: () => new Response("blocked"),
|
||||
try {
|
||||
const result = await findAvailablePort(startPort)
|
||||
expect(result).toBe(startPort + 3)
|
||||
} finally {
|
||||
blockers.forEach((blocker) => blocker.stop(true))
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await getAvailableServerPort(preferredPort)
|
||||
expect(result.port).toBeGreaterThan(preferredPort)
|
||||
expect(result.wasAutoSelected).toBe(true)
|
||||
} finally {
|
||||
blocker.stop(true)
|
||||
}
|
||||
describe("getAvailableServerPort", () => {
|
||||
it("#given preferred port available #when getting port #then returns preferred with wasAutoSelected=false", async () => {
|
||||
const preferredPort = await findContiguousAvailableStart(1)
|
||||
const result = await getAvailableServerPort(preferredPort)
|
||||
expect(result.port).toBe(preferredPort)
|
||||
expect(result.wasAutoSelected).toBe(false)
|
||||
})
|
||||
|
||||
it("#given preferred port blocked #when getting port #then returns alternative with wasAutoSelected=true", async () => {
|
||||
const preferredPort = await findContiguousAvailableStart(2)
|
||||
const blocker = startRealBlocker(preferredPort)
|
||||
|
||||
try {
|
||||
const result = await getAvailableServerPort(preferredPort)
|
||||
expect(result.port).toBe(preferredPort + 1)
|
||||
expect(result.wasAutoSelected).toBe(true)
|
||||
} finally {
|
||||
blocker.stop(true)
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
} else {
|
||||
const blockedSockets = new Set<string>()
|
||||
let serveSpy: ReturnType<typeof spyOn>
|
||||
|
||||
function getSocketKey(port: number, hostname: string): string {
|
||||
return `${hostname}:${port}`
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
blockedSockets.clear()
|
||||
serveSpy = spyOn(Bun, "serve").mockImplementation(({ port, hostname }) => {
|
||||
if (typeof port !== "number") {
|
||||
throw new Error("Test expected numeric port")
|
||||
}
|
||||
const resolvedHostname = typeof hostname === "string" ? hostname : HOSTNAME
|
||||
const socketKey = getSocketKey(port, resolvedHostname)
|
||||
|
||||
if (blockedSockets.has(socketKey)) {
|
||||
const error = new Error(`Failed to start server. Is port ${port} in use?`) as Error & {
|
||||
code?: string
|
||||
syscall?: string
|
||||
errno?: number
|
||||
address?: string
|
||||
port?: number
|
||||
}
|
||||
error.code = "EADDRINUSE"
|
||||
error.syscall = "listen"
|
||||
error.errno = 0
|
||||
error.address = resolvedHostname
|
||||
error.port = port
|
||||
throw error
|
||||
}
|
||||
|
||||
blockedSockets.add(socketKey)
|
||||
return {
|
||||
stop: (_force?: boolean) => {
|
||||
blockedSockets.delete(socketKey)
|
||||
},
|
||||
} as { stop: (force?: boolean) => void }
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
expect(blockedSockets.size).toBe(0)
|
||||
serveSpy.mockRestore()
|
||||
blockedSockets.clear()
|
||||
})
|
||||
|
||||
// Exercises the port utilities against the mocked Bun.serve harness above;
// "blocking" a port here just adds its key to blockedSockets.
describe("with mocked sockets fallback", () => {
  describe("isPortAvailable", () => {
    it("#given unused port #when checking availability #then returns true", async () => {
      const port = 59999

      const result = await isPortAvailable(port)
      expect(result).toBe(true)
      // The probe must release its fake socket again.
      expect(blockedSockets.size).toBe(0)
    })

    it("#given port in use #when checking availability #then returns false", async () => {
      const port = 59998
      const blocker = Bun.serve({
        port,
        hostname: HOSTNAME,
        fetch: () => new Response("blocked"),
      })

      try {
        const result = await isPortAvailable(port)
        expect(result).toBe(false)
      } finally {
        blocker.stop(true)
      }
    })

    it("#given custom hostname #when checking availability #then passes hostname through to Bun.serve", async () => {
      const hostname = "192.0.2.10"
      await isPortAvailable(59995, hostname)

      // First call recorded by the spy is the availability probe itself.
      expect(serveSpy.mock.calls[0]?.[0]?.hostname).toBe(hostname)
    })
  })

  describe("findAvailablePort", () => {
    it("#given start port available #when finding port #then returns start port", async () => {
      const startPort = 59997
      const result = await findAvailablePort(startPort)
      expect(result).toBe(startPort)
    })

    it("#given start port blocked #when finding port #then returns next available", async () => {
      const startPort = 59996
      const blocker = Bun.serve({
        port: startPort,
        hostname: HOSTNAME,
        fetch: () => new Response("blocked"),
      })

      try {
        const result = await findAvailablePort(startPort)
        expect(result).toBe(startPort + 1)
      } finally {
        blocker.stop(true)
      }
    })

    it("#given multiple ports blocked #when finding port #then skips all blocked", async () => {
      const startPort = 59993
      const blockers = [
        Bun.serve({ port: startPort, hostname: HOSTNAME, fetch: () => new Response() }),
        Bun.serve({ port: startPort + 1, hostname: HOSTNAME, fetch: () => new Response() }),
        Bun.serve({ port: startPort + 2, hostname: HOSTNAME, fetch: () => new Response() }),
      ]

      try {
        const result = await findAvailablePort(startPort)
        expect(result).toBe(startPort + 3)
      } finally {
        blockers.forEach((blocker) => blocker.stop(true))
      }
    })
  })

  describe("getAvailableServerPort", () => {
    it("#given preferred port available #when getting port #then returns preferred with wasAutoSelected=false", async () => {
      const preferredPort = 59990
      const result = await getAvailableServerPort(preferredPort)
      expect(result.port).toBe(preferredPort)
      expect(result.wasAutoSelected).toBe(false)
    })

    it("#given preferred port blocked #when getting port #then returns alternative with wasAutoSelected=true", async () => {
      const preferredPort = 59989
      const blocker = Bun.serve({
        port: preferredPort,
        hostname: HOSTNAME,
        fetch: () => new Response("blocked"),
      })

      try {
        const result = await getAvailableServerPort(preferredPort)
        expect(result.port).toBe(preferredPort + 1)
        expect(result.wasAutoSelected).toBe(true)
      } finally {
        blocker.stop(true)
      }
    })
  })
})
|
||||
}
|
||||
|
||||
describe("DEFAULT_SERVER_PORT", () => {
|
||||
it("#given constant #when accessed #then returns 4096", () => {
|
||||
|
||||
Reference in New Issue
Block a user