Compare commits

..

1 Commits

Author SHA1 Message Date
YeonGyu-Kim
ef8f22caba fix(boulder-state): treat plans without checkboxes as incomplete (fixes #2648)
GPT/Gemini Prometheus plans sometimes lack markdown checkboxes.
Previously getPlanProgress() returned isComplete=true for 0/0,
causing /start-work to skip Atlas execution.

Now total=0 correctly returns isComplete=false so start-work
detects the invalid plan format.

🤖 Generated with the assistance of [OhMyOpenCode](https://github.com/code-yeongyu/oh-my-opencode)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-18 12:06:24 +09:00
10 changed files with 291 additions and 638 deletions

View File

@@ -136,36 +136,7 @@ fi
---
## Phase 3: Spawn Subagents (Individual Tool Calls)
**CRITICAL: Create tasks ONE BY ONE using individual `task_create` tool calls. NEVER batch or script.**
For each item, execute these steps sequentially:
### Step 3.1: Create Task Record
```typescript
task_create(
subject="Triage: #{number} {title}",
description="GitHub {issue|PR} triage analysis - {type}",
metadata={"type": "{ISSUE_QUESTION|ISSUE_BUG|ISSUE_FEATURE|ISSUE_OTHER|PR_BUGFIX|PR_OTHER}", "number": {number}}
)
```
### Step 3.2: Spawn Analysis Subagent (Background)
```typescript
task(
category="quick",
run_in_background=true,
load_skills=[],
prompt=SUBAGENT_PROMPT
)
```
**ABSOLUTE RULES for Subagents:**
- **ONLY ANALYZE** - Never take action on GitHub (no comments, merges, closes)
- **READ-ONLY** - Use tools only for reading code/GitHub data
- **WRITE REPORT ONLY** - Output goes to `{REPORT_DIR}/{issue|pr}-{number}.md` via Write tool
- **EVIDENCE REQUIRED** - Every claim must have GitHub permalink as proof
## Phase 3: Spawn Subagents
```
For each item:
@@ -199,7 +170,6 @@ ABSOLUTE RULES (violating ANY = critical failure):
- Your ONLY writable output: {REPORT_DIR}/{issue|pr}-{number}.md via the Write tool
```
---
### ISSUE_QUESTION

View File

@@ -1,32 +1,20 @@
import { afterAll, beforeAll, describe, expect, mock, test } from "bun:test"
import { afterAll, beforeAll, describe, expect, test } from "bun:test"
import { mkdirSync, rmSync, writeFileSync } from "node:fs"
import * as os from "node:os"
import { tmpdir } from "node:os"
import { homedir, tmpdir } from "node:os"
import { join } from "node:path"
const originalHomedir = os.homedir.bind(os)
let mockedHomeDir = ""
let moduleImportCounter = 0
let resolvePromptAppend: typeof import("./resolve-file-uri").resolvePromptAppend
mock.module("node:os", () => ({
...os,
homedir: () => mockedHomeDir || originalHomedir(),
}))
import { resolvePromptAppend } from "./resolve-file-uri"
describe("resolvePromptAppend", () => {
const fixtureRoot = join(tmpdir(), `resolve-file-uri-${Date.now()}`)
const configDir = join(fixtureRoot, "config")
const homeFixtureRoot = join(fixtureRoot, "home")
const homeFixtureDir = join(homeFixtureRoot, "fixture-home")
const homeFixtureDir = join(homedir(), `.resolve-file-uri-home-${Date.now()}`)
const absoluteFilePath = join(fixtureRoot, "absolute.txt")
const relativeFilePath = join(configDir, "relative.txt")
const spacedFilePath = join(fixtureRoot, "with space.txt")
const homeFilePath = join(homeFixtureDir, "home.txt")
beforeAll(async () => {
mockedHomeDir = homeFixtureRoot
beforeAll(() => {
mkdirSync(fixtureRoot, { recursive: true })
mkdirSync(configDir, { recursive: true })
mkdirSync(homeFixtureDir, { recursive: true })
@@ -35,14 +23,11 @@ describe("resolvePromptAppend", () => {
writeFileSync(relativeFilePath, "relative-content", "utf8")
writeFileSync(spacedFilePath, "encoded-content", "utf8")
writeFileSync(homeFilePath, "home-content", "utf8")
moduleImportCounter += 1
;({ resolvePromptAppend } = await import(`./resolve-file-uri?test=${moduleImportCounter}`))
})
afterAll(() => {
rmSync(fixtureRoot, { recursive: true, force: true })
mock.restore()
rmSync(homeFixtureDir, { recursive: true, force: true })
})
test("returns non-file URI strings unchanged", () => {
@@ -80,7 +65,7 @@ describe("resolvePromptAppend", () => {
test("resolves home directory URI path", () => {
//#given
const input = "file://~/fixture-home/home.txt"
const input = `file://~/${homeFixtureDir.split("/").pop()}/home.txt`
//#when
const resolved = resolvePromptAppend(input)

View File

@@ -351,7 +351,7 @@ describe("boulder-state", () => {
expect(progress.isComplete).toBe(true)
})
test("should return isComplete true for empty plan", () => {
test("should return isComplete false for plan with content but no checkboxes", () => {
// given - plan with no checkboxes
const planPath = join(TEST_DIR, "empty-plan.md")
writeFileSync(planPath, "# Plan\nNo tasks here")
@@ -361,7 +361,7 @@ describe("boulder-state", () => {
// then
expect(progress.total).toBe(0)
expect(progress.isComplete).toBe(true)
expect(progress.isComplete).toBe(false)
})
test("should handle non-existent file", () => {

View File

@@ -133,7 +133,7 @@ export function getPlanProgress(planPath: string): PlanProgress {
return {
total,
completed,
isComplete: total === 0 || completed === total,
isComplete: total > 0 && completed === total,
}
} catch {
return { total: 0, completed: 0, isComplete: true }

View File

@@ -23,10 +23,6 @@ export async function handleDetectedCompletion(
const { sessionID, state, loopState, directory, apiTimeoutMs } = input
if (state.ultrawork && !state.verification_pending) {
if (state.verification_session_id) {
ctx.client.session.abort({ path: { id: state.verification_session_id } }).catch(() => {})
}
const verificationState = loopState.markVerificationPending(sessionID)
if (!verificationState) {
log(`[${HOOK_NAME}] Failed to transition ultrawork loop to verification`, {

View File

@@ -10,7 +10,6 @@ describe("ulw-loop verification", () => {
const testDir = join(tmpdir(), `ulw-loop-verification-${Date.now()}`)
let promptCalls: Array<{ sessionID: string; text: string }>
let toastCalls: Array<{ title: string; message: string; variant: string }>
let abortCalls: Array<{ id: string }>
let parentTranscriptPath: string
let oracleTranscriptPath: string
@@ -26,10 +25,6 @@ describe("ulw-loop verification", () => {
return {}
},
messages: async () => ({ data: [] }),
abort: async (opts: { path: { id: string } }) => {
abortCalls.push({ id: opts.path.id })
return {}
},
},
tui: {
showToast: async (opts: { body: { title: string; message: string; variant: string } }) => {
@@ -45,7 +40,6 @@ describe("ulw-loop verification", () => {
beforeEach(() => {
promptCalls = []
toastCalls = []
abortCalls = []
parentTranscriptPath = join(testDir, "transcript-parent.jsonl")
oracleTranscriptPath = join(testDir, "transcript-oracle.jsonl")
@@ -391,96 +385,4 @@ describe("ulw-loop verification", () => {
expect(promptCalls).toHaveLength(2)
expect(promptCalls[1]?.text).toContain("Verification failed")
})
test("#given oracle verification fails #when loop restarts #then old oracle session is aborted", async () => {
const sessionMessages: Record<string, unknown[]> = {
"session-123": [{}, {}, {}],
}
const hook = createRalphLoopHook({
...createMockPluginInput(),
client: {
...createMockPluginInput().client,
session: {
...createMockPluginInput().client.session,
messages: async (opts: { path: { id: string } }) => ({
data: sessionMessages[opts.path.id] ?? [],
}),
},
},
} as Parameters<typeof createRalphLoopHook>[0], {
getTranscriptPath: (sessionID) => sessionID === "ses-oracle" ? oracleTranscriptPath : parentTranscriptPath,
})
hook.startLoop("session-123", "Build API", { ultrawork: true })
writeFileSync(
parentTranscriptPath,
`${JSON.stringify({ type: "tool_result", timestamp: new Date().toISOString(), tool_output: { output: "done <promise>DONE</promise>" } })}\n`,
)
await hook.event({ event: { type: "session.idle", properties: { sessionID: "session-123" } } })
writeState(testDir, {
...hook.getState()!,
verification_session_id: "ses-oracle",
})
writeFileSync(
oracleTranscriptPath,
`${JSON.stringify({ type: "tool_result", timestamp: new Date().toISOString(), tool_output: { output: "verification failed: missing tests" } })}\n`,
)
await hook.event({ event: { type: "session.idle", properties: { sessionID: "ses-oracle" } } })
expect(abortCalls).toHaveLength(1)
expect(abortCalls[0].id).toBe("ses-oracle")
})
test("#given ulw loop re-enters verification #when DONE detected again after failed verification #then previous verification session is aborted", async () => {
const sessionMessages: Record<string, unknown[]> = {
"session-123": [{}, {}, {}],
}
const hook = createRalphLoopHook({
...createMockPluginInput(),
client: {
...createMockPluginInput().client,
session: {
...createMockPluginInput().client.session,
messages: async (opts: { path: { id: string } }) => ({
data: sessionMessages[opts.path.id] ?? [],
}),
},
},
} as Parameters<typeof createRalphLoopHook>[0], {
getTranscriptPath: (sessionID) => sessionID === "ses-oracle" ? oracleTranscriptPath : parentTranscriptPath,
})
hook.startLoop("session-123", "Build API", { ultrawork: true })
writeFileSync(
parentTranscriptPath,
`${JSON.stringify({ type: "tool_result", timestamp: new Date().toISOString(), tool_output: { output: "done <promise>DONE</promise>" } })}\n`,
)
await hook.event({ event: { type: "session.idle", properties: { sessionID: "session-123" } } })
writeState(testDir, {
...hook.getState()!,
verification_session_id: "ses-oracle",
})
writeFileSync(
oracleTranscriptPath,
`${JSON.stringify({ type: "tool_result", timestamp: new Date().toISOString(), tool_output: { output: "failed" } })}\n`,
)
await hook.event({ event: { type: "session.idle", properties: { sessionID: "ses-oracle" } } })
abortCalls.length = 0
writeFileSync(
parentTranscriptPath,
`${JSON.stringify({ type: "tool_result", timestamp: new Date().toISOString(), tool_output: { output: "fixed it <promise>DONE</promise>" } })}\n`,
)
writeState(testDir, {
...hook.getState()!,
verification_session_id: "ses-oracle-old",
})
await hook.event({ event: { type: "session.idle", properties: { sessionID: "session-123" } } })
expect(abortCalls).toHaveLength(1)
expect(abortCalls[0].id).toBe("ses-oracle-old")
})
})

View File

@@ -68,10 +68,6 @@ export async function handleFailedVerification(
return false
}
if (state.verification_session_id) {
ctx.client.session.abort({ path: { id: state.verification_session_id } }).catch(() => {})
}
const resumedState = loopState.restartAfterFailedVerification(
parentSessionID,
messageCountAtStart,

View File

@@ -1,30 +1,45 @@
/// <reference types="bun-types" />
import { beforeEach, afterEach, describe, expect, test } from "bun:test"
import { beforeAll, beforeEach, afterEach, describe, expect, mock, test } from "bun:test"
import { existsSync, mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from "node:fs"
import { tmpdir } from "node:os"
import { join } from "node:path"
import {
createConnectedProvidersCacheStore,
} from "./connected-providers-cache"
import * as dataPath from "./data-path"
let fakeUserCacheRoot = ""
let testCacheDir = ""
let testCacheStore: ReturnType<typeof createConnectedProvidersCacheStore>
let moduleImportCounter = 0
const getOmoOpenCodeCacheDirMock = mock(() => testCacheDir)
let updateConnectedProvidersCache: typeof import("./connected-providers-cache").updateConnectedProvidersCache
let readProviderModelsCache: typeof import("./connected-providers-cache").readProviderModelsCache
async function prepareConnectedProvidersCacheTestModule(): Promise<void> {
testCacheDir = mkdtempSync(join(tmpdir(), "connected-providers-cache-test-"))
getOmoOpenCodeCacheDirMock.mockClear()
mock.module("./data-path", () => ({
getOmoOpenCodeCacheDir: getOmoOpenCodeCacheDirMock,
}))
moduleImportCounter += 1
;({ updateConnectedProvidersCache, readProviderModelsCache } = await import(`./connected-providers-cache?test=${moduleImportCounter}`))
}
describe("updateConnectedProvidersCache", () => {
beforeEach(() => {
fakeUserCacheRoot = mkdtempSync(join(tmpdir(), "connected-providers-user-cache-"))
testCacheDir = join(fakeUserCacheRoot, "oh-my-opencode")
testCacheStore = createConnectedProvidersCacheStore(() => testCacheDir)
beforeAll(() => {
mock.restore()
})
beforeEach(async () => {
mock.restore()
await prepareConnectedProvidersCacheTestModule()
})
afterEach(() => {
if (existsSync(fakeUserCacheRoot)) {
rmSync(fakeUserCacheRoot, { recursive: true, force: true })
mock.restore()
if (existsSync(testCacheDir)) {
rmSync(testCacheDir, { recursive: true, force: true })
}
fakeUserCacheRoot = ""
testCacheDir = ""
})
@@ -61,10 +76,10 @@ describe("updateConnectedProvidersCache", () => {
}
//#when
await testCacheStore.updateConnectedProvidersCache(mockClient)
await updateConnectedProvidersCache(mockClient)
//#then
const cache = testCacheStore.readProviderModelsCache()
const cache = readProviderModelsCache()
expect(cache).not.toBeNull()
expect(cache!.connected).toEqual(["openai", "anthropic"])
expect(cache!.models).toEqual({
@@ -94,10 +109,10 @@ describe("updateConnectedProvidersCache", () => {
}
//#when
await testCacheStore.updateConnectedProvidersCache(mockClient)
await updateConnectedProvidersCache(mockClient)
//#then
const cache = testCacheStore.readProviderModelsCache()
const cache = readProviderModelsCache()
expect(cache).not.toBeNull()
expect(cache!.models).toEqual({})
})
@@ -115,10 +130,10 @@ describe("updateConnectedProvidersCache", () => {
}
//#when
await testCacheStore.updateConnectedProvidersCache(mockClient)
await updateConnectedProvidersCache(mockClient)
//#then
const cache = testCacheStore.readProviderModelsCache()
const cache = readProviderModelsCache()
expect(cache).not.toBeNull()
expect(cache!.models).toEqual({})
})
@@ -128,44 +143,25 @@ describe("updateConnectedProvidersCache", () => {
const mockClient = {}
//#when
await testCacheStore.updateConnectedProvidersCache(mockClient)
await updateConnectedProvidersCache(mockClient)
//#then
const cache = testCacheStore.readProviderModelsCache()
const cache = readProviderModelsCache()
expect(cache).toBeNull()
})
test("does not remove unrelated files in the cache directory", async () => {
test("does not remove the user's real cache directory during test setup", async () => {
//#given
const realCacheDir = join(fakeUserCacheRoot, "oh-my-opencode")
const realCacheDir = join(dataPath.getCacheDir(), "oh-my-opencode")
const sentinelPath = join(realCacheDir, "connected-providers-cache.test-sentinel.json")
mkdirSync(realCacheDir, { recursive: true })
writeFileSync(sentinelPath, JSON.stringify({ keep: true }))
const mockClient = {
provider: {
list: async () => ({
data: {
connected: ["openai"],
all: [
{
id: "openai",
models: {
"gpt-5.4": { id: "gpt-5.4" },
},
},
],
},
}),
},
}
try {
//#when
await testCacheStore.updateConnectedProvidersCache(mockClient)
await prepareConnectedProvidersCacheTestModule()
//#then
expect(testCacheStore.readConnectedProvidersCache()).toEqual(["openai"])
expect(existsSync(sentinelPath)).toBe(true)
expect(readFileSync(sentinelPath, "utf-8")).toBe(JSON.stringify({ keep: true }))
} finally {

View File

@@ -25,177 +25,172 @@ interface ProviderModelsCache {
updatedAt: string
}
export function createConnectedProvidersCacheStore(
getCacheDir: () => string = dataPath.getOmoOpenCodeCacheDir
) {
function getCacheFilePath(filename: string): string {
return join(getCacheDir(), filename)
}
function getCacheFilePath(filename: string): string {
return join(dataPath.getOmoOpenCodeCacheDir(), filename)
}
function ensureCacheDir(): void {
const cacheDir = getCacheDir()
if (!existsSync(cacheDir)) {
mkdirSync(cacheDir, { recursive: true })
}
}
function readConnectedProvidersCache(): string[] | null {
const cacheFile = getCacheFilePath(CONNECTED_PROVIDERS_CACHE_FILE)
if (!existsSync(cacheFile)) {
log("[connected-providers-cache] Cache file not found", { cacheFile })
return null
}
try {
const content = readFileSync(cacheFile, "utf-8")
const data = JSON.parse(content) as ConnectedProvidersCache
log("[connected-providers-cache] Read cache", { count: data.connected.length, updatedAt: data.updatedAt })
return data.connected
} catch (err) {
log("[connected-providers-cache] Error reading cache", { error: String(err) })
return null
}
}
function hasConnectedProvidersCache(): boolean {
const cacheFile = getCacheFilePath(CONNECTED_PROVIDERS_CACHE_FILE)
return existsSync(cacheFile)
}
function writeConnectedProvidersCache(connected: string[]): void {
ensureCacheDir()
const cacheFile = getCacheFilePath(CONNECTED_PROVIDERS_CACHE_FILE)
const data: ConnectedProvidersCache = {
connected,
updatedAt: new Date().toISOString(),
}
try {
writeFileSync(cacheFile, JSON.stringify(data, null, 2))
log("[connected-providers-cache] Cache written", { count: connected.length })
} catch (err) {
log("[connected-providers-cache] Error writing cache", { error: String(err) })
}
}
function readProviderModelsCache(): ProviderModelsCache | null {
const cacheFile = getCacheFilePath(PROVIDER_MODELS_CACHE_FILE)
if (!existsSync(cacheFile)) {
log("[connected-providers-cache] Provider-models cache file not found", { cacheFile })
return null
}
try {
const content = readFileSync(cacheFile, "utf-8")
const data = JSON.parse(content) as ProviderModelsCache
log("[connected-providers-cache] Read provider-models cache", {
providerCount: Object.keys(data.models).length,
updatedAt: data.updatedAt,
})
return data
} catch (err) {
log("[connected-providers-cache] Error reading provider-models cache", { error: String(err) })
return null
}
}
function hasProviderModelsCache(): boolean {
const cacheFile = getCacheFilePath(PROVIDER_MODELS_CACHE_FILE)
return existsSync(cacheFile)
}
function writeProviderModelsCache(data: { models: Record<string, string[]>; connected: string[] }): void {
ensureCacheDir()
const cacheFile = getCacheFilePath(PROVIDER_MODELS_CACHE_FILE)
const cacheData: ProviderModelsCache = {
...data,
updatedAt: new Date().toISOString(),
}
try {
writeFileSync(cacheFile, JSON.stringify(cacheData, null, 2))
log("[connected-providers-cache] Provider-models cache written", {
providerCount: Object.keys(data.models).length,
})
} catch (err) {
log("[connected-providers-cache] Error writing provider-models cache", { error: String(err) })
}
}
async function updateConnectedProvidersCache(client: {
provider?: {
list?: () => Promise<{
data?: {
connected?: string[]
all?: Array<{ id: string; models?: Record<string, unknown> }>
}
}>
}
}): Promise<void> {
if (!client?.provider?.list) {
log("[connected-providers-cache] client.provider.list not available")
return
}
try {
const result = await client.provider.list()
const connected = result.data?.connected ?? []
log("[connected-providers-cache] Fetched connected providers", {
count: connected.length,
providers: connected,
})
writeConnectedProvidersCache(connected)
const modelsByProvider: Record<string, string[]> = {}
const allProviders = result.data?.all ?? []
for (const provider of allProviders) {
if (provider.models) {
const modelIds = Object.keys(provider.models)
if (modelIds.length > 0) {
modelsByProvider[provider.id] = modelIds
}
}
}
log("[connected-providers-cache] Extracted models from provider list", {
providerCount: Object.keys(modelsByProvider).length,
totalModels: Object.values(modelsByProvider).reduce((sum, ids) => sum + ids.length, 0),
})
writeProviderModelsCache({
models: modelsByProvider,
connected,
})
} catch (err) {
log("[connected-providers-cache] Error updating cache", { error: String(err) })
}
}
return {
readConnectedProvidersCache,
hasConnectedProvidersCache,
readProviderModelsCache,
hasProviderModelsCache,
writeProviderModelsCache,
updateConnectedProvidersCache,
function ensureCacheDir(): void {
const cacheDir = dataPath.getOmoOpenCodeCacheDir()
if (!existsSync(cacheDir)) {
mkdirSync(cacheDir, { recursive: true })
}
}
const defaultConnectedProvidersCacheStore = createConnectedProvidersCacheStore(
() => dataPath.getOmoOpenCodeCacheDir()
)
/**
* Read the connected providers cache.
* Returns the list of connected provider IDs, or null if cache doesn't exist.
*/
export function readConnectedProvidersCache(): string[] | null {
const cacheFile = getCacheFilePath(CONNECTED_PROVIDERS_CACHE_FILE)
export const {
readConnectedProvidersCache,
hasConnectedProvidersCache,
readProviderModelsCache,
hasProviderModelsCache,
writeProviderModelsCache,
updateConnectedProvidersCache,
} = defaultConnectedProvidersCacheStore
if (!existsSync(cacheFile)) {
log("[connected-providers-cache] Cache file not found", { cacheFile })
return null
}
try {
const content = readFileSync(cacheFile, "utf-8")
const data = JSON.parse(content) as ConnectedProvidersCache
log("[connected-providers-cache] Read cache", { count: data.connected.length, updatedAt: data.updatedAt })
return data.connected
} catch (err) {
log("[connected-providers-cache] Error reading cache", { error: String(err) })
return null
}
}
/**
* Check if connected providers cache exists.
*/
export function hasConnectedProvidersCache(): boolean {
const cacheFile = getCacheFilePath(CONNECTED_PROVIDERS_CACHE_FILE)
return existsSync(cacheFile)
}
/**
* Write the connected providers cache.
*/
function writeConnectedProvidersCache(connected: string[]): void {
ensureCacheDir()
const cacheFile = getCacheFilePath(CONNECTED_PROVIDERS_CACHE_FILE)
const data: ConnectedProvidersCache = {
connected,
updatedAt: new Date().toISOString(),
}
try {
writeFileSync(cacheFile, JSON.stringify(data, null, 2))
log("[connected-providers-cache] Cache written", { count: connected.length })
} catch (err) {
log("[connected-providers-cache] Error writing cache", { error: String(err) })
}
}
/**
* Read the provider-models cache.
* Returns the cache data, or null if cache doesn't exist.
*/
export function readProviderModelsCache(): ProviderModelsCache | null {
const cacheFile = getCacheFilePath(PROVIDER_MODELS_CACHE_FILE)
if (!existsSync(cacheFile)) {
log("[connected-providers-cache] Provider-models cache file not found", { cacheFile })
return null
}
try {
const content = readFileSync(cacheFile, "utf-8")
const data = JSON.parse(content) as ProviderModelsCache
log("[connected-providers-cache] Read provider-models cache", {
providerCount: Object.keys(data.models).length,
updatedAt: data.updatedAt
})
return data
} catch (err) {
log("[connected-providers-cache] Error reading provider-models cache", { error: String(err) })
return null
}
}
/**
* Check if provider-models cache exists.
*/
export function hasProviderModelsCache(): boolean {
const cacheFile = getCacheFilePath(PROVIDER_MODELS_CACHE_FILE)
return existsSync(cacheFile)
}
/**
* Write the provider-models cache.
*/
export function writeProviderModelsCache(data: { models: Record<string, string[]>; connected: string[] }): void {
ensureCacheDir()
const cacheFile = getCacheFilePath(PROVIDER_MODELS_CACHE_FILE)
const cacheData: ProviderModelsCache = {
...data,
updatedAt: new Date().toISOString(),
}
try {
writeFileSync(cacheFile, JSON.stringify(cacheData, null, 2))
log("[connected-providers-cache] Provider-models cache written", {
providerCount: Object.keys(data.models).length
})
} catch (err) {
log("[connected-providers-cache] Error writing provider-models cache", { error: String(err) })
}
}
/**
* Update the connected providers cache by fetching from the client.
* Also updates the provider-models cache with model lists per provider.
*/
export async function updateConnectedProvidersCache(client: {
provider?: {
list?: () => Promise<{
data?: {
connected?: string[]
all?: Array<{ id: string; models?: Record<string, unknown> }>
}
}>
}
}): Promise<void> {
if (!client?.provider?.list) {
log("[connected-providers-cache] client.provider.list not available")
return
}
try {
const result = await client.provider.list()
const connected = result.data?.connected ?? []
log("[connected-providers-cache] Fetched connected providers", { count: connected.length, providers: connected })
writeConnectedProvidersCache(connected)
const modelsByProvider: Record<string, string[]> = {}
const allProviders = result.data?.all ?? []
for (const provider of allProviders) {
if (provider.models) {
const modelIds = Object.keys(provider.models)
if (modelIds.length > 0) {
modelsByProvider[provider.id] = modelIds
}
}
}
log("[connected-providers-cache] Extracted models from provider list", {
providerCount: Object.keys(modelsByProvider).length,
totalModels: Object.values(modelsByProvider).reduce((sum, ids) => sum + ids.length, 0),
})
writeProviderModelsCache({
models: modelsByProvider,
connected,
})
} catch (err) {
log("[connected-providers-cache] Error updating cache", { error: String(err) })
}
}

View File

@@ -1,4 +1,4 @@
import { afterEach, beforeEach, describe, expect, it, spyOn } from "bun:test"
import { describe, it, expect, beforeAll, afterAll } from "bun:test"
import {
isPortAvailable,
findAvailablePort,
@@ -6,283 +6,96 @@ import {
DEFAULT_SERVER_PORT,
} from "./port-utils"
const HOSTNAME = "127.0.0.1"
const REAL_PORT_SEARCH_WINDOW = 200
function supportsRealSocketBinding(): boolean {
try {
const server = Bun.serve({
port: 0,
hostname: HOSTNAME,
fetch: () => new Response("probe"),
})
server.stop(true)
return true
} catch {
return false
}
}
const canBindRealSockets = supportsRealSocketBinding()
describe("port-utils", () => {
if (canBindRealSockets) {
function startRealBlocker(port: number = 0) {
return Bun.serve({
describe("isPortAvailable", () => {
it("#given unused port #when checking availability #then returns true", async () => {
const port = 59999
const result = await isPortAvailable(port)
expect(result).toBe(true)
})
it("#given port in use #when checking availability #then returns false", async () => {
const port = 59998
const blocker = Bun.serve({
port,
hostname: HOSTNAME,
hostname: "127.0.0.1",
fetch: () => new Response("blocked"),
})
}
async function findContiguousAvailableStart(length: number): Promise<number> {
const probe = startRealBlocker()
const seedPort = probe.port
probe.stop(true)
for (let candidate = seedPort; candidate < seedPort + REAL_PORT_SEARCH_WINDOW; candidate++) {
const checks = await Promise.all(
Array.from({ length }, async (_, offset) => isPortAvailable(candidate + offset, HOSTNAME))
)
if (checks.every(Boolean)) {
return candidate
}
try {
const result = await isPortAvailable(port)
expect(result).toBe(false)
} finally {
blocker.stop(true)
}
throw new Error(`Could not find ${length} contiguous available ports`)
}
describe("with real sockets", () => {
describe("isPortAvailable", () => {
it("#given unused port #when checking availability #then returns true", async () => {
const blocker = startRealBlocker()
const port = blocker.port
blocker.stop(true)
const result = await isPortAvailable(port)
expect(result).toBe(true)
})
it("#given port in use #when checking availability #then returns false", async () => {
const blocker = startRealBlocker()
const port = blocker.port
try {
const result = await isPortAvailable(port)
expect(result).toBe(false)
} finally {
blocker.stop(true)
}
})
})
describe("findAvailablePort", () => {
it("#given start port available #when finding port #then returns start port", async () => {
const startPort = await findContiguousAvailableStart(1)
const result = await findAvailablePort(startPort)
expect(result).toBe(startPort)
})
it("#given start port blocked #when finding port #then returns next available", async () => {
const startPort = await findContiguousAvailableStart(2)
const blocker = startRealBlocker(startPort)
try {
const result = await findAvailablePort(startPort)
expect(result).toBe(startPort + 1)
} finally {
blocker.stop(true)
}
})
it("#given multiple ports blocked #when finding port #then skips all blocked", async () => {
const startPort = await findContiguousAvailableStart(4)
const blockers = [
startRealBlocker(startPort),
startRealBlocker(startPort + 1),
startRealBlocker(startPort + 2),
]
try {
const result = await findAvailablePort(startPort)
expect(result).toBe(startPort + 3)
} finally {
blockers.forEach((blocker) => blocker.stop(true))
}
})
})
describe("getAvailableServerPort", () => {
it("#given preferred port available #when getting port #then returns preferred with wasAutoSelected=false", async () => {
const preferredPort = await findContiguousAvailableStart(1)
const result = await getAvailableServerPort(preferredPort)
expect(result.port).toBe(preferredPort)
expect(result.wasAutoSelected).toBe(false)
})
it("#given preferred port blocked #when getting port #then returns alternative with wasAutoSelected=true", async () => {
const preferredPort = await findContiguousAvailableStart(2)
const blocker = startRealBlocker(preferredPort)
try {
const result = await getAvailableServerPort(preferredPort)
expect(result.port).toBe(preferredPort + 1)
expect(result.wasAutoSelected).toBe(true)
} finally {
blocker.stop(true)
}
})
})
})
} else {
const blockedSockets = new Set<string>()
let serveSpy: ReturnType<typeof spyOn>
})
function getSocketKey(port: number, hostname: string): string {
return `${hostname}:${port}`
}
beforeEach(() => {
blockedSockets.clear()
serveSpy = spyOn(Bun, "serve").mockImplementation(({ port, hostname }) => {
if (typeof port !== "number") {
throw new Error("Test expected numeric port")
}
const resolvedHostname = typeof hostname === "string" ? hostname : HOSTNAME
const socketKey = getSocketKey(port, resolvedHostname)
if (blockedSockets.has(socketKey)) {
const error = new Error(`Failed to start server. Is port ${port} in use?`) as Error & {
code?: string
syscall?: string
errno?: number
address?: string
port?: number
}
error.code = "EADDRINUSE"
error.syscall = "listen"
error.errno = 0
error.address = resolvedHostname
error.port = port
throw error
}
blockedSockets.add(socketKey)
return {
stop: (_force?: boolean) => {
blockedSockets.delete(socketKey)
},
} as { stop: (force?: boolean) => void }
})
describe("findAvailablePort", () => {
it("#given start port available #when finding port #then returns start port", async () => {
const startPort = 59997
const result = await findAvailablePort(startPort)
expect(result).toBe(startPort)
})
afterEach(() => {
expect(blockedSockets.size).toBe(0)
serveSpy.mockRestore()
blockedSockets.clear()
it("#given start port blocked #when finding port #then returns next available", async () => {
const startPort = 59996
const blocker = Bun.serve({
port: startPort,
hostname: "127.0.0.1",
fetch: () => new Response("blocked"),
})
try {
const result = await findAvailablePort(startPort)
expect(result).toBe(startPort + 1)
} finally {
blocker.stop(true)
}
})
describe("with mocked sockets fallback", () => {
describe("isPortAvailable", () => {
it("#given unused port #when checking availability #then returns true", async () => {
const port = 59999
it("#given multiple ports blocked #when finding port #then skips all blocked", async () => {
const startPort = 59993
const blockers = [
Bun.serve({ port: startPort, hostname: "127.0.0.1", fetch: () => new Response() }),
Bun.serve({ port: startPort + 1, hostname: "127.0.0.1", fetch: () => new Response() }),
Bun.serve({ port: startPort + 2, hostname: "127.0.0.1", fetch: () => new Response() }),
]
const result = await isPortAvailable(port)
expect(result).toBe(true)
expect(blockedSockets.size).toBe(0)
})
it("#given port in use #when checking availability #then returns false", async () => {
const port = 59998
const blocker = Bun.serve({
port,
hostname: HOSTNAME,
fetch: () => new Response("blocked"),
})
try {
const result = await isPortAvailable(port)
expect(result).toBe(false)
} finally {
blocker.stop(true)
}
})
it("#given custom hostname #when checking availability #then passes hostname through to Bun.serve", async () => {
const hostname = "192.0.2.10"
await isPortAvailable(59995, hostname)
expect(serveSpy.mock.calls[0]?.[0]?.hostname).toBe(hostname)
})
})
describe("findAvailablePort", () => {
it("#given start port available #when finding port #then returns start port", async () => {
const startPort = 59997
const result = await findAvailablePort(startPort)
expect(result).toBe(startPort)
})
it("#given start port blocked #when finding port #then returns next available", async () => {
const startPort = 59996
const blocker = Bun.serve({
port: startPort,
hostname: HOSTNAME,
fetch: () => new Response("blocked"),
})
try {
const result = await findAvailablePort(startPort)
expect(result).toBe(startPort + 1)
} finally {
blocker.stop(true)
}
})
it("#given multiple ports blocked #when finding port #then skips all blocked", async () => {
const startPort = 59993
const blockers = [
Bun.serve({ port: startPort, hostname: HOSTNAME, fetch: () => new Response() }),
Bun.serve({ port: startPort + 1, hostname: HOSTNAME, fetch: () => new Response() }),
Bun.serve({ port: startPort + 2, hostname: HOSTNAME, fetch: () => new Response() }),
]
try {
const result = await findAvailablePort(startPort)
expect(result).toBe(startPort + 3)
} finally {
blockers.forEach((blocker) => blocker.stop(true))
}
})
})
describe("getAvailableServerPort", () => {
it("#given preferred port available #when getting port #then returns preferred with wasAutoSelected=false", async () => {
const preferredPort = 59990
const result = await getAvailableServerPort(preferredPort)
expect(result.port).toBe(preferredPort)
expect(result.wasAutoSelected).toBe(false)
})
it("#given preferred port blocked #when getting port #then returns alternative with wasAutoSelected=true", async () => {
const preferredPort = 59989
const blocker = Bun.serve({
port: preferredPort,
hostname: HOSTNAME,
fetch: () => new Response("blocked"),
})
try {
const result = await getAvailableServerPort(preferredPort)
expect(result.port).toBe(preferredPort + 1)
expect(result.wasAutoSelected).toBe(true)
} finally {
blocker.stop(true)
}
})
})
try {
const result = await findAvailablePort(startPort)
expect(result).toBe(startPort + 3)
} finally {
blockers.forEach((b) => b.stop(true))
}
})
}
})
describe("getAvailableServerPort", () => {
it("#given preferred port available #when getting port #then returns preferred with wasAutoSelected=false", async () => {
const preferredPort = 59990
const result = await getAvailableServerPort(preferredPort)
expect(result.port).toBe(preferredPort)
expect(result.wasAutoSelected).toBe(false)
})
it("#given preferred port blocked #when getting port #then returns alternative with wasAutoSelected=true", async () => {
const preferredPort = 59989
const blocker = Bun.serve({
port: preferredPort,
hostname: "127.0.0.1",
fetch: () => new Response("blocked"),
})
try {
const result = await getAvailableServerPort(preferredPort)
expect(result.port).toBeGreaterThan(preferredPort)
expect(result.wasAutoSelected).toBe(true)
} finally {
blocker.stop(true)
}
})
})
describe("DEFAULT_SERVER_PORT", () => {
it("#given constant #when accessed #then returns 4096", () => {