fix: handle both string[] and object[] formats in provider-models cache

Category delegation fails when provider-models.json contains model objects
with metadata (id, provider, context, output) instead of plain strings.
Line 196 in model-availability.ts assumes string[] format, causing:
  - Object stringification: `${providerId}/${modelId}` becomes "ollama/[object Object]"
  - Empty availableModels Set passed to resolveModelPipeline()
  - Error: "Model not configured for category"

This is the root cause of issue #1508 where delegate_task(category='quick')
fails despite direct agent routing (delegate_task(subagent_type='explore'))
working correctly.

Changes:
- model-availability.ts: Add type check to handle both string and object formats
- connected-providers-cache.ts: Update ProviderModelsCache interface to accept both formats
- model-availability.test.ts: Add 4 test cases for object[] format handling

Direct agent routing bypasses fetchAvailableModels() entirely, explaining why
it works while category routing fails. This fix enables category delegation
to work with manually-populated Ollama model caches.

Fixes #1508
This commit is contained in:
rooftop-Owl
2026-02-05 15:32:08 +09:00
parent 291f41f7f9
commit bd3a3bcfb9
3 changed files with 90 additions and 9 deletions

View File

@@ -11,8 +11,16 @@ interface ConnectedProvidersCache {
updatedAt: string
}
/**
 * Metadata for a single model entry in provider-models.json.
 * Ollama-style caches store objects like this instead of bare
 * model-id strings; only `id` is guaranteed to be present.
 */
interface ModelMetadata {
  /** Model identifier, e.g. "qwen3-coder:32k-agent". */
  id: string
  /** Owning provider id, e.g. "ollama". */
  provider?: string
  /** Context window size in tokens — presumably; TODO confirm unit against the cache writer. */
  context?: number
  /** Maximum output size in tokens — presumably; TODO confirm unit against the cache writer. */
  output?: number
  /** Optional human-readable display name. */
  name?: string
}

/**
 * On-disk shape of provider-models.json.
 * `models` maps a provider id to either the legacy string[] of model ids
 * or an object[] of ModelMetadata entries (both formats must be accepted
 * by readers — see fetchAvailableModels).
 */
interface ProviderModelsCache {
  // BUGFIX: the previous revision declared `models` twice (the stale
  // string[]-only member was left in place next to the union-typed one),
  // which is a duplicate-member type error. Only the union member remains.
  models: Record<string, string[] | ModelMetadata[]>
  /** Provider ids the user is actually connected to (whitelist). */
  connected: string[]
  /** ISO timestamp of the last cache refresh. */
  updatedAt: string
}

View File

@@ -619,7 +619,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
rmSync(tempDir, { recursive: true, force: true })
})
function writeProviderModelsCache(data: { models: Record<string, string[]>; connected: string[] }) {
function writeProviderModelsCache(data: { models: Record<string, string[] | any[]>; connected: string[] }) {
const cacheDir = join(tempDir, "oh-my-opencode")
require("fs").mkdirSync(cacheDir, { recursive: true })
writeFileSync(join(cacheDir, "provider-models.json"), JSON.stringify({
@@ -723,6 +723,72 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
expect(result.has("anthropic/claude-opus-4-5")).toBe(false)
expect(result.has("google/gemini-3-pro")).toBe(false)
})
it("should handle object[] format with metadata (Ollama-style)", async () => {
  // Seed the cache with object entries carrying metadata instead of
  // bare model-id strings, the shape a manually-populated Ollama cache uses.
  const ollamaEntries = [
    { id: "ministral-3:14b-32k-agent", provider: "ollama", context: 32768, output: 8192 },
    { id: "qwen3-coder:32k-agent", provider: "ollama", context: 32768, output: 8192 }
  ]
  writeProviderModelsCache({
    models: { ollama: ollamaEntries },
    connected: ["ollama"]
  })

  const result = await fetchAvailableModels(undefined, {
    connectedProviders: ["ollama"]
  })

  // Each object entry must be flattened to a "provider/id" key.
  expect(result.size).toBe(2)
  for (const key of ["ollama/ministral-3:14b-32k-agent", "ollama/qwen3-coder:32k-agent"]) {
    expect(result.has(key)).toBe(true)
  }
})
it("should handle mixed string[] and object[] formats across providers", async () => {
  // Anthropic uses the legacy string[] shape while Ollama uses object[]
  // entries; both must coexist within a single cache file.
  writeProviderModelsCache({
    models: {
      anthropic: ["claude-opus-4-5", "claude-sonnet-4-5"],
      ollama: [
        { id: "ministral-3:14b-32k-agent", provider: "ollama" },
        { id: "qwen3-coder:32k-agent", provider: "ollama" }
      ]
    },
    connected: ["anthropic", "ollama"]
  })

  const result = await fetchAvailableModels(undefined, {
    connectedProviders: ["anthropic", "ollama"]
  })

  const expectedKeys = [
    "anthropic/claude-opus-4-5",
    "anthropic/claude-sonnet-4-5",
    "ollama/ministral-3:14b-32k-agent",
    "ollama/qwen3-coder:32k-agent"
  ]
  expect(result.size).toBe(expectedKeys.length)
  for (const key of expectedKeys) {
    expect(result.has(key)).toBe(true)
  }
})
it("should skip invalid entries in object[] format", async () => {
  // Malformed entries (missing id, empty id, null) must be dropped
  // while valid object and legacy string entries are still picked up.
  const entries = [
    { id: "valid-model", provider: "ollama" },
    { provider: "ollama" },         // no id — skipped
    { id: "", provider: "ollama" }, // empty id — skipped
    null,                           // not a usable entry — skipped
    "string-model"                  // legacy string entry — kept
  ]
  writeProviderModelsCache({
    models: { ollama: entries },
    connected: ["ollama"]
  })

  const result = await fetchAvailableModels(undefined, {
    connectedProviders: ["ollama"]
  })

  expect(result.size).toBe(2)
  expect(result.has("ollama/valid-model")).toBe(true)
  expect(result.has("ollama/string-model")).toBe(true)
})
})
describe("isModelAvailable", () => {

View File

@@ -187,16 +187,23 @@ export async function fetchAvailableModels(
if (providerCount === 0) {
log("[fetchAvailableModels] provider-models cache empty, falling back to models.json")
} else {
log("[fetchAvailableModels] using provider-models cache (whitelist-filtered)")
for (const [providerId, modelIds] of Object.entries(providerModelsCache.models)) {
if (!connectedSet.has(providerId)) {
continue
}
for (const modelId of modelIds) {
log("[fetchAvailableModels] using provider-models cache (whitelist-filtered)")
for (const [providerId, modelIds] of Object.entries(providerModelsCache.models)) {
if (!connectedSet.has(providerId)) {
continue
}
for (const modelItem of modelIds) {
// Handle both string[] (legacy) and object[] (with metadata) formats
const modelId = typeof modelItem === 'string'
? modelItem
: (modelItem as any)?.id
if (modelId) {
modelSet.add(`${providerId}/${modelId}`)
}
}
}
log("[fetchAvailableModels] parsed from provider-models cache", {
count: modelSet.size,