fix(model-resolver): return variant from fallback chain, handle model name normalization

- Add variant to ModelResolutionResult return type
- Return variant from matched fallback entry
- Add normalizeModelName() for Claude model hyphen/period differences
- Add transformModelForProvider() for github-copilot model names
- Update delegate-task to use resolved variant (user config takes priority)
- Fix test expectations for new fallback behavior
This commit is contained in:
justsisyphus
2026-01-23 02:20:32 +09:00
parent 7de376e24f
commit 6e84a14f20
7 changed files with 71 additions and 46 deletions

View File

@@ -32,30 +32,30 @@ describe("createBuiltinAgents with model overrides", () => {
expect(agents.Sisyphus.thinking).toBeUndefined()
})
test("Sisyphus with systemDefaultModel GPT has reasoningEffort, no thinking", async () => {
test("Sisyphus uses first fallbackChain entry when no availableModels provided", async () => {
// #given
const systemDefaultModel = "openai/gpt-5.2"
// #when
const agents = await createBuiltinAgents([], {}, undefined, systemDefaultModel)
// #then
expect(agents.Sisyphus.model).toBe("openai/gpt-5.2")
expect(agents.Sisyphus.reasoningEffort).toBe("medium")
expect(agents.Sisyphus.thinking).toBeUndefined()
// #then - Sisyphus first fallbackChain entry is anthropic/claude-opus-4-5
expect(agents.Sisyphus.model).toBe("anthropic/claude-opus-4-5")
expect(agents.Sisyphus.thinking).toEqual({ type: "enabled", budgetTokens: 32000 })
expect(agents.Sisyphus.reasoningEffort).toBeUndefined()
})
test("Oracle with default model has reasoningEffort", async () => {
// #given - no overrides, using systemDefaultModel for other agents
// Oracle uses its own default model (openai/gpt-5.2) from the factory singleton
test("Oracle uses first fallbackChain entry when no availableModels provided", async () => {
// #given - Oracle's first fallbackChain entry is openai/gpt-5.2
// #when
const agents = await createBuiltinAgents([], {}, undefined, TEST_DEFAULT_MODEL)
// #then - Oracle uses systemDefaultModel since model is now required
expect(agents.oracle.model).toBe("anthropic/claude-opus-4-5")
expect(agents.oracle.thinking).toEqual({ type: "enabled", budgetTokens: 32000 })
expect(agents.oracle.reasoningEffort).toBeUndefined()
// #then - Oracle first fallbackChain entry is openai/gpt-5.2
expect(agents.oracle.model).toBe("openai/gpt-5.2")
expect(agents.oracle.reasoningEffort).toBe("medium")
expect(agents.oracle.textVerbosity).toBe("high")
expect(agents.oracle.thinking).toBeUndefined()
})
test("Oracle with GPT model override has reasoningEffort, no thinking", async () => {

View File

@@ -256,7 +256,7 @@ describe("generateOmoConfig - model fallback system", () => {
// #when generating config
const result = generateOmoConfig(config)
// #then should use github-copilot sonnet models
// #then should use github-copilot sonnet models (copilot fallback)
expect((result.agents as Record<string, { model: string }>).Sisyphus.model).toBe("github-copilot/claude-sonnet-4.5")
})
@@ -318,8 +318,8 @@ describe("generateOmoConfig - model fallback system", () => {
// #then Sisyphus should use native OpenAI (fallback within native tier)
expect((result.agents as Record<string, { model: string }>).Sisyphus.model).toBe("openai/gpt-5.2")
// #then Oracle should use native OpenAI (primary for ultrabrain)
expect((result.agents as Record<string, { model: string }>).oracle.model).toBe("openai/gpt-5.2-codex")
// #then Oracle should use native OpenAI (first fallback entry)
expect((result.agents as Record<string, { model: string }>).oracle.model).toBe("openai/gpt-5.2")
// #then multimodal-looker should use native OpenAI (fallback within native tier)
expect((result.agents as Record<string, { model: string }>)["multimodal-looker"].model).toBe("openai/gpt-5.2")
})

View File

@@ -65,6 +65,17 @@ function isProviderAvailable(provider: string, avail: ProviderAvailability): boo
return mapping[provider] ?? false
}
/**
 * Rewrites a bare model name into the spelling a given provider expects.
 *
 * GitHub Copilot publishes the Claude 4.5 family with a period in the
 * version ("claude-opus-4.5"), while the fallback chains here use the
 * hyphenated Anthropic ids ("claude-opus-4-5"). All other providers get
 * the name back unchanged.
 *
 * @param provider - provider key, e.g. "github-copilot" or "anthropic"
 * @param model - bare model name without the provider prefix
 * @returns the model name adjusted for the provider's naming scheme
 */
function transformModelForProvider(provider: string, model: string): string {
  if (provider === "github-copilot") {
    // One regex covers opus/sonnet/haiku; the original chained four string
    // replaces, the last of which ("claude-sonnet-4" -> "claude-sonnet-4")
    // was an identity no-op and has been dropped.
    return model.replace(/claude-(opus|sonnet|haiku)-4-5/g, "claude-$1-4.5")
  }
  return model
}
function resolveModelFromChain(
fallbackChain: FallbackEntry[],
avail: ProviderAvailability
@@ -72,8 +83,9 @@ function resolveModelFromChain(
for (const entry of fallbackChain) {
for (const provider of entry.providers) {
if (isProviderAvailable(provider, avail)) {
const transformedModel = transformModelForProvider(provider, entry.model)
return {
model: `${provider}/${entry.model}`,
model: `${provider}/${transformedModel}`,
variant: entry.variant,
}
}

View File

@@ -25,6 +25,13 @@ import { log } from "./logger"
* fuzzyMatchModel("gpt-5.2", available) // → "openai/gpt-5.2"
* fuzzyMatchModel("claude", available, ["openai"]) // → null (provider filter excludes anthropic)
*/
/**
 * Canonicalises a model name for fuzzy comparison: lowercases it and
 * rewrites hyphenated Claude 4.5 ids ("claude-opus-4-5") into the period
 * form ("claude-opus-4.5"), so both spellings compare equal.
 *
 * @param name - model name, with or without a provider prefix
 * @returns the lowercased, period-normalized name
 */
function normalizeModelName(name: string): string {
  // The original chained a second replace whose pattern and replacement
  // were identical (claude-…-4.5 -> claude-…-4.5) — a no-op, removed here.
  // Lowercasing first means the single hyphen->period rewrite covers every
  // input spelling.
  return name
    .toLowerCase()
    .replace(/claude-(opus|sonnet|haiku)-4-5/g, "claude-$1-4.5")
}
export function fuzzyMatchModel(
target: string,
available: Set<string>,
@@ -37,7 +44,7 @@ export function fuzzyMatchModel(
return null
}
const targetLower = target.toLowerCase()
const targetNormalized = normalizeModelName(target)
// Filter by providers if specified
let candidates = Array.from(available)
@@ -55,19 +62,19 @@ export function fuzzyMatchModel(
return null
}
// Find all matches (case-insensitive substring match)
// Find all matches (case-insensitive substring match with normalization)
const matches = candidates.filter((model) =>
model.toLowerCase().includes(targetLower),
normalizeModelName(model).includes(targetNormalized),
)
log("[fuzzyMatchModel] substring matches", { targetLower, matchCount: matches.length, matches })
log("[fuzzyMatchModel] substring matches", { targetNormalized, matchCount: matches.length, matches })
if (matches.length === 0) {
return null
}
// Priority 1: Exact match
const exactMatch = matches.find((model) => model.toLowerCase() === targetLower)
// Priority 1: Exact match (normalized)
const exactMatch = matches.find((model) => normalizeModelName(model) === targetNormalized)
if (exactMatch) {
log("[fuzzyMatchModel] exact match found", { exactMatch })
return exactMatch

View File

@@ -206,10 +206,11 @@ describe("resolveModelWithFallback", () => {
// #then
expect(result.model).toBe("github-copilot/claude-opus-4-5-preview")
expect(result.source).toBe("provider-fallback")
expect(logSpy).toHaveBeenCalledWith("Model resolved via fallback chain", {
expect(logSpy).toHaveBeenCalledWith("Model resolved via fallback chain (availability confirmed)", {
provider: "github-copilot",
model: "claude-opus-4-5",
match: "github-copilot/claude-opus-4-5-preview",
variant: undefined,
})
})
@@ -315,8 +316,8 @@ describe("resolveModelWithFallback", () => {
})
})
describe("Step 3: System default", () => {
test("returns systemDefaultModel with system-default source when nothing matches", () => {
describe("Step 3: First fallback entry (no availability match)", () => {
test("returns first fallbackChain entry when no availability match found", () => {
// #given
const input: ExtendedModelResolutionInput = {
fallbackChain: [
@@ -330,12 +331,12 @@ describe("resolveModelWithFallback", () => {
const result = resolveModelWithFallback(input)
// #then
expect(result.model).toBe("google/gemini-3-pro")
expect(result.source).toBe("system-default")
expect(logSpy).toHaveBeenCalledWith("Model resolved via system default", { model: "google/gemini-3-pro" })
expect(result.model).toBe("anthropic/nonexistent-model")
expect(result.source).toBe("provider-fallback")
expect(logSpy).toHaveBeenCalledWith("Model resolved via fallback chain first entry (no availability match)", { model: "anthropic/nonexistent-model", variant: undefined })
})
test("returns system default when availableModels is empty", () => {
test("returns first fallbackChain entry when availableModels is empty", () => {
// #given
const input: ExtendedModelResolutionInput = {
fallbackChain: [
@@ -349,8 +350,8 @@ describe("resolveModelWithFallback", () => {
const result = resolveModelWithFallback(input)
// #then
expect(result.model).toBe("google/gemini-3-pro")
expect(result.source).toBe("system-default")
expect(result.model).toBe("anthropic/claude-opus-4-5")
expect(result.source).toBe("provider-fallback")
})
test("returns system default when fallbackChain is not provided", () => {
@@ -430,7 +431,7 @@ describe("resolveModelWithFallback", () => {
expect(result.source).toBe("provider-fallback")
})
test("falls through all entries to system default when none match", () => {
test("falls through to first fallbackChain entry when none match availability", () => {
// #given
const availableModels = new Set(["other/model"])
@@ -446,8 +447,8 @@ describe("resolveModelWithFallback", () => {
})
// #then
expect(result.model).toBe("system/default")
expect(result.source).toBe("system-default")
expect(result.model).toBe("openai/gpt-5.2")
expect(result.source).toBe("provider-fallback")
})
})

View File

@@ -16,6 +16,7 @@ export type ModelSource =
export type ModelResolutionResult = {
model: string
source: ModelSource
variant?: string
}
export type ExtendedModelResolutionInput = {
@@ -57,8 +58,8 @@ export function resolveModelWithFallback(
const fullModel = `${provider}/${entry.model}`
const match = fuzzyMatchModel(fullModel, availableModels, [provider])
if (match) {
log("Model resolved via fallback chain (availability confirmed)", { provider, model: entry.model, match })
return { model: match, source: "provider-fallback" }
log("Model resolved via fallback chain (availability confirmed)", { provider, model: entry.model, match, variant: entry.variant })
return { model: match, source: "provider-fallback", variant: entry.variant }
}
}
}
@@ -68,8 +69,8 @@ export function resolveModelWithFallback(
const firstEntry = fallbackChain[0]
if (firstEntry.providers.length > 0) {
const fallbackModel = `${firstEntry.providers[0]}/${firstEntry.model}`
log("Model resolved via fallback chain first entry (no availability match)", { model: fallbackModel })
return { model: fallbackModel, source: "provider-fallback" }
log("Model resolved via fallback chain first entry (no availability match)", { model: fallbackModel, variant: firstEntry.variant })
return { model: fallbackModel, source: "provider-fallback", variant: firstEntry.variant }
}
}

View File

@@ -511,8 +511,8 @@ To resume this session: resume="${args.resume}"`
if (!requirement) {
actualModel = resolved.model
modelInfo = { model: actualModel, type: "system-default", source: "system-default" }
} else {
const { model: resolvedModel, source } = resolveModelWithFallback({
} else {
const { model: resolvedModel, source, variant: resolvedVariant } = resolveModelWithFallback({
userModel: userCategories?.[args.category]?.model,
fallbackChain: requirement.fallbackChain,
availableModels,
@@ -539,15 +539,19 @@ To resume this session: resume="${args.resume}"`
}
modelInfo = { model: actualModel, type, source }
const parsedModel = parseModelString(actualModel)
const variantToUse = userCategories?.[args.category]?.variant ?? resolvedVariant
categoryModel = parsedModel
? (variantToUse ? { ...parsedModel, variant: variantToUse } : parsedModel)
: undefined
}
agentToUse = SISYPHUS_JUNIOR_AGENT
const parsedModel = parseModelString(actualModel)
categoryModel = parsedModel
? (requirement?.variant
? { ...parsedModel, variant: requirement.variant }
: parsedModel)
: undefined
if (!categoryModel) {
const parsedModel = parseModelString(actualModel)
categoryModel = parsedModel ?? undefined
}
categoryPromptAppend = resolved.promptAppend || undefined
const isUnstableAgent = resolved.config.is_unstable_agent === true || actualModel.toLowerCase().includes("gemini")