refactor(models): update agent/category fallback chains

- quick: replace openai fallback with opencode/grok-code
- writing: add zai-coding-plan/glm-4.7 between sonnet and gpt
- unspecified-low: gpt-5.2 → gpt-5.2-codex (medium)
- Sisyphus: add zai-coding-plan/glm-4.7 before openai, use gpt-5.2-codex (medium)
- Momus & Metis: add variant 'max' to gemini-3-pro-preview
- explore: simplify to haiku (anthropic/opencode) → grok-code (opencode)
This commit is contained in:
justsisyphus
2026-01-23 15:07:58 +09:00
parent 57b10439a4
commit c6d6bd197e
6 changed files with 60 additions and 52 deletions

View File

@@ -80,7 +80,7 @@ exports[`generateModelConfig single native provider uses Claude models when only
"model": "anthropic/claude-sonnet-4-5",
},
"explore": {
"model": "opencode/grok-code",
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "anthropic/claude-sonnet-4-5",
@@ -230,17 +230,19 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"model": "openai/gpt-5.2",
},
"quick": {
"model": "openai/gpt-5.1-codex-mini",
"model": "opencode/glm-4.7-free",
},
"ultrabrain": {
"model": "openai/gpt-5.2-codex",
"variant": "xhigh",
},
"unspecified-high": {
"model": "openai/gpt-5.2",
"model": "openai/gpt-5.2-codex",
"variant": "medium",
},
"unspecified-low": {
"model": "openai/gpt-5.2",
"model": "openai/gpt-5.2-codex",
"variant": "medium",
},
"visual-engineering": {
"model": "openai/gpt-5.2",
@@ -273,8 +275,8 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"variant": "high",
},
"Sisyphus": {
"model": "openai/gpt-5.2",
"variant": "high",
"model": "openai/gpt-5.2-codex",
"variant": "medium",
},
"explore": {
"model": "opencode/grok-code",
@@ -295,7 +297,7 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"model": "openai/gpt-5.2",
},
"quick": {
"model": "openai/gpt-5.1-codex-mini",
"model": "opencode/glm-4.7-free",
},
"ultrabrain": {
"model": "openai/gpt-5.2-codex",
@@ -306,7 +308,8 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"variant": "high",
},
"unspecified-low": {
"model": "openai/gpt-5.2",
"model": "openai/gpt-5.2-codex",
"variant": "medium",
},
"visual-engineering": {
"model": "openai/gpt-5.2",
@@ -328,9 +331,11 @@ exports[`generateModelConfig single native provider uses Gemini models when only
},
"Metis (Plan Consultant)": {
"model": "google/gemini-3-pro-preview",
"variant": "max",
},
"Momus (Plan Reviewer)": {
"model": "google/gemini-3-pro-preview",
"variant": "max",
},
"Prometheus (Planner)": {
"model": "google/gemini-3-pro-preview",
@@ -339,7 +344,7 @@ exports[`generateModelConfig single native provider uses Gemini models when only
"model": "google/gemini-3-pro-preview",
},
"explore": {
"model": "google/gemini-3-flash-preview",
"model": "opencode/grok-code",
},
"librarian": {
"model": "opencode/glm-4.7-free",
@@ -387,9 +392,11 @@ exports[`generateModelConfig single native provider uses Gemini models with isMa
},
"Metis (Plan Consultant)": {
"model": "google/gemini-3-pro-preview",
"variant": "max",
},
"Momus (Plan Reviewer)": {
"model": "google/gemini-3-pro-preview",
"variant": "max",
},
"Prometheus (Planner)": {
"model": "google/gemini-3-pro-preview",
@@ -398,7 +405,7 @@ exports[`generateModelConfig single native provider uses Gemini models with isMa
"model": "google/gemini-3-pro-preview",
},
"explore": {
"model": "google/gemini-3-flash-preview",
"model": "opencode/grok-code",
},
"librarian": {
"model": "opencode/glm-4.7-free",
@@ -460,7 +467,7 @@ exports[`generateModelConfig all native providers uses preferred models from fal
"model": "anthropic/claude-sonnet-4-5",
},
"explore": {
"model": "google/gemini-3-flash-preview",
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "anthropic/claude-sonnet-4-5",
@@ -525,7 +532,7 @@ exports[`generateModelConfig all native providers uses preferred models with isM
"variant": "max",
},
"explore": {
"model": "google/gemini-3-flash-preview",
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "anthropic/claude-sonnet-4-5",
@@ -590,7 +597,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models when on
"model": "opencode/claude-sonnet-4-5",
},
"explore": {
"model": "opencode/grok-code",
"model": "opencode/claude-haiku-4-5",
},
"librarian": {
"model": "opencode/glm-4.7-free",
@@ -655,7 +662,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models with is
"variant": "max",
},
"explore": {
"model": "opencode/grok-code",
"model": "opencode/claude-haiku-4-5",
},
"librarian": {
"model": "opencode/glm-4.7-free",
@@ -879,7 +886,7 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian whe
"model": "opencode/glm-4.7-free",
},
"writing": {
"model": "opencode/glm-4.7-free",
"model": "zai-coding-plan/glm-4.7",
},
},
}
@@ -902,7 +909,7 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian wit
"model": "opencode/glm-4.7-free",
},
"Sisyphus": {
"model": "opencode/glm-4.7-free",
"model": "zai-coding-plan/glm-4.7",
},
"explore": {
"model": "opencode/grok-code",
@@ -937,7 +944,7 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian wit
"model": "opencode/glm-4.7-free",
},
"writing": {
"model": "opencode/glm-4.7-free",
"model": "zai-coding-plan/glm-4.7",
},
},
}
@@ -966,7 +973,7 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + OpenCode Zen
"model": "anthropic/claude-sonnet-4-5",
},
"explore": {
"model": "opencode/grok-code",
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "opencode/glm-4.7-free",
@@ -1093,7 +1100,7 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + ZAI combinat
"model": "anthropic/claude-sonnet-4-5",
},
"explore": {
"model": "opencode/grok-code",
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "zai-coding-plan/glm-4.7",
@@ -1157,7 +1164,7 @@ exports[`generateModelConfig mixed provider scenarios uses Gemini + Claude combi
"model": "anthropic/claude-sonnet-4-5",
},
"explore": {
"model": "google/gemini-3-flash-preview",
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "anthropic/claude-sonnet-4-5",
@@ -1221,7 +1228,7 @@ exports[`generateModelConfig mixed provider scenarios uses all fallback provider
"model": "github-copilot/claude-sonnet-4.5",
},
"explore": {
"model": "opencode/grok-code",
"model": "opencode/claude-haiku-4-5",
},
"librarian": {
"model": "zai-coding-plan/glm-4.7",
@@ -1285,7 +1292,7 @@ exports[`generateModelConfig mixed provider scenarios uses all providers togethe
"model": "anthropic/claude-sonnet-4-5",
},
"explore": {
"model": "google/gemini-3-flash-preview",
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "zai-coding-plan/glm-4.7",
@@ -1350,7 +1357,7 @@ exports[`generateModelConfig mixed provider scenarios uses all providers with is
"variant": "max",
},
"explore": {
"model": "google/gemini-3-flash-preview",
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "zai-coding-plan/glm-4.7",

View File

@@ -343,7 +343,7 @@ describe("generateOmoConfig - model fallback system", () => {
expect((result.agents as Record<string, { model: string }>).explore.model).toBe("anthropic/claude-haiku-4-5")
})
test("uses grok-code for explore when not max20", () => {
test("uses haiku for explore regardless of max20 flag", () => {
// #given user has Claude but not max20
const config: InstallConfig = {
hasClaude: true,
@@ -358,7 +358,7 @@ describe("generateOmoConfig - model fallback system", () => {
// #when generating config
const result = generateOmoConfig(config)
// #then explore should use grok-code (preserve Claude quota)
expect((result.agents as Record<string, { model: string }>).explore.model).toBe("opencode/grok-code")
// #then explore should use haiku (isMax20 doesn't affect explore anymore)
expect((result.agents as Record<string, { model: string }>).explore.model).toBe("anthropic/claude-haiku-4-5")
})
})

View File

@@ -310,19 +310,19 @@ describe("generateModelConfig", () => {
})
describe("explore agent special cases", () => {
test("explore uses Gemini flash when Gemini available", () => {
// #given Gemini is available
test("explore uses grok-code when only Gemini available (no Claude)", () => {
// #given only Gemini is available (no Claude)
const config = createConfig({ hasGemini: true })
// #when generateModelConfig is called
const result = generateModelConfig(config)
// #then explore should use gemini-3-flash-preview
expect(result.agents?.explore?.model).toBe("google/gemini-3-flash-preview")
// #then explore should use grok-code (Claude haiku not available)
expect(result.agents?.explore?.model).toBe("opencode/grok-code")
})
test("explore uses Claude haiku when Claude + isMax20 but no Gemini", () => {
// #given Claude is available with Max 20 plan but no Gemini
test("explore uses Claude haiku when Claude available", () => {
// #given Claude is available
const config = createConfig({ hasClaude: true, isMax20: true })
// #when generateModelConfig is called
@@ -332,15 +332,15 @@ describe("generateModelConfig", () => {
expect(result.agents?.explore?.model).toBe("anthropic/claude-haiku-4-5")
})
test("explore uses grok-code when Claude without isMax20 and no Gemini", () => {
// #given Claude is available without Max 20 plan and no Gemini
test("explore uses Claude haiku regardless of isMax20 flag", () => {
// #given Claude is available without Max 20 plan
const config = createConfig({ hasClaude: true, isMax20: false })
// #when generateModelConfig is called
const result = generateModelConfig(config)
// #then explore should use grok-code
expect(result.agents?.explore?.model).toBe("opencode/grok-code")
// #then explore should use claude-haiku-4-5 (isMax20 doesn't affect explore)
expect(result.agents?.explore?.model).toBe("anthropic/claude-haiku-4-5")
})
test("explore uses grok-code when only OpenAI available", () => {

View File

@@ -139,12 +139,12 @@ export function generateModelConfig(config: InstallConfig): GeneratedOmoConfig {
continue
}
// Special case: explore has custom Gemini → Claude → Grok logic
// Special case: explore uses Claude haiku → OpenCode grok-code
if (role === "explore") {
if (avail.native.gemini) {
agents[role] = { model: "google/gemini-3-flash-preview" }
} else if (avail.native.claude && avail.isMaxPlan) {
if (avail.native.claude) {
agents[role] = { model: "anthropic/claude-haiku-4-5" }
} else if (avail.opencodeZen) {
agents[role] = { model: "opencode/claude-haiku-4-5" }
} else {
agents[role] = { model: "opencode/grok-code" }
}

View File

@@ -54,19 +54,19 @@ describe("AGENT_MODEL_REQUIREMENTS", () => {
expect(primary.model).toBe("glm-4.7")
})
test("explore has valid fallbackChain with gemini-3-flash-preview as primary", () => {
test("explore has valid fallbackChain with claude-haiku-4-5 as primary", () => {
// #given - explore agent requirement
const explore = AGENT_MODEL_REQUIREMENTS["explore"]
// #when - accessing explore requirement
// #then - fallbackChain exists with gemini-3-flash-preview as first entry
// #then - fallbackChain exists with claude-haiku-4-5 as first entry
expect(explore).toBeDefined()
expect(explore.fallbackChain).toBeArray()
expect(explore.fallbackChain.length).toBeGreaterThan(0)
const primary = explore.fallbackChain[0]
expect(primary.providers).toContain("google")
expect(primary.model).toBe("gemini-3-flash-preview")
expect(primary.providers).toContain("anthropic")
expect(primary.model).toBe("claude-haiku-4-5")
})
test("multimodal-looker has valid fallbackChain with gemini-3-flash-preview as primary", () => {

View File

@@ -13,7 +13,8 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
Sisyphus: {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" },
{ providers: ["zai-coding-plan"], model: "glm-4.7" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2-codex", variant: "medium" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview" },
],
},
@@ -33,9 +34,8 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
},
explore: {
fallbackChain: [
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash-preview" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-haiku-4-5" },
{ providers: ["opencode", "github-copilot"], model: "grok-code" },
{ providers: ["anthropic", "opencode"], model: "claude-haiku-4-5" },
{ providers: ["opencode"], model: "grok-code" },
],
},
"multimodal-looker": {
@@ -56,14 +56,14 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview", variant: "max" },
],
},
"Momus (Plan Reviewer)": {
fallbackChain: [
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "medium" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview", variant: "max" },
],
},
Atlas: {
@@ -101,13 +101,13 @@ export const CATEGORY_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-haiku-4-5" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash-preview" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.1-codex-mini" },
{ providers: ["opencode"], model: "grok-code" },
],
},
"unspecified-low": {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-5" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2-codex", variant: "medium" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash-preview" },
],
},
@@ -122,6 +122,7 @@ export const CATEGORY_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
fallbackChain: [
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash-preview" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-5" },
{ providers: ["zai-coding-plan"], model: "glm-4.7" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
],
},