Compare commits

...

1 Commit

Author: YeonGyu-Kim
SHA1: b2a9d8a9e0
Message: feat(multimodal-looker): prioritize gpt-5.3-codex medium over gemini in fallback chain
Date: 2026-03-04 11:21:44 +09:00
5 changed files with 34 additions and 19 deletions
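The change reorders every multimodal-looker fallback chain so that a gpt-5.3-codex entry with variant "medium" sits ahead of the gemini-3-flash entry. For orientation, here is a minimal TypeScript sketch of the entry shape and of how a resolver might walk such a chain; the fields mirror the entries in the diffs below, but the FallbackEntry name, the single-field ModelRequirement, and the pickModel helper are illustrative assumptions, not the repository's actual API.

// Sketch only: FallbackEntry and pickModel are hypothetical names; ModelRequirement
// appears in the diffs below but is simplified here to the one field we need.
interface FallbackEntry {
  providers: string[] // acceptable providers, in preference order
  model: string // model id to request from the matched provider
  variant?: string // optional variant such as "medium"
}

interface ModelRequirement {
  fallbackChain: FallbackEntry[]
}

// Walk the chain top-down and return "<provider>/<model>" for the first
// entry whose provider is actually configured.
function pickModel(
  req: ModelRequirement,
  available: Set<string>,
): { model: string; variant?: string } | undefined {
  for (const entry of req.fallbackChain) {
    const provider = entry.providers.find((p) => available.has(p))
    if (provider) return { model: `${provider}/${entry.model}`, variant: entry.variant }
  }
  return undefined
}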

View File

@@ -212,7 +212,8 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"variant": "medium",
},
"multimodal-looker": {
"model": "openai/gpt-5.2",
"model": "openai/gpt-5.3-codex",
"variant": "medium",
},
"oracle": {
"model": "openai/gpt-5.2",
@@ -279,7 +280,8 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"variant": "medium",
},
"multimodal-looker": {
"model": "openai/gpt-5.2",
"model": "openai/gpt-5.3-codex",
"variant": "medium",
},
"oracle": {
"model": "openai/gpt-5.2",
@@ -468,7 +470,8 @@ exports[`generateModelConfig all native providers uses preferred models from fal
"variant": "medium",
},
"multimodal-looker": {
"model": "google/gemini-3-flash-preview",
"model": "openai/gpt-5.3-codex",
"variant": "medium",
},
"oracle": {
"model": "openai/gpt-5.2",
@@ -542,7 +545,8 @@ exports[`generateModelConfig all native providers uses preferred models with isM
"variant": "medium",
},
"multimodal-looker": {
"model": "google/gemini-3-flash-preview",
"model": "openai/gpt-5.3-codex",
"variant": "medium",
},
"oracle": {
"model": "openai/gpt-5.2",
@@ -617,7 +621,8 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models when on
"variant": "medium",
},
"multimodal-looker": {
"model": "opencode/gemini-3-flash",
"model": "opencode/gpt-5.3-codex",
"variant": "medium",
},
"oracle": {
"model": "opencode/gpt-5.2",
@@ -691,7 +696,8 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models with is
"variant": "medium",
},
"multimodal-looker": {
"model": "opencode/gemini-3-flash",
"model": "opencode/gpt-5.3-codex",
"variant": "medium",
},
"oracle": {
"model": "opencode/gpt-5.2",
@@ -1009,7 +1015,8 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + OpenCode Zen
"variant": "medium",
},
"multimodal-looker": {
"model": "opencode/gemini-3-flash",
"model": "opencode/gpt-5.3-codex",
"variant": "medium",
},
"oracle": {
"model": "opencode/gpt-5.2",
@@ -1083,7 +1090,8 @@ exports[`generateModelConfig mixed provider scenarios uses OpenAI + Copilot comb
"variant": "medium",
},
"multimodal-looker": {
"model": "github-copilot/gemini-3-flash-preview",
"model": "openai/gpt-5.3-codex",
"variant": "medium",
},
"oracle": {
"model": "openai/gpt-5.2",
@@ -1284,7 +1292,8 @@ exports[`generateModelConfig mixed provider scenarios uses all fallback provider
"variant": "medium",
},
"multimodal-looker": {
"model": "github-copilot/gemini-3-flash-preview",
"model": "opencode/gpt-5.3-codex",
"variant": "medium",
},
"oracle": {
"model": "github-copilot/gpt-5.2",
@@ -1358,7 +1367,8 @@ exports[`generateModelConfig mixed provider scenarios uses all providers togethe
"variant": "medium",
},
"multimodal-looker": {
"model": "google/gemini-3-flash-preview",
"model": "openai/gpt-5.3-codex",
"variant": "medium",
},
"oracle": {
"model": "openai/gpt-5.2",
@@ -1432,7 +1442,8 @@ exports[`generateModelConfig mixed provider scenarios uses all providers with is
"variant": "medium",
},
"multimodal-looker": {
"model": "google/gemini-3-flash-preview",
"model": "openai/gpt-5.3-codex",
"variant": "medium",
},
"oracle": {
"model": "openai/gpt-5.2",

View File

@@ -324,7 +324,7 @@ describe("generateOmoConfig - model fallback system", () => {
// #then Oracle should use native OpenAI (first fallback entry)
expect((result.agents as Record<string, { model: string }>).oracle.model).toBe("openai/gpt-5.2")
// #then multimodal-looker should use native OpenAI (fallback within native tier)
- expect((result.agents as Record<string, { model: string }>)["multimodal-looker"].model).toBe("openai/gpt-5.2")
+ expect((result.agents as Record<string, { model: string }>)["multimodal-looker"].model).toBe("openai/gpt-5.3-codex")
})
test("uses haiku for explore when Claude max20", () => {

View File

@@ -44,8 +44,8 @@ export const CLI_AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
},
"multimodal-looker": {
fallbackChain: [
{ providers: ["openai", "opencode"], model: "gpt-5.3-codex", variant: "medium" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
{ providers: ["zai-coding-plan"], model: "glm-4.6v" },
{ providers: ["kimi-for-coding"], model: "k2p5" },
{ providers: ["opencode"], model: "kimi-k2.5-free" },

View File

@@ -86,19 +86,23 @@ describe("AGENT_MODEL_REQUIREMENTS", () => {
expect(quaternary.model).toBe("gpt-5-nano")
})
test("multimodal-looker has valid fallbackChain with kimi-k2.5-free as primary", () => {
test("multimodal-looker has valid fallbackChain with gpt-5.3-codex as primary", () => {
// given - multimodal-looker agent requirement
const multimodalLooker = AGENT_MODEL_REQUIREMENTS["multimodal-looker"]
// when - accessing multimodal-looker requirement
- // then - fallbackChain exists with kimi-k2.5-free first, gpt-5-nano last
+ // then - fallbackChain exists with gpt-5.3-codex first, gemini second, gpt-5-nano last
expect(multimodalLooker).toBeDefined()
expect(multimodalLooker.fallbackChain).toBeArray()
expect(multimodalLooker.fallbackChain).toHaveLength(5)
const primary = multimodalLooker.fallbackChain[0]
- expect(primary.providers[0]).toBe("opencode")
- expect(primary.model).toBe("kimi-k2.5-free")
+ expect(primary.providers).toEqual(["openai", "opencode"])
+ expect(primary.model).toBe("gpt-5.3-codex")
+ expect(primary.variant).toBe("medium")
+ const secondary = multimodalLooker.fallbackChain[1]
+ expect(secondary.model).toBe("gemini-3-flash")
const last = multimodalLooker.fallbackChain[4]
expect(last.providers).toEqual(["openai", "github-copilot", "opencode"])

View File

@@ -53,9 +53,9 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
},
"multimodal-looker": {
fallbackChain: [
{ providers: ["opencode"], model: "kimi-k2.5-free" },
{ providers: ["openai", "opencode"], model: "gpt-5.3-codex", variant: "medium" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
{ providers: ["opencode"], model: "kimi-k2.5-free" },
{ providers: ["zai-coding-plan"], model: "glm-4.6v" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5-nano" },
],
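Read against the resolution sketch added after the commit summary, the reordered chain above makes the prioritization concrete. The chain literal below copies only the first two entries of this hunk; pickModel and ModelRequirement are the hypothetical helpers from that sketch, and the provider-specific model-id mapping the real code evidently applies (the snapshots show gemini-3-flash-preview for google and github-copilot) is not modeled here.

// Continuing the earlier sketch; first two entries of the reordered chain,
// later entries omitted for brevity.
const multimodalLooker: ModelRequirement = {
  fallbackChain: [
    { providers: ["openai", "opencode"], model: "gpt-5.3-codex", variant: "medium" },
    { providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
  ],
}

pickModel(multimodalLooker, new Set(["openai", "google"]))
// -> { model: "openai/gpt-5.3-codex", variant: "medium" }
//    (before this commit the gemini entry won here, as the snapshot change from
//    google/gemini-3-flash-preview shows)

pickModel(multimodalLooker, new Set(["github-copilot"]))
// -> { model: "github-copilot/gemini-3-flash" } under this sketch; the real config
//    renders the id as gemini-3-flash-preview for github-copilot per the snapshots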