feat(model-requirements): set multimodal-looker primary model to gpt-5.4 medium

Change multimodal-looker's primary model from gpt-5.3-codex to gpt-5.4 (medium variant)
in both the runtime and CLI fallback chains.

Changes:
- Runtime chain (src/shared/model-requirements.ts): primary now gpt-5.4
- CLI chain (src/cli/model-fallback-requirements.ts): primary now gpt-5.4
- Updated test expectations in model-requirements.test.ts
- Updated config-manager.test.ts assertion
- Updated model-fallback snapshots
This commit is contained in:
YeonGyu-Kim
2026-03-08 01:53:30 +09:00
parent 60bc9a7609
commit 01efda454f
5 changed files with 18 additions and 20 deletions

View File

@@ -213,7 +213,7 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"variant": "xhigh",
},
"multimodal-looker": {
"model": "openai/gpt-5.3-codex",
"model": "openai/gpt-5.4",
"variant": "medium",
},
"oracle": {
@@ -286,7 +286,7 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"variant": "xhigh",
},
"multimodal-looker": {
"model": "openai/gpt-5.3-codex",
"model": "openai/gpt-5.4",
"variant": "medium",
},
"oracle": {
@@ -480,7 +480,7 @@ exports[`generateModelConfig all native providers uses preferred models from fal
"variant": "xhigh",
},
"multimodal-looker": {
"model": "openai/gpt-5.3-codex",
"model": "openai/gpt-5.4",
"variant": "medium",
},
"oracle": {
@@ -555,7 +555,7 @@ exports[`generateModelConfig all native providers uses preferred models with isM
"variant": "xhigh",
},
"multimodal-looker": {
"model": "openai/gpt-5.3-codex",
"model": "openai/gpt-5.4",
"variant": "medium",
},
"oracle": {
@@ -631,7 +631,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models when on
"variant": "xhigh",
},
"multimodal-looker": {
"model": "opencode/gpt-5.3-codex",
"model": "opencode/gpt-5.4",
"variant": "medium",
},
"oracle": {
@@ -706,7 +706,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models with is
"variant": "xhigh",
},
"multimodal-looker": {
"model": "opencode/gpt-5.3-codex",
"model": "opencode/gpt-5.4",
"variant": "medium",
},
"oracle": {
@@ -1025,7 +1025,7 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + OpenCode Zen
"variant": "xhigh",
},
"multimodal-looker": {
"model": "opencode/gpt-5.3-codex",
"model": "opencode/gpt-5.4",
"variant": "medium",
},
"oracle": {
@@ -1100,7 +1100,7 @@ exports[`generateModelConfig mixed provider scenarios uses OpenAI + Copilot comb
"variant": "xhigh",
},
"multimodal-looker": {
"model": "openai/gpt-5.3-codex",
"model": "openai/gpt-5.4",
"variant": "medium",
},
"oracle": {
@@ -1302,7 +1302,7 @@ exports[`generateModelConfig mixed provider scenarios uses all fallback provider
"variant": "xhigh",
},
"multimodal-looker": {
"model": "opencode/gpt-5.3-codex",
"model": "opencode/gpt-5.4",
"variant": "medium",
},
"oracle": {
@@ -1377,7 +1377,7 @@ exports[`generateModelConfig mixed provider scenarios uses all providers togethe
"variant": "xhigh",
},
"multimodal-looker": {
"model": "openai/gpt-5.3-codex",
"model": "openai/gpt-5.4",
"variant": "medium",
},
"oracle": {
@@ -1452,7 +1452,7 @@ exports[`generateModelConfig mixed provider scenarios uses all providers with is
"variant": "xhigh",
},
"multimodal-looker": {
"model": "openai/gpt-5.3-codex",
"model": "openai/gpt-5.4",
"variant": "medium",
},
"oracle": {

View File

@@ -254,8 +254,8 @@ describe("generateOmoConfig - model fallback system", () => {
expect((result.agents as Record<string, { model: string; variant?: string }>).sisyphus.variant).toBe("medium")
// #then Oracle should use native OpenAI (first fallback entry)
expect((result.agents as Record<string, { model: string }>).oracle.model).toBe("openai/gpt-5.4")
// #then multimodal-looker should use native OpenAI (first fallback entry is gpt-5.3-codex)
expect((result.agents as Record<string, { model: string }>)["multimodal-looker"].model).toBe("openai/gpt-5.3-codex")
// #then multimodal-looker should use native OpenAI (first fallback entry is gpt-5.4)
expect((result.agents as Record<string, { model: string }>)["multimodal-looker"].model).toBe("openai/gpt-5.4")
})
test("uses haiku for explore when Claude max20", () => {

View File

@@ -68,7 +68,7 @@ export const CLI_AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
fallbackChain: [
{
providers: ["openai", "opencode"],
model: "gpt-5.3-codex",
model: "gpt-5.4",
variant: "medium",
},
{ providers: ["kimi-for-coding"], model: "k2p5" },

View File

@@ -98,28 +98,26 @@ describe("AGENT_MODEL_REQUIREMENTS", () => {
expect(quaternary.model).toBe("gpt-5-nano")
})
test("multimodal-looker has valid fallbackChain with gpt-5.3-codex as primary", () => {
test("multimodal-looker has valid fallbackChain with gpt-5.4 as primary", () => {
// given - multimodal-looker agent requirement
const multimodalLooker = AGENT_MODEL_REQUIREMENTS["multimodal-looker"]
// when - accessing multimodal-looker requirement
// then - fallbackChain: gpt-5.3-codex -> k2p5 -> gemini-3-flash -> glm-4.6v -> gpt-5-nano
// then - fallbackChain: gpt-5.4 -> k2p5 -> gemini-3-flash -> glm-4.6v -> gpt-5-nano
expect(multimodalLooker).toBeDefined()
expect(multimodalLooker.fallbackChain).toBeArray()
expect(multimodalLooker.fallbackChain).toHaveLength(5)
const primary = multimodalLooker.fallbackChain[0]
expect(primary.providers).toEqual(["openai", "opencode"])
expect(primary.model).toBe("gpt-5.3-codex")
expect(primary.model).toBe("gpt-5.4")
expect(primary.variant).toBe("medium")
const secondary = multimodalLooker.fallbackChain[1]
expect(secondary.providers).toEqual(["kimi-for-coding"])
expect(secondary.model).toBe("k2p5")
const tertiary = multimodalLooker.fallbackChain[2]
expect(tertiary.model).toBe("gemini-3-flash")
const last = multimodalLooker.fallbackChain[4]
expect(last.providers).toEqual(["openai", "github-copilot", "opencode"])
expect(last.model).toBe("gpt-5-nano")

View File

@@ -90,7 +90,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
fallbackChain: [
{
providers: ["openai", "opencode"],
model: "gpt-5.3-codex",
model: "gpt-5.4",
variant: "medium",
},
{ providers: ["kimi-for-coding"], model: "k2p5" },