feat(model-requirements): prefer GPT-5.4 and glm-5 in agent fallback chains

Align Prometheus, Momus, and Atlas with newer GPT-5.4 fallback tiers — raising Momus's reasoning variant from medium to xhigh and pinning the Atlas GPT-5.4 fallback at variant medium — and replace Sisyphus install-time GLM-4.7 fallbacks with GLM-5 only.

Ultraworked with [Sisyphus](https://github.com/code-yeongyu/oh-my-opencode)

Co-authored-by: Sisyphus <clio-agent@sisyphuslabs.ai>
This commit is contained in:
YeonGyu-Kim
2026-03-06 17:43:48 +09:00
parent 7fe44024c0
commit 7718969317
4 changed files with 51 additions and 46 deletions

View File

@@ -191,7 +191,8 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/dev/assets/oh-my-opencode.schema.json", "$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/dev/assets/oh-my-opencode.schema.json",
"agents": { "agents": {
"atlas": { "atlas": {
"model": "openai/gpt-5.2", "model": "openai/gpt-5.4",
"variant": "medium",
}, },
"explore": { "explore": {
"model": "opencode/gpt-5-nano", "model": "opencode/gpt-5-nano",
@@ -208,8 +209,8 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"variant": "high", "variant": "high",
}, },
"momus": { "momus": {
"model": "openai/gpt-5.2", "model": "openai/gpt-5.4",
"variant": "medium", "variant": "xhigh",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "openai/gpt-5.3-codex", "model": "openai/gpt-5.3-codex",
@@ -220,7 +221,7 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"variant": "high", "variant": "high",
}, },
"prometheus": { "prometheus": {
"model": "openai/gpt-5.2", "model": "openai/gpt-5.4",
"variant": "high", "variant": "high",
}, },
}, },
@@ -259,7 +260,8 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/dev/assets/oh-my-opencode.schema.json", "$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/dev/assets/oh-my-opencode.schema.json",
"agents": { "agents": {
"atlas": { "atlas": {
"model": "openai/gpt-5.2", "model": "openai/gpt-5.4",
"variant": "medium",
}, },
"explore": { "explore": {
"model": "opencode/gpt-5-nano", "model": "opencode/gpt-5-nano",
@@ -276,8 +278,8 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"variant": "high", "variant": "high",
}, },
"momus": { "momus": {
"model": "openai/gpt-5.2", "model": "openai/gpt-5.4",
"variant": "medium", "variant": "xhigh",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "openai/gpt-5.3-codex", "model": "openai/gpt-5.3-codex",
@@ -288,7 +290,7 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"variant": "high", "variant": "high",
}, },
"prometheus": { "prometheus": {
"model": "openai/gpt-5.2", "model": "openai/gpt-5.4",
"variant": "high", "variant": "high",
}, },
}, },
@@ -466,8 +468,8 @@ exports[`generateModelConfig all native providers uses preferred models from fal
"variant": "max", "variant": "max",
}, },
"momus": { "momus": {
"model": "openai/gpt-5.2", "model": "openai/gpt-5.4",
"variant": "medium", "variant": "xhigh",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "openai/gpt-5.3-codex", "model": "openai/gpt-5.3-codex",
@@ -541,8 +543,8 @@ exports[`generateModelConfig all native providers uses preferred models with isM
"variant": "max", "variant": "max",
}, },
"momus": { "momus": {
"model": "openai/gpt-5.2", "model": "openai/gpt-5.4",
"variant": "medium", "variant": "xhigh",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "openai/gpt-5.3-codex", "model": "openai/gpt-5.3-codex",
@@ -617,8 +619,8 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models when on
"variant": "max", "variant": "max",
}, },
"momus": { "momus": {
"model": "opencode/gpt-5.2", "model": "opencode/gpt-5.4",
"variant": "medium", "variant": "xhigh",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "opencode/gpt-5.3-codex", "model": "opencode/gpt-5.3-codex",
@@ -692,8 +694,8 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models with is
"variant": "max", "variant": "max",
}, },
"momus": { "momus": {
"model": "opencode/gpt-5.2", "model": "opencode/gpt-5.4",
"variant": "medium", "variant": "xhigh",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "opencode/gpt-5.3-codex", "model": "opencode/gpt-5.3-codex",
@@ -764,8 +766,8 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models when
"variant": "max", "variant": "max",
}, },
"momus": { "momus": {
"model": "github-copilot/gpt-5.2", "model": "github-copilot/gpt-5.4",
"variant": "medium", "variant": "xhigh",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "github-copilot/gemini-3-flash-preview", "model": "github-copilot/gemini-3-flash-preview",
@@ -830,8 +832,8 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models with
"variant": "max", "variant": "max",
}, },
"momus": { "momus": {
"model": "github-copilot/gpt-5.2", "model": "github-copilot/gpt-5.4",
"variant": "medium", "variant": "xhigh",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "github-copilot/gemini-3-flash-preview", "model": "github-copilot/gemini-3-flash-preview",
@@ -908,7 +910,7 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian whe
"model": "opencode/glm-4.7-free", "model": "opencode/glm-4.7-free",
}, },
"sisyphus": { "sisyphus": {
"model": "zai-coding-plan/glm-4.7", "model": "zai-coding-plan/glm-5",
}, },
}, },
"categories": { "categories": {
@@ -963,7 +965,7 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian wit
"model": "opencode/glm-4.7-free", "model": "opencode/glm-4.7-free",
}, },
"sisyphus": { "sisyphus": {
"model": "zai-coding-plan/glm-4.7", "model": "zai-coding-plan/glm-5",
}, },
}, },
"categories": { "categories": {
@@ -1011,8 +1013,8 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + OpenCode Zen
"variant": "max", "variant": "max",
}, },
"momus": { "momus": {
"model": "opencode/gpt-5.2", "model": "opencode/gpt-5.4",
"variant": "medium", "variant": "xhigh",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "opencode/gpt-5.3-codex", "model": "opencode/gpt-5.3-codex",
@@ -1086,8 +1088,8 @@ exports[`generateModelConfig mixed provider scenarios uses OpenAI + Copilot comb
"variant": "max", "variant": "max",
}, },
"momus": { "momus": {
"model": "openai/gpt-5.2", "model": "openai/gpt-5.4",
"variant": "medium", "variant": "xhigh",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "openai/gpt-5.3-codex", "model": "openai/gpt-5.3-codex",
@@ -1288,8 +1290,8 @@ exports[`generateModelConfig mixed provider scenarios uses all fallback provider
"variant": "max", "variant": "max",
}, },
"momus": { "momus": {
"model": "github-copilot/gpt-5.2", "model": "github-copilot/gpt-5.4",
"variant": "medium", "variant": "xhigh",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "opencode/gpt-5.3-codex", "model": "opencode/gpt-5.3-codex",
@@ -1363,8 +1365,8 @@ exports[`generateModelConfig mixed provider scenarios uses all providers togethe
"variant": "max", "variant": "max",
}, },
"momus": { "momus": {
"model": "openai/gpt-5.2", "model": "openai/gpt-5.4",
"variant": "medium", "variant": "xhigh",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "openai/gpt-5.3-codex", "model": "openai/gpt-5.3-codex",
@@ -1438,8 +1440,8 @@ exports[`generateModelConfig mixed provider scenarios uses all providers with is
"variant": "max", "variant": "max",
}, },
"momus": { "momus": {
"model": "openai/gpt-5.2", "model": "openai/gpt-5.4",
"variant": "medium", "variant": "xhigh",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "openai/gpt-5.3-codex", "model": "openai/gpt-5.3-codex",

View File

@@ -13,8 +13,7 @@ export const CLI_AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
variant: "max", variant: "max",
}, },
{ providers: ["kimi-for-coding"], model: "k2p5" }, { providers: ["kimi-for-coding"], model: "k2p5" },
{ providers: ["zai-coding-plan"], model: "glm-4.7" }, { providers: ["zai-coding-plan", "opencode"], model: "glm-5" },
{ providers: ["opencode"], model: "glm-4.7-free" },
], ],
requiresAnyModel: true, requiresAnyModel: true,
}, },
@@ -90,7 +89,7 @@ export const CLI_AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
{ providers: ["kimi-for-coding"], model: "k2p5" }, { providers: ["kimi-for-coding"], model: "k2p5" },
{ {
providers: ["openai", "github-copilot", "opencode"], providers: ["openai", "github-copilot", "opencode"],
model: "gpt-5.2", model: "gpt-5.4",
variant: "high", variant: "high",
}, },
{ {
@@ -123,8 +122,8 @@ export const CLI_AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
fallbackChain: [ fallbackChain: [
{ {
providers: ["openai", "github-copilot", "opencode"], providers: ["openai", "github-copilot", "opencode"],
model: "gpt-5.2", model: "gpt-5.4",
variant: "medium", variant: "xhigh",
}, },
{ {
providers: ["anthropic", "github-copilot", "opencode"], providers: ["anthropic", "github-copilot", "opencode"],
@@ -145,7 +144,7 @@ export const CLI_AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
providers: ["anthropic", "github-copilot", "opencode"], providers: ["anthropic", "github-copilot", "opencode"],
model: "claude-sonnet-4-5", model: "claude-sonnet-4-5",
}, },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" }, { providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.4", variant: "medium" },
{ {
providers: ["google", "github-copilot", "opencode"], providers: ["google", "github-copilot", "opencode"],
model: "gemini-3.1-pro", model: "gemini-3.1-pro",

View File

@@ -145,19 +145,19 @@ describe("AGENT_MODEL_REQUIREMENTS", () => {
expect(primary.variant).toBe("max") expect(primary.variant).toBe("max")
}) })
test("momus has valid fallbackChain with gpt-5.2 as primary", () => { test("momus has valid fallbackChain with gpt-5.4 as primary", () => {
// given - momus agent requirement // given - momus agent requirement
const momus = AGENT_MODEL_REQUIREMENTS["momus"] const momus = AGENT_MODEL_REQUIREMENTS["momus"]
// when - accessing Momus requirement // when - accessing Momus requirement
// then - fallbackChain exists with gpt-5.2 as first entry, variant medium // then - fallbackChain exists with gpt-5.4 as first entry, variant xhigh
expect(momus).toBeDefined() expect(momus).toBeDefined()
expect(momus.fallbackChain).toBeArray() expect(momus.fallbackChain).toBeArray()
expect(momus.fallbackChain.length).toBeGreaterThan(0) expect(momus.fallbackChain.length).toBeGreaterThan(0)
const primary = momus.fallbackChain[0] const primary = momus.fallbackChain[0]
expect(primary.model).toBe("gpt-5.2") expect(primary.model).toBe("gpt-5.4")
expect(primary.variant).toBe("medium") expect(primary.variant).toBe("xhigh")
expect(primary.providers[0]).toBe("openai") expect(primary.providers[0]).toBe("openai")
}) })
@@ -174,6 +174,10 @@ describe("AGENT_MODEL_REQUIREMENTS", () => {
const primary = atlas.fallbackChain[0] const primary = atlas.fallbackChain[0]
expect(primary.model).toBe("claude-sonnet-4-6") expect(primary.model).toBe("claude-sonnet-4-6")
expect(primary.providers[0]).toBe("anthropic") expect(primary.providers[0]).toBe("anthropic")
const secondary = atlas.fallbackChain[1]
expect(secondary.model).toBe("gpt-5.4")
expect(secondary.variant).toBe("medium")
}) })
test("hephaestus supports openai, github-copilot, venice, and opencode providers", () => { test("hephaestus supports openai, github-copilot, venice, and opencode providers", () => {

View File

@@ -101,7 +101,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
}, },
{ {
providers: ["openai", "github-copilot", "opencode"], providers: ["openai", "github-copilot", "opencode"],
model: "gpt-5.2", model: "gpt-5.4",
variant: "high", variant: "high",
}, },
{ {
@@ -133,8 +133,8 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
fallbackChain: [ fallbackChain: [
{ {
providers: ["openai", "github-copilot", "opencode"], providers: ["openai", "github-copilot", "opencode"],
model: "gpt-5.2", model: "gpt-5.4",
variant: "medium", variant: "xhigh",
}, },
{ {
providers: ["anthropic", "github-copilot", "opencode"], providers: ["anthropic", "github-copilot", "opencode"],
@@ -154,7 +154,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
providers: ["anthropic", "github-copilot", "opencode"], providers: ["anthropic", "github-copilot", "opencode"],
model: "claude-sonnet-4-6", model: "claude-sonnet-4-6",
}, },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" }, { providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.4", variant: "medium" },
], ],
}, },
}; };