feat(model-requirements): add k2p5, kimi-k2.5, gpt-5.4 medium to Sisyphus fallback chain
Sisyphus can now fall back through Kimi and OpenAI models when Claude is unavailable, enabling OpenAI-only users to use Sisyphus directly instead of being redirected to Hephaestus.

Runtime chain: claude-opus-4-6 max → k2p5 → kimi-k2.5 → gpt-5.4 medium → glm-5 → big-pickle
CLI chain: claude-opus-4-6 max → k2p5 → gpt-5.4 medium → glm-5
This commit is contained in:
@@ -396,7 +396,7 @@ describe("generateModelConfig", () => {
     expect(result.agents?.sisyphus?.model).toBe("anthropic/claude-opus-4-6")
   })

-  test("Sisyphus is omitted when no fallback provider is available (OpenAI not in chain)", () => {
+  test("Sisyphus resolves to gpt-5.4 medium when only OpenAI is available", () => {
     // #given
     const config = createConfig({ hasOpenAI: true })

@@ -404,7 +404,8 @@ describe("generateModelConfig", () => {
     const result = generateModelConfig(config)

     // #then
-    expect(result.agents?.sisyphus).toBeUndefined()
+    expect(result.agents?.sisyphus?.model).toBe("openai/gpt-5.4")
+    expect(result.agents?.sisyphus?.variant).toBe("medium")
   })
 })
Reference in New Issue
Block a user