feat: add kimi-k2.5 to agent fallback chains and update model catalog

- sisyphus: opus → kimi-k2.5 → glm-4.7 → gpt-5.2-codex → gemini-3-pro
- atlas: sonnet-4-5 → kimi-k2.5 → gpt-5.2 → gemini-3-pro
- prometheus/metis: opus → kimi-k2.5 → gpt-5.2 → gemini-3-pro
- multimodal-looker: gemini-flash → gpt-5.2 → glm-4.6v → kimi-k2.5 → haiku → gpt-5-nano
- visual-engineering: remove gpt-5.2 from chain
- ultrabrain: reorder to gpt-5.2-codex → gemini-3-pro → opus
- Add cross-provider fuzzy match for model resolution
- Update all documentation (AGENTS.md, features.md, configurations.md, category-skill-guide.md)
This commit is contained in:
justsisyphus
2026-01-30 14:52:45 +09:00
parent 10bdb6c694
commit d2d8d1a782
8 changed files with 134 additions and 37 deletions

View File

@@ -25,15 +25,15 @@ agents/
## AGENT MODELS
| Agent | Model | Temp | Purpose |
|-------|-------|------|---------|
| Sisyphus | anthropic/claude-opus-4-5 | 0.1 | Primary orchestrator |
| Atlas | anthropic/claude-opus-4-5 | 0.1 | Master orchestrator |
| Sisyphus | anthropic/claude-opus-4-5 | 0.1 | Primary orchestrator (fallback: kimi-k2.5 → glm-4.7) |
| Atlas | anthropic/claude-sonnet-4-5 | 0.1 | Master orchestrator (fallback: kimi-k2.5 → gpt-5.2) |
| oracle | openai/gpt-5.2 | 0.1 | Consultation, debugging |
| librarian | opencode/glm-4.7-free | 0.1 | Docs, GitHub search |
| explore | opencode/gpt-5-nano | 0.1 | Fast contextual grep |
| librarian | zai-coding-plan/glm-4.7 | 0.1 | Docs, GitHub search (fallback: glm-4.7-free) |
| explore | anthropic/claude-haiku-4-5 | 0.1 | Fast contextual grep (fallback: gpt-5-mini → gpt-5-nano) |
| multimodal-looker | google/gemini-3-flash | 0.1 | PDF/image analysis |
| Prometheus | anthropic/claude-opus-4-5 | 0.1 | Strategic planning |
| Metis | anthropic/claude-sonnet-4-5 | 0.3 | Pre-planning analysis |
| Momus | anthropic/claude-sonnet-4-5 | 0.1 | Plan validation |
| Prometheus | anthropic/claude-opus-4-5 | 0.1 | Strategic planning (fallback: kimi-k2.5 → gpt-5.2) |
| Metis | anthropic/claude-opus-4-5 | 0.3 | Pre-planning analysis (fallback: kimi-k2.5 → gpt-5.2) |
| Momus | openai/gpt-5.2 | 0.1 | Plan validation (fallback: claude-opus-4-5) |
| Sisyphus-Junior | anthropic/claude-sonnet-4-5 | 0.1 | Category-spawned executor |
## HOW TO ADD

View File

@@ -13,6 +13,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
sisyphus: {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["kimi-for-coding", "opencode"], model: "kimi-k2.5" },
{ providers: ["zai-coding-plan"], model: "glm-4.7" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2-codex", variant: "medium" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
@@ -44,6 +45,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
{ providers: ["zai-coding-plan"], model: "glm-4.6v" },
{ providers: ["kimi-for-coding", "opencode"], model: "kimi-k2.5" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-haiku-4-5" },
{ providers: ["opencode"], model: "gpt-5-nano" },
],
@@ -51,6 +53,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
prometheus: {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["kimi-for-coding", "opencode"], model: "kimi-k2.5" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
],
@@ -58,6 +61,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
metis: {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["kimi-for-coding", "opencode"], model: "kimi-k2.5" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" },
],
@@ -72,6 +76,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
atlas: {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-5" },
{ providers: ["kimi-for-coding"], model: "kimi-k2.5" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
],
@@ -84,14 +89,13 @@ export const CATEGORY_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["zai-coding-plan"], model: "glm-4.7" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" },
],
},
ultrabrain: {
fallbackChain: [
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2-codex", variant: "xhigh" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
],
},
deep: {

View File

@@ -388,6 +388,85 @@ describe("resolveModelWithFallback", () => {
expect(result!.model).toBe("anthropic/claude-opus-4-5")
expect(result!.source).toBe("provider-fallback")
})
test("cross-provider fuzzy match when preferred provider unavailable (librarian scenario)", () => {
  // #given - the chain pins glm-4.7 to zai-coding-plan, yet only opencode actually serves it
  const catalog = new Set(["opencode/glm-4.7", "anthropic/claude-sonnet-4-5"])
  const input: ExtendedModelResolutionInput = {
    fallbackChain: [
      { providers: ["zai-coding-plan"], model: "glm-4.7" },
      { providers: ["anthropic"], model: "claude-sonnet-4-5" },
    ],
    availableModels: catalog,
    systemDefaultModel: "google/gemini-3-pro",
  }
  // #when
  const resolved = resolveModelWithFallback(input)
  // #then - the first chain entry still wins, resolved through the provider-agnostic pass
  expect(resolved!.source).toBe("provider-fallback")
  expect(resolved!.model).toBe("opencode/glm-4.7")
  expect(logSpy).toHaveBeenCalledWith("Model resolved via fallback chain (cross-provider fuzzy match)", {
    model: "glm-4.7",
    match: "opencode/glm-4.7",
    variant: undefined,
  })
})
test("prefers specified provider over cross-provider match", () => {
  // #given - glm-4.7 is offered by the specified provider AND by opencode
  const input: ExtendedModelResolutionInput = {
    fallbackChain: [{ providers: ["zai-coding-plan"], model: "glm-4.7" }],
    availableModels: new Set(["zai-coding-plan/glm-4.7", "opencode/glm-4.7"]),
    systemDefaultModel: "google/gemini-3-pro",
  }
  // #when
  const resolved = resolveModelWithFallback(input)
  // #then - the provider-filtered pass runs before the cross-provider pass, so zai-coding-plan wins
  expect(resolved!.source).toBe("provider-fallback")
  expect(resolved!.model).toBe("zai-coding-plan/glm-4.7")
})
test("cross-provider match preserves variant from entry", () => {
  // #given - the chain entry carries a variant, and the model only resolves cross-provider
  const entryWithVariant = { providers: ["zai-coding-plan"], model: "glm-4.7", variant: "high" }
  const input: ExtendedModelResolutionInput = {
    fallbackChain: [entryWithVariant],
    availableModels: new Set(["opencode/glm-4.7"]),
    systemDefaultModel: "google/gemini-3-pro",
  }
  // #when
  const resolved = resolveModelWithFallback(input)
  // #then - the entry's variant rides along with the cross-provider result
  expect(resolved!.variant).toBe("high")
  expect(resolved!.model).toBe("opencode/glm-4.7")
})
test("cross-provider match tries next entry if no match found anywhere", () => {
  // #given - the first entry's model exists with no provider at all; the second entry is available
  const input: ExtendedModelResolutionInput = {
    fallbackChain: [
      { providers: ["zai-coding-plan"], model: "nonexistent-model" },
      { providers: ["anthropic"], model: "claude-sonnet-4-5" },
    ],
    availableModels: new Set(["anthropic/claude-sonnet-4-5"]),
    systemDefaultModel: "google/gemini-3-pro",
  }
  // #when
  const resolved = resolveModelWithFallback(input)
  // #then - neither pass matches the first entry, so resolution advances to the second
  expect(resolved!.source).toBe("provider-fallback")
  expect(resolved!.model).toBe("anthropic/claude-sonnet-4-5")
})
})
describe("Step 4: System default fallback (no availability match)", () => {

View File

@@ -117,6 +117,7 @@ export function resolveModelWithFallback(
}
} else {
for (const entry of fallbackChain) {
// Step 1: Try with provider filter (preferred providers first)
for (const provider of entry.providers) {
const fullModel = `${provider}/${entry.model}`
const match = fuzzyMatchModel(fullModel, availableModels, [provider])
@@ -125,6 +126,17 @@ export function resolveModelWithFallback(
return { model: match, source: "provider-fallback", variant: entry.variant }
}
}
// Step 2: Try without provider filter (cross-provider fuzzy match)
const crossProviderMatch = fuzzyMatchModel(entry.model, availableModels)
if (crossProviderMatch) {
log("Model resolved via fallback chain (cross-provider fuzzy match)", {
model: entry.model,
match: crossProviderMatch,
variant: entry.variant,
})
return { model: crossProviderMatch, source: "provider-fallback", variant: entry.variant }
}
}
log("No available model found in fallback chain, falling through to system default")
}