Merge branch 'omo-avail' into dev

This commit is contained in:
justsisyphus
2026-01-30 15:28:20 +09:00
6 changed files with 132 additions and 43 deletions

View File

@@ -10,7 +10,7 @@ import { createMetisAgent } from "./metis"
import { createAtlasAgent } from "./atlas"
import { createMomusAgent } from "./momus"
import type { AvailableAgent, AvailableCategory, AvailableSkill } from "./dynamic-agent-prompt-builder"
import { deepMerge, fetchAvailableModels, resolveModelWithFallback, AGENT_MODEL_REQUIREMENTS, findCaseInsensitive, includesCaseInsensitive, readConnectedProvidersCache } from "../shared"
import { deepMerge, fetchAvailableModels, resolveModelWithFallback, AGENT_MODEL_REQUIREMENTS, findCaseInsensitive, includesCaseInsensitive, readConnectedProvidersCache, isModelAvailable } from "../shared"
import { DEFAULT_CATEGORIES, CATEGORY_DESCRIPTIONS } from "../tools/delegate-task/constants"
import { resolveMultipleSkills } from "../features/opencode-skill-loader/skill-content"
import { createBuiltinSkills } from "../features/builtin-skills"
@@ -222,12 +222,19 @@ export async function createBuiltinAgents(
if (agentName === "atlas") continue
if (includesCaseInsensitive(disabledAgents, agentName)) continue
const override = findCaseInsensitive(agentOverrides, agentName)
const requirement = AGENT_MODEL_REQUIREMENTS[agentName]
const isPrimaryAgent = isFactory(source) && source.mode === "primary"
const resolution = resolveModelWithFallback({
const override = findCaseInsensitive(agentOverrides, agentName)
const requirement = AGENT_MODEL_REQUIREMENTS[agentName]
// Check if agent requires a specific model
if (requirement?.requiresModel && availableModels) {
if (!isModelAvailable(requirement.requiresModel, availableModels)) {
continue
}
}
const isPrimaryAgent = isFactory(source) && source.mode === "primary"
const resolution = resolveModelWithFallback({
uiSelectedModel: isPrimaryAgent ? uiSelectedModel : undefined,
userModel: override?.model,
fallbackChain: requirement?.fallbackChain,

View File

@@ -2,7 +2,7 @@ import { describe, it, expect, beforeEach, afterEach } from "bun:test"
import { mkdtempSync, writeFileSync, rmSync } from "fs"
import { tmpdir } from "os"
import { join } from "path"
import { fetchAvailableModels, fuzzyMatchModel, getConnectedProviders, __resetModelCache } from "./model-availability"
import { fetchAvailableModels, fuzzyMatchModel, getConnectedProviders, __resetModelCache, isModelAvailable } from "./model-availability"
describe("fetchAvailableModels", () => {
let tempDir: string
@@ -610,3 +610,38 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
expect(result.has("google/gemini-3-pro")).toBe(false)
})
})
describe("isModelAvailable", () => {
  it("returns true when model exists via fuzzy match", () => {
    // #given — a bare model name that matches a "provider/model" entry
    const models = new Set(["openai/gpt-5.2-codex", "anthropic/claude-opus-4-5"])
    // #when
    const found = isModelAvailable("gpt-5.2-codex", models)
    // #then
    expect(found).toBe(true)
  })
  it("returns false when model not found", () => {
    // #given — set contains only an unrelated model
    const models = new Set(["anthropic/claude-opus-4-5"])
    // #when
    const found = isModelAvailable("gpt-5.2-codex", models)
    // #then
    expect(found).toBe(false)
  })
  it("returns false for empty available set", () => {
    // #given — nothing available at all
    const models = new Set<string>()
    // #when
    const found = isModelAvailable("gpt-5.2-codex", models)
    // #then
    expect(found).toBe(false)
  })
})

View File

@@ -87,6 +87,20 @@ export function fuzzyMatchModel(
return result
}
/**
 * Determine whether a target model can be resolved from the available set.
 *
 * Delegates to {@link fuzzyMatchModel}, so the lookup matches by model name
 * alone — provider prefixes in the available set are not filtered on.
 *
 * @param targetModel - Model name to check (e.g., "gpt-5.2-codex")
 * @param availableModels - Set of available models in "provider/model" format
 * @returns true if model is available, false otherwise
 */
export function isModelAvailable(
  targetModel: string,
  availableModels: Set<string>,
): boolean {
  const match = fuzzyMatchModel(targetModel, availableModels)
  return match !== null
}
export async function getConnectedProviders(client: any): Promise<string[]> {
if (!client?.provider?.list) {
log("[getConnectedProviders] client.provider.list not available")

View File

@@ -424,20 +424,38 @@ describe("ModelRequirement type", () => {
}
})
test("all fallbackChain entries have non-empty providers array", () => {
// #given - all agent and category requirements
const allRequirements = [
...Object.values(AGENT_MODEL_REQUIREMENTS),
...Object.values(CATEGORY_MODEL_REQUIREMENTS),
]
test("all fallbackChain entries have non-empty providers array", () => {
// #given - all agent and category requirements
const allRequirements = [
...Object.values(AGENT_MODEL_REQUIREMENTS),
...Object.values(CATEGORY_MODEL_REQUIREMENTS),
]
// #when - checking each entry in fallbackChain
// #then - all have non-empty providers array
for (const req of allRequirements) {
for (const entry of req.fallbackChain) {
expect(entry.providers).toBeArray()
expect(entry.providers.length).toBeGreaterThan(0)
}
}
// #when - checking each entry in fallbackChain
// #then - all have non-empty providers array
for (const req of allRequirements) {
for (const entry of req.fallbackChain) {
expect(entry.providers).toBeArray()
expect(entry.providers.length).toBeGreaterThan(0)
}
}
})
})
describe("requiresModel field in categories", () => {
  test("deep category has requiresModel set to gpt-5.2-codex", () => {
    // #given — the configured requirement for the deep category
    const requirement = CATEGORY_MODEL_REQUIREMENTS["deep"]
    // #when / #then
    expect(requirement.requiresModel).toBe("gpt-5.2-codex")
  })
  test("artistry category has requiresModel set to gemini-3-pro", () => {
    // #given — the configured requirement for the artistry category
    const requirement = CATEGORY_MODEL_REQUIREMENTS["artistry"]
    // #when / #then
    expect(requirement.requiresModel).toBe("gemini-3-pro")
  })
})

View File

@@ -7,6 +7,7 @@ export type FallbackEntry = {
/**
 * Model-resolution rules for a single agent or delegate-task category.
 * `fallbackChain` is consumed by `resolveModelWithFallback`; `requiresModel`
 * gates activation in `createBuiltinAgents` and `resolveCategoryConfig`
 * (the agent/category is skipped when the required model is unavailable).
 */
export type ModelRequirement = {
fallbackChain: FallbackEntry[]
variant?: string // Default variant (used when entry doesn't specify one)
requiresModel?: string // If set, only activates when this model is available (fuzzy match)
}
export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
@@ -103,20 +104,22 @@ export const CATEGORY_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
],
},
deep: {
fallbackChain: [
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2-codex", variant: "medium" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" },
],
},
artistry: {
fallbackChain: [
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
],
},
deep: {
fallbackChain: [
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2-codex", variant: "medium" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" },
],
requiresModel: "gpt-5.2-codex",
},
artistry: {
fallbackChain: [
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
],
requiresModel: "gemini-3-pro",
},
quick: {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-haiku-4-5" },

View File

@@ -13,7 +13,7 @@ import { getTaskToastManager } from "../../features/task-toast-manager"
import type { ModelFallbackInfo } from "../../features/task-toast-manager/types"
import { subagentSessions, getSessionAgent } from "../../features/claude-code-session-state"
import { log, getAgentToolRestrictions, resolveModel, getOpenCodeConfigPaths, findByNameCaseInsensitive, equalsIgnoreCase, promptWithModelSuggestionRetry } from "../../shared"
import { fetchAvailableModels } from "../../shared/model-availability"
import { fetchAvailableModels, isModelAvailable } from "../../shared/model-availability"
import { readConnectedProvidersCache } from "../../shared/connected-providers-cache"
import { resolveModelWithFallback } from "../../shared/model-resolver"
import { CATEGORY_MODEL_REQUIREMENTS } from "../../shared/model-requirements"
@@ -117,9 +117,20 @@ export function resolveCategoryConfig(
userCategories?: CategoriesConfig
inheritedModel?: string
systemDefaultModel?: string
availableModels?: Set<string>
}
): { config: CategoryConfig; promptAppend: string; model: string | undefined } | null {
const { userCategories, inheritedModel, systemDefaultModel } = options
const { userCategories, inheritedModel, systemDefaultModel, availableModels } = options
// Check if category requires a specific model
const categoryReq = CATEGORY_MODEL_REQUIREMENTS[categoryName]
if (categoryReq?.requiresModel && availableModels) {
if (!isModelAvailable(categoryReq.requiresModel, availableModels)) {
log(`[resolveCategoryConfig] Category ${categoryName} requires ${categoryReq.requiresModel} but not available`)
return null
}
}
const defaultConfig = DEFAULT_CATEGORIES[categoryName]
const userConfig = userCategories?.[categoryName]
const defaultPromptAppend = CATEGORY_PROMPT_APPENDS[categoryName] ?? ""
@@ -522,11 +533,12 @@ To continue this session: session_id="${args.session_id}"`
connectedProviders: connectedProviders ?? undefined
})
const resolved = resolveCategoryConfig(args.category, {
userCategories,
inheritedModel,
systemDefaultModel,
})
const resolved = resolveCategoryConfig(args.category, {
userCategories,
inheritedModel,
systemDefaultModel,
availableModels,
})
if (!resolved) {
return `Unknown category: "${args.category}". Available: ${Object.keys({ ...DEFAULT_CATEGORIES, ...userCategories }).join(", ")}`
}