refactor: replace opencode/glm-4.7-free with opencode/big-pickle model

Ultraworked with [Sisyphus](https://github.com/code-yeongyu/oh-my-opencode)

Co-authored-by: Sisyphus <clio-agent@sisyphuslabs.ai>
This commit is contained in:
YeonGyu-Kim
2026-02-18 18:07:16 +09:00
parent 096db59399
commit 6bf365595f
14 changed files with 87 additions and 87 deletions

View File

@@ -13,7 +13,7 @@ Agent factories following `createXXXAgent(model) → AgentConfig` pattern. Each
| **Sisyphus** | claude-opus-4-6 | 0.1 | primary | kimi-k2.5 → glm-4.7 → big-pickle | Main orchestrator, plans + delegates |
| **Hephaestus** | gpt-5.3-codex | 0.1 | primary | NONE (required) | Autonomous deep worker |
| **Oracle** | gpt-5.2 | 0.1 | subagent | claude-opus-4-6 → gemini-3-pro | Read-only consultation |
| **Librarian** | glm-4.7 | 0.1 | subagent | glm-4.7-free → claude-sonnet-4-6 | External docs/code search |
| **Librarian** | glm-4.7 | 0.1 | subagent | big-pickle → claude-sonnet-4-6 | External docs/code search |
| **Explore** | grok-code-fast-1 | 0.1 | subagent | claude-haiku-4-5 → gpt-5-nano | Contextual grep |
| **Multimodal-Looker** | gemini-3-flash | 0.1 | subagent | gpt-5.2 → glm-4.6v → ... (6 deep) | PDF/image analysis |
| **Metis** | claude-opus-4-6 | **0.3** | subagent | kimi-k2.5 → gpt-5.2 → gemini-3-pro | Pre-planning consultant |

View File

@@ -19,7 +19,7 @@ describe("createBuiltinAgents with model overrides", () => {
"kimi-for-coding/k2p5",
"opencode/kimi-k2.5-free",
"zai-coding-plan/glm-4.7",
"opencode/glm-4.7-free",
"opencode/big-pickle",
])
)
@@ -260,7 +260,7 @@ describe("createBuiltinAgents with model overrides", () => {
"kimi-for-coding/k2p5",
"opencode/kimi-k2.5-free",
"zai-coding-plan/glm-4.7",
"opencode/glm-4.7-free",
"opencode/big-pickle",
"openai/gpt-5.2",
])
)
@@ -506,7 +506,7 @@ describe("createBuiltinAgents without systemDefaultModel", () => {
"kimi-for-coding/k2p5",
"opencode/kimi-k2.5-free",
"zai-coding-plan/glm-4.7",
"opencode/glm-4.7-free",
"opencode/big-pickle",
])
)

View File

@@ -51,7 +51,7 @@ cli/
## MODEL FALLBACK SYSTEM
Priority: Claude > OpenAI > Gemini > Copilot > OpenCode Zen > Z.ai > Kimi > glm-4.7-free
Priority: Claude > OpenAI > Gemini > Copilot > OpenCode Zen > Z.ai > Kimi > big-pickle
Agent-specific: librarian→ZAI, explore→Haiku/nano, hephaestus→requires OpenAI/Copilot

View File

@@ -5,57 +5,57 @@ exports[`generateModelConfig no providers available returns ULTIMATE_FALLBACK fo
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"explore": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"hephaestus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"momus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"multimodal-looker": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"oracle": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"prometheus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
},
"categories": {
"artistry": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"deep": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"quick": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"ultrabrain": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"unspecified-high": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"unspecified-low": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"visual-engineering": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"writing": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
},
}
@@ -205,7 +205,7 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"variant": "medium",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "openai/gpt-5.2",
@@ -233,7 +233,7 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"variant": "medium",
},
"quick": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"ultrabrain": {
"model": "openai/gpt-5.3-codex",
@@ -248,10 +248,10 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"variant": "medium",
},
"visual-engineering": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"writing": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
},
}
@@ -272,7 +272,7 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"variant": "medium",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "openai/gpt-5.2",
@@ -300,7 +300,7 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"variant": "medium",
},
"quick": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"ultrabrain": {
"model": "openai/gpt-5.3-codex",
@@ -315,10 +315,10 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"variant": "medium",
},
"visual-engineering": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"writing": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
},
}
@@ -335,7 +335,7 @@ exports[`generateModelConfig single native provider uses Gemini models when only
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "google/gemini-3-pro",
@@ -396,7 +396,7 @@ exports[`generateModelConfig single native provider uses Gemini models with isMa
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "google/gemini-3-pro",
@@ -614,7 +614,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models when on
"variant": "medium",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "opencode/claude-opus-4-6",
@@ -688,7 +688,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models with is
"variant": "medium",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "opencode/claude-opus-4-6",
@@ -902,7 +902,7 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian whe
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"explore": {
"model": "opencode/gpt-5-nano",
@@ -911,19 +911,19 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian whe
"model": "zai-coding-plan/glm-4.7",
},
"metis": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"momus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"multimodal-looker": {
"model": "zai-coding-plan/glm-4.6v",
},
"oracle": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"prometheus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"sisyphus": {
"model": "zai-coding-plan/glm-4.7",
@@ -931,22 +931,22 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian whe
},
"categories": {
"quick": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"ultrabrain": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"unspecified-high": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"unspecified-low": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"visual-engineering": {
"model": "zai-coding-plan/glm-5",
},
"writing": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
},
}
@@ -957,7 +957,7 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian wit
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"explore": {
"model": "opencode/gpt-5-nano",
@@ -966,19 +966,19 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian wit
"model": "zai-coding-plan/glm-4.7",
},
"metis": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"momus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"multimodal-looker": {
"model": "zai-coding-plan/glm-4.6v",
},
"oracle": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"prometheus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"sisyphus": {
"model": "zai-coding-plan/glm-4.7",
@@ -986,22 +986,22 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian wit
},
"categories": {
"quick": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"ultrabrain": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"unspecified-high": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"unspecified-low": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"visual-engineering": {
"model": "zai-coding-plan/glm-5",
},
"writing": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
},
}
@@ -1022,7 +1022,7 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + OpenCode Zen
"variant": "medium",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "anthropic/claude-opus-4-6",

View File

@@ -130,7 +130,7 @@ export async function runCliInstaller(args: InstallArgs, version: string): Promi
!config.hasCopilot &&
!config.hasOpencodeZen
) {
printWarning("No model providers configured. Using opencode/glm-4.7-free as fallback.")
printWarning("No model providers configured. Using opencode/big-pickle as fallback.")
}
console.log(`${SYMBOLS.star} ${color.bold(color.green(isUpdate ? "Configuration updated!" : "Installation complete!"))}`)

View File

@@ -18,7 +18,7 @@ export type { GeneratedOmoConfig } from "./model-fallback-types"
const ZAI_MODEL = "zai-coding-plan/glm-4.7"
const ULTIMATE_FALLBACK = "opencode/glm-4.7-free"
const ULTIMATE_FALLBACK = "opencode/big-pickle"
const SCHEMA_URL = "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json"

View File

@@ -32,7 +32,7 @@ export async function promptInstallConfig(detected: DetectedConfig): Promise<Ins
const claude = await selectOrCancel<ClaudeSubscription>({
message: "Do you have a Claude Pro/Max subscription?",
options: [
{ value: "no", label: "No", hint: "Will use opencode/glm-4.7-free as fallback" },
{ value: "no", label: "No", hint: "Will use opencode/big-pickle as fallback" },
{ value: "yes", label: "Yes (standard)", hint: "Claude Opus 4.6 for orchestration" },
{ value: "max20", label: "Yes (max20 mode)", hint: "Full power with Claude Sonnet 4.6 for Librarian" },
],

View File

@@ -98,7 +98,7 @@ export async function runTuiInstaller(args: InstallArgs, version: string): Promi
}
if (!config.hasClaude && !config.hasOpenAI && !config.hasGemini && !config.hasCopilot && !config.hasOpencodeZen) {
p.log.warn("No model providers configured. Using opencode/glm-4.7-free as fallback.")
p.log.warn("No model providers configured. Using opencode/big-pickle as fallback.")
}
p.note(formatConfigSummary(config), isUpdate ? "Updated Configuration" : "Installation Complete")

View File

@@ -46,7 +46,7 @@ describe("Agent Config Integration", () => {
const config = {
sisyphus: { model: "anthropic/claude-opus-4-6" },
oracle: { model: "openai/gpt-5.2" },
librarian: { model: "opencode/glm-4.7-free" },
librarian: { model: "opencode/big-pickle" },
}
// when - migration is applied
@@ -65,7 +65,7 @@ describe("Agent Config Integration", () => {
Sisyphus: { model: "anthropic/claude-opus-4-6" },
oracle: { model: "openai/gpt-5.2" },
"Prometheus (Planner)": { model: "anthropic/claude-opus-4-6" },
librarian: { model: "opencode/glm-4.7-free" },
librarian: { model: "opencode/big-pickle" },
}
// when - migration is applied

View File

@@ -251,7 +251,7 @@ describe("fuzzyMatchModel", () => {
it("should match github-copilot claude-opus-4-6 to claude-opus-4.6", () => {
const available = new Set([
"github-copilot/claude-opus-4.6",
"opencode/glm-4.7-free",
"opencode/big-pickle",
])
const result = fuzzyMatchModel("claude-opus-4-6", available, ["github-copilot"])
expect(result).toBe("github-copilot/claude-opus-4.6")
@@ -327,16 +327,16 @@ describe("fuzzyMatchModel", () => {
expect(result).toBe("anthropic/claude-opus-4-6")
})
// given available models with similar model IDs (e.g., glm-4.7 and glm-4.7-free)
// when searching for the longer variant (glm-4.7-free)
// given available models with distinct model IDs (e.g., glm-4.7 and big-pickle)
// when searching for an ID that exactly matches one entry (big-pickle)
// then return the exact model ID match
// NOTE(review): the big-pickle rename removed the glm-4.7 / glm-4.7-free substring
// overlap this test was written for; consider kimi-k2.5 vs kimi-k2.5-free to keep
// covering the "exact match beats shorter substring" case
it("should prefer exact model ID match over shorter substring match", () => {
const available = new Set([
"zai-coding-plan/glm-4.7",
"zai-coding-plan/glm-4.7-free",
"zai-coding-plan/big-pickle",
])
const result = fuzzyMatchModel("glm-4.7-free", available)
expect(result).toBe("zai-coding-plan/glm-4.7-free")
const result = fuzzyMatchModel("big-pickle", available)
expect(result).toBe("zai-coding-plan/big-pickle")
})
// given available models with similar model IDs
@@ -345,7 +345,7 @@ describe("fuzzyMatchModel", () => {
it("should still prefer shorter match when searching for shorter variant", () => {
const available = new Set([
"zai-coding-plan/glm-4.7",
"zai-coding-plan/glm-4.7-free",
"zai-coding-plan/big-pickle",
])
const result = fuzzyMatchModel("glm-4.7", available)
expect(result).toBe("zai-coding-plan/glm-4.7")
@@ -690,13 +690,13 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
it("should prefer provider-models cache over models.json", async () => {
writeProviderModelsCache({
models: {
opencode: ["glm-4.7-free", "gpt-5-nano"],
opencode: ["big-pickle", "gpt-5-nano"],
anthropic: ["claude-opus-4-6"]
},
connected: ["opencode", "anthropic"]
})
writeModelsCache({
opencode: { models: { "glm-4.7-free": {}, "gpt-5-nano": {}, "gpt-5.2": {} } },
opencode: { models: { "big-pickle": {}, "gpt-5-nano": {}, "gpt-5.2": {} } },
anthropic: { models: { "claude-opus-4-6": {}, "claude-sonnet-4-6": {} } }
})
@@ -705,7 +705,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
})
expect(result.size).toBe(3)
expect(result.has("opencode/glm-4.7-free")).toBe(true)
expect(result.has("opencode/big-pickle")).toBe(true)
expect(result.has("opencode/gpt-5-nano")).toBe(true)
expect(result.has("anthropic/claude-opus-4-6")).toBe(true)
expect(result.has("opencode/gpt-5.2")).toBe(false)
@@ -738,7 +738,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
// then falls back to models.json (no whitelist filtering)
it("should fallback to models.json when provider-models cache not found", async () => {
writeModelsCache({
opencode: { models: { "glm-4.7-free": {}, "gpt-5-nano": {}, "gpt-5.2": {} } },
opencode: { models: { "big-pickle": {}, "gpt-5-nano": {}, "gpt-5.2": {} } },
})
const result = await fetchAvailableModels(undefined, {
@@ -746,7 +746,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
})
expect(result.size).toBe(3)
expect(result.has("opencode/glm-4.7-free")).toBe(true)
expect(result.has("opencode/big-pickle")).toBe(true)
expect(result.has("opencode/gpt-5-nano")).toBe(true)
expect(result.has("opencode/gpt-5.2")).toBe(true)
})
@@ -757,7 +757,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
it("should filter by connectedProviders even with provider-models cache", async () => {
writeProviderModelsCache({
models: {
opencode: ["glm-4.7-free"],
opencode: ["big-pickle"],
anthropic: ["claude-opus-4-6"],
google: ["gemini-3-pro"]
},
@@ -769,7 +769,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
})
expect(result.size).toBe(1)
expect(result.has("opencode/glm-4.7-free")).toBe(true)
expect(result.has("opencode/big-pickle")).toBe(true)
expect(result.has("anthropic/claude-opus-4-6")).toBe(false)
expect(result.has("google/gemini-3-pro")).toBe(false)
})

View File

@@ -81,7 +81,7 @@ export function fuzzyMatchModel(
}
// Priority 2: Exact model ID match (part after provider/)
// This ensures "glm-4.7-free" matches "zai-coding-plan/glm-4.7-free" over "zai-coding-plan/glm-4.7"
// This ensures an ID that is a superstring of another (e.g. "kimi-k2.5-free" vs "kimi-k2.5") resolves to its exact entry, not the shorter prefix match
// Use filter + shortest to handle multi-provider cases (e.g., openai/gpt-5.2 + opencode/gpt-5.2)
const exactModelIdMatches = matches.filter((model) => {
const modelId = model.split("/").slice(1).join("/")

View File

@@ -28,7 +28,7 @@ describe("AGENT_MODEL_REQUIREMENTS", () => {
const sisyphus = AGENT_MODEL_REQUIREMENTS["sisyphus"]
// #when - accessing Sisyphus requirement
// #then - fallbackChain has claude-opus-4-6 first, glm-4.7-free last
// #then - fallbackChain has claude-opus-4-6 first, big-pickle last
expect(sisyphus).toBeDefined()
expect(sisyphus.fallbackChain).toBeArray()
expect(sisyphus.fallbackChain).toHaveLength(5)
@@ -41,7 +41,7 @@ describe("AGENT_MODEL_REQUIREMENTS", () => {
const last = sisyphus.fallbackChain[4]
expect(last.providers[0]).toBe("opencode")
expect(last.model).toBe("glm-4.7-free")
expect(last.model).toBe("big-pickle")
})
test("librarian has valid fallbackChain with glm-4.7 as primary", () => {
@@ -404,7 +404,7 @@ describe("FallbackEntry type", () => {
// given - a FallbackEntry without variant
const entry: FallbackEntry = {
providers: ["opencode", "anthropic"],
model: "glm-4.7-free",
model: "big-pickle",
}
// when - accessing variant
@@ -434,7 +434,7 @@ describe("ModelRequirement type", () => {
test("ModelRequirement variant is optional", () => {
// given - a ModelRequirement without top-level variant
const requirement: ModelRequirement = {
fallbackChain: [{ providers: ["opencode"], model: "glm-4.7-free" }],
fallbackChain: [{ providers: ["opencode"], model: "big-pickle" }],
}
// when - accessing variant

View File

@@ -19,7 +19,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
{ providers: ["kimi-for-coding"], model: "k2p5" },
{ providers: ["opencode"], model: "kimi-k2.5-free" },
{ providers: ["zai-coding-plan"], model: "glm-4.7" },
{ providers: ["opencode"], model: "glm-4.7-free" },
{ providers: ["opencode"], model: "big-pickle" },
],
requiresAnyModel: true,
},
@@ -39,7 +39,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
librarian: {
fallbackChain: [
{ providers: ["zai-coding-plan"], model: "glm-4.7" },
{ providers: ["opencode"], model: "glm-4.7-free" },
{ providers: ["opencode"], model: "big-pickle" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-6" },
],
},

View File

@@ -117,7 +117,7 @@ describe("resolveModelWithFallback", () => {
test("returns uiSelectedModel with override source when provided", () => {
// given
const input: ExtendedModelResolutionInput = {
uiSelectedModel: "opencode/glm-4.7-free",
uiSelectedModel: "opencode/big-pickle",
userModel: "anthropic/claude-opus-4-6",
fallbackChain: [
{ providers: ["anthropic", "github-copilot"], model: "claude-opus-4-6" },
@@ -130,15 +130,15 @@ describe("resolveModelWithFallback", () => {
const result = resolveModelWithFallback(input)
// then
expect(result!.model).toBe("opencode/glm-4.7-free")
expect(result!.model).toBe("opencode/big-pickle")
expect(result!.source).toBe("override")
expect(logSpy).toHaveBeenCalledWith("Model resolved via UI selection", { model: "opencode/glm-4.7-free" })
expect(logSpy).toHaveBeenCalledWith("Model resolved via UI selection", { model: "opencode/big-pickle" })
})
test("UI selection takes priority over config override", () => {
// given
const input: ExtendedModelResolutionInput = {
uiSelectedModel: "opencode/glm-4.7-free",
uiSelectedModel: "opencode/big-pickle",
userModel: "anthropic/claude-opus-4-6",
availableModels: new Set(["anthropic/claude-opus-4-6"]),
systemDefaultModel: "google/gemini-3-pro",
@@ -148,7 +148,7 @@ describe("resolveModelWithFallback", () => {
const result = resolveModelWithFallback(input)
// then
expect(result!.model).toBe("opencode/glm-4.7-free")
expect(result!.model).toBe("opencode/big-pickle")
expect(result!.source).toBe("override")
})