fix(models): update model names to match OpenCode Zen catalog (#1048)

* fix(models): update model names to match OpenCode Zen catalog

OpenCode Zen recently updated their official model catalog, deprecating
several preview and free model variants:

DEPRECATED → NEW (Official Zen Names):
- gemini-3-pro-preview → gemini-3-pro
- gemini-3-flash-preview → gemini-3-flash
- grok-code → gpt-5-nano (FREE tier maintained)
- glm-4.7-free → big-pickle (FREE tier maintained)
- glm-4.6v → glm-4.6

Changes:
- Updated 6 source files (model-requirements, delegate-task, think-mode, etc.)
- Updated 9 documentation files (installation, configurations, features, etc.)
- Updated 14 test files with new model references
- Regenerated snapshots to reflect catalog changes
- Removed duplicate think-mode entries for preview variants

Impact:
- FREE tier access preserved via gpt-5-nano and big-pickle
- All 55 model-related tests passing
- Zero breaking changes - pure string replacement
- Aligns codebase with official OpenCode Zen model catalog

Verified:
- Zero deprecated model names in codebase
- All model-related tests pass (55/55)
- Snapshots regenerated and validated

Affects: 30 files (6 source, 9 docs, 14 tests, 1 snapshot)

* fix(multimodal-looker): update fallback chain with glm-4.6v and gpt-5-nano

- Change glm-4.6 to glm-4.6v for zai-coding-plan provider
- Add opencode/gpt-5-nano as 4th fallback (FREE tier)
- Push gpt-5.2 to 5th position

Fallback chain now:
1. gemini-3-flash (google, github-copilot, opencode)
2. claude-haiku-4-5 (anthropic, github-copilot, opencode)
3. glm-4.6v (zai-coding-plan)
4. gpt-5-nano (opencode) - FREE
5. gpt-5.2 (openai, github-copilot, opencode)

* chore: update bun.lock

---------

Co-authored-by: justsisyphus <justsisyphus@users.noreply.github.com>
This commit is contained in:
YeonGyu-Kim
2026-01-24 15:30:35 +09:00
committed by GitHub
parent 58459e692b
commit 04633ba208
31 changed files with 292 additions and 293 deletions

View File

@@ -88,9 +88,9 @@ oh-my-opencode/
| Sisyphus | anthropic/claude-opus-4-5 | Primary orchestrator |
| Atlas | anthropic/claude-opus-4-5 | Master orchestrator |
| oracle | openai/gpt-5.2 | Consultation, debugging |
| librarian | opencode/glm-4.7-free | Docs, GitHub search |
| explore | opencode/grok-code | Fast codebase grep |
| multimodal-looker | google/gemini-3-flash-preview | PDF/image analysis |
| librarian | opencode/big-pickle | Docs, GitHub search |
| explore | opencode/gpt-5-nano | Fast codebase grep |
| multimodal-looker | google/gemini-3-flash | PDF/image analysis |
| Prometheus | anthropic/claude-opus-4-5 | Strategic planning |
## COMMANDS

View File

@@ -27,13 +27,13 @@
"typescript": "^5.7.3",
},
"optionalDependencies": {
"oh-my-opencode-darwin-arm64": "3.0.0-beta.11",
"oh-my-opencode-darwin-x64": "3.0.0-beta.11",
"oh-my-opencode-linux-arm64": "3.0.0-beta.11",
"oh-my-opencode-linux-arm64-musl": "3.0.0-beta.11",
"oh-my-opencode-linux-x64": "3.0.0-beta.11",
"oh-my-opencode-linux-x64-musl": "3.0.0-beta.11",
"oh-my-opencode-windows-x64": "3.0.0-beta.11",
"oh-my-opencode-darwin-arm64": "3.0.0-beta.16",
"oh-my-opencode-darwin-x64": "3.0.0-beta.16",
"oh-my-opencode-linux-arm64": "3.0.0-beta.16",
"oh-my-opencode-linux-arm64-musl": "3.0.0-beta.16",
"oh-my-opencode-linux-x64": "3.0.0-beta.16",
"oh-my-opencode-linux-x64-musl": "3.0.0-beta.16",
"oh-my-opencode-windows-x64": "3.0.0-beta.16",
},
},
},
@@ -225,19 +225,19 @@
"object-inspect": ["object-inspect@1.13.4", "", {}, "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="],
"oh-my-opencode-darwin-arm64": ["oh-my-opencode-darwin-arm64@3.0.0-beta.11", "", { "os": "darwin", "cpu": "arm64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-7cFv2bbz9HTY7sshgVTu+IhvYf7CT0czDYqHEB+dYfEqFU6TaoSMimq6uHqcWegUUR1T7PNmc0dyjYVw69FeVA=="],
"oh-my-opencode-darwin-arm64": ["oh-my-opencode-darwin-arm64@3.0.0-beta.16", "", { "os": "darwin", "cpu": "arm64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-1gfnTsKpYxTMXpbuV98wProR3RMe6BI/muuSVa3Xy68EEkBJsuRAne6IzFq/yxIMbx9OiQaS5cTE0mxFtxcCGA=="],
"oh-my-opencode-darwin-x64": ["oh-my-opencode-darwin-x64@3.0.0-beta.11", "", { "os": "darwin", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-rGAbDdUySWITIdm2yiuNFB9lFYaSXT8LMtg97LTlOO5vZbI3M+obIS3QlIkBtAhgOTIPB7Ni+T0W44OmJpHoYA=="],
"oh-my-opencode-darwin-x64": ["oh-my-opencode-darwin-x64@3.0.0-beta.16", "", { "os": "darwin", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-/h7kBZAN5Ut9kL7gEtwVVZ49Kw4gZoSVJdrpnh7Wij0a3mlOwqbkgGilK7oUiJ2N8fsxvxEBbTscYOLAdhyVBw=="],
"oh-my-opencode-linux-arm64": ["oh-my-opencode-linux-arm64@3.0.0-beta.11", "", { "os": "linux", "cpu": "arm64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-F9dqwWwGAdqeSkE7Tre5DmHQXwDpU2Z8Jk0lwTJMLj+kMqYFDVPjLPo4iVUdwPpxpmm0pR84u/oonG/2+84/zw=="],
"oh-my-opencode-linux-arm64": ["oh-my-opencode-linux-arm64@3.0.0-beta.16", "", { "os": "linux", "cpu": "arm64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-jW7pl76WerBa7FucKCYcthpbKbhJQSVe6rqUFSbVobjOP9VWslrGdxc9Y8BeiMx9SJEFYwA8/2ROhnOHpH3TxA=="],
"oh-my-opencode-linux-arm64-musl": ["oh-my-opencode-linux-arm64-musl@3.0.0-beta.11", "", { "os": "linux", "cpu": "arm64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-H+zOtHkHd+TmdPj64M1A0zLOk7OHIK4C8yqfLFhfizOIBffT1yOhAs6EpK3EqPhfPLu54ADgcQcu8W96VP24UA=="],
"oh-my-opencode-linux-arm64-musl": ["oh-my-opencode-linux-arm64-musl@3.0.0-beta.16", "", { "os": "linux", "cpu": "arm64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-cXXka0zQDBiFu9mmxa45o3g812w8q/jZRYgdwJsLbj3nm24WXv6uRP7nnVVoZiVmJ2GQbLE1nyGCMkBXFwRGGA=="],
"oh-my-opencode-linux-x64": ["oh-my-opencode-linux-x64@3.0.0-beta.11", "", { "os": "linux", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-IG+KODTJ8rs6cEJ2wN6Zpr6YtvCS5OpYP6jBdGJltmUpjQdMhdMsaY3ysZk+9Vxpx2KC3xj5KLHV1USg3uBTeg=="],
"oh-my-opencode-linux-x64": ["oh-my-opencode-linux-x64@3.0.0-beta.16", "", { "os": "linux", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-4VS1V6DiXdWHQ/AGc3rB1sCxFUlD1REex0Ai/y4tEgA2M0FD0Bu+tjXHhDghUvC8f0kQBRfijnTrtc1Lh7hIrA=="],
"oh-my-opencode-linux-x64-musl": ["oh-my-opencode-linux-x64-musl@3.0.0-beta.11", "", { "os": "linux", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-irV+AuWrHqNm7VT7HO56qgymR0+vEfJbtB3vCq68kprH2V4NQmGp2MNKIYPnUCYL7NEK3H2NX+h06YFZJ/8ELQ=="],
"oh-my-opencode-linux-x64-musl": ["oh-my-opencode-linux-x64-musl@3.0.0-beta.16", "", { "os": "linux", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-PGVe7vyUK3hjSNfvu1fBXTbgbe0OPh7JgB/TZR2U5R54X1k3NBkb1VHX9yxEUSA0VsNR+inE2x+DfEA+7KIruQ=="],
"oh-my-opencode-windows-x64": ["oh-my-opencode-windows-x64@3.0.0-beta.11", "", { "os": "win32", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode.exe" } }, "sha512-exZ/NEwGBlxyWszN7dvOfzbYX0cuhBZXftqAAFOlVP26elDHdo+AmSmLR/4cJyzpR9nCWz4xvl/RYF84bY6OEA=="],
"oh-my-opencode-windows-x64": ["oh-my-opencode-windows-x64@3.0.0-beta.16", "", { "os": "win32", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode.exe" } }, "sha512-1lN/8y4laQnSJDvyARuV5YaETAwBb+PK06QHQzpoK/0asiFoEIBcKNgjaRwau+nBsdRUrQocE2xc6g2ZNH4HUw=="],
"on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="],

View File

@@ -21,13 +21,13 @@ A Category is an agent configuration preset optimized for specific domains.
| Category | Default Model | Use Cases |
|----------|---------------|-----------|
| `visual-engineering` | `google/gemini-3-pro-preview` | Frontend, UI/UX, design, styling, animation |
| `visual-engineering` | `google/gemini-3-pro` | Frontend, UI/UX, design, styling, animation |
| `ultrabrain` | `openai/gpt-5.2-codex` (xhigh) | Deep logical reasoning, complex architecture decisions requiring extensive analysis |
| `artistry` | `google/gemini-3-pro-preview` (max) | Highly creative/artistic tasks, novel ideas |
| `artistry` | `google/gemini-3-pro` (max) | Highly creative/artistic tasks, novel ideas |
| `quick` | `anthropic/claude-haiku-4-5` | Trivial tasks - single file changes, typo fixes, simple modifications |
| `unspecified-low` | `anthropic/claude-sonnet-4-5` | Tasks that don't fit other categories, low effort required |
| `unspecified-high` | `anthropic/claude-opus-4-5` (max) | Tasks that don't fit other categories, high effort required |
| `writing` | `google/gemini-3-flash-preview` | Documentation, prose, technical writing |
| `writing` | `google/gemini-3-flash` | Documentation, prose, technical writing |
### Usage
@@ -177,7 +177,7 @@ You can fine-tune categories in `oh-my-opencode.json`.
"categories": {
// 1. Define new custom category
"korean-writer": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
"temperature": 0.5,
"prompt_append": "You are a Korean technical writer. Maintain a friendly and clear tone."
},

View File

@@ -175,7 +175,7 @@ Configuration files support **JSONC (JSON with Comments)** format. You can use c
/* Category customization */
"categories": {
"visual-engineering": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
},
}

View File

@@ -22,13 +22,13 @@ It asks about your providers (Claude, OpenAI, Gemini, etc.) and generates optima
"agents": {
"oracle": { "model": "openai/gpt-5.2" }, // Use GPT for debugging
"librarian": { "model": "zai-coding-plan/glm-4.7" }, // Cheap model for research
"explore": { "model": "opencode/grok-code" } // Free model for grep
"explore": { "model": "opencode/gpt-5-nano" } // Free model for grep
},
// Override category models (used by delegate_task)
"categories": {
"quick": { "model": "opencode/grok-code" }, // Fast/cheap for trivial tasks
"visual-engineering": { "model": "google/gemini-3-pro-preview" } // Gemini for UI
"quick": { "model": "opencode/gpt-5-nano" }, // Fast/cheap for trivial tasks
"visual-engineering": { "model": "google/gemini-3-pro" } // Gemini for UI
}
}
```
@@ -75,7 +75,7 @@ When both `oh-my-opencode.jsonc` and `oh-my-opencode.json` files exist, `.jsonc`
"model": "openai/gpt-5.2" // GPT for strategic reasoning
},
"explore": {
"model": "opencode/grok-code" // Free & fast for exploration
"model": "opencode/gpt-5-nano" // Free & fast for exploration
},
},
}
@@ -305,7 +305,7 @@ Categories enable domain-specific task delegation via the `delegate_task` tool.
| Category | Model | Description |
| ---------------- | ----------------------------- | ---------------------------------------------------------------------------- |
| `visual` | `google/gemini-3-pro-preview` | Frontend, UI/UX, design-focused tasks. High creativity (temp 0.7). |
| `visual` | `google/gemini-3-pro` | Frontend, UI/UX, design-focused tasks. High creativity (temp 0.7). |
| `business-logic` | `openai/gpt-5.2` | Backend logic, architecture, strategic reasoning. Low creativity (temp 0.1). |
**Usage:**
@@ -332,7 +332,7 @@ Add custom categories in `oh-my-opencode.json`:
"prompt_append": "Focus on data analysis, ML pipelines, and statistical methods."
},
"visual": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
"prompt_append": "Use shadcn/ui components and Tailwind CSS."
}
}
@@ -403,9 +403,9 @@ Each agent has a defined provider priority chain. The system tries providers in
|-------|-------------------|-------------------------|
| **Sisyphus** | `claude-opus-4-5` | anthropic → github-copilot → opencode → antigravity → google |
| **oracle** | `gpt-5.2` | openai → anthropic → google → github-copilot → opencode |
| **librarian** | `glm-4.7-free` | opencode → github-copilot → anthropic |
| **explore** | `grok-code` | opencode → anthropic → github-copilot |
| **multimodal-looker** | `gemini-3-flash-preview` | google → anthropic → zai → openai → github-copilot → opencode |
| **librarian** | `big-pickle` | opencode → github-copilot → anthropic |
| **explore** | `gpt-5-nano` | opencode → anthropic → github-copilot |
| **multimodal-looker** | `gemini-3-flash` | google → anthropic → zai → openai → github-copilot → opencode |
| **Prometheus (Planner)** | `claude-opus-4-5` | anthropic → github-copilot → opencode → antigravity → google |
| **Metis (Plan Consultant)** | `claude-sonnet-4-5` | anthropic → github-copilot → opencode → antigravity → google |
| **Momus (Plan Reviewer)** | `claude-opus-4-5` | anthropic → github-copilot → opencode → antigravity → google |
@@ -417,13 +417,13 @@ Categories follow the same resolution logic:
| Category | Model (no prefix) | Provider Priority Chain |
|----------|-------------------|-------------------------|
| **visual-engineering** | `gemini-3-pro-preview` | google → openai → anthropic → github-copilot → opencode |
| **visual-engineering** | `gemini-3-pro` | google → openai → anthropic → github-copilot → opencode |
| **ultrabrain** | `gpt-5.2-codex` | openai → anthropic → google → github-copilot → opencode |
| **artistry** | `gemini-3-pro-preview` | google → openai → anthropic → github-copilot → opencode |
| **artistry** | `gemini-3-pro` | google → openai → anthropic → github-copilot → opencode |
| **quick** | `claude-haiku-4-5` | anthropic → github-copilot → opencode → antigravity → google |
| **unspecified-low** | `claude-sonnet-4-5` | anthropic → github-copilot → opencode → antigravity → google |
| **unspecified-high** | `claude-opus-4-5` | anthropic → github-copilot → opencode → antigravity → google |
| **writing** | `gemini-3-flash-preview` | google → openai → anthropic → github-copilot → opencode |
| **writing** | `gemini-3-flash` | google → openai → anthropic → github-copilot → opencode |
### Checking Your Configuration

View File

@@ -12,9 +12,9 @@ Oh-My-OpenCode provides 10 specialized AI agents. Each has distinct expertise, o
|-------|-------|---------|
| **Sisyphus** | `anthropic/claude-opus-4-5` | **The default orchestrator.** Plans, delegates, and executes complex tasks using specialized subagents with aggressive parallel execution. Todo-driven workflow with extended thinking (32k budget). |
| **oracle** | `openai/gpt-5.2` | Architecture decisions, code review, debugging. Read-only consultation - stellar logical reasoning and deep analysis. Inspired by AmpCode. |
| **librarian** | `opencode/glm-4.7-free` | Multi-repo analysis, documentation lookup, OSS implementation examples. Deep codebase understanding with evidence-based answers. Inspired by AmpCode. |
| **explore** | `opencode/grok-code` | Fast codebase exploration and contextual grep. Uses Gemini 3 Flash when Antigravity auth is configured, Haiku when Claude max20 is available, otherwise Grok. Inspired by Claude Code. |
| **multimodal-looker** | `google/gemini-3-flash-preview` | Visual content specialist. Analyzes PDFs, images, diagrams to extract information. Saves tokens by having another agent process media. |
| **librarian** | `opencode/big-pickle` | Multi-repo analysis, documentation lookup, OSS implementation examples. Deep codebase understanding with evidence-based answers. Inspired by AmpCode. |
| **explore** | `opencode/gpt-5-nano` | Fast codebase exploration and contextual grep. Uses Gemini 3 Flash when Antigravity auth is configured, Haiku when Claude max20 is available, otherwise GPT-5 Nano. Inspired by Claude Code. |
| **multimodal-looker** | `google/gemini-3-flash` | Visual content specialist. Analyzes PDFs, images, diagrams to extract information. Saves tokens by having another agent process media. |
### Planning Agents

View File

@@ -154,7 +154,7 @@ The `opencode-antigravity-auth` plugin uses different model names than the built
}
```
**Available model names**: `google/antigravity-gemini-3-pro-high`, `google/antigravity-gemini-3-pro-low`, `google/antigravity-gemini-3-flash`, `google/antigravity-claude-sonnet-4-5`, `google/antigravity-claude-sonnet-4-5-thinking-low`, `google/antigravity-claude-sonnet-4-5-thinking-medium`, `google/antigravity-claude-sonnet-4-5-thinking-high`, `google/antigravity-claude-opus-4-5-thinking-low`, `google/antigravity-claude-opus-4-5-thinking-medium`, `google/antigravity-claude-opus-4-5-thinking-high`, `google/gemini-3-pro-preview`, `google/gemini-3-flash-preview`, `google/gemini-2.5-pro`, `google/gemini-2.5-flash`
**Available model names**: `google/antigravity-gemini-3-pro-high`, `google/antigravity-gemini-3-pro-low`, `google/antigravity-gemini-3-flash`, `google/antigravity-claude-sonnet-4-5`, `google/antigravity-claude-sonnet-4-5-thinking-low`, `google/antigravity-claude-sonnet-4-5-thinking-medium`, `google/antigravity-claude-sonnet-4-5-thinking-high`, `google/antigravity-claude-opus-4-5-thinking-low`, `google/antigravity-claude-opus-4-5-thinking-medium`, `google/antigravity-claude-opus-4-5-thinking-high`, `google/gemini-3-pro`, `google/gemini-3-flash`, `google/gemini-2.5-pro`, `google/gemini-2.5-flash`
Then authenticate:
@@ -183,7 +183,7 @@ When GitHub Copilot is the best available provider, oh-my-opencode uses these mo
| ------------- | -------------------------------- |
| **Sisyphus** | `github-copilot/claude-opus-4.5` |
| **Oracle** | `github-copilot/gpt-5.2` |
| **Explore** | `github-copilot/grok-code-fast-1`|
| **Explore** | `github-copilot/gpt-5-nano` |
| **Librarian** | `zai-coding-plan/glm-4.7` (if Z.ai available) or fallback |
GitHub Copilot acts as a proxy provider, routing requests to underlying models based on your subscription.
@@ -203,7 +203,7 @@ If Z.ai is the only provider available, all agents will use GLM models:
#### OpenCode Zen
OpenCode Zen provides access to `opencode/` prefixed models including `opencode/claude-opus-4-5`, `opencode/gpt-5.2`, `opencode/grok-code`, and `opencode/glm-4.7-free`.
OpenCode Zen provides access to `opencode/` prefixed models including `opencode/claude-opus-4-5`, `opencode/gpt-5.2`, `opencode/gpt-5-nano`, and `opencode/big-pickle`.
When OpenCode Zen is the best available provider (no native or Copilot), these models are used:
@@ -211,8 +211,8 @@ When OpenCode Zen is the best available provider (no native or Copilot), these m
| ------------- | -------------------------------- |
| **Sisyphus** | `opencode/claude-opus-4-5` |
| **Oracle** | `opencode/gpt-5.2` |
| **Explore** | `opencode/grok-code` |
| **Librarian** | `opencode/glm-4.7-free` |
| **Explore** | `opencode/gpt-5-nano` |
| **Librarian** | `opencode/big-pickle` |
##### Setup

View File

@@ -114,10 +114,10 @@ Each agent has a **provider priority chain**. The system tries providers in orde
Example: multimodal-looker
google → anthropic → zai → openai → github-copilot → opencode
↓ ↓ ↓ ↓ ↓ ↓
gemini haiku glm-4.6v gpt-5.2 fallback fallback
gemini haiku glm-4.6 gpt-5.2 fallback fallback
```
If you have Gemini, it uses `google/gemini-3-flash-preview`. No Gemini but have Claude? Uses `anthropic/claude-haiku-4-5`. And so on.
If you have Gemini, it uses `google/gemini-3-flash`. No Gemini but have Claude? Uses `anthropic/claude-haiku-4-5`. And so on.
### Example Configuration
@@ -130,12 +130,12 @@ Here's a real-world config for a user with **Claude, OpenAI, Gemini, and Z.ai**
// Override specific agents only - rest use fallback chain
"Atlas": { "model": "anthropic/claude-sonnet-4-5", "variant": "max" },
"librarian": { "model": "zai-coding-plan/glm-4.7" },
"explore": { "model": "opencode/grok-code" },
"multimodal-looker": { "model": "zai-coding-plan/glm-4.6v" }
"explore": { "model": "opencode/gpt-5-nano" },
"multimodal-looker": { "model": "zai-coding-plan/glm-4.6" }
},
"categories": {
// Override categories for cost optimization
"quick": { "model": "opencode/grok-code" },
"quick": { "model": "opencode/gpt-5-nano" },
"unspecified-low": { "model": "zai-coding-plan/glm-4.7" }
},
"experimental": {

View File

@@ -31,9 +31,9 @@ agents/
| Sisyphus | anthropic/claude-opus-4-5 | 0.1 | Primary orchestrator |
| Atlas | anthropic/claude-opus-4-5 | 0.1 | Master orchestrator |
| oracle | openai/gpt-5.2 | 0.1 | Consultation, debugging |
| librarian | opencode/glm-4.7-free | 0.1 | Docs, GitHub search |
| explore | opencode/grok-code | 0.1 | Fast contextual grep |
| multimodal-looker | google/gemini-3-flash-preview | 0.1 | PDF/image analysis |
| librarian | opencode/big-pickle | 0.1 | Docs, GitHub search |
| explore | opencode/gpt-5-nano | 0.1 | Fast contextual grep |
| multimodal-looker | google/gemini-3-flash | 0.1 | PDF/image analysis |
| Prometheus | anthropic/claude-opus-4-5 | 0.1 | Strategic planning |
| Metis | anthropic/claude-sonnet-4-5 | 0.3 | Pre-planning analysis |
| Momus | anthropic/claude-sonnet-4-5 | 0.1 | Plan validation |

View File

@@ -123,7 +123,7 @@ describe("buildAgent with category and skills", () => {
const agent = buildAgent(source["test-agent"], TEST_MODEL)
// #then - category's built-in model is applied
expect(agent.model).toBe("google/gemini-3-pro-preview")
expect(agent.model).toBe("google/gemini-3-pro")
})
test("agent with category and existing model keeps existing model", () => {

View File

@@ -5,54 +5,54 @@ exports[`generateModelConfig no providers available returns ULTIMATE_FALLBACK fo
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"explore": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"momus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"multimodal-looker": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"oracle": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"prometheus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"sisyphus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
},
"categories": {
"artistry": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"quick": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"ultrabrain": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"unspecified-high": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"unspecified-low": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"visual-engineering": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"writing": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
},
}
@@ -196,10 +196,10 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"model": "openai/gpt-5.2",
},
"explore": {
"model": "opencode/grok-code",
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "openai/gpt-5.2",
@@ -230,7 +230,7 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"model": "openai/gpt-5.2",
},
"quick": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"ultrabrain": {
"model": "openai/gpt-5.2-codex",
@@ -263,10 +263,10 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"model": "openai/gpt-5.2",
},
"explore": {
"model": "opencode/grok-code",
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "openai/gpt-5.2",
@@ -297,7 +297,7 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"model": "openai/gpt-5.2",
},
"quick": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"ultrabrain": {
"model": "openai/gpt-5.2-codex",
@@ -327,57 +327,57 @@ exports[`generateModelConfig single native provider uses Gemini models when only
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"explore": {
"model": "opencode/grok-code",
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
"variant": "max",
},
"momus": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
"variant": "max",
},
"multimodal-looker": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
"oracle": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"prometheus": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"sisyphus": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
},
"categories": {
"artistry": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
"variant": "max",
},
"quick": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
"ultrabrain": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"unspecified-high": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
"unspecified-low": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
"visual-engineering": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"writing": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
},
}
@@ -388,57 +388,57 @@ exports[`generateModelConfig single native provider uses Gemini models with isMa
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"explore": {
"model": "opencode/grok-code",
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
"variant": "max",
},
"momus": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
"variant": "max",
},
"multimodal-looker": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
"oracle": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"prometheus": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"sisyphus": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
},
"categories": {
"artistry": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
"variant": "max",
},
"quick": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
"ultrabrain": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"unspecified-high": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"unspecified-low": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
"visual-engineering": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"writing": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
},
}
@@ -466,7 +466,7 @@ exports[`generateModelConfig all native providers uses preferred models from fal
"variant": "medium",
},
"multimodal-looker": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
"oracle": {
"model": "openai/gpt-5.2",
@@ -482,7 +482,7 @@ exports[`generateModelConfig all native providers uses preferred models from fal
},
"categories": {
"artistry": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
"variant": "max",
},
"quick": {
@@ -499,10 +499,10 @@ exports[`generateModelConfig all native providers uses preferred models from fal
"model": "anthropic/claude-sonnet-4-5",
},
"visual-engineering": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"writing": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
},
}
@@ -530,7 +530,7 @@ exports[`generateModelConfig all native providers uses preferred models with isM
"variant": "medium",
},
"multimodal-looker": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
"oracle": {
"model": "openai/gpt-5.2",
@@ -547,7 +547,7 @@ exports[`generateModelConfig all native providers uses preferred models with isM
},
"categories": {
"artistry": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
"variant": "max",
},
"quick": {
@@ -565,10 +565,10 @@ exports[`generateModelConfig all native providers uses preferred models with isM
"model": "anthropic/claude-sonnet-4-5",
},
"visual-engineering": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"writing": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
},
}
@@ -585,7 +585,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models when on
"model": "opencode/claude-haiku-4-5",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "opencode/claude-opus-4-5",
@@ -596,7 +596,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models when on
"variant": "medium",
},
"multimodal-looker": {
"model": "opencode/gemini-3-flash-preview",
"model": "opencode/gemini-3-flash",
},
"oracle": {
"model": "opencode/gpt-5.2",
@@ -612,7 +612,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models when on
},
"categories": {
"artistry": {
"model": "opencode/gemini-3-pro-preview",
"model": "opencode/gemini-3-pro",
"variant": "max",
},
"quick": {
@@ -629,10 +629,10 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models when on
"model": "opencode/claude-sonnet-4-5",
},
"visual-engineering": {
"model": "opencode/gemini-3-pro-preview",
"model": "opencode/gemini-3-pro",
},
"writing": {
"model": "opencode/gemini-3-flash-preview",
"model": "opencode/gemini-3-flash",
},
},
}
@@ -649,7 +649,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models with is
"model": "opencode/claude-haiku-4-5",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "opencode/claude-opus-4-5",
@@ -660,7 +660,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models with is
"variant": "medium",
},
"multimodal-looker": {
"model": "opencode/gemini-3-flash-preview",
"model": "opencode/gemini-3-flash",
},
"oracle": {
"model": "opencode/gpt-5.2",
@@ -677,7 +677,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models with is
},
"categories": {
"artistry": {
"model": "opencode/gemini-3-pro-preview",
"model": "opencode/gemini-3-pro",
"variant": "max",
},
"quick": {
@@ -695,10 +695,10 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models with is
"model": "opencode/claude-sonnet-4-5",
},
"visual-engineering": {
"model": "opencode/gemini-3-pro-preview",
"model": "opencode/gemini-3-pro",
},
"writing": {
"model": "opencode/gemini-3-flash-preview",
"model": "opencode/gemini-3-flash",
},
},
}
@@ -712,7 +712,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models when
"model": "github-copilot/claude-sonnet-4.5",
},
"explore": {
"model": "opencode/grok-code",
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "github-copilot/claude-sonnet-4.5",
@@ -726,7 +726,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models when
"variant": "medium",
},
"multimodal-looker": {
"model": "github-copilot/gemini-3-flash-preview",
"model": "github-copilot/gemini-3-flash",
},
"oracle": {
"model": "github-copilot/gpt-5.2",
@@ -742,7 +742,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models when
},
"categories": {
"artistry": {
"model": "github-copilot/gemini-3-pro-preview",
"model": "github-copilot/gemini-3-pro",
"variant": "max",
},
"quick": {
@@ -759,10 +759,10 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models when
"model": "github-copilot/claude-sonnet-4.5",
},
"visual-engineering": {
"model": "github-copilot/gemini-3-pro-preview",
"model": "github-copilot/gemini-3-pro",
},
"writing": {
"model": "github-copilot/gemini-3-flash-preview",
"model": "github-copilot/gemini-3-flash",
},
},
}
@@ -776,7 +776,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models with
"model": "github-copilot/claude-sonnet-4.5",
},
"explore": {
"model": "opencode/grok-code",
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "github-copilot/claude-sonnet-4.5",
@@ -790,7 +790,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models with
"variant": "medium",
},
"multimodal-looker": {
"model": "github-copilot/gemini-3-flash-preview",
"model": "github-copilot/gemini-3-flash",
},
"oracle": {
"model": "github-copilot/gpt-5.2",
@@ -807,7 +807,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models with
},
"categories": {
"artistry": {
"model": "github-copilot/gemini-3-pro-preview",
"model": "github-copilot/gemini-3-pro",
"variant": "max",
},
"quick": {
@@ -825,10 +825,10 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models with
"model": "github-copilot/claude-sonnet-4.5",
},
"visual-engineering": {
"model": "github-copilot/gemini-3-pro-preview",
"model": "github-copilot/gemini-3-pro",
},
"writing": {
"model": "github-copilot/gemini-3-flash-preview",
"model": "github-copilot/gemini-3-flash",
},
},
}
@@ -839,51 +839,51 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian whe
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"explore": {
"model": "opencode/grok-code",
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "zai-coding-plan/glm-4.7",
},
"metis": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"momus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"multimodal-looker": {
"model": "zai-coding-plan/glm-4.6v",
"model": "zai-coding-plan/glm-4.6",
},
"oracle": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"prometheus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"sisyphus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
},
"categories": {
"artistry": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"quick": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"ultrabrain": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"unspecified-high": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"unspecified-low": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"visual-engineering": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"writing": {
"model": "zai-coding-plan/glm-4.7",
@@ -897,28 +897,28 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian wit
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"explore": {
"model": "opencode/grok-code",
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "zai-coding-plan/glm-4.7",
},
"metis": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"momus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"multimodal-looker": {
"model": "zai-coding-plan/glm-4.6v",
"model": "zai-coding-plan/glm-4.6",
},
"oracle": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"prometheus": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"sisyphus": {
"model": "zai-coding-plan/glm-4.7",
@@ -926,22 +926,22 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian wit
},
"categories": {
"artistry": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"quick": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"ultrabrain": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"unspecified-high": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"unspecified-low": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"visual-engineering": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"writing": {
"model": "zai-coding-plan/glm-4.7",
@@ -961,7 +961,7 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + OpenCode Zen
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "opencode/glm-4.7-free",
"model": "opencode/big-pickle",
},
"metis": {
"model": "anthropic/claude-opus-4-5",
@@ -972,7 +972,7 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + OpenCode Zen
"variant": "medium",
},
"multimodal-looker": {
"model": "opencode/gemini-3-flash-preview",
"model": "opencode/gemini-3-flash",
},
"oracle": {
"model": "opencode/gpt-5.2",
@@ -988,7 +988,7 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + OpenCode Zen
},
"categories": {
"artistry": {
"model": "opencode/gemini-3-pro-preview",
"model": "opencode/gemini-3-pro",
"variant": "max",
},
"quick": {
@@ -1005,10 +1005,10 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + OpenCode Zen
"model": "anthropic/claude-sonnet-4-5",
},
"visual-engineering": {
"model": "opencode/gemini-3-pro-preview",
"model": "opencode/gemini-3-pro",
},
"writing": {
"model": "opencode/gemini-3-flash-preview",
"model": "opencode/gemini-3-flash",
},
},
}
@@ -1022,7 +1022,7 @@ exports[`generateModelConfig mixed provider scenarios uses OpenAI + Copilot comb
"model": "github-copilot/claude-sonnet-4.5",
},
"explore": {
"model": "opencode/grok-code",
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "github-copilot/claude-sonnet-4.5",
@@ -1036,7 +1036,7 @@ exports[`generateModelConfig mixed provider scenarios uses OpenAI + Copilot comb
"variant": "medium",
},
"multimodal-looker": {
"model": "github-copilot/gemini-3-flash-preview",
"model": "github-copilot/gemini-3-flash",
},
"oracle": {
"model": "openai/gpt-5.2",
@@ -1052,7 +1052,7 @@ exports[`generateModelConfig mixed provider scenarios uses OpenAI + Copilot comb
},
"categories": {
"artistry": {
"model": "github-copilot/gemini-3-pro-preview",
"model": "github-copilot/gemini-3-pro",
"variant": "max",
},
"quick": {
@@ -1069,10 +1069,10 @@ exports[`generateModelConfig mixed provider scenarios uses OpenAI + Copilot comb
"model": "github-copilot/claude-sonnet-4.5",
},
"visual-engineering": {
"model": "github-copilot/gemini-3-pro-preview",
"model": "github-copilot/gemini-3-pro",
},
"writing": {
"model": "github-copilot/gemini-3-flash-preview",
"model": "github-copilot/gemini-3-flash",
},
},
}
@@ -1163,7 +1163,7 @@ exports[`generateModelConfig mixed provider scenarios uses Gemini + Claude combi
"model": "anthropic/claude-opus-4-5",
},
"multimodal-looker": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
"oracle": {
"model": "anthropic/claude-opus-4-5",
@@ -1179,7 +1179,7 @@ exports[`generateModelConfig mixed provider scenarios uses Gemini + Claude combi
},
"categories": {
"artistry": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
"variant": "max",
},
"quick": {
@@ -1196,10 +1196,10 @@ exports[`generateModelConfig mixed provider scenarios uses Gemini + Claude combi
"model": "anthropic/claude-sonnet-4-5",
},
"visual-engineering": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"writing": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
},
}
@@ -1227,7 +1227,7 @@ exports[`generateModelConfig mixed provider scenarios uses all fallback provider
"variant": "medium",
},
"multimodal-looker": {
"model": "github-copilot/gemini-3-flash-preview",
"model": "github-copilot/gemini-3-flash",
},
"oracle": {
"model": "github-copilot/gpt-5.2",
@@ -1243,7 +1243,7 @@ exports[`generateModelConfig mixed provider scenarios uses all fallback provider
},
"categories": {
"artistry": {
"model": "github-copilot/gemini-3-pro-preview",
"model": "github-copilot/gemini-3-pro",
"variant": "max",
},
"quick": {
@@ -1260,10 +1260,10 @@ exports[`generateModelConfig mixed provider scenarios uses all fallback provider
"model": "github-copilot/claude-sonnet-4.5",
},
"visual-engineering": {
"model": "github-copilot/gemini-3-pro-preview",
"model": "github-copilot/gemini-3-pro",
},
"writing": {
"model": "github-copilot/gemini-3-flash-preview",
"model": "github-copilot/gemini-3-flash",
},
},
}
@@ -1291,7 +1291,7 @@ exports[`generateModelConfig mixed provider scenarios uses all providers togethe
"variant": "medium",
},
"multimodal-looker": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
"oracle": {
"model": "openai/gpt-5.2",
@@ -1307,7 +1307,7 @@ exports[`generateModelConfig mixed provider scenarios uses all providers togethe
},
"categories": {
"artistry": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
"variant": "max",
},
"quick": {
@@ -1324,10 +1324,10 @@ exports[`generateModelConfig mixed provider scenarios uses all providers togethe
"model": "anthropic/claude-sonnet-4-5",
},
"visual-engineering": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"writing": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
},
}
@@ -1355,7 +1355,7 @@ exports[`generateModelConfig mixed provider scenarios uses all providers with is
"variant": "medium",
},
"multimodal-looker": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
"oracle": {
"model": "openai/gpt-5.2",
@@ -1372,7 +1372,7 @@ exports[`generateModelConfig mixed provider scenarios uses all providers with is
},
"categories": {
"artistry": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
"variant": "max",
},
"quick": {
@@ -1390,10 +1390,10 @@ exports[`generateModelConfig mixed provider scenarios uses all providers with is
"model": "anthropic/claude-sonnet-4-5",
},
"visual-engineering": {
"model": "google/gemini-3-pro-preview",
"model": "google/gemini-3-pro",
},
"writing": {
"model": "google/gemini-3-flash-preview",
"model": "google/gemini-3-flash",
},
},
}

View File

@@ -277,7 +277,7 @@ describe("generateOmoConfig - model fallback system", () => {
// #then should use ultimate fallback for all agents
expect(result.$schema).toBe("https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json")
expect((result.agents as Record<string, { model: string }>).sisyphus.model).toBe("opencode/glm-4.7-free")
expect((result.agents as Record<string, { model: string }>).sisyphus.model).toBe("opencode/big-pickle")
})
test("uses zai-coding-plan/glm-4.7 for librarian when Z.ai available", () => {

View File

@@ -27,7 +27,7 @@ describe("model-resolution check", () => {
// #then: Should have category entries
const visual = info.categories.find((c) => c.name === "visual-engineering")
expect(visual).toBeDefined()
expect(visual!.requirement.fallbackChain[0]?.model).toBe("gemini-3-pro-preview")
expect(visual!.requirement.fallbackChain[0]?.model).toBe("gemini-3-pro")
expect(visual!.requirement.fallbackChain[0]?.providers).toContain("google")
})
})

View File

@@ -178,7 +178,7 @@ async function runTuiMode(detected: DetectedConfig): Promise<InstallConfig | nul
const claude = await p.select({
message: "Do you have a Claude Pro/Max subscription?",
options: [
{ value: "no" as const, label: "No", hint: "Will use opencode/glm-4.7-free as fallback" },
{ value: "no" as const, label: "No", hint: "Will use opencode/big-pickle as fallback" },
{ value: "yes" as const, label: "Yes (standard)", hint: "Claude Opus 4.5 for orchestration" },
{ value: "max20" as const, label: "Yes (max20 mode)", hint: "Full power with Claude Sonnet 4.5 for Librarian" },
],
@@ -363,7 +363,7 @@ async function runNonTuiInstall(args: InstallArgs): Promise<number> {
}
if (!config.hasClaude && !config.hasOpenAI && !config.hasGemini && !config.hasCopilot && !config.hasOpencodeZen) {
printWarning("No model providers configured. Using opencode/glm-4.7-free as fallback.")
printWarning("No model providers configured. Using opencode/big-pickle as fallback.")
}
console.log(`${SYMBOLS.star} ${color.bold(color.green(isUpdate ? "Configuration updated!" : "Installation complete!"))}`)
@@ -480,7 +480,7 @@ export async function install(args: InstallArgs): Promise<number> {
}
if (!config.hasClaude && !config.hasOpenAI && !config.hasGemini && !config.hasCopilot && !config.hasOpencodeZen) {
p.log.warn("No model providers configured. Using opencode/glm-4.7-free as fallback.")
p.log.warn("No model providers configured. Using opencode/big-pickle as fallback.")
}
p.note(formatConfigSummary(config), isUpdate ? "Updated Configuration" : "Installation Complete")

View File

@@ -310,15 +310,15 @@ describe("generateModelConfig", () => {
})
describe("explore agent special cases", () => {
test("explore uses grok-code when only Gemini available (no Claude)", () => {
test("explore uses gpt-5-nano when only Gemini available (no Claude)", () => {
// #given only Gemini is available (no Claude)
const config = createConfig({ hasGemini: true })
// #when generateModelConfig is called
const result = generateModelConfig(config)
// #then explore should use grok-code (Claude haiku not available)
expect(result.agents?.explore?.model).toBe("opencode/grok-code")
// #then explore should use gpt-5-nano (Claude haiku not available)
expect(result.agents?.explore?.model).toBe("opencode/gpt-5-nano")
})
test("explore uses Claude haiku when Claude available", () => {
@@ -343,15 +343,15 @@ describe("generateModelConfig", () => {
expect(result.agents?.explore?.model).toBe("anthropic/claude-haiku-4-5")
})
test("explore uses grok-code when only OpenAI available", () => {
test("explore uses gpt-5-nano when only OpenAI available", () => {
// #given only OpenAI is available
const config = createConfig({ hasOpenAI: true })
// #when generateModelConfig is called
const result = generateModelConfig(config)
// #then explore should use grok-code (fallback)
expect(result.agents?.explore?.model).toBe("opencode/grok-code")
// #then explore should use gpt-5-nano (fallback)
expect(result.agents?.explore?.model).toBe("opencode/gpt-5-nano")
})
})

View File

@@ -36,7 +36,7 @@ export interface GeneratedOmoConfig {
const ZAI_MODEL = "zai-coding-plan/glm-4.7"
const ULTIMATE_FALLBACK = "opencode/glm-4.7-free"
const ULTIMATE_FALLBACK = "opencode/big-pickle"
const SCHEMA_URL = "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json"
function toProviderAvailability(config: InstallConfig): ProviderAvailability {
@@ -103,7 +103,7 @@ function getSisyphusFallbackChain(isMaxPlan: boolean): FallbackEntry[] {
return [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-5" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
]
}
@@ -139,14 +139,14 @@ export function generateModelConfig(config: InstallConfig): GeneratedOmoConfig {
continue
}
// Special case: explore uses Claude haiku → OpenCode grok-code
// Special case: explore uses Claude haiku → OpenCode gpt-5-nano
if (role === "explore") {
if (avail.native.claude) {
agents[role] = { model: "anthropic/claude-haiku-4-5" }
} else if (avail.opencodeZen) {
agents[role] = { model: "opencode/claude-haiku-4-5" }
} else {
agents[role] = { model: "opencode/grok-code" }
agents[role] = { model: "opencode/gpt-5-nano" }
}
continue
}

View File

@@ -151,7 +151,7 @@ describe("TaskToastManager", () => {
description: "Task with category default model",
agent: "sisyphus-junior",
isBackground: false,
modelInfo: { model: "google/gemini-3-pro-preview", type: "category-default" as const },
modelInfo: { model: "google/gemini-3-pro", type: "category-default" as const },
}
// #when - addTask is called

View File

@@ -103,7 +103,7 @@ describe("createThinkModeHook integration", () => {
const hook = createThinkModeHook()
const input = createMockInput(
"github-copilot",
"gemini-3-pro-preview",
"gemini-3-pro",
"think about this"
)
@@ -112,7 +112,7 @@ describe("createThinkModeHook integration", () => {
// #then should upgrade to high variant and inject google thinking config
const message = input.message as MessageWithInjectedProps
expect(input.message.model?.modelID).toBe("gemini-3-pro-preview-high")
expect(input.message.model?.modelID).toBe("gemini-3-pro-high")
expect(message.providerOptions).toBeDefined()
const googleOptions = (
message.providerOptions as Record<string, unknown>
@@ -125,7 +125,7 @@ describe("createThinkModeHook integration", () => {
const hook = createThinkModeHook()
const input = createMockInput(
"github-copilot",
"gemini-3-flash-preview",
"gemini-3-flash",
"ultrathink"
)
@@ -134,7 +134,7 @@ describe("createThinkModeHook integration", () => {
// #then should upgrade to high variant
const message = input.message as MessageWithInjectedProps
expect(input.message.model?.modelID).toBe("gemini-3-flash-preview-high")
expect(input.message.model?.modelID).toBe("gemini-3-flash-high")
expect(message.providerOptions).toBeDefined()
})
})

View File

@@ -50,7 +50,7 @@ describe("think-mode switcher", () => {
describe("Gemini models via github-copilot", () => {
it("should resolve github-copilot Gemini Pro to google config", () => {
// #given a github-copilot provider with Gemini Pro model
const config = getThinkingConfig("github-copilot", "gemini-3-pro-preview")
const config = getThinkingConfig("github-copilot", "gemini-3-pro")
// #then should return google thinking config
expect(config).not.toBeNull()
@@ -65,7 +65,7 @@ describe("think-mode switcher", () => {
// #given a github-copilot provider with Gemini Flash model
const config = getThinkingConfig(
"github-copilot",
"gemini-3-flash-preview"
"gemini-3-flash"
)
// #then should return google thinking config
@@ -159,11 +159,11 @@ describe("think-mode switcher", () => {
it("should handle Gemini preview variants", () => {
// #given Gemini preview model IDs
expect(getHighVariant("gemini-3-pro-preview")).toBe(
"gemini-3-pro-preview-high"
expect(getHighVariant("gemini-3-pro")).toBe(
"gemini-3-pro-high"
)
expect(getHighVariant("gemini-3-flash-preview")).toBe(
"gemini-3-flash-preview-high"
expect(getHighVariant("gemini-3-flash")).toBe(
"gemini-3-flash-high"
)
})

View File

@@ -92,9 +92,7 @@ const HIGH_VARIANT_MAP: Record<string, string> = {
// Gemini
"gemini-3-pro": "gemini-3-pro-high",
"gemini-3-pro-low": "gemini-3-pro-high",
"gemini-3-pro-preview": "gemini-3-pro-preview-high",
"gemini-3-flash": "gemini-3-flash-high",
"gemini-3-flash-preview": "gemini-3-flash-preview-high",
// GPT-5
"gpt-5": "gpt-5-high",
"gpt-5-mini": "gpt-5-mini-high",

View File

@@ -27,7 +27,7 @@ describe("mergeConfigs", () => {
temperature: 0.3,
},
visual: {
model: "google/gemini-3-pro-preview",
model: "google/gemini-3-pro",
},
},
} as unknown as OhMyOpenCodeConfig;
@@ -41,7 +41,7 @@ describe("mergeConfigs", () => {
// #then quick should be preserved from base
expect(result.categories?.quick?.model).toBe("anthropic/claude-haiku-4-5");
// #then visual should be added from override
expect(result.categories?.visual?.model).toBe("google/gemini-3-pro-preview");
expect(result.categories?.visual?.model).toBe("google/gemini-3-pro");
});
it("should preserve base categories when override has no categories", () => {

View File

@@ -25,7 +25,7 @@ describe("Prometheus category config resolution", () => {
// #then
expect(config).toBeDefined()
expect(config?.model).toBe("google/gemini-3-pro-preview")
expect(config?.model).toBe("google/gemini-3-pro")
})
test("user categories override default categories", () => {

View File

@@ -46,7 +46,7 @@ describe("Agent Config Integration", () => {
const config = {
sisyphus: { model: "anthropic/claude-opus-4-5" },
oracle: { model: "openai/gpt-5.2" },
librarian: { model: "opencode/glm-4.7-free" },
librarian: { model: "opencode/big-pickle" },
}
// #when - migration is applied
@@ -65,7 +65,7 @@ describe("Agent Config Integration", () => {
Sisyphus: { model: "anthropic/claude-opus-4-5" },
oracle: { model: "openai/gpt-5.2" },
"Prometheus (Planner)": { model: "anthropic/claude-opus-4-5" },
librarian: { model: "opencode/glm-4.7-free" },
librarian: { model: "opencode/big-pickle" },
}
// #when - migration is applied

View File

@@ -37,7 +37,7 @@ describe("migrateAgentNames", () => {
const agents = {
oracle: { model: "openai/gpt-5.2" },
librarian: { model: "google/gemini-3-flash" },
explore: { model: "opencode/grok-code" },
explore: { model: "opencode/gpt-5-nano" },
}
// #when: Migrate agent names
@@ -47,7 +47,7 @@ describe("migrateAgentNames", () => {
expect(changed).toBe(false)
expect(migrated["oracle"]).toEqual({ model: "openai/gpt-5.2" })
expect(migrated["librarian"]).toEqual({ model: "google/gemini-3-flash" })
expect(migrated["explore"]).toEqual({ model: "opencode/grok-code" })
expect(migrated["explore"]).toEqual({ model: "opencode/gpt-5-nano" })
})
test("handles case-insensitive migration", () => {
@@ -418,7 +418,7 @@ describe("migrateAgentConfigToCategory", () => {
test("migrates model to category when mapping exists", () => {
// #given: Config with a model that has a category mapping
const config = {
model: "google/gemini-3-pro-preview",
model: "google/gemini-3-pro",
temperature: 0.5,
top_p: 0.9,
}
@@ -467,7 +467,7 @@ describe("migrateAgentConfigToCategory", () => {
test("handles all mapped models correctly", () => {
// #given: Configs for each mapped model
const configs = [
{ model: "google/gemini-3-pro-preview" },
{ model: "google/gemini-3-pro" },
{ model: "openai/gpt-5.2" },
{ model: "anthropic/claude-haiku-4-5" },
{ model: "anthropic/claude-opus-4-5" },
@@ -536,7 +536,7 @@ describe("shouldDeleteAgentConfig", () => {
// #given: Config with fields matching category defaults
const config = {
category: "visual-engineering",
model: "google/gemini-3-pro-preview",
model: "google/gemini-3-pro",
}
// #when: Check if config should be deleted
@@ -664,7 +664,7 @@ describe("migrateConfigFile with backup", () => {
agents: {
"multimodal-looker": { model: "anthropic/claude-haiku-4-5" },
oracle: { model: "openai/gpt-5.2" },
"my-custom-agent": { model: "google/gemini-3-pro-preview" },
"my-custom-agent": { model: "google/gemini-3-pro" },
},
}
@@ -680,7 +680,7 @@ describe("migrateConfigFile with backup", () => {
const agents = rawConfig.agents as Record<string, Record<string, unknown>>
expect(agents["multimodal-looker"].model).toBe("anthropic/claude-haiku-4-5")
expect(agents.oracle.model).toBe("openai/gpt-5.2")
expect(agents["my-custom-agent"].model).toBe("google/gemini-3-pro-preview")
expect(agents["my-custom-agent"].model).toBe("google/gemini-3-pro")
})
test("preserves category setting when explicitly set", () => {

View File

@@ -82,7 +82,7 @@ export const HOOK_NAME_MAP: Record<string, string | null> = {
* This map will be removed in a future major version once migration period ends.
*/
export const MODEL_TO_CATEGORY_MAP: Record<string, string> = {
"google/gemini-3-pro-preview": "visual-engineering",
"google/gemini-3-pro": "visual-engineering",
"openai/gpt-5.2": "ultrabrain",
"anthropic/claude-haiku-4-5": "quick",
"anthropic/claude-opus-4-5": "unspecified-high",

View File

@@ -80,7 +80,7 @@ describe("fetchAvailableModels", () => {
openai: { id: "openai", models: { "gpt-5.2-codex": { id: "gpt-5.2-codex" } } },
anthropic: { id: "anthropic", models: { "claude-sonnet-4-5": { id: "claude-sonnet-4-5" } } },
google: { id: "google", models: { "gemini-3-flash": { id: "gemini-3-flash" } } },
opencode: { id: "opencode", models: { "grok-code": { id: "grok-code" } } },
opencode: { id: "opencode", models: { "gpt-5-nano": { id: "gpt-5-nano" } } },
})
const result = await fetchAvailableModels()
@@ -89,7 +89,7 @@ describe("fetchAvailableModels", () => {
expect(result.has("openai/gpt-5.2-codex")).toBe(true)
expect(result.has("anthropic/claude-sonnet-4-5")).toBe(true)
expect(result.has("google/gemini-3-flash")).toBe(true)
expect(result.has("opencode/grok-code")).toBe(true)
expect(result.has("opencode/gpt-5-nano")).toBe(true)
})
})

View File

@@ -69,19 +69,19 @@ describe("AGENT_MODEL_REQUIREMENTS", () => {
expect(primary.model).toBe("claude-haiku-4-5")
})
test("multimodal-looker has valid fallbackChain with gemini-3-flash-preview as primary", () => {
test("multimodal-looker has valid fallbackChain with gemini-3-flash as primary", () => {
// #given - multimodal-looker agent requirement
const multimodalLooker = AGENT_MODEL_REQUIREMENTS["multimodal-looker"]
// #when - accessing multimodal-looker requirement
// #then - fallbackChain exists with gemini-3-flash-preview as first entry
// #then - fallbackChain exists with gemini-3-flash as first entry
expect(multimodalLooker).toBeDefined()
expect(multimodalLooker.fallbackChain).toBeArray()
expect(multimodalLooker.fallbackChain.length).toBeGreaterThan(0)
const primary = multimodalLooker.fallbackChain[0]
expect(primary.providers[0]).toBe("google")
expect(primary.model).toBe("gemini-3-flash-preview")
expect(primary.model).toBe("gemini-3-flash")
})
test("prometheus has valid fallbackChain with claude-opus-4-5 as primary", () => {
@@ -199,19 +199,19 @@ describe("CATEGORY_MODEL_REQUIREMENTS", () => {
expect(primary.providers[0]).toBe("openai")
})
test("visual-engineering has valid fallbackChain with gemini-3-pro-preview as primary", () => {
test("visual-engineering has valid fallbackChain with gemini-3-pro as primary", () => {
// #given - visual-engineering category requirement
const visualEngineering = CATEGORY_MODEL_REQUIREMENTS["visual-engineering"]
// #when - accessing visual-engineering requirement
// #then - fallbackChain exists with gemini-3-pro-preview as first entry
// #then - fallbackChain exists with gemini-3-pro as first entry
expect(visualEngineering).toBeDefined()
expect(visualEngineering.fallbackChain).toBeArray()
expect(visualEngineering.fallbackChain.length).toBeGreaterThan(0)
const primary = visualEngineering.fallbackChain[0]
expect(primary.providers[0]).toBe("google")
expect(primary.model).toBe("gemini-3-pro-preview")
expect(primary.model).toBe("gemini-3-pro")
})
test("quick has valid fallbackChain with claude-haiku-4-5 as primary", () => {
@@ -260,34 +260,34 @@ describe("CATEGORY_MODEL_REQUIREMENTS", () => {
expect(primary.providers[0]).toBe("anthropic")
})
test("artistry has valid fallbackChain with gemini-3-pro-preview as primary", () => {
test("artistry has valid fallbackChain with gemini-3-pro as primary", () => {
// #given - artistry category requirement
const artistry = CATEGORY_MODEL_REQUIREMENTS["artistry"]
// #when - accessing artistry requirement
// #then - fallbackChain exists with gemini-3-pro-preview as first entry
// #then - fallbackChain exists with gemini-3-pro as first entry
expect(artistry).toBeDefined()
expect(artistry.fallbackChain).toBeArray()
expect(artistry.fallbackChain.length).toBeGreaterThan(0)
const primary = artistry.fallbackChain[0]
expect(primary.model).toBe("gemini-3-pro-preview")
expect(primary.model).toBe("gemini-3-pro")
expect(primary.variant).toBe("max")
expect(primary.providers[0]).toBe("google")
})
test("writing has valid fallbackChain with gemini-3-flash-preview as primary", () => {
test("writing has valid fallbackChain with gemini-3-flash as primary", () => {
// #given - writing category requirement
const writing = CATEGORY_MODEL_REQUIREMENTS["writing"]
// #when - accessing writing requirement
// #then - fallbackChain exists with gemini-3-flash-preview as first entry
// #then - fallbackChain exists with gemini-3-flash as first entry
expect(writing).toBeDefined()
expect(writing.fallbackChain).toBeArray()
expect(writing.fallbackChain.length).toBeGreaterThan(0)
const primary = writing.fallbackChain[0]
expect(primary.model).toBe("gemini-3-flash-preview")
expect(primary.model).toBe("gemini-3-flash")
expect(primary.providers[0]).toBe("google")
})
@@ -344,7 +344,7 @@ describe("FallbackEntry type", () => {
// #given - a FallbackEntry without variant
const entry: FallbackEntry = {
providers: ["opencode", "anthropic"],
model: "glm-4.7-free",
model: "big-pickle",
}
// #when - accessing variant
@@ -374,7 +374,7 @@ describe("ModelRequirement type", () => {
test("ModelRequirement variant is optional", () => {
// #given - a ModelRequirement without top-level variant
const requirement: ModelRequirement = {
fallbackChain: [{ providers: ["opencode"], model: "glm-4.7-free" }],
fallbackChain: [{ providers: ["opencode"], model: "big-pickle" }],
}
// #when - accessing variant

View File

@@ -15,34 +15,35 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["zai-coding-plan"], model: "glm-4.7" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2-codex", variant: "medium" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
],
},
oracle: {
fallbackChain: [
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
],
},
librarian: {
fallbackChain: [
{ providers: ["zai-coding-plan"], model: "glm-4.7" },
{ providers: ["opencode"], model: "glm-4.7-free" },
{ providers: ["opencode"], model: "big-pickle" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-5" },
],
},
explore: {
fallbackChain: [
{ providers: ["anthropic", "opencode"], model: "claude-haiku-4-5" },
{ providers: ["opencode"], model: "grok-code" },
{ providers: ["opencode"], model: "gpt-5-nano" },
],
},
"multimodal-looker": {
fallbackChain: [
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash-preview" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-haiku-4-5" },
{ providers: ["zai-coding-plan"], model: "glm-4.6v" },
{ providers: ["opencode"], model: "gpt-5-nano" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
],
},
@@ -50,28 +51,28 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
],
},
metis: {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview", variant: "max" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" },
],
},
momus: {
fallbackChain: [
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "medium" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview", variant: "max" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" },
],
},
atlas: {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-5" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
],
},
}
@@ -79,7 +80,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
export const CATEGORY_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
"visual-engineering": {
fallbackChain: [
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" },
],
@@ -88,12 +89,12 @@ export const CATEGORY_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
fallbackChain: [
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2-codex", variant: "xhigh" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
],
},
artistry: {
fallbackChain: [
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview", variant: "max" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
],
@@ -101,27 +102,27 @@ export const CATEGORY_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
quick: {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-haiku-4-5" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash-preview" },
{ providers: ["opencode"], model: "grok-code" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
{ providers: ["opencode"], model: "gpt-5-nano" },
],
},
"unspecified-low": {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-5" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2-codex", variant: "medium" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash-preview" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
],
},
"unspecified-high": {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro-preview" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
],
},
writing: {
fallbackChain: [
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash-preview" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-5" },
{ providers: ["zai-coding-plan"], model: "glm-4.7" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },

View File

@@ -236,9 +236,9 @@ describe("resolveModelWithFallback", () => {
// #given
const input: ExtendedModelResolutionInput = {
fallbackChain: [
{ providers: ["anthropic", "opencode", "github-copilot"], model: "grok-code" },
{ providers: ["anthropic", "opencode", "github-copilot"], model: "gpt-5-nano" },
],
availableModels: new Set(["opencode/grok-code", "github-copilot/grok-code-preview"]),
availableModels: new Set(["opencode/gpt-5-nano", "github-copilot/gpt-5-nano-preview"]),
systemDefaultModel: "google/gemini-3-pro",
}
@@ -246,7 +246,7 @@ describe("resolveModelWithFallback", () => {
const result = resolveModelWithFallback(input)
// #then
expect(result.model).toBe("opencode/grok-code")
expect(result.model).toBe("opencode/gpt-5-nano")
expect(result.source).toBe("provider-fallback")
})
@@ -392,20 +392,20 @@ describe("resolveModelWithFallback", () => {
test("tries all providers in first entry before moving to second entry", () => {
// #given
const availableModels = new Set(["google/gemini-3-pro-preview"])
const availableModels = new Set(["google/gemini-3-pro"])
// #when
const result = resolveModelWithFallback({
fallbackChain: [
{ providers: ["openai", "anthropic"], model: "gpt-5.2" },
{ providers: ["google"], model: "gemini-3-pro-preview" },
{ providers: ["google"], model: "gemini-3-pro" },
],
availableModels,
systemDefaultModel: "system/default",
})
// #then
expect(result.model).toBe("google/gemini-3-pro-preview")
expect(result.model).toBe("google/gemini-3-pro")
expect(result.source).toBe("provider-fallback")
})

View File

@@ -156,13 +156,13 @@ Approach:
export const DEFAULT_CATEGORIES: Record<string, CategoryConfig> = {
"visual-engineering": { model: "google/gemini-3-pro-preview" },
"visual-engineering": { model: "google/gemini-3-pro" },
ultrabrain: { model: "openai/gpt-5.2-codex", variant: "xhigh" },
artistry: { model: "google/gemini-3-pro-preview", variant: "max" },
artistry: { model: "google/gemini-3-pro", variant: "max" },
quick: { model: "anthropic/claude-haiku-4-5" },
"unspecified-low": { model: "anthropic/claude-sonnet-4-5" },
"unspecified-high": { model: "anthropic/claude-opus-4-5", variant: "max" },
writing: { model: "google/gemini-3-flash-preview" },
writing: { model: "google/gemini-3-flash" },
}
export const CATEGORY_PROMPT_APPENDS: Record<string, string> = {

View File

@@ -20,7 +20,7 @@ describe("sisyphus-task", () => {
// #when / #then
expect(category).toBeDefined()
expect(category.model).toBe("google/gemini-3-pro-preview")
expect(category.model).toBe("google/gemini-3-pro")
})
test("ultrabrain category has model and variant config", () => {
@@ -142,7 +142,7 @@ describe("sisyphus-task", () => {
// #then
expect(result).not.toBeNull()
expect(result!.config.model).toBe("google/gemini-3-pro-preview")
expect(result!.config.model).toBe("google/gemini-3-pro")
expect(result!.promptAppend).toContain("VISUAL/UI")
})
@@ -166,7 +166,7 @@ describe("sisyphus-task", () => {
const categoryName = "visual-engineering"
const userCategories = {
"visual-engineering": {
model: "google/gemini-3-pro-preview",
model: "google/gemini-3-pro",
prompt_append: "Custom instructions here",
},
}
@@ -206,7 +206,7 @@ describe("sisyphus-task", () => {
const categoryName = "visual-engineering"
const userCategories = {
"visual-engineering": {
model: "google/gemini-3-pro-preview",
model: "google/gemini-3-pro",
temperature: 0.3,
},
}
@@ -229,7 +229,7 @@ describe("sisyphus-task", () => {
// #then - category's built-in model wins over inheritedModel
expect(result).not.toBeNull()
expect(result!.config.model).toBe("google/gemini-3-pro-preview")
expect(result!.config.model).toBe("google/gemini-3-pro")
})
test("systemDefaultModel is used as fallback when custom category has no model", () => {
@@ -271,7 +271,7 @@ describe("sisyphus-task", () => {
// #then
expect(result).not.toBeNull()
expect(result!.config.model).toBe("google/gemini-3-pro-preview")
expect(result!.config.model).toBe("google/gemini-3-pro")
})
})
@@ -960,7 +960,7 @@ describe("sisyphus-task", () => {
const mockClient = {
app: { agents: async () => ({ data: [] }) },
config: { get: async () => ({ data: { model: SYSTEM_DEFAULT_MODEL } }) },
model: { list: async () => [{ id: "google/gemini-3-pro-preview" }] },
model: { list: async () => [{ id: "google/gemini-3-pro" }] },
session: {
get: async () => ({ data: { directory: "/project" } }),
create: async () => ({ data: { id: "ses_unstable_gemini" } }),
@@ -1144,7 +1144,7 @@ describe("sisyphus-task", () => {
const mockClient = {
app: { agents: async () => ({ data: [] }) },
config: { get: async () => ({ data: { model: SYSTEM_DEFAULT_MODEL } }) },
model: { list: async () => [{ id: "google/gemini-3-pro-preview" }] },
model: { list: async () => [{ id: "google/gemini-3-pro" }] },
session: {
get: async () => ({ data: { directory: "/project" } }),
create: async () => ({ data: { id: "ses_artistry_gemini" } }),
@@ -1170,7 +1170,7 @@ describe("sisyphus-task", () => {
abort: new AbortController().signal,
}
// #when - artistry category (gemini-3-pro-preview with max variant)
// #when - artistry category (gemini-3-pro with max variant)
const result = await tool.execute(
{
description: "Test artistry forced background",
@@ -1189,7 +1189,7 @@ describe("sisyphus-task", () => {
}, { timeout: 20000 })
test("writing category (gemini-flash) with run_in_background=false should force background but wait for result", async () => {
// #given - writing uses gemini-3-flash-preview
// #given - writing uses gemini-3-flash
const { createDelegateTask } = require("./tools")
let launchCalled = false
@@ -1209,7 +1209,7 @@ describe("sisyphus-task", () => {
const mockClient = {
app: { agents: async () => ({ data: [] }) },
config: { get: async () => ({ data: { model: SYSTEM_DEFAULT_MODEL } }) },
model: { list: async () => [{ id: "google/gemini-3-flash-preview" }] },
model: { list: async () => [{ id: "google/gemini-3-flash" }] },
session: {
get: async () => ({ data: { directory: "/project" } }),
create: async () => ({ data: { id: "ses_writing_gemini" } }),
@@ -1235,7 +1235,7 @@ describe("sisyphus-task", () => {
abort: new AbortController().signal,
}
// #when - writing category (gemini-3-flash-preview)
// #when - writing category (gemini-3-flash)
const result = await tool.execute(
{
description: "Test writing forced background",
@@ -1535,9 +1535,9 @@ describe("sisyphus-task", () => {
// #when resolveCategoryConfig is called
const resolved = resolveCategoryConfig(categoryName, { userCategories, inheritedModel, systemDefaultModel: SYSTEM_DEFAULT_MODEL })
// #then should use category's built-in model (gemini-3-pro-preview for visual-engineering)
// #then should use category's built-in model (gemini-3-pro for visual-engineering)
expect(resolved).not.toBeNull()
expect(resolved!.model).toBe("google/gemini-3-pro-preview")
expect(resolved!.model).toBe("google/gemini-3-pro")
})
test("systemDefaultModel is used when no other model is available", () => {