feat(cli): librarian/explore model fallback based on installer settings (#299)

* feat(cli): librarian uses gemini-3-flash when hasGemini (antigravity auth)

Closes #294

* feat(cli): use gemini-3-flash for explore when hasGemini + update docs

* feat(cli): fix explore agent fallback logic to use haiku for max20 Claude users

- Use gemini-3-flash for both librarian and explore when hasGemini
- Use haiku for explore when Claude max20 is available (hasClaude && isMax20)
- Fall back to big-pickle for both when other models unavailable (see the sketch below)
- Updated all README files to document the fallback precedence
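
The resulting precedence, as a minimal TypeScript sketch (illustrative only: the InstallConfig fields mirror the flags used in the diff below, and pickFallbackModels is a hypothetical helper rather than actual installer code):

interface InstallConfig {
  hasClaude: boolean
  hasChatGPT: boolean
  hasGemini: boolean
  isMax20: boolean
}

// Hypothetical helper sketching the fallback precedence described above.
function pickFallbackModels(config: InstallConfig): Record<string, { model: string }> {
  const agents: Record<string, { model: string }> = {}
  if (config.hasGemini) {
    // Gemini (antigravity) auth available: librarian and explore both use gemini-3-flash.
    agents["librarian"] = { model: "google/gemini-3-flash" }
    agents["explore"] = { model: "google/gemini-3-flash" }
  } else if (config.hasClaude && config.isMax20) {
    // Claude max20 available: explore uses haiku; librarian keeps its default model.
    agents["explore"] = { model: "anthropic/claude-haiku-4-5" }
  } else if (!config.isMax20) {
    // Neither Gemini nor Claude max20 available: both fall back to big-pickle.
    agents["librarian"] = { model: "opencode/big-pickle" }
    agents["explore"] = { model: "opencode/big-pickle" }
  }
  return agents
}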

🤖 Generated with assistance of [OhMyOpenCode](https://github.com/code-yeongyu/oh-my-opencode)
Authored by YeonGyu-Kim on 2025-12-28 14:27:35 +09:00; committed by GitHub
parent 889d80d0ca
commit 17b7dd396e
5 changed files with 17 additions and 10 deletions


@@ -146,9 +146,16 @@ export function generateOmoConfig(installConfig: InstallConfig): Record<string,
 if (!installConfig.hasClaude) {
   agents["Sisyphus"] = { model: "opencode/big-pickle" }
 }
 if (installConfig.hasGemini) {
   agents["librarian"] = { model: "google/gemini-3-flash" }
   agents["explore"] = { model: "google/gemini-3-flash" }
 } else if (installConfig.hasClaude && installConfig.isMax20) {
   agents["explore"] = { model: "anthropic/claude-haiku-4-5" }
-} else {
-  agents["librarian"] = { model: "opencode/big-pickle" }
+} else if (!installConfig.isMax20) {
+  agents["librarian"] = { model: "opencode/big-pickle" }
+  agents["explore"] = { model: "opencode/big-pickle" }
 }
 if (!installConfig.hasChatGPT) {