Skip to content
Open
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions packages/cli/src/agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,7 @@ async function customProviderAdd() {
const harness = unwrap(await p.select({
message: 'Harness (which CLI to use)',
options: harnessOptions(),
})) as 'claude' | 'codex';
})) as 'claude' | 'codex' | 'gemini';

const baseUrl = unwrap(await p.text({
message: "Base URL (e.g. 'https://proxy.example.com/v1')",
Expand Down Expand Up @@ -322,6 +322,7 @@ function agentProvider(agentId: string, providerArg?: string, flag?: string, mod
switch (providerArg) {
case 'anthropic':
case 'openai':
case 'google':
case 'opencode':
agent.provider = providerArg;
if (model) agent.model = model;
Expand All @@ -331,7 +332,7 @@ function agentProvider(agentId: string, providerArg?: string, flag?: string, mod
agent.provider = providerArg;
if (model) agent.model = model;
} else {
p.log.error('Usage: tinyclaw agent provider <agent_id> {anthropic|openai|opencode|custom:<id>} [--model MODEL]');
p.log.error('Usage: tinyclaw agent provider <agent_id> {anthropic|openai|google|opencode|custom:<id>} [--model MODEL]');
process.exit(1);
}
}
Expand Down
40 changes: 31 additions & 9 deletions packages/cli/src/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,26 @@
import * as p from '@clack/prompts';
import { readSettings, writeSettings, requireSettings } from './shared.ts';

/**
 * Look up the per-provider model section in settings.
 * Any provider name without a dedicated section falls back to the
 * Anthropic section (the historical default).
 */
function getModelSection(settings: ReturnType<typeof requireSettings>, provider: string) {
  switch (provider) {
    case 'openai':
      return settings.models?.openai;
    case 'opencode':
      return settings.models?.opencode;
    case 'google':
      return settings.models?.google;
    default:
      return settings.models?.anthropic;
  }
}

/**
 * Human-readable display name for a provider key.
 * Unrecognized provider keys are labeled as Anthropic (the default provider).
 */
function getProviderLabel(provider: string): string {
  const labels: Record<string, string> = {
    openai: 'OpenAI/Codex',
    opencode: 'OpenCode',
    google: 'Google/Gemini',
  };
  return labels[provider] ?? 'Anthropic';
}

// --- provider show ---

function providerShow() {
const settings = requireSettings();
const provider = settings.models?.provider || 'anthropic';
const modelSection = provider === 'openai' ? settings.models?.openai : settings.models?.anthropic;
const modelSection = getModelSection(settings, provider);
const model = modelSection?.model || '';

if (model) {
Expand Down Expand Up @@ -43,8 +57,8 @@ function providerSet(providerName: string, args: string[]) {
}
}

if (providerName !== 'anthropic' && providerName !== 'openai') {
p.log.error('Usage: provider {anthropic|openai} [--model MODEL] [--auth-token TOKEN]');
if (providerName !== 'anthropic' && providerName !== 'openai' && providerName !== 'google') {
p.log.error('Usage: provider {anthropic|openai|google} [--model MODEL] [--auth-token TOKEN]');
process.exit(1);
}

Expand All @@ -68,15 +82,18 @@ function providerSet(providerName: string, args: string[]) {
}
}

p.log.success(`Switched to ${providerName === 'anthropic' ? 'Anthropic' : 'OpenAI/Codex'} provider with model: ${modelArg}`);
p.log.success(`Switched to ${getProviderLabel(providerName)} provider with model: ${modelArg}`);
if (updatedCount > 0) {
p.log.message(` Updated ${updatedCount} agent(s) from ${oldProvider} to ${providerName}/${modelArg}`);
}
} else {
p.log.success(`Switched to ${providerName === 'anthropic' ? 'Anthropic' : 'OpenAI/Codex'} provider`);
p.log.success(`Switched to ${getProviderLabel(providerName)} provider`);
if (providerName === 'openai') {
p.log.message("Use 'tinyclaw model {gpt-5.3-codex|gpt-5.2}' to set the model.");
p.log.message("Note: Make sure you have the 'codex' CLI installed.");
} else if (providerName === 'google') {
p.log.message("Use 'tinyclaw model {gemini-2.5-flash|gemini-2.5-pro}' to set the model.");
p.log.message("Note: Make sure you have the 'gemini' CLI installed.");
} else {
p.log.message("Use 'tinyclaw model {sonnet|opus}' to set the model.");
}
Expand All @@ -85,7 +102,7 @@ function providerSet(providerName: string, args: string[]) {
if (authTokenArg) {
if (!settings.models[providerName]) settings.models[providerName] = {};
(settings.models as any)[providerName].auth_token = authTokenArg;
p.log.success(`${providerName === 'anthropic' ? 'Anthropic' : 'OpenAI'} auth token saved`);
p.log.success(`${getProviderLabel(providerName)} auth token saved`);
}

writeSettings(settings);
Expand All @@ -96,7 +113,7 @@ function providerSet(providerName: string, args: string[]) {
function modelShow() {
const settings = requireSettings();
const provider = settings.models?.provider || 'anthropic';
const modelSection = provider === 'openai' ? settings.models?.openai : settings.models?.anthropic;
const modelSection = getModelSection(settings, provider);
const model = modelSection?.model || '';

if (model) {
Expand Down Expand Up @@ -125,17 +142,21 @@ function modelSet(modelName: string) {
// Determine provider from model name
const anthropicModels = ['sonnet', 'opus'];
const openaiModels = ['gpt-5.2', 'gpt-5.3-codex'];
const googleModels = ['gemini-2.5-flash', 'gemini-2.5-pro'];

let targetProvider: string;
if (anthropicModels.includes(modelName)) {
targetProvider = 'anthropic';
} else if (openaiModels.includes(modelName)) {
targetProvider = 'openai';
} else if (googleModels.includes(modelName)) {
targetProvider = 'google';
} else {
p.log.error('Usage: model {sonnet|opus|gpt-5.2|gpt-5.3-codex}');
p.log.error('Usage: model {sonnet|opus|gpt-5.2|gpt-5.3-codex|gemini-2.5-flash|gemini-2.5-pro}');
p.log.message('');
p.log.message('Anthropic models: sonnet, opus');
p.log.message('OpenAI models: gpt-5.2, gpt-5.3-codex');
p.log.message('Google models: gemini-2.5-flash, gemini-2.5-pro');
process.exit(1);
}

Expand Down Expand Up @@ -176,6 +197,7 @@ switch (command) {
break;
case 'anthropic':
case 'openai':
case 'google':
providerSet(command, args);
break;
case 'model':
Expand All @@ -187,7 +209,7 @@ switch (command) {
break;
default:
p.log.error(`Unknown provider command: ${command}`);
p.log.message('Usage: provider {show|anthropic|openai} [--model MODEL] [--auth-token TOKEN]');
p.log.message('Usage: provider {show|anthropic|openai|google} [--model MODEL] [--auth-token TOKEN]');
p.log.message(' provider model [name]');
process.exit(1);
}
1 change: 1 addition & 0 deletions packages/cli/src/setup-wizard.ts
Original file line number Diff line number Diff line change
Expand Up @@ -166,6 +166,7 @@ async function main() {
provider: providerKey,
...(providerKey === 'anthropic' ? { anthropic: { model } } : {}),
...(providerKey === 'openai' ? { openai: { model } } : {}),
...(providerKey === 'google' ? { google: { model } } : {}),
...(providerKey === 'opencode' ? { opencode: { model } } : {}),
},
monitoring: {
Expand Down
14 changes: 13 additions & 1 deletion packages/cli/src/shared.ts
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,7 @@ export function providerOptions(includeCustom = false): ProviderOption[] {
const opts: ProviderOption[] = [
{ value: 'anthropic', label: 'Anthropic (Claude)', hint: 'recommended' },
{ value: 'openai', label: 'OpenAI (Codex/GPT)' },
{ value: 'google', label: 'Google (Gemini CLI)' },
{ value: 'opencode', label: 'OpenCode' },
];
if (includeCustom) {
Expand All @@ -107,6 +108,14 @@ export function openaiModelOptions(): ProviderOption[] {
];
}

/**
 * Selectable Gemini models for interactive prompts, plus the
 * '__custom__' sentinel that lets the user type an arbitrary model name.
 */
export function googleModelOptions(): ProviderOption[] {
  const flash: ProviderOption = {
    value: 'gemini-2.5-flash',
    label: 'Gemini 2.5 Flash',
    hint: 'recommended',
  };
  const pro: ProviderOption = {
    value: 'gemini-2.5-pro',
    label: 'Gemini 2.5 Pro',
  };
  const custom: ProviderOption = {
    value: '__custom__',
    label: 'Custom',
    hint: 'enter model name',
  };
  return [flash, pro, custom];
}

export function opencodeModelOptions(): ProviderOption[] {
return [
{ value: 'opencode/claude-sonnet-4-5', label: 'opencode/claude-sonnet-4-5', hint: 'recommended' },
Expand All @@ -129,6 +138,8 @@ export async function promptModel(provider: string): Promise<string> {

if (provider === 'anthropic') {
options = anthropicModelOptions();
} else if (provider === 'google') {
options = googleModelOptions();
} else if (provider === 'opencode') {
options = opencodeModelOptions();
customHint = 'Enter model name (e.g. provider/model)';
Expand All @@ -152,11 +163,12 @@ export async function promptModel(provider: string): Promise<string> {
}

/**
* Prompt for harness selection (claude or codex).
* Prompt for harness selection.
*/
/**
 * Options for the harness picker: which installed CLI binary a
 * custom provider should drive.
 */
export function harnessOptions(): ProviderOption[] {
  const entries: Array<[string, string]> = [
    ['claude', 'claude (Anthropic CLI)'],
    ['codex', 'codex (OpenAI CLI)'],
    ['gemini', 'gemini (Google Gemini CLI)'],
  ];
  return entries.map(([value, label]) => ({ value, label }));
}
1 change: 0 additions & 1 deletion packages/core/src/agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -198,4 +198,3 @@ export function buildSystemPrompt(

return prompt;
}

15 changes: 14 additions & 1 deletion packages/core/src/config.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import fs from 'fs';
import path from 'path';
import { jsonrepair } from 'jsonrepair';
import { Settings, AgentConfig, TeamConfig, CLAUDE_MODEL_IDS, CODEX_MODEL_IDS, OPENCODE_MODEL_IDS } from './types';
import { Settings, AgentConfig, TeamConfig, CLAUDE_MODEL_IDS, CODEX_MODEL_IDS, GEMINI_MODEL_IDS, OPENCODE_MODEL_IDS } from './types';

export const SCRIPT_DIR = path.resolve(__dirname, '../../..');
export const TINYCLAW_HOME = process.env.TINYCLAW_HOME
Expand Down Expand Up @@ -45,6 +45,9 @@ export function getSettings(): Settings {
} else if (settings?.models?.opencode) {
if (!settings.models) settings.models = {};
settings.models.provider = 'opencode';
} else if (settings?.models?.google) {
if (!settings.models) settings.models = {};
settings.models.provider = 'google';
} else if (settings?.models?.anthropic) {
if (!settings.models) settings.models = {};
settings.models.provider = 'anthropic';
Expand All @@ -68,6 +71,8 @@ export function getDefaultAgentFromModels(settings: Settings): AgentConfig {
model = settings?.models?.openai?.model || 'gpt-5.3-codex';
} else if (provider === 'opencode') {
model = settings?.models?.opencode?.model || 'sonnet';
} else if (provider === 'google') {
model = settings?.models?.google?.model || 'gemini-2.5-flash';
} else {
model = settings?.models?.anthropic?.model || 'sonnet';
}
Expand Down Expand Up @@ -124,3 +129,11 @@ export function resolveCodexModel(model: string): string {
/**
 * Resolve the model ID for OpenCode.
 * Falls back to the raw model string from settings if no mapping is found,
 * and to '' when the model value itself is empty.
 */
export function resolveOpenCodeModel(model: string): string {
  return OPENCODE_MODEL_IDS[model] || model || '';
}

/**
 * Resolve the model ID for Gemini.
 * Maps a settings-level alias through GEMINI_MODEL_IDS; falls back to the
 * raw model string from settings if no mapping is found, and to '' when
 * the model value itself is empty.
 */
export function resolveGeminiModel(model: string): string {
  const mapped = GEMINI_MODEL_IDS[model];
  if (mapped) {
    return mapped;
  }
  return model || '';
}
106 changes: 103 additions & 3 deletions packages/core/src/invoke.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,14 @@ import { spawn } from 'child_process';
import fs from 'fs';
import path from 'path';
import { AgentConfig, CustomProvider, TeamConfig } from './types';
import { SCRIPT_DIR, resolveClaudeModel, resolveCodexModel, resolveOpenCodeModel, getSettings } from './config';
import {
SCRIPT_DIR,
resolveClaudeModel,
resolveCodexModel,
resolveOpenCodeModel,
resolveGeminiModel,
getSettings,
} from './config';
import { log } from './logging';
import { ensureAgentDirectory, buildSystemPrompt } from './agent';

Expand Down Expand Up @@ -48,7 +55,7 @@ export async function runCommand(command: string, args: string[], cwd?: string,
}

/**
* Invoke a single agent with a message. Contains all Claude/Codex invocation logic.
* Invoke a single agent with a message. Contains all Claude/Codex/Gemini invocation logic.
* Returns the raw response text.
*/
export async function invokeAgent(
Expand Down Expand Up @@ -95,7 +102,7 @@ export async function invokeAgent(
throw new Error(`Custom provider '${customId}' not found in settings.custom_providers`);
}
// Map harness back to built-in provider for CLI selection
provider = customProvider.harness === 'codex' ? 'openai' : 'anthropic';
provider = customProvider.harness === 'codex' ? 'openai' : customProvider.harness === 'gemini' ? 'google' : 'anthropic';

// Build env overrides based on harness
if (customProvider.harness === 'claude') {
Expand All @@ -105,6 +112,10 @@ export async function invokeAgent(
} else if (customProvider.harness === 'codex') {
envOverrides.OPENAI_API_KEY = customProvider.api_key;
envOverrides.OPENAI_BASE_URL = customProvider.base_url;
} else if (customProvider.harness === 'gemini') {
envOverrides.GOOGLE_API_KEY = customProvider.api_key;
envOverrides.GEMINI_API_KEY = customProvider.api_key;
envOverrides.GEMINI_SANDBOX = 'true';
}

log('INFO', `Using custom provider '${customId}' (harness: ${customProvider.harness}, base_url: ${customProvider.base_url})`);
Expand All @@ -115,6 +126,9 @@ export async function invokeAgent(
envOverrides.ANTHROPIC_API_KEY = settings.models.anthropic.auth_token;
} else if (provider === 'openai' && settings.models?.openai?.auth_token) {
envOverrides.OPENAI_API_KEY = settings.models.openai.auth_token;
} else if (provider === 'google' && settings.models?.google?.auth_token) {
envOverrides.GOOGLE_API_KEY = settings.models.google.auth_token;
envOverrides.GEMINI_API_KEY = settings.models.google.auth_token;
}
}

Expand Down Expand Up @@ -215,6 +229,92 @@ export async function invokeAgent(
}

return response || 'Sorry, I could not generate a response from OpenCode.';
} else if (provider === 'google') {
const modelId = resolveGeminiModel(effectiveModel);
log('INFO', `Using Gemini CLI (agent: ${agentId}${modelId ? `, model: ${modelId}` : ''})`);

if (shouldReset) {
log('INFO', `Resetting Gemini state for agent: ${agentId}`);
}

const geminiRoot = path.join(agentDir, '.tinyclaw', 'gemini');
const homeDir = path.join(geminiRoot, 'home');
const geminiHomeDir = path.join(homeDir, '.gemini');
const systemPromptPath = path.join(geminiRoot, 'system.md');

fs.mkdirSync(path.join(agentDir, '.tinyclaw'), { recursive: true });
fs.mkdirSync(geminiRoot, { recursive: true });
if (shouldReset) {
fs.rmSync(homeDir, { recursive: true, force: true });
}
fs.mkdirSync(homeDir, { recursive: true });
fs.mkdirSync(geminiHomeDir, { recursive: true });

const trimmedPrompt = systemPrompt.trim();
if (trimmedPrompt) {
fs.writeFileSync(systemPromptPath, trimmedPrompt + '\n');
} else if (fs.existsSync(systemPromptPath)) {
fs.rmSync(systemPromptPath, { force: true });
}

envOverrides.GEMINI_CLI_HOME = homeDir;
if (trimmedPrompt) {
envOverrides.GEMINI_SYSTEM_MD = systemPromptPath;
} else {
envOverrides.GEMINI_SYSTEM_MD = '';
}

const buildGeminiArgs = (resumeLatest: boolean): string[] => {
const args = ['--approval-mode=yolo', '--output-format', 'json'];
if (resumeLatest) {
args.push('--resume', 'latest');
}
if (modelId) {
args.push('--model', modelId);
}
args.push('--prompt', message);
return args;
};

let geminiOutput: string;
try {
geminiOutput = await runCommand('gemini', buildGeminiArgs(!shouldReset), workingDir, envOverrides);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
if (!shouldReset && errorMessage.includes('No previous sessions found for this project')) {
log('INFO', `No Gemini session to resume for agent ${agentId}; retrying without resume`);
geminiOutput = await runCommand('gemini', buildGeminiArgs(false), workingDir, envOverrides);
} else {
throw error;
}
}

let response = '';
const trimmedOutput = geminiOutput.trim();
const candidates = [trimmedOutput, ...trimmedOutput.split('\n').map(line => line.trim()).filter(Boolean).reverse()];
for (const candidate of candidates) {
try {
const parsed = JSON.parse(candidate);
if (parsed?.error) {
const errorMessage = typeof parsed.error === 'string'
? parsed.error
: parsed.error?.message || JSON.stringify(parsed.error);
throw new Error(errorMessage);
}
if (typeof parsed?.response === 'string') {
response = parsed.response.trim();
break;
}
} catch (error) {
if (!(error instanceof SyntaxError)) {
throw error;
}
}
}
if (!response) {
response = trimmedOutput;
}
return response || 'Sorry, I could not generate a response from Gemini.';
} else {
// Default to Claude (Anthropic)
log('INFO', `Using Claude provider (agent: ${agentId})`);
Expand Down
Loading