diff --git a/apps/cli/README.md b/apps/cli/README.md
index 8dec1f3a1c6..678d0c33cee 100644
--- a/apps/cli/README.md
+++ b/apps/cli/README.md
@@ -190,6 +190,7 @@ Tokens are valid for 90 days. The CLI will prompt you to re-authenticate when yo
 | `-k, --api-key <key>` | API key for the LLM provider | From env var |
 | `--provider <provider>` | API provider (roo, anthropic, openai, openrouter, etc.) | `openrouter` (or `roo` if authenticated) |
 | `-m, --model <model>` | Model to use | `anthropic/claude-opus-4.6` |
+| `-b, --base-url <url>` | Base URL for the LLM provider (e.g., for OpenAI-compatible APIs) | None |
 | `--mode <mode>` | Mode to start in (code, architect, ask, debug, etc.) | `code` |
 | `--terminal-shell <path>` | Absolute shell path for inline terminal command execution | Auto-detected shell |
 | `-r, --reasoning-effort <effort>` | Reasoning effort level (unspecified, disabled, none, minimal, low, medium, high, xhigh) | `medium` |
diff --git a/apps/cli/src/agent/extension-host.ts b/apps/cli/src/agent/extension-host.ts
index 393990301f1..07b23d64221 100644
--- a/apps/cli/src/agent/extension-host.ts
+++ b/apps/cli/src/agent/extension-host.ts
@@ -71,6 +71,7 @@ export interface ExtensionHostOptions {
 	provider: SupportedProvider
 	apiKey?: string
 	model: string
+	baseUrl?: string
 	workspacePath: string
 	extensionPath: string
 	nonInteractive?: boolean
@@ -227,7 +228,12 @@ export class ExtensionHost extends EventEmitter implements ExtensionHostInterfac
 			experiments: {
 				customTools: true,
 			},
-			...getProviderSettings(this.options.provider, this.options.apiKey, this.options.model),
+			...getProviderSettings(
+				this.options.provider,
+				this.options.apiKey,
+				this.options.model,
+				this.options.baseUrl,
+			),
 		}
 
 		this.initialSettings = this.options.nonInteractive
diff --git a/apps/cli/src/commands/cli/run.ts b/apps/cli/src/commands/cli/run.ts
index 62760919e7e..312356cbe0d 100644
--- a/apps/cli/src/commands/cli/run.ts
+++ b/apps/cli/src/commands/cli/run.ts
@@ -219,6 +219,7 @@ export async function run(promptArg: string | undefined, flagOptions: FlagOption
 		user: null,
 		provider: effectiveProvider,
 		model: effectiveModel,
+		baseUrl: flagOptions.baseUrl,
 		workspacePath: effectiveWorkspacePath,
 		extensionPath: path.resolve(flagOptions.extension || getDefaultExtensionPath(__dirname)),
 		nonInteractive: !effectiveRequireApproval,
diff --git a/apps/cli/src/index.ts b/apps/cli/src/index.ts
index 2805e6c9099..dce43720141 100644
--- a/apps/cli/src/index.ts
+++ b/apps/cli/src/index.ts
@@ -47,6 +47,7 @@ program
 	.option("-k, --api-key <key>", "API key for the LLM provider")
 	.option("--provider <provider>", "API provider (roo, anthropic, openai, openrouter, etc.)")
 	.option("-m, --model <model>", "Model to use", DEFAULT_FLAGS.model)
+	.option("-b, --base-url <url>", "Base URL for the LLM provider (e.g., for OpenAI-compatible APIs)")
 	.option("--mode <mode>", "Mode to start in (code, architect, ask, debug, etc.)", DEFAULT_FLAGS.mode)
 	.option("--terminal-shell <path>", "Absolute path to shell executable for inline terminal commands")
 	.option(
diff --git a/apps/cli/src/lib/utils/provider.ts b/apps/cli/src/lib/utils/provider.ts
index 64aec430c1b..03ee6f3958e 100644
--- a/apps/cli/src/lib/utils/provider.ts
+++ b/apps/cli/src/lib/utils/provider.ts
@@ -5,6 +5,7 @@ import type { SupportedProvider } from "@/types/index.js"
 const envVarMap: Record<SupportedProvider, string> = {
 	anthropic: "ANTHROPIC_API_KEY",
 	"openai-native": "OPENAI_API_KEY",
+	openai: "OPENAI_API_KEY",
 	gemini: "GOOGLE_API_KEY",
 	openrouter: "OPENROUTER_API_KEY",
 	"vercel-ai-gateway": "VERCEL_AI_GATEWAY_API_KEY",
@@ -24,6 +25,7 @@ export function getProviderSettings(
 	provider: SupportedProvider,
 	apiKey: string | undefined,
 	model: string | undefined,
+	baseUrl: string | undefined,
 ): RooCodeSettings {
 	const config: RooCodeSettings = { apiProvider: provider }
 
@@ -32,17 +34,25 @@ export function getProviderSettings(
 			if (apiKey) config.apiKey = apiKey
 			if (model) config.apiModelId = model
 			break
+		case "openai":
+			if (apiKey) config.openAiApiKey = apiKey
+			if (model) config.openAiModelId = model
+			if (baseUrl) config.openAiBaseUrl = baseUrl
+			break
 		case "openai-native":
 			if (apiKey) config.openAiNativeApiKey = apiKey
 			if (model) config.apiModelId = model
+			if (baseUrl) config.openAiNativeBaseUrl = baseUrl
 			break
 		case "gemini":
 			if (apiKey) config.geminiApiKey = apiKey
 			if (model) config.apiModelId = model
+			if (baseUrl) config.googleGeminiBaseUrl = baseUrl
 			break
 		case "openrouter":
 			if (apiKey) config.openRouterApiKey = apiKey
 			if (model) config.openRouterModelId = model
+			if (baseUrl) config.openRouterBaseUrl = baseUrl
 			break
 		case "vercel-ai-gateway":
 			if (apiKey) config.vercelAiGatewayApiKey = apiKey
@@ -55,6 +65,7 @@ export function getProviderSettings(
 		default:
 			if (apiKey) config.apiKey = apiKey
 			if (model) config.apiModelId = model
+			if (baseUrl) config.openAiBaseUrl = baseUrl
 	}
 
 	return config
diff --git a/apps/cli/src/types/types.ts b/apps/cli/src/types/types.ts
index ecd3922aa1c..2924cd2866f 100644
--- a/apps/cli/src/types/types.ts
+++ b/apps/cli/src/types/types.ts
@@ -4,6 +4,7 @@ import type { OutputFormat } from "./json-events.js"
 export const supportedProviders = [
 	"anthropic",
 	"openai-native",
+	"openai",
 	"gemini",
 	"openrouter",
 	"vercel-ai-gateway",
@@ -34,6 +35,7 @@ export type FlagOptions = {
 	apiKey?: string
 	provider?: SupportedProvider
 	model?: string
+	baseUrl?: string
 	mode?: string
 	terminalShell?: string
 	reasoningEffort?: ReasoningEffortFlagOptions