diff --git a/apps/cli/README.md b/apps/cli/README.md index 8dec1f3a1c..fbaa8f4d0d 100644 --- a/apps/cli/README.md +++ b/apps/cli/README.md @@ -215,10 +215,13 @@ The CLI will look for API keys in environment variables if not provided via `--a | roo | `ROO_API_KEY` | | anthropic | `ANTHROPIC_API_KEY` | | openai-native | `OPENAI_API_KEY` | +| openai | `OPENAI_API_KEY` | | openrouter | `OPENROUTER_API_KEY` | | gemini | `GOOGLE_API_KEY` | | vercel-ai-gateway | `VERCEL_AI_GATEWAY_API_KEY` | +The `openai` provider also reads `OPENAI_BASE_URL` to set a custom API endpoint (e.g., `http://localhost:8080/v1` for locally deployed models). + **Authentication Environment Variables:** | Variable | Description | diff --git a/apps/cli/src/lib/utils/provider.ts b/apps/cli/src/lib/utils/provider.ts index 64aec430c1..b9a6677e9d 100644 --- a/apps/cli/src/lib/utils/provider.ts +++ b/apps/cli/src/lib/utils/provider.ts @@ -5,6 +5,7 @@ import type { SupportedProvider } from "@/types/index.js" const envVarMap: Record<SupportedProvider, string> = { anthropic: "ANTHROPIC_API_KEY", "openai-native": "OPENAI_API_KEY", + openai: "OPENAI_API_KEY", gemini: "GOOGLE_API_KEY", openrouter: "OPENROUTER_API_KEY", "vercel-ai-gateway": "VERCEL_AI_GATEWAY_API_KEY", @@ -36,6 +37,11 @@ export function getProviderSettings( if (apiKey) config.openAiNativeApiKey = apiKey if (model) config.apiModelId = model break + case "openai": + if (apiKey) config.openAiApiKey = apiKey + if (model) config.openAiModelId = model + if (process.env.OPENAI_BASE_URL) config.openAiBaseUrl = process.env.OPENAI_BASE_URL + break case "gemini": if (apiKey) config.geminiApiKey = apiKey if (model) config.apiModelId = model diff --git a/apps/cli/src/types/types.ts b/apps/cli/src/types/types.ts index ecd3922aa1..f23e0b169c 100644 --- a/apps/cli/src/types/types.ts +++ b/apps/cli/src/types/types.ts @@ -4,6 +4,7 @@ import type { OutputFormat } from "./json-events.js" export const supportedProviders = [ "anthropic", "openai-native", + "openai", "gemini", "openrouter", 
"vercel-ai-gateway",