From 1e9c05a54aa78c4e8527c8fea31847b60c5dcd3d Mon Sep 17 00:00:00 2001 From: Thump604 Date: Mon, 16 Mar 2026 18:18:06 -0500 Subject: [PATCH] feat(cli): add openai provider support for OpenAI-compatible endpoints Add the `openai` provider to the CLI's supported providers list, enabling use of OpenAI-compatible API endpoints (e.g., locally deployed LLMs via vLLM, llama.cpp, Ollama, LM Studio, etc.). Changes: - Add "openai" to supportedProviders in types.ts - Add "openai" env var mapping (OPENAI_API_KEY) in provider.ts - Add "openai" case in getProviderSettings() with support for OPENAI_BASE_URL environment variable for custom endpoints - Update README environment variable table The openai provider uses the existing @roo-code/types openAi schema (openAiBaseUrl, openAiApiKey, openAiModelId) which is already fully supported in the core extension via OpenAiHandler. Usage: export OPENAI_API_KEY=sk-local export OPENAI_BASE_URL=http://localhost:8080/v1 roo --provider openai --model my-model "Hello" Closes #11917 --- apps/cli/README.md | 3 +++ apps/cli/src/lib/utils/provider.ts | 6 ++++++ apps/cli/src/types/types.ts | 1 + 3 files changed, 10 insertions(+) diff --git a/apps/cli/README.md b/apps/cli/README.md index 8dec1f3a1c6..fbaa8f4d0df 100644 --- a/apps/cli/README.md +++ b/apps/cli/README.md @@ -215,10 +215,13 @@ The CLI will look for API keys in environment variables if not provided via `--a | roo | `ROO_API_KEY` | | anthropic | `ANTHROPIC_API_KEY` | | openai-native | `OPENAI_API_KEY` | +| openai | `OPENAI_API_KEY` | | openrouter | `OPENROUTER_API_KEY` | | gemini | `GOOGLE_API_KEY` | | vercel-ai-gateway | `VERCEL_AI_GATEWAY_API_KEY` | +The `openai` provider also reads `OPENAI_BASE_URL` to set a custom API endpoint (e.g., `http://localhost:8080/v1` for locally deployed models). 
+ **Authentication Environment Variables:** | Variable | Description | diff --git a/apps/cli/src/lib/utils/provider.ts b/apps/cli/src/lib/utils/provider.ts index 64aec430c1b..b9a6677e9d7 100644 --- a/apps/cli/src/lib/utils/provider.ts +++ b/apps/cli/src/lib/utils/provider.ts @@ -5,6 +5,7 @@ import type { SupportedProvider } from "@/types/index.js" const envVarMap: Record<SupportedProvider, string> = { anthropic: "ANTHROPIC_API_KEY", "openai-native": "OPENAI_API_KEY", + openai: "OPENAI_API_KEY", gemini: "GOOGLE_API_KEY", openrouter: "OPENROUTER_API_KEY", "vercel-ai-gateway": "VERCEL_AI_GATEWAY_API_KEY", @@ -36,6 +37,11 @@ export function getProviderSettings( if (apiKey) config.openAiNativeApiKey = apiKey if (model) config.apiModelId = model break + case "openai": + if (apiKey) config.openAiApiKey = apiKey + if (model) config.openAiModelId = model + if (process.env.OPENAI_BASE_URL) config.openAiBaseUrl = process.env.OPENAI_BASE_URL + break case "gemini": if (apiKey) config.geminiApiKey = apiKey if (model) config.apiModelId = model diff --git a/apps/cli/src/types/types.ts b/apps/cli/src/types/types.ts index ecd3922aa1c..f23e0b169c2 100644 --- a/apps/cli/src/types/types.ts +++ b/apps/cli/src/types/types.ts @@ -4,6 +4,7 @@ import type { OutputFormat } from "./json-events.js" export const supportedProviders = [ "anthropic", "openai-native", + "openai", "gemini", "openrouter", "vercel-ai-gateway",