Skip to content

Commit 88841c7

Browse files
axisrow and claude
committed
fix: align default provider/model with README (ollama/gpt-oss)
- Change DEFAULT_PROVIDER from openai → ollama and DEFAULT_MODEL from gpt-4o-mini → gpt-oss in defaults.py and run_command.py
- Import and use DEFAULT_PROVIDER/DEFAULT_MODEL in main.py to fix NameError when neither --provider nor --model is supplied
- Add graceful error handling in ChatContext.client so missing API keys no longer crash the UI on startup
- Downgrade "token not found" messages from WARNING → DEBUG in config_loader.py to reduce noise for local providers
- Update test assertions to match the new defaults

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent a7d6b25 commit 88841c7

6 files changed

Lines changed: 36 additions & 26 deletions

File tree

src/mcp_cli/chat/chat_context.py

Lines changed: 15 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -240,8 +240,17 @@ def create(
240240
# ── Properties ────────────────────────────────────────────────────────
241241
@property
242242
def client(self) -> Any:
243-
"""Get current LLM client (cached automatically by ModelManager)."""
244-
return self.model_manager.get_client()
243+
"""Get current LLM client (cached automatically by ModelManager).
244+
245+
Returns None if the client cannot be created (e.g. missing API key)
246+
so the UI can start without crashing. The error is logged and the user
247+
will see a proper message when they try to send a message.
248+
"""
249+
try:
250+
return self.model_manager.get_client()
251+
except Exception as e:
252+
logger.error(f"Failed to create client for {self.provider}: {e}")
253+
return None
245254

246255
@property
247256
def provider(self) -> str:
@@ -460,11 +469,11 @@ async def initialize(
460469
await self._initialize_session()
461470

462471
# Quick provider validation (non-blocking)
463-
try:
464-
_client = self.client # noqa: F841 — fails fast if no API key
472+
_client = self.client # None if client could not be created
473+
if _client is not None:
465474
logger.info(f"Provider {self.provider} client created successfully")
466-
except Exception as e:
467-
logger.warning(f"Provider validation warning: {e}")
475+
else:
476+
logger.warning("Provider validation warning: client could not be created.")
468477
logger.warning("Chat may fail when making API calls.")
469478

470479
if not self.tools:

src/mcp_cli/config/defaults.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -176,10 +176,10 @@
176176
# Provider/Model Defaults
177177
# ================================================================
178178

179-
DEFAULT_PROVIDER = "openai"
179+
DEFAULT_PROVIDER = "ollama"
180180
"""Default LLM provider."""
181181

182-
DEFAULT_MODEL = "gpt-4o-mini"
182+
DEFAULT_MODEL = "gpt-oss"
183183
"""Default LLM model."""
184184

185185

src/mcp_cli/main.py

Lines changed: 12 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@
3030
restore_terminal,
3131
)
3232
from chuk_term.ui.theme import set_theme
33-
from mcp_cli.config import process_options, APP_VERSION, DEFAULT_CONFIG_FILENAME
33+
from mcp_cli.config import process_options, APP_VERSION, DEFAULT_CONFIG_FILENAME, DEFAULT_PROVIDER, DEFAULT_MODEL
3434
from mcp_cli.context import initialize_context
3535

3636
# ──────────────────────────────────────────────────────────────────────────────
@@ -345,11 +345,11 @@ def main_callback(
345345
f"Using current provider with specified model: {effective_provider}/{model}"
346346
)
347347
else:
348-
# Neither specified, use active configuration
349-
effective_provider = model_manager.get_active_provider()
350-
effective_model = model_manager.get_active_model()
348+
# Neither specified — use declared project defaults
349+
effective_provider = DEFAULT_PROVIDER
350+
effective_model = DEFAULT_MODEL
351351
logger.debug(
352-
f"Using active configuration: {effective_provider}/{effective_model}"
352+
f"Using default configuration: {effective_provider}/{effective_model}"
353353
)
354354

355355
servers, _, server_names = process_options(
@@ -649,8 +649,9 @@ def _chat_command(
649649
effective_provider = model_manager.get_active_provider()
650650
effective_model = model
651651
else:
652-
effective_provider = model_manager.get_active_provider()
653-
effective_model = model_manager.get_active_model()
652+
# Neither specified — use declared project defaults
653+
effective_provider = DEFAULT_PROVIDER
654+
effective_model = DEFAULT_MODEL
654655

655656
servers, _, server_names = process_options(
656657
server,
@@ -818,11 +819,11 @@ def _interactive_command(
818819
f"Using current provider with specified model: {effective_provider}/{model}"
819820
)
820821
else:
821-
# Neither specified, use active configuration
822-
effective_provider = model_manager.get_active_provider()
823-
effective_model = model_manager.get_active_model()
822+
# Neither specified — use declared project defaults
823+
effective_provider = DEFAULT_PROVIDER
824+
effective_model = DEFAULT_MODEL
824825
logger.debug(
825-
f"Using active configuration: {effective_provider}/{effective_model}"
826+
f"Using default configuration: {effective_provider}/{effective_model}"
826827
)
827828

828829
servers, _, server_names = process_options(

src/mcp_cli/run_command.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -308,8 +308,8 @@ def cli_entry(
308308
server: list[str] = typer.Option(
309309
["sqlite"], "--server", "-s", help="Server(s) to connect"
310310
),
311-
provider: str = typer.Option("openai", help="LLM provider name"),
312-
model: str = typer.Option("gpt-4o-mini", help="LLM model name"),
311+
provider: str = typer.Option("ollama", help="LLM provider name"),
312+
model: str = typer.Option("gpt-oss", help="LLM model name"),
313313
init_timeout: float = typer.Option(
314314
120.0, "--init-timeout", help="Server initialization timeout in seconds"
315315
),

src/mcp_cli/tools/config_loader.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -183,13 +183,13 @@ def process_value(value: Any) -> Any:
183183
)
184184
return token_value
185185
else:
186-
logger.warning(
186+
logger.debug(
187187
f"Token {namespace}:{name} has no token value in data"
188188
)
189189
else:
190-
logger.warning(f"Token not found: {namespace}:{name}")
190+
logger.debug(f"Token not found: {namespace}:{name}")
191191
except Exception as e:
192-
logger.warning(
192+
logger.debug(
193193
f"Failed to get token {namespace}:{name}: {e}"
194194
)
195195

tests/config/test_config_manager.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -134,8 +134,8 @@ def test_mcp_config_defaults(self):
134134
"""Test MCPConfig default values."""
135135
config = MCPConfig()
136136
assert config.servers == {}
137-
assert config.default_provider == "openai"
138-
assert config.default_model == "gpt-4o-mini"
137+
assert config.default_provider == "ollama"
138+
assert config.default_model == "gpt-oss"
139139
assert config.theme == "default"
140140
assert config.verbose is True
141141
assert config.confirm_tools is True

0 commit comments

Comments
 (0)