diff --git a/README.md b/README.md
index 67c18be4..7bee544f 100644
--- a/README.md
+++ b/README.md
@@ -36,7 +36,9 @@ For deployments, you can use `pip install codeinterpreterapi` instead which does

 ## Usage

-To configure OpenAI and Azure OpenAI, ensure that you set the appropriate environment variables (or use a .env file):
+To configure your LLM provider, set the appropriate environment variables (or use a .env file).
+
+The API supports **OpenAI**, **Azure OpenAI**, **Anthropic**, and **MiniMax** as LLM providers.

 For OpenAI, set the OPENAI_API_KEY environment variable:

@@ -44,6 +46,13 @@ For OpenAI, set the OPENAI_API_KEY environment variable:
 export OPENAI_API_KEY=sk-**********
 ```

+For [MiniMax](https://www.minimax.io/), set the MINIMAX_API_KEY environment variable:
+
+```bash
+export MINIMAX_API_KEY=your-minimax-api-key
+export MODEL=MiniMax-M2.5 # or MiniMax-M2.5-highspeed for faster responses
+```
+
 ```python
 from codeinterpreterapi import CodeInterpreterSession, settings
diff --git a/src/codeinterpreterapi/config.py b/src/codeinterpreterapi/config.py
index ba2333c3..6264be66 100644
--- a/src/codeinterpreterapi/config.py
+++ b/src/codeinterpreterapi/config.py
@@ -20,6 +20,7 @@ class CodeInterpreterAPISettings(BaseSettings):
     AZURE_API_VERSION: Optional[str] = None
     AZURE_DEPLOYMENT_NAME: Optional[str] = None
     ANTHROPIC_API_KEY: Optional[str] = None
+    MINIMAX_API_KEY: Optional[str] = None

     # LLM Settings
     MODEL: str = "gpt-3.5-turbo"
diff --git a/src/codeinterpreterapi/session.py b/src/codeinterpreterapi/session.py
index 3ec1f01c..76d0a56b 100644
--- a/src/codeinterpreterapi/session.py
+++ b/src/codeinterpreterapi/session.py
@@ -164,6 +164,20 @@ def _choose_llm(self) -> BaseChatModel:
                 temperature=settings.TEMPERATURE,
                 anthropic_api_key=settings.ANTHROPIC_API_KEY,
             )
+        if settings.MINIMAX_API_KEY:
+            from langchain_openai import ChatOpenAI
+
+            # Use MODEL as-is when it already names a MiniMax model;
+            # otherwise fall back to the default. The case-insensitive
+            # check covers both "minimax" and "MiniMax" spellings.
+            model = (
+                settings.MODEL
+                if "minimax" in settings.MODEL.lower()
+                else "MiniMax-M2.5"
+            )
+            # Clamp the temperature into the (0, 1] range used here.
+            temperature = max(0.01, min(settings.TEMPERATURE, 1.0))
+            self.log("Using MiniMax")
+            return ChatOpenAI(
+                model=model,
+                api_key=settings.MINIMAX_API_KEY,  # type: ignore
+                base_url="https://api.minimax.io/v1",
+                timeout=settings.REQUEST_TIMEOUT,
+                temperature=temperature,
+                max_retries=settings.MAX_RETRY,
+            )  # type: ignore
         raise ValueError("Please set the API key for the LLM you want to use.")

     def _choose_agent(self) -> BaseSingleActionAgent:
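MiniMax exposes an OpenAI-compatible endpoint, which is why the branch above reuses `ChatOpenAI` rather than adding a dedicated client. As a minimal standalone sketch of the same configuration (model name, base URL, and temperature clamp are taken from this diff; the prompt is purely illustrative):

```python
# Minimal sketch mirroring the MiniMax branch of _choose_llm above.
# Assumes MINIMAX_API_KEY is set in the environment; the prompt is
# illustrative, not part of the library.
import os

from langchain_openai import ChatOpenAI

llm = ChatOpenAI(
    model="MiniMax-M2.5",
    api_key=os.environ["MINIMAX_API_KEY"],
    base_url="https://api.minimax.io/v1",
    temperature=max(0.01, min(0.7, 1.0)),  # clamped into (0, 1] as above
)
print(llm.invoke("Reply with one word: pong").content)
```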
diff --git a/tests/test_minimax_integration.py b/tests/test_minimax_integration.py
new file mode 100644
index 00000000..72748a10
--- /dev/null
+++ b/tests/test_minimax_integration.py
@@ -0,0 +1,76 @@
+"""Integration tests for the MiniMax provider.
+
+These tests require a valid MINIMAX_API_KEY environment variable.
+Run with: MINIMAX_API_KEY=your-key pytest tests/test_minimax_integration.py -v
+"""
+
+import os
+
+import pytest
+
+# Skip all tests in this module if MINIMAX_API_KEY is not set
+pytestmark = pytest.mark.skipif(
+    not os.environ.get("MINIMAX_API_KEY"),
+    reason="MINIMAX_API_KEY not set",
+)
+
+
+class TestMiniMaxIntegration:
+    """Integration tests that call the MiniMax API."""
+
+    def test_minimax_llm_creation(self):
+        """Test that a ChatOpenAI instance with MiniMax config can be created."""
+        from langchain_openai import ChatOpenAI
+
+        llm = ChatOpenAI(
+            model="MiniMax-M2.5",
+            api_key=os.environ["MINIMAX_API_KEY"],
+            base_url="https://api.minimax.io/v1",
+            temperature=0.5,
+        )
+        assert llm is not None
+        assert llm.model_name == "MiniMax-M2.5"
+
+    def test_minimax_simple_completion(self):
+        """Test that MiniMax can generate a simple completion."""
+        from langchain_openai import ChatOpenAI
+
+        llm = ChatOpenAI(
+            model="MiniMax-M2.5",
+            api_key=os.environ["MINIMAX_API_KEY"],
+            base_url="https://api.minimax.io/v1",
+            temperature=0.5,
+        )
+        response = llm.invoke("What is 2+2? Answer with just the number.")
+        assert response.content is not None
+        assert "4" in response.content
+
+    def test_minimax_session_choose_llm(self):
+        """Test that CodeInterpreterSession selects MiniMax when configured."""
+        from langchain_openai import ChatOpenAI
+
+        from codeinterpreterapi.session import CodeInterpreterSession, settings
+
+        # Swap the relevant settings in and restore them afterwards so the
+        # test does not leak state into other tests.
+        old_openai = settings.OPENAI_API_KEY
+        old_minimax = settings.MINIMAX_API_KEY
+        old_model = settings.MODEL
+        try:
+            settings.OPENAI_API_KEY = None
+            settings.MINIMAX_API_KEY = os.environ["MINIMAX_API_KEY"]
+            settings.MODEL = "MiniMax-M2.5"
+
+            session = CodeInterpreterSession.__new__(CodeInterpreterSession)
+            session.verbose = False
+            llm = session._choose_llm()
+
+            assert isinstance(llm, ChatOpenAI)
+        finally:
+            settings.OPENAI_API_KEY = old_openai
+            settings.MINIMAX_API_KEY = old_minimax
+            settings.MODEL = old_model
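The unit tests that follow pin down the provider-selection order: MiniMax only wins when no earlier provider key is set. A condensed, illustrative sketch of that cascade (not the real implementation; the ordering of the pre-existing branches is assumed from the keys the test helper nulls out):

```python
# Illustrative sketch of the selection order the unit tests below rely on.
# MiniMax is reached only after the pre-existing provider checks fall through.
from types import SimpleNamespace


def choose_provider(settings) -> str:
    # The pre-existing branches run first, so their keys take priority.
    for key in ("OPENAI_API_KEY", "AZURE_OPENAI_API_KEY", "ANTHROPIC_API_KEY"):
        if getattr(settings, key, None):
            return key.removesuffix("_API_KEY")
    if getattr(settings, "MINIMAX_API_KEY", None):
        return "MINIMAX"
    raise ValueError("Please set the API key for the LLM you want to use.")


# MiniMax wins only when no other key is set; OpenAI takes priority otherwise.
assert choose_provider(SimpleNamespace(MINIMAX_API_KEY="k")) == "MINIMAX"
assert choose_provider(SimpleNamespace(OPENAI_API_KEY="sk", MINIMAX_API_KEY="k")) == "OPENAI"
```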
diff --git a/tests/test_minimax_provider.py b/tests/test_minimax_provider.py
new file mode 100644
index 00000000..cb2359a3
--- /dev/null
+++ b/tests/test_minimax_provider.py
@@ -0,0 +1,171 @@
+"""Unit tests for the MiniMax provider integration."""
+
+import os
+from unittest.mock import MagicMock, patch
+
+import pytest
+
+
+class TestMiniMaxConfig:
+    """Test MiniMax API key configuration in settings."""
+
+    def test_minimax_api_key_default_none(self):
+        """MINIMAX_API_KEY should default to None."""
+        with patch.dict(os.environ, {}, clear=True):
+            from codeinterpreterapi.config import CodeInterpreterAPISettings
+
+            s = CodeInterpreterAPISettings()
+            assert s.MINIMAX_API_KEY is None
+
+    def test_minimax_api_key_from_env(self):
+        """MINIMAX_API_KEY should be loadable from the environment."""
+        with patch.dict(os.environ, {"MINIMAX_API_KEY": "test-key-123"}, clear=False):
+            from codeinterpreterapi.config import CodeInterpreterAPISettings
+
+            s = CodeInterpreterAPISettings()
+            assert s.MINIMAX_API_KEY == "test-key-123"
+
+
+class TestMiniMaxLLMSelection:
+    """Test that _choose_llm selects MiniMax when MINIMAX_API_KEY is set."""
+
+    def _make_session(self):
+        from codeinterpreterapi.session import CodeInterpreterSession
+
+        session = CodeInterpreterSession.__new__(CodeInterpreterSession)
+        session.verbose = False
+        return session
+
+    def _setup_minimax_settings(self, mock_settings, **overrides):
+        """Configure the mocked settings so only the MiniMax key is set."""
+        mock_settings.AZURE_OPENAI_API_KEY = None
+        mock_settings.AZURE_API_BASE = None
+        mock_settings.AZURE_API_VERSION = None
+        mock_settings.AZURE_DEPLOYMENT_NAME = None
+        mock_settings.OPENAI_API_KEY = None
+        mock_settings.ANTHROPIC_API_KEY = None
+        mock_settings.MINIMAX_API_KEY = "test-minimax-key"
+        mock_settings.MODEL = "gpt-3.5-turbo"
+        mock_settings.TEMPERATURE = 0.03
+        mock_settings.REQUEST_TIMEOUT = 180
+        mock_settings.MAX_RETRY = 3
+        mock_settings.DEBUG = False
+        mock_settings.CUSTOM_PACKAGES = []
+        mock_settings.SYSTEM_MESSAGE = MagicMock()
+        for k, v in overrides.items():
+            setattr(mock_settings, k, v)
+
+    @patch("codeinterpreterapi.session.settings")
+    def test_choose_llm_minimax_default_model(self, mock_settings):
+        """When MINIMAX_API_KEY is set, _choose_llm should return ChatOpenAI with MiniMax config."""
+        self._setup_minimax_settings(mock_settings)
+        session = self._make_session()
+
+        llm = session._choose_llm()
+
+        from langchain_openai import ChatOpenAI
+
+        assert isinstance(llm, ChatOpenAI)
+        assert llm.model_name == "MiniMax-M2.5"
+        assert str(llm.openai_api_base) == "https://api.minimax.io/v1"
+
+    @patch("codeinterpreterapi.session.settings")
+    def test_choose_llm_minimax_custom_model(self, mock_settings):
+        """When MODEL contains 'minimax' (case-insensitively), it should be used directly."""
+        self._setup_minimax_settings(mock_settings, MODEL="MiniMax-M2.5-highspeed")
+        session = self._make_session()
+
+        llm = session._choose_llm()
+
+        assert llm.model_name == "MiniMax-M2.5-highspeed"
+
+    @patch("codeinterpreterapi.session.settings")
+    def test_choose_llm_minimax_model_with_MiniMax_prefix(self, mock_settings):
+        """When MODEL starts with 'MiniMax', it should be used directly."""
+        self._setup_minimax_settings(mock_settings, MODEL="MiniMax-M2.5")
+        session = self._make_session()
+
+        llm = session._choose_llm()
+
+        assert llm.model_name == "MiniMax-M2.5"
+
+    @patch("codeinterpreterapi.session.settings")
+    def test_choose_llm_minimax_temperature_clamping_low(self, mock_settings):
+        """Temperature below 0.01 should be clamped to 0.01 for MiniMax."""
+        self._setup_minimax_settings(mock_settings, MODEL="MiniMax-M2.5", TEMPERATURE=0.0)
+        session = self._make_session()
+
+        llm = session._choose_llm()
+
+        assert llm.temperature == 0.01
+
+    @patch("codeinterpreterapi.session.settings")
+    def test_choose_llm_minimax_temperature_clamping_high(self, mock_settings):
+        """Temperature above 1.0 should be clamped to 1.0 for MiniMax."""
+        self._setup_minimax_settings(mock_settings, MODEL="MiniMax-M2.5", TEMPERATURE=2.0)
+        session = self._make_session()
+
+        llm = session._choose_llm()
+
+        assert llm.temperature == 1.0
+
+    @patch("codeinterpreterapi.session.settings")
+    def test_choose_llm_minimax_temperature_passthrough(self, mock_settings):
+        """Temperature within range should pass through unchanged."""
+        self._setup_minimax_settings(mock_settings, MODEL="MiniMax-M2.5", TEMPERATURE=0.5)
+        session = self._make_session()
+
+        llm = session._choose_llm()
+
+        assert llm.temperature == 0.5
+
+    @patch("codeinterpreterapi.session.settings")
+    def test_choose_llm_minimax_not_chosen_when_openai_set(self, mock_settings):
+        """MiniMax should not be chosen when the OpenAI key is also set (OpenAI takes priority)."""
+        self._setup_minimax_settings(mock_settings, OPENAI_API_KEY="sk-openai-key", MODEL="gpt-3.5-turbo")
+        session = self._make_session()
+
+        llm = session._choose_llm()
+
+        from langchain_openai import ChatOpenAI
+
+        assert isinstance(llm, ChatOpenAI)
+        # OpenAI does not set base_url to the MiniMax endpoint
+        base_url = str(llm.openai_api_base) if llm.openai_api_base else ""
+        assert "minimax" not in base_url.lower()
+
+    @patch("codeinterpreterapi.session.settings")
+    def test_choose_llm_raises_without_any_key(self, mock_settings):
+        """Should raise ValueError when no API key is configured."""
+        self._setup_minimax_settings(mock_settings, MINIMAX_API_KEY=None)
+        session = self._make_session()
+
+        with pytest.raises(ValueError, match="API key"):
+            session._choose_llm()
+
+    @patch("codeinterpreterapi.session.settings")
+    def test_minimax_uses_chatopenai_for_agent_compat(self, mock_settings):
+        """MiniMax returns a ChatOpenAI instance, compatible with OpenAIFunctionsAgent."""
+        self._setup_minimax_settings(mock_settings, MODEL="MiniMax-M2.5", TEMPERATURE=0.5)
+        session = self._make_session()
+
+        from langchain_openai import ChatOpenAI
+
+        llm = session._choose_llm()
+        assert isinstance(llm, ChatOpenAI)
+
+    @patch("codeinterpreterapi.session.settings")
+    def test_minimax_timeout_and_retries(self, mock_settings):
+        """MiniMax should pass through the timeout and max_retries settings."""
+        self._setup_minimax_settings(
+            mock_settings,
+            MODEL="MiniMax-M2.5",
+            TEMPERATURE=0.5,
+            REQUEST_TIMEOUT=300,
+            MAX_RETRY=5,
+        )
+        session = self._make_session()
+
+        llm = session._choose_llm()
+
+        assert llm.request_timeout == 300
+        assert llm.max_retries == 5
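The clamping asserted by the temperature tests is a single min/max pair; a quick worked check against the boundary values used above:

```python
# The clamp from the MiniMax branch, checked against the values the
# unit tests above assert.
def clamp_temperature(t: float) -> float:
    return max(0.01, min(t, 1.0))


assert clamp_temperature(0.0) == 0.01  # below range -> raised to 0.01
assert clamp_temperature(2.0) == 1.0   # above range -> capped at 1.0
assert clamp_temperature(0.5) == 0.5   # in range -> unchanged
```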