Skip to content
Draft
7 changes: 7 additions & 0 deletions claude_code_proxy/claude_code_router.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,13 @@ def __init__(
# temperature ?
self.params_complapi.pop("temperature", None)

# # TODO TODO TODO Try installing this version of LiteLLM first:
# # https://github.com/BerriAI/litellm/pull/16719

# if self.model_route.is_target_gemini:
# # TODO Find a way to fix it more properly
# self.params_complapi["cache_prompt"] = False

# For Langfuse
trace_name = f"{self.timestamp}-OUTBOUND-{self.calling_method}"
self.params_complapi.setdefault("metadata", {})["trace_name"] = trace_name
Expand Down
1 change: 1 addition & 0 deletions claude_code_proxy/proxy_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,4 +32,5 @@
)

# Provider identifiers. Per the route_model.py hunk below, these are used as
# the "provider/" prefix of target model names (e.g. f"{OPENAI}/{model_name_only}"
# and startswith(f"{ANTHROPIC}/") / startswith(f"{GEMINI}/") checks).
ANTHROPIC = "anthropic"
GEMINI = "gemini"
OPENAI = "openai"
3 changes: 3 additions & 0 deletions claude_code_proxy/route_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from claude_code_proxy.proxy_config import (
ALWAYS_USE_RESPONSES_API,
ANTHROPIC,
GEMINI,
OPENAI,
REMAP_CLAUDE_HAIKU_TO,
REMAP_CLAUDE_OPUS_TO,
Expand All @@ -18,6 +19,7 @@ class ModelRoute:
target_model: str # ALWAYS has a provider prefix ("provider/model_name")
extra_params: dict[str, Any]
is_target_anthropic: bool
is_target_gemini: bool
use_responses_api: bool

def __init__(self, requested_model: str) -> None:
Expand Down Expand Up @@ -84,6 +86,7 @@ def _finalize_model_route_object(self) -> None:
self.target_model = f"{OPENAI}/{model_name_only}"

self.is_target_anthropic = self.target_model.startswith(f"{ANTHROPIC}/")
self.is_target_gemini = self.target_model.startswith(f"{GEMINI}/")

if self.is_target_anthropic:
self.use_responses_api = False
Expand Down
Loading