Skip to content

Commit 7c430bb

Browse files
gola, amitksingh1490, autofix-ci[bot]
authored
fix: add xAI/Grok API compatibility (#2809)
Co-authored-by: Amit Singh <amitksingh1490@gmail.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
1 parent a529fb2 commit 7c430bb

3 files changed

Lines changed: 54 additions & 0 deletions

File tree

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
use forge_domain::Transformer;
2+
3+
use crate::dto::openai::Request;
4+
5+
/// Makes the Request compatible with xAI's API.
///
/// xAI's /v1/chat/completions is OpenAI-compatible but rejects several
/// parameters that OpenAI accepts, so those must be stripped (and
/// `max_completion_tokens` mapped back to `max_tokens`) before sending.
/// Applied in the provider pipeline when the provider id is `XAI`.
pub struct MakeXaiCompat;
9+
10+
impl Transformer for MakeXaiCompat {
11+
type Value = Request;
12+
13+
fn transform(&mut self, mut request: Self::Value) -> Self::Value {
14+
// xAI rejects stream_options
15+
request.stream_options = None;
16+
17+
// xAI uses max_tokens, not max_completion_tokens
18+
// MakeOpenAiCompat already moved max_tokens → max_completion_tokens,
19+
// so move it back
20+
if request.max_completion_tokens.is_some() && request.max_tokens.is_none() {
21+
request.max_tokens = request.max_completion_tokens.take();
22+
}
23+
request.max_completion_tokens = None;
24+
25+
// xAI does not support parallel_tool_calls
26+
request.parallel_tool_calls = None;
27+
28+
// xAI does not support prediction
29+
request.prediction = None;
30+
31+
// xAI does not support reasoning_effort (uses model variants instead)
32+
request.reasoning_effort = None;
33+
34+
// xAI does not support thinking config
35+
request.thinking = None;
36+
37+
// xAI does not support logit_bias
38+
request.logit_bias = None;
39+
40+
// xAI does not support top_logprobs
41+
request.top_logprobs = None;
42+
43+
// xAI does not support seed
44+
request.seed = None;
45+
46+
request
47+
}
48+
}

crates/forge_app/src/dto/openai/transformers/mod.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ mod github_copilot_reasoning;
33
mod kimi_k2_reasoning;
44
mod make_cerebras_compat;
55
mod make_openai_compat;
6+
mod make_xai_compat;
67
mod minimax;
78
mod normalize_tool_schema;
89
mod pipeline;

crates/forge_app/src/dto/openai/transformers/pipeline.rs

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ use super::github_copilot_reasoning::GitHubCopilotReasoning;
66
use super::kimi_k2_reasoning::KimiK2Reasoning;
77
use super::make_cerebras_compat::MakeCerebrasCompat;
88
use super::make_openai_compat::MakeOpenAiCompat;
9+
use super::make_xai_compat::MakeXaiCompat;
910
use super::minimax::SetMinimaxParams;
1011
use super::normalize_tool_schema::{
1112
EnforceStrictResponseFormatSchema, EnforceStrictToolSchema, NormalizeToolSchema,
@@ -67,6 +68,8 @@ impl Transformer for ProviderPipeline<'_> {
6768

6869
let cerebras_compat = MakeCerebrasCompat.when(move |_| provider.id == ProviderId::CEREBRAS);
6970

71+
let xai_compat = MakeXaiCompat.when(move |_| provider.id == ProviderId::XAI);
72+
7073
let trim_tool_call_ids = TrimToolCallIds.when(move |_| provider.id == ProviderId::OPENAI);
7174

7275
let strict_schema = EnforceStrictToolSchema
@@ -75,6 +78,7 @@ impl Transformer for ProviderPipeline<'_> {
7578
provider.id == ProviderId::FIREWORKS_AI
7679
|| provider.id == ProviderId::OPENCODE_ZEN
7780
|| provider.id == ProviderId::OPENCODE_GO
81+
|| provider.id == ProviderId::XAI
7882
});
7983

8084
let mut combined = zai_thinking
@@ -85,6 +89,7 @@ impl Transformer for ProviderPipeline<'_> {
8589
.pipe(github_copilot_reasoning)
8690
.pipe(kimi_k2_reasoning)
8791
.pipe(cerebras_compat)
92+
.pipe(xai_compat)
8893
.pipe(trim_tool_call_ids)
8994
.pipe(strict_schema)
9095
.pipe(NormalizeToolSchema);

0 commit comments

Comments
 (0)