Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions backend/app/model/model_platform.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
"grok": "openai-compatible-model",
"ernie": "qianfan",
"llama.cpp": "openai-compatible-model",
"orcarouter": "openai-compatible-model",
}

# Bedrock Converse requires a region during model initialization.
Expand Down
1 change: 1 addition & 0 deletions docs/core/models/byok.md
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ Eigent supports the following BYOK providers:
| **Anthropic** | `https://api.anthropic.com/` | [Anthropic API Docs](https://docs.anthropic.com/en/api/getting-started) |
| **Google Gemini** | `https://generativelanguage.googleapis.com/v1beta/openai/` | [Gemini API Docs](https://ai.google.dev/gemini-api/docs) |
| **OpenRouter** | `https://openrouter.ai/api/v1` | [OpenRouter Docs](https://openrouter.ai/docs) |
| **OrcaRouter** | `https://api.orcarouter.ai/v1` | [OrcaRouter Docs](https://docs.orcarouter.ai/) |
| **Qwen (Alibaba)** | `https://dashscope.aliyuncs.com/compatible-mode/v1` | [Qwen API Docs](https://help.aliyun.com/zh/dashscope/developer-reference/api-details) |
| **DeepSeek** | `https://api.deepseek.com` | [DeepSeek API Docs](https://platform.deepseek.com/api-docs) |
| **Minimax** | `https://api.minimax.io/v1` | [Minimax API Docs](https://platform.minimaxi.com/document/Announcement) |
Expand Down
1 change: 1 addition & 0 deletions src/assets/model/orcarouter.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
11 changes: 11 additions & 0 deletions src/lib/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,17 @@ export const INIT_PROVODERS: Provider[] = [
is_valid: false,
model_type: '',
},
{
id: 'orcarouter',
name: 'OrcaRouter',
apiKey: '',
apiHost: 'https://api.orcarouter.ai/v1',
description: 'OrcaRouter model configuration.',
is_valid: false,
model_type: '',
modelsEndpoint: '/models',
websiteUrl: 'https://www.orcarouter.ai',
},
{
id: 'openrouter',
name: 'OpenRouter',
Expand Down
155 changes: 155 additions & 0 deletions src/lib/providerModels.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,155 @@
// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========

/**
* Fetch + parse helper for cloud providers that expose an OpenAI-compatible
* `/v1/models` listing endpoint (e.g. OrcaRouter). Returns chat-capable
* models grouped by their `<provider>/<model>` prefix so the UI can render
* provider tabs.
*/

/** Single model entry as returned by an OpenAI-compatible /v1/models call. */
type RawModel = {
  id: string;
  architecture?: {
    input_modalities?: string[] | null;
    output_modalities?: string[] | null;
  };
  context_length?: number;
  max_completion_tokens?: number;
};

export type ProviderModelInfo = {
  id: string;
  contextLength?: number;
  maxCompletionTokens?: number;
};

export type ProviderModelGroup = {
  provider: string;
  models: ProviderModelInfo[];
};

/**
 * Decide whether a model should be surfaced in the chat-model dropdown.
 *
 * A model is kept when it explicitly lists 'text' among its output
 * modalities, or when the listing omits modality metadata entirely —
 * some upstream entries (e.g. deepseek-reasoner) leave `architecture`
 * or `output_modalities` null even though they are usable for chat.
 *
 * Models that only emit audio / image / video output are filtered out.
 */
function isChatCapable(model: RawModel): boolean {
  const modalities = model.architecture?.output_modalities;
  // Missing metadata (no architecture, or null/undefined modalities):
  // assume chat-capable rather than hiding a usable model.
  if (modalities == null) return true;
  return modalities.includes('text');
}

/**
 * Separate a provider-prefixed model id at its first slash, e.g.
 * `anthropic/claude-opus-4.6` → `["anthropic", "claude-opus-4.6"]`.
 * Ids with no prefix — or a leading slash — yield an empty provider
 * and the id untouched.
 */
function splitProviderPrefix(id: string): [string, string] {
  const sep = id.indexOf('/');
  return sep > 0 ? [id.slice(0, sep), id.slice(sep + 1)] : ['', id];
}

/**
 * Hit `${apiHost}${modelsEndpoint}` with a Bearer token and return chat-capable
 * models grouped by provider prefix, sorted alphabetically by provider, with
 * models within each group sorted alphabetically by id.
 *
 * @param apiHost        Provider base URL; trailing slashes are tolerated.
 * @param modelsEndpoint Listing path, e.g. `/models`; leading slash optional.
 * @param apiKey         Bearer token — required, checked before any network I/O.
 * @returns Groups sorted by provider name; unprefixed model ids land in an
 *          `'other'` group.
 * @throws Error with a user-readable message on a missing key, network
 *         failure, non-2xx response, or a 2xx response whose body is not
 *         valid JSON.
 */
export async function fetchProviderModels(
  apiHost: string,
  modelsEndpoint: string,
  apiKey: string
): Promise<ProviderModelGroup[]> {
  if (!apiKey) {
    throw new Error('API key is required to fetch model list.');
  }
  // Normalize the join so "host/" + "models" and "host" + "/models" both
  // yield exactly one separating slash.
  const trimmedHost = apiHost.replace(/\/+$/, '');
  const path = modelsEndpoint.startsWith('/')
    ? modelsEndpoint
    : `/${modelsEndpoint}`;
  const url = `${trimmedHost}${path}`;

  const response = await fetch(url, {
    method: 'GET',
    headers: {
      Authorization: `Bearer ${apiKey}`,
      Accept: 'application/json',
    },
  });

  if (!response.ok) {
    throw new Error(
      `Failed to fetch models: ${response.status} ${response.statusText}`
    );
  }

  // A 2xx with a non-JSON body (e.g. an HTML error page injected by a
  // proxy) would otherwise surface as an opaque SyntaxError.
  let payload: unknown;
  try {
    payload = await response.json();
  } catch {
    throw new Error('Failed to fetch models: response body was not valid JSON.');
  }
  const body = payload as { data?: unknown } | null;
  const data: RawModel[] = Array.isArray(body?.data)
    ? ((body as { data: RawModel[] }).data)
    : [];

  // Bucket chat-capable models by their `<provider>/` prefix.
  const grouped = new Map<string, ProviderModelInfo[]>();
  for (const model of data) {
    if (!model?.id || !isChatCapable(model)) continue;
    const [provider] = splitProviderPrefix(model.id);
    const bucket = provider || 'other';
    const info: ProviderModelInfo = {
      id: model.id,
      contextLength: model.context_length,
      maxCompletionTokens: model.max_completion_tokens,
    };
    const arr = grouped.get(bucket);
    if (arr) arr.push(info);
    else grouped.set(bucket, [info]);
  }

  // Stable alphabetical ordering so the UI renders deterministic tabs.
  return Array.from(grouped.entries())
    .map(([provider, models]) => ({
      provider,
      models: models.sort((a, b) => a.id.localeCompare(b.id)),
    }))
    .sort((a, b) => a.provider.localeCompare(b.provider));
}

/** localStorage cache helpers — keyed per provider id to keep entries small. */
const CACHE_KEY_PREFIX = 'eigent-provider-models-v1:';

/**
 * Read the cached model groups for a provider. Returns null when nothing
 * usable is stored: missing key, corrupt JSON, non-array shape, or
 * localStorage being unavailable (private browsing, storage disabled).
 */
export function loadCachedModels(
  providerId: string
): ProviderModelGroup[] | null {
  try {
    const stored = localStorage.getItem(CACHE_KEY_PREFIX + providerId);
    if (!stored) return null;
    const decoded: unknown = JSON.parse(stored);
    return Array.isArray(decoded) ? (decoded as ProviderModelGroup[]) : null;
  } catch {
    return null;
  }
}

/**
 * Persist a provider's model groups to localStorage. Failures (quota
 * exceeded, private browsing, storage disabled) are deliberately
 * swallowed — the cache is a best-effort optimization, never a reason
 * to break the caller.
 */
export function saveCachedModels(
  providerId: string,
  groups: ProviderModelGroup[]
): void {
  try {
    localStorage.setItem(CACHE_KEY_PREFIX + providerId, JSON.stringify(groups));
  } catch {
    // localStorage may be unavailable; silently ignore.
  }
}
167 changes: 145 additions & 22 deletions src/pages/Agents/Models.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@ import moonshotImage from '@/assets/model/moonshot.svg';
import ollamaImage from '@/assets/model/ollama.svg';
import openaiImage from '@/assets/model/openai.svg';
import openrouterImage from '@/assets/model/openrouter.svg';
import orcarouterImage from '@/assets/model/orcarouter.svg';
import qwenImage from '@/assets/model/qwen.svg';
import sglangImage from '@/assets/model/sglang.svg';
import vllmImage from '@/assets/model/vllm.svg';
Expand All @@ -97,6 +98,13 @@ import {
toEndpointBaseUrl,
VLLM_PROVIDER_ID,
} from './localModels';
import { ProviderModelCombobox } from './components/ProviderModelCombobox';
import {
fetchProviderModels,
loadCachedModels,
saveCachedModels,
type ProviderModelGroup,
} from '@/lib/providerModels';

// Sidebar tab types
type SidebarTab =
Expand Down Expand Up @@ -200,6 +208,72 @@ export default function SettingModels() {
const [ollamaEndpointAutoFixedOnce, setOllamaEndpointAutoFixedOnce] =
useState(false);

// Per-cloud-provider model list state: { groups, loading, error } keyed by
// provider id. Populated for providers whose `INIT_PROVODERS` entry declares
// a `modelsEndpoint` (today: only OrcaRouter).
const [cloudModelsState, setCloudModelsState] = useState<
Record<
string,
{ groups: ProviderModelGroup[]; loading: boolean; error: string | null }
>
>(() => {
// Lazy initializer: seed from the localStorage cache so a previously
// fetched list renders immediately, before any network round-trip.
const initial: Record<
string,
{ groups: ProviderModelGroup[]; loading: boolean; error: string | null }
> = {};
for (const p of INIT_PROVODERS) {
// Only providers that declare a listing endpoint participate.
if (!p.modelsEndpoint) continue;
const cached = loadCachedModels(p.id);
if (cached) {
initial[p.id] = { groups: cached, loading: false, error: null };
}
}
return initial;
});

// Fetch and store the model list for the cloud provider at form index
// `idx`. No-ops when the provider has no `modelsEndpoint` or the user has
// not entered an API key yet. On failure, previously fetched groups are
// kept so the dropdown does not go blank.
const fetchCloudProviderModels = useCallback(
async (idx: number) => {
const item = items[idx];
if (!item?.modelsEndpoint) return;
// Prefer the user-edited host from the form; fall back to the default.
const apiKey = form[idx]?.apiKey;
const apiHost = form[idx]?.apiHost || item.apiHost;
if (!apiKey) return;
// Enter loading state while preserving any existing groups (avoids a
// flash of empty content on refresh).
setCloudModelsState((prev) => ({
...prev,
[item.id]: {
groups: prev[item.id]?.groups || [],
loading: true,
error: null,
},
}));
try {
const groups = await fetchProviderModels(
apiHost,
item.modelsEndpoint,
apiKey
);
setCloudModelsState((prev) => ({
...prev,
[item.id]: { groups, loading: false, error: null },
}));
// Persist so the next mount can seed state without a network call.
saveCachedModels(item.id, groups);
} catch (err: any) {
// Surface a user-readable error; keep the stale groups visible.
setCloudModelsState((prev) => ({
...prev,
[item.id]: {
groups: prev[item.id]?.groups || [],
loading: false,
error:
typeof err?.message === 'string'
? err.message
: 'Failed to fetch models.',
},
}));
}
},
[items, form]
);

// Generic model fetcher driven by LOCAL_MODEL_OPTIONS config.
// Only fetches for providers that define fetchPath and parseModels.
const fetchModelsForPlatform = useCallback(
Expand Down Expand Up @@ -1060,6 +1134,7 @@ export default function SettingModels() {
anthropic: anthropicImage,
gemini: geminiImage,
openrouter: openrouterImage,
orcarouter: orcarouterImage,
'tongyi-qianwen': qwenImage,
deepseek: deepseekImage,
ernie: ernieImage,
Expand Down Expand Up @@ -1347,6 +1422,19 @@ export default function SettingModels() {
</div>
<div className="text-body-sm text-text-label">
{item.description}
{item.websiteUrl ? (
<>
{' '}
<a
href={item.websiteUrl}
target="_blank"
rel="noopener noreferrer"
className="text-text-information hover:underline"
>
Visit {item.name}
</a>
</>
) : null}
</div>
</div>
<div className="flex w-full flex-col items-center gap-4 px-6">
Expand Down Expand Up @@ -1404,28 +1492,63 @@ export default function SettingModels() {
}}
/>
{/* Model Type Setting */}
<Input
id={`modelType-${item.id}`}
size="default"
title={t('setting.model-type-setting')}
state={errors[idx]?.model_type ? 'error' : 'default'}
note={errors[idx]?.model_type ?? undefined}
placeholder={`${t('setting.enter-your-model-type')} ${
item.name
} ${t('setting.model-type')}`}
value={form[idx].model_type}
onChange={(e) => {
const v = e.target.value;
setForm((f) =>
f.map((fi, i) => (i === idx ? { ...fi, model_type: v } : fi))
);
setErrors((errs) =>
errs.map((er, i) =>
i === idx ? { ...er, model_type: '' } : er
)
);
}}
/>
{item.modelsEndpoint ? (
<ProviderModelCombobox
providerName={item.name}
title={t('setting.model-type-setting')}
value={form[idx].model_type || ''}
onChange={(v) => {
setForm((f) =>
f.map((fi, i) =>
i === idx ? { ...fi, model_type: v } : fi
)
);
setErrors((errs) =>
errs.map((er, i) =>
i === idx ? { ...er, model_type: '' } : er
)
);
}}
groups={cloudModelsState[item.id]?.groups || []}
loading={cloudModelsState[item.id]?.loading || false}
error={
cloudModelsState[item.id]?.error ??
errors[idx]?.model_type ??
null
}
disabled={!form[idx].apiKey}
disabledReason="Enter API Key first."
onRefresh={() => void fetchCloudProviderModels(idx)}
triggerPlaceholder={`${t('setting.enter-your-model-type')} ${
item.name
} ${t('setting.model-type')}`}
/>
) : (
<Input
id={`modelType-${item.id}`}
size="default"
title={t('setting.model-type-setting')}
state={errors[idx]?.model_type ? 'error' : 'default'}
note={errors[idx]?.model_type ?? undefined}
placeholder={`${t('setting.enter-your-model-type')} ${
item.name
} ${t('setting.model-type')}`}
value={form[idx].model_type}
onChange={(e) => {
const v = e.target.value;
setForm((f) =>
f.map((fi, i) =>
i === idx ? { ...fi, model_type: v } : fi
)
);
setErrors((errs) =>
errs.map((er, i) =>
i === idx ? { ...er, model_type: '' } : er
)
);
}}
/>
)}
{/* externalConfig render */}
{item.externalConfig &&
form[idx].externalConfig &&
Expand Down
Loading