Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
116 changes: 113 additions & 3 deletions app/lib/data/assessmentModels.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import type {
AssessmentModelConfig,
ConfigParamDefinition,
ModelOption,
} from "@/app/lib/types/assessment";
import type { ConfigBlob } from "@/app/lib/types/configs";

Expand All @@ -21,7 +22,42 @@ export const GPT4_STYLE_CONFIG = {
},
} as const satisfies Record<string, ConfigParamDefinition>;

// ── Gemini param configs ─────────────────────────────────────────

// Temperature knob shared by every Gemini model entry below.
const GEMINI_TEMPERATURE_CONFIG = {
  temperature: {
    type: "float",
    min: 0.0,
    max: 2.0,
    default: 0.4,
    description: "Controls randomness. Lower = more deterministic.",
  },
} as const satisfies Record<string, ConfigParamDefinition>;

// Gemini "thinking" models: temperature plus a thinking-effort level
// that supports the full MINIMAL..HIGH range.
const GEMINI_THINKING_CONFIG = {
  ...GEMINI_TEMPERATURE_CONFIG,
  thinking_level: {
    type: "enum",
    options: ["MINIMAL", "LOW", "MEDIUM", "HIGH"],
    default: "LOW",
    description: "Controls how much the model thinks before responding.",
  },
} as const satisfies Record<string, ConfigParamDefinition>;

// Variant for models that do not accept a MINIMAL thinking level
// (e.g. the pro preview); otherwise identical to GEMINI_THINKING_CONFIG.
const GEMINI_THINKING_NO_MINIMAL_CONFIG = {
  ...GEMINI_TEMPERATURE_CONFIG,
  thinking_level: {
    type: "enum",
    options: ["LOW", "MEDIUM", "HIGH"],
    default: "LOW",
    description: "Controls how much the model thinks before responding.",
  },
} as const satisfies Record<string, ConfigParamDefinition>;

// ── All model configs ────────────────────────────────────────────

export const ASSESSMENT_MODEL_CONFIGS: AssessmentModelConfig[] = [
// OpenAI
{ provider: "openai", model_name: "gpt-4o-mini", config: GPT4_STYLE_CONFIG },
{ provider: "openai", model_name: "gpt-4o", config: GPT4_STYLE_CONFIG },
{ provider: "openai", model_name: "gpt-4.1", config: GPT4_STYLE_CONFIG },
Expand Down Expand Up @@ -215,9 +251,84 @@ export const ASSESSMENT_MODEL_CONFIGS: AssessmentModelConfig[] = [
},
},
},
// Google (Gemini)
{
provider: "google",
model_name: "gemini-2.0-flash-lite",
config: GEMINI_TEMPERATURE_CONFIG,
},
{
provider: "google",
model_name: "gemini-2.0-flash",
config: GEMINI_TEMPERATURE_CONFIG,
},
{
provider: "google",
model_name: "gemini-2.5-flash-lite",
config: GEMINI_TEMPERATURE_CONFIG,
},
{
provider: "google",
model_name: "gemini-2.5-flash",
config: GEMINI_TEMPERATURE_CONFIG,
},
{
provider: "google",
model_name: "gemini-2.5-pro",
config: GEMINI_TEMPERATURE_CONFIG,
},
{
provider: "google",
model_name: "gemini-3.1-flash-lite-preview",
config: GEMINI_THINKING_CONFIG,
},
{
provider: "google",
model_name: "gemini-3.1-pro-preview",
config: GEMINI_THINKING_NO_MINIMAL_CONFIG,
},
{
provider: "google",
model_name: "gemini-3-flash-preview",
config: GEMINI_THINKING_CONFIG,
},
];

// Providers selectable in the assessment UI.
// NOTE: the pasted diff left both the old and new declarations in place,
// which is a duplicate `const` and would not compile; keep the merged list.
export const PROVIDER_OPTIONS = [
  { value: "openai", label: "OpenAI" },
  { value: "google", label: "Google (Gemini)" },
] as const;

/**
 * List the registered models for one provider as select options.
 * The raw model name is used for both value and label.
 */
export function getModelsByProvider(provider: string): ModelOption[] {
  const options: ModelOption[] = [];
  for (const entry of ASSESSMENT_MODEL_CONFIGS) {
    if (entry.provider === provider) {
      options.push({ value: entry.model_name, label: entry.model_name });
    }
  }
  return options;
}

/**
 * First model registered for a provider, in declaration order.
 * Falls back to "gpt-4o-mini" when the provider has no entries.
 */
export function getDefaultModelForProvider(provider: string): string {
  const match = ASSESSMENT_MODEL_CONFIGS.find((m) => m.provider === provider);
  return match ? match.model_name : "gpt-4o-mini";
}

/**
 * Parameter-definition map for a model name.
 * Unknown models fall back to the GPT-4-style config.
 */
export function getModelConfigDefinition(
  modelName: string,
): Record<string, ConfigParamDefinition> {
  const entry = ASSESSMENT_MODEL_CONFIGS.find(
    (item) => item.model_name === modelName,
  );
  return entry ? entry.config : GPT4_STYLE_CONFIG;
}

/**
 * Build the default parameter values for a model from its config
 * definition: one entry per declared param, set to its default.
 */
export function buildDefaultParams(
  modelName: string,
): Record<string, number | string> {
  const params: Record<string, number | string> = {};
  const definition = getModelConfigDefinition(modelName);
  for (const [name, def] of Object.entries(definition)) {
    params[name] = def.default;
  }
  return params;
}

export const ASSESSMENT_DEFAULT_CONFIG: ConfigBlob = {
completion: {
Expand All @@ -226,8 +337,7 @@ export const ASSESSMENT_DEFAULT_CONFIG: ConfigBlob = {
params: {
model: "gpt-4o-mini",
instructions: "",
top_p: GPT4_STYLE_CONFIG.top_p.default,
temperature: GPT4_STYLE_CONFIG.temperature.default,
...buildDefaultParams("gpt-4o-mini"),
},
},
};
2 changes: 1 addition & 1 deletion app/lib/types/assessment.ts
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ export interface ConfigParamDefinition {
}

/**
 * One model registered for assessment: which provider serves it and
 * the tunable parameter definitions it exposes.
 *
 * NOTE: the pasted diff left both the old (`"openai"`) and new
 * (`"openai" | "google"`) `provider` members in place — a duplicate
 * property that would not compile; keep only the widened union.
 */
export interface AssessmentModelConfig {
  provider: "openai" | "google";
  model_name: string;
  config: Record<string, ConfigParamDefinition>;
}
Expand Down