diff --git a/.gitignore b/.gitignore index f286560..c02af93 100644 --- a/.gitignore +++ b/.gitignore @@ -26,6 +26,7 @@ report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json .eslintcache .cache *.tsbuildinfo +.bun-cache/ # IntelliJ based IDEs .idea diff --git a/bun.lock b/bun.lock index 04a9c89..4a2c7f8 100644 --- a/bun.lock +++ b/bun.lock @@ -5,6 +5,7 @@ "": { "name": "helixent", "dependencies": { + "@anthropic-ai/sdk": "^0.87.0", "commander": "^14.0.3", "gray-matter": "^4.0.3", "ink": "^6.8.0", @@ -36,6 +37,10 @@ "packages": { "@alcalzone/ansi-tokenize": ["@alcalzone/ansi-tokenize@0.2.5", "https://registry.npmmirror.com/@alcalzone/ansi-tokenize/-/ansi-tokenize-0.2.5.tgz", { "dependencies": { "ansi-styles": "^6.2.1", "is-fullwidth-code-point": "^5.0.0" } }, "sha512-3NX/MpTdroi0aKz134A6RC2Gb2iXVECN4QaAXnvCIxxIm3C3AVB1mkUe8NaaiyvOpDfsrqWhYtj+Q6a62RrTsw=="], + "@anthropic-ai/sdk": ["@anthropic-ai/sdk@0.87.0", "https://registry.npmmirror.com/@anthropic-ai/sdk/-/sdk-0.87.0.tgz", { "dependencies": { "json-schema-to-ts": "^3.1.1" }, "peerDependencies": { "zod": "^3.25.0 || ^4.0.0" }, "optionalPeers": ["zod"], "bin": { "anthropic-ai-sdk": "bin/cli" } }, "sha512-ZvBWT5VkPTW6b8LIpugpuAkpcYPSLOXdWTcgQrpUqf4IeJ5ZrH5rT8sTsUDvxPCHAlRG3nF4VIWfjw6uLhJ18g=="], + + "@babel/runtime": ["@babel/runtime@7.29.2", "https://registry.npmmirror.com/@babel/runtime/-/runtime-7.29.2.tgz", {}, "sha512-JiDShH45zKHWyGe4ZNVRrCjBz8Nh9TMmZG1kh4QTK8hCBTWBi8Da+i7s1fJw7/lYpM4ccepSNfqzZ/QvABBi5g=="], + "@colors/colors": ["@colors/colors@1.5.0", "https://registry.npmmirror.com/@colors/colors/-/colors-1.5.0.tgz", {}, "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ=="], "@emnapi/core": ["@emnapi/core@1.9.2", "https://registry.npmmirror.com/@emnapi/core/-/core-1.9.2.tgz", { "dependencies": { "@emnapi/wasi-threads": "1.2.1", "tslib": "^2.4.0" } }, "sha512-UC+ZhH3XtczQYfOlu3lNEkdW/p4dsJ1r/bP7H8+rhao3TTTMO1ATq/4DdIi23XuGoFY+Cz0JmCbdVl0hz9jZcA=="], @@ -196,7 +201,7 @@ 
"color-name": ["color-name@1.1.3", "https://registry.npmmirror.com/color-name/-/color-name-1.1.3.tgz", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="], - "commander": ["commander@14.0.3", "", {}, "sha512-H+y0Jo/T1RZ9qPP4Eh1pkcQcLRglraJaSLoyOtHxu6AapkjWVCy2Sit1QQ4x3Dng8qDlSsZEet7g5Pq06MvTgw=="], + "commander": ["commander@14.0.3", "https://registry.npm.taobao.org/commander/-/commander-14.0.3.tgz", {}, "sha512-H+y0Jo/T1RZ9qPP4Eh1pkcQcLRglraJaSLoyOtHxu6AapkjWVCy2Sit1QQ4x3Dng8qDlSsZEet7g5Pq06MvTgw=="], "comment-parser": ["comment-parser@1.4.6", "https://registry.npmmirror.com/comment-parser/-/comment-parser-1.4.6.tgz", {}, "sha512-ObxuY6vnbWTN6Od72xfwN9DbzC7Y2vv8u1Soi9ahRKL37gb6y1qk6/dgjs+3JWuXJHWvsg3BXIwzd/rkmAwavg=="], @@ -308,6 +313,8 @@ "json-buffer": ["json-buffer@3.0.1", "https://registry.npmmirror.com/json-buffer/-/json-buffer-3.0.1.tgz", {}, "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="], + "json-schema-to-ts": ["json-schema-to-ts@3.1.1", "https://registry.npmmirror.com/json-schema-to-ts/-/json-schema-to-ts-3.1.1.tgz", { "dependencies": { "@babel/runtime": "^7.18.3", "ts-algebra": "^2.0.0" } }, "sha512-+DWg8jCJG2TEnpy7kOm/7/AxaYoaRbjVB4LFZLySZlWn8exGs3A4OLJR966cVvU26N7X9TWxl+Jsw7dzAqKT6g=="], + "json-schema-traverse": ["json-schema-traverse@0.4.1", "https://registry.npmmirror.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="], "json-stable-stringify-without-jsonify": ["json-stable-stringify-without-jsonify@1.0.1", "https://registry.npmmirror.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", {}, "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="], @@ -424,6 +431,8 @@ "tinyglobby": ["tinyglobby@0.2.15", 
"https://registry.npmmirror.com/tinyglobby/-/tinyglobby-0.2.15.tgz", { "dependencies": { "fdir": "^6.5.0", "picomatch": "^4.0.3" } }, "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ=="], + "ts-algebra": ["ts-algebra@2.0.0", "https://registry.npmmirror.com/ts-algebra/-/ts-algebra-2.0.0.tgz", {}, "sha512-FPAhNPFMrkwz76P7cdjdmiShwMynZYN6SgOujD1urY4oNm80Ou9oMdmbR45LotcKOXoy7wSmHkRFE6Mxbrhefw=="], + "ts-api-utils": ["ts-api-utils@2.5.0", "https://registry.npmmirror.com/ts-api-utils/-/ts-api-utils-2.5.0.tgz", { "peerDependencies": { "typescript": ">=4.8.4" } }, "sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA=="], "tslib": ["tslib@2.8.1", "https://registry.npmmirror.com/tslib/-/tslib-2.8.1.tgz", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], diff --git a/package.json b/package.json index 8b86973..ccf75c8 100644 --- a/package.json +++ b/package.json @@ -58,6 +58,7 @@ "typescript": "^5" }, "dependencies": { + "@anthropic-ai/sdk": "^0.87.0", "commander": "^14.0.3", "gray-matter": "^4.0.3", "ink": "^6.8.0", diff --git a/src/cli/bootstrap/model-wizard.tsx b/src/cli/bootstrap/model-wizard.tsx index 8dd8643..5b9bd61 100644 --- a/src/cli/bootstrap/model-wizard.tsx +++ b/src/cli/bootstrap/model-wizard.tsx @@ -9,11 +9,12 @@ import { currentTheme } from "../tui/themes"; type Step = "provider" | "apiKey" | "modelName" | "baseURL" | "confirm"; -function buildModelEntry(baseURL: string, apiKey: string, modelName: string): ModelEntry { +function buildModelEntry(baseURL: string, apiKey: string, modelName: string, provider: ModelEntry["provider"]): ModelEntry { return { name: modelName.trim(), baseURL: baseURL.trim(), APIKey: apiKey.trim(), + provider, }; } @@ -75,7 +76,7 @@ function ModelWizard({ onComplete, onAbort }: ModelWizardProps) { const finishWithBaseURL = (url: string) => { setCustomBaseURL(url); - const entry = 
buildModelEntry(url, apiKey, modelName); + const entry = buildModelEntry(url, apiKey, modelName, selectedProvider.providerType); setPendingEntry(entry); setStep("confirm"); }; @@ -84,7 +85,7 @@ function ModelWizard({ onComplete, onAbort }: ModelWizardProps) { if (!selectedProvider.baseURL) { setStep("baseURL"); } else { - const entry = buildModelEntry(selectedProvider.baseURL, apiKey, modelName); + const entry = buildModelEntry(selectedProvider.baseURL, apiKey, modelName, selectedProvider.providerType); setPendingEntry(entry); setStep("confirm"); } diff --git a/src/cli/config/schema.ts b/src/cli/config/schema.ts index 8c42a04..45a25dd 100644 --- a/src/cli/config/schema.ts +++ b/src/cli/config/schema.ts @@ -4,6 +4,8 @@ export const modelEntrySchema = z.object({ name: z.string().min(1), baseURL: z.string().min(1), APIKey: z.string().min(1), + /** Provider type: "openai" (default) or "anthropic". */ + provider: z.enum(["openai", "anthropic"]).optional().default("openai"), }); export const helixentConfigSchema = z.object({ diff --git a/src/cli/index.tsx b/src/cli/index.tsx index 2f1e0db..1a9ec7f 100644 --- a/src/cli/index.tsx +++ b/src/cli/index.tsx @@ -7,7 +7,9 @@ import { validateIntegrity } from "@/cli/bootstrap"; import { registerCommands } from "@/cli/commands"; import { loadConfig } from "@/cli/config"; import { createCodingAgent, globalApprovalManager } from "@/coding"; +import { AnthropicModelProvider } from "@/community/anthropic"; import { OpenAIModelProvider } from "@/community/openai"; +import type { ModelProvider } from "@/foundation"; import { Model } from "@/foundation"; import { App } from "./tui"; @@ -38,10 +40,18 @@ if (args.length > 0) { throw new Error("No models configured. 
Run `helixent config model add` to add one."); } - const provider = new OpenAIModelProvider({ - baseURL: entry.baseURL, - apiKey: entry.APIKey, - }); + let provider: ModelProvider; + if (entry.provider === "anthropic") { + provider = new AnthropicModelProvider({ + baseURL: entry.baseURL, + apiKey: entry.APIKey, + }); + } else { + provider = new OpenAIModelProvider({ + baseURL: entry.baseURL, + apiKey: entry.APIKey, + }); + } const model = new Model(entry.name, provider, { max_tokens: 16 * 1024, diff --git a/src/cli/model-providers.ts b/src/cli/model-providers.ts index 297ca85..8b14b70 100644 --- a/src/cli/model-providers.ts +++ b/src/cli/model-providers.ts @@ -1,22 +1,27 @@ +export type ProviderType = "openai" | "anthropic"; + export type ModelProviderConfig = { label: string; id: string; baseURL: string; + providerType: ProviderType; }; export const MODEL_PROVIDERS: ModelProviderConfig[] = [ - { label: "OpenAI", id: "openai", baseURL: "https://api.openai.com/v1" }, - { label: "Volcengine - General", id: "volcengine", baseURL: "https://ark.cn-beijing.volces.com/api/v3" }, + { label: "Anthropic (Claude)", id: "anthropic", baseURL: "https://api.anthropic.com", providerType: "anthropic" }, + { label: "OpenAI", id: "openai", baseURL: "https://api.openai.com/v1", providerType: "openai" }, + { label: "Volcengine - General", id: "volcengine", baseURL: "https://ark.cn-beijing.volces.com/api/v3", providerType: "openai" }, { label: "Volcengine - Coding Plan", id: "volcengine_coding_plan", baseURL: "https://ark.cn-beijing.volces.com/api/coding/v3", + providerType: "openai", }, - { label: "Qwen (Aliyun)", id: "qwen", baseURL: "https://dashscope.aliyuncs.com/compatible-mode/v1" }, - { label: "Minimax (Domestic)", id: "minimax_cn", baseURL: "https://api.minimaxi.com/v1" }, - { label: "Minimax (Global)", id: "minimax_global", baseURL: "https://api.minimax.io/v1" }, - { label: "GLM (Zhipu AI)", id: "glm", baseURL: "https://open.bigmodel.cn/api/paas/v4" }, - { label: "Kimi 
(Moonshot)", id: "kimi", baseURL: "https://api.moonshot.cn/v1" }, - { label: "DeepSeek (OpenAI compatible)", id: "deepseek", baseURL: "https://api.deepseek.com/v1" }, - { label: "Other", id: "other", baseURL: "" }, + { label: "Qwen (Aliyun)", id: "qwen", baseURL: "https://dashscope.aliyuncs.com/compatible-mode/v1", providerType: "openai" }, + { label: "Minimax (Domestic)", id: "minimax_cn", baseURL: "https://api.minimaxi.com/v1", providerType: "openai" }, + { label: "Minimax (Global)", id: "minimax_global", baseURL: "https://api.minimax.io/v1", providerType: "openai" }, + { label: "GLM (Zhipu AI)", id: "glm", baseURL: "https://open.bigmodel.cn/api/paas/v4", providerType: "openai" }, + { label: "Kimi (Moonshot)", id: "kimi", baseURL: "https://api.moonshot.cn/v1", providerType: "openai" }, + { label: "DeepSeek (OpenAI compatible)", id: "deepseek", baseURL: "https://api.deepseek.com/v1", providerType: "openai" }, + { label: "Other", id: "other", baseURL: "", providerType: "openai" }, ]; diff --git a/src/community/anthropic/index.ts b/src/community/anthropic/index.ts new file mode 100644 index 0000000..bfa819a --- /dev/null +++ b/src/community/anthropic/index.ts @@ -0,0 +1 @@ +export * from "./model-provider"; diff --git a/src/community/anthropic/model-provider.ts b/src/community/anthropic/model-provider.ts new file mode 100644 index 0000000..b6c2c42 --- /dev/null +++ b/src/community/anthropic/model-provider.ts @@ -0,0 +1,88 @@ +import Anthropic from "@anthropic-ai/sdk"; + +import type { AssistantMessage, ModelProvider, ModelProviderInvokeParams, TokenUsage } from "@/foundation"; + +import { StreamAccumulator } from "./stream-utils"; +import { + convertToAnthropicMessages, + convertToAnthropicTools, + extractSystemPrompt, + parseAssistantMessage, +} from "./utils"; + +/** + * A provider for the Anthropic API (Claude models). 
+ */ +export class AnthropicModelProvider implements ModelProvider { + _client: Anthropic; + + constructor({ baseURL, apiKey }: { baseURL?: string; apiKey?: string } = {}) { + // Only pass baseURL if it differs from the SDK default, so the SDK's + // own URL construction logic is used for the standard Anthropic endpoint. + const isDefaultURL = !baseURL || baseURL === "https://api.anthropic.com"; + this._client = new Anthropic({ + ...(isDefaultURL ? {} : { baseURL }), + apiKey, + }); + } + + async invoke(params: ModelProviderInvokeParams) { + const response = await this._client.messages.create(this._baseMessageParams(params), { + signal: params.signal, + }); + return parseAssistantMessage(response, toTokenUsage(response.usage)); + } + + async *stream(params: ModelProviderInvokeParams): AsyncGenerator { + const response = await this._client.messages.create( + { ...this._baseMessageParams(params), stream: true }, + { signal: params.signal }, + ); + + const acc = new StreamAccumulator(); + for await (const event of response) { + acc.push(event); + yield acc.snapshot(); + } + } + + private _baseMessageParams({ + model, + messages, + tools, + options, + }: ModelProviderInvokeParams): Anthropic.MessageCreateParamsNonStreaming { + const system = extractSystemPrompt(messages); + const anthropicMessages = convertToAnthropicMessages(messages); + const anthropicTools = tools ? convertToAnthropicTools(tools) : undefined; + + // Normalize options for Anthropic's API. + // When thinking is enabled, Anthropic requires `budget_tokens`. + // Default the budget to max_tokens minus a small buffer for the response. + const normalizedOptions = { ...options }; + const thinking = normalizedOptions.thinking as { type: string; budget_tokens?: number } | undefined; + if (thinking?.type === "enabled" && !thinking.budget_tokens) { + const maxTokens = (normalizedOptions.max_tokens as number | undefined) ?? 
8192; + const budget = Math.floor(maxTokens * 0.8); + normalizedOptions.thinking = { ...thinking, budget_tokens: budget }; + } + + return { + model, + max_tokens: 8192, + messages: anthropicMessages, + ...(system ? { system } : {}), + ...(anthropicTools && anthropicTools.length > 0 ? { tools: anthropicTools } : {}), + ...normalizedOptions, + }; + } +} + +function toTokenUsage(usage?: Anthropic.Usage): TokenUsage | undefined { + if (!usage) return undefined; + return { + promptTokens: usage.input_tokens ?? 0, + completionTokens: usage.output_tokens ?? 0, + totalTokens: (usage.input_tokens ?? 0) + (usage.output_tokens ?? 0), + }; +} diff --git a/src/community/anthropic/stream-utils.ts b/src/community/anthropic/stream-utils.ts new file mode 100644 index 0000000..dba42ba --- /dev/null +++ b/src/community/anthropic/stream-utils.ts @@ -0,0 +1,157 @@ +import type Anthropic from "@anthropic-ai/sdk"; + +import type { AssistantMessage, AssistantMessageContent, TokenUsage } from "@/foundation"; + +/** + * Accumulated state for a single content block while streaming. + * The `type` discriminator is fixed when the block starts; fields are filled + * in progressively as deltas arrive. + */ +type BlockState = + | { type: "text"; text: string } + | { type: "thinking"; thinking: string; signature?: string } + | { type: "tool_use"; id: string; name: string; partialJson: string }; + +/** + * Accumulates Anthropic stream events into progressively more complete + * {@link AssistantMessage} snapshots. + * + * Anthropic's streaming protocol emits a sequence of events: + * - `message_start` — carries initial usage (input tokens only). + * - `content_block_start` — opens a block at a given index (text, thinking, or tool_use). + * - `content_block_delta` — appends to the current block (text, thinking, input JSON, or signature). + * - `content_block_stop` — closes the block. + * - `message_delta` — carries final usage (output tokens) on the last event. 
+ */ +export class StreamAccumulator { + private readonly blocks = new Map(); + private inputTokens = 0; + private outputTokens = 0; + private hasFinalUsage = false; + + push(event: Anthropic.RawMessageStreamEvent): void { + switch (event.type) { + case "message_start": + this.inputTokens = event.message.usage.input_tokens ?? 0; + this.outputTokens = event.message.usage.output_tokens ?? 0; + return; + case "content_block_start": + this._handleBlockStart(event); + return; + case "content_block_delta": + this._handleBlockDelta(event); + return; + case "message_delta": + this._handleMessageDelta(event); + return; + // content_block_stop and message_stop carry no data we need. + default: + return; + } + } + + snapshot(): AssistantMessage { + const content: AssistantMessageContent = []; + // Preserve the Anthropic block order by index. + const ordered = [...this.blocks.entries()].sort((a, b) => a[0] - b[0]); + for (const [, block] of ordered) { + const item = blockToContent(block); + if (item) content.push(item); + } + + return { + role: "assistant", + content, + usage: this.hasFinalUsage ? this._buildUsage() : undefined, + ...(this.hasFinalUsage ? {} : { streaming: true }), + }; + } + + private _handleBlockStart(event: Anthropic.RawContentBlockStartEvent): void { + const { index, content_block } = event; + if (content_block.type === "text") { + this.blocks.set(index, { type: "text", text: content_block.text }); + } else if (content_block.type === "thinking") { + this.blocks.set(index, { + type: "thinking", + thinking: content_block.thinking, + ...(content_block.signature ? 
{ signature: content_block.signature } : {}), + }); + } else if (content_block.type === "tool_use") { + this.blocks.set(index, { + type: "tool_use", + id: content_block.id, + name: content_block.name, + partialJson: "", + }); + } + } + + private _handleBlockDelta(event: Anthropic.RawContentBlockDeltaEvent): void { + const block = this.blocks.get(event.index); + if (!block) return; + const delta = event.delta; + if (delta.type === "text_delta" && block.type === "text") { + block.text += delta.text; + } else if (delta.type === "thinking_delta" && block.type === "thinking") { + block.thinking += delta.thinking; + } else if (delta.type === "signature_delta" && block.type === "thinking") { + block.signature = delta.signature; + } else if (delta.type === "input_json_delta" && block.type === "tool_use") { + block.partialJson += delta.partial_json; + } + } + + private _handleMessageDelta(event: Anthropic.RawMessageDeltaEvent): void { + // Final usage — output tokens are cumulative on this event. + if (event.usage.output_tokens != null) { + this.outputTokens = event.usage.output_tokens; + } + if (event.usage.input_tokens != null) { + this.inputTokens = event.usage.input_tokens; + } + this.hasFinalUsage = true; + } + + private _buildUsage(): TokenUsage { + return { + promptTokens: this.inputTokens, + completionTokens: this.outputTokens, + totalTokens: this.inputTokens + this.outputTokens, + }; + } +} + +/** + * Converts a single accumulated block into its {@link AssistantMessageContent} + * representation, or returns null when the block carries no renderable payload yet + * (e.g. a text block that hasn't received any deltas). + */ +function blockToContent(block: BlockState): AssistantMessageContent[number] | null { + if (block.type === "text") { + return block.text ? 
{ type: "text", text: block.text } : null; + } + if (block.type === "thinking") { + const thinkingContent: Record = { + type: "thinking", + thinking: block.thinking, + }; + if (block.signature) { + // Preserve the signature so it can be sent back in multi-turn conversations. + thinkingContent._anthropicSignature = block.signature; + } + return thinkingContent as { type: "thinking"; thinking: string }; + } + // tool_use + return { type: "tool_use", id: block.id, name: block.name, input: parseToolInput(block.partialJson) }; +} + +function parseToolInput(partialJson: string): Record { + if (!partialJson) return {}; + try { + return JSON.parse(partialJson); + } catch { + // Input JSON is still incomplete — yield empty input until it finishes. + return {}; + } +} diff --git a/src/community/anthropic/utils.ts b/src/community/anthropic/utils.ts new file mode 100644 index 0000000..c54d8e8 --- /dev/null +++ b/src/community/anthropic/utils.ts @@ -0,0 +1,158 @@ +import type Anthropic from "@anthropic-ai/sdk"; + +import type { AssistantMessage, Message, TokenUsage, Tool } from "@/foundation"; + +/** + * Extracts the system prompt from helixent messages. + * Anthropic takes the system prompt as a separate top-level parameter + * rather than embedding it in the messages array. + * + * @param messages - The helixent messages to extract the system prompt from. + * @returns The system prompt string, or undefined if none is present. + */ +export function extractSystemPrompt(messages: Message[]): string | undefined { + const systemMessages = messages.filter((m) => m.role === "system"); + if (systemMessages.length === 0) return undefined; + return systemMessages + .flatMap((m) => m.content) + .filter((c) => c.type === "text") + .map((c) => c.text) + .join("\n\n"); +} + +/** + * Converts helixent messages to Anthropic MessageParam messages. + * System messages are excluded here (handled separately via extractSystemPrompt). + * + * @param messages - The helixent messages to convert. 
+ * @returns The Anthropic MessageParam messages. + */ +export function convertToAnthropicMessages( + messages: Message[], +): Anthropic.MessageParam[] { + const result: Anthropic.MessageParam[] = []; + + for (const message of messages) { + if (message.role === "system") { + // System messages are passed separately in Anthropic's API. + continue; + } + + if (message.role === "user") { + const content: Anthropic.ContentBlockParam[] = []; + for (const part of message.content) { + if (part.type === "text") { + content.push({ type: "text", text: part.text }); + } else if (part.type === "image_url") { + // Anthropic uses base64 or URL-based image sources. + // For URL-based images, we use the url type. + content.push({ + type: "image", + source: { + type: "url", + url: part.image_url.url, + }, + }); + } + } + result.push({ role: "user", content }); + } else if (message.role === "assistant") { + const content: Anthropic.ContentBlockParam[] = []; + for (const part of message.content) { + if (part.type === "text") { + content.push({ type: "text", text: part.text }); + } else if (part.type === "thinking") { + // Retrieve the preserved signature if available (set during parseAssistantMessage). + // Anthropic requires a valid signature for thinking blocks in multi-turn conversations. + const signature = + (part as unknown as Record)._anthropicSignature as string | undefined; + content.push({ + type: "thinking", + thinking: part.thinking, + signature: signature ?? "", + }); + } else if (part.type === "tool_use") { + content.push({ + type: "tool_use", + id: part.id, + name: part.name, + input: part.input, + }); + } + } + result.push({ role: "assistant", content }); + } else if (message.role === "tool") { + // Anthropic expects tool results as user messages with tool_result content blocks. 
+ const content: Anthropic.ToolResultBlockParam[] = []; + for (const part of message.content) { + if (part.type === "tool_result") { + content.push({ + type: "tool_result", + tool_use_id: part.tool_use_id, + content: part.content, + }); + } + } + result.push({ role: "user", content }); + } + } + + return result; +} + +/** + * Parses an Anthropic API response into a helixent AssistantMessage. + * + * @param response - The Anthropic API response. + * @returns The parsed helixent AssistantMessage. + */ +export function parseAssistantMessage( + response: Anthropic.Message, + usage?: TokenUsage, +): AssistantMessage { + const result: AssistantMessage = { + role: "assistant", + content: [], + usage, + }; + + for (const block of response.content) { + if (block.type === "text") { + result.content.push({ type: "text", text: block.text }); + } else if (block.type === "thinking") { + // Preserve the signature so it can be sent back in multi-turn conversations. + // The signature is stored as an extra runtime property on the content object. + const thinkingContent: Record = { + type: "thinking", + thinking: block.thinking, + }; + if (block.signature) { + thinkingContent._anthropicSignature = block.signature; + } + result.content.push(thinkingContent as { type: "thinking"; thinking: string }); + } else if (block.type === "tool_use") { + result.content.push({ + type: "tool_use", + id: block.id, + name: block.name, + input: block.input as Record, + }); + } + } + + return result; +} + +/** + * Converts helixent tools to Anthropic tool definitions. + * + * @param tools - The helixent tools to convert. + * @returns The Anthropic tool definitions. + */ +export function convertToAnthropicTools(tools: Tool[]): Anthropic.Tool[] { + return tools.map((tool) => ({ + name: tool.name, + description: tool.description, + input_schema: tool.parameters.toJSONSchema() as Anthropic.Tool["input_schema"], + })); +}