From 8a51b460d275e316752158edc7a3c8ec6b299fed Mon Sep 17 00:00:00 2001 From: Vaibhav Shinde Date: Mon, 23 Feb 2026 14:56:07 +0530 Subject: [PATCH 1/4] fix(workers-ai-provider): use gateway in `createRun()` --- packages/workers-ai-provider/src/utils.ts | 38 +++++++++++++++++++---- 1 file changed, 32 insertions(+), 6 deletions(-) diff --git a/packages/workers-ai-provider/src/utils.ts b/packages/workers-ai-provider/src/utils.ts index 8bd5405f5..429afadbd 100644 --- a/packages/workers-ai-provider/src/utils.ts +++ b/packages/workers-ai-provider/src/utils.ts @@ -108,7 +108,7 @@ export function createRun(config: CreateRunConfig): AiRun { options?: AiOptions & Record, ): Promise | AiModels[Name]["postProcessedOutputs"]> { const { - gateway: _gateway, + gateway, prefix: _prefix, extraHeaders: _extraHeaders, returnRawResponse, @@ -137,15 +137,37 @@ export function createRun(config: CreateRunConfig): AiRun { } const queryString = urlParams.toString(); - const url = `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/run/${model}${ - queryString ? `?${queryString}` : "" - }`; - const headers = { + // Build URL: use AI Gateway if gateway option is provided, otherwise direct API + const url = gateway?.id + ? `https://gateway.ai.cloudflare.com/v1/${accountId}/${gateway.id}/workers-ai/run/${model}${ + queryString ? `?${queryString}` : "" + }` + : `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/run/${model}${ + queryString ? 
`?${queryString}` : "" + }`; + + // Build headers with optional gateway cache headers + const headers: Record = { Authorization: `Bearer ${apiKey}`, "Content-Type": "application/json", }; + if (gateway) { + if (gateway.skipCache) { + headers["cf-aig-skip-cache"] = "true"; + } + if (typeof gateway.cacheTtl === "number") { + headers["cf-aig-cache-ttl"] = String(gateway.cacheTtl); + } + if (gateway.cacheKey) { + headers["cf-aig-cache-key"] = gateway.cacheKey; + } + if (gateway.metadata) { + headers["cf-aig-metadata"] = JSON.stringify(gateway.metadata); + } + } + const body = JSON.stringify(inputs); const response = await fetch(url, { @@ -186,8 +208,12 @@ export function createRun(config: CreateRunConfig): AiRun { // endpoint and return a JSON response with empty result instead of SSE. // Retry without streaming so doStream's graceful degradation path can // wrap the complete response as a synthetic stream. + // Use the same URL (gateway or direct) as the original request. const retryResponse = await fetch(url, { - body: JSON.stringify({ ...(inputs as Record), stream: false }), + body: JSON.stringify({ + ...(inputs as Record), + stream: false, + }), headers, method: "POST", signal: signal as AbortSignal | undefined, From f130d67fd599ad4d0c46b34c979b0377f2b64e1f Mon Sep 17 00:00:00 2001 From: Vaibhav Shinde Date: Mon, 23 Feb 2026 15:24:49 +0530 Subject: [PATCH 2/4] fix: updating headers or config updates transport --- .../workers-ai/src/client/components/Chat.tsx | 4 +++- .../workers-ai/src/client/utils/useUniqueId.ts | 17 +++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 examples/workers-ai/src/client/utils/useUniqueId.ts diff --git a/examples/workers-ai/src/client/components/Chat.tsx b/examples/workers-ai/src/client/components/Chat.tsx index d4ed1cafb..7b0c4abd3 100644 --- a/examples/workers-ai/src/client/components/Chat.tsx +++ b/examples/workers-ai/src/client/components/Chat.tsx @@ -2,6 +2,7 @@ import { useChat } from 
"@ai-sdk/react"; import { DefaultChatTransport } from "ai"; import { useState, useRef, useEffect, useMemo } from "react"; import { useConfig } from "../config"; +import { useUniqueId } from "../utils/useUniqueId"; import { chatModels } from "./models"; export function Chat() { @@ -30,6 +31,7 @@ export function Chat() { function ChatSession({ model }: { model: string }) { const { headers } = useConfig(); + const chatId = useUniqueId({ model, headers }, "chat"); const transport = useMemo( () => @@ -41,7 +43,7 @@ function ChatSession({ model }: { model: string }) { [model, headers], ); - const { messages, sendMessage, status, error } = useChat({ transport }); + const { messages, sendMessage, status, error } = useChat({ id: chatId, transport }); const [input, setInput] = useState(""); const isLoading = status === "streaming" || status === "submitted"; diff --git a/examples/workers-ai/src/client/utils/useUniqueId.ts b/examples/workers-ai/src/client/utils/useUniqueId.ts new file mode 100644 index 000000000..59561281c --- /dev/null +++ b/examples/workers-ai/src/client/utils/useUniqueId.ts @@ -0,0 +1,17 @@ +import { useMemo } from "react"; + +/** + * Generates a unique ID based on the provided data object. + * Useful for creating stable identifiers from model + headers combinations. 
+ */ +export function useUniqueId(data: Record, prefix = "id"): string { + return useMemo(() => { + const serialized = JSON.stringify(data); + let hash = 0; + for (let i = 0; i < serialized.length; i++) { + hash = (hash << 5) - hash + serialized.charCodeAt(i); + hash |= 0; + } + return `${prefix}-${Math.abs(hash).toString(36)}`; + }, [data, prefix]); +} From 0a66a1b4fdd53a3e8e087f646e4841e46feccf16 Mon Sep 17 00:00:00 2001 From: Vaibhav Shinde Date: Mon, 23 Feb 2026 15:30:49 +0530 Subject: [PATCH 3/4] fix(workers-ai-provider): add tests for createRun --- .../workers-ai-provider/test/utils.test.ts | 188 +++++++++++++++++- 1 file changed, 187 insertions(+), 1 deletion(-) diff --git a/packages/workers-ai-provider/test/utils.test.ts b/packages/workers-ai-provider/test/utils.test.ts index 50dba4a59..8f3407ec8 100644 --- a/packages/workers-ai-provider/test/utils.test.ts +++ b/packages/workers-ai-provider/test/utils.test.ts @@ -1,4 +1,4 @@ -import { describe, expect, it } from "vitest"; +import { describe, expect, it, vi, beforeEach, afterEach } from "vitest"; import { processPartialToolCalls, processToolCalls, @@ -6,6 +6,7 @@ import { sanitizeToolCallId, normalizeMessagesForBinding, prepareToolsAndToolChoice, + createRun, } from "../src/utils"; // --------------------------------------------------------------------------- @@ -404,3 +405,188 @@ describe("processText", () => { ).toBe("From choices"); }); }); + +// --------------------------------------------------------------------------- +// createRun - gateway support +// --------------------------------------------------------------------------- + +describe("createRun", () => { + const originalFetch = globalThis.fetch; + + beforeEach(() => { + globalThis.fetch = vi.fn(); + }); + + afterEach(() => { + globalThis.fetch = originalFetch; + vi.restoreAllMocks(); + }); + + it("should use direct API URL when no gateway is provided", async () => { + const mockResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ result: 
{ response: "Hello" } }), + headers: new Headers({ "content-type": "application/json" }), + }; + vi.mocked(globalThis.fetch).mockResolvedValue(mockResponse as unknown as Response); + + const run = createRun({ accountId: "test-account", apiKey: "test-key" }); + await run("@cf/meta/llama-3.1-8b-instruct" as any, { prompt: "Hi" }); + + expect(globalThis.fetch).toHaveBeenCalledWith( + "https://api.cloudflare.com/client/v4/accounts/test-account/ai/run/@cf/meta/llama-3.1-8b-instruct", + expect.objectContaining({ + method: "POST", + headers: { + Authorization: "Bearer test-key", + "Content-Type": "application/json", + }, + }), + ); + }); + + it("should use gateway URL when gateway.id is provided", async () => { + const mockResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ result: { response: "Hello" } }), + headers: new Headers({ "content-type": "application/json" }), + }; + vi.mocked(globalThis.fetch).mockResolvedValue(mockResponse as unknown as Response); + + const run = createRun({ accountId: "test-account", apiKey: "test-key" }); + await run("@cf/meta/llama-3.1-8b-instruct" as any, { prompt: "Hi" }, { gateway: { id: "my-gateway" } }); + + expect(globalThis.fetch).toHaveBeenCalledWith( + "https://gateway.ai.cloudflare.com/v1/test-account/my-gateway/workers-ai/run/@cf/meta/llama-3.1-8b-instruct", + expect.objectContaining({ + method: "POST", + headers: { + Authorization: "Bearer test-key", + "Content-Type": "application/json", + }, + }), + ); + }); + + it("should add cf-aig-skip-cache header when skipCache is true", async () => { + const mockResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ result: { response: "Hello" } }), + headers: new Headers({ "content-type": "application/json" }), + }; + vi.mocked(globalThis.fetch).mockResolvedValue(mockResponse as unknown as Response); + + const run = createRun({ accountId: "test-account", apiKey: "test-key" }); + await run("@cf/meta/llama-3.1-8b-instruct" as any, { prompt: "Hi" }, { gateway: { id: 
"my-gateway", skipCache: true } }); + + expect(globalThis.fetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + headers: expect.objectContaining({ + "cf-aig-skip-cache": "true", + }), + }), + ); + }); + + it("should add cf-aig-cache-ttl header when cacheTtl is provided", async () => { + const mockResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ result: { response: "Hello" } }), + headers: new Headers({ "content-type": "application/json" }), + }; + vi.mocked(globalThis.fetch).mockResolvedValue(mockResponse as unknown as Response); + + const run = createRun({ accountId: "test-account", apiKey: "test-key" }); + await run("@cf/meta/llama-3.1-8b-instruct" as any, { prompt: "Hi" }, { gateway: { id: "my-gateway", cacheTtl: 3600 } }); + + expect(globalThis.fetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + headers: expect.objectContaining({ + "cf-aig-cache-ttl": "3600", + }), + }), + ); + }); + + it("should add cf-aig-cache-key header when cacheKey is provided", async () => { + const mockResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ result: { response: "Hello" } }), + headers: new Headers({ "content-type": "application/json" }), + }; + vi.mocked(globalThis.fetch).mockResolvedValue(mockResponse as unknown as Response); + + const run = createRun({ accountId: "test-account", apiKey: "test-key" }); + await run("@cf/meta/llama-3.1-8b-instruct" as any, { prompt: "Hi" }, { gateway: { id: "my-gateway", cacheKey: "my-custom-key" } }); + + expect(globalThis.fetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + headers: expect.objectContaining({ + "cf-aig-cache-key": "my-custom-key", + }), + }), + ); + }); + + it("should add cf-aig-metadata header when metadata is provided", async () => { + const mockResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ result: { response: "Hello" } }), + headers: new Headers({ "content-type": "application/json" }), + }; + 
vi.mocked(globalThis.fetch).mockResolvedValue(mockResponse as unknown as Response); + + const run = createRun({ accountId: "test-account", apiKey: "test-key" }); + await run("@cf/meta/llama-3.1-8b-instruct" as any, { prompt: "Hi" }, { gateway: { id: "my-gateway", metadata: { user: "test", session: 123 } } }); + + expect(globalThis.fetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + headers: expect.objectContaining({ + "cf-aig-metadata": '{"user":"test","session":123}', + }), + }), + ); + }); + + it("should add all gateway cache headers when all options are provided", async () => { + const mockResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ result: { response: "Hello" } }), + headers: new Headers({ "content-type": "application/json" }), + }; + vi.mocked(globalThis.fetch).mockResolvedValue(mockResponse as unknown as Response); + + const run = createRun({ accountId: "test-account", apiKey: "test-key" }); + await run( + "@cf/meta/llama-3.1-8b-instruct" as any, + { prompt: "Hi" }, + { + gateway: { + id: "my-gateway", + skipCache: true, + cacheTtl: 7200, + cacheKey: "custom-key", + metadata: { env: "prod" }, + }, + }, + ); + + expect(globalThis.fetch).toHaveBeenCalledWith( + "https://gateway.ai.cloudflare.com/v1/test-account/my-gateway/workers-ai/run/@cf/meta/llama-3.1-8b-instruct", + expect.objectContaining({ + headers: { + Authorization: "Bearer test-key", + "Content-Type": "application/json", + "cf-aig-skip-cache": "true", + "cf-aig-cache-ttl": "7200", + "cf-aig-cache-key": "custom-key", + "cf-aig-metadata": '{"env":"prod"}', + }, + }), + ); + }); +}); From 78ba2c6b2be7506b527fb9c0bec4faf3ad6b51a3 Mon Sep 17 00:00:00 2001 From: Vaibhav Shinde Date: Mon, 23 Feb 2026 15:44:42 +0530 Subject: [PATCH 4/4] add changeset --- .changeset/loud-months-shake.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .changeset/loud-months-shake.md diff --git a/.changeset/loud-months-shake.md b/.changeset/loud-months-shake.md new file 
mode 100644 index 000000000..6d7424f81 --- /dev/null +++ b/.changeset/loud-months-shake.md @@ -0,0 +1,5 @@ +--- +"workers-ai-provider": patch +--- + +Fix `createRun()` to honor the `gateway` option: requests are routed through the AI Gateway URL when `gateway.id` is set, and the `cf-aig-skip-cache`, `cf-aig-cache-ttl`, `cf-aig-cache-key`, and `cf-aig-metadata` headers are applied from the gateway options