From 1ea8046eaeae8f940c043a75b4dedb2e870c6330 Mon Sep 17 00:00:00 2001
From: Ayush8923 <80516839+Ayush8923@users.noreply.github.com>
Date: Mon, 27 Apr 2026 23:07:30 +0530
Subject: [PATCH 01/18] feat(*): initiate chat feature for llm call

---
 .env.example                                |  11 +-
 app/api/llm/call/[job_id]/result/route.ts   |  38 +++
 app/api/llm/call/[job_id]/route.ts          |  25 ++
 app/api/llm/call/route.ts                   |  44 +++
 app/api/llm/webhook/[callback_id]/route.ts  |  79 +++++
 app/components/Sidebar.tsx                  |   6 +-
 app/components/auth/TokenVerifyPage.tsx     |   4 +-
 app/components/chat/ChatConfigPicker.tsx    | 220 +++++++++++++
 app/components/chat/ChatEmptyState.tsx      |  63 ++++
 app/components/chat/ChatInput.tsx           |  89 +++++
 app/components/chat/ChatMessage.tsx         |  62 ++++
 app/components/chat/ChatMessageList.tsx     |  32 ++
 app/components/chat/index.ts                |   5 +
 app/components/icons/index.tsx              |   2 +
 app/components/icons/sidebar/ChatIcon.tsx   |  21 ++
 app/components/icons/sidebar/SendIcon.tsx   |  21 ++
 app/components/settings/SettingsSidebar.tsx |   2 +-
 app/globals.css                             |  12 +
 app/lib/chatClient.ts                       | 267 +++++++++++++++
 app/lib/llmJobStore.ts                      |  75 +++++
 app/lib/navConfig.ts                        |   7 +
 app/lib/types/chat.ts                       |  89 +++++
 app/page.tsx                                | 342 +++++++++++++++++++-
 middleware.ts                               |   3 +-
 24 files changed, 1500 insertions(+), 19 deletions(-)
 create mode 100644 app/api/llm/call/[job_id]/result/route.ts
 create mode 100644 app/api/llm/call/[job_id]/route.ts
 create mode 100644 app/api/llm/call/route.ts
 create mode 100644 app/api/llm/webhook/[callback_id]/route.ts
 create mode 100644 app/components/chat/ChatConfigPicker.tsx
 create mode 100644 app/components/chat/ChatEmptyState.tsx
 create mode 100644 app/components/chat/ChatInput.tsx
 create mode 100644 app/components/chat/ChatMessage.tsx
 create mode 100644 app/components/chat/ChatMessageList.tsx
 create mode 100644 app/components/chat/index.ts
 create mode 100644 app/components/icons/sidebar/ChatIcon.tsx
 create mode 100644 app/components/icons/sidebar/SendIcon.tsx
 create mode 100644 app/lib/chatClient.ts
 create mode 100644 app/lib/llmJobStore.ts
 create mode 100644 app/lib/types/chat.ts

diff --git a/.env.example b/.env.example
index cf493c5d..c6aaacd0 100644
--- a/.env.example
+++ b/.env.example
@@ -1,4 +1,13 @@
 BACKEND_URL=http://localhost:8000
 GUARDRAILS_URL = http://localhost:8001
-GUARDRAILS_TOKEN =
+GUARDRAILS_TOKEN =
 NEXT_PUBLIC_GOOGLE_CLIENT_ID=your-google-client-id.apps.googleusercontent.com
+
+# Public origin of this Next.js app — the chat client builds
+# `${NEXT_PUBLIC_APP_URL}/api/llm/webhook/<callback_id>` as the callback URL
+# it sends with each LLM call.
+NEXT_PUBLIC_APP_URL=
+
+# Shared secret the BFF appends to the callback URL as `?secret=...` before
+# forwarding to the upstream LLM API. Leave blank to disable verification.
+WEBHOOK_SECRET=
diff --git a/app/api/llm/call/[job_id]/result/route.ts b/app/api/llm/call/[job_id]/result/route.ts
new file mode 100644
index 00000000..965d09c7
--- /dev/null
+++ b/app/api/llm/call/[job_id]/result/route.ts
@@ -0,0 +1,38 @@
+/**
+ * Returns the webhook-delivered result for a given job_id.
+ *
+ * Status semantics:
+ * - 204: webhook hasn't fired yet — caller should keep polling.
+ * - 200: result is ready (the body's `data.status` indicates success vs. failure).
+ *
+ * Reads only from the in-process job store; no upstream call is made.
+ */
+
+import { NextResponse } from "next/server";
+import { clearResult, getResult } from "@/app/lib/llmJobStore";
+
+export async function GET(
+  _request: Request,
+  { params }: { params: Promise<{ job_id: string }> },
+) {
+  const { job_id } = await params;
+  const record = getResult(job_id);
+
+  if (!record) {
+    return new NextResponse(null, { status: 204 });
+  }
+
+  // One-shot delivery: drop the parked entry once the browser has picked it
+  // up, so memory isn't held until TTL eviction for completed messages.
+  clearResult(job_id);
+
+  return NextResponse.json({
+    success: record.outcome === "completed",
+    data: {
+      job_id,
+      status: record.status,
+      llm_response: record.llm_response ?? null,
+      error_message: record.error_message ?? null,
+    },
+  });
+}
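The polling contract above is straightforward to consume from the browser. A minimal sketch of such a loop follows; the real implementation lives in app/lib/chatClient.ts, which is not shown in this excerpt, so the function name and the timing constants here are assumptions:

// Illustrative sketch only; the actual loop is in app/lib/chatClient.ts.
async function pollJobResult(jobId: string): Promise<unknown> {
  const INTERVAL_MS = 1500; // assumed cadence
  const MAX_ATTEMPTS = 200; // assumed cap (roughly five minutes)
  for (let attempt = 0; attempt < MAX_ATTEMPTS; attempt++) {
    const res = await fetch(`/api/llm/call/${jobId}/result`);
    // 200 means the webhook result is parked and ready; the body's
    // data.status then distinguishes completed from failed.
    if (res.status === 200) return res.json();
    // 204 means the webhook hasn't fired yet, so wait and retry.
    await new Promise((resolve) => setTimeout(resolve, INTERVAL_MS));
  }
  throw new Error(`Timed out waiting for result of job ${jobId}`);
}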
diff --git a/app/api/llm/call/[job_id]/route.ts b/app/api/llm/call/[job_id]/route.ts
new file mode 100644
index 00000000..4486e290
--- /dev/null
+++ b/app/api/llm/call/[job_id]/route.ts
@@ -0,0 +1,25 @@
+import { NextResponse } from "next/server";
+import { apiClient } from "@/app/lib/apiClient";
+
+export async function GET(
+  request: Request,
+  { params }: { params: Promise<{ job_id: string }> },
+) {
+  const { job_id } = await params;
+  try {
+    const { status, data } = await apiClient(
+      request,
+      `/api/v1/llm/call/${job_id}`,
+    );
+    return NextResponse.json(data, { status });
+  } catch (error) {
+    return NextResponse.json(
+      {
+        success: false,
+        error: error instanceof Error ? error.message : String(error),
+        data: null,
+      },
+      { status: 500 },
+    );
+  }
+}
diff --git a/app/api/llm/call/route.ts b/app/api/llm/call/route.ts
new file mode 100644
index 00000000..2d1e7c86
--- /dev/null
+++ b/app/api/llm/call/route.ts
@@ -0,0 +1,44 @@
+/**
+ * Thin proxy for the upstream POST /api/v1/llm/call.
+ *
+ * The browser is responsible for generating the `callback_id` and the full
+ * `callback_url` (see chatClient.buildCallbackUrl) — the BFF only forwards.
+ * The one server-side concern is `WEBHOOK_SECRET`: when set, we append it to
+ * the callback URL as a `?secret=...` query param so the receiver can verify
+ * the inbound webhook without leaking the secret into the client bundle.
+ */
+
+import { NextResponse } from "next/server";
+import { apiClient } from "@/app/lib/apiClient";
+
+function appendSecretToCallback(body: Record<string, unknown>): void {
+  const secret = process.env.WEBHOOK_SECRET;
+  if (!secret) return;
+  const url = body.callback_url;
+  if (typeof url !== "string" || url.length === 0) return;
+  const sep = url.includes("?") ? "&" : "?";
+  body.callback_url = `${url}${sep}secret=${encodeURIComponent(secret)}`;
+}
+
+export async function POST(request: Request) {
+  try {
+    const body = (await request.json()) as Record<string, unknown>;
+    appendSecretToCallback(body);
+
+    const { status, data } = await apiClient(request, "/api/v1/llm/call", {
+      method: "POST",
+      body: JSON.stringify(body),
+    });
+
+    return NextResponse.json(data, { status });
+  } catch (error) {
+    return NextResponse.json(
+      {
+        success: false,
+        error: error instanceof Error ? error.message : String(error),
+        data: null,
+      },
+      { status: 500 },
+    );
+  }
+}
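Since the browser owns callback generation, the client-side half of this contract can be as small as the sketch below. buildCallbackUrl is named in the comment above, but its body is not part of this excerpt, so everything beyond the name is an assumption:

// Hypothetical sketch of the client-side callback construction.
export function buildCallbackUrl(): { callbackId: string; callbackUrl: string } {
  const callbackId = crypto.randomUUID();
  // NEXT_PUBLIC_APP_URL is the public origin from .env.example; fall back to
  // the current origin during local development.
  const origin = process.env.NEXT_PUBLIC_APP_URL || window.location.origin;
  // No secret appears here: the BFF appends ?secret=... server-side.
  return { callbackId, callbackUrl: `${origin}/api/llm/webhook/${callbackId}` };
}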
diff --git a/app/api/llm/webhook/[callback_id]/route.ts b/app/api/llm/webhook/[callback_id]/route.ts
new file mode 100644
index 00000000..a2798d80
--- /dev/null
+++ b/app/api/llm/webhook/[callback_id]/route.ts
@@ -0,0 +1,79 @@
+/**
+ * Webhook receiver for asynchronous LLM call results.
+ *
+ * The backend POSTs here when a job finishes. The id is carried in the URL
+ * path (generated by the browser and forwarded through POST /api/llm/call),
+ * since the upstream payload itself doesn't include a job_id we can
+ * correlate against.
+ *
+ * Payload shape from the backend:
+ *   {
+ *     "success": true,
+ *     "data": { response: {...}, usage: {...}, provider_raw_response: {...} },
+ *     "error": null,
+ *     ...
+ *   }
+ *
+ * We treat the entire `data` blob as the `llm_response` and store it under
+ * the callback_id for the browser's polling to pick up.
+ *
+ * Auth: when WEBHOOK_SECRET is set we require a matching `secret` query param
+ * (or X-Webhook-Secret header). The id in the URL is unguessable on its own,
+ * but the secret adds a second layer for production deployments.
+ */
+
+import { NextResponse } from "next/server";
+import { LLMJobRecord, publish } from "@/app/lib/llmJobStore";
+
+function isAuthorized(request: Request): boolean {
+  const expected = process.env.WEBHOOK_SECRET;
+  if (!expected) return true;
+  const url = new URL(request.url);
+  const provided =
+    url.searchParams.get("secret") ||
+    request.headers.get("x-webhook-secret") ||
+    "";
+  return provided === expected;
+}
+
+export async function POST(
+  request: Request,
+  { params }: { params: Promise<{ callback_id: string }> },
+) {
+  if (!isAuthorized(request)) {
+    return NextResponse.json({ error: "forbidden" }, { status: 403 });
+  }
+
+  const { callback_id } = await params;
+  if (!callback_id) {
+    return NextResponse.json({ error: "missing callback id" }, { status: 400 });
+  }
+
+  let body: unknown;
+  try {
+    body = await request.json();
+  } catch {
+    return NextResponse.json({ error: "invalid json" }, { status: 400 });
+  }
+
+  const root = (body as Record<string, unknown> | null) ?? {};
+  const success = root.success !== false;
+  const errorMessage =
+    typeof root.error === "string"
+      ? root.error
+      : root.error == null
+        ? null
+        : JSON.stringify(root.error);
+
+  const record: LLMJobRecord = {
+    outcome: success ? "completed" : "failed",
+    status: success ? "completed" : "failed",
+    llm_response: root.data ?? null,
+    error_message: errorMessage,
+    receivedAt: Date.now(),
+  };
+
+  publish(callback_id, record);
+
+  return NextResponse.json({ ok: true });
+}
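The store backing publish/getResult/clearResult is app/lib/llmJobStore.ts (75 lines, not included in this excerpt). A minimal in-process implementation satisfying the contract these routes rely on would look like the sketch below; the TTL value and eviction strategy are assumptions:

// Minimal sketch of the in-process job store consumed by the routes above.
export interface LLMJobRecord {
  outcome: "completed" | "failed";
  status: string;
  llm_response?: unknown;
  error_message?: string | null;
  receivedAt: number;
}

// Assumed TTL so unclaimed results don't accumulate forever.
const TTL_MS = 10 * 60 * 1000;
const results = new Map<string, LLMJobRecord>();

export function publish(callbackId: string, record: LLMJobRecord): void {
  results.set(callbackId, record);
}

export function getResult(callbackId: string): LLMJobRecord | undefined {
  const record = results.get(callbackId);
  if (record && Date.now() - record.receivedAt > TTL_MS) {
    results.delete(callbackId);
    return undefined;
  }
  return record;
}

export function clearResult(callbackId: string): void {
  results.delete(callbackId);
}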
diff --git a/app/components/Sidebar.tsx b/app/components/Sidebar.tsx
index 9fcd585d..917031a8 100644
--- a/app/components/Sidebar.tsx
+++ b/app/components/Sidebar.tsx
@@ -17,6 +17,7 @@ import {
   SlidersIcon,
   ShieldCheckIcon,
   ChevronRightIcon,
+  ChatIcon,
 } from "@/app/components/icons";
 import { LoginModal } from "@/app/components/auth";
 import { Branding, UserMenuPopover } from "@/app/components/user-menu";
@@ -25,11 +26,11 @@ import { NAV_ITEMS } from "@/app/lib/navConfig";
 import { MenuItem, SidebarProps } from "@/app/lib/types/nav";
 
 /** Routes that are always accessible without auth */
-const PUBLIC_ROUTES = new Set(["/evaluations"]);
+const PUBLIC_ROUTES = new Set(["/", "/evaluations"]);
 
 export default function Sidebar({
   collapsed,
-  activeRoute = "/evaluations",
+  activeRoute = "/",
 }: SidebarProps) {
   const router = useRouter();
   const { currentUser, googleProfile, isAuthenticated, logout } = useAuth();
@@ -109,6 +110,7 @@
   };
 
   const iconMap: Record = {
+    chat: <ChatIcon />,
     clipboard: ,
     document: ,
     book: ,
diff --git a/app/components/auth/TokenVerifyPage.tsx b/app/components/auth/TokenVerifyPage.tsx
index cb314520..5b17c15c 100644
--- a/app/components/auth/TokenVerifyPage.tsx
+++ b/app/components/auth/TokenVerifyPage.tsx
@@ -110,7 +110,7 @@
 
       if (elapsed >= duration) {
         clearInterval(timer);
-        router.push("/evaluations");
+        router.push("/");
       }
     }, interval);
 
@@ -213,7 +213,7 @@
       {status === "error" && (
-
+
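The two diffs above share one routing rule: "/" joins "/evaluations" as a route that never requires login. A hypothetical sketch of the gating decision they imply follows; the actual click handler in Sidebar.tsx is not shown in this excerpt:

// Hypothetical helper; Sidebar.tsx's real handler is not part of this excerpt.
const PUBLIC_ROUTES = new Set(["/", "/evaluations"]);

function resolveNavAction(
  route: string,
  isAuthenticated: boolean,
): "navigate" | "prompt-login" {
  // Public routes always navigate; everything else requires a session.
  return PUBLIC_ROUTES.has(route) || isAuthenticated
    ? "navigate"
    : "prompt-login";
}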
diff --git a/app/components/chat/ChatConfigPicker.tsx b/app/components/chat/ChatConfigPicker.tsx
new file mode 100644
+          {open && (
+
+            {allConfigMeta.length === 0 ? (
+
+
+                No configurations yet
+
+
+                Create one in the Prompt Editor to start chatting.
+
+
+            ) : (
+              allConfigMeta.map((meta) => {
+                const isExpanded = expandedConfigId === meta.id;
+                const isLoadingGroup = loadingVersions.has(meta.id);
+                const items = versionItemsMap[meta.id] ?? [];
+                return (
+
+                    {isExpanded && (
+
+                        {isLoadingGroup ? (
+
+                            Loading versions…
+
+                        ) : items.length === 0 ? (
+
+                            No versions available
+
+                        ) : (
+                          items.map((item) => {
+                            const isSelected =
+                              configId === item.config_id &&
+                              version === item.version;
+                            return (
+
+                            );
+                          })
+                        )}
+
+                    )}
+
+                );
+              })
+            )}
+
+          )}
+  );
+}
diff --git a/app/components/chat/ChatEmptyState.tsx b/app/components/chat/ChatEmptyState.tsx
new file mode 100644
index 00000000..7a3e862c
--- /dev/null
+++ b/app/components/chat/ChatEmptyState.tsx
@@ -0,0 +1,63 @@
+/**
+ * ChatEmptyState - Welcome surface shown when the conversation is empty.
+ *
+ * Mirrors the OpenAI-style "Ask anything" hero with optional starter prompts.
+ */
+
+"use client";
+
+import { ChatIcon } from "@/app/components/icons";
+
+interface ChatEmptyStateProps {
+  hasConfig: boolean;
+  isAuthenticated: boolean;
+  onSuggestion?: (text: string) => void;
+}
+
+const SUGGESTIONS = [
+  "Summarize a long article in plain language",
+  "Draft a polite reply to a tough email",
+  "Brainstorm ideas for a product launch",
+  "Explain a complex concept like I'm five",
+];
+
+export default function ChatEmptyState({
+  hasConfig,
+  isAuthenticated,
+  onSuggestion,
+}: ChatEmptyStateProps) {
+  return (
+
+
+
+
+        What can I help with?
+
+        {!isAuthenticated
+          ? "Log in to start chatting with your assistants."
+          : !hasConfig
+            ? "Pick a configuration above to choose which assistant answers."
+            : "Ask anything — your conversation history is kept in this session."}
+
+
+      {isAuthenticated && hasConfig && onSuggestion && (
+
+          {SUGGESTIONS.map((s) => (
+
+          ))}
+
+      )}
+
+  );
+}
diff --git a/app/components/chat/ChatInput.tsx b/app/components/chat/ChatInput.tsx
new file mode 100644
index 00000000..73700f1c
--- /dev/null
+++ b/app/components/chat/ChatInput.tsx
@@ -0,0 +1,89 @@
+/**
+ * ChatInput - Auto-growing textarea with a send button.
+ *
+ * Enter sends; Shift+Enter inserts a newline. The input is disabled while a
+ * response is pending so users can't fire off multiple messages at once.
+ */
+
+"use client";
+
+import { KeyboardEvent, useEffect, useRef } from "react";
+import { SendIcon } from "@/app/components/icons";
+
+interface ChatInputProps {
+  value: string;
+  onChange: (next: string) => void;
+  onSend: () => void;
+  disabled?: boolean;
+  isPending?: boolean;
+  placeholder?: string;
+  helperText?: string;
+}
+
+const MAX_HEIGHT_PX = 200;
+
+export default function ChatInput({
+  value,
+  onChange,
+  onSend,
+  disabled = false,
+  isPending = false,
+  placeholder = "Message…",
+  helperText,
+}: ChatInputProps) {
+  const ref = useRef<HTMLTextAreaElement>(null);
+
+  useEffect(() => {
+    const el = ref.current;
+    if (!el) return;
+    el.style.height = "auto";
+    el.style.height = `${Math.min(el.scrollHeight, MAX_HEIGHT_PX)}px`;
+  }, [value]);
+
+  const handleKeyDown = (e: KeyboardEvent<HTMLTextAreaElement>) => {
+    if (e.key === "Enter" && !e.shiftKey) {
+      e.preventDefault();
+      if (!disabled && !isPending && value.trim()) onSend();
+    }
+  };
+
+  const canSend = !disabled && !isPending && value.trim().length > 0;
+
+  return (
+
+
+
+
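For context on how these pieces compose, a hypothetical page wiring for ChatInput follows. The real wiring lives in app/page.tsx, whose diff is not part of this excerpt, and submitMessage is a stand-in for the chatClient call that POSTs to /api/llm/call and then polls /api/llm/call/{job_id}/result:

// Hypothetical usage sketch; the real wiring is in app/page.tsx.
"use client";

import { useState } from "react";
import ChatInput from "@/app/components/chat/ChatInput";

// Stand-in for the chatClient helper; the signature is assumed.
declare function submitMessage(text: string): Promise<void>;

export default function ChatPage() {
  const [draft, setDraft] = useState("");
  const [isPending, setIsPending] = useState(false);

  const handleSend = async () => {
    setIsPending(true);
    try {
      await submitMessage(draft);
      setDraft("");
    } finally {
      setIsPending(false);
    }
  };

  return (
    <ChatInput
      value={draft}
      onChange={setDraft}
      onSend={handleSend}
      isPending={isPending}
    />
  );
}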