Skip to content

Commit 76f7a86

Browse files
committed
Merge branch 'main' into derrek/fix-copy-for-llms
Resolved conflicts in package.json by taking origin/main's version, which includes: (1) added the sync-openapi.ts script to build:openapi; (2) reordered prebuild to run build:embeddings before build:openapi.
2 parents e8cd9ce + adeaf06 commit 76f7a86

31 files changed

+4023
-2854
lines changed

.env.example

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
11
OPENAI_API_KEY=your_openai_api_key # Only used for generating embeddings or AI search
22
NEXT_PUBLIC_SITE_URL=https://docs.recall.network # Only for production
3-
NEXT_PUBLIC_GA_ID=your_google_analytics_id # Only for production
3+
NEXT_PUBLIC_GA_ID=your_google_analytics_id # Only for production
4+
OPENAPI_SOURCE_URL=https://raw.githubusercontent.com/recallnet/js-recall/main/apps/api/openapi/openapi.json # Used by sync-openapi script

app/api/chat/route.ts

Lines changed: 81 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ import { OpenAI } from "openai";
33
import path from "path";
44

55
import { MessageRecord } from "@/components/ai/context";
6+
import { SUGGESTED_QUESTIONS } from "@/components/ai/suggestions";
67
import {
78
DocsEmbedding,
89
REJECTION_MESSAGE,
@@ -48,6 +49,48 @@ setInterval(() => {
4849
}
4950
}, MAX_SESSION_AGE);
5051

52+
/**
53+
* Generate contextual follow-up suggestions based on the conversation.
54+
* Uses OpenAI to pick 3 most relevant questions from the curated pool and rephrase them
55+
* to fit the conversation context.
56+
*/
57+
async function generateContextualSuggestions(
58+
client: OpenAI,
59+
conversationHistory: MessageRecord[]
60+
): Promise<string[]> {
61+
try {
62+
const response = await client.chat.completions.create({
63+
model: MODEL,
64+
messages: [
65+
{
66+
role: "system",
67+
content: `You are helping generate follow-up questions for a user chatting with the Recall documentation AI.
68+
69+
Here is a curated pool of key questions:
70+
${SUGGESTED_QUESTIONS.map((q, i) => `${i + 1}. ${q}`).join("\n")}
71+
72+
Based on the conversation history, pick 3 most relevant questions from this pool and rephrase them to fit the conversation context as potential follow-up questions shown to the user for their next input.
73+
74+
Return ONLY a JSON array of 3 strings, nothing else. Example: ["Question 1?", "Question 2?", "Question 3?"]`,
75+
},
76+
...conversationHistory,
77+
],
78+
temperature: 0.7,
79+
});
80+
81+
const content = response.choices[0]?.message?.content?.trim();
82+
if (!content) return [];
83+
84+
// Parse the JSON array response
85+
const suggestions = JSON.parse(content) as string[];
86+
return Array.isArray(suggestions) ? suggestions.slice(0, 3) : [];
87+
} catch (error) {
88+
console.error("Error generating suggestions:", error);
89+
// Fallback to random questions if generation fails
90+
return [...SUGGESTED_QUESTIONS].sort(() => Math.random() - 0.5).slice(0, 3);
91+
}
92+
}
93+
5194
export async function POST(request: Request) {
5295
if (!openai) {
5396
return new Response(JSON.stringify({ error: "OpenAI API key not configured" }), {
@@ -122,16 +165,51 @@ export async function POST(request: Request) {
122165
stream: true,
123166
});
124167

125-
// Create a transform stream to append source links
168+
// Create a transform stream to capture response content and append source links and suggestions
169+
let assistantResponse = "";
126170
const transform = new TransformStream({
127171
transform(chunk, controller) {
172+
// Decode and capture the streaming content
173+
const decoder = new TextDecoder();
174+
const chunkText = decoder.decode(chunk, { stream: true });
175+
176+
// Parse OpenAI streaming response chunks to extract content
177+
try {
178+
const lines = chunkText.split("\n").filter((line) => line.trim());
179+
for (const line of lines) {
180+
if (line.startsWith("data: ")) {
181+
const data = line.slice(6);
182+
if (data === "[DONE]") continue;
183+
184+
const parsed = JSON.parse(data);
185+
const content = parsed.choices?.[0]?.delta?.content;
186+
if (content) {
187+
assistantResponse += content;
188+
}
189+
}
190+
}
191+
} catch {
192+
// Not a JSON chunk, might be raw content - ignore parsing errors
193+
}
194+
128195
controller.enqueue(chunk);
129196
},
130-
flush(controller) {
197+
async flush(controller) {
131198
// Add reference links after the stream is done
132199
const referenceLinks = getReferenceLinks(relevant);
133-
const sourceJson = JSON.stringify({ references: referenceLinks }) + "\n"; // Match OpenAI response formatting
200+
const sourceJson = JSON.stringify({ references: referenceLinks }) + "\n";
134201
controller.enqueue(new TextEncoder().encode(sourceJson));
202+
203+
// Create complete conversation history including the assistant's response
204+
const completeMessages = [
205+
...messages,
206+
{ role: "assistant" as const, content: assistantResponse },
207+
];
208+
209+
// Generate contextual suggestions based on the complete conversation
210+
const suggestions = await generateContextualSuggestions(openai, completeMessages);
211+
const suggestionsJson = JSON.stringify({ suggestions }) + "\n";
212+
controller.enqueue(new TextEncoder().encode(suggestionsJson));
135213
},
136214
});
137215

app/layout.tsx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ export default function RootLayout({ children }: { children: ReactNode }) {
4848
buttonVariants({
4949
color: "secondary",
5050
}),
51-
"bg-secondary/50 text-fd-secondary-foreground/80 fixed right-4 bottom-4 z-10 gap-2 rounded-xl shadow-lg backdrop-blur-lg md:right-6 md:bottom-8"
51+
"bg-secondary/50 text-fd-secondary-foreground/80 fixed right-4 bottom-4 z-10 gap-2 shadow-lg backdrop-blur-lg md:right-6 md:bottom-8"
5252
)}
5353
>
5454
<MessageCircle className="size-4" />

components/ai/engines/openai.ts

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,10 @@ type ReferencesResponse = {
2121
references: MessageReference[];
2222
};
2323

24+
/** Shape of the trailing NDJSON line that carries follow-up question suggestions. */
type SuggestionsResponse = {
  // Up to 3 contextual follow-up questions generated server-side.
  suggestions: string[];
};
27+
2428
export async function createOpenAIEngine(): Promise<Engine> {
2529
const sessionId = localStorage.getItem("recallChatSessionId") || crypto.randomUUID();
2630
localStorage.setItem("recallChatSessionId", sessionId);
@@ -100,6 +104,11 @@ export async function createOpenAIEngine(): Promise<Engine> {
100104
message.references = data.references;
101105
onUpdate?.(content);
102106
}
107+
if ("suggestions" in json && !content.includes(REJECTION_MESSAGE)) {
108+
const data = json as SuggestionsResponse;
109+
message.suggestions = data.suggestions;
110+
onUpdate?.(content);
111+
}
103112
}
104113
}
105114

0 commit comments

Comments
 (0)