diff --git a/chartspec/llmRouter.js b/chartspec/llmRouter.js
index ad12cde..b8852f6 100644
--- a/chartspec/llmRouter.js
+++ b/chartspec/llmRouter.js
@@ -29,6 +29,8 @@ IMPORTANT:
 4. Choose appropriate chart types (scatter, bar, line, pie, histogram, box, heatmap, table)
 5. Apply filters, groupBy, and aggregations when appropriate
 6. For groupBy, specify columns array and aggregations object with format: { columnName: { func: 'sum|mean|count|min|max' } }
+7. When user asks to modify or update an existing chart, review the previous ChartSpec in the conversation and apply only the requested changes while preserving other settings
+8. For iterative requests (e.g., "change the axis", "update colors"), build upon the previous specification
 
 Example response:
 {
@@ -145,16 +147,41 @@ async function callLLM(provider, apiKey, messages) {
  * @param {Array} columns - Available columns
  * @param {Array} sampleRows - Sample rows
  * @param {Object} currentSpec - Current ChartSpec (optional)
+ * @param {Array} chatHistory - Previous conversation messages (optional)
  * @returns {Promise} New ChartSpec object
  */
-export async function getUpdatedChartSpec(provider, apiKey, userMessage, columns, sampleRows, currentSpec = null) {
+export async function getUpdatedChartSpec(provider, apiKey, userMessage, columns, sampleRows, currentSpec = null, chatHistory = []) {
   const systemPrompt = buildSystemPrompt(columns, sampleRows);
 
   const messages = [
     { role: 'system', content: systemPrompt }
   ];
 
-  if (currentSpec) {
+  // Include conversation history for context
+  if (chatHistory && chatHistory.length > 0) {
+    chatHistory.forEach((msg, index) => {
+      if (msg.role === 'user') {
+        messages.push({ role: 'user', content: msg.content });
+      } else if (msg.role === 'assistant') {
+        // Convert spec object to JSON string for assistant messages
+        let content = msg.content;
+        if (msg.content !== null && msg.content !== undefined && typeof msg.content === 'object' && !Array.isArray(msg.content)) {
+          try {
+            content = JSON.stringify(msg.content);
+          } catch (e) {
+            // Handle circular references or non-serializable values
+            console.error(`Failed to stringify assistant message at index ${index}:`, e.message);
+            content = `[Serialization Error: ${e.message}]`;
+          }
+        }
+        messages.push({ role: 'assistant', content });
+      } else {
+        // Log unexpected message types for debugging
+        console.warn(`Unexpected message role at index ${index}:`, msg.role, 'Full message:', msg);
+      }
+    });
+  } else if (currentSpec) {
+    // Fallback to old behavior if no chat history provided
     messages.push({
       role: 'assistant',
       content: `Current spec: ${JSON.stringify(currentSpec)}`
diff --git a/chartspec/main.js b/chartspec/main.js
index 9cc6bf3..c8b706a 100644
--- a/chartspec/main.js
+++ b/chartspec/main.js
@@ -576,13 +576,18 @@ async function handleSendMessage() {
       userMessage,
       columns,
       sampleRows,
-      state.currentSpec
+      state.currentSpec,
+      state.chatHistory
     );
 
     // Update chat with spec
     updateChatMessage(loadingId, JSON.stringify(spec, null, 2));
 
-    // Store spec
+    /**
+     * Store spec and update chat history
+     * Note: We update history here (after successful LLM response) even if rendering fails,
+     * so the user can continue to iterate on the spec
+     */
     state.currentSpec = spec;
     state.chatHistory.push({ role: 'user', content: userMessage });
     state.chatHistory.push({ role: 'assistant', content: spec });