From 6ce9c0ae786cc62d4df58d300b7254ed5dfca846 Mon Sep 17 00:00:00 2001
From: sanjibani
Date: Tue, 31 Mar 2026 23:18:54 +0530
Subject: [PATCH] Fix 4 runtime bugs: UnboundLocalError, streaming None,
 discarded replace, memory leak

Fixes #243

- Fix UnboundLocalError in rag_agent fallback path by initializing
  tool_calls before the if/else block. Also replace list mutation during
  iteration with a filter pattern.
- Fix streaming endpoint yielding literal "None" to clients by moving
  yield inside the `if msg:` guard.
- Fix discarded str.replace() result so tool descriptions are actually
  cleaned before being sent to the LLM.
- Fix unbounded in-memory chat_history growth by using OrderedDict with
  a cap of 1000 conversations, evicting oldest on overflow.

Co-Authored-By: Claude Opus 4.6
---
 backend/src/agents/retriever_rag.py      | 12 +++++++-----
 backend/src/api/routers/conversations.py | 11 +++++++++--
 2 files changed, 16 insertions(+), 7 deletions(-)

diff --git a/backend/src/agents/retriever_rag.py b/backend/src/agents/retriever_rag.py
index a90260cf..9b2a595b 100644
--- a/backend/src/agents/retriever_rag.py
+++ b/backend/src/agents/retriever_rag.py
@@ -107,7 +107,7 @@ def rag_initialize(self) -> None:
         self.tool_descriptions = ""
         for tool in self.tools:
             text_desc = render_text_description([tool])
-            text_desc.replace("(query: str) -> Tuple[str, list[str], list[str]]", " ")
+            text_desc = text_desc.replace("(query: str) -> Tuple[str, list[str], list[str]]", " ")
             self.tool_descriptions += text_desc + "\n\n"
 
     def rag_agent(self, state: AgentState) -> dict[str, list[Any]]:
@@ -159,12 +159,14 @@ def rag_agent(self, state: AgentState) -> dict[str, list[Any]]:
             )
             return {"tools": []}
 
+        tool_calls: list[str] = []
         if "tool_names" in str(response):
-            tool_calls = response.get("tool_names", [])  # type: ignore
-            for tool in tool_calls:
-                if tool not in self.tool_names:
+            raw_tool_calls = response.get("tool_names", [])  # type: ignore
+            for tool in raw_tool_calls:
+                if tool in self.tool_names:
+                    tool_calls.append(tool)
+                else:
                     logging.warning(f"Tool {tool} not found in tool list.")
-                    tool_calls.remove(tool)
         else:
             logging.warning(str(response))
             logging.warning("Tool selection failed. Returning empty tool list.")
diff --git a/backend/src/api/routers/conversations.py b/backend/src/api/routers/conversations.py
index f0450628..21d7a7e0 100644
--- a/backend/src/api/routers/conversations.py
+++ b/backend/src/api/routers/conversations.py
@@ -1,5 +1,6 @@
 import os
 import logging
+from collections import OrderedDict
 from dotenv import load_dotenv
 from typing import Any
 
@@ -217,7 +218,9 @@ def parse_agent_output(output: list) -> tuple[str, list[ContextSource], list[str
 
 rg.initialize()
 
-chat_history: dict[UUID, list[dict[str, str]]] = {}
+MAX_IN_MEMORY_CONVERSATIONS = 1000
+
+chat_history: OrderedDict[UUID, list[dict[str, str]]] = OrderedDict()
 
 
 def get_history_str(db: Session | None, conversation_uuid: UUID | None) -> str:
@@ -274,6 +277,8 @@ async def get_agent_response(
         conversation_uuid = uuid4()
 
     if conversation_uuid not in chat_history:
+        if len(chat_history) >= MAX_IN_MEMORY_CONVERSATIONS:
+            chat_history.popitem(last=False)
         chat_history[conversation_uuid] = []
 
     inputs = {
@@ -369,6 +374,8 @@ async def get_response_stream(user_input: UserInput, db: Session | None) -> Any:
         conversation_uuid = uuid4()
 
     if conversation_uuid not in chat_history:
+        if len(chat_history) >= MAX_IN_MEMORY_CONVERSATIONS:
+            chat_history.popitem(last=False)
         chat_history[conversation_uuid] = []
 
     inputs = {
@@ -405,7 +412,7 @@ async def get_response_stream(user_input: UserInput, db: Session | None) -> Any:
 
         if msg:
             chunks.append(str(msg))
-        yield str(msg) + "\n\n"
+            yield str(msg) + "\n\n"
 
     urls = list(set(urls))
     yield f"Sources: {', '.join(urls)}\n\n"