From 2d842dcd4a8a33f27b831b4fbc85c0962b698e20 Mon Sep 17 00:00:00 2001
From: neagualexa
Date: Mon, 22 Sep 2025 16:13:51 +0100
Subject: [PATCH 1/8] try fix max_retries from google call

---
 src/agents/llm_factory.py | 1 +
 src/module.py             | 3 ++-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/agents/llm_factory.py b/src/agents/llm_factory.py
index 5b6b1d5..75fcba6 100644
--- a/src/agents/llm_factory.py
+++ b/src/agents/llm_factory.py
@@ -79,6 +79,7 @@ def __init__(self, temperature: int = 0):
             model=os.environ['GOOGLE_AI_MODEL'],
             temperature=temperature,
             google_api_key=os.environ['GOOGLE_AI_API_KEY'],
+            max_retries=0,
         )
 
     def get_llm(self):
diff --git a/src/module.py b/src/module.py
index 2b7b2b4..02c6f66 100755
--- a/src/module.py
+++ b/src/module.py
@@ -1,6 +1,7 @@
 import time
 from typing import Any
-from lf_toolkit.chat import ChatResult as Result, ChatParams as Params
+from lf_toolkit.chat.result import ChatResult as Result
+from lf_toolkit.chat.params import ChatParams as Params
 
 try:
     from .agents.utils.parse_json_context_to_prompt import parse_json_to_prompt

From 2f5b51eb34a0e66a8eef7895713b2f7d3347e900 Mon Sep 17 00:00:00 2001
From: neagualexa
Date: Mon, 22 Sep 2025 16:21:42 +0100
Subject: [PATCH 2/8] try fix llm with retry

---
 src/agents/llm_factory.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/src/agents/llm_factory.py b/src/agents/llm_factory.py
index 75fcba6..6b2efc8 100644
--- a/src/agents/llm_factory.py
+++ b/src/agents/llm_factory.py
@@ -79,8 +79,13 @@ def __init__(self, temperature: int = 0):
             model=os.environ['GOOGLE_AI_MODEL'],
             temperature=temperature,
             google_api_key=os.environ['GOOGLE_AI_API_KEY'],
-            max_retries=0,
+        )
+
+        # This wrapper will catch errors and retry the call itself.
+        # It does NOT pass 'max_retries' to the underlying model.
+        self._google_llm_with_retries = self._google_llm.with_retry(
+            stop_after_attempt=1, # Corresponds to max_retries
         )
 
     def get_llm(self):
-        return self._google_llm
\ No newline at end of file
+        return self._google_llm_with_retries
\ No newline at end of file

From 217977e4d88c7039b8f6aadc24930ed214474042 Mon Sep 17 00:00:00 2001
From: neagualexa
Date: Tue, 23 Sep 2025 09:27:30 +0100
Subject: [PATCH 3/8] test set versions for packages

---
 .github/workflows/dev.yml | 1 +
 requirements.txt          | 6 +++---
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml
index 1191a86..05a6e84 100644
--- a/.github/workflows/dev.yml
+++ b/.github/workflows/dev.yml
@@ -45,6 +45,7 @@ jobs:
         run: |
           pip install --upgrade pip
           pip install -r requirements.txt
+          pip list
 
       - name: Run tests
         if: always()
diff --git a/requirements.txt b/requirements.txt
index cca5d47..d13b034 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,12 +2,12 @@ langchain
 langchain-chroma
 langchain-community
 langchain-core
-langchain-openai
-langchain_google_genai
+langchain-openai==0.3.33
+langchain_google_genai==2.1.12
 langchain-text-splitters
 langchainhub
 langdetect
-langgraph
+langgraph==0.6.7
 langsmith
 
 lf_toolkit[ipc] @ git+https://github.com/lambda-feedback/toolkit-python.git@main

From 9aa889bcf4721ec92278fb00c462201b12306a7c Mon Sep 17 00:00:00 2001
From: neagualexa
Date: Tue, 23 Sep 2025 09:34:09 +0100
Subject: [PATCH 4/8] revert testing changes

---
 .github/workflows/dev.yml | 1 -
 requirements.txt          | 6 +++---
 src/agents/llm_factory.py | 8 +-------
 3 files changed, 4 insertions(+), 11 deletions(-)

diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml
index 05a6e84..1191a86 100644
--- a/.github/workflows/dev.yml
+++ b/.github/workflows/dev.yml
@@ -45,7 +45,6 @@ jobs:
         run: |
           pip install --upgrade pip
           pip install -r requirements.txt
-          pip list
 
       - name: Run tests
         if: always()
diff --git a/requirements.txt b/requirements.txt
index d13b034..cca5d47 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,12 +2,12 @@ langchain
 langchain-chroma
 langchain-community
 langchain-core
-langchain-openai==0.3.33
-langchain_google_genai==2.1.12
+langchain-openai
+langchain_google_genai
 langchain-text-splitters
 langchainhub
 langdetect
-langgraph==0.6.7
+langgraph
 langsmith
 
 lf_toolkit[ipc] @ git+https://github.com/lambda-feedback/toolkit-python.git@main
diff --git a/src/agents/llm_factory.py b/src/agents/llm_factory.py
index 6b2efc8..5b6b1d5 100644
--- a/src/agents/llm_factory.py
+++ b/src/agents/llm_factory.py
@@ -80,12 +80,6 @@ def __init__(self, temperature: int = 0):
             temperature=temperature,
             google_api_key=os.environ['GOOGLE_AI_API_KEY'],
         )
-
-        # This wrapper will catch errors and retry the call itself.
-        # It does NOT pass 'max_retries' to the underlying model.
-        self._google_llm_with_retries = self._google_llm.with_retry(
-            stop_after_attempt=1, # Corresponds to max_retries
-        )
 
     def get_llm(self):
-        return self._google_llm_with_retries
\ No newline at end of file
+        return self._google_llm
\ No newline at end of file

From fbedbc52c5b824acb2fadc32e812c0fb9a513616 Mon Sep 17 00:00:00 2001
From: neagualexa
Date: Tue, 23 Sep 2025 09:45:26 +0100
Subject: [PATCH 5/8] update readme to match template

---
 README.md | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 7589c17..99e6906 100755
--- a/README.md
+++ b/README.md
@@ -62,7 +62,13 @@ git clone
 You're ready to start developing your chat function.
 Head over to the [Development](#development) section to learn more.
 
-#### 4. Update the README
+#### 4. Deploy the chat function
+
+You will have to add your API key and LLM model name to the GitHub repository settings. Under `Secrets and variables/Actions`, the API key must be added as a secret and the LLM model name as a variable.
+
+Use the same names as in your `.env` file, and make sure to update the `.github/workflows/{dev,main}.yml` files with the correct parameter names.
+
+#### 5. Update the README
 
 In the `README.md` file, change the title and description so it fits the purpose of your chat function.
 

From 368385daff2a4a1a8d4b48503db87e6d0f21b188 Mon Sep 17 00:00:00 2001
From: neagualexa
Date: Thu, 25 Sep 2025 16:00:53 +0100
Subject: [PATCH 6/8] updated index

---
 index.py | 39 +++++++++++++++++++--------------------
 1 file changed, 19 insertions(+), 20 deletions(-)

diff --git a/index.py b/index.py
index 5ef3bb8..cf071dc 100644
--- a/index.py
+++ b/index.py
@@ -1,42 +1,38 @@
 import json
 
 try:
     from .src.module import chat_module
-    from .src.agents.utils.types import JsonType
 except ImportError:
     from src.module import chat_module
-    from src.agents.utils.types import JsonType
 
-def handler(event: JsonType, context):
+def handler(event, context):
     """
     Lambda handler function
 
-    Args:
-        event (JsonType): The AWS Lambda event received by the gateway.
-        context (Any): The AWS Lambda context object.
-    """
     # Log the input event for debugging purposes
-    print("Received event:", json.dumps(event, indent=2))
+    # print("Received event:", " ".join(json.dumps(event, indent=2).splitlines()))
 
-    if "body" not in event:
-        return {
-            "statusCode": 400,
-            "body": "Missing 'body' key in event. Please confirm the key in the json body."
-        }
-    body = json.loads(event["body"])
-
-    if "message" not in body:
+    if "body" in event:
+        try:
+            event = json.loads(event["body"])
+        except json.JSONDecodeError:
+            return {
+                "statusCode": 400,
+                "body": "Invalid JSON format in the body or body not found. Please check the input."
+            }
+
+    if "message" not in event:
         return {
             "statusCode": 400,
             "body": "Missing 'message' key in event. Please confirm the key in the json body."
         }
-    if "params" not in body:
+    if "params" not in event:
         return {
             "statusCode": 400,
             "body": "Missing 'params' key in event. Please confirm the key in the json body. Make sure it contains the necessary conversation_id."
         }
-    message = body["message"]
-    params = body["params"]
+    message = event.get("message", None)
+    params = event.get("params", None)
 
     try:
         chatbot_response = chat_module(message, params)
@@ -49,7 +45,10 @@ def handler(event, context):
     # Create a response
     response = {
         "statusCode": 200,
-        "body": json.dumps(chatbot_response)
+        "body": chatbot_response
     }
 
+    # Log the response for debugging purposes
+    print("Returning response:", " ".join(json.dumps(response, indent=2).splitlines()))
+
     return response
\ No newline at end of file

From 1aad9c0c3e6f5aec07339d4cf90c55b91bfec01b Mon Sep 17 00:00:00 2001
From: neagualexa
Date: Thu, 25 Sep 2025 16:01:07 +0100
Subject: [PATCH 7/8] new $ equations prompt

---
 src/agents/utils/prompt_context_templates.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/agents/utils/prompt_context_templates.py b/src/agents/utils/prompt_context_templates.py
index 9eb175b..8f24553 100644
--- a/src/agents/utils/prompt_context_templates.py
+++ b/src/agents/utils/prompt_context_templates.py
@@ -73,7 +73,7 @@ def format_question_header(
 - Description: {content}
 {duration_text}
 
-> Note: Mathematical equations are in KaTeX format, preserve them the same. Use British English spellings.
+> Note: Mathematical equations are in KaTeX format, preserve them the same. Ensure mathematical equations are surrounded by one '$' for in-line equations and '$$' for block equations. Use British English spellings.
 
 ---
 """

From ad2cb0e1936cda89998bee4003024f9262964242 Mon Sep 17 00:00:00 2001
From: neagualexa
Date: Thu, 25 Sep 2025 16:10:42 +0100
Subject: [PATCH 8/8] fix equations generated

---
 src/agents/utils/prompt_context_templates.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/agents/utils/prompt_context_templates.py b/src/agents/utils/prompt_context_templates.py
index 8f24553..5d3d0af 100644
--- a/src/agents/utils/prompt_context_templates.py
+++ b/src/agents/utils/prompt_context_templates.py
@@ -73,8 +73,9 @@ def format_question_header(
 - Description: {content}
 {duration_text}
 
-> Note: Mathematical equations are in KaTeX format, preserve them the same. Ensure mathematical equations are surrounded by one '$' for in-line equations and '$$' for block equations. Use British English spellings.
-
+> Note: Mathematical equations are in KaTeX format, preserve them the same. Ensure mathematical equations are surrounded by one '$' for in-line equations and '$$' for block equations.
+Example: '$E=mc^2$' or '$$E=mc^2$$'.
+Use British English spellings.
 ---
 """
 