From 8123e26b4ace196b6023c26ebf9a9d52deaca26c Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Fri, 12 Sep 2025 15:46:02 +0530 Subject: [PATCH 01/44] fix process_response argument --- backend/app/api/routes/responses.py | 105 ++++++++++++++++------------ 1 file changed, 62 insertions(+), 43 deletions(-) diff --git a/backend/app/api/routes/responses.py b/backend/app/api/routes/responses.py index 94e5f19db..fede3b678 100644 --- a/backend/app/api/routes/responses.py +++ b/backend/app/api/routes/responses.py @@ -126,25 +126,73 @@ def get_additional_data(request: dict) -> dict: def process_response( - request: ResponsesAPIRequest, - client: OpenAI, - assistant, - tracer: LangfuseTracer, + request_data: dict, project_id: int, organization_id: int, - ancestor_id: str, - latest_conversation: OpenAIConversation | None, ): """Process a response and send callback with results, with Langfuse tracing.""" + # Reconstruct request object from serialized data + request = ResponsesAPIRequest(**request_data) + assistant_id = request.assistant_id + logger.info( - f"[process_response] Starting generating response for assistant_id={mask_string(request.assistant_id)}, project_id={project_id}" + f"[process_response] Starting generating response for assistant_id={mask_string(assistant_id)}, project_id={project_id}" ) + # Reconstruct complex objects from IDs at the start of the job + with Session(engine) as session: + # Get assistant + assistant = get_assistant_by_id(session, assistant_id, project_id) + if not assistant: + logger.error( + f"[process_response] Assistant not found: assistant_id={mask_string(assistant_id)}, project_id={project_id}" + ) + return + + # Get OpenAI credentials and create client + credentials = get_provider_credential( + session=session, + org_id=organization_id, + provider="openai", + project_id=project_id, + ) + if not credentials or "api_key" not in credentials: + logger.error( + f"[process_response] OpenAI API key 
not configured for org_id={organization_id}, project_id={project_id}" + ) + return + + client = OpenAI(api_key=credentials["api_key"]) + + # Get Langfuse credentials and create tracer + langfuse_credentials = get_provider_credential( + session=session, + org_id=organization_id, + provider="langfuse", + project_id=project_id, + ) + tracer = LangfuseTracer( + credentials=langfuse_credentials, + response_id=request.response_id, + ) + + # Handle ancestor_id and latest conversation logic + ancestor_id = request.response_id + latest_conversation = None + if ancestor_id: + latest_conversation = get_conversation_by_ancestor_id( + session=session, + ancestor_response_id=ancestor_id, + project_id=project_id, + ) + if latest_conversation: + ancestor_id = latest_conversation.response_id + tracer.start_trace( name="generate_response_async", - input={"question": request.question, "assistant_id": request.assistant_id}, + input={"question": request.question, "assistant_id": assistant_id}, metadata={"callback_url": request.callback_url}, - tags=[request.assistant_id], + tags=[assistant_id], ) tracer.start_generation( @@ -177,7 +225,7 @@ def process_response( response_chunks = get_file_search_results(response) logger.info( - f"[process_response] Successfully generated response: response_id={response.id}, assistant={mask_string(request.assistant_id)}, project_id={project_id}" + f"[process_response] Successfully generated response: response_id={response.id}, assistant={mask_string(assistant_id)}, project_id={project_id}" ) tracer.end_generation( @@ -223,7 +271,7 @@ def process_response( user_question=request.question, response=response.output_text, model=response.model, - assistant_id=request.assistant_id, + assistant_id=assistant_id, ) create_conversation( @@ -263,7 +311,7 @@ def process_response( if request.callback_url: logger.info( - f"[process_response] Sending callback to URL: {request.callback_url}, assistant={mask_string(request.assistant_id)}, project_id={project_id}" + 
f"[process_response] Sending callback to URL: {request.callback_url}, assistant={mask_string(assistant_id)}, project_id={project_id}" ) # Send callback with webhook-specific response format @@ -281,7 +329,7 @@ def process_response( }, ) logger.info( - f"[process_response] Callback sent successfully, assistant={mask_string(request.assistant_id)}, project_id={project_id}" + f"[process_response] Callback sent successfully, assistant={mask_string(assistant_id)}, project_id={project_id}" ) @@ -325,40 +373,11 @@ async def responses( "metadata": None, } - client = OpenAI(api_key=credentials["api_key"]) - - langfuse_credentials = get_provider_credential( - session=_session, - org_id=organization_id, - provider="langfuse", - project_id=project_id, - ) - tracer = LangfuseTracer( - credentials=langfuse_credentials, - response_id=request.response_id, - ) - - ancestor_id = request.response_id - latest_conversation = None - if ancestor_id: - latest_conversation = get_conversation_by_ancestor_id( - session=_session, - ancestor_response_id=ancestor_id, - project_id=project_id, - ) - if latest_conversation: - ancestor_id = latest_conversation.response_id - background_tasks.add_task( process_response, - request, - client, - assistant, - tracer, + request.model_dump(), project_id, organization_id, - ancestor_id, - latest_conversation, ) logger.info( From 073a61cc847e8971f91df56177eea63fc12f50d1 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Fri, 12 Sep 2025 15:54:45 +0530 Subject: [PATCH 02/44] Refactor process_response to use get_openai_client for OpenAI API client creation --- backend/app/api/routes/responses.py | 20 ++------------------ 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/backend/app/api/routes/responses.py b/backend/app/api/routes/responses.py index fede3b678..c221d0e85 100644 --- a/backend/app/api/routes/responses.py +++ b/backend/app/api/routes/responses.py @@ -18,7 +18,7 @@ 
get_conversation_by_ancestor_id, ) from app.models import UserProjectOrg, OpenAIConversationCreate, OpenAIConversation -from app.utils import APIResponse, mask_string +from app.utils import APIResponse, mask_string, get_openai_client from app.core.langfuse.langfuse import LangfuseTracer logger = logging.getLogger(__name__) @@ -139,9 +139,7 @@ def process_response( f"[process_response] Starting generating response for assistant_id={mask_string(assistant_id)}, project_id={project_id}" ) - # Reconstruct complex objects from IDs at the start of the job with Session(engine) as session: - # Get assistant assistant = get_assistant_by_id(session, assistant_id, project_id) if not assistant: logger.error( @@ -149,22 +147,8 @@ def process_response( ) return - # Get OpenAI credentials and create client - credentials = get_provider_credential( - session=session, - org_id=organization_id, - provider="openai", - project_id=project_id, - ) - if not credentials or "api_key" not in credentials: - logger.error( - f"[process_response] OpenAI API key not configured for org_id={organization_id}, project_id={project_id}" - ) - return - - client = OpenAI(api_key=credentials["api_key"]) + client = get_openai_client(session, organization_id, project_id) - # Get Langfuse credentials and create tracer langfuse_credentials = get_provider_credential( session=session, org_id=organization_id, From a757f29315d12c4a328af05f01d0f404fe0be828 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Wed, 17 Sep 2025 16:44:54 +0530 Subject: [PATCH 03/44] Refactor response api: 1) Move models from route to models folder 2) fix process response to handle callback response --- backend/app/api/routes/responses.py | 351 +++++++++++----------------- backend/app/models/__init__.py | 8 + backend/app/models/response.py | 47 ++++ backend/app/utils.py | 4 +- 4 files changed, 198 insertions(+), 212 deletions(-) create mode 100644 backend/app/models/response.py diff --git 
a/backend/app/api/routes/responses.py b/backend/app/api/routes/responses.py index c221d0e85..eef466276 100644 --- a/backend/app/api/routes/responses.py +++ b/backend/app/api/routes/responses.py @@ -1,15 +1,14 @@ import logging -from typing import Optional import openai from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException -from openai import OpenAI -from pydantic import BaseModel, Extra +from fastapi.responses import JSONResponse from sqlmodel import Session -from app.core.db import engine from app.api.deps import get_db, get_current_user_org_project from app.api.routes.threads import send_callback +from app.core.db import engine +from app.core.langfuse.langfuse import LangfuseTracer from app.crud.assistants import get_assistant_by_id from app.crud.credentials import get_provider_credential from app.crud.openai_conversation import ( @@ -17,9 +16,17 @@ get_ancestor_id_from_response, get_conversation_by_ancestor_id, ) -from app.models import UserProjectOrg, OpenAIConversationCreate, OpenAIConversation -from app.utils import APIResponse, mask_string, get_openai_client -from app.core.langfuse.langfuse import LangfuseTracer +from app.models import ( + CallbackResponse, + Diagnostics, + FileResultChunk, + ResponsesAPIRequest, + ResponsesSyncAPIRequest, + UserProjectOrg, + OpenAIConversationCreate, +) +from app.utils import APIResponse, get_openai_client, mask_string + logger = logging.getLogger(__name__) router = APIRouter(tags=["responses"]) @@ -44,53 +51,6 @@ def handle_openai_error(e: openai.OpenAIError) -> str: return str(e) -class ResponsesAPIRequest(BaseModel): - assistant_id: str - question: str - callback_url: Optional[str] = None - response_id: Optional[str] = None - - class Config: - extra = Extra.allow - - -class ResponsesSyncAPIRequest(BaseModel): - model: str - instructions: str - vector_store_ids: list[str] - max_num_results: Optional[int] = 20 - temperature: Optional[float] = 0.1 - response_id: Optional[str] = None - question: str - - 
-class Diagnostics(BaseModel): - input_tokens: int - output_tokens: int - total_tokens: int - model: str - - -class FileResultChunk(BaseModel): - score: float - text: str - - -class _APIResponse(BaseModel): - status: str - response_id: str - message: str - chunks: list[FileResultChunk] - diagnostics: Optional[Diagnostics] = None - - class Config: - extra = Extra.allow - - -class ResponsesAPIResponse(APIResponse[_APIResponse]): - pass - - def get_file_search_results(response): results: list[FileResultChunk] = [] for tool_call in response.output: @@ -125,67 +85,99 @@ def get_additional_data(request: dict) -> dict: return {k: v for k, v in request.items() if k not in exclude_keys} +def send_response_callback( + callback_url: str, + callback_response: APIResponse, + request_dict: dict, +) -> None: + """Send a standardized callback response to the provided callback URL.""" + + callback_data = callback_response.model_dump() + + send_callback( + callback_url, + { + "success": callback_data.get("success", False), + "data": { + **(callback_data.get("data") or {}), + **get_additional_data(request_dict), + }, + "error": callback_data.get("error"), + "metadata": None, + }, + ) + + def process_response( request_data: dict, project_id: int, organization_id: int, ): """Process a response and send callback with results, with Langfuse tracing.""" - # Reconstruct request object from serialized data request = ResponsesAPIRequest(**request_data) assistant_id = request.assistant_id + request_dict = request.model_dump() logger.info( f"[process_response] Starting generating response for assistant_id={mask_string(assistant_id)}, project_id={project_id}" ) - with Session(engine) as session: - assistant = get_assistant_by_id(session, assistant_id, project_id) - if not assistant: - logger.error( - f"[process_response] Assistant not found: assistant_id={mask_string(assistant_id)}, project_id={project_id}" + callback_response: APIResponse | None = None + tracer: LangfuseTracer | None = None + 
+ try: + with Session(engine) as session: + assistant = get_assistant_by_id(session, assistant_id, project_id) + if not assistant: + msg = f"Assistant not found: assistant_id={mask_string(assistant_id)}, project_id={project_id}" + logger.error(f"[process_response] {msg}") + callback_response = APIResponse.failure_response(error="Assistant not found or not active") + return + + try: + client = get_openai_client(session, organization_id, project_id) + except HTTPException as e: + callback_response = APIResponse.failure_response(error=str(e.detail)) + return + + langfuse_credentials = get_provider_credential( + session=session, + org_id=organization_id, + provider="langfuse", + project_id=project_id, ) - return - client = get_openai_client(session, organization_id, project_id) + # Handle ancestor_id + ancestor_id = request.response_id + latest_conversation = None + if ancestor_id: + latest_conversation = get_conversation_by_ancestor_id( + session=session, + ancestor_response_id=ancestor_id, + project_id=project_id, + ) + if latest_conversation: + ancestor_id = latest_conversation.response_id - langfuse_credentials = get_provider_credential( - session=session, - org_id=organization_id, - provider="langfuse", - project_id=project_id, - ) + # --- Langfuse trace --- tracer = LangfuseTracer( credentials=langfuse_credentials, response_id=request.response_id, ) + tracer.start_trace( + name="generate_response_async", + input={"question": request.question, "assistant_id": assistant_id}, + metadata={"callback_url": request.callback_url}, + tags=[assistant_id], + ) - # Handle ancestor_id and latest conversation logic - ancestor_id = request.response_id - latest_conversation = None - if ancestor_id: - latest_conversation = get_conversation_by_ancestor_id( - session=session, - ancestor_response_id=ancestor_id, - project_id=project_id, - ) - if latest_conversation: - ancestor_id = latest_conversation.response_id - - tracer.start_trace( - name="generate_response_async", - 
input={"question": request.question, "assistant_id": assistant_id}, - metadata={"callback_url": request.callback_url}, - tags=[assistant_id], - ) - - tracer.start_generation( - name="openai_response", - input={"question": request.question}, - metadata={"model": assistant.model, "temperature": assistant.temperature}, - ) + tracer.start_generation( + name="openai_response", + input={"question": request.question}, + metadata={"model": assistant.model, "temperature": assistant.temperature}, + ) - try: + # Build params params = { "model": assistant.model, "previous_response_id": ancestor_id, @@ -193,19 +185,16 @@ def process_response( "temperature": assistant.temperature, "input": [{"role": "user", "content": request.question}], } - if assistant.vector_store_ids: - params["tools"] = [ - { - "type": "file_search", - "vector_store_ids": assistant.vector_store_ids, - "max_num_results": assistant.max_num_results, - } - ] + params["tools"] = [{ + "type": "file_search", + "vector_store_ids": assistant.vector_store_ids, + "max_num_results": assistant.max_num_results, + }] params["include"] = ["file_search_call.results"] + # Generate response response = client.responses.create(**params) - response_chunks = get_file_search_results(response) logger.info( @@ -213,10 +202,7 @@ def process_response( ) tracer.end_generation( - output={ - "response_id": response.id, - "message": response.output_text, - }, + output={"response_id": response.id, "message": response.output_text}, usage={ "input": response.usage.input_tokens, "output": response.usage.output_tokens, @@ -225,16 +211,12 @@ def process_response( }, model=response.model, ) - tracer.update_trace( tags=[response.id], - output={ - "status": "success", - "message": response.output_text, - "error": None, - }, + output={"status": "success", "message": response.output_text, "error": None}, ) + # Store conversation with Session(engine) as session: ancestor_response_id = ( latest_conversation.ancestor_response_id @@ -246,28 +228,24 @@ 
def process_response( project_id=project_id, ) ) - - # Create conversation record in database - conversation_data = OpenAIConversationCreate( - response_id=response.id, - previous_response_id=response.previous_response_id, - ancestor_response_id=ancestor_response_id, - user_question=request.question, - response=response.output_text, - model=response.model, - assistant_id=assistant_id, - ) - create_conversation( session=session, - conversation=conversation_data, + conversation=OpenAIConversationCreate( + response_id=response.id, + previous_response_id=response.previous_response_id, + ancestor_response_id=ancestor_response_id, + user_question=request.question, + response=response.output_text, + model=response.model, + assistant_id=assistant_id, + ), project_id=project_id, organization_id=organization_id, ) - request_dict = request.model_dump() - callback_response = ResponsesAPIResponse.success_response( - data=_APIResponse( + # Success callback payload + callback_response = APIResponse.success_response( + data=CallbackResponse( status="success", response_id=response.id, message=response.output_text, @@ -280,41 +258,25 @@ def process_response( ), ) ) + except openai.OpenAIError as e: error_message = handle_openai_error(e) logger.error( - f"[process_response] OpenAI API error during response processing: {error_message}, project_id={project_id}", + f"[process_response] OpenAI API error: {error_message}, project_id={project_id}", exc_info=True, ) - tracer.log_error(error_message, response_id=request.response_id) - - request_dict = request.model_dump() - callback_response = ResponsesAPIResponse.failure_response(error=error_message) + if tracer: + tracer.log_error(error_message, response_id=request.response_id) + callback_response = APIResponse.failure_response(error=error_message) - tracer.flush() + finally: + if tracer: + tracer.flush() + if request.callback_url and callback_response: + send_response_callback(request.callback_url, callback_response, request_dict) - if 
request.callback_url: - logger.info( - f"[process_response] Sending callback to URL: {request.callback_url}, assistant={mask_string(assistant_id)}, project_id={project_id}" - ) + return callback_response - # Send callback with webhook-specific response format - callback_data = callback_response.model_dump() - send_callback( - request.callback_url, - { - "success": callback_data.get("success", False), - "data": { - **(callback_data.get("data") or {}), - **get_additional_data(request_dict), - }, - "error": callback_data.get("error"), - "metadata": None, - }, - ) - logger.info( - f"[process_response] Callback sent successfully, assistant={mask_string(assistant_id)}, project_id={project_id}" - ) @router.post("/responses", response_model=dict) @@ -331,35 +293,10 @@ async def responses( _current_user.organization_id, ) - assistant = get_assistant_by_id(_session, request.assistant_id, project_id) - if not assistant: - logger.warning( - f"[response] Assistant not found: assistant_id={mask_string(request.assistant_id)}, project_id={project_id}, organization_id={organization_id}", - ) - raise HTTPException(status_code=404, detail="Assistant not found or not active") - - credentials = get_provider_credential( - session=_session, - org_id=organization_id, - provider="openai", - project_id=project_id, - ) - if not credentials or "api_key" not in credentials: - logger.error( - f"[response] OpenAI API key not configured for org_id={organization_id}, project_id={project_id}" - ) - request_dict = request.model_dump() - additional_data = get_additional_data(request_dict) - return { - "success": False, - "error": "OpenAI API key not configured for this organization.", - "data": additional_data if additional_data else None, - "metadata": None, - } - + request_dict = request.model_dump() background_tasks.add_task( process_response, - request.model_dump(), + request_dict, project_id, organization_id, ) @@ -367,8 +304,6 @@ async def responses( logger.info( f"[response] Background task 
scheduled for response processing: assistant_id={mask_string(request.assistant_id)}, project_id={project_id}, organization_id={organization_id}" ) - - request_dict = request.model_dump() additional_data = get_additional_data(request_dict) return { @@ -383,7 +318,7 @@ async def responses( } -@router.post("/responses/sync", response_model=ResponsesAPIResponse) +@router.post("/responses/sync", response_model=APIResponse[CallbackResponse]) async def responses_sync( request: ResponsesSyncAPIRequest, _session: Session = Depends(get_db), @@ -395,28 +330,21 @@ async def responses_sync( _current_user.organization_id, ) - credentials = get_provider_credential( - session=_session, - org_id=organization_id, - provider="openai", - project_id=project_id, - ) - if not credentials or "api_key" not in credentials: + try: + client = get_openai_client(_session, organization_id, project_id) + except HTTPException as e: request_dict = request.model_dump() - logger.error( - f"[response_sync] OpenAI API key not configured for org_id={organization_id}, project_id={project_id}" - ) - # Create a custom error response with additional data in data field additional_data = get_additional_data(request_dict) - return APIResponse( - success=False, - data=additional_data if additional_data else None, - error="OpenAI API key not configured for this organization.", - metadata=None, + return JSONResponse( + status_code=e.status_code, + content={ + "success": False, + "data": additional_data if additional_data else None, + "error": str(e.detail), + "metadata": None, + } ) - client = OpenAI(api_key=credentials["api_key"]) - langfuse_credentials = get_provider_credential( session=_session, org_id=organization_id, @@ -487,8 +415,8 @@ async def responses_sync( request_dict = request.model_dump() additional_data = get_additional_data(request_dict) - return ResponsesAPIResponse.success_response( - data=_APIResponse( + return APIResponse.success_response( + data=CallbackResponse( status="success", 
response_id=response.id, message=response.output_text, @@ -514,9 +442,12 @@ async def responses_sync( request_dict = request.model_dump() # Create a custom error response with additional data in data field additional_data = get_additional_data(request_dict) - return ResponsesAPIResponse( - success=False, - data=additional_data if additional_data else None, - error=error_message, - metadata=None, + return JSONResponse( + status_code=400, + content={ + "success": False, + "data": additional_data if additional_data else None, + "error": error_message, + "metadata": None, + } ) diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index e00f5ef28..dc0d3ab4e 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -91,4 +91,12 @@ ModelEvaluationUpdate, ) +from .response import ( + CallbackResponse, + Diagnostics, + FileResultChunk, + ResponsesAPIRequest, + ResponsesSyncAPIRequest, +) + from .onboarding import OnboardingRequest, OnboardingResponse diff --git a/backend/app/models/response.py b/backend/app/models/response.py new file mode 100644 index 000000000..d13217e75 --- /dev/null +++ b/backend/app/models/response.py @@ -0,0 +1,47 @@ +from sqlmodel import SQLModel + + +class ResponsesAPIRequest(SQLModel): + assistant_id: str + question: str + callback_url: str | None= None + response_id: str | None= None + + class Config: + extra = "allow" + + +class ResponsesSyncAPIRequest(SQLModel): + model: str + instructions: str + vector_store_ids: list[str] + max_num_results: int = 20 + temperature: float = 0.1 + response_id: str | None = None + question: str + + class Config: + extra = "allow" + + +class Diagnostics(SQLModel): + input_tokens: int + output_tokens: int + total_tokens: int + model: str + + +class FileResultChunk(SQLModel): + score: float + text: str + + +class CallbackResponse(SQLModel): + status: str + response_id: str + message: str + chunks: list[FileResultChunk] + diagnostics: Diagnostics | None = None + + 
class Config: + extra = "allow" diff --git a/backend/app/utils.py b/backend/app/utils.py index 1c03839ab..cd31e90b2 100644 --- a/backend/app/utils.py +++ b/backend/app/utils.py @@ -38,14 +38,14 @@ def success_response( @classmethod def failure_response( - cls, error: str | list, metadata: Optional[Dict[str, Any]] = None + cls, error: str | list, data:Optional[T]= None, metadata: Optional[Dict[str, Any]] = None ) -> "APIResponse[None]": if isinstance(error, list): # to handle cases when error is a list of errors error_message = "\n".join([f"{err['loc']}: {err['msg']}" for err in error]) else: error_message = error - return cls(success=False, data=None, error=error_message, metadata=metadata) + return cls(success=False, data=data, error=error_message, metadata=metadata) @dataclass From 824652e2c403fe71c46f4f27c1f16483fecda988 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Wed, 17 Sep 2025 17:14:51 +0530 Subject: [PATCH 04/44] move response code to service --- backend/app/api/routes/responses.py | 274 +--------------------------- backend/app/service/response.py | 209 +++++++++++++++++++++ backend/app/utils.py | 18 ++ 3 files changed, 232 insertions(+), 269 deletions(-) create mode 100644 backend/app/service/response.py diff --git a/backend/app/api/routes/responses.py b/backend/app/api/routes/responses.py index eef466276..ca4fffb77 100644 --- a/backend/app/api/routes/responses.py +++ b/backend/app/api/routes/responses.py @@ -1,21 +1,13 @@ import logging import openai -from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException +from fastapi import APIRouter, Depends, HTTPException from fastapi.responses import JSONResponse from sqlmodel import Session from app.api.deps import get_db, get_current_user_org_project -from app.api.routes.threads import send_callback -from app.core.db import engine from app.core.langfuse.langfuse import LangfuseTracer -from app.crud.assistants import get_assistant_by_id from 
app.crud.credentials import get_provider_credential -from app.crud.openai_conversation import ( - create_conversation, - get_ancestor_id_from_response, - get_conversation_by_ancestor_id, -) from app.models import ( CallbackResponse, Diagnostics, @@ -25,287 +17,31 @@ UserProjectOrg, OpenAIConversationCreate, ) -from app.utils import APIResponse, get_openai_client, mask_string +from app.service.response import get_additional_data, get_file_search_results +from app.utils import APIResponse, get_openai_client, handle_openai_error, mask_string logger = logging.getLogger(__name__) router = APIRouter(tags=["responses"]) -def handle_openai_error(e: openai.OpenAIError) -> str: - """Extract error message from OpenAI error.""" - # Try to get error message from different possible attributes - if hasattr(e, "body") and isinstance(e.body, dict) and "message" in e.body: - return e.body["message"] - elif hasattr(e, "message"): - return e.message - elif hasattr(e, "response") and hasattr(e.response, "json"): - try: - error_data = e.response.json() - if isinstance(error_data, dict) and "error" in error_data: - error_info = error_data["error"] - if isinstance(error_info, dict) and "message" in error_info: - return error_info["message"] - except: - pass - return str(e) - - -def get_file_search_results(response): - results: list[FileResultChunk] = [] - for tool_call in response.output: - if tool_call.type == "file_search_call": - results.extend( - [FileResultChunk(score=hit.score, text=hit.text) for hit in results] - ) - return results - - -def get_additional_data(request: dict) -> dict: - """Extract additional data from request, excluding specific keys.""" - # Keys to exclude for async request (ResponsesAPIRequest) - async_exclude_keys = {"assistant_id", "callback_url", "response_id", "question"} - # Keys to exclude for sync request (ResponsesSyncAPIRequest) - sync_exclude_keys = { - "model", - "instructions", - "vector_store_ids", - "max_num_results", - "temperature", - 
"response_id", - "question", - } - - # Determine which keys to exclude based on the request structure - if "assistant_id" in request: - exclude_keys = async_exclude_keys - else: - exclude_keys = sync_exclude_keys - - return {k: v for k, v in request.items() if k not in exclude_keys} - - -def send_response_callback( - callback_url: str, - callback_response: APIResponse, - request_dict: dict, -) -> None: - """Send a standardized callback response to the provided callback URL.""" - - callback_data = callback_response.model_dump() - - send_callback( - callback_url, - { - "success": callback_data.get("success", False), - "data": { - **(callback_data.get("data") or {}), - **get_additional_data(request_dict), - }, - "error": callback_data.get("error"), - "metadata": None, - }, - ) - - -def process_response( - request_data: dict, - project_id: int, - organization_id: int, -): - """Process a response and send callback with results, with Langfuse tracing.""" - request = ResponsesAPIRequest(**request_data) - assistant_id = request.assistant_id - request_dict = request.model_dump() - - logger.info( - f"[process_response] Starting generating response for assistant_id={mask_string(assistant_id)}, project_id={project_id}" - ) - - callback_response: APIResponse | None = None - tracer: LangfuseTracer | None = None - - try: - with Session(engine) as session: - assistant = get_assistant_by_id(session, assistant_id, project_id) - if not assistant: - msg = f"Assistant not found: assistant_id={mask_string(assistant_id)}, project_id={project_id}" - logger.error(f"[process_response] {msg}") - callback_response = APIResponse.failure_response(error="Assistant not found or not active") - return - - try: - client = get_openai_client(session, organization_id, project_id) - except HTTPException as e: - callback_response = APIResponse.failure_response(error=str(e.detail)) - return - - langfuse_credentials = get_provider_credential( - session=session, - org_id=organization_id, - 
provider="langfuse", - project_id=project_id, - ) - - # Handle ancestor_id - ancestor_id = request.response_id - latest_conversation = None - if ancestor_id: - latest_conversation = get_conversation_by_ancestor_id( - session=session, - ancestor_response_id=ancestor_id, - project_id=project_id, - ) - if latest_conversation: - ancestor_id = latest_conversation.response_id - - # --- Langfuse trace --- - tracer = LangfuseTracer( - credentials=langfuse_credentials, - response_id=request.response_id, - ) - tracer.start_trace( - name="generate_response_async", - input={"question": request.question, "assistant_id": assistant_id}, - metadata={"callback_url": request.callback_url}, - tags=[assistant_id], - ) - - tracer.start_generation( - name="openai_response", - input={"question": request.question}, - metadata={"model": assistant.model, "temperature": assistant.temperature}, - ) - - # Build params - params = { - "model": assistant.model, - "previous_response_id": ancestor_id, - "instructions": assistant.instructions, - "temperature": assistant.temperature, - "input": [{"role": "user", "content": request.question}], - } - if assistant.vector_store_ids: - params["tools"] = [{ - "type": "file_search", - "vector_store_ids": assistant.vector_store_ids, - "max_num_results": assistant.max_num_results, - }] - params["include"] = ["file_search_call.results"] - - # Generate response - response = client.responses.create(**params) - response_chunks = get_file_search_results(response) - - logger.info( - f"[process_response] Successfully generated response: response_id={response.id}, assistant={mask_string(assistant_id)}, project_id={project_id}" - ) - - tracer.end_generation( - output={"response_id": response.id, "message": response.output_text}, - usage={ - "input": response.usage.input_tokens, - "output": response.usage.output_tokens, - "total": response.usage.total_tokens, - "unit": "TOKENS", - }, - model=response.model, - ) - tracer.update_trace( - tags=[response.id], - 
output={"status": "success", "message": response.output_text, "error": None}, - ) - - # Store conversation - with Session(engine) as session: - ancestor_response_id = ( - latest_conversation.ancestor_response_id - if latest_conversation - else get_ancestor_id_from_response( - session=session, - current_response_id=response.id, - previous_response_id=response.previous_response_id, - project_id=project_id, - ) - ) - create_conversation( - session=session, - conversation=OpenAIConversationCreate( - response_id=response.id, - previous_response_id=response.previous_response_id, - ancestor_response_id=ancestor_response_id, - user_question=request.question, - response=response.output_text, - model=response.model, - assistant_id=assistant_id, - ), - project_id=project_id, - organization_id=organization_id, - ) - - # Success callback payload - callback_response = APIResponse.success_response( - data=CallbackResponse( - status="success", - response_id=response.id, - message=response.output_text, - chunks=response_chunks, - diagnostics=Diagnostics( - input_tokens=response.usage.input_tokens, - output_tokens=response.usage.output_tokens, - total_tokens=response.usage.total_tokens, - model=response.model, - ), - ) - ) - - except openai.OpenAIError as e: - error_message = handle_openai_error(e) - logger.error( - f"[process_response] OpenAI API error: {error_message}, project_id={project_id}", - exc_info=True, - ) - if tracer: - tracer.log_error(error_message, response_id=request.response_id) - callback_response = APIResponse.failure_response(error=error_message) - - finally: - if tracer: - tracer.flush() - if request.callback_url and callback_response: - send_response_callback(request.callback_url, callback_response, request_dict) - - return callback_response - - - @router.post("/responses", response_model=dict) async def responses( request: ResponsesAPIRequest, - background_tasks: BackgroundTasks, _session: Session = Depends(get_db), _current_user: UserProjectOrg = 
Depends(get_current_user_org_project), ): - """Asynchronous endpoint that processes requests in background with Langfuse tracing.""" - + """Asynchronous endpoint that processes requests using Celery.""" project_id, organization_id = ( _current_user.project_id, _current_user.organization_id, ) - request_dict = request.model_dump() - background_tasks.add_task( - process_response, - request_dict, - project_id, - organization_id, - ) logger.info( - f"[response] Background task scheduled for response processing: assistant_id={mask_string(request.assistant_id)}, project_id={project_id}, organization_id={organization_id}" + f"[response] Celery task scheduled for response processing: assistant_id={mask_string(request.assistant_id)}, project_id={project_id}, organization_id={organization_id}, task_id={celery_task.id}" ) additional_data = get_additional_data(request_dict) - return { "success": True, "data": { diff --git a/backend/app/service/response.py b/backend/app/service/response.py new file mode 100644 index 000000000..8f2456bc8 --- /dev/null +++ b/backend/app/service/response.py @@ -0,0 +1,209 @@ +import logging +import openai +from fastapi import HTTPException +from sqlmodel import Session +from app.core.db import engine +from app.core.langfuse.langfuse import LangfuseTracer +from app.crud.assistants import get_assistant_by_id +from app.crud.credentials import get_provider_credential +from app.crud.openai_conversation import ( + create_conversation, + get_ancestor_id_from_response, + get_conversation_by_ancestor_id, +) +from app.models import ( + CallbackResponse, + Diagnostics, + FileResultChunk, + ResponsesAPIRequest, + OpenAIConversationCreate, +) +from app.utils import APIResponse, get_openai_client, handle_openai_error, mask_string + +logger = logging.getLogger(__name__) + + +def get_file_search_results(response): + results: list[FileResultChunk] = [] + for tool_call in response.output: + if tool_call.type == "file_search_call": + results.extend( + 
[FileResultChunk(score=hit.score, text=hit.text) for hit in results] + ) + return results + + +def get_additional_data(request: dict) -> dict: + async_exclude_keys = {"assistant_id", "callback_url", "response_id", "question"} + sync_exclude_keys = { + "model", + "instructions", + "vector_store_ids", + "max_num_results", + "temperature", + "response_id", + "question", + } + if "assistant_id" in request: + exclude_keys = async_exclude_keys + else: + exclude_keys = sync_exclude_keys + return {k: v for k, v in request.items() if k not in exclude_keys} + + +def process_response_task(request_data: dict, project_id: int, organization_id: int): + """Process a response and return callback payload, for Celery use.""" + request = ResponsesAPIRequest(**request_data) + assistant_id = request.assistant_id + request_dict = request.model_dump() + + logger.info( + f"[process_response_task] Generating response for assistant_id={mask_string(assistant_id)}, project_id={project_id}" + ) + + callback_response: APIResponse | None = None + tracer: LangfuseTracer | None = None + + try: + with Session(engine) as session: + assistant = get_assistant_by_id(session, assistant_id, project_id) + if not assistant: + msg = f"Assistant not found: assistant_id={mask_string(assistant_id)}, project_id={project_id}" + logger.error(f"[process_response_task] {msg}") + callback_response = APIResponse.failure_response(error="Assistant not found or not active") + return callback_response + + try: + client = get_openai_client(session, organization_id, project_id) + except HTTPException as e: + callback_response = APIResponse.failure_response(error=str(e.detail)) + return callback_response + + langfuse_credentials = get_provider_credential( + session=session, + org_id=organization_id, + provider="langfuse", + project_id=project_id, + ) + + ancestor_id = request.response_id + latest_conversation = None + if ancestor_id: + latest_conversation = get_conversation_by_ancestor_id( + session=session, + 
ancestor_response_id=ancestor_id, + project_id=project_id, + ) + if latest_conversation: + ancestor_id = latest_conversation.response_id + + tracer = LangfuseTracer( + credentials=langfuse_credentials, + response_id=request.response_id, + ) + tracer.start_trace( + name="generate_response_async", + input={"question": request.question, "assistant_id": assistant_id}, + metadata={"callback_url": request.callback_url}, + tags=[assistant_id], + ) + + tracer.start_generation( + name="openai_response", + input={"question": request.question}, + metadata={"model": assistant.model, "temperature": assistant.temperature}, + ) + + params = { + "model": assistant.model, + "previous_response_id": ancestor_id, + "instructions": assistant.instructions, + "temperature": assistant.temperature, + "input": [{"role": "user", "content": request.question}], + } + if assistant.vector_store_ids: + params["tools"] = [{ + "type": "file_search", + "vector_store_ids": assistant.vector_store_ids, + "max_num_results": assistant.max_num_results, + }] + params["include"] = ["file_search_call.results"] + + response = client.responses.create(**params) + response_chunks = get_file_search_results(response) + + logger.info( + f"[process_response_task] Successfully generated response: response_id={response.id}, assistant={mask_string(assistant_id)}, project_id={project_id}" + ) + + tracer.end_generation( + output={"response_id": response.id, "message": response.output_text}, + usage={ + "input": response.usage.input_tokens, + "output": response.usage.output_tokens, + "total": response.usage.total_tokens, + "unit": "TOKENS", + }, + model=response.model, + ) + tracer.update_trace( + tags=[response.id], + output={"status": "success", "message": response.output_text, "error": None}, + ) + + with Session(engine) as session: + ancestor_response_id = ( + latest_conversation.ancestor_response_id + if latest_conversation + else get_ancestor_id_from_response( + session=session, + current_response_id=response.id, + 
previous_response_id=response.previous_response_id, + project_id=project_id, + ) + ) + create_conversation( + session=session, + conversation=OpenAIConversationCreate( + response_id=response.id, + previous_response_id=response.previous_response_id, + ancestor_response_id=ancestor_response_id, + user_question=request.question, + response=response.output_text, + model=response.model, + assistant_id=assistant_id, + ), + project_id=project_id, + organization_id=organization_id, + ) + + callback_response = APIResponse.success_response( + data=CallbackResponse( + status="success", + response_id=response.id, + message=response.output_text, + chunks=response_chunks, + diagnostics=Diagnostics( + input_tokens=response.usage.input_tokens, + output_tokens=response.usage.output_tokens, + total_tokens=response.usage.total_tokens, + model=response.model, + ), + ) + ) + + except openai.OpenAIError as e: + error_message = handle_openai_error(e) + logger.error( + f"[process_response_task] OpenAI API error: {error_message}, project_id={project_id}", + exc_info=True, + ) + if tracer: + tracer.log_error(error_message, response_id=request.response_id) + callback_response = APIResponse.failure_response(error=error_message) + + finally: + if tracer: + tracer.flush() + + return callback_response diff --git a/backend/app/utils.py b/backend/app/utils.py index cd31e90b2..9311692a9 100644 --- a/backend/app/utils.py +++ b/backend/app/utils.py @@ -10,6 +10,7 @@ from jinja2 import Template from jwt.exceptions import InvalidTokenError from fastapi import HTTPException +import openai from openai import OpenAI from pydantic import BaseModel from sqlmodel import Session @@ -200,6 +201,23 @@ def get_openai_client(session: Session, org_id: int, project_id: int) -> OpenAI: ) +def handle_openai_error(e: openai.OpenAIError) -> str: + if hasattr(e, "body") and isinstance(e.body, dict) and "message" in e.body: + return e.body["message"] + elif hasattr(e, "message"): + return e.message + elif hasattr(e, 
"response") and hasattr(e.response, "json"): + try: + error_data = e.response.json() + if isinstance(error_data, dict) and "error" in error_data: + error_info = error_data["error"] + if isinstance(error_info, dict) and "message" in error_info: + return error_info["message"] + except: + pass + return str(e) + + @ft.singledispatch def load_description(filename: Path) -> str: if not filename.exists(): From b471c3e10a25849062e752cf2a19c703229b86f3 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Thu, 18 Sep 2025 15:11:45 +0530 Subject: [PATCH 05/44] Implement job management and integrate it with response processing --- .../versions/be78247139f9_create_job_table.py | 43 ++++++ backend/app/api/routes/responses.py | 12 +- backend/app/crud/__init__.py | 3 + backend/app/crud/jobs.py | 40 ++++++ backend/app/models/__init__.py | 3 + backend/app/models/job.py | 52 ++++++++ backend/app/{service => services}/response.py | 125 +++++++++++++++++- 7 files changed, 268 insertions(+), 10 deletions(-) create mode 100644 backend/app/alembic/versions/be78247139f9_create_job_table.py create mode 100644 backend/app/crud/jobs.py create mode 100644 backend/app/models/job.py rename backend/app/{service => services}/response.py (64%) diff --git a/backend/app/alembic/versions/be78247139f9_create_job_table.py b/backend/app/alembic/versions/be78247139f9_create_job_table.py new file mode 100644 index 000000000..c8ad08c94 --- /dev/null +++ b/backend/app/alembic/versions/be78247139f9_create_job_table.py @@ -0,0 +1,43 @@ +"""create job table + +Revision ID: be78247139f9 +Revises: 6ed6ed401847 +Create Date: 2025-09-18 13:00:23.212198 + +""" +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes + + +# revision identifiers, used by Alembic. +revision = 'be78247139f9' +down_revision = '6ed6ed401847' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('job', + sa.Column('id', sa.Uuid(), nullable=False), + sa.Column('task_id', sa.Uuid(), nullable=True), + sa.Column('trace_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column('error_message', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column('status', sa.Enum('PENDING', 'PROCESSING', 'SUCCESS', 'FAILED', name='jobstatus'), nullable=False), + sa.Column('job_type', sa.Enum('RESPONSE', name='jobtype'), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.drop_constraint('openai_conversation_project_id_fkey1', 'openai_conversation', type_='foreignkey') + op.drop_constraint('openai_conversation_organization_id_fkey1', 'openai_conversation', type_='foreignkey') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_foreign_key('openai_conversation_organization_id_fkey1', 'openai_conversation', 'organization', ['organization_id'], ['id']) + op.create_foreign_key('openai_conversation_project_id_fkey1', 'openai_conversation', 'project', ['project_id'], ['id']) + op.drop_table('job') + # ### end Alembic commands ### diff --git a/backend/app/api/routes/responses.py b/backend/app/api/routes/responses.py index ca4fffb77..662f9a531 100644 --- a/backend/app/api/routes/responses.py +++ b/backend/app/api/routes/responses.py @@ -17,7 +17,7 @@ UserProjectOrg, OpenAIConversationCreate, ) -from app.service.response import get_additional_data, get_file_search_results +from app.services.response import get_additional_data, get_file_search_results, start_job from app.utils import APIResponse, get_openai_client, handle_openai_error, mask_string @@ -36,11 +36,15 @@ async def responses( _current_user.project_id, _current_user.organization_id, ) - request_dict = request.model_dump() - logger.info( - f"[response] Celery task scheduled for response 
processing: assistant_id={mask_string(request.assistant_id)}, project_id={project_id}, organization_id={organization_id}, task_id={celery_task.id}" + start_job( + db=_session, + request=request, + project_id=project_id, + organization_id=organization_id, ) + request_dict = request.model_dump() + additional_data = get_additional_data(request_dict) return { "success": True, diff --git a/backend/app/crud/__init__.py b/backend/app/crud/__init__.py index f73a07003..43ef15565 100644 --- a/backend/app/crud/__init__.py +++ b/backend/app/crud/__init__.py @@ -10,6 +10,8 @@ from .document_collection import DocumentCollectionCrud from .doc_transformation_job import DocTransformationJobCrud +from .jobs import JobCrud + from .organization import ( create_organization, get_organization_by_id, @@ -59,6 +61,7 @@ ) from .openai_conversation import ( + get_ancestor_id_from_response, get_conversation_by_id, get_conversation_by_response_id, get_conversation_by_ancestor_id, diff --git a/backend/app/crud/jobs.py b/backend/app/crud/jobs.py new file mode 100644 index 000000000..d17348bfa --- /dev/null +++ b/backend/app/crud/jobs.py @@ -0,0 +1,40 @@ +import logging +from sqlmodel import Session +from uuid import UUID + +from app.models.job import Job, JobType, JobUpdate +from app.core.util import now + +logger = logging.getLogger(__name__) + +class JobCrud: + def __init__(self, session: Session): + self.session = session + + def create(self, job_type: JobType, trace_id: str | None = None) -> Job: + new_job = Job( + job_type=job_type, + trace_id=trace_id, + ) + self.session.add(new_job) + self.session.commit() + self.session.refresh(new_job) + return new_job + + + def update(self, job_id: UUID, job_update: JobUpdate) -> Job: + + job = self.session.get(Job, job_id) + if not job: + raise ValueError(f"Job not found with the given job_id {job_id}") + + update_data = job_update.model_dump(exclude_unset=True) + for field, value in update_data.items(): + setattr(job, field, value) + + 
job.updated_at = now() + self.session.add(job) + self.session.commit() + self.session.refresh(job) + + return job \ No newline at end of file diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index dc0d3ab4e..4558cc598 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -14,6 +14,9 @@ TransformationStatus, ) from .document_collection import DocumentCollection + +from .job import Job, JobType, JobStatus, JobUpdate + from .message import Message from .project_user import ( diff --git a/backend/app/models/job.py b/backend/app/models/job.py new file mode 100644 index 000000000..40268a81f --- /dev/null +++ b/backend/app/models/job.py @@ -0,0 +1,52 @@ +from datetime import datetime +from enum import Enum +from uuid import uuid4, UUID + +from sqlmodel import SQLModel, Field +from app.core.util import now + +class JobStatus(str, Enum): + PENDING = "PENDING" + PROCESSING = "PROCESSING" + SUCCESS = "SUCCESS" + FAILED = "FAILED" + + +class JobType(str, Enum): + RESPONSE = "RESPONSE" + + +class Job(SQLModel, table=True): + __tablename__ = "job" + + id: UUID = Field( + default_factory=uuid4, + primary_key=True, + ) + task_id: UUID | None = Field( + nullable=True, + description="Celery task ID returned when job is queued." + ) + trace_id: str | None = Field( + default=None, + description="Tracing ID for correlating logs and traces." + ) + error_message: str | None = Field( + default=None, + description="Error details if the job fails." + ) + status: JobStatus = Field( + default=JobStatus.PENDING, + description="Current state of the job." + ) + job_type: JobType = Field( + description="Job type or classification (e.g., response job, ingestion job)." 
+ ) + created_at: datetime = Field(default_factory=now) + updated_at: datetime = Field(default_factory=now) + + +class JobUpdate(SQLModel): + status: JobStatus | None = None + error_message: str | None = None + task_id: UUID | None = None \ No newline at end of file diff --git a/backend/app/service/response.py b/backend/app/services/response.py similarity index 64% rename from backend/app/service/response.py rename to backend/app/services/response.py index 8f2456bc8..114abe6cd 100644 --- a/backend/app/service/response.py +++ b/backend/app/services/response.py @@ -1,28 +1,62 @@ import logging +from uuid import UUID import openai from fastapi import HTTPException from sqlmodel import Session from app.core.db import engine from app.core.langfuse.langfuse import LangfuseTracer -from app.crud.assistants import get_assistant_by_id -from app.crud.credentials import get_provider_credential -from app.crud.openai_conversation import ( +from app.crud import ( + JobCrud, + get_assistant_by_id, + get_provider_credential, create_conversation, get_ancestor_id_from_response, get_conversation_by_ancestor_id, + ) from app.models import ( CallbackResponse, Diagnostics, FileResultChunk, + JobType, + JobStatus, + JobUpdate, ResponsesAPIRequest, OpenAIConversationCreate, ) from app.utils import APIResponse, get_openai_client, handle_openai_error, mask_string +from app.celery.utils import start_high_priority_job +from app.api.routes.threads import send_callback logger = logging.getLogger(__name__) +def start_job( + db: Session, + request: ResponsesAPIRequest, + project_id: int, + organization_id: int, +) -> UUID: + """Create a response job and schedule Celery task.""" + + job_crud = JobCrud(session=db) + job = job_crud.create(job_type=JobType.RESPONSE, trace_id="Aviraj") + + # Schedule the Celery task + task_id = start_high_priority_job( + function_path="app.services.response.execute_job", + project_id=project_id, + job_id=str(job.id), + request_data=request.model_dump(), + 
organization_id=organization_id, + ) + + logger.info( + f"[start_job] Job scheduled to generate response | job_id={job.id}, project_id={project_id}, task_id={task_id}" + ) + return job.id + + def get_file_search_results(response): results: list[FileResultChunk] = [] for tool_call in response.output: @@ -51,11 +85,81 @@ def get_additional_data(request: dict) -> dict: return {k: v for k, v in request.items() if k not in exclude_keys} -def process_response_task(request_data: dict, project_id: int, organization_id: int): +def send_response_callback( + callback_url: str, + callback_response: APIResponse, + request_dict: dict, +) -> None: + """Send a standardized callback response to the provided callback URL.""" + + # Convert Pydantic model to dict + callback_response = callback_response.model_dump() + + send_callback( + callback_url, + { + "success": callback_response.get("success", False), + "data": { + **(callback_response.get("data") or {}), + **get_additional_data(request_dict), + }, + "error": callback_response.get("error"), + "metadata": None, + }, + ) + + +def execute_job( + request_data: dict, + project_id: int, + organization_id: int, + job_id: str, + task_id: str, + task_instance, +) -> APIResponse | None: + """Celery task to process a response request asynchronously.""" + request_data = ResponsesAPIRequest(**request_data) + job_id = UUID(job_id) + response = process_response( + request=request_data, + project_id=project_id, + organization_id=organization_id, + job_id=job_id, + task_id=task_id, + task_instance=task_instance, + ) + if response is None: + response = APIResponse.failure_response(error="Unknown error occurred") + + with Session(engine) as session: + job_crud = JobCrud(session=session) + if response.success: + job_update = JobUpdate(status=JobStatus.SUCCESS) + else: + job_update = JobUpdate(status=JobStatus.FAILED, error_message=response.error) + job_crud.update(job_id=job_id, job_update=job_update) + + + if request_data.callback_url: + 
send_response_callback( + callback_url=request_data.callback_url, + callback_response=response, + request_dict=request_data.model_dump(), + ) + + return response.model_dump() + + +def process_response( + request: ResponsesAPIRequest, + project_id: int, + organization_id: int, + job_id: UUID, + task_id: str, + task_instance, +)-> APIResponse: """Process a response and return callback payload, for Celery use.""" - request = ResponsesAPIRequest(**request_data) assistant_id = request.assistant_id - request_dict = request.model_dump() logger.info( f"[process_response_task] Generating response for assistant_id={mask_string(assistant_id)}, project_id={project_id}" @@ -66,6 +170,11 @@ def process_response_task(request_data: dict, project_id: int, organization_id: try: with Session(engine) as session: + job_crud = JobCrud(session=session) + + job_update = JobUpdate(status=JobStatus.PROCESSING, task_id=UUID(task_id)) + job_crud.update(job_id=job_id, job_update=job_update) + assistant = get_assistant_by_id(session, assistant_id, project_id) if not assistant: msg = f"Assistant not found: assistant_id={mask_string(assistant_id)}, project_id={project_id}" @@ -176,6 +285,9 @@ def process_response_task(request_data: dict, project_id: int, organization_id: project_id=project_id, organization_id=organization_id, ) + job_crud = JobCrud(session=session) + job_update = JobUpdate(status=JobStatus.SUCCESS) + job_crud.update(job_id=job_id, job_update=job_update) callback_response = APIResponse.success_response( data=CallbackResponse( @@ -200,6 +312,7 @@ def process_response_task(request_data: dict, project_id: int, organization_id: ) if tracer: tracer.log_error(error_message, response_id=request.response_id) + callback_response = APIResponse.failure_response(error=error_message) finally: From 580c3f591ea550c9ad076a5ab6b1ae9427d9bde2 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Thu, 18 Sep 2025 15:45:49 +0530 Subject: [PATCH 06/44] pass 
trace id to job table --- backend/app/services/response.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/backend/app/services/response.py b/backend/app/services/response.py index 114abe6cd..ccb7267c8 100644 --- a/backend/app/services/response.py +++ b/backend/app/services/response.py @@ -27,6 +27,7 @@ from app.utils import APIResponse, get_openai_client, handle_openai_error, mask_string from app.celery.utils import start_high_priority_job from app.api.routes.threads import send_callback +from asgi_correlation_id import correlation_id logger = logging.getLogger(__name__) @@ -39,8 +40,9 @@ def start_job( ) -> UUID: """Create a response job and schedule Celery task.""" + trace_id = correlation_id.get() or "N/A" job_crud = JobCrud(session=db) - job = job_crud.create(job_type=JobType.RESPONSE, trace_id="Aviraj") + job = job_crud.create(job_type=JobType.RESPONSE, trace_id=trace_id) # Schedule the Celery task task_id = start_high_priority_job( From 0de7cd0046851a7fc22b57273cc81a380716a761 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Thu, 18 Sep 2025 15:50:36 +0530 Subject: [PATCH 07/44] pre commit and pass trace id to celery --- .../versions/be78247139f9_create_job_table.py | 59 ++- backend/app/api/routes/responses.py | 10 +- backend/app/crud/jobs.py | 5 +- backend/app/models/job.py | 15 +- backend/app/models/response.py | 4 +- backend/app/services/response.py | 487 +++++++++--------- backend/app/utils.py | 33 +- 7 files changed, 325 insertions(+), 288 deletions(-) diff --git a/backend/app/alembic/versions/be78247139f9_create_job_table.py b/backend/app/alembic/versions/be78247139f9_create_job_table.py index c8ad08c94..ec6ec0787 100644 --- a/backend/app/alembic/versions/be78247139f9_create_job_table.py +++ b/backend/app/alembic/versions/be78247139f9_create_job_table.py @@ -11,33 +11,58 @@ # revision identifiers, used by Alembic. 
-revision = 'be78247139f9' -down_revision = '6ed6ed401847' +revision = "be78247139f9" +down_revision = "6ed6ed401847" branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('job', - sa.Column('id', sa.Uuid(), nullable=False), - sa.Column('task_id', sa.Uuid(), nullable=True), - sa.Column('trace_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column('error_message', sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column('status', sa.Enum('PENDING', 'PROCESSING', 'SUCCESS', 'FAILED', name='jobstatus'), nullable=False), - sa.Column('job_type', sa.Enum('RESPONSE', name='jobtype'), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint('id') + op.create_table( + "job", + sa.Column("id", sa.Uuid(), nullable=False), + sa.Column("task_id", sa.Uuid(), nullable=True), + sa.Column("trace_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("error_message", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column( + "status", + sa.Enum("PENDING", "PROCESSING", "SUCCESS", "FAILED", name="jobstatus"), + nullable=False, + ), + sa.Column("job_type", sa.Enum("RESPONSE", name="jobtype"), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) + op.drop_constraint( + "openai_conversation_project_id_fkey1", + "openai_conversation", + type_="foreignkey", + ) + op.drop_constraint( + "openai_conversation_organization_id_fkey1", + "openai_conversation", + type_="foreignkey", ) - op.drop_constraint('openai_conversation_project_id_fkey1', 'openai_conversation', type_='foreignkey') - op.drop_constraint('openai_conversation_organization_id_fkey1', 'openai_conversation', type_='foreignkey') # ### end Alembic commands ### def downgrade(): # ### commands auto 
generated by Alembic - please adjust! ### - op.create_foreign_key('openai_conversation_organization_id_fkey1', 'openai_conversation', 'organization', ['organization_id'], ['id']) - op.create_foreign_key('openai_conversation_project_id_fkey1', 'openai_conversation', 'project', ['project_id'], ['id']) - op.drop_table('job') + op.create_foreign_key( + "openai_conversation_organization_id_fkey1", + "openai_conversation", + "organization", + ["organization_id"], + ["id"], + ) + op.create_foreign_key( + "openai_conversation_project_id_fkey1", + "openai_conversation", + "project", + ["project_id"], + ["id"], + ) + op.drop_table("job") # ### end Alembic commands ### diff --git a/backend/app/api/routes/responses.py b/backend/app/api/routes/responses.py index 662f9a531..7a0b4d774 100644 --- a/backend/app/api/routes/responses.py +++ b/backend/app/api/routes/responses.py @@ -17,7 +17,11 @@ UserProjectOrg, OpenAIConversationCreate, ) -from app.services.response import get_additional_data, get_file_search_results, start_job +from app.services.response import ( + get_additional_data, + get_file_search_results, + start_job, +) from app.utils import APIResponse, get_openai_client, handle_openai_error, mask_string @@ -82,7 +86,7 @@ async def responses_sync( "data": additional_data if additional_data else None, "error": str(e.detail), "metadata": None, - } + }, ) langfuse_credentials = get_provider_credential( @@ -189,5 +193,5 @@ async def responses_sync( "data": additional_data if additional_data else None, "error": error_message, "metadata": None, - } + }, ) diff --git a/backend/app/crud/jobs.py b/backend/app/crud/jobs.py index d17348bfa..e9cc38dce 100644 --- a/backend/app/crud/jobs.py +++ b/backend/app/crud/jobs.py @@ -7,6 +7,7 @@ logger = logging.getLogger(__name__) + class JobCrud: def __init__(self, session: Session): self.session = session @@ -21,9 +22,7 @@ def create(self, job_type: JobType, trace_id: str | None = None) -> Job: self.session.refresh(new_job) return new_job - 
def update(self, job_id: UUID, job_update: JobUpdate) -> Job: - job = self.session.get(Job, job_id) if not job: raise ValueError(f"Job not found with the given job_id {job_id}") @@ -37,4 +36,4 @@ def update(self, job_id: UUID, job_update: JobUpdate) -> Job: self.session.commit() self.session.refresh(job) - return job \ No newline at end of file + return job diff --git a/backend/app/models/job.py b/backend/app/models/job.py index 40268a81f..506adda55 100644 --- a/backend/app/models/job.py +++ b/backend/app/models/job.py @@ -5,6 +5,7 @@ from sqlmodel import SQLModel, Field from app.core.util import now + class JobStatus(str, Enum): PENDING = "PENDING" PROCESSING = "PROCESSING" @@ -24,20 +25,16 @@ class Job(SQLModel, table=True): primary_key=True, ) task_id: UUID | None = Field( - nullable=True, - description="Celery task ID returned when job is queued." + nullable=True, description="Celery task ID returned when job is queued." ) trace_id: str | None = Field( - default=None, - description="Tracing ID for correlating logs and traces." + default=None, description="Tracing ID for correlating logs and traces." ) error_message: str | None = Field( - default=None, - description="Error details if the job fails." + default=None, description="Error details if the job fails." ) status: JobStatus = Field( - default=JobStatus.PENDING, - description="Current state of the job." + default=JobStatus.PENDING, description="Current state of the job." ) job_type: JobType = Field( description="Job type or classification (e.g., response job, ingestion job)." 
@@ -49,4 +46,4 @@ class Job(SQLModel, table=True): class JobUpdate(SQLModel): status: JobStatus | None = None error_message: str | None = None - task_id: UUID | None = None \ No newline at end of file + task_id: UUID | None = None diff --git a/backend/app/models/response.py b/backend/app/models/response.py index d13217e75..8aef33e5b 100644 --- a/backend/app/models/response.py +++ b/backend/app/models/response.py @@ -4,8 +4,8 @@ class ResponsesAPIRequest(SQLModel): assistant_id: str question: str - callback_url: str | None= None - response_id: str | None= None + callback_url: str | None = None + response_id: str | None = None class Config: extra = "allow" diff --git a/backend/app/services/response.py b/backend/app/services/response.py index ccb7267c8..aa06d2577 100644 --- a/backend/app/services/response.py +++ b/backend/app/services/response.py @@ -6,23 +6,22 @@ from app.core.db import engine from app.core.langfuse.langfuse import LangfuseTracer from app.crud import ( - JobCrud, - get_assistant_by_id, - get_provider_credential, - create_conversation, - get_ancestor_id_from_response, - get_conversation_by_ancestor_id, - + JobCrud, + get_assistant_by_id, + get_provider_credential, + create_conversation, + get_ancestor_id_from_response, + get_conversation_by_ancestor_id, ) from app.models import ( - CallbackResponse, - Diagnostics, - FileResultChunk, - JobType, - JobStatus, - JobUpdate, - ResponsesAPIRequest, - OpenAIConversationCreate, + CallbackResponse, + Diagnostics, + FileResultChunk, + JobType, + JobStatus, + JobUpdate, + ResponsesAPIRequest, + OpenAIConversationCreate, ) from app.utils import APIResponse, get_openai_client, handle_openai_error, mask_string from app.celery.utils import start_high_priority_job @@ -49,6 +48,7 @@ def start_job( function_path="app.services.response.execute_job", project_id=project_id, job_id=str(job.id), + trace_id=trace_id, request_data=request.model_dump(), organization_id=organization_id, ) @@ -60,31 +60,31 @@ def start_job( def 
get_file_search_results(response): - results: list[FileResultChunk] = [] - for tool_call in response.output: - if tool_call.type == "file_search_call": - results.extend( - [FileResultChunk(score=hit.score, text=hit.text) for hit in results] - ) - return results + results: list[FileResultChunk] = [] + for tool_call in response.output: + if tool_call.type == "file_search_call": + results.extend( + [FileResultChunk(score=hit.score, text=hit.text) for hit in results] + ) + return results def get_additional_data(request: dict) -> dict: - async_exclude_keys = {"assistant_id", "callback_url", "response_id", "question"} - sync_exclude_keys = { - "model", - "instructions", - "vector_store_ids", - "max_num_results", - "temperature", - "response_id", - "question", - } - if "assistant_id" in request: - exclude_keys = async_exclude_keys - else: - exclude_keys = sync_exclude_keys - return {k: v for k, v in request.items() if k not in exclude_keys} + async_exclude_keys = {"assistant_id", "callback_url", "response_id", "question"} + sync_exclude_keys = { + "model", + "instructions", + "vector_store_ids", + "max_num_results", + "temperature", + "response_id", + "question", + } + if "assistant_id" in request: + exclude_keys = async_exclude_keys + else: + exclude_keys = sync_exclude_keys + return {k: v for k, v in request.items() if k not in exclude_keys} def send_response_callback( @@ -112,213 +112,222 @@ def send_response_callback( def execute_job( - request_data: dict, - project_id: int, - organization_id: int, - job_id: str, - task_id: str, - task_instance, + request_data: dict, + project_id: int, + organization_id: int, + job_id: str, + task_id: str, + task_instance, ) -> APIResponse | None: - """Celery task to process a response request asynchronously.""" - request_data = ResponsesAPIRequest(**request_data) - job_id = UUID(job_id) - response = process_response( - request=request_data, - project_id=project_id, - organization_id=organization_id, - job_id=job_id, - 
task_id=task_id, - task_instance=task_instance, - ) - if response is None: - response = APIResponse.failure_response(error="Unknown error occurred") - - with Session(engine) as session: - job_crud = JobCrud(session=session) - if response.success: - job_update = JobUpdate(status=JobStatus.SUCCESS) - else: - job_update = JobUpdate(status=JobStatus.FAILED, error_message=response.error) - job_crud.update(job_id=job_id, job_update=job_update) - - - if request_data.callback_url: - send_response_callback( - callback_url=request_data.callback_url, - callback_response=response, - request_dict=request_data.model_dump(), - ) - - return response.model_dump() + """Celery task to process a response request asynchronously.""" + request_data = ResponsesAPIRequest(**request_data) + job_id = UUID(job_id) + response = process_response( + request=request_data, + project_id=project_id, + organization_id=organization_id, + job_id=job_id, + task_id=task_id, + task_instance=task_instance, + ) + if response is None: + response = APIResponse.failure_response(error="Unknown error occurred") + + with Session(engine) as session: + job_crud = JobCrud(session=session) + if response.success: + job_update = JobUpdate(status=JobStatus.SUCCESS) + else: + job_update = JobUpdate( + status=JobStatus.FAILED, error_message=response.error + ) + job_crud.update(job_id=job_id, job_update=job_update) + + if request_data.callback_url: + send_response_callback( + callback_url=request_data.callback_url, + callback_response=response, + request_dict=request_data.model_dump(), + ) + + return response.model_dump() def process_response( - request: ResponsesAPIRequest, + request: ResponsesAPIRequest, project_id: int, organization_id: int, - job_id: UUID, + job_id: UUID, task_id: str, task_instance, -)-> APIResponse: - """Process a response and return callback payload, for Celery use.""" - assistant_id = request.assistant_id - - logger.info( - f"[process_response_task] Generating response for 
assistant_id={mask_string(assistant_id)}, project_id={project_id}" - ) - - callback_response: APIResponse | None = None - tracer: LangfuseTracer | None = None - - try: - with Session(engine) as session: - job_crud = JobCrud(session=session) - - job_update = JobUpdate(status=JobStatus.PROCESSING, task_id=UUID(task_id)) - job_crud.update(job_id=job_id, job_update=job_update) - - assistant = get_assistant_by_id(session, assistant_id, project_id) - if not assistant: - msg = f"Assistant not found: assistant_id={mask_string(assistant_id)}, project_id={project_id}" - logger.error(f"[process_response_task] {msg}") - callback_response = APIResponse.failure_response(error="Assistant not found or not active") - return callback_response - - try: - client = get_openai_client(session, organization_id, project_id) - except HTTPException as e: - callback_response = APIResponse.failure_response(error=str(e.detail)) - return callback_response - - langfuse_credentials = get_provider_credential( - session=session, - org_id=organization_id, - provider="langfuse", - project_id=project_id, - ) - - ancestor_id = request.response_id - latest_conversation = None - if ancestor_id: - latest_conversation = get_conversation_by_ancestor_id( - session=session, - ancestor_response_id=ancestor_id, - project_id=project_id, - ) - if latest_conversation: - ancestor_id = latest_conversation.response_id - - tracer = LangfuseTracer( - credentials=langfuse_credentials, - response_id=request.response_id, - ) - tracer.start_trace( - name="generate_response_async", - input={"question": request.question, "assistant_id": assistant_id}, - metadata={"callback_url": request.callback_url}, - tags=[assistant_id], - ) - - tracer.start_generation( - name="openai_response", - input={"question": request.question}, - metadata={"model": assistant.model, "temperature": assistant.temperature}, - ) - - params = { - "model": assistant.model, - "previous_response_id": ancestor_id, - "instructions": assistant.instructions, - 
"temperature": assistant.temperature, - "input": [{"role": "user", "content": request.question}], - } - if assistant.vector_store_ids: - params["tools"] = [{ - "type": "file_search", - "vector_store_ids": assistant.vector_store_ids, - "max_num_results": assistant.max_num_results, - }] - params["include"] = ["file_search_call.results"] - - response = client.responses.create(**params) - response_chunks = get_file_search_results(response) - - logger.info( - f"[process_response_task] Successfully generated response: response_id={response.id}, assistant={mask_string(assistant_id)}, project_id={project_id}" - ) - - tracer.end_generation( - output={"response_id": response.id, "message": response.output_text}, - usage={ - "input": response.usage.input_tokens, - "output": response.usage.output_tokens, - "total": response.usage.total_tokens, - "unit": "TOKENS", - }, - model=response.model, - ) - tracer.update_trace( - tags=[response.id], - output={"status": "success", "message": response.output_text, "error": None}, - ) - - with Session(engine) as session: - ancestor_response_id = ( - latest_conversation.ancestor_response_id - if latest_conversation - else get_ancestor_id_from_response( - session=session, - current_response_id=response.id, - previous_response_id=response.previous_response_id, - project_id=project_id, - ) - ) - create_conversation( - session=session, - conversation=OpenAIConversationCreate( - response_id=response.id, - previous_response_id=response.previous_response_id, - ancestor_response_id=ancestor_response_id, - user_question=request.question, - response=response.output_text, - model=response.model, - assistant_id=assistant_id, - ), - project_id=project_id, - organization_id=organization_id, - ) - job_crud = JobCrud(session=session) - job_update = JobUpdate(status=JobStatus.SUCCESS) - job_crud.update(job_id=job_id, job_update=job_update) - - callback_response = APIResponse.success_response( - data=CallbackResponse( - status="success", - 
response_id=response.id, - message=response.output_text, - chunks=response_chunks, - diagnostics=Diagnostics( - input_tokens=response.usage.input_tokens, - output_tokens=response.usage.output_tokens, - total_tokens=response.usage.total_tokens, - model=response.model, - ), - ) - ) - - except openai.OpenAIError as e: - error_message = handle_openai_error(e) - logger.error( - f"[process_response_task] OpenAI API error: {error_message}, project_id={project_id}", - exc_info=True, - ) - if tracer: - tracer.log_error(error_message, response_id=request.response_id) - - callback_response = APIResponse.failure_response(error=error_message) - - finally: - if tracer: - tracer.flush() - - return callback_response +) -> APIResponse: + """Process a response and return callback payload, for Celery use.""" + assistant_id = request.assistant_id + + logger.info( + f"[process_response_task] Generating response for assistant_id={mask_string(assistant_id)}, project_id={project_id}" + ) + + callback_response: APIResponse | None = None + tracer: LangfuseTracer | None = None + + try: + with Session(engine) as session: + job_crud = JobCrud(session=session) + + job_update = JobUpdate(status=JobStatus.PROCESSING, task_id=UUID(task_id)) + job_crud.update(job_id=job_id, job_update=job_update) + + assistant = get_assistant_by_id(session, assistant_id, project_id) + if not assistant: + msg = f"Assistant not found: assistant_id={mask_string(assistant_id)}, project_id={project_id}" + logger.error(f"[process_response_task] {msg}") + callback_response = APIResponse.failure_response( + error="Assistant not found or not active" + ) + return callback_response + + try: + client = get_openai_client(session, organization_id, project_id) + except HTTPException as e: + callback_response = APIResponse.failure_response(error=str(e.detail)) + return callback_response + + langfuse_credentials = get_provider_credential( + session=session, + org_id=organization_id, + provider="langfuse", + project_id=project_id, + 
) + + ancestor_id = request.response_id + latest_conversation = None + if ancestor_id: + latest_conversation = get_conversation_by_ancestor_id( + session=session, + ancestor_response_id=ancestor_id, + project_id=project_id, + ) + if latest_conversation: + ancestor_id = latest_conversation.response_id + + tracer = LangfuseTracer( + credentials=langfuse_credentials, + response_id=request.response_id, + ) + tracer.start_trace( + name="generate_response_async", + input={"question": request.question, "assistant_id": assistant_id}, + metadata={"callback_url": request.callback_url}, + tags=[assistant_id], + ) + + tracer.start_generation( + name="openai_response", + input={"question": request.question}, + metadata={"model": assistant.model, "temperature": assistant.temperature}, + ) + + params = { + "model": assistant.model, + "previous_response_id": ancestor_id, + "instructions": assistant.instructions, + "temperature": assistant.temperature, + "input": [{"role": "user", "content": request.question}], + } + if assistant.vector_store_ids: + params["tools"] = [ + { + "type": "file_search", + "vector_store_ids": assistant.vector_store_ids, + "max_num_results": assistant.max_num_results, + } + ] + params["include"] = ["file_search_call.results"] + + response = client.responses.create(**params) + response_chunks = get_file_search_results(response) + + logger.info( + f"[process_response_task] Successfully generated response: response_id={response.id}, assistant={mask_string(assistant_id)}, project_id={project_id}" + ) + + tracer.end_generation( + output={"response_id": response.id, "message": response.output_text}, + usage={ + "input": response.usage.input_tokens, + "output": response.usage.output_tokens, + "total": response.usage.total_tokens, + "unit": "TOKENS", + }, + model=response.model, + ) + tracer.update_trace( + tags=[response.id], + output={ + "status": "success", + "message": response.output_text, + "error": None, + }, + ) + + with Session(engine) as session: + 
ancestor_response_id = ( + latest_conversation.ancestor_response_id + if latest_conversation + else get_ancestor_id_from_response( + session=session, + current_response_id=response.id, + previous_response_id=response.previous_response_id, + project_id=project_id, + ) + ) + create_conversation( + session=session, + conversation=OpenAIConversationCreate( + response_id=response.id, + previous_response_id=response.previous_response_id, + ancestor_response_id=ancestor_response_id, + user_question=request.question, + response=response.output_text, + model=response.model, + assistant_id=assistant_id, + ), + project_id=project_id, + organization_id=organization_id, + ) + job_crud = JobCrud(session=session) + job_update = JobUpdate(status=JobStatus.SUCCESS) + job_crud.update(job_id=job_id, job_update=job_update) + + callback_response = APIResponse.success_response( + data=CallbackResponse( + status="success", + response_id=response.id, + message=response.output_text, + chunks=response_chunks, + diagnostics=Diagnostics( + input_tokens=response.usage.input_tokens, + output_tokens=response.usage.output_tokens, + total_tokens=response.usage.total_tokens, + model=response.model, + ), + ) + ) + + except openai.OpenAIError as e: + error_message = handle_openai_error(e) + logger.error( + f"[process_response_task] OpenAI API error: {error_message}, project_id={project_id}", + exc_info=True, + ) + if tracer: + tracer.log_error(error_message, response_id=request.response_id) + + callback_response = APIResponse.failure_response(error=error_message) + + finally: + if tracer: + tracer.flush() + + return callback_response diff --git a/backend/app/utils.py b/backend/app/utils.py index 9311692a9..673096cad 100644 --- a/backend/app/utils.py +++ b/backend/app/utils.py @@ -39,7 +39,10 @@ def success_response( @classmethod def failure_response( - cls, error: str | list, data:Optional[T]= None, metadata: Optional[Dict[str, Any]] = None + cls, + error: str | list, + data: Optional[T] = None, + 
metadata: Optional[Dict[str, Any]] = None, ) -> "APIResponse[None]": if isinstance(error, list): # to handle cases when error is a list of errors error_message = "\n".join([f"{err['loc']}: {err['msg']}" for err in error]) @@ -202,20 +205,20 @@ def get_openai_client(session: Session, org_id: int, project_id: int) -> OpenAI: def handle_openai_error(e: openai.OpenAIError) -> str: - if hasattr(e, "body") and isinstance(e.body, dict) and "message" in e.body: - return e.body["message"] - elif hasattr(e, "message"): - return e.message - elif hasattr(e, "response") and hasattr(e.response, "json"): - try: - error_data = e.response.json() - if isinstance(error_data, dict) and "error" in error_data: - error_info = error_data["error"] - if isinstance(error_info, dict) and "message" in error_info: - return error_info["message"] - except: - pass - return str(e) + if hasattr(e, "body") and isinstance(e.body, dict) and "message" in e.body: + return e.body["message"] + elif hasattr(e, "message"): + return e.message + elif hasattr(e, "response") and hasattr(e.response, "json"): + try: + error_data = e.response.json() + if isinstance(error_data, dict) and "error" in error_data: + error_info = error_data["error"] + if isinstance(error_info, dict) and "message" in error_info: + return error_info["message"] + except: + pass + return str(e) @ft.singledispatch From 19928ab502582de412fee59b9e6c166db420f42b Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Thu, 18 Sep 2025 15:55:25 +0530 Subject: [PATCH 08/44] add task id to response log --- backend/app/services/response.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/app/services/response.py b/backend/app/services/response.py index aa06d2577..9087c2100 100644 --- a/backend/app/services/response.py +++ b/backend/app/services/response.py @@ -54,7 +54,7 @@ def start_job( ) logger.info( - f"[start_job] Job scheduled to generate response | job_id={job.id}, 
project_id={project_id}, task_id={task_id}" + f"[start_job] Job scheduled to generate response | job_id={job.id}, project_id={project_id}, task_id={task_id}, job_id={job.id}" ) return job.id @@ -165,7 +165,7 @@ def process_response( assistant_id = request.assistant_id logger.info( - f"[process_response_task] Generating response for assistant_id={mask_string(assistant_id)}, project_id={project_id}" + f"[process_response_task] Generating response for assistant_id={mask_string(assistant_id)}, project_id={project_id}, task_id={task_id}, job_id={job_id}" ) callback_response: APIResponse | None = None From b1e8fb42cd800f1c26f568c00d79051182fd3c1c Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Mon, 22 Sep 2025 11:43:06 +0530 Subject: [PATCH 09/44] Refactor response handling: split response logic into separate modules --- backend/app/api/routes/responses.py | 10 +- backend/app/services/response.py | 333 --------------------- backend/app/services/response/callbacks.py | 60 ++++ backend/app/services/response/jobs.py | 68 +++++ backend/app/services/response/response.py | 284 ++++++++++++++++++ 5 files changed, 415 insertions(+), 340 deletions(-) delete mode 100644 backend/app/services/response.py create mode 100644 backend/app/services/response/callbacks.py create mode 100644 backend/app/services/response/jobs.py create mode 100644 backend/app/services/response/response.py diff --git a/backend/app/api/routes/responses.py b/backend/app/api/routes/responses.py index 7a0b4d774..a31a8f658 100644 --- a/backend/app/api/routes/responses.py +++ b/backend/app/api/routes/responses.py @@ -11,17 +11,13 @@ from app.models import ( CallbackResponse, Diagnostics, - FileResultChunk, ResponsesAPIRequest, ResponsesSyncAPIRequest, UserProjectOrg, - OpenAIConversationCreate, -) -from app.services.response import ( - get_additional_data, - get_file_search_results, - start_job, ) +from app.services.response.jobs import start_job +from 
app.services.response.response import get_file_search_results +from app.services.response.callbacks import get_additional_data from app.utils import APIResponse, get_openai_client, handle_openai_error, mask_string diff --git a/backend/app/services/response.py b/backend/app/services/response.py deleted file mode 100644 index 9087c2100..000000000 --- a/backend/app/services/response.py +++ /dev/null @@ -1,333 +0,0 @@ -import logging -from uuid import UUID -import openai -from fastapi import HTTPException -from sqlmodel import Session -from app.core.db import engine -from app.core.langfuse.langfuse import LangfuseTracer -from app.crud import ( - JobCrud, - get_assistant_by_id, - get_provider_credential, - create_conversation, - get_ancestor_id_from_response, - get_conversation_by_ancestor_id, -) -from app.models import ( - CallbackResponse, - Diagnostics, - FileResultChunk, - JobType, - JobStatus, - JobUpdate, - ResponsesAPIRequest, - OpenAIConversationCreate, -) -from app.utils import APIResponse, get_openai_client, handle_openai_error, mask_string -from app.celery.utils import start_high_priority_job -from app.api.routes.threads import send_callback -from asgi_correlation_id import correlation_id - -logger = logging.getLogger(__name__) - - -def start_job( - db: Session, - request: ResponsesAPIRequest, - project_id: int, - organization_id: int, -) -> UUID: - """Create a response job and schedule Celery task.""" - - trace_id = correlation_id.get() or "N/A" - job_crud = JobCrud(session=db) - job = job_crud.create(job_type=JobType.RESPONSE, trace_id=trace_id) - - # Schedule the Celery task - task_id = start_high_priority_job( - function_path="app.services.response.execute_job", - project_id=project_id, - job_id=str(job.id), - trace_id=trace_id, - request_data=request.model_dump(), - organization_id=organization_id, - ) - - logger.info( - f"[start_job] Job scheduled to generate response | job_id={job.id}, project_id={project_id}, task_id={task_id}, job_id={job.id}" - ) - 
return job.id - - -def get_file_search_results(response): - results: list[FileResultChunk] = [] - for tool_call in response.output: - if tool_call.type == "file_search_call": - results.extend( - [FileResultChunk(score=hit.score, text=hit.text) for hit in results] - ) - return results - - -def get_additional_data(request: dict) -> dict: - async_exclude_keys = {"assistant_id", "callback_url", "response_id", "question"} - sync_exclude_keys = { - "model", - "instructions", - "vector_store_ids", - "max_num_results", - "temperature", - "response_id", - "question", - } - if "assistant_id" in request: - exclude_keys = async_exclude_keys - else: - exclude_keys = sync_exclude_keys - return {k: v for k, v in request.items() if k not in exclude_keys} - - -def send_response_callback( - callback_url: str, - callback_response: APIResponse, - request_dict: dict, -) -> None: - """Send a standardized callback response to the provided callback URL.""" - - # Convert Pydantic model to dict - callback_response = callback_response.model_dump() - - send_callback( - callback_url, - { - "success": callback_response.get("success", False), - "data": { - **(callback_response.get("data") or {}), - **get_additional_data(request_dict), - }, - "error": callback_response.get("error"), - "metadata": None, - }, - ) - - -def execute_job( - request_data: dict, - project_id: int, - organization_id: int, - job_id: str, - task_id: str, - task_instance, -) -> APIResponse | None: - """Celery task to process a response request asynchronously.""" - request_data = ResponsesAPIRequest(**request_data) - job_id = UUID(job_id) - response = process_response( - request=request_data, - project_id=project_id, - organization_id=organization_id, - job_id=job_id, - task_id=task_id, - task_instance=task_instance, - ) - if response is None: - response = APIResponse.failure_response(error="Unknown error occurred") - - with Session(engine) as session: - job_crud = JobCrud(session=session) - if response.success: - job_update 
= JobUpdate(status=JobStatus.SUCCESS) - else: - job_update = JobUpdate( - status=JobStatus.FAILED, error_message=response.error - ) - job_crud.update(job_id=job_id, job_update=job_update) - - if request_data.callback_url: - send_response_callback( - callback_url=request_data.callback_url, - callback_response=response, - request_dict=request_data.model_dump(), - ) - - return response.model_dump() - - -def process_response( - request: ResponsesAPIRequest, - project_id: int, - organization_id: int, - job_id: UUID, - task_id: str, - task_instance, -) -> APIResponse: - """Process a response and return callback payload, for Celery use.""" - assistant_id = request.assistant_id - - logger.info( - f"[process_response_task] Generating response for assistant_id={mask_string(assistant_id)}, project_id={project_id}, task_id={task_id}, job_id={job_id}" - ) - - callback_response: APIResponse | None = None - tracer: LangfuseTracer | None = None - - try: - with Session(engine) as session: - job_crud = JobCrud(session=session) - - job_update = JobUpdate(status=JobStatus.PROCESSING, task_id=UUID(task_id)) - job_crud.update(job_id=job_id, job_update=job_update) - - assistant = get_assistant_by_id(session, assistant_id, project_id) - if not assistant: - msg = f"Assistant not found: assistant_id={mask_string(assistant_id)}, project_id={project_id}" - logger.error(f"[process_response_task] {msg}") - callback_response = APIResponse.failure_response( - error="Assistant not found or not active" - ) - return callback_response - - try: - client = get_openai_client(session, organization_id, project_id) - except HTTPException as e: - callback_response = APIResponse.failure_response(error=str(e.detail)) - return callback_response - - langfuse_credentials = get_provider_credential( - session=session, - org_id=organization_id, - provider="langfuse", - project_id=project_id, - ) - - ancestor_id = request.response_id - latest_conversation = None - if ancestor_id: - latest_conversation = 
get_conversation_by_ancestor_id( - session=session, - ancestor_response_id=ancestor_id, - project_id=project_id, - ) - if latest_conversation: - ancestor_id = latest_conversation.response_id - - tracer = LangfuseTracer( - credentials=langfuse_credentials, - response_id=request.response_id, - ) - tracer.start_trace( - name="generate_response_async", - input={"question": request.question, "assistant_id": assistant_id}, - metadata={"callback_url": request.callback_url}, - tags=[assistant_id], - ) - - tracer.start_generation( - name="openai_response", - input={"question": request.question}, - metadata={"model": assistant.model, "temperature": assistant.temperature}, - ) - - params = { - "model": assistant.model, - "previous_response_id": ancestor_id, - "instructions": assistant.instructions, - "temperature": assistant.temperature, - "input": [{"role": "user", "content": request.question}], - } - if assistant.vector_store_ids: - params["tools"] = [ - { - "type": "file_search", - "vector_store_ids": assistant.vector_store_ids, - "max_num_results": assistant.max_num_results, - } - ] - params["include"] = ["file_search_call.results"] - - response = client.responses.create(**params) - response_chunks = get_file_search_results(response) - - logger.info( - f"[process_response_task] Successfully generated response: response_id={response.id}, assistant={mask_string(assistant_id)}, project_id={project_id}" - ) - - tracer.end_generation( - output={"response_id": response.id, "message": response.output_text}, - usage={ - "input": response.usage.input_tokens, - "output": response.usage.output_tokens, - "total": response.usage.total_tokens, - "unit": "TOKENS", - }, - model=response.model, - ) - tracer.update_trace( - tags=[response.id], - output={ - "status": "success", - "message": response.output_text, - "error": None, - }, - ) - - with Session(engine) as session: - ancestor_response_id = ( - latest_conversation.ancestor_response_id - if latest_conversation - else 
get_ancestor_id_from_response( - session=session, - current_response_id=response.id, - previous_response_id=response.previous_response_id, - project_id=project_id, - ) - ) - create_conversation( - session=session, - conversation=OpenAIConversationCreate( - response_id=response.id, - previous_response_id=response.previous_response_id, - ancestor_response_id=ancestor_response_id, - user_question=request.question, - response=response.output_text, - model=response.model, - assistant_id=assistant_id, - ), - project_id=project_id, - organization_id=organization_id, - ) - job_crud = JobCrud(session=session) - job_update = JobUpdate(status=JobStatus.SUCCESS) - job_crud.update(job_id=job_id, job_update=job_update) - - callback_response = APIResponse.success_response( - data=CallbackResponse( - status="success", - response_id=response.id, - message=response.output_text, - chunks=response_chunks, - diagnostics=Diagnostics( - input_tokens=response.usage.input_tokens, - output_tokens=response.usage.output_tokens, - total_tokens=response.usage.total_tokens, - model=response.model, - ), - ) - ) - - except openai.OpenAIError as e: - error_message = handle_openai_error(e) - logger.error( - f"[process_response_task] OpenAI API error: {error_message}, project_id={project_id}", - exc_info=True, - ) - if tracer: - tracer.log_error(error_message, response_id=request.response_id) - - callback_response = APIResponse.failure_response(error=error_message) - - finally: - if tracer: - tracer.flush() - - return callback_response diff --git a/backend/app/services/response/callbacks.py b/backend/app/services/response/callbacks.py new file mode 100644 index 000000000..55b8c58a1 --- /dev/null +++ b/backend/app/services/response/callbacks.py @@ -0,0 +1,60 @@ +from app.utils import APIResponse +import requests +import logging + +logger = logging.getLogger(__name__) + + +def get_additional_data(request: dict) -> dict: + async_exclude_keys = {"assistant_id", "callback_url", "response_id", "question"} 
+ sync_exclude_keys = { + "model", + "instructions", + "vector_store_ids", + "max_num_results", + "temperature", + "response_id", + "question", + } + if "assistant_id" in request: + exclude_keys = async_exclude_keys + else: + exclude_keys = sync_exclude_keys + return {k: v for k, v in request.items() if k not in exclude_keys} + + +def send_callback(callback_url: str, data: dict): + """Send results to the callback URL (synchronously).""" + try: + session = requests.Session() + # uncomment this to run locally without SSL + # session.verify = False + response = session.post(callback_url, json=data) + response.raise_for_status() + logger.info(f"[send_callback] Callback sent successfully to {callback_url}") + return True + except requests.RequestException as e: + logger.error(f"[send_callback] Callback failed: {str(e)}", exc_info=True) + return False + + +def send_response_callback( + callback_url: str, + callback_response: APIResponse, + request_dict: dict, +) -> None: + """Send a standardized callback response to the provided callback URL.""" + + callback_response = callback_response.model_dump() + send_callback( + callback_url, + { + "success": callback_response.get("success", False), + "data": { + **(callback_response.get("data") or {}), + **get_additional_data(request_dict), + }, + "error": callback_response.get("error"), + "metadata": None, + }, + ) diff --git a/backend/app/services/response/jobs.py b/backend/app/services/response/jobs.py new file mode 100644 index 000000000..55c347768 --- /dev/null +++ b/backend/app/services/response/jobs.py @@ -0,0 +1,68 @@ +import logging +from uuid import UUID +from fastapi import HTTPException +from sqlmodel import Session +from asgi_correlation_id import correlation_id +from app.core.db import engine +from app.crud import JobCrud +from app.models import JobType, JobStatus, JobUpdate, ResponsesAPIRequest +from app.utils import APIResponse +from app.celery.utils import start_high_priority_job +from app.api.routes.threads 
import send_callback + +from app.services.response.response import process_response +from app.services.response.callbacks import send_response_callback + +logger = logging.getLogger(__name__) + + +def start_job( + db: Session, request: ResponsesAPIRequest, project_id: int, organization_id: int +) -> UUID: + """Create a response job and schedule Celery task.""" + trace_id = correlation_id.get() or "N/A" + job_crud = JobCrud(session=db) + job = job_crud.create(job_type=JobType.RESPONSE, trace_id=trace_id) + + task_id = start_high_priority_job( + function_path="app.services.response.jobs.execute_job", + project_id=project_id, + job_id=str(job.id), + trace_id=trace_id, + request_data=request.model_dump(), + organization_id=organization_id, + ) + + logger.info( + f"[start_job] Job scheduled to generate response | job_id={job.id}, project_id={project_id}, task_id={task_id}" + ) + return job.id + + +def execute_job( + request_data: dict, + project_id: int, + organization_id: int, + job_id: str, + task_id: str, + task_instance, +) -> None: + """Celery task to process a response request asynchronously.""" + request_data: ResponsesAPIRequest = ResponsesAPIRequest(**request_data) + job_id = UUID(job_id) + + response = process_response( + request=request_data, + project_id=project_id, + organization_id=organization_id, + job_id=job_id, + task_id=task_id, + task_instance=task_instance, + ) + + if request_data.callback_url: + send_response_callback( + callback_url=request_data.callback_url, + callback_response=response, + request_dict=request_data.model_dump(), + ) diff --git a/backend/app/services/response/response.py b/backend/app/services/response/response.py new file mode 100644 index 000000000..80a8faa7e --- /dev/null +++ b/backend/app/services/response/response.py @@ -0,0 +1,284 @@ +import logging +from uuid import UUID + +import openai +from openai import OpenAI +from openai.types.responses.response import Response +from fastapi import HTTPException +from sqlmodel import 
Session + +from app.core.db import engine +from app.core.langfuse.langfuse import LangfuseTracer +from app.crud import ( + JobCrud, + get_assistant_by_id, + get_provider_credential, + create_conversation, + get_ancestor_id_from_response, + get_conversation_by_ancestor_id, +) +from app.models import ( + CallbackResponse, + Diagnostics, + FileResultChunk, + Assistant, + JobStatus, + JobUpdate, + ResponsesAPIRequest, + OpenAIConversationCreate, + OpenAIConversation, +) +from app.utils import ( + APIResponse, + get_openai_client, + handle_openai_error, + mask_string, +) + +logger = logging.getLogger(__name__) + + +def get_file_search_results(response: Response) -> list[FileResultChunk]: + """Extract file search results from a response.""" + results: list[FileResultChunk] = [] + for tool_call in response.output: + if tool_call.type == "file_search_call": + results.extend( + FileResultChunk(score=hit.score, text=hit.text) + for hit in tool_call.results + ) + return results + + +def _build_callback_response(response: Response) -> CallbackResponse: + """Build callback response with diagnostics and search results.""" + response_chunks = get_file_search_results(response) + return CallbackResponse( + status="success", + response_id=response.id, + message=response.output_text, + chunks=response_chunks, + diagnostics=Diagnostics( + input_tokens=response.usage.input_tokens, + output_tokens=response.usage.output_tokens, + total_tokens=response.usage.total_tokens, + model=response.model, + ), + ) + + +def _fail_job(job_id: UUID, error_message: str) -> APIResponse: + with Session(engine) as session: + JobCrud(session=session).update( + job_id=job_id, + job_update=JobUpdate( + status=JobStatus.FAILED, + error_message=error_message, + ), + ) + return APIResponse.failure_response(error=error_message) + + +def generate_response( + tracer: LangfuseTracer, + client: OpenAI, + assistant: Assistant, + request: ResponsesAPIRequest, + ancestor_id: str, +) -> tuple[Response | None, str | 
None]: + """Generate a response using OpenAI and track with Langfuse.""" + response: Response | None = None + error_message: str | None = None + + try: + tracer.start_trace( + name="generate_response_async", + input={"question": request.question, "assistant_id": assistant.id}, + metadata={"callback_url": request.callback_url}, + tags=[assistant.id], + ) + tracer.start_generation( + name="openai_response", + input={"question": request.question}, + metadata={"model": assistant.model, "temperature": assistant.temperature}, + ) + + params: dict = { + "model": assistant.model, + "previous_response_id": ancestor_id, + "instructions": assistant.instructions, + "temperature": assistant.temperature, + "input": [{"role": "user", "content": request.question}], + } + + if assistant.vector_store_ids: + params["tools"] = [ + { + "type": "file_search", + "vector_store_ids": assistant.vector_store_ids, + "max_num_results": assistant.max_num_results, + } + ] + params["include"] = ["file_search_call.results"] + + response = client.responses.create(**params) + + tracer.end_generation( + output={"response_id": response.id, "message": response.output_text}, + usage={ + "input": response.usage.input_tokens, + "output": response.usage.output_tokens, + "total": response.usage.total_tokens, + "unit": "TOKENS", + }, + model=response.model, + ) + tracer.update_trace( + tags=[response.id], + output={ + "status": "success", + "message": response.output_text, + "error": None, + }, + ) + + except openai.OpenAIError as e: + error_message = handle_openai_error(e) + logger.error( + f"[process_response_task] OpenAI API error: {error_message}", + exc_info=True, + ) + if tracer: + tracer.log_error(error_message, response_id=request.response_id) + + return response, error_message + + +def persist_conversation( + response: Response, + request: ResponsesAPIRequest, + project_id: int, + organization_id: int, + job_id: UUID, + assistant_id: str, + latest_conversation: OpenAIConversation | None, +) -> None: 
+ """Persist conversation and mark job as successful.""" + with Session(engine) as session: + ancestor_response_id = ( + latest_conversation.ancestor_response_id + if latest_conversation + else get_ancestor_id_from_response( + session=session, + current_response_id=response.id, + previous_response_id=response.previous_response_id, + project_id=project_id, + ) + ) + + create_conversation( + session=session, + conversation=OpenAIConversationCreate( + response_id=response.id, + previous_response_id=response.previous_response_id, + ancestor_response_id=ancestor_response_id, + user_question=request.question, + response=response.output_text, + model=response.model, + assistant_id=assistant_id, + ), + project_id=project_id, + organization_id=organization_id, + ) + + JobCrud(session=session).update( + job_id=job_id, + job_update=JobUpdate(status=JobStatus.SUCCESS), + ) + + +def process_response( + request: ResponsesAPIRequest, + project_id: int, + organization_id: int, + job_id: UUID, + task_id: str, + task_instance, +) -> APIResponse: + assistant_id = request.assistant_id + logger.info( + f"[process_response_task] Generating response for " + f"assistant_id={mask_string(assistant_id)}, " + f"project_id={project_id}, task_id={task_id}, job_id={job_id}" + ) + + latest_conversation: OpenAIConversation | None = None + + try: + with Session(engine) as session: + JobCrud(session=session).update( + job_id=job_id, + job_update=JobUpdate( + status=JobStatus.PROCESSING, task_id=UUID(task_id) + ), + ) + + assistant = get_assistant_by_id(session, assistant_id, project_id) + if not assistant: + logger.error( + f"[process_response_task] Assistant not found: " + f"assistant_id={mask_string(assistant_id)}, project_id={project_id}" + ) + return _fail_job(job_id, "Assistant not found or not active") + + try: + client = get_openai_client(session, organization_id, project_id) + except HTTPException as e: + return _fail_job(job_id, str(e.detail)) + + langfuse_credentials = 
get_provider_credential( + session=session, + org_id=organization_id, + provider="langfuse", + project_id=project_id, + ) + + ancestor_id = request.response_id + if ancestor_id: + latest_conversation = get_conversation_by_ancestor_id( + session, + ancestor_response_id=ancestor_id, + project_id=project_id, + ) + if latest_conversation: + ancestor_id = latest_conversation.response_id + + tracer = LangfuseTracer( + credentials=langfuse_credentials, + response_id=request.response_id, + ) + response, error_message = generate_response( + tracer=tracer, + client=client, + assistant=assistant, + request=request, + ancestor_id=ancestor_id, + ) + + if response: + persist_conversation( + response, + request, + project_id, + organization_id, + job_id, + assistant_id, + latest_conversation, + ) + return APIResponse.success_response(data=_build_callback_response(response)) + else: + return _fail_job(job_id, error_message or "Unknown error") + + except Exception as e: + logger.error(f"[process_response_task] Unexpected error: {e}", exc_info=True) + return _fail_job(job_id, f"Unexpected error: {str(e)}") From 3d9b2fa29ee32bb68ee88dbeacdf70b4650b537e Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Mon, 22 Sep 2025 12:08:57 +0530 Subject: [PATCH 10/44] Refactor callback handling: move send_callback function to utils and clean up imports --- backend/app/services/response/callbacks.py | 21 +-------------------- backend/app/utils.py | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 20 deletions(-) diff --git a/backend/app/services/response/callbacks.py b/backend/app/services/response/callbacks.py index 55b8c58a1..7919ed5db 100644 --- a/backend/app/services/response/callbacks.py +++ b/backend/app/services/response/callbacks.py @@ -1,8 +1,4 @@ -from app.utils import APIResponse -import requests -import logging - -logger = logging.getLogger(__name__) +from app.utils import APIResponse, send_callback def get_additional_data(request: 
dict) -> dict: @@ -23,21 +19,6 @@ def get_additional_data(request: dict) -> dict: return {k: v for k, v in request.items() if k not in exclude_keys} -def send_callback(callback_url: str, data: dict): - """Send results to the callback URL (synchronously).""" - try: - session = requests.Session() - # uncomment this to run locally without SSL - # session.verify = False - response = session.post(callback_url, json=data) - response.raise_for_status() - logger.info(f"[send_callback] Callback sent successfully to {callback_url}") - return True - except requests.RequestException as e: - logger.error(f"[send_callback] Callback failed: {str(e)}", exc_info=True) - return False - - def send_response_callback( callback_url: str, callback_response: APIResponse, diff --git a/backend/app/utils.py b/backend/app/utils.py index 673096cad..87441b9f3 100644 --- a/backend/app/utils.py +++ b/backend/app/utils.py @@ -3,6 +3,7 @@ from dataclasses import dataclass from datetime import datetime, timedelta, timezone from pathlib import Path +import requests from typing import Any, Dict, Generic, Optional, TypeVar import jwt @@ -221,6 +222,21 @@ def handle_openai_error(e: openai.OpenAIError) -> str: return str(e) +def send_callback(callback_url: str, data: dict): + """Send results to the callback URL (synchronously).""" + try: + session = requests.Session() + # uncomment this to run locally without SSL + # session.verify = False + response = session.post(callback_url, json=data) + response.raise_for_status() + logger.info(f"[send_callback] Callback sent successfully to {callback_url}") + return True + except requests.RequestException as e: + logger.error(f"[send_callback] Callback failed: {str(e)}", exc_info=True) + return False + + @ft.singledispatch def load_description(filename: Path) -> str: if not filename.exists(): From b962760874a5e7b3ae866d5c847c41a94a121d54 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Mon, 22 Sep 2025 15:16:45 +0530 
Subject: [PATCH 11/44] Add ResponseJobStatus model and update responses endpoint to return structured response --- backend/app/api/routes/responses.py | 20 +++++++++----------- backend/app/models/response.py | 8 ++++++++ 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/backend/app/api/routes/responses.py b/backend/app/api/routes/responses.py index a31a8f658..c84659ee0 100644 --- a/backend/app/api/routes/responses.py +++ b/backend/app/api/routes/responses.py @@ -12,6 +12,7 @@ CallbackResponse, Diagnostics, ResponsesAPIRequest, + ResponseJobStatus, ResponsesSyncAPIRequest, UserProjectOrg, ) @@ -25,7 +26,7 @@ router = APIRouter(tags=["responses"]) -@router.post("/responses", response_model=dict) +@router.post("/responses", response_model=APIResponse[ResponseJobStatus]) async def responses( request: ResponsesAPIRequest, _session: Session = Depends(get_db), @@ -46,16 +47,13 @@ async def responses( request_dict = request.model_dump() additional_data = get_additional_data(request_dict) - return { - "success": True, - "data": { - "status": "processing", - "message": "Response creation started", - **additional_data, - }, - "error": None, - "metadata": None, - } + + response = ResponseJobStatus( + status="processing", + message="Your request is being processed. 
You will receive a callback once it's complete.", + **additional_data, + ) + return APIResponse.success_response(data=response) @router.post("/responses/sync", response_model=APIResponse[CallbackResponse]) diff --git a/backend/app/models/response.py b/backend/app/models/response.py index 8aef33e5b..09291ec81 100644 --- a/backend/app/models/response.py +++ b/backend/app/models/response.py @@ -24,6 +24,14 @@ class Config: extra = "allow" +class ResponseJobStatus(SQLModel): + status: str + message: str | None = None + + class Config: + extra = "allow" + + class Diagnostics(SQLModel): input_tokens: int output_tokens: int From 7764568bea7781d3c0dbf6141a2fdb940038da4e Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Mon, 22 Sep 2025 15:17:22 +0530 Subject: [PATCH 12/44] fix init --- backend/app/models/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index 4558cc598..94c45ba3f 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -99,6 +99,7 @@ Diagnostics, FileResultChunk, ResponsesAPIRequest, + ResponseJobStatus, ResponsesSyncAPIRequest, ) From a569485c3c7c56a2c67a95e153731abbda9f1492 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Mon, 22 Sep 2025 16:12:55 +0530 Subject: [PATCH 13/44] Add tests for JobCrud and response job handling --- backend/app/crud/jobs.py | 3 + .../app/tests/api/routes/test_responses.py | 672 +----------------- backend/app/tests/crud/test_jobs.py | 58 ++ .../app/tests/services/response/test_jobs.py | 38 + 4 files changed, 122 insertions(+), 649 deletions(-) create mode 100644 backend/app/tests/crud/test_jobs.py create mode 100644 backend/app/tests/services/response/test_jobs.py diff --git a/backend/app/crud/jobs.py b/backend/app/crud/jobs.py index e9cc38dce..1a9005b69 100644 --- a/backend/app/crud/jobs.py +++ b/backend/app/crud/jobs.py @@ -37,3 +37,6 @@ 
def update(self, job_id: UUID, job_update: JobUpdate) -> Job: self.session.refresh(job) return job + + def get(self, job_id: UUID) -> Job | None: + return self.session.get(Job, job_id) diff --git a/backend/app/tests/api/routes/test_responses.py b/backend/app/tests/api/routes/test_responses.py index 54a28ca12..3ee85e600 100644 --- a/backend/app/tests/api/routes/test_responses.py +++ b/backend/app/tests/api/routes/test_responses.py @@ -1,656 +1,30 @@ -from unittest.mock import MagicMock, patch -from app.api.routes.responses import process_response +from unittest.mock import patch +from fastapi.testclient import TestClient +from app.models import ResponsesAPIRequest -def create_mock_assistant(model="gpt-4o", vector_store_ids=None, max_num_results=20): - """Create a mock assistant with default or custom values.""" - if vector_store_ids is None: - vector_store_ids = ["vs_test"] - - mock_assistant = MagicMock() - mock_assistant.model = model - mock_assistant.instructions = "Test instructions" - mock_assistant.temperature = 0.1 - mock_assistant.vector_store_ids = vector_store_ids - mock_assistant.max_num_results = max_num_results - return mock_assistant - - -def create_mock_openai_response( - response_id="resp_1234567890abcdef1234567890abcdef1234567890", - output_text="Test output", - model="gpt-4o", - output=None, - previous_response_id=None, -): - """Create a mock OpenAI response with default or custom values.""" - if output is None: - output = [] - - mock_response = MagicMock() - mock_response.id = response_id - mock_response.output_text = output_text - mock_response.model = model - mock_response.usage.input_tokens = 10 - mock_response.usage.output_tokens = 5 - mock_response.usage.total_tokens = 15 - mock_response.output = output - mock_response.previous_response_id = previous_response_id - return mock_response - - -def create_mock_conversation( - response_id="resp_latest1234567890abcdef1234567890", - ancestor_response_id="resp_ancestor1234567890abcdef1234567890", -): 
- """Create a mock conversation with default or custom values.""" - mock_conversation = MagicMock() - mock_conversation.response_id = response_id - mock_conversation.ancestor_response_id = ancestor_response_id - return mock_conversation - - -def setup_common_mocks( - mock_get_credential, - mock_get_assistant, - mock_openai, - mock_tracer_class, - mock_get_ancestor_id_from_response, - mock_create_conversation, - mock_get_conversation_by_ancestor_id, - assistant_model="gpt-4o", - vector_store_ids=None, - conversation_found=True, - response_output=None, -): - """Setup common mocks used across multiple tests.""" - # Setup mock credentials - mock_get_credential.return_value = {"api_key": "test_api_key"} - - # Setup mock assistant - mock_assistant = create_mock_assistant(assistant_model, vector_store_ids) - mock_get_assistant.return_value = mock_assistant - - # Setup mock OpenAI client - mock_client = MagicMock() - mock_openai.return_value = mock_client - - # Setup mock response - mock_response = create_mock_openai_response(output=response_output) - mock_client.responses.create.return_value = mock_response - - # Setup mock tracer - mock_tracer = MagicMock() - mock_tracer_class.return_value = mock_tracer - - # Setup mock CRUD functions - mock_get_ancestor_id_from_response.return_value = ( - "resp_ancestor1234567890abcdef1234567890" - ) - mock_create_conversation.return_value = None - - # Setup mock conversation if needed - if conversation_found: - mock_conversation = create_mock_conversation() - mock_get_conversation_by_ancestor_id.return_value = mock_conversation - else: - mock_get_conversation_by_ancestor_id.return_value = None - - return mock_client, mock_assistant - - -@patch("app.api.routes.responses.process_response") -@patch("app.api.routes.responses.OpenAI") -@patch("app.api.routes.responses.get_provider_credential") -@patch("app.api.routes.responses.get_assistant_by_id") -@patch("app.api.routes.responses.LangfuseTracer") 
-@patch("app.api.routes.responses.get_ancestor_id_from_response") -@patch("app.api.routes.responses.create_conversation") -@patch("app.api.routes.responses.get_conversation_by_ancestor_id") -def test_responses_endpoint_success( - mock_get_conversation_by_ancestor_id, - mock_create_conversation, - mock_get_ancestor_id_from_response, - mock_tracer_class, - mock_get_assistant, - mock_get_credential, - mock_openai, - mock_process_response, - user_api_key_header: dict[str, str], - client, -): - """Test the /responses endpoint for successful response creation.""" - - # Mock the background task to prevent actual execution - mock_process_response.return_value = None - - # Setup common mocks - setup_common_mocks( - mock_get_credential, - mock_get_assistant, - mock_openai, - mock_tracer_class, - mock_get_ancestor_id_from_response, - mock_create_conversation, - mock_get_conversation_by_ancestor_id, - ) - - request_data = { - "assistant_id": "assistant_dalgo", - "question": "What is Dalgo?", - "callback_url": "http://example.com/callback", - } - - response = client.post( - "/api/v1/responses", json=request_data, headers=user_api_key_header - ) - - assert response.status_code == 200 - response_json = response.json() - assert response_json["success"] is True - assert response_json["data"]["status"] == "processing" - assert response_json["data"]["message"] == "Response creation started" - - # Verify that the background task was scheduled with correct parameters - mock_process_response.assert_called_once() - call_args = mock_process_response.call_args - assert call_args[0][0].assistant_id == "assistant_dalgo" - assert call_args[0][0].question == "What is Dalgo?" 
- assert call_args[0][0].callback_url == "http://example.com/callback" - assert call_args[0][0].response_id is None - - -@patch("app.api.routes.responses.process_response") -@patch("app.api.routes.responses.OpenAI") -@patch("app.api.routes.responses.get_provider_credential") -@patch("app.api.routes.responses.get_assistant_by_id") -@patch("app.api.routes.responses.LangfuseTracer") -@patch("app.api.routes.responses.get_ancestor_id_from_response") -@patch("app.api.routes.responses.create_conversation") -@patch("app.api.routes.responses.get_conversation_by_ancestor_id") -def test_responses_endpoint_without_vector_store( - mock_get_conversation_by_ancestor_id, - mock_create_conversation, - mock_get_ancestor_id_from_response, - mock_tracer_class, - mock_get_assistant, - mock_get_credential, - mock_openai, - mock_process_response, - user_api_key_header, - client, -): - """Test the /responses endpoint when assistant has no vector store configured.""" - # Mock the background task to prevent actual execution - mock_process_response.return_value = None - - # Setup common mocks with no vector store - mock_client, mock_assistant = setup_common_mocks( - mock_get_credential, - mock_get_assistant, - mock_openai, - mock_tracer_class, - mock_get_ancestor_id_from_response, - mock_create_conversation, - mock_get_conversation_by_ancestor_id, - assistant_model="gpt-4", - vector_store_ids=[], - ) - - request_data = { - "assistant_id": "assistant_123", - "question": "What is Glific?", - "callback_url": "http://example.com/callback", - } - - response = client.post( - "/api/v1/responses", json=request_data, headers=user_api_key_header - ) - assert response.status_code == 200 - response_json = response.json() - assert response_json["success"] is True - assert response_json["data"]["status"] == "processing" - assert response_json["data"]["message"] == "Response creation started" - - # Verify that the background task was scheduled with correct parameters - 
mock_process_response.assert_called_once() - call_args = mock_process_response.call_args - assert call_args[0][0].assistant_id == "assistant_123" - assert call_args[0][0].question == "What is Glific?" - assert call_args[0][0].callback_url == "http://example.com/callback" - assert call_args[0][0].response_id is None - - -@patch("app.api.routes.responses.get_assistant_by_id") -def test_responses_endpoint_assistant_not_found( - mock_get_assistant, - user_api_key_header, - client, -): - """Test the /responses endpoint when assistant is not found.""" - # Setup mock assistant to return None (not found) - mock_get_assistant.return_value = None - - request_data = { - "assistant_id": "nonexistent_assistant", - "question": "What is this?", - "callback_url": "http://example.com/callback", - } - - response = client.post( - "/api/v1/responses", json=request_data, headers=user_api_key_header - ) - assert response.status_code == 404 - response_json = response.json() - assert response_json["success"] is False - assert response_json["error"] == "Assistant not found or not active" - - -@patch("app.api.routes.responses.get_provider_credential") -@patch("app.api.routes.responses.get_assistant_by_id") -@patch("app.api.routes.responses.process_response") -def test_responses_endpoint_no_openai_credentials( - mock_process_response, - mock_get_assistant, - mock_get_credential, - user_api_key_header, - client, -): - """Test the /responses endpoint when OpenAI credentials are not configured.""" - # Setup mock assistant - mock_assistant = create_mock_assistant() - mock_get_assistant.return_value = mock_assistant - - # Setup mock credentials to return None (no credentials) - mock_get_credential.return_value = None - - request_data = { - "assistant_id": "assistant_123", - "question": "What is this?", - "callback_url": "http://example.com/callback", - } - - response = client.post( - "/api/v1/responses", json=request_data, headers=user_api_key_header - ) - assert response.status_code == 200 - 
response_json = response.json() - assert response_json["success"] is False - assert "OpenAI API key not configured" in response_json["error"] - # Ensure background task was not scheduled - mock_process_response.assert_not_called() - - -@patch("app.api.routes.responses.get_provider_credential") -@patch("app.api.routes.responses.get_assistant_by_id") -@patch("app.api.routes.responses.process_response") -def test_responses_endpoint_missing_api_key_in_credentials( - mock_process_response, - mock_get_assistant, - mock_get_credential, - user_api_key_header, - client, -): - """Test the /responses endpoint when credentials exist but don't have api_key.""" - # Setup mock assistant - mock_assistant = create_mock_assistant() - mock_get_assistant.return_value = mock_assistant - - # Setup mock credentials without api_key - mock_get_credential.return_value = {"other_key": "value"} - - request_data = { - "assistant_id": "assistant_123", - "question": "What is this?", - "callback_url": "http://example.com/callback", - } - - response = client.post( - "/api/v1/responses", json=request_data, headers=user_api_key_header - ) - assert response.status_code == 200 - response_json = response.json() - assert response_json["success"] is False - assert "OpenAI API key not configured" in response_json["error"] - # Ensure background task was not scheduled - mock_process_response.assert_not_called() - - -@patch("app.api.routes.responses.process_response") -@patch("app.api.routes.responses.OpenAI") -@patch("app.api.routes.responses.get_provider_credential") -@patch("app.api.routes.responses.get_assistant_by_id") -@patch("app.api.routes.responses.LangfuseTracer") -@patch("app.api.routes.responses.get_ancestor_id_from_response") -@patch("app.api.routes.responses.create_conversation") -@patch("app.api.routes.responses.get_conversation_by_ancestor_id") -def test_responses_endpoint_with_ancestor_conversation_found( - mock_get_conversation_by_ancestor_id, - mock_create_conversation, - 
mock_get_ancestor_id_from_response, - mock_tracer_class, - mock_get_assistant, - mock_get_credential, - mock_openai, - mock_process_response, - user_api_key_header: dict[str, str], - client, +def test_responses_async_success( + client: TestClient, user_api_key_header: dict[str, str] ): - """Test the /responses endpoint when a conversation is found by ancestor ID.""" - # Mock the background task to prevent actual execution - mock_process_response.return_value = None - - # Setup common mocks with conversation found - mock_client, mock_assistant = setup_common_mocks( - mock_get_credential, - mock_get_assistant, - mock_openai, - mock_tracer_class, - mock_get_ancestor_id_from_response, - mock_create_conversation, - mock_get_conversation_by_ancestor_id, - conversation_found=True, - ) - - request_data = { - "assistant_id": "assistant_dalgo", - "question": "What is Dalgo?", - "callback_url": "http://example.com/callback", - "response_id": "resp_ancestor1234567890abcdef1234567890", - } - - response = client.post( - "/api/v1/responses", json=request_data, headers=user_api_key_header - ) - - assert response.status_code == 200 - response_json = response.json() - assert response_json["success"] is True - assert response_json["data"]["status"] == "processing" - assert response_json["data"]["message"] == "Response creation started" - - # Verify that the background task was scheduled with correct parameters - mock_process_response.assert_called_once() - call_args = mock_process_response.call_args - assert call_args[0][0].assistant_id == "assistant_dalgo" - assert call_args[0][0].question == "What is Dalgo?" 
- assert call_args[0][0].callback_url == "http://example.com/callback" - assert call_args[0][0].response_id == "resp_ancestor1234567890abcdef1234567890" - - -@patch("app.api.routes.responses.process_response") -@patch("app.api.routes.responses.OpenAI") -@patch("app.api.routes.responses.get_provider_credential") -@patch("app.api.routes.responses.get_assistant_by_id") -@patch("app.api.routes.responses.LangfuseTracer") -@patch("app.api.routes.responses.get_ancestor_id_from_response") -@patch("app.api.routes.responses.create_conversation") -@patch("app.api.routes.responses.get_conversation_by_ancestor_id") -def test_responses_endpoint_with_ancestor_conversation_not_found( - mock_get_conversation_by_ancestor_id, - mock_create_conversation, - mock_get_ancestor_id_from_response, - mock_tracer_class, - mock_get_assistant, - mock_get_credential, - mock_openai, - mock_process_response, - user_api_key_header: dict[str, str], - client, -): - """Test the /responses endpoint when no conversation is found by ancestor ID.""" - # Mock the background task to prevent actual execution - mock_process_response.return_value = None - - # Setup common mocks with conversation not found - mock_client, mock_assistant = setup_common_mocks( - mock_get_credential, - mock_get_assistant, - mock_openai, - mock_tracer_class, - mock_get_ancestor_id_from_response, - mock_create_conversation, - mock_get_conversation_by_ancestor_id, - conversation_found=False, - ) - - request_data = { - "assistant_id": "assistant_dalgo", - "question": "What is Dalgo?", - "callback_url": "http://example.com/callback", - "response_id": "resp_ancestor1234567890abcdef1234567890", - } - - response = client.post( - "/api/v1/responses", json=request_data, headers=user_api_key_header - ) - - assert response.status_code == 200 - response_json = response.json() - assert response_json["success"] is True - assert response_json["data"]["status"] == "processing" - assert response_json["data"]["message"] == "Response creation started" 
- - # Verify that the background task was scheduled with correct parameters - mock_process_response.assert_called_once() - call_args = mock_process_response.call_args - assert call_args[0][0].assistant_id == "assistant_dalgo" - assert call_args[0][0].question == "What is Dalgo?" - assert call_args[0][0].callback_url == "http://example.com/callback" - assert call_args[0][0].response_id == "resp_ancestor1234567890abcdef1234567890" - - -@patch("app.api.routes.responses.process_response") -@patch("app.api.routes.responses.OpenAI") -@patch("app.api.routes.responses.get_provider_credential") -@patch("app.api.routes.responses.get_assistant_by_id") -@patch("app.api.routes.responses.LangfuseTracer") -@patch("app.api.routes.responses.get_ancestor_id_from_response") -@patch("app.api.routes.responses.create_conversation") -@patch("app.api.routes.responses.get_conversation_by_ancestor_id") -def test_responses_endpoint_without_response_id( - mock_get_conversation_by_ancestor_id, - mock_create_conversation, - mock_get_ancestor_id_from_response, - mock_tracer_class, - mock_get_assistant, - mock_get_credential, - mock_openai, - mock_process_response, - user_api_key_header: dict[str, str], - client, -): - """Test the /responses endpoint when no response_id is provided.""" - # Mock the background task to prevent actual execution - mock_process_response.return_value = None - - # Setup common mocks - mock_client, mock_assistant = setup_common_mocks( - mock_get_credential, - mock_get_assistant, - mock_openai, - mock_tracer_class, - mock_get_ancestor_id_from_response, - mock_create_conversation, - mock_get_conversation_by_ancestor_id, - ) - - request_data = { - "assistant_id": "assistant_dalgo", - "question": "What is Dalgo?", - "callback_url": "http://example.com/callback", - # No response_id provided - } - - response = client.post( - "/api/v1/responses", json=request_data, headers=user_api_key_header - ) - - assert response.status_code == 200 - response_json = response.json() - assert 
response_json["success"] is True - assert response_json["data"]["status"] == "processing" - assert response_json["data"]["message"] == "Response creation started" - - # Verify that the background task was scheduled with correct parameters - mock_process_response.assert_called_once() - call_args = mock_process_response.call_args - assert call_args[0][0].assistant_id == "assistant_dalgo" - assert call_args[0][0].question == "What is Dalgo?" - assert call_args[0][0].callback_url == "http://example.com/callback" - assert call_args[0][0].response_id is None - - -@patch("app.api.routes.responses.get_conversation_by_ancestor_id") -@patch("app.api.routes.responses.create_conversation") -@patch("app.api.routes.responses.get_ancestor_id_from_response") -@patch("app.api.routes.responses.send_callback") -def test_process_response_ancestor_conversation_found( - mock_send_callback, - mock_get_ancestor_id_from_response, - mock_create_conversation, - mock_get_conversation_by_ancestor_id, - db, - user_api_key, -): - """Test process_response function when ancestor conversation is found.""" - from app.api.routes.responses import ResponsesAPIRequest - - # Setup mock request - request = ResponsesAPIRequest( - assistant_id="assistant_dalgo", - question="What is Dalgo?", - callback_url="http://example.com/callback", - response_id="resp_ancestor1234567890abcdef1234567890", - ) - - # Setup mock assistant - mock_assistant = create_mock_assistant() - - # Setup mock OpenAI client - mock_client = MagicMock() - mock_response = create_mock_openai_response( - response_id="resp_new1234567890abcdef1234567890abcdef", - output_text="Test response", - previous_response_id="resp_latest1234567890abcdef1234567890", - ) - mock_client.responses.create.return_value = mock_response - - # Setup mock tracer - mock_tracer = MagicMock() - - # Setup mock conversation found by ancestor ID - mock_conversation = create_mock_conversation() - mock_get_conversation_by_ancestor_id.return_value = mock_conversation - - # 
Setup mock CRUD functions - mock_get_ancestor_id_from_response.return_value = ( - "resp_ancestor1234567890abcdef1234567890" - ) - mock_create_conversation.return_value = None - - # Call process_response - process_response( - request=request, - client=mock_client, - assistant=mock_assistant, - tracer=mock_tracer, - project_id=user_api_key.project_id, - organization_id=user_api_key.organization_id, - ancestor_id=mock_conversation.response_id, - latest_conversation=mock_conversation, - ) - - # process_response doesn't call get_conversation_by_ancestor_id; endpoint resolves it - mock_get_conversation_by_ancestor_id.assert_not_called() - - # Verify OpenAI client was called with the conversation's response_id as - # previous_response_id - mock_client.responses.create.assert_called_once() - call_args = mock_client.responses.create.call_args[1] - assert call_args["previous_response_id"] == ( - "resp_latest1234567890abcdef1234567890" - ) - - # Verify create_conversation was called - mock_create_conversation.assert_called_once() - - # Verify send_callback was called - mock_send_callback.assert_called_once() - - -@patch("app.api.routes.responses.get_conversation_by_ancestor_id") -@patch("app.api.routes.responses.create_conversation") -@patch("app.api.routes.responses.get_ancestor_id_from_response") -@patch("app.api.routes.responses.send_callback") -def test_process_response_ancestor_conversation_not_found( - mock_send_callback, - mock_get_ancestor_id_from_response, - mock_create_conversation, - mock_get_conversation_by_ancestor_id, - db, - user_api_key, -): - """Test process_response function when no ancestor conversation is found.""" - from app.api.routes.responses import ResponsesAPIRequest - - # Setup mock request - request = ResponsesAPIRequest( - assistant_id="assistant_dalgo", - question="What is Dalgo?", - callback_url="http://example.com/callback", - response_id="resp_ancestor1234567890abcdef1234567890", - ) - - # Setup mock assistant - mock_assistant = 
create_mock_assistant() - - # Setup mock OpenAI client - mock_client = MagicMock() - mock_response = create_mock_openai_response( - response_id="resp_new1234567890abcdef1234567890abcdef", - output_text="Test response", - previous_response_id="resp_ancestor1234567890abcdef1234567890", - ) - mock_client.responses.create.return_value = mock_response - - # Setup mock tracer - mock_tracer = MagicMock() - - # Setup mock conversation not found by ancestor ID - mock_get_conversation_by_ancestor_id.return_value = None - - # Setup mock CRUD functions - mock_get_ancestor_id_from_response.return_value = ( - "resp_ancestor1234567890abcdef1234567890" - ) - mock_create_conversation.return_value = None - - # Call process_response - process_response( - request=request, - client=mock_client, - assistant=mock_assistant, - tracer=mock_tracer, - project_id=user_api_key.project_id, - organization_id=user_api_key.organization_id, - ancestor_id=request.response_id, - latest_conversation=None, - ) + with patch("app.api.routes.responses.start_job") as mock_start_job: + payload = ResponsesAPIRequest( + assistant_id="assistant_123", + question="What is the capital of France?", + callback_url="http://example.com/callback", + response_id="response_123", + extra_field="extra_value", + ) - # process_response doesn't call get_conversation_by_ancestor_id; endpoint resolves it - mock_get_conversation_by_ancestor_id.assert_not_called() + response = client.post( + "api/v1/responses", json=payload.model_dump(), headers=user_api_key_header + ) - # Verify OpenAI client was called with the original response_id as - # previous_response_id - mock_client.responses.create.assert_called_once() - call_args = mock_client.responses.create.call_args[1] - assert call_args["previous_response_id"] == ( - "resp_ancestor1234567890abcdef1234567890" - ) + assert response.status_code == 200 + response_data = response.json() - # Verify create_conversation was called - mock_create_conversation.assert_called_once() + assert 
response_data["success"] is True + assert response_data["data"]["status"] == "processing" + assert "Your request is being processed" in response_data["data"]["message"] + assert response_data["data"]["extra_field"] == "extra_value" - # Verify send_callback was called - mock_send_callback.assert_called_once() + mock_start_job.assert_called_once() diff --git a/backend/app/tests/crud/test_jobs.py b/backend/app/tests/crud/test_jobs.py new file mode 100644 index 000000000..9c0866737 --- /dev/null +++ b/backend/app/tests/crud/test_jobs.py @@ -0,0 +1,58 @@ +from uuid import uuid4 +import pytest +from sqlmodel import Session +from app.crud import JobCrud +from app.models import JobUpdate, JobStatus, JobType + +@pytest.fixture +def dummy_jobs(db: Session): + """Create and return a list of dummy jobs for testing.""" + crud = JobCrud(db) + + jobs = [ + crud.create(job_type=JobType.RESPONSE, trace_id="trace-1"), + crud.create(job_type=JobType.RESPONSE, trace_id="trace-2"), + crud.create(job_type=JobType.RESPONSE, trace_id="trace-3"), + ] + + return jobs + +def test_create_job(db: Session): + crud = JobCrud(db) + job = crud.create(job_type=JobType.RESPONSE, trace_id="trace-123") + + assert job.id is not None + assert job.trace_id == "trace-123" + assert job.status == JobStatus.PENDING + + +def test_get_job(db: Session, dummy_jobs): + crud = JobCrud(db) + job = dummy_jobs[0] + + fetched = crud.get(job.id) + assert fetched is not None + assert fetched.id == job.id + assert fetched.trace_id == "trace-1" + + +def test_update_job(db: Session, dummy_jobs): + crud = JobCrud(db) + job = dummy_jobs[1] + + update_data = JobUpdate(status=JobStatus.SUCCESS, error_message="All good now") + updated_job = crud.update(job.id, update_data) + + assert updated_job.status == JobStatus.SUCCESS + assert updated_job.error_message == "All good now" + assert updated_job.updated_at is not None + assert updated_job.updated_at >= job.updated_at + + +def test_update_job_not_found(db: Session): + crud = 
JobCrud(db) + fake_id = uuid4() + update_data = JobUpdate(status=JobStatus.SUCCESS) + + with pytest.raises(ValueError, match=str(fake_id)): + crud.update(fake_id, update_data) diff --git a/backend/app/tests/services/response/test_jobs.py b/backend/app/tests/services/response/test_jobs.py new file mode 100644 index 000000000..71dcdf83a --- /dev/null +++ b/backend/app/tests/services/response/test_jobs.py @@ -0,0 +1,38 @@ +import pytest +from unittest.mock import patch +from sqlmodel import Session +from app.services.response.jobs import start_job +from app.models import ResponsesAPIRequest, JobType, JobStatus +from app.crud import JobCrud +from app.tests.utils.utils import get_project + + +def test_start_job(db: Session): + + request = ResponsesAPIRequest( + assistant_id="assistant_123", + question="What is the capital of France?", + ) + + project = get_project(db) + # Patch Celery scheduling + with patch("app.services.response.jobs.start_high_priority_job") as mock_schedule: + mock_schedule.return_value = "fake-task-id" + + job_id = start_job(db, request, project.id, project.organization_id) + + job_crud = JobCrud(session=db) + job = job_crud.get(job_id) + assert job is not None + assert job.job_type == JobType.RESPONSE + assert job.status == JobStatus.PENDING + assert job.trace_id is not None + + # Validate Celery was called correctly + mock_schedule.assert_called_once() + _, kwargs = mock_schedule.call_args + assert kwargs["function_path"] == "app.services.response.jobs.execute_job" + assert kwargs["project_id"] == project.id + assert kwargs["organization_id"] == project.organization_id + assert kwargs["job_id"] == str(job_id) + assert kwargs["request_data"]["assistant_id"] == "assistant_123" From 87ecdb6c756fe37af7aa4b0cb97c434d63fbcefc Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Mon, 22 Sep 2025 18:22:32 +0530 Subject: [PATCH 14/44] update Job model to use string for task_id --- 
.../versions/be78247139f9_create_job_table.py | 68 ------------------- .../versions/c6fb6d0b5897_create_job_table.py | 43 ++++++++++++ backend/app/models/job.py | 4 +- backend/app/services/response/response.py | 6 +- 4 files changed, 47 insertions(+), 74 deletions(-) delete mode 100644 backend/app/alembic/versions/be78247139f9_create_job_table.py create mode 100644 backend/app/alembic/versions/c6fb6d0b5897_create_job_table.py diff --git a/backend/app/alembic/versions/be78247139f9_create_job_table.py b/backend/app/alembic/versions/be78247139f9_create_job_table.py deleted file mode 100644 index ec6ec0787..000000000 --- a/backend/app/alembic/versions/be78247139f9_create_job_table.py +++ /dev/null @@ -1,68 +0,0 @@ -"""create job table - -Revision ID: be78247139f9 -Revises: 6ed6ed401847 -Create Date: 2025-09-18 13:00:23.212198 - -""" -from alembic import op -import sqlalchemy as sa -import sqlmodel.sql.sqltypes - - -# revision identifiers, used by Alembic. -revision = "be78247139f9" -down_revision = "6ed6ed401847" -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "job", - sa.Column("id", sa.Uuid(), nullable=False), - sa.Column("task_id", sa.Uuid(), nullable=True), - sa.Column("trace_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column("error_message", sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column( - "status", - sa.Enum("PENDING", "PROCESSING", "SUCCESS", "FAILED", name="jobstatus"), - nullable=False, - ), - sa.Column("job_type", sa.Enum("RESPONSE", name="jobtype"), nullable=False), - sa.Column("created_at", sa.DateTime(), nullable=False), - sa.Column("updated_at", sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint("id"), - ) - op.drop_constraint( - "openai_conversation_project_id_fkey1", - "openai_conversation", - type_="foreignkey", - ) - op.drop_constraint( - "openai_conversation_organization_id_fkey1", - "openai_conversation", - type_="foreignkey", - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_foreign_key( - "openai_conversation_organization_id_fkey1", - "openai_conversation", - "organization", - ["organization_id"], - ["id"], - ) - op.create_foreign_key( - "openai_conversation_project_id_fkey1", - "openai_conversation", - "project", - ["project_id"], - ["id"], - ) - op.drop_table("job") - # ### end Alembic commands ### diff --git a/backend/app/alembic/versions/c6fb6d0b5897_create_job_table.py b/backend/app/alembic/versions/c6fb6d0b5897_create_job_table.py new file mode 100644 index 000000000..8dc24a771 --- /dev/null +++ b/backend/app/alembic/versions/c6fb6d0b5897_create_job_table.py @@ -0,0 +1,43 @@ +"""create job table + +Revision ID: c6fb6d0b5897 +Revises: 6ed6ed401847 +Create Date: 2025-09-22 17:55:57.558157 + +""" +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes + + +# revision identifiers, used by Alembic. 
+revision = 'c6fb6d0b5897' +down_revision = '6ed6ed401847' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('job', + sa.Column('id', sa.Uuid(), nullable=False), + sa.Column('task_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column('trace_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column('error_message', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column('status', sa.Enum('PENDING', 'PROCESSING', 'SUCCESS', 'FAILED', name='jobstatus'), nullable=False), + sa.Column('job_type', sa.Enum('RESPONSE', name='jobtype'), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.drop_constraint('openai_conversation_project_id_fkey1', 'openai_conversation', type_='foreignkey') + op.drop_constraint('openai_conversation_organization_id_fkey1', 'openai_conversation', type_='foreignkey') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_foreign_key('openai_conversation_organization_id_fkey1', 'openai_conversation', 'organization', ['organization_id'], ['id']) + op.create_foreign_key('openai_conversation_project_id_fkey1', 'openai_conversation', 'project', ['project_id'], ['id']) + op.drop_table('job') + # ### end Alembic commands ### diff --git a/backend/app/models/job.py b/backend/app/models/job.py index 506adda55..8a4cdee22 100644 --- a/backend/app/models/job.py +++ b/backend/app/models/job.py @@ -24,7 +24,7 @@ class Job(SQLModel, table=True): default_factory=uuid4, primary_key=True, ) - task_id: UUID | None = Field( + task_id: str | None = Field( nullable=True, description="Celery task ID returned when job is queued." 
) trace_id: str | None = Field( @@ -46,4 +46,4 @@ class Job(SQLModel, table=True): class JobUpdate(SQLModel): status: JobStatus | None = None error_message: str | None = None - task_id: UUID | None = None + task_id: str | None = None diff --git a/backend/app/services/response/response.py b/backend/app/services/response/response.py index 80a8faa7e..cf2db453a 100644 --- a/backend/app/services/response/response.py +++ b/backend/app/services/response/response.py @@ -84,7 +84,7 @@ def generate_response( client: OpenAI, assistant: Assistant, request: ResponsesAPIRequest, - ancestor_id: str, + ancestor_id: str | None, ) -> tuple[Response | None, str | None]: """Generate a response using OpenAI and track with Langfuse.""" response: Response | None = None @@ -218,9 +218,7 @@ def process_response( with Session(engine) as session: JobCrud(session=session).update( job_id=job_id, - job_update=JobUpdate( - status=JobStatus.PROCESSING, task_id=UUID(task_id) - ), + job_update=JobUpdate(status=JobStatus.PROCESSING, task_id=task_id), ) assistant = get_assistant_by_id(session, assistant_id, project_id) From 96160c6c935db8d8a5509b864cfaf60fe5b2e5e2 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Mon, 22 Sep 2025 18:22:44 +0530 Subject: [PATCH 15/44] Add tests for response generation and processing, including success and error handling --- backend/app/tests/crud/test_jobs.py | 2 + .../response/test_generate_response.py | 71 +++++++++ .../response/test_process_response.py | 149 ++++++++++++++++++ .../app/tests/services/response/test_jobs.py | 1 - backend/app/tests/utils/openai.py | 36 +++++ 5 files changed, 258 insertions(+), 1 deletion(-) create mode 100644 backend/app/tests/services/response/response/test_generate_response.py create mode 100644 backend/app/tests/services/response/response/test_process_response.py diff --git a/backend/app/tests/crud/test_jobs.py b/backend/app/tests/crud/test_jobs.py index 9c0866737..98bf7305b 100644 --- 
a/backend/app/tests/crud/test_jobs.py +++ b/backend/app/tests/crud/test_jobs.py @@ -4,6 +4,7 @@ from app.crud import JobCrud from app.models import JobUpdate, JobStatus, JobType + @pytest.fixture def dummy_jobs(db: Session): """Create and return a list of dummy jobs for testing.""" @@ -17,6 +18,7 @@ def dummy_jobs(db: Session): return jobs + def test_create_job(db: Session): crud = JobCrud(db) job = crud.create(job_type=JobType.RESPONSE, trace_id="trace-123") diff --git a/backend/app/tests/services/response/response/test_generate_response.py b/backend/app/tests/services/response/response/test_generate_response.py new file mode 100644 index 000000000..0cf81a5d2 --- /dev/null +++ b/backend/app/tests/services/response/response/test_generate_response.py @@ -0,0 +1,71 @@ +import pytest +from unittest.mock import MagicMock + +from openai import OpenAIError +from sqlmodel import Session + +from app.core.langfuse.langfuse import LangfuseTracer +from app.models import Assistant, ResponsesAPIRequest +from app.services.response.response import generate_response + + +@pytest.fixture +def assistant_mock() -> Assistant: + """Fixture to create an assistant in DB with id=123.""" + assistant = Assistant( + id="123", + name="Test Assistant", + model="gpt-4", + temperature=0.7, + instructions="You are a helpful assistant.", + vector_store_ids=["vs1", "vs2"], + max_num_results=5, + ) + return assistant + +def test_generate_response_success(db: Session, assistant_mock: Assistant): + """Test successful OpenAI response generation.""" + mock_response = MagicMock() + + mock_client = MagicMock() + + request = ResponsesAPIRequest( + assistant_id="123", + question="What is the capital of France?", + callback_url="http://example.com/callback", + ) + + response, error = generate_response( + tracer=LangfuseTracer(), + client=mock_client, + assistant=assistant_mock, + request=request, + ancestor_id=None, + ) + + mock_client.responses.create.assert_called_once() + assert error is None + + +def 
test_generate_response_openai_error(assistant_mock: Assistant): + """Test OpenAI error handling path.""" + + mock_client = MagicMock() + mock_client.responses.create.side_effect = OpenAIError("API failed") + + request = ResponsesAPIRequest( + assistant_id="123", + question="What is the capital of Germany?", + ) + + response, error = generate_response( + tracer=LangfuseTracer(), + client=mock_client, + assistant=assistant_mock, + request=request, + ancestor_id=None, + ) + + assert response is None + assert error is not None + assert "API failed" in error diff --git a/backend/app/tests/services/response/response/test_process_response.py b/backend/app/tests/services/response/response/test_process_response.py new file mode 100644 index 000000000..d76e3045f --- /dev/null +++ b/backend/app/tests/services/response/response/test_process_response.py @@ -0,0 +1,149 @@ +import pytest +from unittest.mock import patch, MagicMock +from uuid import uuid4 +from app.services.response.response import process_response +from app.models import ResponsesAPIRequest, Assistant, Job, JobStatus, AssistantCreate, Project, JobType +from app.core.db import engine +from sqlmodel import Session +from app.utils import APIResponse +from app.tests.utils.utils import get_project +from app.tests.utils.test_data import create_test_credential +from app.tests.utils.openai import mock_openai_response, generate_openai_id +from app.crud import JobCrud, create_assistant +from openai import OpenAI + + +@pytest.fixture +def setup_db(db: Session) -> tuple[Assistant, Job, Project]: + """Fixture to set up a job and assistant in the database.""" + _ , project = create_test_credential(db) + assistant_create = AssistantCreate( + name="Test Assistant", + instructions="You are a helpful assistant.", + model="gpt-4", + ) + client = OpenAI(api_key="test_api_key") + assistant = create_assistant( + session=db, + assistant=assistant_create, + openai_client=client, + project_id=project.id, + 
organization_id=project.organization_id, + ) + + job = JobCrud(session=db).create( + job_type=JobType.RESPONSE, + trace_id=str(uuid4()), + ) + + return assistant, job, project + + +def make_request(assistant_id: str, previous_response_id: str | None = None): + return ResponsesAPIRequest( + assistant_id=assistant_id, + question="What is the capital of France?", + callback_url="http://example.com/callback", + response_id=previous_response_id, + ) + + +def test_process_response_success( + db: Session, setup_db: tuple[Assistant, Job, Project] +) -> None: + assistant, job, project = setup_db + prev_id = generate_openai_id("resp_") + request = make_request(assistant.assistant_id, prev_id) + job_id = job.id + task_id = "task_123" + + response, error = mock_openai_response("Mock response text.", prev_id), None + + with ( + patch("app.services.response.response.generate_response", return_value=(response, error)), + patch("app.services.response.response.Session", return_value=db), + ): + api_response: APIResponse = process_response( + request=request, + project_id=project.id, + organization_id=project.organization_id, + job_id=job_id, + task_id=task_id, + task_instance=None, + ) + + job = db.get(Job, job_id) + assert api_response.success is True + assert job.status == JobStatus.SUCCESS + + +def test_process_response_assistant_not_found( + db: Session, setup_db: tuple[Assistant, Job, Project] +) -> None: + _, job, project = setup_db + request: ResponsesAPIRequest = make_request("non_existent_asst") + + with patch("app.services.response.response.Session", return_value=db): + api_response: APIResponse = process_response( + request=request, + project_id=project.id, + organization_id=project.organization_id, + job_id=job.id, + task_id="task_456", + task_instance=None, + ) + + job = db.get(Job, job.id) + assert api_response.success is False + assert "Assistant not found" in api_response.error + assert job.status == JobStatus.FAILED + + +def 
test_process_response_generate_response_failure( + db: Session, setup_db: tuple[Assistant, Job, Project] +) -> None: + assistant, job, project = setup_db + request: ResponsesAPIRequest = make_request(assistant.assistant_id) + + with ( + patch("app.services.response.response.generate_response", return_value=(None, "Some error")), + patch("app.services.response.response.Session", return_value=db), + ): + api_response: APIResponse = process_response( + request=request, + project_id=project.id, + organization_id=project.organization_id, + job_id=job.id, + task_id="task_789", + task_instance=None, + ) + + job = db.get(Job, job.id) + assert api_response.success is False + assert "Some error" in api_response.error + assert job.status == JobStatus.FAILED + + +def test_process_response_unexpected_exception( + db: Session, setup_db: tuple[Assistant, Job, Project] +) -> None: + assistant, job, project = setup_db + request: ResponsesAPIRequest = make_request(assistant.assistant_id) + + with ( + patch("app.services.response.response.generate_response", side_effect=Exception("Boom")), + patch("app.services.response.response.Session", return_value=db), + ): + api_response: APIResponse = process_response( + request=request, + project_id=project.id, + organization_id=project.organization_id, + job_id=job.id, + task_id="task_999", + task_instance=None, + ) + + job = db.get(Job, job.id) + assert api_response.success is False + assert "Unexpected error" in api_response.error + assert job.status == JobStatus.FAILED diff --git a/backend/app/tests/services/response/test_jobs.py b/backend/app/tests/services/response/test_jobs.py index 71dcdf83a..6c8830a7f 100644 --- a/backend/app/tests/services/response/test_jobs.py +++ b/backend/app/tests/services/response/test_jobs.py @@ -8,7 +8,6 @@ def test_start_job(db: Session): - request = ResponsesAPIRequest( assistant_id="assistant_123", question="What is the capital of France?", diff --git a/backend/app/tests/utils/openai.py 
b/backend/app/tests/utils/openai.py index a864ee33c..93bebed7d 100644 --- a/backend/app/tests/utils/openai.py +++ b/backend/app/tests/utils/openai.py @@ -3,12 +3,15 @@ import string from typing import Optional +from types import SimpleNamespace from unittest.mock import MagicMock from openai.types.beta import Assistant as OpenAIAssistant from openai.types.beta.assistant import ToolResources, ToolResourcesFileSearch from openai.types.beta.assistant_tool import FileSearchTool from openai.types.beta.file_search_tool import FileSearch +from openai.types.responses.response import Response, ToolChoice, ResponseUsage +from openai.types.responses.response_output_item import ResponseOutputItem def generate_openai_id(prefix: str, length: int = 40) -> str: @@ -51,6 +54,39 @@ def mock_openai_assistant( ) +def mock_openai_response( + text: str = "Hello world", + previous_response_id: str | None = None, + model: str = "gpt-4", +) -> SimpleNamespace: + """Return a minimal mock OpenAI-like response object for testing.""" + + usage = SimpleNamespace( + input_tokens=10, + output_tokens=20, + total_tokens=30, + ) + + output_item = SimpleNamespace( + id=generate_openai_id("out_"), + type="message", + role="assistant", + content=[{"type": "output_text", "text": text}], + ) + + response = SimpleNamespace( + id=generate_openai_id("resp_"), + created_at=int(time.time()), + model=model, + object="response", + output=[output_item], + output_text=text, + usage=usage, + previous_response_id=previous_response_id, + ) + return response + + def get_mock_openai_client_with_vector_store(): mock_client = MagicMock() From f15037201bd4a7d6c1312aed2e9ad561d3a6c53e Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Tue, 23 Sep 2025 10:14:37 +0530 Subject: [PATCH 16/44] pre commit --- .../versions/c6fb6d0b5897_create_job_table.py | 59 +++++++++++++------ .../response/test_generate_response.py | 1 + .../response/test_process_response.py | 27 +++++++-- 3 files 
changed, 65 insertions(+), 22 deletions(-) diff --git a/backend/app/alembic/versions/c6fb6d0b5897_create_job_table.py b/backend/app/alembic/versions/c6fb6d0b5897_create_job_table.py index 8dc24a771..86f49a7e4 100644 --- a/backend/app/alembic/versions/c6fb6d0b5897_create_job_table.py +++ b/backend/app/alembic/versions/c6fb6d0b5897_create_job_table.py @@ -11,33 +11,58 @@ # revision identifiers, used by Alembic. -revision = 'c6fb6d0b5897' -down_revision = '6ed6ed401847' +revision = "c6fb6d0b5897" +down_revision = "6ed6ed401847" branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('job', - sa.Column('id', sa.Uuid(), nullable=False), - sa.Column('task_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column('trace_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column('error_message', sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column('status', sa.Enum('PENDING', 'PROCESSING', 'SUCCESS', 'FAILED', name='jobstatus'), nullable=False), - sa.Column('job_type', sa.Enum('RESPONSE', name='jobtype'), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint('id') + op.create_table( + "job", + sa.Column("id", sa.Uuid(), nullable=False), + sa.Column("task_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("trace_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("error_message", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column( + "status", + sa.Enum("PENDING", "PROCESSING", "SUCCESS", "FAILED", name="jobstatus"), + nullable=False, + ), + sa.Column("job_type", sa.Enum("RESPONSE", name="jobtype"), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) + op.drop_constraint( + 
"openai_conversation_project_id_fkey1", + "openai_conversation", + type_="foreignkey", + ) + op.drop_constraint( + "openai_conversation_organization_id_fkey1", + "openai_conversation", + type_="foreignkey", ) - op.drop_constraint('openai_conversation_project_id_fkey1', 'openai_conversation', type_='foreignkey') - op.drop_constraint('openai_conversation_organization_id_fkey1', 'openai_conversation', type_='foreignkey') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_foreign_key('openai_conversation_organization_id_fkey1', 'openai_conversation', 'organization', ['organization_id'], ['id']) - op.create_foreign_key('openai_conversation_project_id_fkey1', 'openai_conversation', 'project', ['project_id'], ['id']) - op.drop_table('job') + op.create_foreign_key( + "openai_conversation_organization_id_fkey1", + "openai_conversation", + "organization", + ["organization_id"], + ["id"], + ) + op.create_foreign_key( + "openai_conversation_project_id_fkey1", + "openai_conversation", + "project", + ["project_id"], + ["id"], + ) + op.drop_table("job") # ### end Alembic commands ### diff --git a/backend/app/tests/services/response/response/test_generate_response.py b/backend/app/tests/services/response/response/test_generate_response.py index 0cf81a5d2..886c5019e 100644 --- a/backend/app/tests/services/response/response/test_generate_response.py +++ b/backend/app/tests/services/response/response/test_generate_response.py @@ -23,6 +23,7 @@ def assistant_mock() -> Assistant: ) return assistant + def test_generate_response_success(db: Session, assistant_mock: Assistant): """Test successful OpenAI response generation.""" mock_response = MagicMock() diff --git a/backend/app/tests/services/response/response/test_process_response.py b/backend/app/tests/services/response/response/test_process_response.py index d76e3045f..0ca59019e 100644 --- a/backend/app/tests/services/response/response/test_process_response.py +++ 
b/backend/app/tests/services/response/response/test_process_response.py @@ -2,7 +2,15 @@ from unittest.mock import patch, MagicMock from uuid import uuid4 from app.services.response.response import process_response -from app.models import ResponsesAPIRequest, Assistant, Job, JobStatus, AssistantCreate, Project, JobType +from app.models import ( + ResponsesAPIRequest, + Assistant, + Job, + JobStatus, + AssistantCreate, + Project, + JobType, +) from app.core.db import engine from sqlmodel import Session from app.utils import APIResponse @@ -16,7 +24,7 @@ @pytest.fixture def setup_db(db: Session) -> tuple[Assistant, Job, Project]: """Fixture to set up a job and assistant in the database.""" - _ , project = create_test_credential(db) + _, project = create_test_credential(db) assistant_create = AssistantCreate( name="Test Assistant", instructions="You are a helpful assistant.", @@ -60,7 +68,10 @@ def test_process_response_success( response, error = mock_openai_response("Mock response text.", prev_id), None with ( - patch("app.services.response.response.generate_response", return_value=(response, error)), + patch( + "app.services.response.response.generate_response", + return_value=(response, error), + ), patch("app.services.response.response.Session", return_value=db), ): api_response: APIResponse = process_response( @@ -106,7 +117,10 @@ def test_process_response_generate_response_failure( request: ResponsesAPIRequest = make_request(assistant.assistant_id) with ( - patch("app.services.response.response.generate_response", return_value=(None, "Some error")), + patch( + "app.services.response.response.generate_response", + return_value=(None, "Some error"), + ), patch("app.services.response.response.Session", return_value=db), ): api_response: APIResponse = process_response( @@ -131,7 +145,10 @@ def test_process_response_unexpected_exception( request: ResponsesAPIRequest = make_request(assistant.assistant_id) with ( - 
patch("app.services.response.response.generate_response", side_effect=Exception("Boom")), + patch( + "app.services.response.response.generate_response", + side_effect=Exception("Boom"), + ), patch("app.services.response.response.Session", return_value=db), ): api_response: APIResponse = process_response( From e7fe68d8fcf1aa909606f4a28377c5449f14544c Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Tue, 23 Sep 2025 11:20:59 +0530 Subject: [PATCH 17/44] rename test_jobs --- .../services/response/{test_jobs.py => test_jobs_response.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename backend/app/tests/services/response/{test_jobs.py => test_jobs_response.py} (100%) diff --git a/backend/app/tests/services/response/test_jobs.py b/backend/app/tests/services/response/test_jobs_response.py similarity index 100% rename from backend/app/tests/services/response/test_jobs.py rename to backend/app/tests/services/response/test_jobs_response.py From e62fa533f342c815d2fe82e4a210a21e1a9f6795 Mon Sep 17 00:00:00 2001 From: nishika26 Date: Tue, 23 Sep 2025 23:48:50 +0530 Subject: [PATCH 18/44] move tenant from user to project, add collections in services folder for job execution --- ...20256_alter_collection_table_for_celery.py | 42 ++ backend/app/api/routes/collections.py | 378 +++--------------- backend/app/crud/collection.py | 22 +- backend/app/models/collection.py | 111 ++++- backend/app/models/user.py | 4 +- .../services/collections/create_collection.py | 164 ++++++++ .../services/collections/delete_collection.py | 104 +++++ backend/app/services/collections/helpers.py | 99 +++++ 8 files changed, 576 insertions(+), 348 deletions(-) create mode 100644 backend/app/alembic/versions/96388ce20256_alter_collection_table_for_celery.py create mode 100644 backend/app/services/collections/create_collection.py create mode 100644 backend/app/services/collections/delete_collection.py create mode 100644 
backend/app/services/collections/helpers.py diff --git a/backend/app/alembic/versions/96388ce20256_alter_collection_table_for_celery.py b/backend/app/alembic/versions/96388ce20256_alter_collection_table_for_celery.py new file mode 100644 index 000000000..ac778cdd5 --- /dev/null +++ b/backend/app/alembic/versions/96388ce20256_alter_collection_table_for_celery.py @@ -0,0 +1,42 @@ +"""alter collection table for celery + +Revision ID: 96388ce20256 +Revises: 6ed6ed401847 +Create Date: 2025-09-17 16:35:37.809812 + +""" +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes + + +# revision identifiers, used by Alembic. +revision = "96388ce20256" +down_revision = "6ed6ed401847" +branch_labels = None +depends_on = None + + +def upgrade(): + op.drop_constraint("collection_owner_id_fkey", "collection", type_="foreignkey") + op.drop_column("collection", "owner_id") + op.add_column( + "collection", + sa.Column("task_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + ) + + +def downgrade(): + op.drop_column("collection", "task_id") + op.add_column( + "collection", + sa.Column("owner_id", sa.INTEGER(), autoincrement=False, nullable=False), + ) + op.create_foreign_key( + "collection_owner_id_fkey", + "collection", + "user", + ["owner_id"], + ["id"], + ondelete="CASCADE", + ) diff --git a/backend/app/api/routes/collections.py b/backend/app/api/routes/collections.py index bea3ed3a3..32eac4b23 100644 --- a/backend/app/api/routes/collections.py +++ b/backend/app/api/routes/collections.py @@ -1,292 +1,39 @@ -import inspect -import logging -import time +import re import json import ast -import re -from uuid import UUID, uuid4 -from typing import Any, List, Optional -from dataclasses import dataclass, field, fields, asdict, replace +import inspect +import logging +from uuid import UUID +from typing import List +from dataclasses import asdict -from openai import OpenAIError, OpenAI -from fastapi import APIRouter, HTTPException, BackgroundTasks, Query +from 
fastapi import APIRouter, BackgroundTasks, Query from fastapi import Path as FastPath -from pydantic import BaseModel, Field, HttpUrl -from sqlalchemy.exc import SQLAlchemyError + from app.api.deps import CurrentUser, SessionDep, CurrentUserOrgProject -from app.core.cloud import get_cloud_storage -from app.api.routes.responses import handle_openai_error -from app.core.util import now, post_callback from app.crud import ( - DocumentCrud, CollectionCrud, DocumentCollectionCrud, ) -from app.crud.rag import OpenAIVectorStoreCrud, OpenAIAssistantCrud -from app.models import Collection, Document, DocumentPublic -from app.models.collection import CollectionStatus +from app.models import Collection, DocumentPublic +from app.models.collection import ( + CollectionStatus, + CreationRequest, + ResponsePayload, + DeletionRequest, +) from app.utils import APIResponse, load_description, get_openai_client +from app.services.collections.helpers import extract_error_message +from app.services.collections import ( + create_collection as create_services, + delete_collection as delete_services, +) logger = logging.getLogger(__name__) router = APIRouter(prefix="/collections", tags=["collections"]) -def extract_error_message(err: Exception) -> str: - err_str = str(err).strip() - - body = re.sub(r"^Error code:\s*\d+\s*-\s*", "", err_str) - message = None - try: - payload = json.loads(body) - if isinstance(payload, dict): - message = payload.get("error", {}).get("message") - except Exception: - pass - - if message is None: - try: - payload = ast.literal_eval(body) - if isinstance(payload, dict): - message = payload.get("error", {}).get("message") - except Exception: - pass - - if not message: - message = body - - return message.strip()[:1000] - - -@dataclass -class ResponsePayload: - status: str - route: str - key: str = field(default_factory=lambda: str(uuid4())) - time: str = field(default_factory=lambda: now().strftime("%c")) - - @classmethod - def now(cls): - attr = "time" - for i in 
fields(cls): - if i.name == attr: - return i.default_factory() - - raise AttributeError(f'Expected attribute "{attr}" does not exist') - - -class DocumentOptions(BaseModel): - documents: List[UUID] = Field( - description="List of document IDs", - ) - batch_size: int = Field( - default=1, - description=( - "Number of documents to send to OpenAI in a single " - "transaction. See the `file_ids` parameter in the " - "vector store [create batch](https://platform.openai.com/docs/api-reference/vector-stores-file-batches/createBatch)." - ), - ) - - def model_post_init(self, __context: Any): - self.documents = list(set(self.documents)) - - def __call__(self, crud: DocumentCrud): - logger.info( - f"[DocumentOptions.call] Starting batch iteration for documents | {{'batch_size': {self.batch_size}, 'total_documents': {len(self.documents)}}}" - ) - (start, stop) = (0, self.batch_size) - while True: - view = self.documents[start:stop] - if not view: - break - yield crud.read_each(view) - start = stop - stop += self.batch_size - - -class AssistantOptions(BaseModel): - # Fields to be passed along to OpenAI. They must be a subset of - # parameters accepted by the OpenAI.clien.beta.assistants.create - # API. - model: str = Field( - description=( - "OpenAI model to attach to this assistant. The model " - "must compatable with the assistants API; see the " - "OpenAI [model documentation](https://platform.openai.com/docs/models/compare) for more." - ), - ) - instructions: str = Field( - description=( - "Assistant instruction. Sometimes referred to as the " '"system" prompt.' - ), - ) - temperature: float = Field( - default=1e-6, - description=( - "Model temperature. The default is slightly " - "greater-than zero because it is [unknown how OpenAI " - "handles zero](https://community.openai.com/t/clarifications-on-setting-temperature-0/886447/5)." 
- ), - ) - - -class CallbackRequest(BaseModel): - callback_url: Optional[HttpUrl] = Field( - default=None, - description="URL to call to report endpoint status", - ) - - -class CreationRequest( - DocumentOptions, - AssistantOptions, - CallbackRequest, -): - def extract_super_type(self, cls: "CreationRequest"): - for field_name in cls.__fields__.keys(): - field_value = getattr(self, field_name) - yield (field_name, field_value) - - -class DeletionRequest(CallbackRequest): - collection_id: UUID = Field("Collection to delete") - - -class CallbackHandler: - def __init__(self, payload: ResponsePayload): - self.payload = payload - - def fail(self, body): - raise NotImplementedError() - - def success(self, body): - raise NotImplementedError() - - -class SilentCallback(CallbackHandler): - def fail(self, body): - logger.info(f"[SilentCallback.fail] Silent callback failure") - return - - def success(self, body): - logger.info(f"[SilentCallback.success] Silent callback success") - return - - -class WebHookCallback(CallbackHandler): - def __init__(self, url: HttpUrl, payload: ResponsePayload): - super().__init__(payload) - self.url = url - logger.info( - f"[WebHookCallback.init] Initialized webhook callback | {{'url': '{url}'}}" - ) - - def __call__(self, response: APIResponse, status: str): - time = ResponsePayload.now() - payload = replace(self.payload, status=status, time=time) - response.metadata = asdict(payload) - logger.info( - f"[WebHookCallback.call] Posting callback | {{'url': '{self.url}', 'status': '{status}'}}" - ) - post_callback(self.url, response) - - def fail(self, body): - logger.warning(f"[WebHookCallback.fail] Callback failed | {{'body': '{body}'}}") - self(APIResponse.failure_response(body), "incomplete") - - def success(self, body): - logger.info(f"[WebHookCallback.success] Callback succeeded") - self(APIResponse.success_response(body), "complete") - - -def _backout(crud: OpenAIAssistantCrud, assistant_id: str): - try: - crud.delete(assistant_id) - except 
OpenAIError as err: - logger.error( - f"[backout] Failed to delete assistant | {{'assistant_id': '{assistant_id}', 'error': '{str(err)}'}}", - exc_info=True, - ) - - -def do_create_collection( - session: SessionDep, - current_user: CurrentUserOrgProject, - request: CreationRequest, - payload: ResponsePayload, - client: OpenAI, -): - start_time = time.time() - - callback = ( - SilentCallback(payload) - if request.callback_url is None - else WebHookCallback(request.callback_url, payload) - ) - - storage = get_cloud_storage(session=session, project_id=current_user.project_id) - document_crud = DocumentCrud(session, current_user.project_id) - assistant_crud = OpenAIAssistantCrud(client) - vector_store_crud = OpenAIVectorStoreCrud(client) - collection_crud = CollectionCrud(session, current_user.id) - - try: - vector_store = vector_store_crud.create() - - docs = list(request(document_crud)) - flat_docs = [doc for sublist in docs for doc in sublist] - - file_exts = {doc.fname.split(".")[-1] for doc in flat_docs if "." 
in doc.fname} - file_sizes_kb = [ - storage.get_file_size_kb(doc.object_store_url) for doc in flat_docs - ] - - list(vector_store_crud.update(vector_store.id, storage, docs)) - - assistant_options = dict(request.extract_super_type(AssistantOptions)) - assistant = assistant_crud.create(vector_store.id, **assistant_options) - - collection = collection_crud.read_one(UUID(payload.key)) - collection.llm_service_id = assistant.id - collection.llm_service_name = request.model - collection.status = CollectionStatus.successful - collection.updated_at = now() - - if flat_docs: - DocumentCollectionCrud(session).create(collection, flat_docs) - - collection_crud._update(collection) - - elapsed = time.time() - start_time - logger.info( - f"[do_create_collection] Collection created: {collection.id} | Time: {elapsed:.2f}s | " - f"Files: {len(flat_docs)} | Sizes: {file_sizes_kb} KB | Types: {list(file_exts)}" - ) - callback.success(collection.model_dump(mode="json")) - - except Exception as err: - logger.error( - f"[do_create_collection] Collection Creation Failed | {{'collection_id': '{payload.key}', 'error': '{str(err)}'}}", - exc_info=True, - ) - if "assistant" in locals(): - _backout(assistant_crud, assistant.id) - try: - collection = collection_crud.read_one(UUID(payload.key)) - collection.status = CollectionStatus.failed - collection.updated_at = now() - message = extract_error_message(err) - collection.error_message = message - - collection_crud._update(collection) - except Exception as suberr: - logger.warning( - f"[do_create_collection] Failed to update collection status | {{'collection_id': '{payload.key}', 'reason': '{str(suberr)}'}}" - ) - callback.fail(str(err)) - - @router.post( "/create", description=load_description("collections/create.md"), @@ -297,27 +44,27 @@ def create_collection( request: CreationRequest, background_tasks: BackgroundTasks, ): - client = get_openai_client( - session, current_user.organization_id, current_user.project_id - ) - this = 
inspect.currentframe() route = router.url_path_for(this.f_code.co_name) payload = ResponsePayload("processing", route) collection = Collection( id=UUID(payload.key), - owner_id=current_user.id, organization_id=current_user.organization_id, project_id=current_user.project_id, status=CollectionStatus.processing, ) - collection_crud = CollectionCrud(session, current_user.id) + collection_crud = CollectionCrud(session, current_user.project_id) collection_crud.create(collection) - background_tasks.add_task( - do_create_collection, session, current_user, request, payload, client + create_services.start_job( + db=session, + request=request.model_dump(), + payload=asdict(payload), + collection=collection, + project_id=current_user.project_id, + organization_id=current_user.organization_id, ) logger.info( @@ -327,41 +74,6 @@ def create_collection( return APIResponse.success_response(data=None, metadata=asdict(payload)) -def do_delete_collection( - session: SessionDep, - current_user: CurrentUserOrgProject, - request: DeletionRequest, - payload: ResponsePayload, - client: OpenAI, -): - if request.callback_url is None: - callback = SilentCallback(payload) - else: - callback = WebHookCallback(request.callback_url, payload) - - collection_crud = CollectionCrud(session, current_user.id) - try: - collection = collection_crud.read_one(request.collection_id) - assistant = OpenAIAssistantCrud(client) - data = collection_crud.delete(collection, assistant) - logger.info( - f"[do_delete_collection] Collection deleted successfully | {{'collection_id': '{collection.id}'}}" - ) - callback.success(data.model_dump(mode="json")) - except (ValueError, PermissionError, SQLAlchemyError) as err: - logger.error( - f"[do_delete_collection] Failed to delete collection | {{'collection_id': '{request.collection_id}', 'error': '{str(err)}'}}", - exc_info=True, - ) - callback.fail(str(err)) - except Exception as err: - logger.error( - f"[do_delete_collection] Unexpected error during deletion | 
{{'collection_id': '{request.collection_id}', 'error': '{str(err)}', 'error_type': '{type(err).__name__}'}}", - exc_info=True, - ) - callback.fail(str(err)) - - @router.post( "/delete", description=load_description("collections/delete.md"), @@ -376,12 +88,20 @@ def delete_collection( session, current_user.organization_id, current_user.project_id ) + collection_crud = CollectionCrud(session, current_user.project_id) + collection = collection_crud.read_one(request.collection_id) + this = inspect.currentframe() route = router.url_path_for(this.f_code.co_name) payload = ResponsePayload("processing", route) - background_tasks.add_task( - do_delete_collection, session, current_user, request, payload, client + delete_services.start_job( + db=session, + request=request.model_dump(), + payload=asdict(payload), + collection=collection, + project_id=current_user.project_id, + organization_id=current_user.organization_id, ) logger.info( @@ -398,11 +118,16 @@ def delete_collection( ) def collection_info( session: SessionDep, - current_user: CurrentUser, + current_user: CurrentUserOrgProject, collection_id: UUID = FastPath(description="Collection to retrieve"), ): - collection_crud = CollectionCrud(session, current_user.id) + collection_crud = CollectionCrud(session, current_user.project_id) data = collection_crud.read_one(collection_id) + + err = getattr(data, "error_message", None) + if err: + data.error_message = extract_error_message(err) + return APIResponse.success_response(data) @@ -413,11 +138,16 @@ def collection_info( ) def list_collections( session: SessionDep, - current_user: CurrentUser, + current_user: CurrentUserOrgProject, ): - collection_crud = CollectionCrud(session, current_user.id) - data = collection_crud.read_all() - return APIResponse.success_response(data) + collection_crud = CollectionCrud(session, current_user.project_id) + rows = collection_crud.read_all() + + for c in rows: + if getattr(c, "error_message", None): + c.error_message = 
extract_error_message(c.error_message) + + return APIResponse.success_response(rows) @router.post( diff --git a/backend/app/crud/collection.py b/backend/app/crud/collection.py index b08ddcf5e..0f5d8c09f 100644 --- a/backend/app/crud/collection.py +++ b/backend/app/crud/collection.py @@ -3,6 +3,7 @@ from uuid import UUID from typing import Optional import logging + from sqlmodel import Session, func, select, and_ from app.models import Document, Collection, DocumentCollection @@ -15,17 +16,17 @@ class CollectionCrud: - def __init__(self, session: Session, owner_id: int): + def __init__(self, session: Session, project_id: int): self.session = session - self.owner_id = owner_id + self.project_id = project_id def _update(self, collection: Collection): - if not collection.owner_id: - collection.owner_id = self.owner_id - elif collection.owner_id != self.owner_id: - err = "Invalid collection ownership: owner={} attempter={}".format( - self.owner_id, - collection.owner_id, + if not collection.project_id: + collection.project_id = self.project_id + elif collection.project_id != self.project_id: + err = "Invalid collection ownership: owner_project={} attempter={}".format( + self.project_id, + collection.project_id, ) try: raise PermissionError(err) @@ -84,18 +85,19 @@ def create( def read_one(self, collection_id: UUID): statement = select(Collection).where( and_( - Collection.owner_id == self.owner_id, + Collection.project_id == self.project_id, Collection.id == collection_id, ) ) collection = self.session.exec(statement).one() + return collection def read_all(self): statement = select(Collection).where( and_( - Collection.owner_id == self.owner_id, + Collection.project_id == self.project_id, Collection.deleted_at.is_(None), ) ) diff --git a/backend/app/models/collection.py b/backend/app/models/collection.py index 5b9119c6c..cb43be72b 100644 --- a/backend/app/models/collection.py +++ b/backend/app/models/collection.py @@ -1,15 +1,17 @@ +import enum from uuid import UUID, 
uuid4 from datetime import datetime -from typing import Optional +from typing import Any, List, Optional +from dataclasses import dataclass, field, fields from sqlmodel import Field, Relationship, SQLModel +from pydantic import HttpUrl, BaseModel from app.core.util import now -from .user import User from .organization import Organization from .project import Project -import enum -from enum import Enum +from app.core.util import now +from app.crud.document import DocumentCrud class CollectionStatus(str, enum.Enum): @@ -21,12 +23,6 @@ class CollectionStatus(str, enum.Enum): class Collection(SQLModel, table=True): id: UUID = Field(default_factory=uuid4, primary_key=True) - owner_id: int = Field( - foreign_key="user.id", - nullable=False, - ondelete="CASCADE", - ) - organization_id: int = Field( foreign_key="organization.id", nullable=False, @@ -44,11 +40,104 @@ class Collection(SQLModel, table=True): status: CollectionStatus = Field(default=CollectionStatus.processing) error_message: Optional[str] = Field(default=None, nullable=True) + task_id: Optional[str] = Field(default=None, description="Celery task ID") created_at: datetime = Field(default_factory=now) updated_at: datetime = Field(default_factory=now) deleted_at: Optional[datetime] = None - owner: User = Relationship(back_populates="collections") organization: Organization = Relationship(back_populates="collections") project: Project = Relationship(back_populates="collections") + + +@dataclass +class ResponsePayload: + status: str + route: str + key: str = field(default_factory=lambda: str(uuid4())) + time: str = field(default_factory=lambda: now().strftime("%c")) + + @classmethod + def now(cls): + attr = "time" + for i in fields(cls): + if i.name == attr: + return i.default_factory() + + raise AttributeError(f'Expected attribute "{attr}" does not exist') + + +# pydantic models +class DocumentOptions(BaseModel): + documents: List[UUID] = Field( + description="List of document IDs", + ) + batch_size: int = Field( 
+ default=1, + description=( + "Number of documents to send to OpenAI in a single " + "transaction. See the `file_ids` parameter in the " + "vector store [create batch](https://platform.openai.com/docs/api-reference/vector-stores-file-batches/createBatch)." + ), + ) + + def model_post_init(self, __context: Any): + self.documents = list(set(self.documents)) + + def __call__(self, crud: DocumentCrud): + (start, stop) = (0, self.batch_size) + while True: + view = self.documents[start:stop] + if not view: + break + yield crud.read_each(view) + start = stop + stop += self.batch_size + + +class AssistantOptions(BaseModel): + # Fields to be passed along to OpenAI. They must be a subset of + # parameters accepted by the OpenAI.clien.beta.assistants.create + # API. + model: str = Field( + description=( + "OpenAI model to attach to this assistant. The model " + "must compatable with the assistants API; see the " + "OpenAI [model documentation](https://platform.openai.com/docs/models/compare) for more." + ), + ) + instructions: str = Field( + description=( + "Assistant instruction. Sometimes referred to as the " '"system" prompt.' + ), + ) + temperature: float = Field( + default=1e-6, + description=( + "Model temperature. The default is slightly " + "greater-than zero because it is [unknown how OpenAI " + "handles zero](https://community.openai.com/t/clarifications-on-setting-temperature-0/886447/5)." 
+ ), + ) + + +class CallbackRequest(BaseModel): + callback_url: Optional[HttpUrl] = Field( + default=None, + description="URL to call to report endpoint status", + ) + + +class CreationRequest( + DocumentOptions, + AssistantOptions, + CallbackRequest, +): + def extract_super_type(self, cls: "CreationRequest"): + for field_name in cls.__fields__.keys(): + field_value = getattr(self, field_name) + yield (field_name, field_value) + + +class DeletionRequest(CallbackRequest): + collection_id: UUID = Field("Collection to delete") diff --git a/backend/app/models/user.py b/backend/app/models/user.py index fa526ab5e..57336e72f 100644 --- a/backend/app/models/user.py +++ b/backend/app/models/user.py @@ -48,9 +48,7 @@ class UpdatePassword(SQLModel): class User(UserBase, table=True): id: int = Field(default=None, primary_key=True) hashed_password: str - collections: list["Collection"] = Relationship( - back_populates="owner", cascade_delete=True - ) + projects: list["ProjectUser"] = Relationship( back_populates="user", cascade_delete=True ) diff --git a/backend/app/services/collections/create_collection.py b/backend/app/services/collections/create_collection.py new file mode 100644 index 000000000..0fcaf3757 --- /dev/null +++ b/backend/app/services/collections/create_collection.py @@ -0,0 +1,164 @@ +import logging +import time +from uuid import UUID + +from sqlmodel import Session +from asgi_correlation_id import correlation_id + +from app.core.cloud import get_cloud_storage +from app.core.util import now +from app.core.db import engine +from app.crud import ( + DocumentCrud, + CollectionCrud, + DocumentCollectionCrud, +) +from app.crud.rag import OpenAIVectorStoreCrud, OpenAIAssistantCrud +from app.models import Collection +from app.models.collection import ( + CollectionStatus, + ResponsePayload, + CreationRequest, + AssistantOptions, +) +from app.services.collections.helpers import _backout, SilentCallback, WebHookCallback +from app.celery.utils import 
start_low_priority_job +from app.utils import get_openai_client + +logger = logging.getLogger(__name__) + + +def start_job( + db: Session, # kept for signature compatibility, even if unused here + request: dict, + collection: Collection, + project_id: int, + payload: dict, + organization_id: int, +) -> UUID: + trace_id = correlation_id.get() or "N/A" + + task_id = start_low_priority_job( + # keep the function path in sync with the worker entrypoint below + function_path="app.services.collections.create_collection.execute_job", + project_id=project_id, + job_id=collection.id, + trace_id=trace_id, + request=request, + payload_data=payload, + organization_id=organization_id, + ) + + logger.info( + "[start_job] Job scheduled to create collection | " + f"collection_id={collection.id}, project_id={project_id}, task_id={task_id}, job_id={collection.id}" + ) + return collection.id + + +def execute_job( + request: dict, + payload_data: dict, + project_id: int, + organization_id: int, + task_id: str, + job_id: UUID, + task_instance, +) -> None: + """ + Worker entrypoint scheduled by start_job. 
+ """ + start_time = time.time() + + with Session(engine) as session: + # Parse/validate incoming data + creation_request = CreationRequest(**request) + payload = ResponsePayload(**payload_data) + + collection_crud = CollectionCrud(session, project_id) + collection = collection_crud.read_one(job_id) + collection.task_id = task_id + collection_crud._update(collection) + + client = get_openai_client(session, organization_id, project_id) + + callback = ( + SilentCallback(payload) + if creation_request.callback_url is None + else WebHookCallback(creation_request.callback_url, payload) + ) + + storage = get_cloud_storage(session=session, project_id=project_id) + document_crud = DocumentCrud(session, project_id) + assistant_crud = OpenAIAssistantCrud(client) + vector_store_crud = OpenAIVectorStoreCrud(client) + + try: + vector_store = vector_store_crud.create() + + docs_batches = list(creation_request(document_crud)) + flat_docs = [doc for batch in docs_batches for doc in batch] + + file_exts = { + doc.fname.split(".")[-1] for doc in flat_docs if "." 
in doc.fname + } + file_sizes_kb = [ + storage.get_file_size_kb(doc.object_store_url) for doc in flat_docs + ] + + list(vector_store_crud.update(vector_store.id, storage, docs_batches)) + + assistant_options = dict( + creation_request.extract_super_type(AssistantOptions) + ) + assistant = assistant_crud.create(vector_store.id, **assistant_options) + + collection = collection_crud.read_one(collection.id) # refresh + collection.llm_service_id = assistant.id + collection.llm_service_name = creation_request.model + collection.status = CollectionStatus.successful + collection.updated_at = now() + + if flat_docs: + DocumentCollectionCrud(session).create(collection, flat_docs) + + collection_crud._update(collection) + + elapsed = time.time() - start_time + logger.info( + "[do_create_collection] Collection created: %s | Time: %.2fs | Files: %d | Sizes: %s KB | Types: %s", + collection.id, + elapsed, + len(flat_docs), + file_sizes_kb, + list(file_exts), + ) + + callback.success(collection.model_dump(mode="json")) + + except Exception as err: + logger.error( + "[do_create_collection] Collection Creation Failed | {'collection_id': '%s', 'error': '%s'}", + collection.id, + str(err), + exc_info=True, + ) + + if "assistant" in locals(): + _backout(assistant_crud, assistant.id) + + try: + collection = collection_crud.read_one(job_id) + collection.status = CollectionStatus.failed + collection.updated_at = now() + collection.error_message = str(err) + collection_crud._update(collection) + except Exception as suberr: + logger.warning( + "[do_create_collection] Failed to update collection status | " + "{'collection_id': '%s', 'reason': '%s'}", + collection.id, + str(suberr), + ) + + callback.fail(str(err)) diff --git a/backend/app/services/collections/delete_collection.py b/backend/app/services/collections/delete_collection.py new file mode 100644 index 000000000..2817d8926 --- /dev/null +++ b/backend/app/services/collections/delete_collection.py @@ -0,0 +1,104 @@ +import logging 
+from uuid import UUID + +from sqlmodel import Session +from asgi_correlation_id import correlation_id +from sqlalchemy.exc import SQLAlchemyError + +from app.core.db import engine +from app.crud import ( + CollectionCrud, +) +from app.crud.rag import OpenAIAssistantCrud +from app.models import Collection +from app.models.collection import ResponsePayload, DeletionRequest +from app.services.collections.helpers import SilentCallback, WebHookCallback +from app.celery.utils import start_low_priority_job +from app.utils import get_openai_client + +logger = logging.getLogger(__name__) + + +def start_job( + db: Session, + request: dict, + collection: Collection, + project_id: int, + payload: dict, + organization_id: int, +) -> UUID: + trace_id = correlation_id.get() or "N/A" + + task_id = start_low_priority_job( + function_path="app.services.collections.delete_collection.execute_job", + project_id=project_id, + job_id=collection.id, + trace_id=trace_id, + request=request, + payload_data=payload, + organization_id=organization_id, + ) + + logger.info( + "[start_job] Job scheduled to delete collection | " + f"collection_id={collection.id}, project_id={project_id}, task_id={task_id}, job_id={collection.id}" + ) + return collection.id + + +def execute_job( + request: dict, + payload_data: dict, + project_id: int, + organization_id: int, + task_id: str, + job_id: UUID, + task_instance, +) -> None: + deletion_request = DeletionRequest(**request) + payload = ResponsePayload(**payload_data) + + callback = ( + SilentCallback(payload) + if deletion_request.callback_url is None + else WebHookCallback(deletion_request.callback_url, payload) + ) + + with Session(engine) as session: + client = get_openai_client(session, organization_id, project_id) + assistant_crud = OpenAIAssistantCrud(client) + collection_crud = CollectionCrud(session, project_id) + + collection = collection_crud.read_one(job_id) + + collection.task_id = task_id + collection_crud._update(collection) + + try: + 
result = collection_crud.delete(collection, assistant_crud) + + logger.info( + "[do_delete_collection] Collection deleted successfully | {'collection_id': '%s'}", + str(collection.id), + ) + callback.success(result.model_dump(mode="json")) + + except (ValueError, PermissionError, SQLAlchemyError) as err: + logger.error( + "[do_delete_collection] Failed to delete collection | {'collection_id': '%s', 'error': '%s'}", + str(collection.id), + str(err), + exc_info=True, + ) + callback.fail(str(err)) + + except Exception as err: + logger.error( + "[do_delete_collection] Unexpected error during deletion | " + "{'collection_id': '%s', 'error': '%s', 'error_type': '%s'}", + str(collection.id), + str(err), + type(err).__name__, + exc_info=True, + ) + callback.fail(str(err)) diff --git a/backend/app/services/collections/helpers.py b/backend/app/services/collections/helpers.py new file mode 100644 index 000000000..c6c0c2b04 --- /dev/null +++ b/backend/app/services/collections/helpers.py @@ -0,0 +1,99 @@ +import logging +import json +import ast +import re +from dataclasses import asdict, replace + +from pydantic import HttpUrl +from openai import OpenAIError + +from app.core.util import post_callback +from app.models.collection import ResponsePayload +from app.crud.rag import OpenAIAssistantCrud +from app.utils import APIResponse + + +logger = logging.getLogger(__name__) + + +def extract_error_message(err: Exception) -> str: + err_str = str(err).strip() + + body = re.sub(r"^Error code:\s*\d+\s*-\s*", "", err_str) + message = None + try: + payload = json.loads(body) + if isinstance(payload, dict): + message = payload.get("error", {}).get("message") + except Exception: + pass + + if message is None: + try: + payload = ast.literal_eval(body) + if isinstance(payload, dict): + message = payload.get("error", {}).get("message") + except Exception: + pass + + if not message: + message = body + + return message.strip()[:1000] + + +class CallbackHandler: + def __init__(self, payload: 
ResponsePayload): + self.payload = payload + + def fail(self, body): + raise NotImplementedError() + + def success(self, body): + raise NotImplementedError() + + +class SilentCallback(CallbackHandler): + def fail(self, body): + logger.info(f"[SilentCallback.fail] Silent callback failure") + return + + def success(self, body): + logger.info(f"[SilentCallback.success] Silent callback success") + return + + +class WebHookCallback(CallbackHandler): + def __init__(self, url: HttpUrl, payload: ResponsePayload): + super().__init__(payload) + self.url = url + logger.info( + f"[WebHookCallback.init] Initialized webhook callback | {{'url': '{url}'}}" + ) + + def __call__(self, response: APIResponse, status: str): + time = ResponsePayload.now() + payload = replace(self.payload, status=status, time=time) + response.metadata = asdict(payload) + logger.info( + f"[WebHookCallback.call] Posting callback | {{'url': '{self.url}', 'status': '{status}'}}" + ) + post_callback(self.url, response) + + def fail(self, body): + logger.warning(f"[WebHookCallback.fail] Callback failed | {{'body': '{body}'}}") + self(APIResponse.failure_response(body), "incomplete") + + def success(self, body): + logger.info(f"[WebHookCallback.success] Callback succeeded") + self(APIResponse.success_response(body), "complete") + + +def _backout(crud: OpenAIAssistantCrud, assistant_id: str): + try: + crud.delete(assistant_id) + except OpenAIError as err: + logger.error( + f"[backout] Failed to delete assistant | {{'assistant_id': '{assistant_id}', 'error': '{str(err)}'}}", + exc_info=True, + ) From bc40a18687bbb05766b471d7e232d221c579212d Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Wed, 24 Sep 2025 14:05:58 +0530 Subject: [PATCH 19/44] Add flower dependency to pyproject.toml and uv.lock --- backend/pyproject.toml | 1 + backend/uv.lock | 55 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+) diff --git a/backend/pyproject.toml 
b/backend/pyproject.toml index 85f31cf5c..da158b34d 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -34,6 +34,7 @@ dependencies = [ "scikit-learn>=1.7.1", "celery>=5.3.0,<6.0.0", "redis>=5.0.0,<6.0.0", + "flower>=2.0.1", ] [tool.uv] diff --git a/backend/uv.lock b/backend/uv.lock index b6979ee53..c5f5ca013 100644 --- a/backend/uv.lock +++ b/backend/uv.lock @@ -176,6 +176,7 @@ dependencies = [ { name = "email-validator" }, { name = "emails" }, { name = "fastapi", extra = ["standard"] }, + { name = "flower" }, { name = "httpx" }, { name = "jinja2" }, { name = "langfuse" }, @@ -219,6 +220,7 @@ requires-dist = [ { name = "email-validator", specifier = ">=2.1.0.post1,<3.0.0.0" }, { name = "emails", specifier = ">=0.6,<1.0" }, { name = "fastapi", extras = ["standard"], specifier = ">=0.114.2,<1.0.0" }, + { name = "flower", specifier = ">=2.0.1" }, { name = "httpx", specifier = ">=0.25.1,<1.0.0" }, { name = "jinja2", specifier = ">=3.1.4,<4.0.0" }, { name = "langfuse", specifier = ">=2.60.3" }, @@ -761,6 +763,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163, upload-time = "2024-09-17T19:02:00.268Z" }, ] +[[package]] +name = "flower" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "celery" }, + { name = "humanize" }, + { name = "prometheus-client" }, + { name = "pytz" }, + { name = "tornado" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/a1/357f1b5d8946deafdcfdd604f51baae9de10aafa2908d0b7322597155f92/flower-2.0.1.tar.gz", hash = "sha256:5ab717b979530770c16afb48b50d2a98d23c3e9fe39851dcf6bc4d01845a02a0", size = 3220408, upload-time = "2023-08-13T14:37:46.073Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a6/ff/ee2f67c0ff146ec98b5df1df637b2bc2d17beeb05df9f427a67bd7a7d79c/flower-2.0.1-py2.py3-none-any.whl", hash = "sha256:9db2c621eeefbc844c8dd88be64aef61e84e2deb29b271e02ab2b5b9f01068e2", size = 383553, upload-time = "2023-08-13T14:37:41.552Z" }, +] + [[package]] name = "frozenlist" version = "1.7.0" @@ -983,6 +1001,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/39/7b/bb06b061991107cd8783f300adff3e7b7f284e330fd82f507f2a1417b11d/huggingface_hub-0.34.4-py3-none-any.whl", hash = "sha256:9b365d781739c93ff90c359844221beef048403f1bc1f1c123c191257c3c890a", size = 561452, upload-time = "2025-08-08T09:14:50.159Z" }, ] +[[package]] +name = "humanize" +version = "4.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/1d/3062fcc89ee05a715c0b9bfe6490c00c576314f27ffee3a704122c6fd259/humanize-4.13.0.tar.gz", hash = "sha256:78f79e68f76f0b04d711c4e55d32bebef5be387148862cb1ef83d2b58e7935a0", size = 81884, upload-time = "2025-08-25T09:39:20.04Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/c7/316e7ca04d26695ef0635dc81683d628350810eb8e9b2299fc08ba49f366/humanize-4.13.0-py3-none-any.whl", hash = "sha256:b810820b31891813b1673e8fec7f1ed3312061eab2f26e3fa192c393d11ed25f", size = 128869, upload-time = "2025-08-25T09:39:18.54Z" }, +] + [[package]] name = "identify" version = "2.6.1" @@ -1780,6 +1807,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b1/07/4e8d94f94c7d41ca5ddf8a9695ad87b888104e2fd41a35546c1dc9ca74ac/premailer-3.10.0-py2.py3-none-any.whl", hash = "sha256:021b8196364d7df96d04f9ade51b794d0b77bcc19e998321c515633a2273be1a", size = 19544, upload-time = "2021-08-02T20:32:52.771Z" }, ] +[[package]] +name = "prometheus-client" +version = "0.23.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/23/53/3edb5d68ecf6b38fcbcc1ad28391117d2a322d9a1a3eff04bfdb184d8c3b/prometheus_client-0.23.1.tar.gz", hash = "sha256:6ae8f9081eaaaf153a2e959d2e6c4f4fb57b12ef76c8c7980202f1e57b48b2ce", size = 80481, upload-time = "2025-09-18T20:47:25.043Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/db/14bafcb4af2139e046d03fd00dea7873e48eafe18b7d2797e73d6681f210/prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99", size = 61145, upload-time = "2025-09-18T20:47:23.875Z" }, +] + [[package]] name = "prompt-toolkit" version = "3.0.52" @@ -2721,6 +2757,25 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/41/f2/fd673d979185f5dcbac4be7d09461cbb99751554ffb6718d0013af8604cb/tokenizers-0.21.4-cp39-abi3-win_amd64.whl", hash = "sha256:475d807a5c3eb72c59ad9b5fcdb254f6e17f53dfcbb9903233b0dfa9c943b597", size = 2507568, upload-time = "2025-07-28T15:48:55.456Z" }, ] +[[package]] +name = "tornado" +version = "6.5.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/09/ce/1eb500eae19f4648281bb2186927bb062d2438c2e5093d1360391afd2f90/tornado-6.5.2.tar.gz", hash = "sha256:ab53c8f9a0fa351e2c0741284e06c7a45da86afb544133201c5cc8578eb076a0", size = 510821, upload-time = "2025-08-08T18:27:00.78Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/48/6a7529df2c9cc12efd2e8f5dd219516184d703b34c06786809670df5b3bd/tornado-6.5.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2436822940d37cde62771cff8774f4f00b3c8024fe482e16ca8387b8a2724db6", size = 442563, upload-time = "2025-08-08T18:26:42.945Z" }, + { url = "https://files.pythonhosted.org/packages/f2/b5/9b575a0ed3e50b00c40b08cbce82eb618229091d09f6d14bce80fc01cb0b/tornado-6.5.2-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:583a52c7aa94ee046854ba81d9ebb6c81ec0fd30386d96f7640c96dad45a03ef", size = 440729, upload-time = 
"2025-08-08T18:26:44.473Z" }, + { url = "https://files.pythonhosted.org/packages/1b/4e/619174f52b120efcf23633c817fd3fed867c30bff785e2cd5a53a70e483c/tornado-6.5.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0fe179f28d597deab2842b86ed4060deec7388f1fd9c1b4a41adf8af058907e", size = 444295, upload-time = "2025-08-08T18:26:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/95/fa/87b41709552bbd393c85dd18e4e3499dcd8983f66e7972926db8d96aa065/tornado-6.5.2-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b186e85d1e3536d69583d2298423744740986018e393d0321df7340e71898882", size = 443644, upload-time = "2025-08-08T18:26:47.625Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/fb15f06e33d7430ca89420283a8762a4e6b8025b800ea51796ab5e6d9559/tornado-6.5.2-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e792706668c87709709c18b353da1f7662317b563ff69f00bab83595940c7108", size = 443878, upload-time = "2025-08-08T18:26:50.599Z" }, + { url = "https://files.pythonhosted.org/packages/11/92/fe6d57da897776ad2e01e279170ea8ae726755b045fe5ac73b75357a5a3f/tornado-6.5.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:06ceb1300fd70cb20e43b1ad8aaee0266e69e7ced38fa910ad2e03285009ce7c", size = 444549, upload-time = "2025-08-08T18:26:51.864Z" }, + { url = "https://files.pythonhosted.org/packages/9b/02/c8f4f6c9204526daf3d760f4aa555a7a33ad0e60843eac025ccfd6ff4a93/tornado-6.5.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:74db443e0f5251be86cbf37929f84d8c20c27a355dd452a5cfa2aada0d001ec4", size = 443973, upload-time = "2025-08-08T18:26:53.625Z" }, + { url = "https://files.pythonhosted.org/packages/ae/2d/f5f5707b655ce2317190183868cd0f6822a1121b4baeae509ceb9590d0bd/tornado-6.5.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b5e735ab2889d7ed33b32a459cac490eda71a1ba6857b0118de476ab6c366c04", size = 443954, upload-time = 
"2025-08-08T18:26:55.072Z" }, + { url = "https://files.pythonhosted.org/packages/e8/59/593bd0f40f7355806bf6573b47b8c22f8e1374c9b6fd03114bd6b7a3dcfd/tornado-6.5.2-cp39-abi3-win32.whl", hash = "sha256:c6f29e94d9b37a95013bb669616352ddb82e3bfe8326fccee50583caebc8a5f0", size = 445023, upload-time = "2025-08-08T18:26:56.677Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/f609b420c2f564a748a2d80ebfb2ee02a73ca80223af712fca591386cafb/tornado-6.5.2-cp39-abi3-win_amd64.whl", hash = "sha256:e56a5af51cc30dd2cae649429af65ca2f6571da29504a07995175df14c18f35f", size = 445427, upload-time = "2025-08-08T18:26:57.91Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4f/e1f65e8f8c76d73658b33d33b81eed4322fb5085350e4328d5c956f0c8f9/tornado-6.5.2-cp39-abi3-win_arm64.whl", hash = "sha256:d6c33dc3672e3a1f3618eb63b7ef4683a7688e7b9e6e8f0d9aa5726360a004af", size = 444456, upload-time = "2025-08-08T18:26:59.207Z" }, +] + [[package]] name = "tqdm" version = "4.67.1" From f3f15657274007c3057ddb7776e1fad8ea645382 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Wed, 24 Sep 2025 16:57:01 +0530 Subject: [PATCH 20/44] Refactor job table migration and enhance error handling in job scheduling --- .../versions/c6fb6d0b5897_create_job_table.py | 24 ----------------- backend/app/services/response/jobs.py | 27 +++++++++++++------ .../response/test_generate_response.py | 2 -- .../services/response/test_jobs_response.py | 22 +++++++++++++-- 4 files changed, 39 insertions(+), 36 deletions(-) diff --git a/backend/app/alembic/versions/c6fb6d0b5897_create_job_table.py b/backend/app/alembic/versions/c6fb6d0b5897_create_job_table.py index 86f49a7e4..029bb1740 100644 --- a/backend/app/alembic/versions/c6fb6d0b5897_create_job_table.py +++ b/backend/app/alembic/versions/c6fb6d0b5897_create_job_table.py @@ -35,34 +35,10 @@ def upgrade(): sa.Column("updated_at", sa.DateTime(), nullable=False), sa.PrimaryKeyConstraint("id"), ) - op.drop_constraint( - 
"openai_conversation_project_id_fkey1", - "openai_conversation", - type_="foreignkey", - ) - op.drop_constraint( - "openai_conversation_organization_id_fkey1", - "openai_conversation", - type_="foreignkey", - ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_foreign_key( - "openai_conversation_organization_id_fkey1", - "openai_conversation", - "organization", - ["organization_id"], - ["id"], - ) - op.create_foreign_key( - "openai_conversation_project_id_fkey1", - "openai_conversation", - "project", - ["project_id"], - ["id"], - ) op.drop_table("job") # ### end Alembic commands ### diff --git a/backend/app/services/response/jobs.py b/backend/app/services/response/jobs.py index 55c347768..bb016c7c6 100644 --- a/backend/app/services/response/jobs.py +++ b/backend/app/services/response/jobs.py @@ -24,14 +24,25 @@ def start_job( job_crud = JobCrud(session=db) job = job_crud.create(job_type=JobType.RESPONSE, trace_id=trace_id) - task_id = start_high_priority_job( - function_path="app.services.response.jobs.execute_job", - project_id=project_id, - job_id=str(job.id), - trace_id=trace_id, - request_data=request.model_dump(), - organization_id=organization_id, - ) + try: + task_id = start_high_priority_job( + function_path="app.services.response.jobs.execute_job", + project_id=project_id, + job_id=str(job.id), + trace_id=trace_id, + request_data=request.model_dump(), + organization_id=organization_id, + ) + except Exception as e: + logger.error( + f"[start_job] Error starting Celery task : {str(e)} | job_id={job.id}, project_id={project_id}", + exc_info=True, + ) + job_update = JobUpdate(status=JobStatus.FAILED, error_message=str(e)) + job_crud.update(job_id=job.id, job_update=job_update) + raise HTTPException( + status_code=500, detail="Internal server error while generating response" + ) logger.info( f"[start_job] Job scheduled to generate response | job_id={job.id}, project_id={project_id}, 
task_id={task_id}" diff --git a/backend/app/tests/services/response/response/test_generate_response.py b/backend/app/tests/services/response/response/test_generate_response.py index 886c5019e..9c26466b0 100644 --- a/backend/app/tests/services/response/response/test_generate_response.py +++ b/backend/app/tests/services/response/response/test_generate_response.py @@ -26,8 +26,6 @@ def assistant_mock() -> Assistant: def test_generate_response_success(db: Session, assistant_mock: Assistant): """Test successful OpenAI response generation.""" - mock_response = MagicMock() - mock_client = MagicMock() request = ResponsesAPIRequest( diff --git a/backend/app/tests/services/response/test_jobs_response.py b/backend/app/tests/services/response/test_jobs_response.py index 6c8830a7f..1baf39ac1 100644 --- a/backend/app/tests/services/response/test_jobs_response.py +++ b/backend/app/tests/services/response/test_jobs_response.py @@ -1,8 +1,9 @@ import pytest from unittest.mock import patch -from sqlmodel import Session +from sqlmodel import Session, select +from fastapi import HTTPException from app.services.response.jobs import start_job -from app.models import ResponsesAPIRequest, JobType, JobStatus +from app.models import ResponsesAPIRequest, JobType, JobStatus, Job from app.crud import JobCrud from app.tests.utils.utils import get_project @@ -35,3 +36,20 @@ def test_start_job(db: Session): assert kwargs["organization_id"] == project.organization_id assert kwargs["job_id"] == str(job_id) assert kwargs["request_data"]["assistant_id"] == "assistant_123" + + +def test_start_job_celery_exception(db: Session): + """Test start_job when Celery task scheduling fails.""" + request = ResponsesAPIRequest( + assistant_id="assistant_123", + question="What is the capital of France?", + ) + project = get_project(db) + + with patch("app.services.response.jobs.start_high_priority_job") as mock_schedule: + mock_schedule.side_effect = Exception("Celery connection failed") + + with 
pytest.raises(HTTPException) as exc_info: + start_job(db, request, project.id, project.organization_id) + + assert exc_info.value.status_code == 500 From 7504ef749802dbd5a3438f67d785cbbc722f8ed7 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Wed, 24 Sep 2025 17:16:09 +0530 Subject: [PATCH 21/44] Add CALLBACK_TIMEOUT setting and update send_callback to use it --- backend/app/core/config.py | 1 + backend/app/utils.py | 16 +++++++++------- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/backend/app/core/config.py b/backend/app/core/config.py index 16f095647..c02b54c1e 100644 --- a/backend/app/core/config.py +++ b/backend/app/core/config.py @@ -117,6 +117,7 @@ def AWS_S3_BUCKET(self) -> str: CELERY_WORKER_PREFETCH_MULTIPLIER: int = 1 CELERY_ENABLE_UTC: bool = True CELERY_TIMEZONE: str = "UTC" + CALLBACK_TIMEOUT: tuple[int, int] = (3, 10) # seconds @computed_field # type: ignore[prop-decorator] @property diff --git a/backend/app/utils.py b/backend/app/utils.py index 87441b9f3..02be3165e 100644 --- a/backend/app/utils.py +++ b/backend/app/utils.py @@ -225,13 +225,15 @@ def handle_openai_error(e: openai.OpenAIError) -> str: def send_callback(callback_url: str, data: dict): """Send results to the callback URL (synchronously).""" try: - session = requests.Session() - # uncomment this to run locally without SSL - # session.verify = False - response = session.post(callback_url, json=data) - response.raise_for_status() - logger.info(f"[send_callback] Callback sent successfully to {callback_url}") - return True + with requests.Session() as session: + # uncomment this to run locally without SSL + # session.verify = False + response = session.post( + callback_url, json=data, timeout=settings.CALLBACK_TIMEOUT + ) + response.raise_for_status() + logger.info(f"[send_callback] Callback sent successfully to {callback_url}") + return True except requests.RequestException as e: logger.error(f"[send_callback] Callback failed: 
{str(e)}", exc_info=True) return False From 1268dfbb8a453674d78fa9ee1e373b529b854c07 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Wed, 24 Sep 2025 18:26:48 +0530 Subject: [PATCH 22/44] Add callback timeout settings and update send_callback function to use them --- .env.example | 5 +++++ .env.test.example | 4 ++++ backend/app/core/config.py | 5 ++++- backend/app/utils.py | 7 ++++++- 4 files changed, 19 insertions(+), 2 deletions(-) diff --git a/.env.example b/.env.example index 3fa7be346..b25b0f24e 100644 --- a/.env.example +++ b/.env.example @@ -73,3 +73,8 @@ CELERY_WORKER_PREFETCH_MULTIPLIER=1 CELERY_ENABLE_UTC=true # India Standard Time (UTC+05:30) CELERY_TIMEZONE=Asia/Kolkata + + +# Callback Timeouts (in seconds) +CALLBACK_CONNECT_TIMEOUT = 3 +CALLBACK_READ_TIMEOUT = 10 diff --git a/.env.test.example b/.env.test.example index 9065e4e68..f938561d9 100644 --- a/.env.test.example +++ b/.env.test.example @@ -28,3 +28,7 @@ AWS_ACCESS_KEY_ID=this_is_a_test_key AWS_SECRET_ACCESS_KEY=this_is_a_test_key AWS_DEFAULT_REGION=ap-south-1 AWS_S3_BUCKET_PREFIX="bucket-prefix-name" + +# Callback Timeouts (in seconds) +CALLBACK_CONNECT_TIMEOUT = 3 +CALLBACK_READ_TIMEOUT = 10 diff --git a/backend/app/core/config.py b/backend/app/core/config.py index c02b54c1e..515874af5 100644 --- a/backend/app/core/config.py +++ b/backend/app/core/config.py @@ -117,7 +117,10 @@ def AWS_S3_BUCKET(self) -> str: CELERY_WORKER_PREFETCH_MULTIPLIER: int = 1 CELERY_ENABLE_UTC: bool = True CELERY_TIMEZONE: str = "UTC" - CALLBACK_TIMEOUT: tuple[int, int] = (3, 10) # seconds + + # callback timeouts + CALLBACK_CONNECT_TIMEOUT: int = 3 + CALLBACK_READ_TIMEOUT: int = 10 @computed_field # type: ignore[prop-decorator] @property diff --git a/backend/app/utils.py b/backend/app/utils.py index 02be3165e..b8e922973 100644 --- a/backend/app/utils.py +++ b/backend/app/utils.py @@ -229,7 +229,12 @@ def send_callback(callback_url: str, data: dict): # uncomment this to run 
locally without SSL # session.verify = False response = session.post( - callback_url, json=data, timeout=settings.CALLBACK_TIMEOUT + callback_url, + json=data, + timeout=( + settings.CALLBACK_CONNECT_TIMEOUT, + settings.CALLBACK_READ_TIMEOUT, + ), ) response.raise_for_status() logger.info(f"[send_callback] Callback sent successfully to {callback_url}") From 6bc2a47ee2f2995a65940b16a4deff2cb42111d6 Mon Sep 17 00:00:00 2001 From: nishika26 Date: Thu, 25 Sep 2025 01:16:59 +0530 Subject: [PATCH 23/44] adding batching document in helper function, updating test cases for the shift from user id to project id --- backend/app/api/routes/collections.py | 12 +- backend/app/api/routes/documents.py | 4 +- backend/app/models/collection.py | 13 +- .../services/collections/create_collection.py | 23 ++-- .../services/collections/delete_collection.py | 18 ++- backend/app/services/collections/helpers.py | 25 ++++ .../collections/test_collection_info.py | 1 - .../collections/test_create_collections.py | 125 ++++++------------ .../test_crud_collection_create.py | 6 +- .../test_crud_collection_delete.py | 18 +-- .../test_crud_collection_read_all.py | 9 +- .../test_crud_collection_read_one.py | 10 +- backend/app/tests/utils/collection.py | 22 +-- 13 files changed, 124 insertions(+), 162 deletions(-) diff --git a/backend/app/api/routes/collections.py b/backend/app/api/routes/collections.py index 32eac4b23..a6374f817 100644 --- a/backend/app/api/routes/collections.py +++ b/backend/app/api/routes/collections.py @@ -1,6 +1,3 @@ -import re -import json -import ast import inspect import logging from uuid import UUID @@ -11,7 +8,7 @@ from fastapi import Path as FastPath -from app.api.deps import CurrentUser, SessionDep, CurrentUserOrgProject +from app.api.deps import SessionDep, CurrentUserOrgProject from app.crud import ( CollectionCrud, DocumentCollectionCrud, @@ -19,8 +16,8 @@ from app.models import Collection, DocumentPublic from app.models.collection import ( CollectionStatus, - 
CreationRequest, ResponsePayload, + CreationRequest, DeletionRequest, ) from app.utils import APIResponse, load_description, get_openai_client @@ -30,6 +27,7 @@ delete_collection as delete_services, ) + logger = logging.getLogger(__name__) router = APIRouter(prefix="/collections", tags=["collections"]) @@ -157,12 +155,12 @@ def list_collections( ) def collection_documents( session: SessionDep, - current_user: CurrentUser, + current_user: CurrentUserOrgProject, collection_id: UUID = FastPath(description="Collection to retrieve"), skip: int = Query(0, ge=0), limit: int = Query(100, gt=0, le=100), ): - collection_crud = CollectionCrud(session, current_user.id) + collection_crud = CollectionCrud(session, current_user.project_id) document_collection_crud = DocumentCollectionCrud(session) collection = collection_crud.read_one(collection_id) data = document_collection_crud.read(collection, skip, limit) diff --git a/backend/app/api/routes/documents.py b/backend/app/api/routes/documents.py index e95c0c9e8..8fad2a70c 100644 --- a/backend/app/api/routes/documents.py +++ b/backend/app/api/routes/documents.py @@ -165,7 +165,7 @@ def remove_doc( a_crud = OpenAIAssistantCrud(client) d_crud = DocumentCrud(session, current_user.project_id) - c_crud = CollectionCrud(session, current_user.id) + c_crud = CollectionCrud(session, current_user.project_id) document = d_crud.delete(doc_id) data = c_crud.delete(document, a_crud) @@ -190,7 +190,7 @@ def permanent_delete_doc( ) a_crud = OpenAIAssistantCrud(client) d_crud = DocumentCrud(session, current_user.project_id) - c_crud = CollectionCrud(session, current_user.id) + c_crud = CollectionCrud(session, current_user.project_id) storage = get_cloud_storage(session=session, project_id=current_user.project_id) document = d_crud.read_one(doc_id) diff --git a/backend/app/models/collection.py b/backend/app/models/collection.py index cb43be72b..2ba66e16b 100644 --- a/backend/app/models/collection.py +++ b/backend/app/models/collection.py @@ -11,7 
+11,6 @@ from .organization import Organization from .project import Project from app.core.util import now -from app.crud.document import DocumentCrud class CollectionStatus(str, enum.Enum): @@ -67,7 +66,7 @@ def now(cls): raise AttributeError(f'Expected attribute "{attr}" does not exist') -# pydantic models +# pydantic models - class DocumentOptions(BaseModel): documents: List[UUID] = Field( description="List of document IDs", @@ -84,16 +83,6 @@ class DocumentOptions(BaseModel): def model_post_init(self, __context: Any): self.documents = list(set(self.documents)) - def __call__(self, crud: DocumentCrud): - (start, stop) = (0, self.batch_size) - while True: - view = self.documents[start:stop] - if not view: - break - yield crud.read_each(view) - start = stop - stop += self.batch_size - class AssistantOptions(BaseModel): # Fields to be passed along to OpenAI. They must be a subset of diff --git a/backend/app/services/collections/create_collection.py b/backend/app/services/collections/create_collection.py index 0fcaf3757..72a3579c5 100644 --- a/backend/app/services/collections/create_collection.py +++ b/backend/app/services/collections/create_collection.py @@ -21,7 +21,12 @@ CreationRequest, AssistantOptions, ) -from app.services.collections.helpers import _backout, SilentCallback, WebHookCallback +from app.services.collections.helpers import ( + _backout, + batch_documents, + SilentCallback, + WebHookCallback, +) from app.celery.utils import start_low_priority_job from app.utils import get_openai_client @@ -29,7 +34,7 @@ def start_job( - db: Session, # kept for signature compatibility, even if unused here + db: Session, request: dict, collection: Collection, project_id: int, @@ -39,7 +44,6 @@ def start_job( trace_id = correlation_id.get() or "N/A" task_id = start_low_priority_job( - # keep the function path in sync with the worker entrypoint below function_path="app.services.collections.create_collection.execute_job", project_id=project_id, job_id=collection.id, @@ 
-50,7 +54,7 @@ def start_job( ) logger.info( - "[start_job] Job scheduled to create collection | " + "[create_collection.start_job] Job scheduled to create collection | " f"collection_id={collection.id}, project_id={project_id}, task_id={task_id}, job_id={collection.id}" ) return collection.id @@ -71,7 +75,6 @@ def execute_job( start_time = time.time() with Session(engine) as session: - # Parse/validate incoming data creation_request = CreationRequest(**request) payload = ResponsePayload(**payload_data) @@ -96,7 +99,9 @@ def execute_job( try: vector_store = vector_store_crud.create() - docs_batches = list(creation_request(document_crud)) + docs_batches = batch_documents( + document_crud, creation_request.documents, creation_request.batch_size + ) flat_docs = [doc for batch in docs_batches for doc in batch] file_exts = { @@ -126,7 +131,7 @@ def execute_job( elapsed = time.time() - start_time logger.info( - "[do_create_collection] Collection created: %s | Time: %.2fs | Files: %d | Sizes: %s KB | Types: %s", + "[create_collection.execute_job] Collection created: %s | Time: %.2fs | Files: %d | Sizes: %s KB | Types: %s", collection.id, elapsed, len(flat_docs), @@ -138,7 +143,7 @@ def execute_job( except Exception as err: logger.error( - "[do_create_collection] Collection Creation Failed | {'collection_id': '%s', 'error': '%s'}", + "[create_collection.execute_job] Collection Creation Failed | {'collection_id': '%s', 'error': '%s'}", collection.id, str(err), exc_info=True, @@ -155,7 +160,7 @@ def execute_job( collection_crud._update(collection) except Exception as suberr: logger.warning( - "[do_create_collection] Failed to update collection status | " + "[create_collection.execute_job] Failed to update collection status | " "{'collection_id': '%s', 'reason': '%s'}", collection.id, str(suberr), diff --git a/backend/app/services/collections/delete_collection.py b/backend/app/services/collections/delete_collection.py index 2817d8926..a165233b8 100644 --- 
a/backend/app/services/collections/delete_collection.py +++ b/backend/app/services/collections/delete_collection.py @@ -10,12 +10,16 @@ CollectionCrud, ) from app.crud.rag import OpenAIAssistantCrud -from app.models import Collection -from app.models.collection import ResponsePayload, DeletionRequest -from app.services.collections.helpers import SilentCallback, WebHookCallback +from app.models.collection import Collection, DeletionRequest +from app.services.collections.helpers import ( + SilentCallback, + WebHookCallback, + ResponsePayload, +) from app.celery.utils import start_low_priority_job from app.utils import get_openai_client + logger = logging.getLogger(__name__) @@ -40,7 +44,7 @@ def start_job( ) logger.info( - "[start_job] Job scheduled to delete collection | " + "[delete_collection.start_job] Job scheduled to delete collection | " f"collection_id={collection.id}, project_id={project_id}, task_id={task_id}, job_id={collection.id}" ) return collection.id @@ -78,14 +82,14 @@ def execute_job( result = collection_crud.delete(collection, assistant_crud) logger.info( - "[do_delete_collection] Collection deleted successfully | {'collection_id': '%s'}", + "[delete_collection.execute_job] Collection deleted successfully | {'collection_id': '%s'}", str(collection.id), ) callback.success(result.model_dump(mode="json")) except (ValueError, PermissionError, SQLAlchemyError) as err: logger.error( - "[do_delete_collection] Failed to delete collection | {'collection_id': '%s', 'error': '%s'}", + "[delete_collection.execute_job] Failed to delete collection | {'collection_id': '%s', 'error': '%s'}", str(collection.id), str(err), exc_info=True, @@ -94,7 +98,7 @@ def execute_job( except Exception as err: logger.error( - "[do_delete_collection] Unexpected error during deletion | " + "[delete_collection.execute_job] Unexpected error during deletion | " "{'collection_id': '%s', 'error': '%s', 'error_type': '%s'}", str(collection.id), str(err), diff --git 
a/backend/app/services/collections/helpers.py b/backend/app/services/collections/helpers.py index c6c0c2b04..d6ee102ca 100644 --- a/backend/app/services/collections/helpers.py +++ b/backend/app/services/collections/helpers.py @@ -2,12 +2,15 @@ import json import ast import re +from uuid import UUID +from typing import List from dataclasses import asdict, replace from pydantic import HttpUrl from openai import OpenAIError from app.core.util import post_callback +from app.crud.document import DocumentCrud from app.models.collection import ResponsePayload from app.crud.rag import OpenAIAssistantCrud from app.utils import APIResponse @@ -16,6 +19,7 @@ logger = logging.getLogger(__name__) +# function to extract cleaned up error message from the error body for the user - def extract_error_message(err: Exception) -> str: err_str = str(err).strip() @@ -42,6 +46,27 @@ def extract_error_message(err: Exception) -> str: return message.strip()[:1000] +# batching the documents according to the given batch size +def batch_documents( + document_crud: DocumentCrud, documents: List[UUID], batch_size: int +): + logger.info( + f"[batch_documents] Starting batch iteration for documents | {{'batch_size': {batch_size}, 'total_documents': {len(documents)}}}" + ) + docs_batches = [] + start, stop = 0, batch_size + while True: + view = documents[start:stop] + if not view: + break + batch_docs = document_crud.read_each(view) + docs_batches.append(batch_docs) + start = stop + stop += batch_size + return docs_batches + + +# functions related to callback handling - class CallbackHandler: def __init__(self, payload: ResponsePayload): self.payload = payload diff --git a/backend/app/tests/api/routes/collections/test_collection_info.py b/backend/app/tests/api/routes/collections/test_collection_info.py index 5747f7905..787ed029f 100644 --- a/backend/app/tests/api/routes/collections/test_collection_info.py +++ b/backend/app/tests/api/routes/collections/test_collection_info.py @@ -17,7 +17,6 @@ def 
create_collection( now = datetime.now(timezone.utc) collection = Collection( id=uuid4(), - owner_id=user.user_id, organization_id=user.organization_id, project_id=user.project_id, status=status, diff --git a/backend/app/tests/api/routes/collections/test_create_collections.py b/backend/app/tests/api/routes/collections/test_create_collections.py index 22764df4d..dd762228c 100644 --- a/backend/app/tests/api/routes/collections/test_create_collections.py +++ b/backend/app/tests/api/routes/collections/test_create_collections.py @@ -1,97 +1,46 @@ -import pytest from uuid import UUID -import io -from sqlmodel import Session from fastapi.testclient import TestClient from unittest.mock import patch -from app.models import APIKeyPublic -from app.core.config import settings -from app.tests.utils.document import DocumentStore -from app.tests.utils.utils import get_user_from_api_key -from app.crud.collection import CollectionCrud -from app.models.collection import CollectionStatus -from app.tests.utils.openai import get_mock_openai_client_with_vector_store - - -@pytest.fixture(autouse=True) -def mock_s3(monkeypatch): - class FakeStorage: - def __init__(self, *args, **kwargs): - pass - - def upload(self, file_obj, path: str, **kwargs): - return f"s3://fake-bucket/{path or 'mock-file.txt'}" - - def stream(self, file_obj): - fake_file = io.BytesIO(b"dummy content") - fake_file.name = "fake.txt" - return fake_file - - def get_file_size_kb(self, url: str) -> float: - return 1.0 - - class FakeS3Client: - def head_object(self, Bucket, Key): - return {"ContentLength": 1024} - - monkeypatch.setattr("app.api.routes.collections.get_cloud_storage", FakeStorage) - monkeypatch.setattr("boto3.client", lambda service: FakeS3Client()) - - -class TestCollectionRouteCreate: - _n_documents = 5 - - @patch("app.api.routes.collections.get_openai_client") - def test_create_collection_success( - self, - mock_get_openai_client, - client: TestClient, - db: Session, - user_api_key: APIKeyPublic, - ): - 
store = DocumentStore(db, project_id=user_api_key.project_id) - documents = store.fill(self._n_documents) - doc_ids = [str(doc.id) for doc in documents] - - body = { - "documents": doc_ids, - "batch_size": 2, - "model": "gpt-4o", - "instructions": "Test collection assistant.", - "temperature": 0.1, - } - - headers = {"X-API-KEY": user_api_key.key} - - mock_openai_client = get_mock_openai_client_with_vector_store() - mock_get_openai_client.return_value = mock_openai_client - - response = client.post( - f"{settings.API_V1_STR}/collections/create", json=body, headers=headers +from app.models.collection import Collection, CollectionStatus, CreationRequest + + +def test_collection_creation_success( + client: TestClient, user_api_key_header: dict[str, str] +): + with patch( + "app.api.routes.collections.create_services.start_job" + ) as mock_job_start: + creation_data = CreationRequest( + model="gpt-4o", + instructions="string", + temperature=0.000001, + documents=[UUID("f3e86a17-1e6f-41ec-b020-5b08eebef928")], + batch_size=1, + callback_url=None, ) - assert response.status_code == 200 - json = response.json() - assert json["success"] is True - metadata = json.get("metadata", {}) - assert metadata["status"] == CollectionStatus.processing.value - assert UUID(metadata["key"]) - - # Confirm collection metadata in DB - collection_id = UUID(metadata["key"]) - user = get_user_from_api_key(db, headers) - collection = CollectionCrud(db, user.user_id).read_one(collection_id) - - info_response = client.post( - f"{settings.API_V1_STR}/collections/info/{collection_id}", - headers=headers, + api_response = client.post( + "/api/v1/collections/create", + json=creation_data.model_dump(mode="json"), + headers=user_api_key_header, ) - assert info_response.status_code == 200 - info_data = info_response.json()["data"] - assert collection.status == CollectionStatus.successful.value - assert collection.owner_id == user.user_id - assert collection.llm_service_id is not None - assert 
collection.llm_service_name == "gpt-4o" + assert api_response.status_code == 200 + response_body = api_response.json() + + assert response_body["success"] is True + assert response_body["metadata"]["status"] == "processing" + assert response_body["metadata"]["key"] is not None + assert UUID(response_body["metadata"]["key"]) # Verify UUID format + assert response_body["data"] is None + + mock_job_start.assert_called_once() + job_args = mock_job_start.call_args[1] + assert job_args["request"] == creation_data.model_dump() + assert job_args["payload"]["status"] == "processing" + assert isinstance(job_args["collection"], Collection) + assert job_args["collection"].status == CollectionStatus.processing + assert job_args["project_id"] == job_args["collection"].project_id + assert job_args["organization_id"] == job_args["collection"].organization_id diff --git a/backend/app/tests/crud/collections/test_crud_collection_create.py b/backend/app/tests/crud/collections/test_crud_collection_create.py index 53293d28c..925f595e8 100644 --- a/backend/app/tests/crud/collections/test_crud_collection_create.py +++ b/backend/app/tests/crud/collections/test_crud_collection_create.py @@ -4,6 +4,7 @@ from app.crud import CollectionCrud from app.models import DocumentCollection from app.tests.utils.document import DocumentStore +from app.tests.utils.utils import get_project from app.tests.utils.collection import get_collection @@ -12,11 +13,12 @@ class TestCollectionCreate: @openai_responses.mock() def test_create_associates_documents(self, db: Session): - collection = get_collection(db) + project = get_project(db) + collection = get_collection(db, project_id=project.id) store = DocumentStore(db, project_id=collection.project_id) documents = store.fill(self._n_documents) - crud = CollectionCrud(db, collection.owner_id) + crud = CollectionCrud(db, collection.project_id) collection = crud.create(collection, documents) statement = select(DocumentCollection).where( diff --git 
a/backend/app/tests/crud/collections/test_crud_collection_delete.py b/backend/app/tests/crud/collections/test_crud_collection_delete.py index 0a01588ba..104704d92 100644 --- a/backend/app/tests/crud/collections/test_crud_collection_delete.py +++ b/backend/app/tests/crud/collections/test_crud_collection_delete.py @@ -17,12 +17,13 @@ class TestCollectionDelete: @openai_responses.mock() def test_delete_marks_deleted(self, db: Session): + project = get_project(db) client = OpenAI(api_key="sk-test-key") assistant = OpenAIAssistantCrud(client) - collection = get_collection(db, client) + collection = get_collection(db, client, project_id=project.id) - crud = CollectionCrud(db, collection.owner_id) + crud = CollectionCrud(db, collection.project_id) collection_ = crud.delete(collection, assistant) assert collection_.deleted_at is not None @@ -32,9 +33,10 @@ def test_delete_follows_insert(self, db: Session): client = OpenAI(api_key="sk-test-key") assistant = OpenAIAssistantCrud(client) - collection = get_collection(db, client) + project = get_project(db) + collection = get_collection(db, project_id=project.id) - crud = CollectionCrud(db, collection.owner_id) + crud = CollectionCrud(db, collection.project_id) collection_ = crud.delete(collection, assistant) assert collection_.created_at <= collection_.deleted_at @@ -44,7 +46,8 @@ def test_cannot_delete_others_collections(self, db: Session): client = OpenAI(api_key="sk-test-key") assistant = OpenAIAssistantCrud(client) - collection = get_collection(db, client) + project = get_project(db) + collection = get_collection(db, project_id=project.id) c_id = uuid_increment(collection.id) crud = CollectionCrud(db, c_id) @@ -61,13 +64,12 @@ def test_delete_document_deletes_collections(self, db: Session): APIKey.project_id == project.id, APIKey.is_deleted == False ) api_key = db.exec(stmt).first() - owner_id = api_key.user_id client = OpenAI(api_key="sk-test-key") resources = [] for _ in range(self._n_collections): - coll = 
get_collection(db, client, owner_id=owner_id) - crud = CollectionCrud(db, owner_id=owner_id) + coll = get_collection(db, client, project_id=project.id) + crud = CollectionCrud(db, project_id=project.id) collection = crud.create(coll, documents) resources.append((crud, collection)) diff --git a/backend/app/tests/crud/collections/test_crud_collection_read_all.py b/backend/app/tests/crud/collections/test_crud_collection_read_all.py index f8cc82fb4..d1f329a2a 100644 --- a/backend/app/tests/crud/collections/test_crud_collection_read_all.py +++ b/backend/app/tests/crud/collections/test_crud_collection_read_all.py @@ -6,24 +6,25 @@ from app.crud import CollectionCrud from app.models import Collection from app.tests.utils.document import DocumentStore +from app.tests.utils.utils import get_project from app.tests.utils.collection import get_collection def create_collections(db: Session, n: int): crud = None - + project = get_project(db) openai_mock = OpenAIMock() with openai_mock.router: client = OpenAI(api_key="sk-test-key") for _ in range(n): - collection = get_collection(db, client) + collection = get_collection(db, client, project_id=project.id) store = DocumentStore(db, project_id=collection.project_id) documents = store.fill(1) if crud is None: - crud = CollectionCrud(db, collection.owner_id) + crud = CollectionCrud(db, collection.project_id) crud.create(collection, documents) - return crud.owner_id + return crud.project_id @pytest.fixture(scope="class") diff --git a/backend/app/tests/crud/collections/test_crud_collection_read_one.py b/backend/app/tests/crud/collections/test_crud_collection_read_one.py index 388a68ad7..63829eb70 100644 --- a/backend/app/tests/crud/collections/test_crud_collection_read_one.py +++ b/backend/app/tests/crud/collections/test_crud_collection_read_one.py @@ -7,17 +7,19 @@ from app.crud import CollectionCrud from app.core.config import settings from app.tests.utils.document import DocumentStore +from app.tests.utils.utils import get_project 
from app.tests.utils.collection import get_collection, uuid_increment def mk_collection(db: Session): openai_mock = OpenAIMock() + project = get_project(db) with openai_mock.router: client = OpenAI(api_key="sk-test-key") - collection = get_collection(db, client) + collection = get_collection(db, client, project_id=project.id) store = DocumentStore(db, project_id=collection.project_id) documents = store.fill(1) - crud = CollectionCrud(db, collection.owner_id) + crud = CollectionCrud(db, collection.project_id) return crud.create(collection, documents) @@ -25,14 +27,14 @@ class TestDatabaseReadOne: def test_can_select_valid_id(self, db: Session): collection = mk_collection(db) - crud = CollectionCrud(db, collection.owner_id) + crud = CollectionCrud(db, collection.project_id) result = crud.read_one(collection.id) assert result.id == collection.id def test_cannot_select_others_collections(self, db: Session): collection = mk_collection(db) - other = collection.owner_id + 1 + other = collection.project_id + 1 crud = CollectionCrud(db, other) with pytest.raises(NoResultFound): crud.read_one(collection.id) diff --git a/backend/app/tests/utils/collection.py b/backend/app/tests/utils/collection.py index b2d3ae945..e025f908b 100644 --- a/backend/app/tests/utils/collection.py +++ b/backend/app/tests/utils/collection.py @@ -6,7 +6,7 @@ from app.core.config import settings from app.models import Collection, Organization, Project -from app.tests.utils.utils import get_user_id_by_email +from app.tests.utils.utils import get_user_id_by_email, get_project from app.tests.utils.test_data import create_test_project from app.crud import create_api_key @@ -21,21 +21,8 @@ def uuid_increment(value: UUID): return UUID(int=inc) -def get_collection(db: Session, client=None, owner_id: int = None) -> Collection: - if owner_id is None: - owner_id = get_user_id_by_email(db) - - # Step 1: Create real organization and project entries - project = create_test_project(db) - - # Step 2: Create API key 
for user with valid foreign keys - create_api_key( - db, - organization_id=project.organization_id, - user_id=owner_id, - project_id=project.id, - ) - +def get_collection(db: Session, client=None, project_id: int = None) -> Collection: + project = get_project(db) if client is None: client = OpenAI(api_key="test_api_key") @@ -47,9 +34,8 @@ def get_collection(db: Session, client=None, owner_id: int = None) -> Collection ) return Collection( - owner_id=owner_id, organization_id=project.organization_id, - project_id=project.id, + project_id=project_id, llm_service_id=assistant.id, llm_service_name=constants.llm_service_name, ) From 65380f838654211507fdbbf551739d7710308739 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Thu, 25 Sep 2025 11:16:49 +0530 Subject: [PATCH 24/44] Remove unused response_chunks in CallbackResponse and update related functions --- backend/app/models/response.py | 1 - backend/app/services/response/response.py | 9 +++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/backend/app/models/response.py b/backend/app/models/response.py index 09291ec81..4de982de6 100644 --- a/backend/app/models/response.py +++ b/backend/app/models/response.py @@ -48,7 +48,6 @@ class CallbackResponse(SQLModel): status: str response_id: str message: str - chunks: list[FileResultChunk] diagnostics: Diagnostics | None = None class Config: diff --git a/backend/app/services/response/response.py b/backend/app/services/response/response.py index cf2db453a..ca23a951b 100644 --- a/backend/app/services/response/response.py +++ b/backend/app/services/response/response.py @@ -52,12 +52,10 @@ def get_file_search_results(response: Response) -> list[FileResultChunk]: def _build_callback_response(response: Response) -> CallbackResponse: """Build callback response with diagnostics and search results.""" - response_chunks = get_file_search_results(response) return CallbackResponse( status="success", response_id=response.id, 
message=response.output_text, - chunks=response_chunks, diagnostics=Diagnostics( input_tokens=response.usage.input_tokens, output_tokens=response.usage.output_tokens, @@ -93,9 +91,12 @@ def generate_response( try: tracer.start_trace( name="generate_response_async", - input={"question": request.question, "assistant_id": assistant.id}, + input={ + "question": request.question, + "assistant_id": assistant.assistant_id, + }, metadata={"callback_url": request.callback_url}, - tags=[assistant.id], + tags=[assistant.assistant_id], ) tracer.start_generation( name="openai_response", From db0c27676c229e47c9743e52bd4827d3abc90581 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Thu, 25 Sep 2025 11:41:49 +0530 Subject: [PATCH 25/44] Fix update_job test to assert failure status and correct error message --- backend/app/tests/crud/test_jobs.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/app/tests/crud/test_jobs.py b/backend/app/tests/crud/test_jobs.py index 98bf7305b..09a640665 100644 --- a/backend/app/tests/crud/test_jobs.py +++ b/backend/app/tests/crud/test_jobs.py @@ -42,11 +42,11 @@ def test_update_job(db: Session, dummy_jobs): crud = JobCrud(db) job = dummy_jobs[1] - update_data = JobUpdate(status=JobStatus.SUCCESS, error_message="All good now") + update_data = JobUpdate(status=JobStatus.FAILED, error_message="Errror occurred") updated_job = crud.update(job.id, update_data) - assert updated_job.status == JobStatus.SUCCESS - assert updated_job.error_message == "All good now" + assert updated_job.status == JobStatus.FAILED + assert updated_job.error_message == "Error occurred" assert updated_job.updated_at is not None assert updated_job.updated_at >= job.updated_at From 6fe72d64e8d33f45119c981fbdc75208d5a8ed61 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Thu, 25 Sep 2025 11:59:38 +0530 Subject: [PATCH 26/44] Refactor get_additional_data function to 
simplify exclusion logic for request metadata --- backend/app/services/response/callbacks.py | 20 ++++++++------------ backend/app/tests/crud/test_jobs.py | 2 +- 2 files changed, 9 insertions(+), 13 deletions(-) diff --git a/backend/app/services/response/callbacks.py b/backend/app/services/response/callbacks.py index 7919ed5db..fbf0af1f5 100644 --- a/backend/app/services/response/callbacks.py +++ b/backend/app/services/response/callbacks.py @@ -1,21 +1,17 @@ +from app.models import ResponsesAPIRequest, ResponsesSyncAPIRequest from app.utils import APIResponse, send_callback def get_additional_data(request: dict) -> dict: - async_exclude_keys = {"assistant_id", "callback_url", "response_id", "question"} - sync_exclude_keys = { - "model", - "instructions", - "vector_store_ids", - "max_num_results", - "temperature", - "response_id", - "question", - } + """ + Returns extra metadata included in the request payload + that is not part of the async or sync request models. + """ + if "assistant_id" in request: - exclude_keys = async_exclude_keys + exclude_keys = set(ResponsesAPIRequest.model_fields.keys()) else: - exclude_keys = sync_exclude_keys + exclude_keys = set(ResponsesSyncAPIRequest.model_fields.keys()) return {k: v for k, v in request.items() if k not in exclude_keys} diff --git a/backend/app/tests/crud/test_jobs.py b/backend/app/tests/crud/test_jobs.py index 09a640665..4c4aacede 100644 --- a/backend/app/tests/crud/test_jobs.py +++ b/backend/app/tests/crud/test_jobs.py @@ -42,7 +42,7 @@ def test_update_job(db: Session, dummy_jobs): crud = JobCrud(db) job = dummy_jobs[1] - update_data = JobUpdate(status=JobStatus.FAILED, error_message="Errror occurred") + update_data = JobUpdate(status=JobStatus.FAILED, error_message="Error occurred") updated_job = crud.update(job.id, update_data) assert updated_job.status == JobStatus.FAILED From eaf8202b96be27b95903e373e7117a77f42a2f81 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: 
Thu, 25 Sep 2025 12:04:01 +0530 Subject: [PATCH 27/44] Update job_type field description for clarity and consistency --- backend/app/models/job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/app/models/job.py b/backend/app/models/job.py index 8a4cdee22..4ddbd3b3e 100644 --- a/backend/app/models/job.py +++ b/backend/app/models/job.py @@ -37,7 +37,7 @@ class Job(SQLModel, table=True): default=JobStatus.PENDING, description="Current state of the job." ) job_type: JobType = Field( - description="Job type or classification (e.g., response job, ingestion job)." + description="Type of job being executed (e.g., response, ingestion)." ) created_at: datetime = Field(default_factory=now) updated_at: datetime = Field(default_factory=now) From fcbd764c5d18bdb5d94ec5cb8d3316d1466797e1 Mon Sep 17 00:00:00 2001 From: Aviraj <100823015+avirajsingh7@users.noreply.github.com> Date: Thu, 25 Sep 2025 12:10:30 +0530 Subject: [PATCH 28/44] Remove unused imports and add conditional previous_response_id in generate_response function --- backend/app/services/response/jobs.py | 3 --- backend/app/services/response/response.py | 3 ++- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/backend/app/services/response/jobs.py b/backend/app/services/response/jobs.py index bb016c7c6..cab0f0e83 100644 --- a/backend/app/services/response/jobs.py +++ b/backend/app/services/response/jobs.py @@ -3,12 +3,9 @@ from fastapi import HTTPException from sqlmodel import Session from asgi_correlation_id import correlation_id -from app.core.db import engine from app.crud import JobCrud from app.models import JobType, JobStatus, JobUpdate, ResponsesAPIRequest -from app.utils import APIResponse from app.celery.utils import start_high_priority_job -from app.api.routes.threads import send_callback from app.services.response.response import process_response from app.services.response.callbacks import send_response_callback diff --git a/backend/app/services/response/response.py 
b/backend/app/services/response/response.py index ca23a951b..681606406 100644 --- a/backend/app/services/response/response.py +++ b/backend/app/services/response/response.py @@ -106,11 +106,12 @@ def generate_response( params: dict = { "model": assistant.model, - "previous_response_id": ancestor_id, "instructions": assistant.instructions, "temperature": assistant.temperature, "input": [{"role": "user", "content": request.question}], } + if ancestor_id: + params["previous_response_id"] = ancestor_id if assistant.vector_store_ids: params["tools"] = [ From 4083b62c62e56b2534e38a6143a5d29f6ee3317d Mon Sep 17 00:00:00 2001 From: nishika26 Date: Thu, 25 Sep 2025 13:02:06 +0530 Subject: [PATCH 29/44] services test cases --- backend/app/services/collections/__init__.py | 0 .../collections/test_create_collection.py | 140 ++++++++++++++++++ .../collections/test_delete_collection.py | 132 +++++++++++++++++ 3 files changed, 272 insertions(+) create mode 100644 backend/app/services/collections/__init__.py create mode 100644 backend/app/tests/services/collections/test_create_collection.py create mode 100644 backend/app/tests/services/collections/test_delete_collection.py diff --git a/backend/app/services/collections/__init__.py b/backend/app/services/collections/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/app/tests/services/collections/test_create_collection.py b/backend/app/tests/services/collections/test_create_collection.py new file mode 100644 index 000000000..5215dbaaf --- /dev/null +++ b/backend/app/tests/services/collections/test_create_collection.py @@ -0,0 +1,140 @@ +import pytest +import os +from pathlib import Path +from urllib.parse import urlparse +from unittest.mock import patch +from uuid import UUID +from dataclasses import asdict + +from sqlmodel import Session +from moto import mock_aws + +from app.core.config import settings +from app.models.collection import ( + CreationRequest, + Collection, + ResponsePayload, + 
CollectionStatus, +) +from app.crud import CollectionCrud, DocumentCollectionCrud +from app.tests.utils.utils import get_project +from app.tests.utils.collection import get_collection +from app.tests.utils.document import DocumentStore +from app.tests.utils.openai import get_mock_openai_client_with_vector_store +from app.services.collections.create_collection import start_job, execute_job +from app.core.cloud import AmazonCloudStorageClient + + +@pytest.fixture(scope="function") +def aws_credentials(): + os.environ["AWS_ACCESS_KEY_ID"] = "testing" + os.environ["AWS_SECRET_ACCESS_KEY"] = "testing" + os.environ["AWS_SECURITY_TOKEN"] = "testing" + os.environ["AWS_SESSION_TOKEN"] = "testing" + os.environ["AWS_DEFAULT_REGION"] = settings.AWS_DEFAULT_REGION + + +def test_start_job(db: Session): + request = CreationRequest( + model="gpt-4o", + instructions="string", + temperature=0.000001, + documents=[UUID("f3e86a17-1e6f-41ec-b020-5b08eebef928")], + batch_size=1, + callback_url=None, + ) + project = get_project(db) + collection = Collection( + id=UUID("42be84e8-d1b0-4e93-8b26-ebb74034674b"), + project_id=project.id, + organization_id=project.organization_id, + status="PENDING", + ) + + with patch( + "app.services.collections.create_collection.start_low_priority_job" + ) as mock_schedule: + mock_schedule.return_value = "fake-task-id" + + job_id = start_job( + db, + request.model_dump(), + collection, + project.id, + {"some": "data"}, # payload + project.organization_id, + ) + + assert job_id == collection.id + + mock_schedule.assert_called_once() + _, kwargs = mock_schedule.call_args + assert ( + kwargs["function_path"] + == "app.services.collections.create_collection.execute_job" + ) + assert kwargs["project_id"] == project.id + assert kwargs["organization_id"] == project.organization_id + assert kwargs["job_id"] == collection.id + assert kwargs["request"] == request.model_dump() + assert kwargs["payload_data"] == {"some": "data"} + + 
+@pytest.mark.usefixtures("aws_credentials") +@mock_aws +@patch("app.services.collections.create_collection.get_openai_client") +def test_execute_job_success(mock_get_openai_client, db: Session): + project = get_project(db) + + aws = AmazonCloudStorageClient() + aws.create() + store = DocumentStore(db=db, project_id=project.id) + document = store.put() + s3_key = Path(urlparse(document.object_store_url).path).relative_to("/") + aws.client.put_object(Bucket=settings.AWS_S3_BUCKET, Key=str(s3_key), Body=b"test") + + sample_request = CreationRequest( + model="gpt-4o", + instructions="string", + temperature=0.000001, + documents=[document.id], + batch_size=1, + callback_url=None, + ) + sample_payload = ResponsePayload(status="pending", route="/test/route") + + mock_client = get_mock_openai_client_with_vector_store() + mock_get_openai_client.return_value = mock_client + + collection_obj = get_collection(db, client=mock_client, project_id=project.id) + + crud = CollectionCrud(db, project_id=project.id) + collection = crud.create(collection_obj) + + job_id = collection.id + task_id = "task-123" + + with patch("app.services.collections.create_collection.Session") as SessionCtor: + SessionCtor.return_value.__enter__.return_value = db + SessionCtor.return_value.__exit__.return_value = False + + execute_job( + request=sample_request.model_dump(), + payload_data=asdict(sample_payload), + project_id=collection.project_id, + organization_id=collection.organization_id, + task_id=task_id, + job_id=job_id, + task_instance=None, + ) + + updated = CollectionCrud(db, collection.project_id).read_one(job_id) + assert updated.task_id == task_id + assert updated.status == CollectionStatus.successful + assert updated.llm_service_id == "mock_assistant_id" + assert updated.llm_service_name == sample_request.model + assert updated.updated_at is not None + + docs = DocumentCollectionCrud(db).read(updated, skip=0, limit=10) + assert len(docs) == 1 + assert docs[0].fname == document.fname diff 
--git a/backend/app/tests/services/collections/test_delete_collection.py b/backend/app/tests/services/collections/test_delete_collection.py new file mode 100644 index 000000000..6c44b0b94 --- /dev/null +++ b/backend/app/tests/services/collections/test_delete_collection.py @@ -0,0 +1,132 @@ +from unittest.mock import patch +import pytest +import os +from uuid import uuid4 +from dataclasses import asdict +from pathlib import Path +from urllib.parse import urlparse + +from sqlmodel import Session +from moto import mock_aws + +from app.models.collection import ( + DeletionRequest, + Collection, + CollectionStatus, + ResponsePayload, +) +from app.tests.utils.utils import get_project +from app.crud import CollectionCrud +from app.core.config import settings +from app.tests.utils.collection import get_collection +from app.tests.utils.document import DocumentStore +from app.tests.utils.openai import get_mock_openai_client_with_vector_store +from app.services.collections.delete_collection import start_job, execute_job +from app.core.cloud import AmazonCloudStorageClient + + +@pytest.fixture(scope="function") +def aws_credentials(): + os.environ["AWS_ACCESS_KEY_ID"] = "testing" + os.environ["AWS_SECRET_ACCESS_KEY"] = "testing" + os.environ["AWS_SECURITY_TOKEN"] = "testing" + os.environ["AWS_SESSION_TOKEN"] = "testing" + os.environ["AWS_DEFAULT_REGION"] = settings.AWS_DEFAULT_REGION + + +def test_start_job(db: Session): + req = DeletionRequest(collection_id=str(uuid4())) + project = get_project(db) + + collection = Collection( + id=req.collection_id, + project_id=project.id, + organization_id=project.organization_id, + status=CollectionStatus.processing, + ) + + payload = {"status": "processing"} + + with patch( + "app.services.collections.delete_collection.start_low_priority_job" + ) as mock_schedule: + mock_schedule.return_value = "fake-task-id" + + job_id = start_job( + db=db, + request=req.model_dump(), + collection=collection, + project_id=project.id, + payload=payload, 
+ organization_id=project.organization_id, + ) + + assert job_id == collection.id + + mock_schedule.assert_called_once() + _, kwargs = mock_schedule.call_args + assert ( + kwargs["function_path"] + == "app.services.collections.delete_collection.execute_job" + ) + assert kwargs["project_id"] == project.id + assert kwargs["organization_id"] == project.organization_id + assert kwargs["job_id"] == collection.id + assert kwargs["request"] == req.model_dump() + assert kwargs["payload_data"] == payload + + +@pytest.mark.usefixtures("aws_credentials") +@mock_aws +@patch("app.services.collections.delete_collection.get_openai_client") +def test_execute_job_delete_success(mock_get_openai_client, db: Session): + project = get_project(db) + + aws = AmazonCloudStorageClient() + aws.create() + + store = DocumentStore(db=db, project_id=project.id) + document = store.put() + s3_key = Path(urlparse(document.object_store_url).path).relative_to("/") + aws.client.put_object(Bucket=settings.AWS_S3_BUCKET, Key=str(s3_key), Body=b"test") + + mock_client = get_mock_openai_client_with_vector_store() + mock_get_openai_client.return_value = mock_client + + collection_obj = get_collection(db, client=mock_client, project_id=project.id) + crud = CollectionCrud(db, project_id=project.id) + collection = crud.create(collection_obj, [document]) + db.flush() + db.commit() + + job_id = collection.id + task_id = "task-123" + req = DeletionRequest(collection_id=job_id) + payload = ResponsePayload(status="pending", route="/test/route") + + with patch( + "app.services.collections.delete_collection.Session" + ) as SessionCtor, patch( + "app.services.collections.delete_collection.OpenAIAssistantCrud" + ) as MockAssistantCrud: + SessionCtor.return_value.__enter__.return_value = db + SessionCtor.return_value.__exit__.return_value = False + + MockAssistantCrud.return_value.delete.return_value = None + + execute_job( + request=req.model_dump(), + payload_data=asdict(payload), + project_id=project.id, + 
organization_id=project.organization_id, + task_id=task_id, + job_id=job_id, + task_instance=None, + ) + + updated = CollectionCrud(db, project.id).read_one(job_id) + assert updated.task_id == task_id + assert updated.deleted_at is not None + + mock_get_openai_client.assert_called_once() + MockAssistantCrud.return_value.delete.assert_called_once() From 7f948fbd05a44e99d9ae990dfd991a196fbed340 Mon Sep 17 00:00:00 2001 From: nishika26 Date: Fri, 26 Sep 2025 09:48:54 +0530 Subject: [PATCH 30/44] fixing alembic head --- .../versions/96388ce20256_alter_collection_table_for_celery.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/app/alembic/versions/96388ce20256_alter_collection_table_for_celery.py b/backend/app/alembic/versions/96388ce20256_alter_collection_table_for_celery.py index ac778cdd5..60892ea51 100644 --- a/backend/app/alembic/versions/96388ce20256_alter_collection_table_for_celery.py +++ b/backend/app/alembic/versions/96388ce20256_alter_collection_table_for_celery.py @@ -12,7 +12,7 @@ # revision identifiers, used by Alembic. 
revision = "96388ce20256" -down_revision = "6ed6ed401847" +down_revision = "c6fb6d0b5897" branch_labels = None depends_on = None From e274c4589a1130ca92fd5d9469d49ac4b98c2e7c Mon Sep 17 00:00:00 2001 From: nishika26 Date: Wed, 1 Oct 2025 14:45:00 +0530 Subject: [PATCH 31/44] final push for collection jobs --- ...lection_jobs_table_altering_collections.py | 107 +++++++++ ...20256_alter_collection_table_for_celery.py | 42 ---- backend/app/api/docs/collections/info.md | 7 +- backend/app/api/docs/collections/job_info.md | 10 + backend/app/api/routes/collections.py | 58 +++-- backend/app/crud/__init__.py | 3 +- .../app/crud/{ => collection}/collection.py | 9 +- backend/app/crud/collection/collection_job.py | 87 +++++++ backend/app/models/__init__.py | 8 + backend/app/models/collection.py | 12 +- backend/app/models/collection_job.py | 80 +++++++ .../services/collections/create_collection.py | 202 +++++++++------- .../services/collections/delete_collection.py | 119 +++++---- .../collections/test_collection_info.py | 109 ++++++--- .../collections/test_create_collections.py | 35 +-- .../test_crud_collection_create.py | 0 .../test_crud_collection_delete.py | 2 +- .../test_crud_collection_read_all.py | 0 .../test_crud_collection_read_one.py | 0 .../crud/collections/test_collection_jobs.py | 142 +++++++++++ .../collections/test_create_collection.py | 130 ++++++---- .../collections/test_delete_collection.py | 225 ++++++++++++------ 22 files changed, 993 insertions(+), 394 deletions(-) create mode 100644 backend/app/alembic/versions/718dcc83f3b6_adding_collection_jobs_table_altering_collections.py delete mode 100644 backend/app/alembic/versions/96388ce20256_alter_collection_table_for_celery.py create mode 100644 backend/app/api/docs/collections/job_info.md rename backend/app/crud/{ => collection}/collection.py (93%) create mode 100644 backend/app/crud/collection/collection_job.py create mode 100644 backend/app/models/collection_job.py rename backend/app/tests/crud/collections/{ 
=> collection}/test_crud_collection_create.py (100%) rename backend/app/tests/crud/collections/{ => collection}/test_crud_collection_delete.py (97%) rename backend/app/tests/crud/collections/{ => collection}/test_crud_collection_read_all.py (100%) rename backend/app/tests/crud/collections/{ => collection}/test_crud_collection_read_one.py (100%) create mode 100644 backend/app/tests/crud/collections/test_collection_jobs.py diff --git a/backend/app/alembic/versions/718dcc83f3b6_adding_collection_jobs_table_altering_collections.py b/backend/app/alembic/versions/718dcc83f3b6_adding_collection_jobs_table_altering_collections.py new file mode 100644 index 000000000..2797bbcc1 --- /dev/null +++ b/backend/app/alembic/versions/718dcc83f3b6_adding_collection_jobs_table_altering_collections.py @@ -0,0 +1,107 @@ +"""adding collection jobs table and altering collection table + +Revision ID: 718dcc83f3b6 +Revises: c6fb6d0b5897 +Create Date: 2025-09-29 20:41:38.005505 + +""" +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision = "718dcc83f3b6" +down_revision = "c6fb6d0b5897" +branch_labels = None +depends_on = None + + +collection_job_status_enum = postgresql.ENUM( + "processing", + "successful", + "failed", + name="collectionjobstatus", + create_type=False, +) + +collection_action_type = postgresql.ENUM( + "create", + "delete", + name="collectionactiontype", + create_type=False, +) + + +def upgrade(): + collection_job_status_enum.create(op.get_bind(), checkfirst=True) + collection_action_type.create(op.get_bind(), checkfirst=True) + op.create_table( + "collection_jobs", + sa.Column("id", sa.Uuid(), nullable=False), + sa.Column("action_type", collection_action_type, nullable=False), + sa.Column("collection_id", sa.Uuid(), nullable=True), + sa.Column("task_id", sa.Uuid(), nullable=True), + sa.Column("project_id", sa.Integer(), nullable=False), + sa.Column("status", collection_job_status_enum, nullable=False), + sa.Column("error_message", sa.Text(), nullable=True), + sa.Column("inserted_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint( + ["collection_id"], ["collection.id"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.drop_constraint("collection_owner_id_fkey", "collection", type_="foreignkey") + op.drop_column("collection", "owner_id") + op.drop_column("collection", "status") + op.drop_column("collection", "error_message") + op.add_column("collection", sa.Column("inserted_at", sa.DateTime(), nullable=False)) + op.drop_column("collection", "created_at") + + +def downgrade(): + op.create_foreign_key( + "openai_conversation_project_id_fkey1", + "openai_conversation", + "project", + ["project_id"], + ["id"], + ) + op.create_foreign_key( + "openai_conversation_organization_id_fkey1", + "openai_conversation", + "organization", + ["organization_id"], + ["id"], + ) + op.add_column( + "collection", + 
sa.Column("error_message", sa.VARCHAR(), autoincrement=False, nullable=True), + ) + op.add_column( + "collection", + sa.Column( + "status", + postgresql.ENUM( + "processing", "successful", "failed", name="collectionstatus" + ), + server_default=sa.text("'processing'::collectionstatus"), + autoincrement=False, + nullable=False, + ), + ) + op.add_column( + "collection", + sa.Column("owner_id", sa.INTEGER(), autoincrement=False, nullable=False), + ) + op.create_foreign_key( + "collection_owner_id_fkey", + "collection", + "user", + ["owner_id"], + ["id"], + ondelete="CASCADE", + ) + op.drop_table("collection_jobs") diff --git a/backend/app/alembic/versions/96388ce20256_alter_collection_table_for_celery.py b/backend/app/alembic/versions/96388ce20256_alter_collection_table_for_celery.py deleted file mode 100644 index 60892ea51..000000000 --- a/backend/app/alembic/versions/96388ce20256_alter_collection_table_for_celery.py +++ /dev/null @@ -1,42 +0,0 @@ -"""alter collection table for celery - -Revision ID: 96388ce20256 -Revises: 6ed6ed401847 -Create Date: 2025-09-17 16:35:37.809812 - -""" -from alembic import op -import sqlalchemy as sa -import sqlmodel.sql.sqltypes - - -# revision identifiers, used by Alembic. 
-revision = "96388ce20256" -down_revision = "c6fb6d0b5897" -branch_labels = None -depends_on = None - - -def upgrade(): - op.drop_constraint("collection_owner_id_fkey", "collection", type_="foreignkey") - op.drop_column("collection", "owner_id") - op.add_column( - "collection", - sa.Column("task_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True), - ) - - -def downgrade(): - op.drop_column("collection", "task_id") - op.add_column( - "collection", - sa.Column("owner_id", sa.INTEGER(), autoincrement=False, nullable=False), - ) - op.create_foreign_key( - "collection_owner_id_fkey", - "collection", - "user", - ["owner_id"], - ["id"], - ondelete="CASCADE", - ) diff --git a/backend/app/api/docs/collections/info.md b/backend/app/api/docs/collections/info.md index 5fb0d7d8d..4fa32e2ea 100644 --- a/backend/app/api/docs/collections/info.md +++ b/backend/app/api/docs/collections/info.md @@ -1,5 +1,4 @@ -Retrieve all AI-platform information about a collection given its -ID. This route is very helpful for: +Retrieve detailed information about a specific collection by its ID from the collection table. Note that this endpoint CANNOT be used as a polling endpoint for collection creation because an entry will be made in the collection table only after the resource creation and association has been successful. -* Understanding whether a `create` request has finished -* Obtaining the OpenAI assistant ID (`llm_service_id`) +This endpoint returns metadata for the collection, including its project, organization, +timestamps, and associated LLM service details (`llm_service_id`). diff --git a/backend/app/api/docs/collections/job_info.md b/backend/app/api/docs/collections/job_info.md new file mode 100644 index 000000000..fe4ba5837 --- /dev/null +++ b/backend/app/api/docs/collections/job_info.md @@ -0,0 +1,10 @@ +Retrieve information about a collection job by the collection job ID. This endpoint can be considered the polling endpoint for collection creation job. 
This endpoint provides detailed status and metadata for a specific collection job +in the AI platform. It is especially useful for: + +* Fetching the collection job object containing the collection job ID, the job action type, the status of the job, as well as an error message if the job has failed. + +* Accessing associated collection details from the collection table when the job is successful, including: + - `llm_service_id`: The OpenAI assistant or model used for the collection. + - Collection metadata such as ID, project, organization, and timestamps. + +* Providing a simplified error message in the retrieved collection job object when a job has failed. diff --git a/backend/app/api/routes/collections.py b/backend/app/api/routes/collections.py index a6374f817..6816ac445 100644 --- a/backend/app/api/routes/collections.py +++ b/backend/app/api/routes/collections.py @@ -11,11 +11,11 @@ from app.api.deps import SessionDep, CurrentUserOrgProject from app.crud import ( CollectionCrud, + CollectionJobCrud, DocumentCollectionCrud, ) from app.models import Collection, DocumentPublic from app.models.collection import ( - CollectionStatus, ResponsePayload, CreationRequest, DeletionRequest, @@ -46,28 +46,18 @@ def create_collection( route = router.url_path_for(this.f_code.co_name) payload = ResponsePayload("processing", route) - collection = Collection( - id=UUID(payload.key), - organization_id=current_user.organization_id, - project_id=current_user.project_id, - status=CollectionStatus.processing, - ) - - collection_crud = CollectionCrud(session, current_user.project_id) - collection_crud.create(collection) - create_services.start_job( db=session, request=request.model_dump(), payload=asdict(payload), - collection=collection, + collection_job_id=UUID(payload.key), project_id=current_user.project_id, organization_id=current_user.organization_id, ) logger.info( f"[create_collection] Background task for collection creation scheduled | " - f"{{'collection_id': 
'{collection.id}'}}" + f"{{'collection_job_id': '{payload.key}'}}" ) return APIResponse.success_response(data=None, metadata=asdict(payload)) @@ -82,10 +72,6 @@ def delete_collection( request: DeletionRequest, background_tasks: BackgroundTasks, ): - client = get_openai_client( - session, current_user.organization_id, current_user.project_id - ) - collection_crud = CollectionCrud(session, current_user.project_id) collection = collection_crud.read_one(request.collection_id) @@ -109,10 +95,36 @@ def delete_collection( return APIResponse.success_response(data=None, metadata=asdict(payload)) -@router.post( +@router.get( + "/info/collection_job/{collection_job_id}", + description=load_description("collections/job_info.md"), + response_model=APIResponse, +) +def collection_job_info( + session: SessionDep, + current_user: CurrentUserOrgProject, + collection_job_id: UUID = FastPath(description="Collection job to retrieve"), +): + collection_job_crud = CollectionJobCrud(session, current_user.project_id) + collection_job = collection_job_crud.read_one(collection_job_id) + + if collection_job.status == "successful": + collection_crud = CollectionCrud(session, current_user.project_id) + collection = collection_crud.read_one(collection_job.collection_id) + return APIResponse.success_response(data=collection) + + if collection_job.status in ["processing", "failed"]: + err = getattr(collection_job, "error_message", None) + if err: + collection_job.error_message = extract_error_message(err) + + return APIResponse.success_response(data=collection_job) + + +@router.get( "/info/{collection_id}", description=load_description("collections/info.md"), - response_model=APIResponse[Collection], + response_model=APIResponse, ) def collection_info( session: SessionDep, @@ -120,13 +132,9 @@ def collection_info( collection_id: UUID = FastPath(description="Collection to retrieve"), ): collection_crud = CollectionCrud(session, current_user.project_id) - data = 
collection_crud.read_one(collection_id) - - err = getattr(data, "error_message", None) - if err: - data.error_message = extract_error_message(err) + collection = collection_crud.read_one(collection_id) - return APIResponse.success_response(data) + return APIResponse.success_response(collection) @router.post( diff --git a/backend/app/crud/__init__.py b/backend/app/crud/__init__.py index 43ef15565..d496645e7 100644 --- a/backend/app/crud/__init__.py +++ b/backend/app/crud/__init__.py @@ -4,11 +4,12 @@ get_user_by_email, update_user, ) -from .collection import CollectionCrud +from .collection.collection import CollectionCrud from .document import DocumentCrud from .document_collection import DocumentCollectionCrud from .doc_transformation_job import DocTransformationJobCrud +from .collection.collection_job import CollectionJobCrud from .jobs import JobCrud diff --git a/backend/app/crud/collection.py b/backend/app/crud/collection/collection.py similarity index 93% rename from backend/app/crud/collection.py rename to backend/app/crud/collection/collection.py index 0f5d8c09f..ce02744ee 100644 --- a/backend/app/crud/collection.py +++ b/backend/app/crud/collection/collection.py @@ -8,9 +8,8 @@ from app.models import Document, Collection, DocumentCollection from app.core.util import now -from app.models.collection import CollectionStatus -from .document_collection import DocumentCollectionCrud +from ..document_collection import DocumentCollectionCrud logger = logging.getLogger(__name__) @@ -68,10 +67,8 @@ def create( ): try: existing = self.read_one(collection.id) - if existing.status == CollectionStatus.failed: - self._update(collection) - else: - raise FileExistsError("Collection already present") + + raise FileExistsError("Collection already present") except: self.session.add(collection) self.session.commit() diff --git a/backend/app/crud/collection/collection_job.py b/backend/app/crud/collection/collection_job.py new file mode 100644 index 000000000..1a450d7fc --- 
/dev/null +++ b/backend/app/crud/collection/collection_job.py @@ -0,0 +1,87 @@ +from datetime import datetime +import logging + +from app.models.collection_job import CollectionJob, CollectionJobUpdate + + +from sqlmodel import Session, func, select, and_ + +logger = logging.getLogger(__name__) + + +class CollectionJobCrud: + def __init__(self, session: Session, project_id: int): + self.session = session + self.project_id = project_id + + def _update(self, collection_job: CollectionJobUpdate): + """Update an existing collection job.""" + if collection_job.project_id != self.project_id: + err = f"Invalid collection job ownership: owner_project={self.project_id} attempter={collection_job.project_id}" + try: + raise PermissionError(err) + except PermissionError as e: + logger.error( + f"[CollectionJobCrud._update] Permission error | {{'collection_job_id': '{collection_job.id}', 'error': '{str(e)}'}}", + exc_info=True, + ) + raise + + collection_job.updated_at = datetime.utcnow() + self.session.add(collection_job) + self.session.commit() + self.session.refresh(collection_job) + logger.info( + f"[CollectionJobCrud._update] Collection job updated successfully | {{'collection_job_id': '{collection_job.id}'}}" + ) + + return collection_job + + def create(self, collection_job: CollectionJob): + """Create a new collection job.""" + try: + self.session.add(collection_job) + self.session.commit() + self.session.refresh(collection_job) + logger.info( + f"[CollectionJobCrud.create] Collection job created successfully | {{'collection_job_id': '{collection_job.id}'}}" + ) + + except Exception as e: + logger.error( + f"[CollectionJobCrud.create] Error during job creation: {str(e)}", + exc_info=True, + ) + raise + + return collection_job + + def read_one(self, task_id: str) -> CollectionJob: + """Retrieve a single collection job by its task_id.""" + statement = select(CollectionJob).where( + and_( + CollectionJob.project_id == self.project_id, + CollectionJob.id == task_id, + ) + ) 
+ collection_job = self.session.exec(statement).one() + logger.info( + f"[CollectionJobCrud.read_one] Retrieved collection job | {{'task_id': '{task_id}'}}" + ) + return collection_job + + def read_all(self): + """Retrieve all collection jobs for a given project.""" + statement = select(CollectionJob).where( + and_( + CollectionJob.project_id == self.project_id, + CollectionJob.updated_at.isnot( + None + ), # Exclude any jobs that have been deleted + ) + ) + collection_jobs = self.session.exec(statement).all() + logger.info( + f"[CollectionJobCrud.read_all] Retrieved all collection jobs for project | {{'project_id': '{self.project_id}', 'count': {len(collection_jobs)}}}" + ) + return collection_jobs diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index 94c45ba3f..dbd14f2d8 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -104,3 +104,11 @@ ) from .onboarding import OnboardingRequest, OnboardingResponse + +from .collection_job import ( + CollectionActionType, + CollectionJob, + CollectionJobBase, + CollectionJobStatus, + CollectionJobUpdate, +) diff --git a/backend/app/models/collection.py b/backend/app/models/collection.py index 2ba66e16b..445911274 100644 --- a/backend/app/models/collection.py +++ b/backend/app/models/collection.py @@ -13,12 +13,6 @@ from app.core.util import now -class CollectionStatus(str, enum.Enum): - processing = "processing" - successful = "successful" - failed = "failed" - - class Collection(SQLModel, table=True): id: UUID = Field(default_factory=uuid4, primary_key=True) @@ -37,11 +31,7 @@ class Collection(SQLModel, table=True): llm_service_id: Optional[str] = Field(default=None, nullable=True) llm_service_name: Optional[str] = Field(default=None, nullable=True) - status: CollectionStatus = Field(default=CollectionStatus.processing) - error_message: Optional[str] = Field(default=None, nullable=True) - task_id: Optional[str] = Field(default=None, description="Celery task ID") - - 
created_at: datetime = Field(default_factory=now) + inserted_at: datetime = Field(default_factory=now) updated_at: datetime = Field(default_factory=now) deleted_at: Optional[datetime] = None diff --git a/backend/app/models/collection_job.py b/backend/app/models/collection_job.py new file mode 100644 index 000000000..0e219e4f3 --- /dev/null +++ b/backend/app/models/collection_job.py @@ -0,0 +1,80 @@ +from enum import Enum +from uuid import UUID +from datetime import datetime +from typing import Optional + +from sqlmodel import Field, SQLModel +from sqlalchemy import Column, Text + + +from app.core.util import now + + +class CollectionJobStatus(str, Enum): + processing = "processing" + successful = "successful" + failed = "failed" + + +class CollectionActionType(str, Enum): + create = "create" + delete = "delete" + + +class CollectionJobBase(SQLModel): + action_type: CollectionActionType = Field( + nullable=False, description="Type of operation" + ) + collection_id: UUID | None = Field( + foreign_key="collection.id", nullable=True, ondelete="CASCADE" + ) + project_id: int = Field( + foreign_key="project.id", nullable=False, ondelete="CASCADE" + ) + + +class CollectionJob(CollectionJobBase, table=True): + """Database model for tracking collection operations.""" + + __tablename__ = "collection_jobs" + + id: UUID = Field(primary_key=True) + + status: CollectionJobStatus = Field( + default=CollectionJobStatus.processing, + nullable=False, + description="Current job status", + ) + + task_id: UUID = Field(nullable=True) + + error_message: str | None = Field(sa_column=Column(Text, nullable=True)) + inserted_at: datetime = Field( + default_factory=now, + nullable=False, + description="When the job record was created", + ) + + updated_at: datetime = Field( + default_factory=now, + nullable=False, + description="Last time the job record was updated", + ) + + +class CollectionJobUpdate(SQLModel): + task_id: UUID | None = None + status: CollectionJobStatus + error_message: str | 
None = None + collection_id: UUID | None = None + + updated_at: datetime | None = None + + +class CollectionJobPublic(SQLModel): + collection_id: UUID | None = None + status: CollectionJobStatus + error_message: str | None = None + + inserted_at: datetime + updated_at: datetime diff --git a/backend/app/services/collections/create_collection.py b/backend/app/services/collections/create_collection.py index 72a3579c5..1737d2508 100644 --- a/backend/app/services/collections/create_collection.py +++ b/backend/app/services/collections/create_collection.py @@ -1,6 +1,6 @@ import logging import time -from uuid import UUID +from uuid import UUID, uuid4 from sqlmodel import Session from asgi_correlation_id import correlation_id @@ -9,14 +9,14 @@ from app.core.util import now from app.core.db import engine from app.crud import ( - DocumentCrud, CollectionCrud, + DocumentCrud, DocumentCollectionCrud, + CollectionJobCrud, ) from app.crud.rag import OpenAIVectorStoreCrud, OpenAIAssistantCrud -from app.models import Collection +from app.models import CollectionJobStatus, CollectionJob, Collection from app.models.collection import ( - CollectionStatus, ResponsePayload, CreationRequest, AssistantOptions, @@ -36,17 +36,27 @@ def start_job( db: Session, request: dict, - collection: Collection, project_id: int, payload: dict, + collection_job_id: str, organization_id: int, ) -> UUID: trace_id = correlation_id.get() or "N/A" + collection_job = CollectionJob( + id=collection_job_id, + action_type="create", + project_id=project_id, + status=CollectionJobStatus.processing, + ) + + job_crud = CollectionJobCrud(db, project_id) + collection_job = job_crud.create(collection_job) + task_id = start_low_priority_job( function_path="app.services.collections.create_collection.execute_job", project_id=project_id, - job_id=collection.id, + job_id=collection_job.id, trace_id=trace_id, request=request, payload_data=payload, @@ -55,9 +65,10 @@ def start_job( logger.info( "[create_collection.start_job] 
Job scheduled to create collection | " - f"collection_id={collection.id}, project_id={project_id}, task_id={task_id}, job_id={collection.id}" + f"collection_job_id={collection_job_id}, project_id={project_id}, task_id={task_id}" ) - return collection.id + + return collection_job.id def execute_job( @@ -74,96 +85,115 @@ def execute_job( """ start_time = time.time() - with Session(engine) as session: - creation_request = CreationRequest(**request) - payload = ResponsePayload(**payload_data) + try: + with Session(engine) as session: + creation_request = CreationRequest(**request) + payload = ResponsePayload(**payload_data) - collection_crud = CollectionCrud(session, project_id) - collection = collection_crud.read_one(job_id) - collection.task_id = task_id - collection_crud._update(collection) + collection_job_crud = CollectionJobCrud(session, project_id) + collection_job = collection_job_crud.read_one(job_id) + collection_job.task_id = task_id + collection_job_crud._update(collection_job) - client = get_openai_client(session, organization_id, project_id) + client = get_openai_client(session, organization_id, project_id) - callback = ( - SilentCallback(payload) - if creation_request.callback_url is None - else WebHookCallback(creation_request.callback_url, payload) - ) + callback = ( + SilentCallback(payload) + if creation_request.callback_url is None + else WebHookCallback(creation_request.callback_url, payload) + ) - storage = get_cloud_storage(session=session, project_id=project_id) - document_crud = DocumentCrud(session, project_id) - assistant_crud = OpenAIAssistantCrud(client) - vector_store_crud = OpenAIVectorStoreCrud(client) + storage = get_cloud_storage(session=session, project_id=project_id) + document_crud = DocumentCrud(session, project_id) + assistant_crud = OpenAIAssistantCrud(client) + vector_store_crud = OpenAIVectorStoreCrud(client) - try: - vector_store = vector_store_crud.create() + try: + vector_store = vector_store_crud.create() - docs_batches = 
batch_documents( - document_crud, creation_request.documents, creation_request.batch_size - ) - flat_docs = [doc for batch in docs_batches for doc in batch] + docs_batches = batch_documents( + document_crud, + creation_request.documents, + creation_request.batch_size, + ) + flat_docs = [doc for batch in docs_batches for doc in batch] - file_exts = { - doc.fname.split(".")[-1] for doc in flat_docs if "." in doc.fname - } - file_sizes_kb = [ - storage.get_file_size_kb(doc.object_store_url) for doc in flat_docs - ] + file_exts = { + doc.fname.split(".")[-1] for doc in flat_docs if "." in doc.fname + } + file_sizes_kb = [ + storage.get_file_size_kb(doc.object_store_url) for doc in flat_docs + ] - list(vector_store_crud.update(vector_store.id, storage, docs_batches)) + list(vector_store_crud.update(vector_store.id, storage, docs_batches)) - assistant_options = dict( - creation_request.extract_super_type(AssistantOptions) - ) - assistant = assistant_crud.create(vector_store.id, **assistant_options) - - collection = collection_crud.read_one(collection.id) # refresh - collection.llm_service_id = assistant.id - collection.llm_service_name = creation_request.model - collection.status = CollectionStatus.successful - collection.updated_at = now() - - if flat_docs: - DocumentCollectionCrud(session).create(collection, flat_docs) - - collection_crud._update(collection) - - elapsed = time.time() - start_time - logger.info( - "[create_collection.execute_job] Collection created: %s | Time: %.2fs | Files: %d | Sizes: %s KB | Types: %s", - collection.id, - elapsed, - len(flat_docs), - file_sizes_kb, - list(file_exts), - ) + assistant_options = dict( + creation_request.extract_super_type(AssistantOptions) + ) + assistant = assistant_crud.create(vector_store.id, **assistant_options) + + collection_id = uuid4() + collection_crud = CollectionCrud(session, project_id) + collection = Collection( + id=collection_id, + project_id=project_id, + organization_id=organization_id, + 
llm_service_id=assistant.id, + llm_service_name=creation_request.model, + ) - callback.success(collection.model_dump(mode="json")) + collection_crud.create(collection) + collection_data = collection_crud.read_one(collection.id) - except Exception as err: - logger.error( - "[create_collection.execute_job] Collection Creation Failed | {'collection_id': '%s', 'error': '%s'}", - collection.id, - str(err), - exc_info=True, - ) + if flat_docs: + DocumentCollectionCrud(session).create(collection_data, flat_docs) - if "assistant" in locals(): - _backout(assistant_crud, assistant.id) + collection_crud.create(collection_data) - try: - collection = collection_crud.read_one(job_id) - collection.status = CollectionStatus.failed - collection.updated_at = now() - collection.error_message = str(err) - collection_crud._update(collection) - except Exception as suberr: - logger.warning( - "[create_collection.execute_job] Failed to update collection status | " - "{'collection_id': '%s', 'reason': '%s'}", - collection.id, - str(suberr), + collection_job.status = CollectionJobStatus.successful + collection_job.collection_id = collection_id + collection_job.updated_at = now() + collection_job_crud._update(collection_job) + + elapsed = time.time() - start_time + logger.info( + "[create_collection.execute_job] Collection created: %s | Time: %.2fs | Files: %d | Sizes: %s KB | Types: %s", + collection_id, + elapsed, + len(flat_docs), + file_sizes_kb, + list(file_exts), ) - callback.fail(str(err)) + callback.success(collection.model_dump(mode="json")) + + except Exception as err: + logger.error( + "[create_collection.execute_job] Collection Creation Failed | " + "{'collection_job_id': '%s', 'error': '%s'}", + job_id, + str(err), + exc_info=True, + ) + + if "assistant" in locals(): + _backout(assistant_crud, assistant.id) + + collection_job.status = CollectionJobStatus.failed + collection_job.updated_at = now() + collection_job.error_message = str(err) + 
collection_job_crud._update(collection_job) + + callback.fail(str(err)) + + except Exception as outer_err: + logger.error( + "[create_collection.execute_job] Unexpected Error during collection creation: %s", + str(outer_err), + exc_info=True, + ) + + collection_job.status = CollectionJobStatus.failed + collection_job.updated_at = now() + collection_job.error_message = str(err) + collection_job_crud._update(collection_job) diff --git a/backend/app/services/collections/delete_collection.py b/backend/app/services/collections/delete_collection.py index a165233b8..d722d97c3 100644 --- a/backend/app/services/collections/delete_collection.py +++ b/backend/app/services/collections/delete_collection.py @@ -1,15 +1,14 @@ import logging -from uuid import UUID +from uuid import UUID, uuid4 from sqlmodel import Session from asgi_correlation_id import correlation_id from sqlalchemy.exc import SQLAlchemyError from app.core.db import engine -from app.crud import ( - CollectionCrud, -) +from app.crud import CollectionCrud, CollectionJobCrud from app.crud.rag import OpenAIAssistantCrud +from app.models import CollectionJob, CollectionJobStatus from app.models.collection import Collection, DeletionRequest from app.services.collections.helpers import ( SilentCallback, @@ -33,10 +32,24 @@ def start_job( ) -> UUID: trace_id = correlation_id.get() or "N/A" + job_id = uuid4() + + collection_job = CollectionJob( + id=job_id, + action_type="delete", + project_id=project_id, + collection_id=collection.id, + status=CollectionJobStatus.processing, + ) + + job_crud = CollectionJobCrud(db, project_id) + collection_job = job_crud.create(collection_job) + task_id = start_low_priority_job( function_path="app.services.collections.delete_collection.execute_job", project_id=project_id, - job_id=collection.id, + job_id=job_id, + collection_id=collection.id, trace_id=trace_id, request=request, payload_data=payload, @@ -45,7 +58,7 @@ def start_job( logger.info( "[delete_collection.start_job] Job 
scheduled to delete collection | " - f"collection_id={collection.id}, project_id={project_id}, task_id={task_id}, job_id={collection.id}" + f"Job_id={job_id}, project_id={project_id}, task_id={task_id}, collection_id={collection.id}" ) return collection.id @@ -57,6 +70,7 @@ def execute_job( organization_id: int, task_id: str, job_id: UUID, + collection_id: UUID, task_instance, ) -> None: deletion_request = DeletionRequest(**request) @@ -68,41 +82,58 @@ def execute_job( else WebHookCallback(deletion_request.callback_url, payload) ) - with Session(engine) as session: - client = get_openai_client(session, organization_id, project_id) - assistant_crud = OpenAIAssistantCrud(client) - collection_crud = CollectionCrud(session, project_id) - - collection = collection_crud.read_one(job_id) - - collection.task_id = task_id - collection_crud._update(collection) - - try: - result = collection_crud.delete(collection, assistant_crud) - - logger.info( - "[delete_collection.execute_job] Collection deleted successfully | {'collection_id': '%s'}", - str(collection.id), - ) - callback.success(result.model_dump(mode="json")) - - except (ValueError, PermissionError, SQLAlchemyError) as err: - logger.error( - "[delete_collection.execute_job] Failed to delete collection | {'collection_id': '%s', 'error': '%s'}", - str(collection.id), - str(err), - exc_info=True, - ) - callback.fail(str(err)) - - except Exception as err: - logger.error( - "[delete_collection.execute_job] Unexpected error during deletion | " - "{'collection_id': '%s', 'error': '%s', 'error_type': '%s'}", - str(collection.id), - str(err), - type(err).__name__, - exc_info=True, - ) - callback.fail(str(err)) + try: + with Session(engine) as session: + client = get_openai_client(session, organization_id, project_id) + assistant_crud = OpenAIAssistantCrud(client) + collection_crud = CollectionCrud(session, project_id) + collection_job_crud = CollectionJobCrud(session, project_id) + + collection = 
collection_crud.read_one(collection_id) + collection_job = collection_job_crud.read_one(job_id) + + collection_job.task_id = task_id + + try: + result = collection_crud.delete(collection, assistant_crud) + + collection_job.status = CollectionJobStatus.successful + collection_job.error_message = None + collection_job_crud._update(collection_job) + + logger.info( + "[delete_collection.execute_job] Collection deleted successfully | {'collection_id': '%s', 'job_id': '%s'}", + str(collection.id), + str(job_id), + ) + callback.success(result.model_dump(mode="json")) + + except (ValueError, PermissionError, SQLAlchemyError) as err: + collection_job.status = CollectionJobStatus.failed + collection_job.error_message = str(err) + collection_job_crud._update(collection_job) + + logger.error( + "[delete_collection.execute_job] Failed to delete collection | {'collection_id': '%s', 'error': '%s', 'job_id': '%s'}", + str(collection.id), + str(err), + str(job_id), + exc_info=True, + ) + callback.fail(str(err)) + + except Exception as err: + collection_job.status = CollectionJobStatus.failed + collection_job.error_message = str(err) + collection_job_crud._update(collection_job) + + logger.error( + "[delete_collection.execute_job] Unexpected error during deletion | " + "{'collection_id': '%s', 'error': '%s', 'error_type': '%s', 'job_id': '%s'}", + str(collection.id), + str(err), + type(err).__name__, + str(job_id), + exc_info=True, + ) + callback.fail(str(err)) diff --git a/backend/app/tests/api/routes/collections/test_collection_info.py b/backend/app/tests/api/routes/collections/test_collection_info.py index 787ed029f..485f62242 100644 --- a/backend/app/tests/api/routes/collections/test_collection_info.py +++ b/backend/app/tests/api/routes/collections/test_collection_info.py @@ -1,69 +1,103 @@ -from uuid import uuid4 -from datetime import datetime, timezone +from uuid import uuid4, UUID +from typing import Optional + from fastapi.testclient import TestClient from sqlmodel import 
Session + from app.core.config import settings -from app.models import Collection -from app.tests.utils.utils import get_user_from_api_key -from app.models.collection import CollectionStatus +from app.core.util import now +from app.models import ( + Collection, + CollectionJob, + CollectionActionType, + CollectionJobStatus, +) +from app.crud import CollectionJobCrud, CollectionCrud def create_collection( db, user, - status: CollectionStatus = CollectionStatus.processing, with_llm: bool = False, ): - now = datetime.now(timezone.utc) collection = Collection( id=uuid4(), organization_id=user.organization_id, project_id=user.project_id, - status=status, - updated_at=now, + inserted_at=now(), + updated_at=now(), ) if with_llm: collection.llm_service_id = f"asst_{uuid4()}" collection.llm_service_name = "gpt-4o" - db.add(collection) - db.commit() - db.refresh(collection) + collection_crud = CollectionCrud(db, user.project_id) + collection = collection_crud.create(collection) + return collection +def create_collection_job( + db, + user, + collection_id: Optional[UUID] = None, + action_type=CollectionActionType.create, + status=CollectionJobStatus.processing, +): + collection_job = CollectionJob( + id=uuid4(), + collection_id=collection_id, + project_id=user.project_id, + action_type=action_type, + status=status, + inserted_at=now(), + updated_at=now(), + ) + + if status == CollectionJobStatus.failed: + collection_job.error_message = ( + "Something went wrong during the collection job process." 
+ ) + + collection_job_crud = CollectionJobCrud(db, user.project_id) + created_job = collection_job_crud.create(collection_job) + + return created_job + + def test_collection_info_processing( - db: Session, client: TestClient, user_api_key_header + db: Session, client: TestClient, user_api_key_header, user_api_key ): headers = user_api_key_header - user = get_user_from_api_key(db, headers) - collection = create_collection(db, user, status=CollectionStatus.processing) - response = client.post( - f"{settings.API_V1_STR}/collections/info/{collection.id}", + collection_job = create_collection_job(db, user_api_key) + + response = client.get( + f"{settings.API_V1_STR}/collections/info/{collection_job.id}", headers=headers, ) assert response.status_code == 200 data = response.json()["data"] - assert data["id"] == str(collection.id) - assert data["status"] == CollectionStatus.processing.value - assert data["llm_service_id"] is None - assert data["llm_service_name"] is None + assert data["status"] == CollectionJobStatus.processing.value + assert data["inserted_at"] is not None + assert data["action_type"] == CollectionActionType.create.value + assert data["updated_at"] is not None def test_collection_info_successful( - db: Session, client: TestClient, user_api_key_header + db: Session, client: TestClient, user_api_key_header, user_api_key ): headers = user_api_key_header - user = get_user_from_api_key(db, headers) - collection = create_collection( - db, user, status=CollectionStatus.successful, with_llm=True + + collection = create_collection(db, user_api_key, with_llm=True) + collection_job = create_collection_job( + db, user_api_key, collection.id, status=CollectionJobStatus.successful ) - response = client.post( - f"{settings.API_V1_STR}/collections/info/{collection.id}", + response = client.get( + f"{settings.API_V1_STR}/collections/info/{collection_job.id}", headers=headers, ) @@ -71,25 +105,26 @@ def test_collection_info_successful( data = response.json()["data"] 
assert data["id"] == str(collection.id) - assert data["status"] == CollectionStatus.successful.value assert data["llm_service_id"] == collection.llm_service_id assert data["llm_service_name"] == "gpt-4o" -def test_collection_info_failed(db: Session, client: TestClient, user_api_key_header): +def test_collection_info_failed( + db: Session, client: TestClient, user_api_key_header, user_api_key +): headers = user_api_key_header - user = get_user_from_api_key(db, headers) - collection = create_collection(db, user, status=CollectionStatus.failed) - response = client.post( - f"{settings.API_V1_STR}/collections/info/{collection.id}", + collection_job = create_collection_job( + db, user_api_key, status=CollectionJobStatus.failed + ) + + response = client.get( + f"{settings.API_V1_STR}/collections/info/{collection_job.id}", headers=headers, ) assert response.status_code == 200 data = response.json()["data"] - assert data["id"] == str(collection.id) - assert data["status"] == CollectionStatus.failed.value - assert data["llm_service_id"] is None - assert data["llm_service_name"] is None + assert data["status"] == CollectionJobStatus.failed.value + assert data["error_message"] is not None diff --git a/backend/app/tests/api/routes/collections/test_create_collections.py b/backend/app/tests/api/routes/collections/test_create_collections.py index dd762228c..7b4ff9d39 100644 --- a/backend/app/tests/api/routes/collections/test_create_collections.py +++ b/backend/app/tests/api/routes/collections/test_create_collections.py @@ -1,9 +1,10 @@ from uuid import UUID +from unittest.mock import patch from fastapi.testclient import TestClient from unittest.mock import patch -from app.models.collection import Collection, CollectionStatus, CreationRequest +from app.models.collection import Collection, CreationRequest def test_collection_creation_success( @@ -21,26 +22,28 @@ def test_collection_creation_success( callback_url=None, ) - api_response = client.post( + resp = client.post( 
"/api/v1/collections/create", json=creation_data.model_dump(mode="json"), headers=user_api_key_header, ) - assert api_response.status_code == 200 - response_body = api_response.json() + assert resp.status_code == 200 + body = resp.json() + + assert body["success"] is True + assert body["data"] is None - assert response_body["success"] is True - assert response_body["metadata"]["status"] == "processing" - assert response_body["metadata"]["key"] is not None - assert UUID(response_body["metadata"]["key"]) # Verify UUID format - assert response_body["data"] is None + assert body["metadata"]["status"] == "processing" + assert body["metadata"]["route"] == "/collections/create" + assert body["metadata"]["key"] is not None + job_key = UUID(body["metadata"]["key"]) mock_job_start.assert_called_once() - job_args = mock_job_start.call_args[1] - assert job_args["request"] == creation_data.model_dump() - assert job_args["payload"]["status"] == "processing" - assert isinstance(job_args["collection"], Collection) - assert job_args["collection"].status == CollectionStatus.processing - assert job_args["project_id"] == job_args["collection"].project_id - assert job_args["organization_id"] == job_args["collection"].organization_id + kwargs = mock_job_start.call_args.kwargs + + assert "db" in kwargs + assert kwargs["request"] == creation_data.model_dump() + assert kwargs["payload"]["status"] == "processing" + + assert kwargs["collection_job_id"] == job_key diff --git a/backend/app/tests/crud/collections/test_crud_collection_create.py b/backend/app/tests/crud/collections/collection/test_crud_collection_create.py similarity index 100% rename from backend/app/tests/crud/collections/test_crud_collection_create.py rename to backend/app/tests/crud/collections/collection/test_crud_collection_create.py diff --git a/backend/app/tests/crud/collections/test_crud_collection_delete.py b/backend/app/tests/crud/collections/collection/test_crud_collection_delete.py similarity index 97% rename from 
backend/app/tests/crud/collections/test_crud_collection_delete.py rename to backend/app/tests/crud/collections/collection/test_crud_collection_delete.py index 104704d92..e151a1c6a 100644 --- a/backend/app/tests/crud/collections/test_crud_collection_delete.py +++ b/backend/app/tests/crud/collections/collection/test_crud_collection_delete.py @@ -39,7 +39,7 @@ def test_delete_follows_insert(self, db: Session): crud = CollectionCrud(db, collection.project_id) collection_ = crud.delete(collection, assistant) - assert collection_.created_at <= collection_.deleted_at + assert collection_.inserted_at <= collection_.deleted_at @openai_responses.mock() def test_cannot_delete_others_collections(self, db: Session): diff --git a/backend/app/tests/crud/collections/test_crud_collection_read_all.py b/backend/app/tests/crud/collections/collection/test_crud_collection_read_all.py similarity index 100% rename from backend/app/tests/crud/collections/test_crud_collection_read_all.py rename to backend/app/tests/crud/collections/collection/test_crud_collection_read_all.py diff --git a/backend/app/tests/crud/collections/test_crud_collection_read_one.py b/backend/app/tests/crud/collections/collection/test_crud_collection_read_one.py similarity index 100% rename from backend/app/tests/crud/collections/test_crud_collection_read_one.py rename to backend/app/tests/crud/collections/collection/test_crud_collection_read_one.py diff --git a/backend/app/tests/crud/collections/test_collection_jobs.py b/backend/app/tests/crud/collections/test_collection_jobs.py new file mode 100644 index 000000000..fedd26463 --- /dev/null +++ b/backend/app/tests/crud/collections/test_collection_jobs.py @@ -0,0 +1,142 @@ +import pytest +from uuid import uuid4 + +from sqlmodel import Session +from sqlalchemy.exc import IntegrityError + +from app.models import CollectionJob, CollectionJobStatus, CollectionActionType +from app.crud import CollectionJobCrud +from app.core.util import now +from app.tests.utils.utils import 
get_project + + +def create_sample_collection_job( + db, + project_id, + action_type=CollectionActionType.create, + status=CollectionJobStatus.processing, +): + collection_job = CollectionJob( + id=uuid4(), + project_id=project_id, + action_type=action_type, + status=status, + inserted_at=now(), + updated_at=now(), + ) + + collection_job_crud = CollectionJobCrud(db, project_id) + created_job = collection_job_crud.create(collection_job) + + return created_job + + +@pytest.fixture +def sample_project(db: Session): + """Fixture to create a sample project.""" + return get_project(db) + + +def test_create_collection_job(db: Session, sample_project): + """Test case to create a CollectionJob.""" + collection_job = CollectionJob( + id=uuid4(), + project_id=sample_project.id, + action_type=CollectionActionType.create, + status=CollectionJobStatus.processing, + inserted_at=now(), + updated_at=now(), + ) + collection_job = create_sample_collection_job(db, sample_project.id) + + collection_job_crud = CollectionJobCrud(db, sample_project.id) + + created_job = collection_job_crud.create(collection_job) + + assert created_job.id is not None + assert created_job.project_id == sample_project.id + assert created_job.action_type == CollectionActionType.create + assert created_job.status == CollectionJobStatus.processing + assert created_job.inserted_at is not None + assert created_job.updated_at is not None + + +def test_read_one_collection_job(db: Session, sample_project): + """Test case to read a single CollectionJob by ID.""" + collection_job = create_sample_collection_job(db, sample_project.id) + + collection_job_crud = CollectionJobCrud(db, sample_project.id) + + retrieved_job = collection_job_crud.read_one(str(collection_job.id)) + + assert retrieved_job.id == collection_job.id + assert retrieved_job.project_id == sample_project.id + assert retrieved_job.action_type == collection_job.action_type + assert retrieved_job.status == collection_job.status + assert 
retrieved_job.inserted_at == collection_job.inserted_at + + +def test_read_all_collection_jobs(db: Session, sample_project): + """Test case to retrieve all collection jobs for a project.""" + collection_job1 = create_sample_collection_job(db, sample_project.id) + collection_job2 = create_sample_collection_job(db, sample_project.id) + + db.commit() + + collection_job_crud = CollectionJobCrud(db, sample_project.id) + + collection_jobs = collection_job_crud.read_all() + + assert len(collection_jobs) == 2 + job_ids = [str(job.id) for job in collection_jobs] + assert str(collection_job1.id) in job_ids + assert str(collection_job2.id) in job_ids + + +def test_update_collection_job(db: Session, sample_project): + """Test case to update a CollectionJob.""" + collection_job = create_sample_collection_job(db, sample_project.id) + + collection_job_crud = CollectionJobCrud(db, sample_project.id) + + collection_job.status = CollectionJobStatus.failed + collection_job.error_message = "model name not valid" + collection_job.updated_at = now() + + updated_job = collection_job_crud._update(collection_job) + + assert updated_job.status == CollectionJobStatus.failed + assert updated_job.error_message is not None + assert updated_job.updated_at is not None + + +def test_update_invalid_project_permission(db: Session, sample_project): + """Test case to check permission error during update.""" + collection_job = create_sample_collection_job(db, sample_project.id) + + collection_job_crud = CollectionJobCrud(db, sample_project.id) + + collection_job.status = CollectionJobStatus.successful + collection_job.updated_at = now() + + collection_job.project_id = 999 + + with pytest.raises(PermissionError): + collection_job_crud._update(collection_job) + + +def test_create_collection_job_with_invalid_data(db: Session, sample_project): + """Test case to handle invalid data during job creation.""" + collection_job = CollectionJob( + id=uuid4(), + project_id=sample_project.id, + action_type=None, + 
status=CollectionJobStatus.processing, + inserted_at=now(), + updated_at=now(), + ) + + collection_job_crud = CollectionJobCrud(db, sample_project.id) + + with pytest.raises(IntegrityError): + collection_job_crud.create(collection_job) diff --git a/backend/app/tests/services/collections/test_create_collection.py b/backend/app/tests/services/collections/test_create_collection.py index 5215dbaaf..5cac4eed9 100644 --- a/backend/app/tests/services/collections/test_create_collection.py +++ b/backend/app/tests/services/collections/test_create_collection.py @@ -1,28 +1,25 @@ -import pytest +# tests/services/collections/test_create_collection_jobs.py + import os +from dataclasses import asdict from pathlib import Path -from urllib.parse import urlparse from unittest.mock import patch -from uuid import UUID -from dataclasses import asdict +from urllib.parse import urlparse +from uuid import UUID, uuid4 -from sqlmodel import Session +import pytest from moto import mock_aws +from sqlmodel import Session +from app.core.cloud import AmazonCloudStorageClient from app.core.config import settings -from app.models.collection import ( - CreationRequest, - Collection, - ResponsePayload, - CollectionStatus, -) -from app.crud import CollectionCrud, DocumentCollectionCrud +from app.crud import CollectionCrud, CollectionJobCrud, DocumentCollectionCrud +from app.models import CollectionJobStatus, CollectionJob +from app.models.collection import CreationRequest, ResponsePayload +from app.services.collections.create_collection import start_job, execute_job +from app.tests.utils.openai import get_mock_openai_client_with_vector_store from app.tests.utils.utils import get_project -from app.tests.utils.collection import get_collection from app.tests.utils.document import DocumentStore -from app.tests.utils.openai import get_mock_openai_client_with_vector_store -from app.services.collections.create_collection import start_job, execute_job -from app.core.cloud import AmazonCloudStorageClient 
@pytest.fixture(scope="function") @@ -34,7 +31,14 @@ def aws_credentials(): os.environ["AWS_DEFAULT_REGION"] = settings.AWS_DEFAULT_REGION -def test_start_job(db: Session): +def test_start_job_creates_collection_job_and_schedules_task(db: Session): + """ + start_job should: + - create a CollectionJob in 'processing' + - call start_low_priority_job with the correct kwargs + - return the job UUID (same one that was passed in) + """ + project = get_project(db) request = CreationRequest( model="gpt-4o", instructions="string", @@ -43,51 +47,65 @@ def test_start_job(db: Session): batch_size=1, callback_url=None, ) - project = get_project(db) - collection = Collection( - id=UUID("42be84e8-d1b0-4e93-8b26-ebb74034674b"), - project_id=project.id, - organization_id=project.organization_id, - status="PENDING", - ) + payload = {"some": "data"} + job_id = uuid4() with patch( "app.services.collections.create_collection.start_low_priority_job" ) as mock_schedule: mock_schedule.return_value = "fake-task-id" - job_id = start_job( - db, - request.model_dump(), - collection, - project.id, - {"some": "data"}, # payload - project.organization_id, + returned_job_id = start_job( + db=db, + request=request.model_dump(), + project_id=project.id, + payload=payload, + collection_job_id=job_id, + organization_id=project.organization_id, ) - assert job_id == collection.id + assert returned_job_id == job_id + + job = CollectionJobCrud(db, project.id).read_one(job_id) + assert job.id == job_id + assert job.project_id == project.id + assert job.status == CollectionJobStatus.processing + assert job.action_type == "create" + assert job.collection_id is None mock_schedule.assert_called_once() - _, kwargs = mock_schedule.call_args + kwargs = mock_schedule.call_args.kwargs assert ( kwargs["function_path"] == "app.services.collections.create_collection.execute_job" ) assert kwargs["project_id"] == project.id assert kwargs["organization_id"] == project.organization_id - assert kwargs["job_id"] == 
collection.id + assert kwargs["job_id"] == job_id assert kwargs["request"] == request.model_dump() - assert kwargs["payload_data"] == {"some": "data"} + assert kwargs["payload_data"] == payload @pytest.mark.usefixtures("aws_credentials") @mock_aws @patch("app.services.collections.create_collection.get_openai_client") -def test_execute_job_success(mock_get_openai_client, db: Session): +def test_execute_job_success_flow_updates_job_and_creates_collection( + mock_get_openai_client, db: Session +): + """ + execute_job should: + - set task_id on the CollectionJob + - ingest documents into a vector store + - create an OpenAI assistant + - create a Collection with llm fields filled + - link the CollectionJob -> collection_id, set status=successful + - create DocumentCollection links + """ project = get_project(db) aws = AmazonCloudStorageClient() aws.create() + store = DocumentStore(db=db, project_id=project.id) document = store.put() s3_key = Path(urlparse(document.object_store_url).path).relative_to("/") @@ -106,13 +124,18 @@ def test_execute_job_success(mock_get_openai_client, db: Session): mock_client = get_mock_openai_client_with_vector_store() mock_get_openai_client.return_value = mock_client - collection_obj = get_collection(db, client=mock_client, project_id=project.id) - - crud = CollectionCrud(db, project_id=project.id) - collection = crud.create(collection_obj) + job_id = uuid4() + job_crud = CollectionJobCrud(db, project.id) + job_crud.create( + CollectionJob( + id=job_id, + project_id=project.id, + status=CollectionJobStatus.processing, + action_type="create", + ) + ) - job_id = collection.id - task_id = "task-123" + task_id = uuid4() with patch("app.services.collections.create_collection.Session") as SessionCtor: SessionCtor.return_value.__enter__.return_value = db @@ -121,20 +144,25 @@ def test_execute_job_success(mock_get_openai_client, db: Session): execute_job( request=sample_request.model_dump(), payload_data=asdict(sample_payload), - 
project_id=collection.project_id, - organization_id=collection.organization_id, + project_id=project.id, + organization_id=project.organization_id, task_id=task_id, job_id=job_id, task_instance=None, ) - updated = CollectionCrud(db, collection.project_id).read_one(job_id) - assert updated.task_id == task_id - assert updated.status == CollectionStatus.successful - assert updated.llm_service_id == "mock_assistant_id" - assert updated.llm_service_name == sample_request.model - assert updated.updated_at is not None + updated_job = CollectionJobCrud(db, project.id).read_one(job_id) + assert updated_job.task_id == task_id + assert updated_job.status == CollectionJobStatus.successful + assert updated_job.collection_id is not None + + created_collection = CollectionCrud(db, project.id).read_one( + updated_job.collection_id + ) + assert created_collection.llm_service_id == "mock_assistant_id" + assert created_collection.llm_service_name == sample_request.model + assert created_collection.updated_at is not None - docs = DocumentCollectionCrud(db).read(updated, skip=0, limit=10) + docs = DocumentCollectionCrud(db).read(created_collection, skip=0, limit=10) assert len(docs) == 1 assert docs[0].fname == document.fname diff --git a/backend/app/tests/services/collections/test_delete_collection.py b/backend/app/tests/services/collections/test_delete_collection.py index 6c44b0b94..473bf957f 100644 --- a/backend/app/tests/services/collections/test_delete_collection.py +++ b/backend/app/tests/services/collections/test_delete_collection.py @@ -1,50 +1,61 @@ -from unittest.mock import patch -import pytest -import os +from unittest.mock import patch, MagicMock from uuid import uuid4 from dataclasses import asdict -from pathlib import Path -from urllib.parse import urlparse from sqlmodel import Session -from moto import mock_aws +from sqlalchemy.exc import SQLAlchemyError + from app.models.collection import ( DeletionRequest, Collection, - CollectionStatus, ResponsePayload, ) from 
app.tests.utils.utils import get_project from app.crud import CollectionCrud -from app.core.config import settings -from app.tests.utils.collection import get_collection -from app.tests.utils.document import DocumentStore -from app.tests.utils.openai import get_mock_openai_client_with_vector_store +from app.crud import CollectionCrud, CollectionJobCrud +from app.models import CollectionJobStatus, CollectionJob +from app.tests.utils.utils import get_project from app.services.collections.delete_collection import start_job, execute_job -from app.core.cloud import AmazonCloudStorageClient - -@pytest.fixture(scope="function") -def aws_credentials(): - os.environ["AWS_ACCESS_KEY_ID"] = "testing" - os.environ["AWS_SECRET_ACCESS_KEY"] = "testing" - os.environ["AWS_SECURITY_TOKEN"] = "testing" - os.environ["AWS_SESSION_TOKEN"] = "testing" - os.environ["AWS_DEFAULT_REGION"] = settings.AWS_DEFAULT_REGION - - -def test_start_job(db: Session): - req = DeletionRequest(collection_id=str(uuid4())) - project = get_project(db) +def create_collection(db: Session, project): collection = Collection( - id=req.collection_id, + id=uuid4(), project_id=project.id, organization_id=project.organization_id, - status=CollectionStatus.processing, + llm_service_id="asst-nasjnl", ) + collection = CollectionCrud(db, project.id).create(collection) + return collection + + +def create_collection_job(db: Session, project, collection): + job_id = uuid4() + job_crud = CollectionJobCrud(db, project.id) + job = job_crud.create( + CollectionJob( + id=job_id, + action_type="delete", + project_id=project.id, + collection_id=collection.id, + status=CollectionJobStatus.processing, + ) + ) + return job + +def test_start_job_creates_collection_job_and_schedules_task(db: Session): + """ + - start_job should create a CollectionJob (status=processing, action=delete) + - schedule the task with a *generated* job_id and the provided collection_id + - return the collection.id (per implementation) + """ + project = 
get_project(db) + + created_collection = create_collection(db, project) + + req = DeletionRequest(collection_id=created_collection.id) payload = {"status": "processing"} with patch( @@ -52,67 +63,139 @@ def test_start_job(db: Session): ) as mock_schedule: mock_schedule.return_value = "fake-task-id" - job_id = start_job( + returned = start_job( db=db, request=req.model_dump(), - collection=collection, + collection=created_collection, project_id=project.id, payload=payload, organization_id=project.organization_id, ) - assert job_id == collection.id - - mock_schedule.assert_called_once() - _, kwargs = mock_schedule.call_args - assert ( - kwargs["function_path"] - == "app.services.collections.delete_collection.execute_job" - ) - assert kwargs["project_id"] == project.id - assert kwargs["organization_id"] == project.organization_id - assert kwargs["job_id"] == collection.id - assert kwargs["request"] == req.model_dump() - assert kwargs["payload_data"] == payload + assert returned == created_collection.id + + jobs = CollectionJobCrud(db, project.id).read_all() + assert len(jobs) == 1 + job = jobs[0] + assert job.project_id == project.id + assert job.collection_id == created_collection.id + assert job.status == CollectionJobStatus.processing + assert job.action_type == "delete" + + mock_schedule.assert_called_once() + kwargs = mock_schedule.call_args.kwargs + assert ( + kwargs["function_path"] + == "app.services.collections.delete_collection.execute_job" + ) + assert kwargs["project_id"] == project.id + assert kwargs["organization_id"] == project.organization_id + assert kwargs["job_id"] == job.id + assert kwargs["collection_id"] == created_collection.id + assert kwargs["request"] == req.model_dump() + assert kwargs["payload_data"] == payload + assert "trace_id" in kwargs -@pytest.mark.usefixtures("aws_credentials") -@mock_aws @patch("app.services.collections.delete_collection.get_openai_client") -def test_execute_job_delete_success(mock_get_openai_client, db: Session): 
+def test_execute_job_delete_success_updates_job_and_calls_delete( + mock_get_openai_client, db: Session +): + """ + - execute_job should set task_id on the CollectionJob + - call CollectionCrud.delete(collection, assistant_crud) + - mark job successful and clear error_message + """ project = get_project(db) - aws = AmazonCloudStorageClient() - aws.create() + collection = create_collection(db, project) - store = DocumentStore(db=db, project_id=project.id) - document = store.put() - s3_key = Path(urlparse(document.object_store_url).path).relative_to("/") - aws.client.put_object(Bucket=settings.AWS_S3_BUCKET, Key=str(s3_key), Body=b"test") + job = create_collection_job(db, project, collection) - mock_client = get_mock_openai_client_with_vector_store() - mock_get_openai_client.return_value = mock_client + mock_get_openai_client.return_value = MagicMock() - collection_obj = get_collection(db, client=mock_client, project_id=project.id) - crud = CollectionCrud(db, project_id=project.id) - collection = crud.create(collection_obj, [document]) - db.flush() - db.commit() + with patch( + "app.services.collections.delete_collection.Session" + ) as SessionCtor, patch( + "app.services.collections.delete_collection.OpenAIAssistantCrud" + ) as MockAssistantCrud, patch( + "app.services.collections.delete_collection.CollectionCrud" + ) as MockCollectionCrud: + SessionCtor.return_value.__enter__.return_value = db + SessionCtor.return_value.__exit__.return_value = False + + collection_crud_instance = MockCollectionCrud.return_value + collection_crud_instance.read_one.return_value = collection + + deletion_result = MagicMock() + deletion_result.model_dump.return_value = { + "id": str(collection.id), + "deleted": True, + } + collection_crud_instance.delete.return_value = deletion_result + + task_id = uuid4() + req = DeletionRequest(collection_id=collection.id) + payload = ResponsePayload(status="processing", route="/test/delete") + + execute_job( + request=req.model_dump(), + 
payload_data=asdict(payload), + project_id=project.id, + organization_id=project.organization_id, + task_id=task_id, + job_id=job.id, + collection_id=collection.id, + task_instance=None, + ) + + updated_job = CollectionJobCrud(db, project.id).read_one(job.id) + assert updated_job.task_id == task_id + assert updated_job.status == CollectionJobStatus.successful + assert updated_job.error_message in (None, "") - job_id = collection.id - task_id = "task-123" - req = DeletionRequest(collection_id=job_id) - payload = ResponsePayload(status="pending", route="/test/route") + MockCollectionCrud.assert_called_with(db, project.id) + collection_crud_instance.read_one.assert_called_once_with(collection.id) + collection_crud_instance.delete.assert_called_once() + args, kwargs = collection_crud_instance.delete.call_args + assert isinstance(args[0], Collection) + MockAssistantCrud.assert_called_once() + mock_get_openai_client.assert_called_once() + + +@patch("app.services.collections.delete_collection.get_openai_client") +def test_execute_job_delete_failure_marks_job_failed( + mock_get_openai_client, db: Session +): + """ + When CollectionCrud.delete raises (e.g., SQLAlchemyError), + the job should be marked failed and error_message set. 
+ """ + project = get_project(db) + + collection = create_collection(db, project) + + job = create_collection_job(db, project, collection) + + mock_get_openai_client.return_value = MagicMock() with patch( "app.services.collections.delete_collection.Session" ) as SessionCtor, patch( "app.services.collections.delete_collection.OpenAIAssistantCrud" - ) as MockAssistantCrud: + ) as MockAssistantCrud, patch( + "app.services.collections.delete_collection.CollectionCrud" + ) as MockCollectionCrud: SessionCtor.return_value.__enter__.return_value = db SessionCtor.return_value.__exit__.return_value = False - MockAssistantCrud.return_value.delete.return_value = None + collection_crud_instance = MockCollectionCrud.return_value + collection_crud_instance.read_one.return_value = collection + collection_crud_instance.delete.side_effect = SQLAlchemyError("boom") + + task_id = uuid4() + req = DeletionRequest(collection_id=collection.id) + payload = ResponsePayload(status="processing", route="/test/delete") execute_job( request=req.model_dump(), @@ -120,13 +203,15 @@ def test_execute_job_delete_success(mock_get_openai_client, db: Session): project_id=project.id, organization_id=project.organization_id, task_id=task_id, - job_id=job_id, + job_id=job.id, + collection_id=collection.id, task_instance=None, ) - updated = CollectionCrud(db, project.id).read_one(job_id) - assert updated.task_id == task_id - assert updated.deleted_at is not None + failed_job = CollectionJobCrud(db, project.id).read_one(job.id) + assert failed_job.task_id == task_id + assert failed_job.status == CollectionJobStatus.failed + assert failed_job.error_message and "boom" in failed_job.error_message - mock_get_openai_client.assert_called_once() - MockAssistantCrud.return_value.delete.assert_called_once() + MockAssistantCrud.assert_called_once() + MockCollectionCrud.assert_called_with(db, project.id) From 439743429ab9f19cb815036b5188ed7f0c3ad28c Mon Sep 17 00:00:00 2001 From: nishika26 Date: Wed, 1 Oct 2025 
14:50:48 +0530 Subject: [PATCH 32/44] collection job info test fix --- .../tests/api/routes/collections/test_collection_info.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/app/tests/api/routes/collections/test_collection_info.py b/backend/app/tests/api/routes/collections/test_collection_info.py index 485f62242..116f0ebcc 100644 --- a/backend/app/tests/api/routes/collections/test_collection_info.py +++ b/backend/app/tests/api/routes/collections/test_collection_info.py @@ -73,7 +73,7 @@ def test_collection_info_processing( collection_job = create_collection_job(db, user_api_key) response = client.get( - f"{settings.API_V1_STR}/collections/info/{collection_job.id}", + f"{settings.API_V1_STR}/collections/info/collection_job/{collection_job.id}", headers=headers, ) @@ -97,7 +97,7 @@ def test_collection_info_successful( ) response = client.get( - f"{settings.API_V1_STR}/collections/info/{collection_job.id}", + f"{settings.API_V1_STR}/collections/info/collection_job/{collection_job.id}", headers=headers, ) @@ -119,7 +119,7 @@ def test_collection_info_failed( ) response = client.get( - f"{settings.API_V1_STR}/collections/info/{collection_job.id}", + f"{settings.API_V1_STR}/collections/info/collection_job/{collection_job.id}", headers=headers, ) From f5a5871acac4b1ee4db75596e1d2d0bf7e6cf881 Mon Sep 17 00:00:00 2001 From: nishika26 Date: Mon, 6 Oct 2025 13:51:09 +0530 Subject: [PATCH 33/44] PR Reviews fixes --- ...37e65_adding_collection_job_table_and_.py} | 49 ++++------ backend/app/api/docs/collections/create.md | 6 +- backend/app/api/docs/collections/job_info.md | 4 +- backend/app/api/routes/collections.py | 65 ++++++------ backend/app/crud/__init__.py | 4 +- backend/app/crud/collection/__init__.py | 2 + backend/app/crud/collection/collection.py | 45 ++++++--- backend/app/crud/collection/collection_job.py | 98 ++++++++++--------- backend/app/models/__init__.py | 4 +- backend/app/models/collection.py | 35 ++++--- 
backend/app/models/collection_job.py | 33 ++++--- .../services/collections/create_collection.py | 26 +++-- .../services/collections/delete_collection.py | 18 ++-- .../collections/test_collection_info.py | 16 +-- .../collections/test_create_collections.py | 4 +- .../test_crud_collection_read_one.py | 7 +- .../crud/collections/test_collection_jobs.py | 35 ++----- .../collections/test_create_collection.py | 14 +-- .../collections/test_delete_collection.py | 18 ++-- 19 files changed, 260 insertions(+), 223 deletions(-) rename backend/app/alembic/versions/{718dcc83f3b6_adding_collection_jobs_table_altering_collections.py => b30727137e65_adding_collection_job_table_and_.py} (82%) create mode 100644 backend/app/crud/collection/__init__.py diff --git a/backend/app/alembic/versions/718dcc83f3b6_adding_collection_jobs_table_altering_collections.py b/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_.py similarity index 82% rename from backend/app/alembic/versions/718dcc83f3b6_adding_collection_jobs_table_altering_collections.py rename to backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_.py index 2797bbcc1..fed80f483 100644 --- a/backend/app/alembic/versions/718dcc83f3b6_adding_collection_jobs_table_altering_collections.py +++ b/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_.py @@ -1,8 +1,8 @@ -"""adding collection jobs table and altering collection table +"""adding collection job table and altering collections table -Revision ID: 718dcc83f3b6 +Revision ID: b30727137e65 Revises: c6fb6d0b5897 -Create Date: 2025-09-29 20:41:38.005505 +Create Date: 2025-10-05 14:19:14.213933 """ from alembic import op @@ -11,23 +11,23 @@ from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. 
-revision = "718dcc83f3b6" +revision = "b30727137e65" down_revision = "c6fb6d0b5897" branch_labels = None depends_on = None - collection_job_status_enum = postgresql.ENUM( - "processing", - "successful", - "failed", + "PENDING", + "PROCESSING", + "SUCCESSFUL", + "FAILED", name="collectionjobstatus", create_type=False, ) collection_action_type = postgresql.ENUM( - "create", - "delete", + "CREATE", + "DELETE", name="collectionactiontype", create_type=False, ) @@ -38,12 +38,12 @@ def upgrade(): collection_action_type.create(op.get_bind(), checkfirst=True) op.create_table( "collection_jobs", - sa.Column("id", sa.Uuid(), nullable=False), sa.Column("action_type", collection_action_type, nullable=False), sa.Column("collection_id", sa.Uuid(), nullable=True), - sa.Column("task_id", sa.Uuid(), nullable=True), sa.Column("project_id", sa.Integer(), nullable=False), + sa.Column("id", sa.Uuid(), nullable=False), sa.Column("status", collection_job_status_enum, nullable=False), + sa.Column("task_id", sa.Uuid(), nullable=True), sa.Column("error_message", sa.Text(), nullable=True), sa.Column("inserted_at", sa.DateTime(), nullable=False), sa.Column("updated_at", sa.DateTime(), nullable=False), @@ -53,29 +53,15 @@ def upgrade(): sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"), sa.PrimaryKeyConstraint("id"), ) + op.add_column("collection", sa.Column("inserted_at", sa.DateTime(), nullable=False)) op.drop_constraint("collection_owner_id_fkey", "collection", type_="foreignkey") op.drop_column("collection", "owner_id") + op.drop_column("collection", "created_at") op.drop_column("collection", "status") op.drop_column("collection", "error_message") - op.add_column("collection", sa.Column("inserted_at", sa.DateTime(), nullable=False)) - op.drop_column("collection", "created_at") def downgrade(): - op.create_foreign_key( - "openai_conversation_project_id_fkey1", - "openai_conversation", - "project", - ["project_id"], - ["id"], - ) - op.create_foreign_key( - 
"openai_conversation_organization_id_fkey1", - "openai_conversation", - "organization", - ["organization_id"], - ["id"], - ) op.add_column( "collection", sa.Column("error_message", sa.VARCHAR(), autoincrement=False, nullable=True), @@ -92,6 +78,12 @@ def downgrade(): nullable=False, ), ) + op.add_column( + "collection", + sa.Column( + "created_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=False + ), + ) op.add_column( "collection", sa.Column("owner_id", sa.INTEGER(), autoincrement=False, nullable=False), @@ -104,4 +96,5 @@ def downgrade(): ["id"], ondelete="CASCADE", ) + op.drop_column("collection", "inserted_at") op.drop_table("collection_jobs") diff --git a/backend/app/api/docs/collections/create.md b/backend/app/api/docs/collections/create.md index d4dc9d89f..f35139479 100644 --- a/backend/app/api/docs/collections/create.md +++ b/backend/app/api/docs/collections/create.md @@ -20,7 +20,7 @@ value being invalid. It can also fail due to document types not be accepted. This is especially true for PDFs that may not be parseable. The immediate response from the endpoint is a packet containing a -`key`. Once the collection has been created, information about the -collection will be returned to the user via the callback URL. If a -callback URL is not provided, clients can poll the `info` endpoint +`key` which is the collection job ID. Once the collection has been created, +information about the collection will be returned to the user via the callback URL. +If a callback URL is not provided, clients can poll the `collectionjob info` endpoint with the `key` to retrieve the same information. diff --git a/backend/app/api/docs/collections/job_info.md b/backend/app/api/docs/collections/job_info.md index fe4ba5837..3cb9c1482 100644 --- a/backend/app/api/docs/collections/job_info.md +++ b/backend/app/api/docs/collections/job_info.md @@ -1,9 +1,9 @@ Retrieve information about a collection job by the collection job ID. 
This endpoint can be considered the polling endpoint for collection creation job. This endpoint provides detailed status and metadata for a specific collection job in the AI platform. It is especially useful for: -* Fetching the collection job object containingg the ID which will be collection job id, job action type, status of the job as well as error message if the job has been failed. +* Fetching the collection job object containing the ID which will be collection job id, collection ID, status of the job as well as error message. -* Accessing associated collection details from the collection table when the job is successful, including: +* If the job has finished and has been successful, this endpoint will fetch the associated collection details from the collection table, including: - `llm_service_id`: The OpenAI assistant or model used for the collection. - Collection metadata such as ID, project, organization, and timestamps. diff --git a/backend/app/api/routes/collections.py b/backend/app/api/routes/collections.py index 6816ac445..c80ee2cce 100644 --- a/backend/app/api/routes/collections.py +++ b/backend/app/api/routes/collections.py @@ -1,10 +1,10 @@ import inspect import logging from uuid import UUID -from typing import List +from typing import List, Union from dataclasses import asdict -from fastapi import APIRouter, BackgroundTasks, Query +from fastapi import APIRouter, Query from fastapi import Path as FastPath @@ -14,17 +14,18 @@ CollectionJobCrud, DocumentCollectionCrud, ) -from app.models import Collection, DocumentPublic +from app.models import DocumentPublic, CollectionJobStatus, CollectionJobPublic from app.models.collection import ( ResponsePayload, CreationRequest, DeletionRequest, + CollectionPublic, ) -from app.utils import APIResponse, load_description, get_openai_client +from app.utils import APIResponse, load_description from app.services.collections.helpers import extract_error_message from app.services.collections import ( - 
create_collection as create_services, - delete_collection as delete_services, + create_collection as create_service, + delete_collection as delete_service, ) @@ -40,16 +41,15 @@ def create_collection( session: SessionDep, current_user: CurrentUserOrgProject, request: CreationRequest, - background_tasks: BackgroundTasks, ): this = inspect.currentframe() route = router.url_path_for(this.f_code.co_name) - payload = ResponsePayload("processing", route) + payload = ResponsePayload(status="processing", route=route) - create_services.start_job( + create_service.start_job( db=session, request=request.model_dump(), - payload=asdict(payload), + payload=payload.model_dump(), collection_job_id=UUID(payload.key), project_id=current_user.project_id, organization_id=current_user.organization_id, @@ -59,7 +59,9 @@ def create_collection( f"[create_collection] Background task for collection creation scheduled | " f"{{'collection_job_id': '{payload.key}'}}" ) - return APIResponse.success_response(data=None, metadata=asdict(payload)) + return APIResponse.success_response( + data=None, metadata=payload.model_dump(mode="json") + ) @router.post( @@ -70,19 +72,18 @@ def delete_collection( session: SessionDep, current_user: CurrentUserOrgProject, request: DeletionRequest, - background_tasks: BackgroundTasks, ): collection_crud = CollectionCrud(session, current_user.project_id) collection = collection_crud.read_one(request.collection_id) this = inspect.currentframe() route = router.url_path_for(this.f_code.co_name) - payload = ResponsePayload("processing", route) + payload = ResponsePayload(status="processing", route=route) - delete_services.start_job( + delete_service.start_job( db=session, request=request.model_dump(), - payload=asdict(payload), + payload=payload.model_dump(), collection=collection, project_id=current_user.project_id, organization_id=current_user.organization_id, @@ -92,13 +93,17 @@ def delete_collection( f"[delete_collection] Background task for deletion scheduled | " 
f"{{'collection_id': '{request.collection_id}'}}" ) - return APIResponse.success_response(data=None, metadata=asdict(payload)) + return APIResponse.success_response( + data=None, metadata=payload.model_dump(mode="json") + ) @router.get( "/info/collection_job/{collection_job_id}", description=load_description("collections/job_info.md"), - response_model=APIResponse, + response_model=Union[ + APIResponse[CollectionPublic], APIResponse[CollectionJobPublic] + ], ) def collection_job_info( session: SessionDep, @@ -108,23 +113,31 @@ def collection_job_info( collection_job_crud = CollectionJobCrud(session, current_user.project_id) collection_job = collection_job_crud.read_one(collection_job_id) - if collection_job.status == "successful": + if collection_job.status == CollectionJobStatus.SUCCESSFUL: collection_crud = CollectionCrud(session, current_user.project_id) collection = collection_crud.read_one(collection_job.collection_id) - return APIResponse.success_response(data=collection) + return APIResponse.success_response( + data=CollectionPublic.model_validate(collection) + ) - if collection_job.status in ["processing", "failed"]: + if collection_job.status == CollectionJobStatus.FAILED: err = getattr(collection_job, "error_message", None) if err: collection_job.error_message = extract_error_message(err) - return APIResponse.success_response(data=collection_job) + return APIResponse.success_response( + data=CollectionJobPublic.model_validate(collection_job) + ) + + return APIResponse.success_response( + data=CollectionJobPublic.model_validate(collection_job) + ) @router.get( "/info/{collection_id}", description=load_description("collections/info.md"), - response_model=APIResponse, + response_model=APIResponse[CollectionPublic], ) def collection_info( session: SessionDep, @@ -137,10 +150,10 @@ def collection_info( return APIResponse.success_response(collection) -@router.post( +@router.get( "/list", description=load_description("collections/list.md"), - 
response_model=APIResponse[List[Collection]], + response_model=APIResponse[List[CollectionPublic]], ) def list_collections( session: SessionDep, @@ -149,10 +162,6 @@ def list_collections( collection_crud = CollectionCrud(session, current_user.project_id) rows = collection_crud.read_all() - for c in rows: - if getattr(c, "error_message", None): - c.error_message = extract_error_message(c.error_message) - return APIResponse.success_response(rows) diff --git a/backend/app/crud/__init__.py b/backend/app/crud/__init__.py index d496645e7..098d93632 100644 --- a/backend/app/crud/__init__.py +++ b/backend/app/crud/__init__.py @@ -5,12 +5,10 @@ update_user, ) from .collection.collection import CollectionCrud - +from .collection.collection_job import CollectionJobCrud from .document import DocumentCrud from .document_collection import DocumentCollectionCrud from .doc_transformation_job import DocTransformationJobCrud -from .collection.collection_job import CollectionJobCrud - from .jobs import JobCrud from .organization import ( diff --git a/backend/app/crud/collection/__init__.py b/backend/app/crud/collection/__init__.py new file mode 100644 index 000000000..7b303a6ce --- /dev/null +++ b/backend/app/crud/collection/__init__.py @@ -0,0 +1,2 @@ +from .collection import CollectionCrud +from .collection_job import CollectionJobCrud diff --git a/backend/app/crud/collection/collection.py b/backend/app/crud/collection/collection.py index ce02744ee..6a78af370 100644 --- a/backend/app/crud/collection/collection.py +++ b/backend/app/crud/collection/collection.py @@ -4,6 +4,7 @@ from typing import Optional import logging +from fastapi import HTTPException from sqlmodel import Session, func, select, and_ from app.models import Document, Collection, DocumentCollection @@ -45,20 +46,22 @@ def _update(self, collection: Collection): return collection - def _exists(self, collection: Collection): - present = ( - self.session.query(func.count(Collection.id)) - .filter( - 
Collection.llm_service_id == collection.llm_service_id, - Collection.llm_service_name == collection.llm_service_name, + def _exists(self, collection: Collection) -> bool: + stmt = ( + select(Collection.id) + .where( + (Collection.llm_service_id == collection.llm_service_id) + & (Collection.llm_service_name == collection.llm_service_name) ) - .scalar() + .limit(1) ) + present = self.session.exec(stmt).first() is not None + logger.info( - f"[CollectionCrud._exists] Existence check completed | {{'llm_service_id': '{collection.llm_service_id}', 'exists': {bool(present)}}}" + "[CollectionCrud._exists] Existence check completed | " + f"{{'llm_service_id': '{collection.llm_service_id}', 'exists': {present}}}" ) - - return bool(present) + return present def create( self, @@ -79,16 +82,30 @@ def create( return collection - def read_one(self, collection_id: UUID): + def read_one(self, collection_id: UUID) -> Collection: statement = select(Collection).where( and_( Collection.project_id == self.project_id, Collection.id == collection_id, + Collection.deleted_at.is_(None), ) ) - collection = self.session.exec(statement).one() + collection = self.session.exec(statement).first() + if collection is None: + logger.error( + "[CollectionCrud.read_one] Collection not found | " + f"{{'project_id': '{self.project_id}', 'collection_id': '{collection_id}'}}" + ) + raise HTTPException( + status_code=404, + detail="Collection not found", + ) + logger.info( + "[CollectionCrud.read_one] Retrieved collection | " + f"{{'project_id': '{self.project_id}', 'collection_id': '{collection_id}'}}" + ) return collection def read_all(self): @@ -135,8 +152,8 @@ def _(self, model: Document, remote): .distinct() ) - for c in self.session.execute(statement): - self.delete(c.Collection, remote) + for coll in self.session.exec(statement): + self.delete(coll, remote) self.session.refresh(model) logger.info( f"[CollectionCrud.delete] Document deletion from collections completed | {{'document_id': 
'{model.id}'}}" diff --git a/backend/app/crud/collection/collection_job.py b/backend/app/crud/collection/collection_job.py index 1a450d7fc..5dabd20ee 100644 --- a/backend/app/crud/collection/collection_job.py +++ b/backend/app/crud/collection/collection_job.py @@ -1,10 +1,17 @@ -from datetime import datetime +from uuid import UUID import logging +from typing import List -from app.models.collection_job import CollectionJob, CollectionJobUpdate +from fastapi import HTTPException +from sqlmodel import Session, select, and_ +from app.models.collection_job import ( + CollectionJob, + CollectionJobUpdate, + CollectionJobCreate, +) +from app.core.util import now -from sqlmodel import Session, func, select, and_ logger = logging.getLogger(__name__) @@ -14,20 +21,49 @@ def __init__(self, session: Session, project_id: int): self.session = session self.project_id = project_id - def _update(self, collection_job: CollectionJobUpdate): + def read_one(self, job_id: UUID) -> CollectionJob: + """Retrieve a single collection job by its id; 404 if not found.""" + statement = select(CollectionJob).where( + and_( + CollectionJob.project_id == self.project_id, + CollectionJob.id == job_id, + ) + ) + collection_job = self.session.exec(statement).first() + if collection_job is None: + logger.error( + "[CollectionJobCrud.read_one] Collection job not found | " + f"{{'project_id': '{self.project_id}', 'job_id': '{job_id}'}}" + ) + raise HTTPException( + status_code=404, + detail="Collection job not found", + ) + + logger.info( + "[CollectionJobCrud.read_one] Retrieved collection job | " + f"{{'job_id': '{job_id}'}}" + ) + return collection_job + + def read_all(self) -> List[CollectionJob]: + """Retrieve all collection jobs for a given project.""" + statement = select(CollectionJob).where( + CollectionJob.project_id == self.project_id + ) + collection_jobs = self.session.exec(statement).all() + logger.info( + f"[CollectionJobCrud.read_all] Retrieved all collection jobs for project | 
{{'project_id': '{self.project_id}', 'count': {len(collection_jobs)}}}" + ) + return collection_jobs + + def update( + self, job_id: UUID, collection_job: CollectionJobUpdate + ) -> CollectionJob: """Update an existing collection job.""" - if collection_job.project_id != self.project_id: - err = f"Invalid collection job ownership: owner_project={self.project_id} attempter={collection_job.project_id}" - try: - raise PermissionError(err) - except PermissionError as e: - logger.error( - f"[CollectionJobCrud._update] Permission error | {{'collection_job_id': '{collection_job.id}', 'error': '{str(e)}'}}", - exc_info=True, - ) - raise + collection_job = self.read_one(job_id) - collection_job.updated_at = datetime.utcnow() + collection_job.updated_at = now() self.session.add(collection_job) self.session.commit() self.session.refresh(collection_job) @@ -37,7 +73,7 @@ def _update(self, collection_job: CollectionJobUpdate): return collection_job - def create(self, collection_job: CollectionJob): + def create(self, collection_job: CollectionJobCreate) -> CollectionJob: """Create a new collection job.""" try: self.session.add(collection_job) @@ -55,33 +91,3 @@ def create(self, collection_job: CollectionJob): raise return collection_job - - def read_one(self, task_id: str) -> CollectionJob: - """Retrieve a single collection job by its task_id.""" - statement = select(CollectionJob).where( - and_( - CollectionJob.project_id == self.project_id, - CollectionJob.id == task_id, - ) - ) - collection_job = self.session.exec(statement).one() - logger.info( - f"[CollectionJobCrud.read_one] Retrieved collection job | {{'task_id': '{task_id}'}}" - ) - return collection_job - - def read_all(self): - """Retrieve all collection jobs for a given project.""" - statement = select(CollectionJob).where( - and_( - CollectionJob.project_id == self.project_id, - CollectionJob.updated_at.isnot( - None - ), # Exclude any jobs that have been deleted - ) - ) - collection_jobs = 
self.session.exec(statement).all() - logger.info( - f"[CollectionJobCrud.read_all] Retrieved all collection jobs for project | {{'project_id': '{self.project_id}', 'count': {len(collection_jobs)}}}" - ) - return collection_jobs diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index dbd14f2d8..c4b69deff 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -1,7 +1,7 @@ from sqlmodel import SQLModel from .auth import Token, TokenPayload -from .collection import Collection +from .collection import Collection, CollectionPublic from .document import ( Document, DocumentPublic, @@ -111,4 +111,6 @@ CollectionJobBase, CollectionJobStatus, CollectionJobUpdate, + CollectionJobPublic, + CollectionJobCreate, ) diff --git a/backend/app/models/collection.py b/backend/app/models/collection.py index 445911274..a47f3aa5a 100644 --- a/backend/app/models/collection.py +++ b/backend/app/models/collection.py @@ -5,7 +5,7 @@ from dataclasses import dataclass, field, fields from sqlmodel import Field, Relationship, SQLModel -from pydantic import HttpUrl, BaseModel +from pydantic import HttpUrl from app.core.util import now from .organization import Organization @@ -39,25 +39,20 @@ class Collection(SQLModel, table=True): project: Project = Relationship(back_populates="collections") -@dataclass -class ResponsePayload: +class ResponsePayload(SQLModel): status: str route: str - key: str = field(default_factory=lambda: str(uuid4())) - time: str = field(default_factory=lambda: now().strftime("%c")) + key: str = Field(default_factory=lambda: str(uuid4())) + time: datetime = Field(default_factory=now) @classmethod def now(cls): - attr = "time" - for i in fields(cls): - if i.name == attr: - return i.default_factory() - - raise AttributeError(f'Expected attribute "{attr}" does not exist') + """Returns current UTC time without timezone info""" + return now() # pydantic models - -class DocumentOptions(BaseModel): +class 
DocumentOptions(SQLModel): documents: List[UUID] = Field( description="List of document IDs", ) @@ -74,7 +69,7 @@ def model_post_init(self, __context: Any): self.documents = list(set(self.documents)) -class AssistantOptions(BaseModel): +class AssistantOptions(SQLModel): # Fields to be passed along to OpenAI. They must be a subset of # parameters accepted by the OpenAI.clien.beta.assistants.create # API. @@ -100,7 +95,7 @@ class AssistantOptions(BaseModel): ) -class CallbackRequest(BaseModel): +class CallbackRequest(SQLModel): callback_url: Optional[HttpUrl] = Field( default=None, description="URL to call to report endpoint status", @@ -120,3 +115,15 @@ def extract_super_type(self, cls: "CreationRequest"): class DeletionRequest(CallbackRequest): collection_id: UUID = Field("Collection to delete") + + +class CollectionPublic(SQLModel): + id: UUID + llm_service_id: str + llm_service_name: str + project_id: int + organization_id: int + + inserted_at: datetime + updated_at: datetime + deleted_at: datetime | None = None diff --git a/backend/app/models/collection_job.py b/backend/app/models/collection_job.py index 0e219e4f3..12e8a14e3 100644 --- a/backend/app/models/collection_job.py +++ b/backend/app/models/collection_job.py @@ -1,24 +1,26 @@ from enum import Enum -from uuid import UUID +from uuid import UUID, uuid4 from datetime import datetime from typing import Optional -from sqlmodel import Field, SQLModel -from sqlalchemy import Column, Text +from sqlmodel import Field, SQLModel, Column, Text + +# from sqlalchemy import Column, Text from app.core.util import now class CollectionJobStatus(str, Enum): - processing = "processing" - successful = "successful" - failed = "failed" + PENDING = "PENDING" + PROCESSING = "PROCESSING" + SUCCESSFUL = "SUCCESSFUL" + FAILED = "FAILED" class CollectionActionType(str, Enum): - create = "create" - delete = "delete" + CREATE = "CREATE" + DELETE = "DELETE" class CollectionJobBase(SQLModel): @@ -38,10 +40,10 @@ class 
CollectionJob(CollectionJobBase, table=True): __tablename__ = "collection_jobs" - id: UUID = Field(primary_key=True) + id: UUID = Field(default_factory=uuid4, primary_key=True) status: CollectionJobStatus = Field( - default=CollectionJobStatus.processing, + default=CollectionJobStatus.PENDING, nullable=False, description="Current job status", ) @@ -62,16 +64,25 @@ class CollectionJob(CollectionJobBase, table=True): ) +class CollectionJobCreate(SQLModel): + id: UUID + collection_id: UUID | None = None + status: CollectionJobStatus + action_type: CollectionActionType + project_id: int + + class CollectionJobUpdate(SQLModel): task_id: UUID | None = None status: CollectionJobStatus error_message: str | None = None collection_id: UUID | None = None - updated_at: datetime | None = None + updated_at: datetime class CollectionJobPublic(SQLModel): + id: UUID collection_id: UUID | None = None status: CollectionJobStatus error_message: str | None = None diff --git a/backend/app/services/collections/create_collection.py b/backend/app/services/collections/create_collection.py index 1737d2508..db776022a 100644 --- a/backend/app/services/collections/create_collection.py +++ b/backend/app/services/collections/create_collection.py @@ -15,7 +15,12 @@ CollectionJobCrud, ) from app.crud.rag import OpenAIVectorStoreCrud, OpenAIAssistantCrud -from app.models import CollectionJobStatus, CollectionJob, Collection +from app.models import ( + CollectionJobStatus, + CollectionJob, + Collection, + CollectionActionType, +) from app.models.collection import ( ResponsePayload, CreationRequest, @@ -45,9 +50,9 @@ def start_job( collection_job = CollectionJob( id=collection_job_id, - action_type="create", + action_type=CollectionActionType.CREATE, project_id=project_id, - status=CollectionJobStatus.processing, + status=CollectionJobStatus.PENDING, ) job_crud = CollectionJobCrud(db, project_id) @@ -93,7 +98,8 @@ def execute_job( collection_job_crud = CollectionJobCrud(session, project_id) 
collection_job = collection_job_crud.read_one(job_id) collection_job.task_id = task_id - collection_job_crud._update(collection_job) + collection_job.status = CollectionJobStatus.PROCESSING + collection_job_crud.update(collection_job.id, collection_job) client = get_openai_client(session, organization_id, project_id) @@ -150,10 +156,10 @@ def execute_job( collection_crud.create(collection_data) - collection_job.status = CollectionJobStatus.successful + collection_job.status = CollectionJobStatus.SUCCESSFUL collection_job.collection_id = collection_id collection_job.updated_at = now() - collection_job_crud._update(collection_job) + collection_job_crud.update(collection_job.id, collection_job) elapsed = time.time() - start_time logger.info( @@ -179,10 +185,10 @@ def execute_job( if "assistant" in locals(): _backout(assistant_crud, assistant.id) - collection_job.status = CollectionJobStatus.failed + collection_job.status = CollectionJobStatus.FAILED collection_job.updated_at = now() collection_job.error_message = str(err) - collection_job_crud._update(collection_job) + collection_job_crud.update(collection_job.id, collection_job) callback.fail(str(err)) @@ -193,7 +199,7 @@ def execute_job( exc_info=True, ) - collection_job.status = CollectionJobStatus.failed + collection_job.status = CollectionJobStatus.FAILED collection_job.updated_at = now() collection_job.error_message = str(err) - collection_job_crud._update(collection_job) + collection_job_crud.update(collection_job.id, collection_job) diff --git a/backend/app/services/collections/delete_collection.py b/backend/app/services/collections/delete_collection.py index d722d97c3..4d35e4d7d 100644 --- a/backend/app/services/collections/delete_collection.py +++ b/backend/app/services/collections/delete_collection.py @@ -8,7 +8,7 @@ from app.core.db import engine from app.crud import CollectionCrud, CollectionJobCrud from app.crud.rag import OpenAIAssistantCrud -from app.models import CollectionJob, CollectionJobStatus 
+from app.models import CollectionJob, CollectionJobStatus, CollectionActionType from app.models.collection import Collection, DeletionRequest from app.services.collections.helpers import ( SilentCallback, @@ -36,10 +36,10 @@ def start_job( collection_job = CollectionJob( id=job_id, - action_type="delete", + action_type=CollectionActionType.DELETE, project_id=project_id, collection_id=collection.id, - status=CollectionJobStatus.processing, + status=CollectionJobStatus.PENDING, ) job_crud = CollectionJobCrud(db, project_id) @@ -97,9 +97,9 @@ def execute_job( try: result = collection_crud.delete(collection, assistant_crud) - collection_job.status = CollectionJobStatus.successful + collection_job.status = CollectionJobStatus.SUCCESSFUL collection_job.error_message = None - collection_job_crud._update(collection_job) + collection_job_crud.update(collection_job.id, collection_job) logger.info( "[delete_collection.execute_job] Collection deleted successfully | {'collection_id': '%s', 'job_id': '%s'}", @@ -109,9 +109,9 @@ def execute_job( callback.success(result.model_dump(mode="json")) except (ValueError, PermissionError, SQLAlchemyError) as err: - collection_job.status = CollectionJobStatus.failed + collection_job.status = CollectionJobStatus.FAILED collection_job.error_message = str(err) - collection_job_crud._update(collection_job) + collection_job_crud.update(collection_job.id, collection_job) logger.error( "[delete_collection.execute_job] Failed to delete collection | {'collection_id': '%s', 'error': '%s', 'job_id': '%s'}", @@ -123,9 +123,9 @@ def execute_job( callback.fail(str(err)) except Exception as err: - collection_job.status = CollectionJobStatus.failed + collection_job.status = CollectionJobStatus.FAILED collection_job.error_message = str(err) - collection_job_crud._update(collection_job) + collection_job_crud.update(collection_job) logger.error( "[delete_collection.execute_job] Unexpected error during deletion | " diff --git 
a/backend/app/tests/api/routes/collections/test_collection_info.py b/backend/app/tests/api/routes/collections/test_collection_info.py index 116f0ebcc..06e5e9bb4 100644 --- a/backend/app/tests/api/routes/collections/test_collection_info.py +++ b/backend/app/tests/api/routes/collections/test_collection_info.py @@ -41,8 +41,8 @@ def create_collection_job( db, user, collection_id: Optional[UUID] = None, - action_type=CollectionActionType.create, - status=CollectionJobStatus.processing, + action_type=CollectionActionType.CREATE, + status=CollectionJobStatus.PENDING, ): collection_job = CollectionJob( id=uuid4(), @@ -54,7 +54,7 @@ def create_collection_job( updated_at=now(), ) - if status == CollectionJobStatus.failed: + if status == CollectionJobStatus.FAILED: collection_job.error_message = ( "Something went wrong during the collection job process." ) @@ -80,9 +80,9 @@ def test_collection_info_processing( assert response.status_code == 200 data = response.json()["data"] - assert data["status"] == CollectionJobStatus.processing.value + assert data["status"] == CollectionJobStatus.PENDING assert data["inserted_at"] is not None - assert data["action_type"] == CollectionActionType.create.value + assert data["collection_id"] == collection_job.collection_id assert data["updated_at"] is not None @@ -93,7 +93,7 @@ def test_collection_info_successful( collection = create_collection(db, user_api_key, with_llm=True) collection_job = create_collection_job( - db, user_api_key, collection.id, status=CollectionJobStatus.successful + db, user_api_key, collection.id, status=CollectionJobStatus.SUCCESSFUL ) response = client.get( @@ -115,7 +115,7 @@ def test_collection_info_failed( headers = user_api_key_header collection_job = create_collection_job( - db, user_api_key, status=CollectionJobStatus.failed + db, user_api_key, status=CollectionJobStatus.FAILED ) response = client.get( @@ -126,5 +126,5 @@ def test_collection_info_failed( assert response.status_code == 200 data = 
response.json()["data"] - assert data["status"] == CollectionJobStatus.failed.value + assert data["status"] == CollectionJobStatus.FAILED assert data["error_message"] is not None diff --git a/backend/app/tests/api/routes/collections/test_create_collections.py b/backend/app/tests/api/routes/collections/test_create_collections.py index 7b4ff9d39..7cd7c4a53 100644 --- a/backend/app/tests/api/routes/collections/test_create_collections.py +++ b/backend/app/tests/api/routes/collections/test_create_collections.py @@ -10,9 +10,7 @@ def test_collection_creation_success( client: TestClient, user_api_key_header: dict[str, str] ): - with patch( - "app.api.routes.collections.create_services.start_job" - ) as mock_job_start: + with patch("app.api.routes.collections.create_service.start_job") as mock_job_start: creation_data = CreationRequest( model="gpt-4o", instructions="string", diff --git a/backend/app/tests/crud/collections/collection/test_crud_collection_read_one.py b/backend/app/tests/crud/collections/collection/test_crud_collection_read_one.py index 63829eb70..acf7d39ad 100644 --- a/backend/app/tests/crud/collections/collection/test_crud_collection_read_one.py +++ b/backend/app/tests/crud/collections/collection/test_crud_collection_read_one.py @@ -1,8 +1,9 @@ import pytest + from openai import OpenAI from openai_responses import OpenAIMock +from fastapi import HTTPException from sqlmodel import Session -from sqlalchemy.exc import NoResultFound from app.crud import CollectionCrud from app.core.config import settings @@ -36,5 +37,7 @@ def test_cannot_select_others_collections(self, db: Session): collection = mk_collection(db) other = collection.project_id + 1 crud = CollectionCrud(db, other) - with pytest.raises(NoResultFound): + with pytest.raises(HTTPException) as excinfo: crud.read_one(collection.id) + assert excinfo.value.status_code == 404 + assert excinfo.value.detail == "Collection not found" diff --git a/backend/app/tests/crud/collections/test_collection_jobs.py 
b/backend/app/tests/crud/collections/test_collection_jobs.py index fedd26463..03daf74ee 100644 --- a/backend/app/tests/crud/collections/test_collection_jobs.py +++ b/backend/app/tests/crud/collections/test_collection_jobs.py @@ -13,8 +13,8 @@ def create_sample_collection_job( db, project_id, - action_type=CollectionActionType.create, - status=CollectionJobStatus.processing, + action_type=CollectionActionType.CREATE, + status=CollectionJobStatus.PENDING, ): collection_job = CollectionJob( id=uuid4(), @@ -42,8 +42,8 @@ def test_create_collection_job(db: Session, sample_project): collection_job = CollectionJob( id=uuid4(), project_id=sample_project.id, - action_type=CollectionActionType.create, - status=CollectionJobStatus.processing, + action_type=CollectionActionType.CREATE, + status=CollectionJobStatus.PENDING, inserted_at=now(), updated_at=now(), ) @@ -55,8 +55,8 @@ def test_create_collection_job(db: Session, sample_project): assert created_job.id is not None assert created_job.project_id == sample_project.id - assert created_job.action_type == CollectionActionType.create - assert created_job.status == CollectionJobStatus.processing + assert created_job.action_type == CollectionActionType.CREATE + assert created_job.status == CollectionJobStatus.PENDING assert created_job.inserted_at is not None assert created_job.updated_at is not None @@ -99,39 +99,24 @@ def test_update_collection_job(db: Session, sample_project): collection_job_crud = CollectionJobCrud(db, sample_project.id) - collection_job.status = CollectionJobStatus.failed + collection_job.status = CollectionJobStatus.FAILED collection_job.error_message = "model name not valid" collection_job.updated_at = now() - updated_job = collection_job_crud._update(collection_job) + updated_job = collection_job_crud.update(collection_job.id, collection_job) - assert updated_job.status == CollectionJobStatus.failed + assert updated_job.status == CollectionJobStatus.FAILED assert updated_job.error_message is not None 
assert updated_job.updated_at is not None -def test_update_invalid_project_permission(db: Session, sample_project): - """Test case to check permission error during update.""" - collection_job = create_sample_collection_job(db, sample_project.id) - - collection_job_crud = CollectionJobCrud(db, sample_project.id) - - collection_job.status = CollectionJobStatus.successful - collection_job.updated_at = now() - - collection_job.project_id = 999 - - with pytest.raises(PermissionError): - collection_job_crud._update(collection_job) - - def test_create_collection_job_with_invalid_data(db: Session, sample_project): """Test case to handle invalid data during job creation.""" collection_job = CollectionJob( id=uuid4(), project_id=sample_project.id, action_type=None, - status=CollectionJobStatus.processing, + status=CollectionJobStatus.PENDING, inserted_at=now(), updated_at=now(), ) diff --git a/backend/app/tests/services/collections/test_create_collection.py b/backend/app/tests/services/collections/test_create_collection.py index 5cac4eed9..228c21644 100644 --- a/backend/app/tests/services/collections/test_create_collection.py +++ b/backend/app/tests/services/collections/test_create_collection.py @@ -14,7 +14,7 @@ from app.core.cloud import AmazonCloudStorageClient from app.core.config import settings from app.crud import CollectionCrud, CollectionJobCrud, DocumentCollectionCrud -from app.models import CollectionJobStatus, CollectionJob +from app.models import CollectionJobStatus, CollectionJob, CollectionActionType from app.models.collection import CreationRequest, ResponsePayload from app.services.collections.create_collection import start_job, execute_job from app.tests.utils.openai import get_mock_openai_client_with_vector_store @@ -69,8 +69,8 @@ def test_start_job_creates_collection_job_and_schedules_task(db: Session): job = CollectionJobCrud(db, project.id).read_one(job_id) assert job.id == job_id assert job.project_id == project.id - assert job.status == 
CollectionJobStatus.processing - assert job.action_type == "create" + assert job.status == CollectionJobStatus.PENDING + assert job.action_type == CollectionActionType.CREATE.value assert job.collection_id is None mock_schedule.assert_called_once() @@ -130,8 +130,8 @@ def test_execute_job_success_flow_updates_job_and_creates_collection( CollectionJob( id=job_id, project_id=project.id, - status=CollectionJobStatus.processing, - action_type="create", + status=CollectionJobStatus.PENDING, + action_type=CollectionActionType.CREATE.value, ) ) @@ -143,7 +143,7 @@ def test_execute_job_success_flow_updates_job_and_creates_collection( execute_job( request=sample_request.model_dump(), - payload_data=asdict(sample_payload), + payload_data=sample_payload.model_dump(), project_id=project.id, organization_id=project.organization_id, task_id=task_id, @@ -153,7 +153,7 @@ def test_execute_job_success_flow_updates_job_and_creates_collection( updated_job = CollectionJobCrud(db, project.id).read_one(job_id) assert updated_job.task_id == task_id - assert updated_job.status == CollectionJobStatus.successful + assert updated_job.status == CollectionJobStatus.SUCCESSFUL assert updated_job.collection_id is not None created_collection = CollectionCrud(db, project.id).read_one( diff --git a/backend/app/tests/services/collections/test_delete_collection.py b/backend/app/tests/services/collections/test_delete_collection.py index 473bf957f..baaf67e52 100644 --- a/backend/app/tests/services/collections/test_delete_collection.py +++ b/backend/app/tests/services/collections/test_delete_collection.py @@ -14,7 +14,7 @@ from app.tests.utils.utils import get_project from app.crud import CollectionCrud from app.crud import CollectionCrud, CollectionJobCrud -from app.models import CollectionJobStatus, CollectionJob +from app.models import CollectionJobStatus, CollectionJob, CollectionActionType from app.tests.utils.utils import get_project from app.services.collections.delete_collection import start_job, 
execute_job @@ -36,10 +36,10 @@ def create_collection_job(db: Session, project, collection): job = job_crud.create( CollectionJob( id=job_id, - action_type="delete", + action_type=CollectionActionType.DELETE, project_id=project.id, collection_id=collection.id, - status=CollectionJobStatus.processing, + status=CollectionJobStatus.PENDING, ) ) return job @@ -79,8 +79,8 @@ def test_start_job_creates_collection_job_and_schedules_task(db: Session): job = jobs[0] assert job.project_id == project.id assert job.collection_id == created_collection.id - assert job.status == CollectionJobStatus.processing - assert job.action_type == "delete" + assert job.status == CollectionJobStatus.PENDING + assert job.action_type == CollectionActionType.DELETE mock_schedule.assert_called_once() kwargs = mock_schedule.call_args.kwargs @@ -140,7 +140,7 @@ def test_execute_job_delete_success_updates_job_and_calls_delete( execute_job( request=req.model_dump(), - payload_data=asdict(payload), + payload_data=payload.model_dump(), project_id=project.id, organization_id=project.organization_id, task_id=task_id, @@ -151,7 +151,7 @@ def test_execute_job_delete_success_updates_job_and_calls_delete( updated_job = CollectionJobCrud(db, project.id).read_one(job.id) assert updated_job.task_id == task_id - assert updated_job.status == CollectionJobStatus.successful + assert updated_job.status == CollectionJobStatus.SUCCESSFUL assert updated_job.error_message in (None, "") MockCollectionCrud.assert_called_with(db, project.id) @@ -199,7 +199,7 @@ def test_execute_job_delete_failure_marks_job_failed( execute_job( request=req.model_dump(), - payload_data=asdict(payload), + payload_data=payload.model_dump(), project_id=project.id, organization_id=project.organization_id, task_id=task_id, @@ -210,7 +210,7 @@ def test_execute_job_delete_failure_marks_job_failed( failed_job = CollectionJobCrud(db, project.id).read_one(job.id) assert failed_job.task_id == task_id - assert failed_job.status == 
CollectionJobStatus.failed + assert failed_job.status == CollectionJobStatus.FAILED assert failed_job.error_message and "boom" in failed_job.error_message MockAssistantCrud.assert_called_once() From d741ef6b3517020fb6cc53767b11121df359e4a7 Mon Sep 17 00:00:00 2001 From: nishika26 Date: Mon, 6 Oct 2025 13:56:09 +0530 Subject: [PATCH 34/44] removing unused imports --- backend/app/api/routes/collections.py | 1 - backend/app/crud/collection/collection.py | 2 +- backend/app/models/collection.py | 3 --- backend/app/models/collection_job.py | 4 ---- 4 files changed, 1 insertion(+), 9 deletions(-) diff --git a/backend/app/api/routes/collections.py b/backend/app/api/routes/collections.py index c80ee2cce..58d7cd904 100644 --- a/backend/app/api/routes/collections.py +++ b/backend/app/api/routes/collections.py @@ -2,7 +2,6 @@ import logging from uuid import UUID from typing import List, Union -from dataclasses import asdict from fastapi import APIRouter, Query from fastapi import Path as FastPath diff --git a/backend/app/crud/collection/collection.py b/backend/app/crud/collection/collection.py index 6a78af370..3937ec7ef 100644 --- a/backend/app/crud/collection/collection.py +++ b/backend/app/crud/collection/collection.py @@ -5,7 +5,7 @@ import logging from fastapi import HTTPException -from sqlmodel import Session, func, select, and_ +from sqlmodel import Session, select, and_ from app.models import Document, Collection, DocumentCollection from app.core.util import now diff --git a/backend/app/models/collection.py b/backend/app/models/collection.py index a47f3aa5a..35c85b96a 100644 --- a/backend/app/models/collection.py +++ b/backend/app/models/collection.py @@ -1,8 +1,6 @@ -import enum from uuid import UUID, uuid4 from datetime import datetime from typing import Any, List, Optional -from dataclasses import dataclass, field, fields from sqlmodel import Field, Relationship, SQLModel from pydantic import HttpUrl @@ -10,7 +8,6 @@ from app.core.util import now from .organization 
import Organization from .project import Project -from app.core.util import now class Collection(SQLModel, table=True): diff --git a/backend/app/models/collection_job.py b/backend/app/models/collection_job.py index 12e8a14e3..1889c2cde 100644 --- a/backend/app/models/collection_job.py +++ b/backend/app/models/collection_job.py @@ -1,13 +1,9 @@ from enum import Enum from uuid import UUID, uuid4 from datetime import datetime -from typing import Optional from sqlmodel import Field, SQLModel, Column, Text -# from sqlalchemy import Column, Text - - from app.core.util import now From 8e8fe7db2d58aa6de4c1ac9111307782bcb6dae1 Mon Sep 17 00:00:00 2001 From: nishika26 Date: Mon, 6 Oct 2025 14:23:28 +0530 Subject: [PATCH 35/44] migration for removing failed collection columns --- ...uccessful_columns_from_collection_table.py | 30 ++++++ ...on_job_table_and_alter_collection_table.py | 94 +++++++++++++++++++ 2 files changed, 124 insertions(+) create mode 100644 backend/app/alembic/versions/7ab577d3af26_delete_non_successful_columns_from_collection_table.py create mode 100644 backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py diff --git a/backend/app/alembic/versions/7ab577d3af26_delete_non_successful_columns_from_collection_table.py b/backend/app/alembic/versions/7ab577d3af26_delete_non_successful_columns_from_collection_table.py new file mode 100644 index 000000000..e6f624e15 --- /dev/null +++ b/backend/app/alembic/versions/7ab577d3af26_delete_non_successful_columns_from_collection_table.py @@ -0,0 +1,30 @@ +"""delete processing and failed columns from collection table + +Revision ID: 7ab577d3af26 +Revises: b30727137e65 +Create Date: 2025-10-06 13:59:28.561706 + +""" +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes + + +# revision identifiers, used by Alembic. 
+revision = "7ab577d3af26" +down_revision = "c6fb6d0b5897" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + DELETE FROM collection + WHERE status IN ('processing', 'failed') + """ + ) + + +def downgrade(): + pass diff --git a/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py b/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py new file mode 100644 index 000000000..a6f3d2983 --- /dev/null +++ b/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py @@ -0,0 +1,94 @@ +"""adding collection job table and altering collections table + +Revision ID: b30727137e65 +Revises: c6fb6d0b5897 +Create Date: 2025-10-05 14:19:14.213933 + +""" +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "b30727137e65" +down_revision = "7ab577d3af26" +branch_labels = None +depends_on = None + +collection_job_status_enum = postgresql.ENUM( + "PENDING", + "PROCESSING", + "SUCCESSFUL", + "FAILED", + name="collectionjobstatus", + create_type=False, +) + +collection_action_type = postgresql.ENUM( + "CREATE", + "DELETE", + name="collectionactiontype", + create_type=False, +) + + +def upgrade(): + collection_job_status_enum.create(op.get_bind(), checkfirst=True) + collection_action_type.create(op.get_bind(), checkfirst=True) + op.create_table( + "collection_jobs", + sa.Column("action_type", collection_action_type, nullable=False), + sa.Column("collection_id", sa.Uuid(), nullable=True), + sa.Column("project_id", sa.Integer(), nullable=False), + sa.Column("id", sa.Uuid(), nullable=False), + sa.Column("status", collection_job_status_enum, nullable=False), + sa.Column("task_id", sa.Uuid(), nullable=True), + sa.Column("error_message", sa.Text(), nullable=True), + sa.Column("inserted_at", sa.DateTime(), 
nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint( + ["collection_id"], ["collection.id"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + + op.alter_column("collection", "created_at", new_column_name="inserted_at") + op.drop_constraint("collection_owner_id_fkey", "collection", type_="foreignkey") + op.drop_column("collection", "owner_id") + op.drop_column("collection", "status") + op.drop_column("collection", "error_message") + + +def downgrade(): + op.add_column( + "collection", + sa.Column("error_message", sa.VARCHAR(), autoincrement=False, nullable=True), + ) + op.add_column( + "collection", + sa.Column( + "status", + postgresql.ENUM( + "processing", "successful", "failed", name="collectionstatus" + ), + server_default=sa.text("'processing'::collectionstatus"), + autoincrement=False, + nullable=False, + ), + ) + op.add_column( + "collection", + sa.Column("owner_id", sa.INTEGER(), autoincrement=False, nullable=False), + ) + op.create_foreign_key( + "collection_owner_id_fkey", + "collection", + "user", + ["owner_id"], + ["id"], + ondelete="CASCADE", + ) + op.alter_column("collection", "inserted_at", new_column_name="created_at") + op.drop_table("collection_jobs") From 5b6f2f2d9ff880176d29b1d3aea575294917cded Mon Sep 17 00:00:00 2001 From: nishika26 Date: Mon, 6 Oct 2025 14:24:43 +0530 Subject: [PATCH 36/44] migration for removing failed collection columns --- ...137e65_adding_collection_job_table_and_.py | 100 ------------------ 1 file changed, 100 deletions(-) delete mode 100644 backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_.py diff --git a/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_.py b/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_.py deleted file mode 100644 index fed80f483..000000000 --- 
a/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_.py +++ /dev/null @@ -1,100 +0,0 @@ -"""adding collection job table and altering collections table - -Revision ID: b30727137e65 -Revises: c6fb6d0b5897 -Create Date: 2025-10-05 14:19:14.213933 - -""" -from alembic import op -import sqlalchemy as sa -import sqlmodel.sql.sqltypes -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = "b30727137e65" -down_revision = "c6fb6d0b5897" -branch_labels = None -depends_on = None - -collection_job_status_enum = postgresql.ENUM( - "PENDING", - "PROCESSING", - "SUCCESSFUL", - "FAILED", - name="collectionjobstatus", - create_type=False, -) - -collection_action_type = postgresql.ENUM( - "CREATE", - "DELETE", - name="collectionactiontype", - create_type=False, -) - - -def upgrade(): - collection_job_status_enum.create(op.get_bind(), checkfirst=True) - collection_action_type.create(op.get_bind(), checkfirst=True) - op.create_table( - "collection_jobs", - sa.Column("action_type", collection_action_type, nullable=False), - sa.Column("collection_id", sa.Uuid(), nullable=True), - sa.Column("project_id", sa.Integer(), nullable=False), - sa.Column("id", sa.Uuid(), nullable=False), - sa.Column("status", collection_job_status_enum, nullable=False), - sa.Column("task_id", sa.Uuid(), nullable=True), - sa.Column("error_message", sa.Text(), nullable=True), - sa.Column("inserted_at", sa.DateTime(), nullable=False), - sa.Column("updated_at", sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint( - ["collection_id"], ["collection.id"], ondelete="CASCADE" - ), - sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"), - sa.PrimaryKeyConstraint("id"), - ) - op.add_column("collection", sa.Column("inserted_at", sa.DateTime(), nullable=False)) - op.drop_constraint("collection_owner_id_fkey", "collection", type_="foreignkey") - op.drop_column("collection", "owner_id") - op.drop_column("collection", "created_at") 
- op.drop_column("collection", "status") - op.drop_column("collection", "error_message") - - -def downgrade(): - op.add_column( - "collection", - sa.Column("error_message", sa.VARCHAR(), autoincrement=False, nullable=True), - ) - op.add_column( - "collection", - sa.Column( - "status", - postgresql.ENUM( - "processing", "successful", "failed", name="collectionstatus" - ), - server_default=sa.text("'processing'::collectionstatus"), - autoincrement=False, - nullable=False, - ), - ) - op.add_column( - "collection", - sa.Column( - "created_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=False - ), - ) - op.add_column( - "collection", - sa.Column("owner_id", sa.INTEGER(), autoincrement=False, nullable=False), - ) - op.create_foreign_key( - "collection_owner_id_fkey", - "collection", - "user", - ["owner_id"], - ["id"], - ondelete="CASCADE", - ) - op.drop_column("collection", "inserted_at") - op.drop_table("collection_jobs") From 4a1f5ff22484397e7d1461c0413e011d2d4b64e1 Mon Sep 17 00:00:00 2001 From: nishika26 Date: Tue, 7 Oct 2025 14:34:31 +0530 Subject: [PATCH 37/44] coderabbit and pr review fixes --- ...uccessful_columns_from_collection_table.py | 2 +- ...on_job_table_and_alter_collection_table.py | 22 ++++--- backend/app/api/docs/collections/create.md | 2 +- backend/app/api/main.py | 2 + backend/app/api/routes/collection_job.py | 57 +++++++++++++++++++ backend/app/api/routes/collections.py | 39 +------------ backend/app/crud/collection/collection.py | 3 +- backend/app/models/collection.py | 7 ++- backend/app/models/collection_job.py | 2 +- .../services/collections/create_collection.py | 10 ++-- .../services/collections/delete_collection.py | 11 ++-- .../collections/test_create_collections.py | 2 +- .../test_collection_jobs.py | 0 .../collections/test_create_collection.py | 15 ++--- .../collections/test_delete_collection.py | 15 ++--- 15 files changed, 111 insertions(+), 78 deletions(-) create mode 100644 backend/app/api/routes/collection_job.py rename 
backend/app/tests/crud/collections/{ => collection_job}/test_collection_jobs.py (100%) diff --git a/backend/app/alembic/versions/7ab577d3af26_delete_non_successful_columns_from_collection_table.py b/backend/app/alembic/versions/7ab577d3af26_delete_non_successful_columns_from_collection_table.py index e6f624e15..df1020d01 100644 --- a/backend/app/alembic/versions/7ab577d3af26_delete_non_successful_columns_from_collection_table.py +++ b/backend/app/alembic/versions/7ab577d3af26_delete_non_successful_columns_from_collection_table.py @@ -1,7 +1,7 @@ """delete processing and failed columns from collection table Revision ID: 7ab577d3af26 -Revises: b30727137e65 +Revises: c6fb6d0b5897 Create Date: 2025-10-06 13:59:28.561706 """ diff --git a/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py b/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py index a6f3d2983..4a4f11c7e 100644 --- a/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py +++ b/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py @@ -1,7 +1,7 @@ """adding collection job table and altering collections table Revision ID: b30727137e65 -Revises: c6fb6d0b5897 +Revises: 7ab577d3af26 Create Date: 2025-10-05 14:19:14.213933 """ @@ -43,7 +43,7 @@ def upgrade(): sa.Column("project_id", sa.Integer(), nullable=False), sa.Column("id", sa.Uuid(), nullable=False), sa.Column("status", collection_job_status_enum, nullable=False), - sa.Column("task_id", sa.Uuid(), nullable=True), + sa.Column("task_id", sa.String(), nullable=True), sa.Column("error_message", sa.Text(), nullable=True), sa.Column("inserted_at", sa.DateTime(), nullable=False), sa.Column("updated_at", sa.DateTime(), nullable=False), @@ -66,22 +66,26 @@ def downgrade(): "collection", sa.Column("error_message", sa.VARCHAR(), autoincrement=False, nullable=True), ) + 
collectionstatus = postgresql.ENUM( + "processing", "successful", "failed", name="collectionstatus" + ) + op.add_column( "collection", sa.Column( "status", - postgresql.ENUM( - "processing", "successful", "failed", name="collectionstatus" - ), + collectionstatus, server_default=sa.text("'processing'::collectionstatus"), - autoincrement=False, - nullable=False, + nullable=True, ), ) op.add_column( "collection", - sa.Column("owner_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("owner_id", sa.Integer(), nullable=True), ) + + op.execute("UPDATE collection SET status = 'processing' WHERE status IS NULL") + op.execute("UPDATE collection SET owner_id = 1 WHERE owner_id IS NULL") op.create_foreign_key( "collection_owner_id_fkey", "collection", @@ -90,5 +94,7 @@ def downgrade(): ["id"], ondelete="CASCADE", ) + op.alter_column("collection", "status", nullable=False) + op.alter_column("collection", "owner_id", nullable=False) op.alter_column("collection", "inserted_at", new_column_name="created_at") op.drop_table("collection_jobs") diff --git a/backend/app/api/docs/collections/create.md b/backend/app/api/docs/collections/create.md index f35139479..ebf5dc28a 100644 --- a/backend/app/api/docs/collections/create.md +++ b/backend/app/api/docs/collections/create.md @@ -22,5 +22,5 @@ accepted. This is especially true for PDFs that may not be parseable. The immediate response from the endpoint is a packet containing a `key` which is the collection job ID. Once the collection has been created, information about the collection will be returned to the user via the callback URL. -If a callback URL is not provided, clients can poll the `collectionjob info` endpoint +If a callback URL is not provided, clients can poll the `collection job info` endpoint with the `key` to retrieve the same information. 
diff --git a/backend/app/api/main.py b/backend/app/api/main.py index 3b7f34617..b617e4b30 100644 --- a/backend/app/api/main.py +++ b/backend/app/api/main.py @@ -20,6 +20,7 @@ credentials, fine_tuning, model_evaluation, + collection_job, ) from app.core.config import settings @@ -27,6 +28,7 @@ api_router.include_router(api_keys.router) api_router.include_router(assistants.router) api_router.include_router(collections.router) +api_router.include_router(collection_job.router) api_router.include_router(credentials.router) api_router.include_router(documents.router) api_router.include_router(doc_transformation_job.router) diff --git a/backend/app/api/routes/collection_job.py b/backend/app/api/routes/collection_job.py new file mode 100644 index 000000000..e3e893ec4 --- /dev/null +++ b/backend/app/api/routes/collection_job.py @@ -0,0 +1,57 @@ +import logging +from uuid import UUID +from typing import Union + +from fastapi import APIRouter +from fastapi import Path as FastPath + + +from app.api.deps import SessionDep, CurrentUserOrgProject +from app.crud import ( + CollectionCrud, + CollectionJobCrud, +) +from app.models import CollectionJobStatus, CollectionJobPublic +from app.models.collection import CollectionPublic +from app.utils import APIResponse, load_description +from app.services.collections.helpers import extract_error_message + + +logger = logging.getLogger(__name__) +router = APIRouter(prefix="/collections", tags=["collections"]) + + +@router.get( + "/info/collection_job/{collection_job_id}", + description=load_description("collections/job_info.md"), + response_model=Union[ + APIResponse[CollectionPublic], APIResponse[CollectionJobPublic] + ], +) +def collection_job_info( + session: SessionDep, + current_user: CurrentUserOrgProject, + collection_job_id: UUID = FastPath(description="Collection job to retrieve"), +): + collection_job_crud = CollectionJobCrud(session, current_user.project_id) + collection_job = collection_job_crud.read_one(collection_job_id) + + 
if collection_job.status == CollectionJobStatus.SUCCESSFUL: + collection_crud = CollectionCrud(session, current_user.project_id) + collection = collection_crud.read_one(collection_job.collection_id) + return APIResponse.success_response( + data=CollectionPublic.model_validate(collection) + ) + + if collection_job.status == CollectionJobStatus.FAILED: + err = getattr(collection_job, "error_message", None) + if err: + collection_job.error_message = extract_error_message(err) + + return APIResponse.success_response( + data=CollectionJobPublic.model_validate(collection_job) + ) + + return APIResponse.success_response( + data=CollectionJobPublic.model_validate(collection_job) + ) diff --git a/backend/app/api/routes/collections.py b/backend/app/api/routes/collections.py index 58d7cd904..72f9ed08a 100644 --- a/backend/app/api/routes/collections.py +++ b/backend/app/api/routes/collections.py @@ -49,7 +49,7 @@ def create_collection( db=session, request=request.model_dump(), payload=payload.model_dump(), - collection_job_id=UUID(payload.key), + collection_job_id=payload.key, project_id=current_user.project_id, organization_id=current_user.organization_id, ) @@ -84,6 +84,7 @@ def delete_collection( request=request.model_dump(), payload=payload.model_dump(), collection=collection, + collection_job_id=payload.key, project_id=current_user.project_id, organization_id=current_user.organization_id, ) @@ -97,42 +98,6 @@ def delete_collection( ) -@router.get( - "/info/collection_job/{collection_job_id}", - description=load_description("collections/job_info.md"), - response_model=Union[ - APIResponse[CollectionPublic], APIResponse[CollectionJobPublic] - ], -) -def collection_job_info( - session: SessionDep, - current_user: CurrentUserOrgProject, - collection_job_id: UUID = FastPath(description="Collection job to retrieve"), -): - collection_job_crud = CollectionJobCrud(session, current_user.project_id) - collection_job = collection_job_crud.read_one(collection_job_id) - - if 
collection_job.status == CollectionJobStatus.SUCCESSFUL: - collection_crud = CollectionCrud(session, current_user.project_id) - collection = collection_crud.read_one(collection_job.collection_id) - return APIResponse.success_response( - data=CollectionPublic.model_validate(collection) - ) - - if collection_job.status == CollectionJobStatus.FAILED: - err = getattr(collection_job, "error_message", None) - if err: - collection_job.error_message = extract_error_message(err) - - return APIResponse.success_response( - data=CollectionJobPublic.model_validate(collection_job) - ) - - return APIResponse.success_response( - data=CollectionJobPublic.model_validate(collection_job) - ) - - @router.get( "/info/{collection_id}", description=load_description("collections/info.md"), diff --git a/backend/app/crud/collection/collection.py b/backend/app/crud/collection/collection.py index 3937ec7ef..5bc809085 100644 --- a/backend/app/crud/collection/collection.py +++ b/backend/app/crud/collection/collection.py @@ -50,7 +50,8 @@ def _exists(self, collection: Collection) -> bool: stmt = ( select(Collection.id) .where( - (Collection.llm_service_id == collection.llm_service_id) + (Collection.project_id == self.project_id) + & (Collection.llm_service_id == collection.llm_service_id) & (Collection.llm_service_name == collection.llm_service_name) ) .limit(1) diff --git a/backend/app/models/collection.py b/backend/app/models/collection.py index 35c85b96a..4883deedb 100644 --- a/backend/app/models/collection.py +++ b/backend/app/models/collection.py @@ -37,6 +37,9 @@ class Collection(SQLModel, table=True): class ResponsePayload(SQLModel): + """Response metadata for background jobs—gives status, route, a UUID key, + and creation time.""" + status: str route: str key: str = Field(default_factory=lambda: str(uuid4())) @@ -73,7 +76,7 @@ class AssistantOptions(SQLModel): model: str = Field( description=( "OpenAI model to attach to this assistant. 
The model " - "must compatable with the assistants API; see the " + "must be compatable with the assistants API; see the " "OpenAI [model documentation](https://platform.openai.com/docs/models/compare) for more." ), ) @@ -111,7 +114,7 @@ def extract_super_type(self, cls: "CreationRequest"): class DeletionRequest(CallbackRequest): - collection_id: UUID = Field("Collection to delete") + collection_id: UUID = Field(description="Collection to delete") class CollectionPublic(SQLModel): diff --git a/backend/app/models/collection_job.py b/backend/app/models/collection_job.py index 1889c2cde..642bad63f 100644 --- a/backend/app/models/collection_job.py +++ b/backend/app/models/collection_job.py @@ -44,7 +44,7 @@ class CollectionJob(CollectionJobBase, table=True): description="Current job status", ) - task_id: UUID = Field(nullable=True) + task_id: str = Field(nullable=True) error_message: str | None = Field(sa_column=Column(Text, nullable=True)) inserted_at: datetime = Field( diff --git a/backend/app/services/collections/create_collection.py b/backend/app/services/collections/create_collection.py index db776022a..708ca9492 100644 --- a/backend/app/services/collections/create_collection.py +++ b/backend/app/services/collections/create_collection.py @@ -49,7 +49,7 @@ def start_job( trace_id = correlation_id.get() or "N/A" collection_job = CollectionJob( - id=collection_job_id, + id=UUID(collection_job_id), action_type=CollectionActionType.CREATE, project_id=project_id, status=CollectionJobStatus.PENDING, @@ -61,7 +61,7 @@ def start_job( task_id = start_low_priority_job( function_path="app.services.collections.create_collection.execute_job", project_id=project_id, - job_id=collection_job.id, + job_id=collection_job_id, trace_id=trace_id, request=request, payload_data=payload, @@ -73,7 +73,7 @@ def start_job( f"collection_job_id={collection_job_id}, project_id={project_id}, task_id={task_id}" ) - return collection_job.id + return collection_job_id def execute_job( @@ -82,7 
+82,7 @@ def execute_job( project_id: int, organization_id: int, task_id: str, - job_id: UUID, + job_id: str, task_instance, ) -> None: """ @@ -95,6 +95,8 @@ def execute_job( creation_request = CreationRequest(**request) payload = ResponsePayload(**payload_data) + job_id = UUID(job_id) + collection_job_crud = CollectionJobCrud(session, project_id) collection_job = collection_job_crud.read_one(job_id) collection_job.task_id = task_id diff --git a/backend/app/services/collections/delete_collection.py b/backend/app/services/collections/delete_collection.py index 4d35e4d7d..8e295ebaa 100644 --- a/backend/app/services/collections/delete_collection.py +++ b/backend/app/services/collections/delete_collection.py @@ -27,15 +27,14 @@ def start_job( request: dict, collection: Collection, project_id: int, + collection_job_id: str, payload: dict, organization_id: int, ) -> UUID: trace_id = correlation_id.get() or "N/A" - job_id = uuid4() - collection_job = CollectionJob( - id=job_id, + id=UUID(collection_job_id), action_type=CollectionActionType.DELETE, project_id=project_id, collection_id=collection.id, @@ -48,7 +47,7 @@ def start_job( task_id = start_low_priority_job( function_path="app.services.collections.delete_collection.execute_job", project_id=project_id, - job_id=job_id, + job_id=collection_job_id, collection_id=collection.id, trace_id=trace_id, request=request, @@ -58,9 +57,9 @@ def start_job( logger.info( "[delete_collection.start_job] Job scheduled to delete collection | " - f"Job_id={job_id}, project_id={project_id}, task_id={task_id}, collection_id={collection.id}" + f"Job_id={collection_job_id}, project_id={project_id}, task_id={task_id}, collection_id={collection.id}" ) - return collection.id + return collection_job_id def execute_job( diff --git a/backend/app/tests/api/routes/collections/test_create_collections.py b/backend/app/tests/api/routes/collections/test_create_collections.py index 7cd7c4a53..6fe087136 100644 --- 
a/backend/app/tests/api/routes/collections/test_create_collections.py +++ b/backend/app/tests/api/routes/collections/test_create_collections.py @@ -35,7 +35,7 @@ def test_collection_creation_success( assert body["metadata"]["status"] == "processing" assert body["metadata"]["route"] == "/collections/create" assert body["metadata"]["key"] is not None - job_key = UUID(body["metadata"]["key"]) + job_key = body["metadata"]["key"] mock_job_start.assert_called_once() kwargs = mock_job_start.call_args.kwargs diff --git a/backend/app/tests/crud/collections/test_collection_jobs.py b/backend/app/tests/crud/collections/collection_job/test_collection_jobs.py similarity index 100% rename from backend/app/tests/crud/collections/test_collection_jobs.py rename to backend/app/tests/crud/collections/collection_job/test_collection_jobs.py diff --git a/backend/app/tests/services/collections/test_create_collection.py b/backend/app/tests/services/collections/test_create_collection.py index 228c21644..a19769432 100644 --- a/backend/app/tests/services/collections/test_create_collection.py +++ b/backend/app/tests/services/collections/test_create_collection.py @@ -1,13 +1,10 @@ -# tests/services/collections/test_create_collection_jobs.py - import os -from dataclasses import asdict +import pytest from pathlib import Path from unittest.mock import patch from urllib.parse import urlparse from uuid import UUID, uuid4 -import pytest from moto import mock_aws from sqlmodel import Session @@ -60,11 +57,11 @@ def test_start_job_creates_collection_job_and_schedules_task(db: Session): request=request.model_dump(), project_id=project.id, payload=payload, - collection_job_id=job_id, + collection_job_id=str(job_id), organization_id=project.organization_id, ) - assert returned_job_id == job_id + assert returned_job_id == str(job_id) job = CollectionJobCrud(db, project.id).read_one(job_id) assert job.id == job_id @@ -81,7 +78,7 @@ def test_start_job_creates_collection_job_and_schedules_task(db: Session): ) 
assert kwargs["project_id"] == project.id assert kwargs["organization_id"] == project.organization_id - assert kwargs["job_id"] == job_id + assert kwargs["job_id"] == str(job_id) assert kwargs["request"] == request.model_dump() assert kwargs["payload_data"] == payload @@ -147,12 +144,12 @@ def test_execute_job_success_flow_updates_job_and_creates_collection( project_id=project.id, organization_id=project.organization_id, task_id=task_id, - job_id=job_id, + job_id=str(job_id), task_instance=None, ) updated_job = CollectionJobCrud(db, project.id).read_one(job_id) - assert updated_job.task_id == task_id + assert updated_job.task_id == str(task_id) assert updated_job.status == CollectionJobStatus.SUCCESSFUL assert updated_job.collection_id is not None diff --git a/backend/app/tests/services/collections/test_delete_collection.py b/backend/app/tests/services/collections/test_delete_collection.py index baaf67e52..40f23bf90 100644 --- a/backend/app/tests/services/collections/test_delete_collection.py +++ b/backend/app/tests/services/collections/test_delete_collection.py @@ -12,10 +12,8 @@ ResponsePayload, ) from app.tests.utils.utils import get_project -from app.crud import CollectionCrud from app.crud import CollectionCrud, CollectionJobCrud from app.models import CollectionJobStatus, CollectionJob, CollectionActionType -from app.tests.utils.utils import get_project from app.services.collections.delete_collection import start_job, execute_job @@ -63,16 +61,19 @@ def test_start_job_creates_collection_job_and_schedules_task(db: Session): ) as mock_schedule: mock_schedule.return_value = "fake-task-id" + collection_job_id = str(uuid4()) + returned = start_job( db=db, request=req.model_dump(), collection=created_collection, project_id=project.id, + collection_job_id=collection_job_id, payload=payload, organization_id=project.organization_id, ) - assert returned == created_collection.id + assert returned == collection_job_id jobs = CollectionJobCrud(db, project.id).read_all() 
assert len(jobs) == 1 @@ -90,7 +91,7 @@ def test_start_job_creates_collection_job_and_schedules_task(db: Session): ) assert kwargs["project_id"] == project.id assert kwargs["organization_id"] == project.organization_id - assert kwargs["job_id"] == job.id + assert kwargs["job_id"] == str(job.id) assert kwargs["collection_id"] == created_collection.id assert kwargs["request"] == req.model_dump() assert kwargs["payload_data"] == payload @@ -144,13 +145,13 @@ def test_execute_job_delete_success_updates_job_and_calls_delete( project_id=project.id, organization_id=project.organization_id, task_id=task_id, - job_id=job.id, + job_id=str(job.id), collection_id=collection.id, task_instance=None, ) updated_job = CollectionJobCrud(db, project.id).read_one(job.id) - assert updated_job.task_id == task_id + assert updated_job.task_id == str(task_id) assert updated_job.status == CollectionJobStatus.SUCCESSFUL assert updated_job.error_message in (None, "") @@ -209,7 +210,7 @@ def test_execute_job_delete_failure_marks_job_failed( ) failed_job = CollectionJobCrud(db, project.id).read_one(job.id) - assert failed_job.task_id == task_id + assert failed_job.task_id == str(task_id) assert failed_job.status == CollectionJobStatus.FAILED assert failed_job.error_message and "boom" in failed_job.error_message From 14d50a81d1c897b5c00336221557108295c731a8 Mon Sep 17 00:00:00 2001 From: nishika26 Date: Tue, 7 Oct 2025 15:02:35 +0530 Subject: [PATCH 38/44] minor changes --- backend/app/crud/collection/collection_job.py | 28 +++++++++++-------- backend/app/models/collection_job.py | 2 -- .../services/collections/create_collection.py | 5 +--- .../services/collections/delete_collection.py | 2 +- .../collection_job/test_collection_jobs.py | 2 -- 5 files changed, 18 insertions(+), 21 deletions(-) diff --git a/backend/app/crud/collection/collection_job.py b/backend/app/crud/collection/collection_job.py index 5dabd20ee..a557b8b2a 100644 --- a/backend/app/crud/collection/collection_job.py +++ 
b/backend/app/crud/collection/collection_job.py @@ -57,21 +57,25 @@ def read_all(self) -> List[CollectionJob]: ) return collection_jobs - def update( - self, job_id: UUID, collection_job: CollectionJobUpdate - ) -> CollectionJob: - """Update an existing collection job.""" - collection_job = self.read_one(job_id) - - collection_job.updated_at = now() - self.session.add(collection_job) + def update(self, job_id: UUID, patch: CollectionJobUpdate) -> CollectionJob: + """Update an existing collection job and return the updated row.""" + job = self.read_one(job_id) + + changes = patch.model_dump(exclude_unset=True, exclude_none=True) + for field, value in changes.items(): + setattr(job, field, value) + + job.updated_at = now() + + self.session.add(job) self.session.commit() - self.session.refresh(collection_job) + self.session.refresh(job) + logger.info( - f"[CollectionJobCrud._update] Collection job updated successfully | {{'collection_job_id': '{collection_job.id}'}}" + "[CollectionJobCrud.update] Collection job updated successfully | {'collection_job_id': '%s'}", + job.id, ) - - return collection_job + return job def create(self, collection_job: CollectionJobCreate) -> CollectionJob: """Create a new collection job.""" diff --git a/backend/app/models/collection_job.py b/backend/app/models/collection_job.py index 642bad63f..f93b0abe5 100644 --- a/backend/app/models/collection_job.py +++ b/backend/app/models/collection_job.py @@ -74,8 +74,6 @@ class CollectionJobUpdate(SQLModel): error_message: str | None = None collection_id: UUID | None = None - updated_at: datetime - class CollectionJobPublic(SQLModel): id: UUID diff --git a/backend/app/services/collections/create_collection.py b/backend/app/services/collections/create_collection.py index 708ca9492..2720c7ed8 100644 --- a/backend/app/services/collections/create_collection.py +++ b/backend/app/services/collections/create_collection.py @@ -45,7 +45,7 @@ def start_job( payload: dict, collection_job_id: str, 
organization_id: int, -) -> UUID: +) -> str: trace_id = correlation_id.get() or "N/A" collection_job = CollectionJob( @@ -160,7 +160,6 @@ def execute_job( collection_job.status = CollectionJobStatus.SUCCESSFUL collection_job.collection_id = collection_id - collection_job.updated_at = now() collection_job_crud.update(collection_job.id, collection_job) elapsed = time.time() - start_time @@ -188,7 +187,6 @@ def execute_job( _backout(assistant_crud, assistant.id) collection_job.status = CollectionJobStatus.FAILED - collection_job.updated_at = now() collection_job.error_message = str(err) collection_job_crud.update(collection_job.id, collection_job) @@ -202,6 +200,5 @@ def execute_job( ) collection_job.status = CollectionJobStatus.FAILED - collection_job.updated_at = now() collection_job.error_message = str(err) collection_job_crud.update(collection_job.id, collection_job) diff --git a/backend/app/services/collections/delete_collection.py b/backend/app/services/collections/delete_collection.py index 8e295ebaa..af88c4166 100644 --- a/backend/app/services/collections/delete_collection.py +++ b/backend/app/services/collections/delete_collection.py @@ -30,7 +30,7 @@ def start_job( collection_job_id: str, payload: dict, organization_id: int, -) -> UUID: +) -> str: trace_id = correlation_id.get() or "N/A" collection_job = CollectionJob( diff --git a/backend/app/tests/crud/collections/collection_job/test_collection_jobs.py b/backend/app/tests/crud/collections/collection_job/test_collection_jobs.py index 03daf74ee..733df1a82 100644 --- a/backend/app/tests/crud/collections/collection_job/test_collection_jobs.py +++ b/backend/app/tests/crud/collections/collection_job/test_collection_jobs.py @@ -47,8 +47,6 @@ def test_create_collection_job(db: Session, sample_project): inserted_at=now(), updated_at=now(), ) - collection_job = create_sample_collection_job(db, sample_project.id) - collection_job_crud = CollectionJobCrud(db, sample_project.id) created_job = 
collection_job_crud.create(collection_job) From 1dcab67b8fff54529d16db7f368cb917516aaaa2 Mon Sep 17 00:00:00 2001 From: nishika26 Date: Tue, 7 Oct 2025 15:12:47 +0530 Subject: [PATCH 39/44] minor fixes --- backend/app/models/collection_job.py | 2 +- backend/app/services/collections/create_collection.py | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/backend/app/models/collection_job.py b/backend/app/models/collection_job.py index f93b0abe5..0cfe524ec 100644 --- a/backend/app/models/collection_job.py +++ b/backend/app/models/collection_job.py @@ -69,7 +69,7 @@ class CollectionJobCreate(SQLModel): class CollectionJobUpdate(SQLModel): - task_id: UUID | None = None + task_id: str | None = None status: CollectionJobStatus error_message: str | None = None collection_id: UUID | None = None diff --git a/backend/app/services/collections/create_collection.py b/backend/app/services/collections/create_collection.py index 2720c7ed8..9a8a7325a 100644 --- a/backend/app/services/collections/create_collection.py +++ b/backend/app/services/collections/create_collection.py @@ -156,8 +156,6 @@ def execute_job( if flat_docs: DocumentCollectionCrud(session).create(collection_data, flat_docs) - collection_crud.create(collection_data) - collection_job.status = CollectionJobStatus.SUCCESSFUL collection_job.collection_id = collection_id collection_job_crud.update(collection_job.id, collection_job) From 7b09e3fdcb9683f70849675ac8d7047317c786e9 Mon Sep 17 00:00:00 2001 From: nishika26 Date: Wed, 8 Oct 2025 15:16:10 +0530 Subject: [PATCH 40/44] pr reviews --- ...on_job_table_and_alter_collection_table.py | 1 + backend/app/api/docs/collections/create.md | 11 +- backend/app/api/docs/collections/delete.md | 6 +- backend/app/api/routes/collection_job.py | 37 ++--- backend/app/api/routes/collections.py | 64 +++++---- backend/app/crud/collection/collection.py | 62 ++++----- backend/app/crud/collection/collection_job.py | 11 +- backend/app/models/__init__.py | 129 
+++++++++--------- backend/app/models/collection_job.py | 12 +- .../services/collections/create_collection.py | 29 ++-- .../services/collections/delete_collection.py | 33 ++--- backend/app/services/collections/helpers.py | 9 +- .../collections/test_collection_info.py | 66 +++++---- .../collections/test_create_collections.py | 26 ++-- .../collections/test_create_collection.py | 37 ++++- .../collections/test_delete_collection.py | 47 ++++--- 16 files changed, 316 insertions(+), 264 deletions(-) diff --git a/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py b/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py index 4a4f11c7e..038c0e04c 100644 --- a/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py +++ b/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py @@ -44,6 +44,7 @@ def upgrade(): sa.Column("id", sa.Uuid(), nullable=False), sa.Column("status", collection_job_status_enum, nullable=False), sa.Column("task_id", sa.String(), nullable=True), + sa.Column("trace_id", sa.String(), nullable=True), sa.Column("error_message", sa.Text(), nullable=True), sa.Column("inserted_at", sa.DateTime(), nullable=False), sa.Column("updated_at", sa.DateTime(), nullable=False), diff --git a/backend/app/api/docs/collections/create.md b/backend/app/api/docs/collections/create.md index ebf5dc28a..7ab900a85 100644 --- a/backend/app/api/docs/collections/create.md +++ b/backend/app/api/docs/collections/create.md @@ -19,8 +19,9 @@ OpenAI. Failure can occur from OpenAI being down, or some parameter value being invalid. It can also fail due to document types not be accepted. This is especially true for PDFs that may not be parseable. -The immediate response from the endpoint is a packet containing a -`key` which is the collection job ID. 
Once the collection has been created,
-information about the collection will be returned to the user via the callback URL.
-If a callback URL is not provided, clients can poll the `collection job info` endpoint
-with the `key` to retrieve the same information.
+The immediate response from the endpoint is a `collection_job` object which
+contains the collection job ID, status and action type ("CREATE").
+Once the collection has been created, information about the collection will
+be returned to the user via the callback URL. If a callback URL is not provided,
+clients can poll the `collection job info` endpoint with the `id` in the
+`collection_job` object (which is the collection job ID) to retrieve the same information.
diff --git a/backend/app/api/docs/collections/delete.md b/backend/app/api/docs/collections/delete.md
index 2a4e782ea..d25ea6059 100644
--- a/backend/app/api/docs/collections/delete.md
+++ b/backend/app/api/docs/collections/delete.md
@@ -6,4 +6,8 @@ Remove a collection from the platform. This is a two step process:
 
 No action is taken on the documents themselves: the contents of the
 documents that were a part of the collection remain unchanged, those
-documents can still be accessed via the documents endpoints.
+documents can still be accessed via the documents endpoints. The response from this
+endpoint will be a `collection_job` object which will contain the collection job ID,
+status and action type ("DELETE"). When you use the ID returned with the collection job
+info endpoint, a successful job will report the status as successful and no collection
+will be returned, as it has been deleted and marked as deleted.
diff --git a/backend/app/api/routes/collection_job.py b/backend/app/api/routes/collection_job.py index e3e893ec4..ce0c65263 100644 --- a/backend/app/api/routes/collection_job.py +++ b/backend/app/api/routes/collection_job.py @@ -1,6 +1,5 @@ import logging from uuid import UUID -from typing import Union from fastapi import APIRouter from fastapi import Path as FastPath @@ -11,7 +10,7 @@ CollectionCrud, CollectionJobCrud, ) -from app.models import CollectionJobStatus, CollectionJobPublic +from app.models import CollectionJobStatus, CollectionJobPublic, CollectionActionType from app.models.collection import CollectionPublic from app.utils import APIResponse, load_description from app.services.collections.helpers import extract_error_message @@ -24,9 +23,7 @@ @router.get( "/info/collection_job/{collection_job_id}", description=load_description("collections/job_info.md"), - response_model=Union[ - APIResponse[CollectionPublic], APIResponse[CollectionJobPublic] - ], + response_model=APIResponse[CollectionJobPublic], ) def collection_job_info( session: SessionDep, @@ -36,22 +33,18 @@ def collection_job_info( collection_job_crud = CollectionJobCrud(session, current_user.project_id) collection_job = collection_job_crud.read_one(collection_job_id) - if collection_job.status == CollectionJobStatus.SUCCESSFUL: + job_out = CollectionJobPublic.model_validate(collection_job) + + if ( + collection_job.status == CollectionJobStatus.SUCCESSFUL + and collection_job.action_type == CollectionActionType.CREATE + and collection_job.collection_id + ): collection_crud = CollectionCrud(session, current_user.project_id) collection = collection_crud.read_one(collection_job.collection_id) - return APIResponse.success_response( - data=CollectionPublic.model_validate(collection) - ) - - if collection_job.status == CollectionJobStatus.FAILED: - err = getattr(collection_job, "error_message", None) - if err: - collection_job.error_message = extract_error_message(err) - - return 
APIResponse.success_response( - data=CollectionJobPublic.model_validate(collection_job) - ) - - return APIResponse.success_response( - data=CollectionJobPublic.model_validate(collection_job) - ) + job_out.collection = CollectionPublic.model_validate(collection) + + if collection_job.status == CollectionJobStatus.FAILED and job_out.error_message: + job_out.error_message = extract_error_message(job_out.error_message) + + return APIResponse.success_response(data=job_out) diff --git a/backend/app/api/routes/collections.py b/backend/app/api/routes/collections.py index 72f9ed08a..c6210bebc 100644 --- a/backend/app/api/routes/collections.py +++ b/backend/app/api/routes/collections.py @@ -1,7 +1,7 @@ import inspect import logging from uuid import UUID -from typing import List, Union +from typing import List from fastapi import APIRouter, Query from fastapi import Path as FastPath @@ -13,7 +13,12 @@ CollectionJobCrud, DocumentCollectionCrud, ) -from app.models import DocumentPublic, CollectionJobStatus, CollectionJobPublic +from app.models import ( + DocumentPublic, + CollectionJobStatus, + CollectionActionType, + CollectionJobCreate, +) from app.models.collection import ( ResponsePayload, CreationRequest, @@ -41,26 +46,31 @@ def create_collection( current_user: CurrentUserOrgProject, request: CreationRequest, ): + collection_job_crud = CollectionJobCrud(session, current_user.project_id) + collection_job = collection_job_crud.create( + CollectionJobCreate( + action_type=CollectionActionType.CREATE, + project_id=current_user.project_id, + status=CollectionJobStatus.PENDING, + ) + ) + this = inspect.currentframe() route = router.url_path_for(this.f_code.co_name) - payload = ResponsePayload(status="processing", route=route) + payload = ResponsePayload( + status="processing", route=route, key=str(collection_job.id) + ) create_service.start_job( db=session, - request=request.model_dump(), - payload=payload.model_dump(), - collection_job_id=payload.key, + request=request, + 
payload=payload, + collection_job_id=collection_job.id, project_id=current_user.project_id, organization_id=current_user.organization_id, ) - logger.info( - f"[create_collection] Background task for collection creation scheduled | " - f"{{'collection_job_id': '{payload.key}'}}" - ) - return APIResponse.success_response( - data=None, metadata=payload.model_dump(mode="json") - ) + return APIResponse.success_response(collection_job) @router.post( @@ -75,27 +85,33 @@ def delete_collection( collection_crud = CollectionCrud(session, current_user.project_id) collection = collection_crud.read_one(request.collection_id) + collection_job_crud = CollectionJobCrud(session, current_user.project_id) + collection_job = collection_job_crud.create( + CollectionJobCreate( + action_type=CollectionActionType.DELETE, + project_id=current_user.project_id, + status=CollectionJobStatus.PENDING, + collection_id=collection.id, + ) + ) + this = inspect.currentframe() route = router.url_path_for(this.f_code.co_name) - payload = ResponsePayload(status="processing", route=route) + payload = ResponsePayload( + status="processing", route=route, key=str(collection_job.id) + ) delete_service.start_job( db=session, - request=request.model_dump(), - payload=payload.model_dump(), + request=request, + payload=payload, collection=collection, - collection_job_id=payload.key, + collection_job_id=collection_job.id, project_id=current_user.project_id, organization_id=current_user.organization_id, ) - logger.info( - f"[delete_collection] Background task for deletion scheduled | " - f"{{'collection_id': '{request.collection_id}'}}" - ) - return APIResponse.success_response( - data=None, metadata=payload.model_dump(mode="json") - ) + return APIResponse.success_response(collection_job) @router.get( diff --git a/backend/app/crud/collection/collection.py b/backend/app/crud/collection/collection.py index 5bc809085..d218ef2a9 100644 --- a/backend/app/crud/collection/collection.py +++ 
b/backend/app/crud/collection/collection.py @@ -9,8 +9,7 @@ from app.models import Document, Collection, DocumentCollection from app.core.util import now - -from ..document_collection import DocumentCollectionCrud +from app.crud.document_collection import DocumentCollectionCrud logger = logging.getLogger(__name__) @@ -24,18 +23,15 @@ def _update(self, collection: Collection): if not collection.project_id: collection.project_id = self.project_id elif collection.project_id != self.project_id: - err = "Invalid collection ownership: owner_project={} attempter={}".format( - self.project_id, - collection.project_id, + err = ( + f"Invalid collection ownership: owner_project={self.project_id} " + f"attempter={collection.project_id}" ) - try: - raise PermissionError(err) - except PermissionError as e: - logger.error( - f"[CollectionCrud._update] Permission error | {{'collection_id': '{collection.id}', 'error': '{str(e)}'}}", - exc_info=True, - ) - raise + logger.error( + "[CollectionCrud._update] Permission error | " + f"{{'collection_id': '{collection.id}', 'error': '{err}'}}" + ) + raise PermissionError(err) self.session.add(collection) self.session.commit() @@ -47,21 +43,13 @@ def _update(self, collection: Collection): return collection def _exists(self, collection: Collection) -> bool: - stmt = ( - select(Collection.id) - .where( - (Collection.project_id == self.project_id) - & (Collection.llm_service_id == collection.llm_service_id) - & (Collection.llm_service_name == collection.llm_service_name) - ) - .limit(1) + stmt = select(Collection.id).where( + (Collection.project_id == self.project_id) + & (Collection.llm_service_id == collection.llm_service_id) + & (Collection.llm_service_name == collection.llm_service_name) ) - present = self.session.exec(stmt).first() is not None + present = self.session.exec(stmt).scalar_one_or_none() is not None - logger.info( - "[CollectionCrud._exists] Existence check completed | " - f"{{'llm_service_id': '{collection.llm_service_id}', 
'exists': {present}}}" - ) return present def create( @@ -71,11 +59,19 @@ def create( ): try: existing = self.read_one(collection.id) - - raise FileExistsError("Collection already present") - except: - self.session.add(collection) - self.session.commit() + except HTTPException as e: + if e.status_code == 404: + self.session.add(collection) + self.session.commit() + self.session.refresh(collection) + else: + raise + else: + logger.warning( + "[CollectionCrud.create] Collection already present | " + f"{{'collection_id': '{collection.id}'}}" + ) + return existing if documents: dc_crud = DocumentCollectionCrud(self.session) @@ -92,9 +88,9 @@ def read_one(self, collection_id: UUID) -> Collection: ) ) - collection = self.session.exec(statement).first() + collection = self.session.exec(statement).one_or_none() if collection is None: - logger.error( + logger.warning( "[CollectionCrud.read_one] Collection not found | " f"{{'project_id': '{self.project_id}', 'collection_id': '{collection_id}'}}" ) diff --git a/backend/app/crud/collection/collection_job.py b/backend/app/crud/collection/collection_job.py index a557b8b2a..fcf9b5603 100644 --- a/backend/app/crud/collection/collection_job.py +++ b/backend/app/crud/collection/collection_job.py @@ -24,14 +24,12 @@ def __init__(self, session: Session, project_id: int): def read_one(self, job_id: UUID) -> CollectionJob: """Retrieve a single collection job by its id; 404 if not found.""" statement = select(CollectionJob).where( - and_( - CollectionJob.project_id == self.project_id, - CollectionJob.id == job_id, - ) + CollectionJob.project_id == self.project_id, + CollectionJob.id == job_id, ) - collection_job = self.session.exec(statement).first() + collection_job = self.session.exec(statement).one_or_none() if collection_job is None: - logger.error( + logger.warning( "[CollectionJobCrud.read_one] Collection job not found | " f"{{'project_id': '{self.project_id}', 'job_id': '{job_id}'}}" ) @@ -80,6 +78,7 @@ def update(self, job_id: 
UUID, patch: CollectionJobUpdate) -> CollectionJob: def create(self, collection_job: CollectionJobCreate) -> CollectionJob: """Create a new collection job.""" try: + collection_job = CollectionJob(**collection_job.model_dump()) self.session.add(collection_job) self.session.commit() self.session.refresh(collection_job) diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index c4b69deff..537532d08 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -1,7 +1,27 @@ from sqlmodel import SQLModel from .auth import Token, TokenPayload +from .api_key import APIKey, APIKeyBase, APIKeyPublic +from .assistants import Assistant, AssistantBase, AssistantCreate, AssistantUpdate + from .collection import Collection, CollectionPublic +from .collection_job import ( + CollectionActionType, + CollectionJob, + CollectionJobBase, + CollectionJobStatus, + CollectionJobUpdate, + CollectionJobPublic, + CollectionJobCreate, +) +from .credentials import ( + Credential, + CredsBase, + CredsCreate, + CredsPublic, + CredsUpdate, +) + from .document import ( Document, DocumentPublic, @@ -15,16 +35,48 @@ ) from .document_collection import DocumentCollection +from .fine_tuning import ( + FineTuningJobBase, + Fine_Tuning, + FineTuningJobCreate, + FineTuningJobPublic, + FineTuningUpdate, + FineTuningStatus, +) + from .job import Job, JobType, JobStatus, JobUpdate from .message import Message +from .model_evaluation import ( + ModelEvaluation, + ModelEvaluationBase, + ModelEvaluationCreate, + ModelEvaluationPublic, + ModelEvaluationStatus, + ModelEvaluationUpdate, +) + + +from .onboarding import OnboardingRequest, OnboardingResponse +from .openai_conversation import ( + OpenAIConversationPublic, + OpenAIConversation, + OpenAIConversationBase, + OpenAIConversationCreate, +) +from .organization import ( + Organization, + OrganizationCreate, + OrganizationPublic, + OrganizationsPublic, + OrganizationUpdate, +) from .project_user import ( 
ProjectUser, ProjectUserPublic, ProjectUsersPublic, ) - from .project import ( Project, ProjectCreate, @@ -33,16 +85,17 @@ ProjectUpdate, ) -from .api_key import APIKey, APIKeyBase, APIKeyPublic - -from .organization import ( - Organization, - OrganizationCreate, - OrganizationPublic, - OrganizationsPublic, - OrganizationUpdate, +from .response import ( + CallbackResponse, + Diagnostics, + FileResultChunk, + ResponsesAPIRequest, + ResponseJobStatus, + ResponsesSyncAPIRequest, ) +from .threads import OpenAI_Thread, OpenAIThreadBase, OpenAIThreadCreate + from .user import ( NewPassword, User, @@ -56,61 +109,3 @@ UsersPublic, UpdatePassword, ) - -from .credentials import ( - Credential, - CredsBase, - CredsCreate, - CredsPublic, - CredsUpdate, -) - -from .threads import OpenAI_Thread, OpenAIThreadBase, OpenAIThreadCreate - -from .assistants import Assistant, AssistantBase, AssistantCreate, AssistantUpdate - -from .fine_tuning import ( - FineTuningJobBase, - Fine_Tuning, - FineTuningJobCreate, - FineTuningJobPublic, - FineTuningUpdate, - FineTuningStatus, -) - -from .openai_conversation import ( - OpenAIConversationPublic, - OpenAIConversation, - OpenAIConversationBase, - OpenAIConversationCreate, -) - -from .model_evaluation import ( - ModelEvaluation, - ModelEvaluationBase, - ModelEvaluationCreate, - ModelEvaluationPublic, - ModelEvaluationStatus, - ModelEvaluationUpdate, -) - -from .response import ( - CallbackResponse, - Diagnostics, - FileResultChunk, - ResponsesAPIRequest, - ResponseJobStatus, - ResponsesSyncAPIRequest, -) - -from .onboarding import OnboardingRequest, OnboardingResponse - -from .collection_job import ( - CollectionActionType, - CollectionJob, - CollectionJobBase, - CollectionJobStatus, - CollectionJobUpdate, - CollectionJobPublic, - CollectionJobCreate, -) diff --git a/backend/app/models/collection_job.py b/backend/app/models/collection_job.py index 0cfe524ec..af7eda6eb 100644 --- a/backend/app/models/collection_job.py +++ 
b/backend/app/models/collection_job.py @@ -5,6 +5,7 @@ from sqlmodel import Field, SQLModel, Column, Text from app.core.util import now +from app.models.collection import CollectionPublic class CollectionJobStatus(str, Enum): @@ -45,6 +46,9 @@ class CollectionJob(CollectionJobBase, table=True): ) task_id: str = Field(nullable=True) + trace_id: str | None = Field( + default=None, description="Tracing ID for correlating logs and traces." + ) error_message: str | None = Field(sa_column=Column(Text, nullable=True)) inserted_at: datetime = Field( @@ -61,7 +65,6 @@ class CollectionJob(CollectionJobBase, table=True): class CollectionJobCreate(SQLModel): - id: UUID collection_id: UUID | None = None status: CollectionJobStatus action_type: CollectionActionType @@ -70,16 +73,19 @@ class CollectionJobCreate(SQLModel): class CollectionJobUpdate(SQLModel): task_id: str | None = None - status: CollectionJobStatus + status: CollectionJobStatus | None = None error_message: str | None = None collection_id: UUID | None = None + trace_id: str | None = None class CollectionJobPublic(SQLModel): id: UUID + action_type: CollectionActionType collection_id: UUID | None = None status: CollectionJobStatus error_message: str | None = None - inserted_at: datetime updated_at: datetime + + collection: CollectionPublic | None = None diff --git a/backend/app/services/collections/create_collection.py b/backend/app/services/collections/create_collection.py index 9a8a7325a..1157e3ed2 100644 --- a/backend/app/services/collections/create_collection.py +++ b/backend/app/services/collections/create_collection.py @@ -19,7 +19,7 @@ CollectionJobStatus, CollectionJob, Collection, - CollectionActionType, + CollectionJobUpdate, ) from app.models.collection import ( ResponsePayload, @@ -40,31 +40,26 @@ def start_job( db: Session, - request: dict, + request: CreationRequest, + payload: ResponsePayload, project_id: int, - payload: dict, - collection_job_id: str, + collection_job_id: UUID, organization_id: int, ) 
-> str: trace_id = correlation_id.get() or "N/A" - collection_job = CollectionJob( - id=UUID(collection_job_id), - action_type=CollectionActionType.CREATE, - project_id=project_id, - status=CollectionJobStatus.PENDING, - ) - job_crud = CollectionJobCrud(db, project_id) - collection_job = job_crud.create(collection_job) + collection_job = job_crud.update( + collection_job_id, CollectionJobUpdate(trace_id=trace_id) + ) task_id = start_low_priority_job( function_path="app.services.collections.create_collection.execute_job", project_id=project_id, - job_id=collection_job_id, + job_id=str(collection_job_id), + payload=payload.model_dump(), trace_id=trace_id, - request=request, - payload_data=payload, + request=request.model_dump(), organization_id=organization_id, ) @@ -78,9 +73,9 @@ def start_job( def execute_job( request: dict, - payload_data: dict, project_id: int, organization_id: int, + payload: dict, task_id: str, job_id: str, task_instance, @@ -93,7 +88,7 @@ def execute_job( try: with Session(engine) as session: creation_request = CreationRequest(**request) - payload = ResponsePayload(**payload_data) + payload = ResponsePayload(**payload) job_id = UUID(job_id) diff --git a/backend/app/services/collections/delete_collection.py b/backend/app/services/collections/delete_collection.py index af88c4166..19e1c7394 100644 --- a/backend/app/services/collections/delete_collection.py +++ b/backend/app/services/collections/delete_collection.py @@ -1,5 +1,5 @@ import logging -from uuid import UUID, uuid4 +from uuid import UUID from sqlmodel import Session from asgi_correlation_id import correlation_id @@ -8,7 +8,7 @@ from app.core.db import engine from app.crud import CollectionCrud, CollectionJobCrud from app.crud.rag import OpenAIAssistantCrud -from app.models import CollectionJob, CollectionJobStatus, CollectionActionType +from app.models import CollectionJobStatus, CollectionJobUpdate from app.models.collection import Collection, DeletionRequest from 
app.services.collections.helpers import ( SilentCallback, @@ -24,34 +24,27 @@ def start_job( db: Session, - request: dict, + request: DeletionRequest, collection: Collection, project_id: int, - collection_job_id: str, - payload: dict, + collection_job_id: UUID, + payload: ResponsePayload, organization_id: int, ) -> str: trace_id = correlation_id.get() or "N/A" - - collection_job = CollectionJob( - id=UUID(collection_job_id), - action_type=CollectionActionType.DELETE, - project_id=project_id, - collection_id=collection.id, - status=CollectionJobStatus.PENDING, - ) - job_crud = CollectionJobCrud(db, project_id) - collection_job = job_crud.create(collection_job) + collection_job = job_crud.update( + collection_job_id, CollectionJobUpdate(trace_id=trace_id) + ) task_id = start_low_priority_job( function_path="app.services.collections.delete_collection.execute_job", project_id=project_id, - job_id=collection_job_id, + job_id=str(collection_job_id), collection_id=collection.id, trace_id=trace_id, - request=request, - payload_data=payload, + request=request.model_dump(), + payload=payload.model_dump(), organization_id=organization_id, ) @@ -64,7 +57,7 @@ def start_job( def execute_job( request: dict, - payload_data: dict, + payload: dict, project_id: int, organization_id: int, task_id: str, @@ -73,7 +66,7 @@ def execute_job( task_instance, ) -> None: deletion_request = DeletionRequest(**request) - payload = ResponsePayload(**payload_data) + payload = ResponsePayload(**payload) callback = ( SilentCallback(payload) diff --git a/backend/app/services/collections/helpers.py b/backend/app/services/collections/helpers.py index d6ee102ca..158994c69 100644 --- a/backend/app/services/collections/helpers.py +++ b/backend/app/services/collections/helpers.py @@ -19,8 +19,10 @@ logger = logging.getLogger(__name__) -# function to extract cleaned up error message from the error body for the user - def extract_error_message(err: Exception) -> str: + """Extract a concise, user-facing 
message from an exception, preferring `error.message` + in JSON/dict bodies after stripping prefixes.Falls back to cleaned text and truncates to + 1000 characters.""" err_str = str(err).strip() body = re.sub(r"^Error code:\s*\d+\s*-\s*", "", err_str) @@ -46,10 +48,12 @@ def extract_error_message(err: Exception) -> str: return message.strip()[:1000] -# batching the documents according to the given batch size def batch_documents( document_crud: DocumentCrud, documents: List[UUID], batch_size: int ): + """Batch document IDs into chunks of size `batch_size`, load each via `DocumentCrud.read_each`, + and return a list of document batches.""" + logger.info( f"[batch_documents] Starting batch iteration for documents | {{'batch_size': {batch_size}, 'total_documents': {len(documents)}}}" ) @@ -115,6 +119,7 @@ def success(self, body): def _backout(crud: OpenAIAssistantCrud, assistant_id: str): + """Best-effort cleanup: attempt to delete the assistant by ID""" try: crud.delete(assistant_id) except OpenAIError as err: diff --git a/backend/app/tests/api/routes/collections/test_collection_info.py b/backend/app/tests/api/routes/collections/test_collection_info.py index 06e5e9bb4..7dc9b3f37 100644 --- a/backend/app/tests/api/routes/collections/test_collection_info.py +++ b/backend/app/tests/api/routes/collections/test_collection_info.py @@ -8,44 +8,48 @@ from app.core.util import now from app.models import ( Collection, - CollectionJob, + CollectionJobCreate, CollectionActionType, CollectionJobStatus, + CollectionJobUpdate, ) from app.crud import CollectionJobCrud, CollectionCrud def create_collection( - db, + db: Session, user, with_llm: bool = False, ): + """Create a Collection row (optionally prefilled with LLM service fields).""" + llm_service_id = None + llm_service_name = None + if with_llm: + llm_service_id = f"asst_{uuid4()}" + llm_service_name = "gpt-4o" + collection = Collection( id=uuid4(), organization_id=user.organization_id, project_id=user.project_id, + 
llm_service_id=llm_service_id, + llm_service_name=llm_service_name, inserted_at=now(), updated_at=now(), ) - if with_llm: - collection.llm_service_id = f"asst_{uuid4()}" - collection.llm_service_name = "gpt-4o" - - collection_crud = CollectionCrud(db, user.project_id) - collection = collection_crud.create(collection) - return collection + return CollectionCrud(db, user.project_id).create(collection) def create_collection_job( - db, + db: Session, user, collection_id: Optional[UUID] = None, - action_type=CollectionActionType.CREATE, - status=CollectionJobStatus.PENDING, + action_type: CollectionActionType = CollectionActionType.CREATE, + status: CollectionJobStatus = CollectionJobStatus.PENDING, ): - collection_job = CollectionJob( - id=uuid4(), + """Create a CollectionJob row (uses create schema for clarity).""" + job_in = CollectionJobCreate( collection_id=collection_id, project_id=user.project_id, action_type=action_type, @@ -53,20 +57,21 @@ def create_collection_job( inserted_at=now(), updated_at=now(), ) + collection_job = CollectionJobCrud(db, user.project_id).create(job_in) - if status == CollectionJobStatus.FAILED: - collection_job.error_message = ( - "Something went wrong during the collection job process." + if collection_job.status == CollectionJobStatus.FAILED: + job_in = CollectionJobUpdate( + error_message="Something went wrong during the collection job process." 
+ ) + collection_job = CollectionJobCrud(db, user.project_id).update( + collection_job.id, job_in ) - collection_job_crud = CollectionJobCrud(db, user.project_id) - created_job = collection_job_crud.create(collection_job) - - return created_job + return collection_job def test_collection_info_processing( - db: Session, client: TestClient, user_api_key_header, user_api_key + db: Session, client: "TestClient", user_api_key_header, user_api_key ): headers = user_api_key_header @@ -87,7 +92,7 @@ def test_collection_info_processing( def test_collection_info_successful( - db: Session, client: TestClient, user_api_key_header, user_api_key + db: Session, client: "TestClient", user_api_key_header, user_api_key ): headers = user_api_key_header @@ -104,13 +109,20 @@ def test_collection_info_successful( assert response.status_code == 200 data = response.json()["data"] - assert data["id"] == str(collection.id) - assert data["llm_service_id"] == collection.llm_service_id - assert data["llm_service_name"] == "gpt-4o" + assert data["id"] == str(collection_job.id) + assert data["status"] == CollectionJobStatus.SUCCESSFUL + assert data["action_type"] == CollectionActionType.CREATE + assert data["collection_id"] == str(collection.id) + + assert data["collection"] is not None + col = data["collection"] + assert col["id"] == str(collection.id) + assert col["llm_service_id"] == collection.llm_service_id + assert col["llm_service_name"] == "gpt-4o" def test_collection_info_failed( - db: Session, client: TestClient, user_api_key_header, user_api_key + db: Session, client: "TestClient", user_api_key_header, user_api_key ): headers = user_api_key_header diff --git a/backend/app/tests/api/routes/collections/test_create_collections.py b/backend/app/tests/api/routes/collections/test_create_collections.py index 6fe087136..2b5d786bb 100644 --- a/backend/app/tests/api/routes/collections/test_create_collections.py +++ b/backend/app/tests/api/routes/collections/test_create_collections.py @@ -8,7 
+8,7 @@ def test_collection_creation_success( - client: TestClient, user_api_key_header: dict[str, str] + client: TestClient, user_api_key_header: dict[str, str], user_api_key ): with patch("app.api.routes.collections.create_service.start_job") as mock_job_start: creation_data = CreationRequest( @@ -29,19 +29,21 @@ def test_collection_creation_success( assert resp.status_code == 200 body = resp.json() - assert body["success"] is True - assert body["data"] is None + data = body["data"] + assert isinstance(data, dict) + assert data["action_type"] == "CREATE" + assert data["status"] == "PENDING" + assert data["project_id"] == user_api_key.project_id + assert data["collection_id"] is None + assert data["task_id"] is None + assert "trace_id" in data + assert data["inserted_at"] + assert data["updated_at"] - assert body["metadata"]["status"] == "processing" - assert body["metadata"]["route"] == "/collections/create" - assert body["metadata"]["key"] is not None - job_key = body["metadata"]["key"] + job_key = data["id"] mock_job_start.assert_called_once() kwargs = mock_job_start.call_args.kwargs - assert "db" in kwargs - assert kwargs["request"] == creation_data.model_dump() - assert kwargs["payload"]["status"] == "processing" - - assert kwargs["collection_job_id"] == job_key + assert kwargs["request"] == creation_data + assert kwargs["collection_job_id"] == UUID(job_key) diff --git a/backend/app/tests/services/collections/test_create_collection.py b/backend/app/tests/services/collections/test_create_collection.py index a19769432..b42074e7d 100644 --- a/backend/app/tests/services/collections/test_create_collection.py +++ b/backend/app/tests/services/collections/test_create_collection.py @@ -28,10 +28,27 @@ def aws_credentials(): os.environ["AWS_DEFAULT_REGION"] = settings.AWS_DEFAULT_REGION +def create_collection_job_for_create( + db: Session, + project, + job_id: UUID, +): + """Pre-create a CREATE job with the given id so start_job can update it.""" + return 
CollectionJobCrud(db, project.id).create( + CollectionJob( + id=job_id, + action_type=CollectionActionType.CREATE, + project_id=project.id, + collection_id=None, + status=CollectionJobStatus.PENDING, + ) + ) + + def test_start_job_creates_collection_job_and_schedules_task(db: Session): """ start_job should: - - create a CollectionJob in 'processing' + - update an existing CollectionJob (status=PENDING, action=CREATE) - call start_low_priority_job with the correct kwargs - return the job UUID (same one that was passed in) """ @@ -44,9 +61,12 @@ def test_start_job_creates_collection_job_and_schedules_task(db: Session): batch_size=1, callback_url=None, ) - payload = {"some": "data"} + route = "/collections/create" + payload = ResponsePayload(status="processing", route=route) job_id = uuid4() + _ = create_collection_job_for_create(db, project, job_id) + with patch( "app.services.collections.create_collection.start_low_priority_job" ) as mock_schedule: @@ -54,7 +74,7 @@ def test_start_job_creates_collection_job_and_schedules_task(db: Session): returned_job_id = start_job( db=db, - request=request.model_dump(), + request=request, project_id=project.id, payload=payload, collection_job_id=str(job_id), @@ -67,7 +87,10 @@ def test_start_job_creates_collection_job_and_schedules_task(db: Session): assert job.id == job_id assert job.project_id == project.id assert job.status == CollectionJobStatus.PENDING - assert job.action_type == CollectionActionType.CREATE.value + assert job.action_type in ( + CollectionActionType.CREATE, + CollectionActionType.CREATE.value, + ) assert job.collection_id is None mock_schedule.assert_called_once() @@ -80,7 +103,9 @@ def test_start_job_creates_collection_job_and_schedules_task(db: Session): assert kwargs["organization_id"] == project.organization_id assert kwargs["job_id"] == str(job_id) assert kwargs["request"] == request.model_dump() - assert kwargs["payload_data"] == payload + + passed_payload = kwargs.get("payload", 
kwargs.get("payload_data")) + assert passed_payload == payload.model_dump() @pytest.mark.usefixtures("aws_credentials") @@ -140,7 +165,7 @@ def test_execute_job_success_flow_updates_job_and_creates_collection( execute_job( request=sample_request.model_dump(), - payload_data=sample_payload.model_dump(), + payload=sample_payload.model_dump(), project_id=project.id, organization_id=project.organization_id, task_id=task_id, diff --git a/backend/app/tests/services/collections/test_delete_collection.py b/backend/app/tests/services/collections/test_delete_collection.py index 40f23bf90..fc27a6c7d 100644 --- a/backend/app/tests/services/collections/test_delete_collection.py +++ b/backend/app/tests/services/collections/test_delete_collection.py @@ -1,11 +1,9 @@ from unittest.mock import patch, MagicMock -from uuid import uuid4 -from dataclasses import asdict +from uuid import uuid4, UUID from sqlmodel import Session from sqlalchemy.exc import SQLAlchemyError - from app.models.collection import ( DeletionRequest, Collection, @@ -24,14 +22,19 @@ def create_collection(db: Session, project): organization_id=project.organization_id, llm_service_id="asst-nasjnl", ) - collection = CollectionCrud(db, project.id).create(collection) - return collection + return CollectionCrud(db, project.id).create(collection) -def create_collection_job(db: Session, project, collection): - job_id = uuid4() +def create_collection_job( + db: Session, + project, + collection, + job_id: UUID | None = None, +): + if job_id is None: + job_id = uuid4() job_crud = CollectionJobCrud(db, project.id) - job = job_crud.create( + return job_crud.create( CollectionJob( id=job_id, action_type=CollectionActionType.DELETE, @@ -40,32 +43,37 @@ def create_collection_job(db: Session, project, collection): status=CollectionJobStatus.PENDING, ) ) - return job def test_start_job_creates_collection_job_and_schedules_task(db: Session): """ - - start_job should create a CollectionJob (status=processing, action=delete) - - 
schedule the task with a *generated* job_id and the provided collection_id - - return the collection.id (per implementation) + - start_job should update an existing CollectionJob (status=processing, action=delete) + - schedule the task with the provided job_id and collection_id + - return the same job_id (string) """ project = get_project(db) - created_collection = create_collection(db, project) req = DeletionRequest(collection_id=created_collection.id) - payload = {"status": "processing"} + route = "/collections/delete" + payload = ResponsePayload(status="processing", route=route) with patch( "app.services.collections.delete_collection.start_low_priority_job" ) as mock_schedule: mock_schedule.return_value = "fake-task-id" - collection_job_id = str(uuid4()) + collection_job_id = uuid4() + precreated = create_collection_job( + db=db, + project=project, + collection=created_collection, + job_id=collection_job_id, + ) returned = start_job( db=db, - request=req.model_dump(), + request=req, collection=created_collection, project_id=project.id, collection_job_id=collection_job_id, @@ -78,6 +86,7 @@ def test_start_job_creates_collection_job_and_schedules_task(db: Session): jobs = CollectionJobCrud(db, project.id).read_all() assert len(jobs) == 1 job = jobs[0] + assert job.id == collection_job_id assert job.project_id == project.id assert job.collection_id == created_collection.id assert job.status == CollectionJobStatus.PENDING @@ -94,7 +103,7 @@ def test_start_job_creates_collection_job_and_schedules_task(db: Session): assert kwargs["job_id"] == str(job.id) assert kwargs["collection_id"] == created_collection.id assert kwargs["request"] == req.model_dump() - assert kwargs["payload_data"] == payload + assert kwargs["payload"] == payload.model_dump() assert "trace_id" in kwargs @@ -141,7 +150,7 @@ def test_execute_job_delete_success_updates_job_and_calls_delete( execute_job( request=req.model_dump(), - payload_data=payload.model_dump(), + payload=payload.model_dump(), 
project_id=project.id, organization_id=project.organization_id, task_id=task_id, @@ -200,7 +209,7 @@ def test_execute_job_delete_failure_marks_job_failed( execute_job( request=req.model_dump(), - payload_data=payload.model_dump(), + payload=payload.model_dump(), project_id=project.id, organization_id=project.organization_id, task_id=task_id, From 95ef1057829cd3b43c6bc51fb3d69eac6115da78 Mon Sep 17 00:00:00 2001 From: nishika26 Date: Wed, 8 Oct 2025 15:26:44 +0530 Subject: [PATCH 41/44] minimal fixes --- .../app/tests/api/routes/collections/test_collection_info.py | 4 ---- .../app/tests/services/collections/test_create_collection.py | 4 ++-- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/backend/app/tests/api/routes/collections/test_collection_info.py b/backend/app/tests/api/routes/collections/test_collection_info.py index 7dc9b3f37..32a02e9dd 100644 --- a/backend/app/tests/api/routes/collections/test_collection_info.py +++ b/backend/app/tests/api/routes/collections/test_collection_info.py @@ -34,8 +34,6 @@ def create_collection( project_id=user.project_id, llm_service_id=llm_service_id, llm_service_name=llm_service_name, - inserted_at=now(), - updated_at=now(), ) return CollectionCrud(db, user.project_id).create(collection) @@ -54,8 +52,6 @@ def create_collection_job( project_id=user.project_id, action_type=action_type, status=status, - inserted_at=now(), - updated_at=now(), ) collection_job = CollectionJobCrud(db, user.project_id).create(job_in) diff --git a/backend/app/tests/services/collections/test_create_collection.py b/backend/app/tests/services/collections/test_create_collection.py index b42074e7d..e827f5b0f 100644 --- a/backend/app/tests/services/collections/test_create_collection.py +++ b/backend/app/tests/services/collections/test_create_collection.py @@ -77,11 +77,11 @@ def test_start_job_creates_collection_job_and_schedules_task(db: Session): request=request, project_id=project.id, payload=payload, - collection_job_id=str(job_id), + 
collection_job_id=job_id, organization_id=project.organization_id, ) - assert returned_job_id == str(job_id) + assert returned_job_id == job_id job = CollectionJobCrud(db, project.id).read_one(job_id) assert job.id == job_id From 4de8c1d0fdb3ff7f3d0e6ee4a6ba44a8ee1da83a Mon Sep 17 00:00:00 2001 From: nishika26 Date: Wed, 8 Oct 2025 16:55:46 +0530 Subject: [PATCH 42/44] changing router name --- backend/app/api/docs/collections/create.md | 4 ++-- backend/app/api/docs/collections/delete.md | 2 +- backend/app/api/routes/collection_job.py | 6 +++--- .../tests/api/routes/collections/test_collection_info.py | 6 +++--- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/backend/app/api/docs/collections/create.md b/backend/app/api/docs/collections/create.md index 7ab900a85..3917d7c19 100644 --- a/backend/app/api/docs/collections/create.md +++ b/backend/app/api/docs/collections/create.md @@ -20,8 +20,8 @@ value being invalid. It can also fail due to document types not be accepted. This is especially true for PDFs that may not be parseable. The immediate response from the endpoint is `collection_job` object which is -going to contain the collection job ID, status and action type ("CREATE"). +going to contain the collection "job ID", status and action type ("CREATE"). Once the collection has been created, information about the collection will be returned to the user via the callback URL. If a callback URL is not provided, clients can poll the `collection job info` endpoint with the `id` in the -`collection_job object` returned as it is the collection job id, to retrieve the same information. +`collection_job` object returned as it is the `job id`, to retrieve the same information. diff --git a/backend/app/api/docs/collections/delete.md b/backend/app/api/docs/collections/delete.md index d25ea6059..63a1e3cf4 100644 --- a/backend/app/api/docs/collections/delete.md +++ b/backend/app/api/docs/collections/delete.md @@ -7,7 +7,7 @@ Remove a collection from the platform. 
This is a two step process: No action is taken on the documents themselves: the contents of the documents that were a part of the collection remain unchanged, those documents can still be accessed via the documents endpoints. The response from this -endpoint will be a `collection_job` object which will contain the collection job ID, +endpoint will be a `collection_job` object which will contain the collection `job ID`, status and action type ("DELETE"). when you take the id returned and use the collection job info endpoint, if the job is successful, you will get the status as successful and nothing will be returned as the collection as it has been deleted and marked as deleted. diff --git a/backend/app/api/routes/collection_job.py b/backend/app/api/routes/collection_job.py index ce0c65263..bddfe8d95 100644 --- a/backend/app/api/routes/collection_job.py +++ b/backend/app/api/routes/collection_job.py @@ -21,17 +21,17 @@ @router.get( - "/info/collection_job/{collection_job_id}", + "/info/jobs/{job_id}", description=load_description("collections/job_info.md"), response_model=APIResponse[CollectionJobPublic], ) def collection_job_info( session: SessionDep, current_user: CurrentUserOrgProject, - collection_job_id: UUID = FastPath(description="Collection job to retrieve"), + job_id: UUID = FastPath(description="Collection job to retrieve"), ): collection_job_crud = CollectionJobCrud(session, current_user.project_id) - collection_job = collection_job_crud.read_one(collection_job_id) + collection_job = collection_job_crud.read_one(job_id) job_out = CollectionJobPublic.model_validate(collection_job) diff --git a/backend/app/tests/api/routes/collections/test_collection_info.py b/backend/app/tests/api/routes/collections/test_collection_info.py index 32a02e9dd..2317ef241 100644 --- a/backend/app/tests/api/routes/collections/test_collection_info.py +++ b/backend/app/tests/api/routes/collections/test_collection_info.py @@ -74,7 +74,7 @@ def test_collection_info_processing( 
collection_job = create_collection_job(db, user_api_key) response = client.get( - f"{settings.API_V1_STR}/collections/info/collection_job/{collection_job.id}", + f"{settings.API_V1_STR}/collections/info/jobs/{collection_job.id}", headers=headers, ) @@ -98,7 +98,7 @@ def test_collection_info_successful( ) response = client.get( - f"{settings.API_V1_STR}/collections/info/collection_job/{collection_job.id}", + f"{settings.API_V1_STR}/collections/info/jobs/{collection_job.id}", headers=headers, ) @@ -127,7 +127,7 @@ def test_collection_info_failed( ) response = client.get( - f"{settings.API_V1_STR}/collections/info/collection_job/{collection_job.id}", + f"{settings.API_V1_STR}/collections/info/jobs/{collection_job.id}", headers=headers, ) From 2c0a11dc4b2fafc9db0312ae6698b7bd5fa51809 Mon Sep 17 00:00:00 2001 From: nishika26 Date: Thu, 9 Oct 2025 13:05:24 +0530 Subject: [PATCH 43/44] last pr review changes --- ...uccessful_columns_from_collection_table.py | 6 +++++ ...on_job_table_and_alter_collection_table.py | 12 +++++++++ backend/app/api/docs/collections/job_info.md | 4 ++- backend/app/models/collection.py | 4 +-- .../services/collections/delete_collection.py | 26 +++++++++++++------ 5 files changed, 41 insertions(+), 11 deletions(-) diff --git a/backend/app/alembic/versions/7ab577d3af26_delete_non_successful_columns_from_collection_table.py b/backend/app/alembic/versions/7ab577d3af26_delete_non_successful_columns_from_collection_table.py index df1020d01..229083ee0 100644 --- a/backend/app/alembic/versions/7ab577d3af26_delete_non_successful_columns_from_collection_table.py +++ b/backend/app/alembic/versions/7ab577d3af26_delete_non_successful_columns_from_collection_table.py @@ -24,6 +24,12 @@ def upgrade(): WHERE status IN ('processing', 'failed') """ ) + op.execute( + """ + DELETE FROM collection + WHERE llm_service_id IS NULL + """ + ) def downgrade(): diff --git a/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py 
b/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py index 038c0e04c..fdd47876e 100644 --- a/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py +++ b/backend/app/alembic/versions/b30727137e65_adding_collection_job_table_and_alter_collection_table.py @@ -56,6 +56,12 @@ def upgrade(): ) op.alter_column("collection", "created_at", new_column_name="inserted_at") + op.alter_column( + "collection", "llm_service_id", existing_type=sa.VARCHAR(), nullable=False + ) + op.alter_column( + "collection", "llm_service_name", existing_type=sa.VARCHAR(), nullable=False + ) op.drop_constraint("collection_owner_id_fkey", "collection", type_="foreignkey") op.drop_column("collection", "owner_id") op.drop_column("collection", "status") @@ -98,4 +104,10 @@ def downgrade(): op.alter_column("collection", "status", nullable=False) op.alter_column("collection", "owner_id", nullable=False) op.alter_column("collection", "inserted_at", new_column_name="created_at") + op.alter_column( + "collection", "llm_service_name", existing_type=sa.VARCHAR(), nullable=True + ) + op.alter_column( + "collection", "llm_service_id", existing_type=sa.VARCHAR(), nullable=True + ) op.drop_table("collection_jobs") diff --git a/backend/app/api/docs/collections/job_info.md b/backend/app/api/docs/collections/job_info.md index 3cb9c1482..e785967b5 100644 --- a/backend/app/api/docs/collections/job_info.md +++ b/backend/app/api/docs/collections/job_info.md @@ -3,8 +3,10 @@ in the AI platform. It is especially useful for: * Fetching the collection job object containing the ID which will be collection job id, collection ID, status of the job as well as error message. 
-* If the job has finished and has been successful, this endpoint will fetch the associated collection details from the collection table, including: +* If the job has finished, has been successful and it was a job of creation of collection then this endpoint will fetch the associated collection details from the collection table, including: - `llm_service_id`: The OpenAI assistant or model used for the collection. - Collection metadata such as ID, project, organization, and timestamps. +* If the job of delete collection was successful, we will get the status as successful and nothing will be returned as collection. + * Containing a simplified error messages in the retrieved collection job object when a job has failed. diff --git a/backend/app/models/collection.py b/backend/app/models/collection.py index 4883deedb..3ffb318dc 100644 --- a/backend/app/models/collection.py +++ b/backend/app/models/collection.py @@ -25,8 +25,8 @@ class Collection(SQLModel, table=True): ondelete="CASCADE", ) - llm_service_id: Optional[str] = Field(default=None, nullable=True) - llm_service_name: Optional[str] = Field(default=None, nullable=True) + llm_service_id: str = Field(nullable=False) + llm_service_name: str = Field(nullable=False) inserted_at: datetime = Field(default_factory=now) updated_at: datetime = Field(default_factory=now) diff --git a/backend/app/services/collections/delete_collection.py b/backend/app/services/collections/delete_collection.py index 19e1c7394..d7664dace 100644 --- a/backend/app/services/collections/delete_collection.py +++ b/backend/app/services/collections/delete_collection.py @@ -32,6 +32,7 @@ def start_job( organization_id: int, ) -> str: trace_id = correlation_id.get() or "N/A" + job_crud = CollectionJobCrud(db, project_id) collection_job = job_crud.update( collection_job_id, CollectionJobUpdate(trace_id=trace_id) @@ -41,7 +42,7 @@ def start_job( function_path="app.services.collections.delete_collection.execute_job", project_id=project_id, 
job_id=str(collection_job_id), - collection_id=collection.id, + collection_id=str(collection.id), trace_id=trace_id, request=request.model_dump(), payload=payload.model_dump(), @@ -61,8 +62,8 @@ def execute_job( project_id: int, organization_id: int, task_id: str, - job_id: UUID, - collection_id: UUID, + job_id: str, + collection_id: str, task_instance, ) -> None: deletion_request = DeletionRequest(**request) @@ -74,17 +75,26 @@ def execute_job( else WebHookCallback(deletion_request.callback_url, payload) ) + collection_id = UUID(collection_id) + job_id = UUID(job_id) + try: with Session(engine) as session: client = get_openai_client(session, organization_id, project_id) + + collection_job_crud = CollectionJobCrud(session, project_id) + collection_job = collection_job_crud.read_one(job_id) + collection_job = collection_job_crud.update( + job_id, + CollectionJobUpdate( + task_id=task_id, status=CollectionJobStatus.PROCESSING + ), + ) + assistant_crud = OpenAIAssistantCrud(client) collection_crud = CollectionCrud(session, project_id) - collection_job_crud = CollectionJobCrud(session, project_id) collection = collection_crud.read_one(collection_id) - collection_job = collection_job_crud.read_one(job_id) - - collection_job.task_id = task_id try: result = collection_crud.delete(collection, assistant_crud) @@ -117,7 +127,7 @@ def execute_job( except Exception as err: collection_job.status = CollectionJobStatus.FAILED collection_job.error_message = str(err) - collection_job_crud.update(collection_job) + collection_job_crud.update(collection_job.id, collection_job) logger.error( "[delete_collection.execute_job] Unexpected error during deletion | " From 94234b10f10e30ab5e5a41241aa4d051e3f5dba1 Mon Sep 17 00:00:00 2001 From: nishika26 Date: Thu, 9 Oct 2025 13:40:55 +0530 Subject: [PATCH 44/44] minimal fixes --- backend/app/models/collection.py | 4 +- .../services/collections/create_collection.py | 39 +++++++++++++------ .../services/collections/delete_collection.py | 36 
+++++++++++------ .../collections/test_create_collection.py | 2 +- .../collections/test_delete_collection.py | 11 +++--- 5 files changed, 61 insertions(+), 31 deletions(-) diff --git a/backend/app/models/collection.py b/backend/app/models/collection.py index 3ffb318dc..9e5f866fd 100644 --- a/backend/app/models/collection.py +++ b/backend/app/models/collection.py @@ -1,6 +1,6 @@ from uuid import UUID, uuid4 from datetime import datetime -from typing import Any, List, Optional +from typing import Any, Optional from sqlmodel import Field, Relationship, SQLModel from pydantic import HttpUrl @@ -53,7 +53,7 @@ def now(cls): # pydantic models - class DocumentOptions(SQLModel): - documents: List[UUID] = Field( + documents: list[UUID] = Field( description="List of document IDs", ) batch_size: int = Field( diff --git a/backend/app/services/collections/create_collection.py b/backend/app/services/collections/create_collection.py index 1157e3ed2..d424c5333 100644 --- a/backend/app/services/collections/create_collection.py +++ b/backend/app/services/collections/create_collection.py @@ -94,9 +94,12 @@ def execute_job( collection_job_crud = CollectionJobCrud(session, project_id) collection_job = collection_job_crud.read_one(job_id) - collection_job.task_id = task_id - collection_job.status = CollectionJobStatus.PROCESSING - collection_job_crud.update(collection_job.id, collection_job) + collection_job = collection_job_crud.update( + job_id, + CollectionJobUpdate( + task_id=task_id, status=CollectionJobStatus.PROCESSING + ), + ) client = get_openai_client(session, organization_id, project_id) @@ -151,9 +154,13 @@ def execute_job( if flat_docs: DocumentCollectionCrud(session).create(collection_data, flat_docs) - collection_job.status = CollectionJobStatus.SUCCESSFUL - collection_job.collection_id = collection_id - collection_job_crud.update(collection_job.id, collection_job) + collection_job_crud.update( + collection_job.id, + CollectionJobUpdate( + 
status=CollectionJobStatus.SUCCESSFUL, + collection_id=collection.id, + ), + ) elapsed = time.time() - start_time logger.info( @@ -179,9 +186,13 @@ def execute_job( if "assistant" in locals(): _backout(assistant_crud, assistant.id) - collection_job.status = CollectionJobStatus.FAILED - collection_job.error_message = str(err) - collection_job_crud.update(collection_job.id, collection_job) + collection_job_crud.update( + collection_job.id, + CollectionJobUpdate( + status=CollectionJobStatus.FAILED, + error_message=str(err), + ), + ) callback.fail(str(err)) @@ -192,6 +203,10 @@ def execute_job( exc_info=True, ) - collection_job.status = CollectionJobStatus.FAILED - collection_job.error_message = str(err) - collection_job_crud.update(collection_job.id, collection_job) + collection_job_crud.update( + collection_job.id, + CollectionJobUpdate( + status=CollectionJobStatus.FAILED, + error_message=str(outer_err), + ), + ) diff --git a/backend/app/services/collections/delete_collection.py b/backend/app/services/collections/delete_collection.py index d7664dace..088647c31 100644 --- a/backend/app/services/collections/delete_collection.py +++ b/backend/app/services/collections/delete_collection.py @@ -75,8 +75,10 @@ def execute_job( else WebHookCallback(deletion_request.callback_url, payload) ) - collection_id = UUID(collection_id) - job_id = UUID(job_id) + if not isinstance(collection_id, UUID): + collection_id = UUID(str(collection_id)) + if not isinstance(job_id, UUID): + job_id = UUID(str(job_id)) try: with Session(engine) as session: @@ -99,9 +101,13 @@ def execute_job( try: result = collection_crud.delete(collection, assistant_crud) - collection_job.status = CollectionJobStatus.SUCCESSFUL - collection_job.error_message = None - collection_job_crud.update(collection_job.id, collection_job) + collection_job_crud.update( + collection_job.id, + CollectionJobUpdate( + status=CollectionJobStatus.SUCCESSFUL, + error_message=None, + ), + ) logger.info( 
"[delete_collection.execute_job] Collection deleted successfully | {'collection_id': '%s', 'job_id': '%s'}", @@ -111,9 +117,13 @@ def execute_job( callback.success(result.model_dump(mode="json")) except (ValueError, PermissionError, SQLAlchemyError) as err: - collection_job.status = CollectionJobStatus.FAILED - collection_job.error_message = str(err) - collection_job_crud.update(collection_job.id, collection_job) + collection_job_crud.update( + collection_job.id, + CollectionJobUpdate( + status=CollectionJobStatus.FAILED, + error_message=str(err), + ), + ) logger.error( "[delete_collection.execute_job] Failed to delete collection | {'collection_id': '%s', 'error': '%s', 'job_id': '%s'}", @@ -125,9 +135,13 @@ def execute_job( callback.fail(str(err)) except Exception as err: - collection_job.status = CollectionJobStatus.FAILED - collection_job.error_message = str(err) - collection_job_crud.update(collection_job.id, collection_job) + collection_job_crud.update( + collection_job.id, + CollectionJobUpdate( + status=CollectionJobStatus.FAILED, + error_message=str(err), + ), + ) logger.error( "[delete_collection.execute_job] Unexpected error during deletion | " diff --git a/backend/app/tests/services/collections/test_create_collection.py b/backend/app/tests/services/collections/test_create_collection.py index e827f5b0f..430e7b4be 100644 --- a/backend/app/tests/services/collections/test_create_collection.py +++ b/backend/app/tests/services/collections/test_create_collection.py @@ -168,7 +168,7 @@ def test_execute_job_success_flow_updates_job_and_creates_collection( payload=sample_payload.model_dump(), project_id=project.id, organization_id=project.organization_id, - task_id=task_id, + task_id=str(task_id), job_id=str(job_id), task_instance=None, ) diff --git a/backend/app/tests/services/collections/test_delete_collection.py b/backend/app/tests/services/collections/test_delete_collection.py index fc27a6c7d..f6f55c6ad 100644 --- 
a/backend/app/tests/services/collections/test_delete_collection.py +++ b/backend/app/tests/services/collections/test_delete_collection.py @@ -21,6 +21,7 @@ def create_collection(db: Session, project): project_id=project.id, organization_id=project.organization_id, llm_service_id="asst-nasjnl", + llm_service_name="gpt-4o", ) return CollectionCrud(db, project.id).create(collection) @@ -101,7 +102,7 @@ def test_start_job_creates_collection_job_and_schedules_task(db: Session): assert kwargs["project_id"] == project.id assert kwargs["organization_id"] == project.organization_id assert kwargs["job_id"] == str(job.id) - assert kwargs["collection_id"] == created_collection.id + assert kwargs["collection_id"] == str(created_collection.id) assert kwargs["request"] == req.model_dump() assert kwargs["payload"] == payload.model_dump() assert "trace_id" in kwargs @@ -153,7 +154,7 @@ def test_execute_job_delete_success_updates_job_and_calls_delete( payload=payload.model_dump(), project_id=project.id, organization_id=project.organization_id, - task_id=task_id, + task_id=str(task_id), job_id=str(job.id), collection_id=collection.id, task_instance=None, @@ -212,9 +213,9 @@ def test_execute_job_delete_failure_marks_job_failed( payload=payload.model_dump(), project_id=project.id, organization_id=project.organization_id, - task_id=task_id, - job_id=job.id, - collection_id=collection.id, + task_id=str(task_id), + job_id=str(job.id), + collection_id=str(collection.id), task_instance=None, )