diff --git a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py new file mode 100644 index 000000000..4a6f870a5 --- /dev/null +++ b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py @@ -0,0 +1,85 @@ +"""add openai_conversation table + +Revision ID: ff579a9523c5 +Revises: e8ee93526b37 +Create Date: 2025-07-24 12:16:51.311014 + +""" +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes + + +# revision identifiers, used by Alembic. +revision = "ff579a9523c5" +down_revision = "e8ee93526b37" +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + "openai_conversation", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column( + "ancestor_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True + ), + sa.Column( + "previous_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True + ), + sa.Column("user_question", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("response", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("model", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("assistant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("project_id", sa.Integer(), nullable=False), + sa.Column("organization_id", sa.Integer(), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=False), + sa.Column("inserted_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.ForeignKeyConstraint( + ["organization_id"], ["organization.id"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"), + ) + op.create_index( + 
op.f("ix_openai_conversation_ancestor_response_id"), + "openai_conversation", + ["ancestor_response_id"], + unique=False, + ) + op.create_index( + op.f("ix_openai_conversation_previous_response_id"), + "openai_conversation", + ["previous_response_id"], + unique=False, + ) + op.create_index( + op.f("ix_openai_conversation_response_id"), + "openai_conversation", + ["response_id"], + unique=False, + ) + op.create_foreign_key( + None, "openai_conversation", "project", ["project_id"], ["id"] + ) + op.create_foreign_key( + None, "openai_conversation", "organization", ["organization_id"], ["id"] + ) + + +def downgrade(): + op.drop_index( + op.f("ix_openai_conversation_response_id"), table_name="openai_conversation" + ) + op.drop_index( + op.f("ix_openai_conversation_previous_response_id"), + table_name="openai_conversation", + ) + op.drop_index( + op.f("ix_openai_conversation_ancestor_response_id"), + table_name="openai_conversation", + ) + op.drop_table("openai_conversation") diff --git a/backend/app/api/main.py b/backend/app/api/main.py index 7db3c3d54..d504dfda1 100644 --- a/backend/app/api/main.py +++ b/backend/app/api/main.py @@ -16,6 +16,7 @@ utils, onboarding, credentials, + openai_conversation, ) from app.core.config import settings @@ -27,6 +28,7 @@ api_router.include_router(documents.router) api_router.include_router(login.router) api_router.include_router(onboarding.router) +api_router.include_router(openai_conversation.router) api_router.include_router(organization.router) api_router.include_router(project.router) api_router.include_router(project_user.router) diff --git a/backend/app/api/routes/openai_conversation.py b/backend/app/api/routes/openai_conversation.py new file mode 100644 index 000000000..91044b6dc --- /dev/null +++ b/backend/app/api/routes/openai_conversation.py @@ -0,0 +1,126 @@ +from sqlmodel import Session +from fastapi import APIRouter, Depends, HTTPException, Query, Path + +from app.api.deps import get_db, get_current_user_org, 
get_current_user_org_project +from app.models import UserOrganization, UserProjectOrg +from app.models.openai_conversation import OpenAIConversationPublic +from app.crud.openai_conversation import ( + get_openai_conversation_by_id, + get_openai_conversation_by_response_id, + get_openai_conversations_by_ancestor, + get_all_openai_conversations, + delete_openai_conversation, +) +from app.utils import APIResponse + +router = APIRouter(prefix="/openai-conversation", tags=["openai_conversation"]) + + +@router.get( + "/list", + response_model=APIResponse[list[OpenAIConversationPublic]], + summary="List all conversations", + description="Retrieve all OpenAI conversations with pagination support", +) +async def list_conversations( + session: Session = Depends(get_db), + current_user: UserProjectOrg = Depends(get_current_user_org_project), + skip: int = Query(0, ge=0, description="Number of records to skip"), + limit: int = Query( + 100, gt=0, le=100, description="Maximum number of records to return" + ), +): + """Get all conversations with pagination for project and organization""" + conversations = get_all_openai_conversations( + session=session, project_id=current_user.project_id, skip=skip, limit=limit + ) + return APIResponse.success_response( + data=[OpenAIConversationPublic.model_validate(conv) for conv in conversations] + ) + + +@router.get( + "/{conversation_id}", + response_model=APIResponse[OpenAIConversationPublic], + summary="Get conversation by ID", + description="Retrieve a conversation by its database ID", +) +async def get_conversation_by_id( + conversation_id: int = Path(..., description="The conversation ID"), + session: Session = Depends(get_db), + current_user: UserProjectOrg = Depends(get_current_user_org_project), +): + """Get a conversation by its ID, only if it belongs to the user's project.""" + conversation = get_openai_conversation_by_id( + session, conversation_id, current_user.project_id + ) + if not conversation: + raise HTTPException( + 
status_code=404, detail=f"Conversation with ID {conversation_id} not found." + ) + return APIResponse.success_response(conversation) + + +@router.get( + "/response/{response_id}", + response_model=APIResponse[OpenAIConversationPublic], + summary="Get conversation by response ID", + description="Retrieve a conversation by its response_id", +) +async def get_conversation_by_response_id( + response_id: str = Path(..., description="The response ID"), + session: Session = Depends(get_db), + current_user: UserProjectOrg = Depends(get_current_user_org_project), +): + """Get a conversation by its response_id, only if it belongs to the user's project.""" + conversation = get_openai_conversation_by_response_id( + session, response_id, current_user.project_id + ) + if not conversation: + raise HTTPException( + status_code=404, + detail=f"Conversation with response ID {response_id} not found.", + ) + return APIResponse.success_response(conversation) + + +@router.get( + "/ancestor/{ancestor_response_id}", + response_model=APIResponse[list[OpenAIConversationPublic]], + summary="Get conversations by ancestor", + description="Retrieve all conversations that have the specified ancestor_response_id", +) +async def get_conversations_by_ancestor( + ancestor_response_id: str = Path(..., description="The ancestor ID"), + session: Session = Depends(get_db), + current_user: UserProjectOrg = Depends(get_current_user_org_project), +): + """Get all conversations with the given ancestor_response_id, only those belonging to the user's project.""" + conversation = get_openai_conversations_by_ancestor( + session, ancestor_response_id, current_user.project_id + ) + if not conversation: + raise HTTPException( + status_code=404, + detail=f"Conversation with ancestor ID {ancestor_response_id} not found.", + ) + return APIResponse.success_response(conversation) + + +@router.delete("/{conversation_id}", response_model=APIResponse) +def delete_conversation_by_id( + conversation_id: int = Path(..., description="The 
conversation ID"), + session: Session = Depends(get_db), + current_user: UserProjectOrg = Depends(get_current_user_org_project), +): + """ + Soft delete an conversation by updating flag is_deleted. + """ + delete_openai_conversation( + session=session, + conversation_id=conversation_id, + project_id=current_user.project_id, + ) + return APIResponse.success_response( + data={"message": "Conversation deleted successfully."} + ) diff --git a/backend/app/api/routes/responses.py b/backend/app/api/routes/responses.py index 130a48399..7a07aeff5 100644 --- a/backend/app/api/routes/responses.py +++ b/backend/app/api/routes/responses.py @@ -4,14 +4,15 @@ import openai from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException from openai import OpenAI -from pydantic import BaseModel, Extra +from pydantic import BaseModel, ConfigDict from sqlmodel import Session from app.api.deps import get_db, get_current_user_org_project from app.api.routes.threads import send_callback from app.crud.assistants import get_assistant_by_id from app.crud.credentials import get_provider_credential -from app.models import UserProjectOrg +from app.crud.openai_conversation import create_openai_conversation +from app.models import UserProjectOrg, OpenAIConversationCreate from app.utils import APIResponse, mask_string from app.core.langfuse.langfuse import LangfuseTracer @@ -32,8 +33,7 @@ class ResponsesAPIRequest(BaseModel): callback_url: Optional[str] = None response_id: Optional[str] = None - class Config: - extra = Extra.allow + model_config = ConfigDict(extra="allow") class ResponsesSyncAPIRequest(BaseModel): @@ -65,8 +65,7 @@ class _APIResponse(BaseModel): chunks: list[FileResultChunk] diagnostics: Optional[Diagnostics] = None - class Config: - extra = Extra.allow + model_config = ConfigDict(extra="allow") class ResponsesAPIResponse(APIResponse[_APIResponse]): @@ -98,6 +97,8 @@ def process_response( assistant, tracer: LangfuseTracer, project_id: int, + organization_id: int, + 
session: Session, ): """Process a response and send callback with results, with Langfuse tracing.""" logger.info( @@ -143,6 +144,27 @@ def process_response( f"Successfully generated response: response_id={response.id}, assistant={mask_string(request.assistant_id)}, project_id={project_id}" ) + # Store conversation in database + try: + conversation_data = OpenAIConversationCreate( + response_id=response.id, + previous_response_id=request.response_id, + user_question=request.question, + response=response.output_text, + model=response.model, + assistant_id=request.assistant_id, + project_id=project_id, + organization_id=organization_id, + ) + create_openai_conversation(session, conversation_data) + logger.info( + f"Conversation stored in database: response_id={response.id}, project_id={project_id}" + ) + except Exception as e: + logger.error( + f"Failed to store conversation in database: {str(e)}, response_id={response.id}, project_id={project_id}" + ) + tracer.end_generation( output={ "response_id": response.id, @@ -264,6 +286,8 @@ async def responses( assistant, tracer, project_id, + organization_id, + _session, ) logger.info( @@ -346,6 +370,27 @@ async def responses_sync( response_chunks = get_file_search_results(response) + # Store conversation in database + try: + conversation_data = OpenAIConversationCreate( + response_id=response.id, + previous_response_id=request.response_id, + user_question=request.question, + response=response.output_text, + model=response.model, + assistant_id=None, # Not available in sync endpoint + project_id=project_id, + organization_id=organization_id, + ) + create_openai_conversation(_session, conversation_data) + logger.info( + f"Conversation stored in database: response_id={response.id}, project_id={project_id}" + ) + except Exception as e: + logger.error( + f"Failed to store conversation in database: {str(e)}, response_id={response.id}, project_id={project_id}" + ) + tracer.end_generation( output={ "response_id": response.id, diff 
--git a/backend/app/crud/__init__.py b/backend/app/crud/__init__.py index 49b09f563..e517fdd3c 100644 --- a/backend/app/crud/__init__.py +++ b/backend/app/crud/__init__.py @@ -45,6 +45,15 @@ from .thread_results import upsert_thread_result, get_thread_result +from .openai_conversation import ( + create_openai_conversation, + get_openai_conversation_by_id, + get_openai_conversation_by_response_id, + get_openai_conversations_by_ancestor, + get_all_openai_conversations, + delete_openai_conversation, +) + from .assistants import ( get_assistant_by_id, fetch_assistant_from_openai, diff --git a/backend/app/crud/api_key.py b/backend/app/crud/api_key.py index 7a8b7c166..a305612c0 100644 --- a/backend/app/crud/api_key.py +++ b/backend/app/crud/api_key.py @@ -168,11 +168,12 @@ def get_api_key_by_user_id(session: Session, user_id: int) -> APIKeyPublic | Non """ Retrieves the API key associated with a user by their user_id. """ - api_key = ( - session.query(APIKey) - .filter(APIKey.user_id == user_id, APIKey.is_deleted == False) - .first() + statement = ( + select(APIKey) + .where(APIKey.user_id == user_id, APIKey.is_deleted == False) + .limit(1) ) + api_key = session.exec(statement).first() if not api_key: return None diff --git a/backend/app/crud/openai_conversation.py b/backend/app/crud/openai_conversation.py new file mode 100644 index 000000000..bd59f5fea --- /dev/null +++ b/backend/app/crud/openai_conversation.py @@ -0,0 +1,110 @@ +import logging + +from fastapi import HTTPException +from sqlmodel import Session, and_, select +from typing import List, Optional + +from app.core.util import now +from app.models import OpenAIConversation, OpenAIConversationCreate + +logger = logging.getLogger(__name__) + + +def create_openai_conversation( + session: Session, data: OpenAIConversationCreate +) -> OpenAIConversation: + conversation = OpenAIConversation(**data.model_dump()) + session.add(conversation) + session.commit() + session.refresh(conversation) + return conversation + + 
+def get_openai_conversation_by_id( + session: Session, openai_conversation_id: int, project_id: int +) -> Optional[OpenAIConversation]: + """Get an openai_conversation by its database ID and project ID.""" + statement = select(OpenAIConversation).where( + and_( + OpenAIConversation.id == openai_conversation_id, + OpenAIConversation.project_id == project_id, + OpenAIConversation.is_deleted == False, + ) + ) + return session.exec(statement).first() + + +def get_openai_conversation_by_response_id( + session: Session, response_id: str, project_id: int +) -> Optional[OpenAIConversation]: + """Get an openai_conversation by its OpenAI response ID and project ID.""" + statement = select(OpenAIConversation).where( + and_( + OpenAIConversation.response_id == response_id, + OpenAIConversation.project_id == project_id, + OpenAIConversation.is_deleted == False, + ) + ) + return session.exec(statement).first() + + +def get_openai_conversations_by_ancestor( + session: Session, ancestor_response_id: str, project_id: int +) -> list[OpenAIConversation]: + """Get all openai_conversations by ancestor_response_id.""" + statement = select(OpenAIConversation).where( + and_( + OpenAIConversation.ancestor_response_id == ancestor_response_id, + OpenAIConversation.project_id == project_id, + OpenAIConversation.is_deleted == False, + ) + ) + return session.exec(statement).all() + + +def get_all_openai_conversations( + session: Session, project_id: int, skip: int = 0, limit: int = 100 +) -> List[OpenAIConversation]: + """ + Return all openai conversations for a given project and organization, with optional pagination. 
+ """ + statement = ( + select(OpenAIConversation) + .where( + OpenAIConversation.project_id == project_id, + OpenAIConversation.is_deleted == False, + ) + .offset(skip) + .limit(limit) + ) + results = session.exec(statement).all() + return results + + +def delete_openai_conversation( + session: Session, + conversation_id: int, + project_id: int, +) -> OpenAIConversation: + """ + Soft delete a conversation by updating the is_deleted flag. + """ + existing_conversation = get_openai_conversation_by_id( + session, conversation_id, project_id + ) + if not existing_conversation: + logger.warning( + f"[delete_openai_conversation] Conversation {conversation_id} not found | project_id: {project_id}" + ) + raise HTTPException(status_code=404, detail="Conversation not found.") + + existing_conversation.is_deleted = True + existing_conversation.deleted_at = now() + session.add(existing_conversation) + session.commit() + session.refresh(existing_conversation) + + logger.info( + f"[delete_openai_conversation] Conversation {conversation_id} soft deleted successfully. 
| project_id: {project_id}" + ) + return existing_conversation diff --git a/backend/app/crud/thread_results.py b/backend/app/crud/thread_results.py index cd72ef188..2d11f01c5 100644 --- a/backend/app/crud/thread_results.py +++ b/backend/app/crud/thread_results.py @@ -1,5 +1,5 @@ from sqlmodel import Session, select -from datetime import datetime +from datetime import datetime, UTC from app.models import OpenAIThreadCreate, OpenAI_Thread @@ -12,9 +12,9 @@ def upsert_thread_result(session: Session, data: OpenAIThreadCreate): existing.response = data.response existing.status = data.status existing.error = data.error - existing.updated_at = datetime.utcnow() + existing.updated_at = datetime.now(UTC) else: - new_thread = OpenAI_Thread(**data.dict()) + new_thread = OpenAI_Thread(**data.model_dump()) session.add(new_thread) session.commit() diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index 2c4c87e00..08fd27b2c 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -54,4 +54,11 @@ from .threads import OpenAI_Thread, OpenAIThreadBase, OpenAIThreadCreate +from .openai_conversation import ( + OpenAIConversation, + OpenAIConversationBase, + OpenAIConversationCreate, + OpenAIConversationPublic, +) + from .assistants import Assistant, AssistantBase, AssistantCreate, AssistantUpdate diff --git a/backend/app/models/credentials.py b/backend/app/models/credentials.py index 7096ef298..2379ee4d2 100644 --- a/backend/app/models/credentials.py +++ b/backend/app/models/credentials.py @@ -1,7 +1,7 @@ from typing import Dict, Any, Optional import sqlalchemy as sa from sqlmodel import Field, Relationship, SQLModel -from datetime import datetime +from datetime import datetime, UTC from app.core.util import now @@ -62,11 +62,11 @@ class Credential(CredsBase, table=True): ) inserted_at: datetime = Field( default_factory=now, - sa_column=sa.Column(sa.DateTime, default=datetime.utcnow, nullable=False), + 
sa_column=sa.Column(sa.DateTime, default=lambda: datetime.now(UTC)), ) updated_at: datetime = Field( default_factory=now, - sa_column=sa.Column(sa.DateTime, onupdate=datetime.utcnow, nullable=False), + sa_column=sa.Column(sa.DateTime, onupdate=lambda: datetime.now(UTC)), ) deleted_at: Optional[datetime] = Field( default=None, sa_column=sa.Column(sa.DateTime, nullable=True) diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py new file mode 100644 index 000000000..750609037 --- /dev/null +++ b/backend/app/models/openai_conversation.py @@ -0,0 +1,46 @@ +from sqlmodel import SQLModel, Field +from typing import Optional +from datetime import datetime, UTC + + +class OpenAIConversationBase(SQLModel): + response_id: str = Field(index=True, min_length=10) + ancestor_response_id: Optional[str] = Field(default=None, index=True) + previous_response_id: Optional[str] = Field(default=None, index=True) + user_question: str = Field(description="The user's input question", min_length=1) + response: Optional[str] = Field(description="The assistant's response") + # there are models with small name like o1 and usually fine tuned models have long names + model: str = Field( + description="The model used for the response", min_length=1, max_length=150 + ) + # usually follow the pattern of asst_WD9bumYqTtpSvxxxxx + assistant_id: Optional[str] = Field( + default=None, + description="The assistant ID used", + min_length=10, + max_length=50, + ) + project_id: int = Field( + foreign_key="project.id", nullable=False, ondelete="CASCADE" + ) + organization_id: int = Field( + foreign_key="organization.id", nullable=False, ondelete="CASCADE" + ) + is_deleted: bool = Field(default=False, nullable=False) + deleted_at: Optional[datetime] = Field(default=None, nullable=True) + + +class OpenAIConversationCreate(OpenAIConversationBase): + pass # Used for requests, no `id` or timestamps + + +class OpenAIConversationPublic(OpenAIConversationBase): + id: int 
+ inserted_at: datetime + updated_at: datetime + + +class OpenAIConversation(OpenAIConversationBase, table=True): + id: int = Field(default=None, primary_key=True) + inserted_at: datetime = Field(default_factory=lambda: datetime.now(UTC)) + updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC)) diff --git a/backend/app/models/threads.py b/backend/app/models/threads.py index e353c6760..61748ec2b 100644 --- a/backend/app/models/threads.py +++ b/backend/app/models/threads.py @@ -1,6 +1,6 @@ from sqlmodel import SQLModel, Field from typing import Optional -from datetime import datetime +from datetime import datetime, UTC class OpenAIThreadBase(SQLModel): @@ -17,5 +17,5 @@ class OpenAIThreadCreate(OpenAIThreadBase): class OpenAI_Thread(OpenAIThreadBase, table=True): id: int = Field(default=None, primary_key=True) - inserted_at: datetime = Field(default_factory=datetime.utcnow) - updated_at: datetime = Field(default_factory=datetime.utcnow) + inserted_at: datetime = Field(default_factory=lambda: datetime.now(UTC)) + updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC)) diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py new file mode 100644 index 000000000..3895e9e12 --- /dev/null +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -0,0 +1,201 @@ +import pytest +from fastapi.testclient import TestClient +from sqlmodel import Session + +from app.crud.openai_conversation import ( + create_openai_conversation, + get_openai_conversation_by_id, +) +from app.models.openai_conversation import OpenAIConversationCreate + +from app.models import APIKeyPublic + + +def test_get_conversation_by_id( + client: TestClient, db: Session, user_api_key: APIKeyPublic +): + """Test getting a conversation by ID.""" + # Create a conversation first + conversation_data = OpenAIConversationCreate( + response_id="resp_test688080a1c52c819c937", + 
ancestor_response_id="ancestor_456", + user_question="What is the capital of France?", + response="The capital of France is Paris.", + model="gpt-4o", + assistant_id="asst_testXLnzQYrQlAEzrOA", + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, + ) + conversation = create_openai_conversation(db, conversation_data) + response = client.get( + f"/api/v1/openai-conversation/{conversation.id}", + headers={"X-API-KEY": user_api_key.key}, + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert data["data"]["id"] == conversation.id + assert data["data"]["response_id"] == "resp_test688080a1c52c819c937" + assert data["data"]["is_deleted"] is False + assert data["data"]["deleted_at"] is None + + +def test_get_conversation_by_response_id( + client: TestClient, db: Session, user_api_key: APIKeyPublic +): + """Test getting a conversation by response_id.""" + # Create a conversation first + conversation_data = OpenAIConversationCreate( + response_id="resp_test688080a1c52c819c937", + ancestor_response_id="ancestor_456", + user_question="What is the capital of France?", + response="The capital of France is Paris.", + model="gpt-4o", + assistant_id="asst_testXLnzQYrQlAEzrOA", + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, + ) + create_openai_conversation(db, conversation_data) + response = client.get( + "/api/v1/openai-conversation/response/resp_test688080a1c52c819c937", + headers={"X-API-KEY": user_api_key.key}, + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert data["data"]["response_id"] == "resp_test688080a1c52c819c937" + assert data["data"]["is_deleted"] is False + assert data["data"]["deleted_at"] is None + + +def test_get_conversations_by_ancestor( + client: TestClient, db: Session, user_api_key: APIKeyPublic +): + """Test getting conversations by ancestor_response_id.""" + # Create multiple 
conversations with same ancestor + conversation_data1 = OpenAIConversationCreate( + response_id="resp_test688080a1c52c819c937", + ancestor_response_id="resp_test688080a1c52c819c937", + user_question="What is the capital of France?", + response="The capital of France is Paris.", + model="gpt-4o", + assistant_id="asst_testXLnzQYrQlAEzrOA", + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, + ) + conversation_data2 = OpenAIConversationCreate( + response_id="resp_test688080a1c52c819c937_2", + ancestor_response_id="resp_test688080a1c52c819c937", + user_question="What is the capital of Spain?", + response="The capital of Spain is Madrid.", + model="gpt-4o", + assistant_id="asst_testXLnzQYrQlAEzrOA", + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, + ) + conversation_data3 = OpenAIConversationCreate( + response_id="resp_test688080a1c52c819c937_3", + ancestor_response_id="resp_test688080a1c52c819c937", + user_question="What is the capital of Italy?", + response="The capital of Italy is Rome.", + model="gpt-4o", + assistant_id="asst_testXLnzQYrQlAEzrOA", + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, + ) + + create_openai_conversation(db, conversation_data1) + create_openai_conversation(db, conversation_data2) + create_openai_conversation(db, conversation_data3) + + response = client.get( + "/api/v1/openai-conversation/ancestor/resp_test688080a1c52c819c937", + headers={"X-API-KEY": user_api_key.key}, + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert len(data["data"]) == 3 + assert all( + conv["ancestor_response_id"] == "resp_test688080a1c52c819c937" + for conv in data["data"] + ) + for conv in data["data"]: + assert conv["is_deleted"] is False + assert conv["deleted_at"] is None + + +def test_delete_conversation_by_id( + client: TestClient, db: Session, user_api_key: APIKeyPublic +): + """Test deleting a 
conversation by ID.""" + # Create a conversation first + conversation_data = OpenAIConversationCreate( + response_id="resp_test688080a1c52c819c937", + ancestor_response_id="resp_test688080a1c52c819c937", + user_question="What is the capital of France?", + response="The capital of France is Paris.", + model="gpt-4o", + assistant_id="asst_testXLnzQYrQlAEzrOA", + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, + ) + conversation = create_openai_conversation(db, conversation_data) + print(conversation) + response = client.delete( + f"/api/v1/openai-conversation/{conversation.id}", + headers={"X-API-KEY": user_api_key.key}, + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert "deleted successfully" in data["data"]["message"] + + +def test_list_conversations( + client: TestClient, db: Session, user_api_key: APIKeyPublic +): + """Test listing all conversations.""" + # Create multiple conversations + conversation_data1 = OpenAIConversationCreate( + response_id="resp_test688080a1c52c819c937", + ancestor_response_id="ancestor_1", + user_question="What is the capital of France?", + response="The capital of France is Paris.", + model="gpt-4o", + assistant_id="asst_testXLnzQYrQlAEzrOA", + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, + ) + conversation_data2 = OpenAIConversationCreate( + response_id="resp_test688080a1c52c819c937_2", + ancestor_response_id="ancestor_2", + user_question="What is the capital of Spain?", + response="The capital of Spain is Madrid.", + model="gpt-4o", + assistant_id="asst_testXLnzQYrQlAEzrOA", + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, + ) + conversation1 = create_openai_conversation(db, conversation_data1) + conversation2 = create_openai_conversation(db, conversation_data2) + response = client.get( + "/api/v1/openai-conversation/list", + headers={"X-API-KEY": user_api_key.key}, 
+ params={"skip": 0, "limit": 100}, + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + # Should contain at least the two conversations we just created + response_ids = [conv["response_id"] for conv in data["data"]] + assert conversation1.response_id in response_ids + assert conversation2.response_id in response_ids + for conv in data["data"]: + assert conv["is_deleted"] is False + assert conv["deleted_at"] is None diff --git a/backend/app/tests/api/routes/test_responses.py b/backend/app/tests/api/routes/test_responses.py index aac0a2beb..f32032c40 100644 --- a/backend/app/tests/api/routes/test_responses.py +++ b/backend/app/tests/api/routes/test_responses.py @@ -28,8 +28,8 @@ def test_responses_endpoint_success( # Setup the mock response object with real values for all used fields mock_response = MagicMock() - mock_response.id = "mock_response_id" - mock_response.output_text = "Test output" + mock_response.id = "resp_test688080a1c52c819c937" + mock_response.output_text = "Test assistant response" mock_response.model = "gpt-4o" mock_response.usage.input_tokens = 10 mock_response.usage.output_tokens = 5 @@ -85,8 +85,8 @@ def test_responses_endpoint_without_vector_store( # Setup the mock response object mock_response = MagicMock() - mock_response.id = "mock_response_id" - mock_response.output_text = "Test output" + mock_response.id = "resp_test688080a1c52c819c937" + mock_response.output_text = "Test assistant response" mock_response.model = "gpt-4" mock_response.usage.input_tokens = 10 mock_response.usage.output_tokens = 5 @@ -100,7 +100,7 @@ def test_responses_endpoint_without_vector_store( pytest.skip("Glific project not found in the database") request_data = { - "assistant_id": "assistant_123", + "assistant_id": "asst_testXLnzQYrQlAEzrOA", "question": "What is Glific?", "callback_url": "http://example.com/callback", } @@ -120,3 +120,63 @@ def test_responses_endpoint_without_vector_store( 
temperature=mock_assistant.temperature, input=[{"role": "user", "content": "What is Glific?"}], ) + + +@patch("app.api.routes.responses.OpenAI") +@patch("app.api.routes.responses.get_provider_credential") +@patch("app.api.routes.responses.get_assistant_by_id") +@patch("app.api.routes.responses.create_openai_conversation") +def test_responses_endpoint_stores_conversation( + mock_create_conversation, + mock_get_assistant, + mock_get_credential, + mock_openai, + user_api_key_header, +): + """Test that the /responses endpoint stores conversation in database.""" + # Setup mock credentials + mock_get_credential.return_value = {"api_key": "test_api_key"} + + # Setup mock assistant + mock_assistant = MagicMock() + mock_assistant.model = "gpt-4o" + mock_assistant.instructions = "Test instructions" + mock_assistant.temperature = 0.1 + mock_assistant.vector_store_ids = ["vs_test"] + mock_assistant.max_num_results = 20 + mock_get_assistant.return_value = mock_assistant + + # Setup mock OpenAI client + mock_client = MagicMock() + mock_openai.return_value = mock_client + + # Setup the mock response object + mock_response = MagicMock() + mock_response.id = "resp_test688080a1c52c819c937" + mock_response.output_text = "Test assistant response" + mock_response.model = "gpt-4o" + mock_response.output = [] + mock_client.responses.create.return_value = mock_response + + request_data = { + "assistant_id": "asst_testXLnzQYrQlAEzrOA", + "question": "What is Glific?", + "callback_url": "http://example.com/callback", + } + + response = client.post("/responses", json=request_data, headers=user_api_key_header) + assert response.status_code == 200 + response_json = response.json() + assert response_json["success"] is True + assert response_json["data"]["status"] == "processing" + + # Verify that create_openai_conversation was called with correct data + mock_create_conversation.assert_called_once() + call_args = mock_create_conversation.call_args + conversation_data = call_args[0][1] # Second 
argument is the conversation data + + assert conversation_data.response_id == "resp_test688080a1c52c819c937" + assert conversation_data.user_question == "What is Glific?" + assert conversation_data.response == "Test assistant response" + assert conversation_data.model == "gpt-4o" + assert conversation_data.assistant_id == "asst_testXLnzQYrQlAEzrOA"