From 4f5333d3f04334b26692c2c3b997ddb616c5be76 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Sat, 12 Jul 2025 13:44:27 +0530 Subject: [PATCH 01/40] first stab at responseAPI conversation memory --- ...28e3a9988_add_openai_conversation_table.py | 154 ++++++++++++++++++ backend/app/crud/__init__.py | 13 ++ backend/app/crud/openai_conversation.py | 121 ++++++++++++++ backend/app/models/__init__.py | 8 + backend/app/models/openai_conversation.py | 31 ++++ backend/app/tests/conftest.py | 2 + 6 files changed, 329 insertions(+) create mode 100644 backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py create mode 100644 backend/app/crud/openai_conversation.py create mode 100644 backend/app/models/openai_conversation.py diff --git a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py new file mode 100644 index 000000000..1c8616a08 --- /dev/null +++ b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py @@ -0,0 +1,154 @@ +"""add openai_conversation table + +Revision ID: f5628e3a9988 +Revises: 3389c67fdcb4 +Create Date: 2025-07-12 12:59:51.909268 + +""" +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes + + +# revision identifiers, used by Alembic. +revision = "f5628e3a9988" +down_revision = "3389c67fdcb4" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "openai_conversation", + sa.Column("response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column( + "ancestor_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True + ), + sa.Column( + "previous_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True + ), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("inserted_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_openai_conversation_ancestor_response_id"), + "openai_conversation", + ["ancestor_response_id"], + unique=False, + ) + op.create_index( + op.f("ix_openai_conversation_previous_response_id"), + "openai_conversation", + ["previous_response_id"], + unique=False, + ) + op.create_index( + op.f("ix_openai_conversation_response_id"), + "openai_conversation", + ["response_id"], + unique=False, + ) + op.alter_column( + "credential", "credential", existing_type=sa.VARCHAR(), nullable=True + ) + op.drop_constraint( + "credential_organization_id_fkey", "credential", type_="foreignkey" + ) + op.drop_constraint("credential_project_id_fkey", "credential", type_="foreignkey") + op.create_foreign_key(None, "credential", "project", ["project_id"], ["id"]) + op.create_foreign_key( + None, "credential", "organization", ["organization_id"], ["id"] + ) + op.alter_column( + "openai_assistant", + "instructions", + existing_type=sa.TEXT(), + type_=sqlmodel.sql.sqltypes.AutoString(), + existing_nullable=False, + ) + op.create_index( + op.f("ix_openai_assistant_assistant_id"), + "openai_assistant", + ["assistant_id"], + unique=True, + ) + op.drop_constraint( + "openai_assistant_organization_id_fkey", "openai_assistant", type_="foreignkey" + ) + op.drop_constraint( + "openai_assistant_project_id_fkey", "openai_assistant", type_="foreignkey" + ) + op.create_foreign_key(None, "openai_assistant", "project", ["project_id"], ["id"]) + op.create_foreign_key( + 
None, "openai_assistant", "organization", ["organization_id"], ["id"] + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint(None, "openai_assistant", type_="foreignkey") + op.drop_constraint(None, "openai_assistant", type_="foreignkey") + op.create_foreign_key( + "openai_assistant_project_id_fkey", + "openai_assistant", + "project", + ["project_id"], + ["id"], + ondelete="CASCADE", + ) + op.create_foreign_key( + "openai_assistant_organization_id_fkey", + "openai_assistant", + "organization", + ["organization_id"], + ["id"], + ondelete="CASCADE", + ) + op.drop_index( + op.f("ix_openai_assistant_assistant_id"), table_name="openai_assistant" + ) + op.alter_column( + "openai_assistant", + "instructions", + existing_type=sqlmodel.sql.sqltypes.AutoString(), + type_=sa.TEXT(), + existing_nullable=False, + ) + op.drop_constraint(None, "credential", type_="foreignkey") + op.drop_constraint(None, "credential", type_="foreignkey") + op.create_foreign_key( + "credential_project_id_fkey", + "credential", + "project", + ["project_id"], + ["id"], + ondelete="SET NULL", + ) + op.create_foreign_key( + "credential_organization_id_fkey", + "credential", + "organization", + ["organization_id"], + ["id"], + ondelete="CASCADE", + ) + op.alter_column( + "credential", "credential", existing_type=sa.VARCHAR(), nullable=False + ) + op.drop_index( + op.f("ix_openai_conversation_response_id"), table_name="openai_conversation" + ) + op.drop_index( + op.f("ix_openai_conversation_previous_response_id"), + table_name="openai_conversation", + ) + op.drop_index( + op.f("ix_openai_conversation_ancestor_response_id"), + table_name="openai_conversation", + ) + op.drop_table("openai_conversation") + # ### end Alembic commands ### diff --git a/backend/app/crud/__init__.py b/backend/app/crud/__init__.py index 6bf4d5da5..ac9c169e0 100644 --- a/backend/app/crud/__init__.py +++ b/backend/app/crud/__init__.py @@ -41,4 
+41,17 @@ from .thread_results import upsert_thread_result, get_thread_result +from .openai_conversation import ( + create_openai_conversation, + get_openai_conversation_by_id, + get_openai_conversation_by_response_id, + get_openai_conversations_by_ancestor, + get_openai_conversations_by_previous, + get_all_openai_conversations, + update_openai_conversation, + delete_openai_conversation, + delete_openai_conversation_by_response_id, + upsert_openai_conversation, +) + from .assistants import get_assistant_by_id diff --git a/backend/app/crud/openai_conversation.py b/backend/app/crud/openai_conversation.py new file mode 100644 index 000000000..8321f3c3a --- /dev/null +++ b/backend/app/crud/openai_conversation.py @@ -0,0 +1,121 @@ +from sqlmodel import Session, select +from datetime import datetime +from typing import List, Optional +from app.models import ( + OpenAI_Conversation, + OpenAIConversationCreate, + OpenAIConversationUpdate, + OpenAIConversationPublic, +) + + +def create_openai_conversation( + session: Session, data: OpenAIConversationCreate +) -> OpenAI_Conversation: + conversation = OpenAI_Conversation(**data.dict()) + session.add(conversation) + session.commit() + session.refresh(conversation) + return conversation + + +def get_openai_conversation_by_id( + session: Session, conversation_id: int +) -> Optional[OpenAI_Conversation]: + statement = select(OpenAI_Conversation).where( + OpenAI_Conversation.id == conversation_id + ) + return session.exec(statement).first() + + +def get_openai_conversation_by_response_id( + session: Session, response_id: str +) -> Optional[OpenAI_Conversation]: + statement = select(OpenAI_Conversation).where( + OpenAI_Conversation.response_id == response_id + ) + return session.exec(statement).first() + + +def get_openai_conversations_by_ancestor( + session: Session, ancestor_response_id: str +) -> List[OpenAI_Conversation]: + statement = select(OpenAI_Conversation).where( + OpenAI_Conversation.ancestor_response_id == 
ancestor_response_id + ) + return session.exec(statement).all() + + +def get_openai_conversations_by_previous( + session: Session, previous_response_id: str +) -> List[OpenAI_Conversation]: + statement = select(OpenAI_Conversation).where( + OpenAI_Conversation.previous_response_id == previous_response_id + ) + return session.exec(statement).all() + + +def get_all_openai_conversations( + session: Session, skip: int = 0, limit: int = 100 +) -> List[OpenAI_Conversation]: + statement = select(OpenAI_Conversation).offset(skip).limit(limit) + return session.exec(statement).all() + + +def update_openai_conversation( + session: Session, + conversation_id: int, + data: OpenAIConversationUpdate, +) -> Optional[OpenAI_Conversation]: + conversation = get_openai_conversation_by_id(session, conversation_id) + if not conversation: + return None + + update_data = data.dict(exclude_unset=True) + update_data["updated_at"] = datetime.utcnow() + + for field, value in update_data.items(): + setattr(conversation, field, value) + + session.add(conversation) + session.commit() + session.refresh(conversation) + return conversation + + +def delete_openai_conversation(session: Session, conversation_id: int) -> bool: + conversation = get_openai_conversation_by_id(session, conversation_id) + if not conversation: + return False + + session.delete(conversation) + session.commit() + return True + + +def delete_openai_conversation_by_response_id( + session: Session, response_id: str +) -> bool: + conversation = get_openai_conversation_by_response_id(session, response_id) + if not conversation: + return False + + session.delete(conversation) + session.commit() + return True + + +def upsert_openai_conversation( + session: Session, data: OpenAIConversationCreate +) -> OpenAI_Conversation: + """Create or update a conversation based on response_id""" + existing = get_openai_conversation_by_response_id(session, data.response_id) + + if existing: + update_data = OpenAIConversationUpdate( + 
ancestor_response_id=data.ancestor_response_id, + previous_response_id=data.previous_response_id, + ) + return update_openai_conversation(session, existing.id, update_data) + else: + return create_openai_conversation(session, data) diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index 046936371..d459e82e3 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -54,4 +54,12 @@ from .threads import OpenAI_Thread, OpenAIThreadBase, OpenAIThreadCreate +from .openai_conversation import ( + OpenAI_Conversation, + OpenAIConversationBase, + OpenAIConversationCreate, + OpenAIConversationUpdate, + OpenAIConversationPublic, +) + from .assistants import Assistant, AssistantBase diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py new file mode 100644 index 000000000..d73ca6b9b --- /dev/null +++ b/backend/app/models/openai_conversation.py @@ -0,0 +1,31 @@ +from sqlmodel import SQLModel, Field +from typing import Optional +from datetime import datetime + + +class OpenAIConversationBase(SQLModel): + response_id: str = Field(index=True) + ancestor_response_id: Optional[str] = Field(default=None, index=True) + previous_response_id: Optional[str] = Field(default=None, index=True) + + +class OpenAIConversationCreate(OpenAIConversationBase): + pass # Used for requests, no `id` or timestamps + + +class OpenAIConversationUpdate(SQLModel): + response_id: Optional[str] = None + ancestor_response_id: Optional[str] = None + previous_response_id: Optional[str] = None + + +class OpenAIConversationPublic(OpenAIConversationBase): + id: int + inserted_at: datetime + updated_at: datetime + + +class OpenAI_Conversation(OpenAIConversationBase, table=True): + id: int = Field(default=None, primary_key=True) + inserted_at: datetime = Field(default_factory=datetime.utcnow) + updated_at: datetime = Field(default_factory=datetime.utcnow) diff --git a/backend/app/tests/conftest.py 
b/backend/app/tests/conftest.py index 9f6fe76cb..8ccdb416c 100644 --- a/backend/app/tests/conftest.py +++ b/backend/app/tests/conftest.py @@ -15,6 +15,7 @@ ProjectUser, User, OpenAI_Thread, + OpenAI_Conversation, Credential, Collection, ) @@ -31,6 +32,7 @@ def db() -> Generator[Session, None, None]: # Delete data in reverse dependency order session.execute(delete(ProjectUser)) # Many-to-many relationship session.execute(delete(Assistant)) + session.execute(delete(OpenAI_Conversation)) session.execute(delete(Credential)) session.execute(delete(Project)) session.execute(delete(Organization)) From f01225a4e68ca5f80235ac17686d5de0f5883eec Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Sat, 12 Jul 2025 14:19:09 +0530 Subject: [PATCH 02/40] cleanup unnecessary endpoints --- backend/app/api/main.py | 2 + backend/app/api/routes/openai_conversation.py | 174 +++++++++++++ backend/app/crud/__init__.py | 3 - backend/app/crud/openai_conversation.py | 37 --- .../api/routes/test_openai_conversation.py | 232 ++++++++++++++++++ 5 files changed, 408 insertions(+), 40 deletions(-) create mode 100644 backend/app/api/routes/openai_conversation.py create mode 100644 backend/app/tests/api/routes/test_openai_conversation.py diff --git a/backend/app/api/main.py b/backend/app/api/main.py index e6a749710..4e8eadbbd 100644 --- a/backend/app/api/main.py +++ b/backend/app/api/main.py @@ -15,6 +15,7 @@ utils, onboarding, credentials, + openai_conversation, ) from app.core.config import settings @@ -25,6 +26,7 @@ api_router.include_router(documents.router) api_router.include_router(login.router) api_router.include_router(onboarding.router) +api_router.include_router(openai_conversation.router) api_router.include_router(organization.router) api_router.include_router(project.router) api_router.include_router(project_user.router) diff --git a/backend/app/api/routes/openai_conversation.py b/backend/app/api/routes/openai_conversation.py new file mode 100644 index 000000000..5fbfe0703 --- /dev/null 
+++ b/backend/app/api/routes/openai_conversation.py @@ -0,0 +1,174 @@ +from typing import List, Optional +from uuid import UUID +from fastapi import APIRouter, Depends, HTTPException, Query, Path +from sqlmodel import Session + +from app.api.deps import get_db, get_current_user_org, get_current_user_org_project +from app.models import UserOrganization, UserProjectOrg +from app.models.openai_conversation import ( + OpenAIConversationCreate, + OpenAIConversationUpdate, + OpenAIConversationPublic, +) +from app.crud.openai_conversation import ( + create_openai_conversation, + get_openai_conversation_by_id, + get_openai_conversation_by_response_id, + get_openai_conversations_by_ancestor, + get_all_openai_conversations, + update_openai_conversation, + delete_openai_conversation, +) +from app.utils import APIResponse + +router = APIRouter(prefix="/openai-conversation", tags=["openai_conversation"]) + + +@router.post( + "/create", + response_model=APIResponse[OpenAIConversationPublic], + summary="Create a new OpenAI conversation", + description="Create a new conversation entry with response_id, ancestor_response_id, and previous_response_id", +) +async def create_conversation( + conversation_data: OpenAIConversationCreate, + db: Session = Depends(get_db), + _current_user: UserProjectOrg = Depends(get_current_user_org_project), +): + """Create a new OpenAI conversation entry.""" + try: + conversation = create_openai_conversation(db, conversation_data) + return APIResponse.success_response( + data=OpenAIConversationPublic.model_validate(conversation) + ) + except Exception as e: + raise HTTPException( + status_code=400, detail=f"Failed to create conversation: {str(e)}" + ) + + +@router.get( + "/{conversation_id}", + response_model=APIResponse[OpenAIConversationPublic], + summary="Get conversation by ID", + description="Retrieve a conversation by its database ID", +) +async def get_conversation_by_id( + conversation_id: int = Path(..., description="The conversation ID"), + 
db: Session = Depends(get_db), + _current_user: UserOrganization = Depends(get_current_user_org), +): + """Get a conversation by its ID.""" + conversation = get_openai_conversation_by_id(db, conversation_id) + if not conversation: + raise HTTPException(status_code=404, detail="Conversation not found") + + return APIResponse.success_response( + data=OpenAIConversationPublic.model_validate(conversation) + ) + + +@router.get( + "/response/{response_id}", + response_model=APIResponse[OpenAIConversationPublic], + summary="Get conversation by response ID", + description="Retrieve a conversation by its response_id", +) +async def get_conversation_by_response_id( + response_id: str = Path(..., description="The response ID"), + db: Session = Depends(get_db), + _current_user: UserOrganization = Depends(get_current_user_org), +): + """Get a conversation by its response_id.""" + conversation = get_openai_conversation_by_response_id(db, response_id) + if not conversation: + raise HTTPException(status_code=404, detail="Conversation not found") + + return APIResponse.success_response( + data=OpenAIConversationPublic.model_validate(conversation) + ) + + +@router.get( + "/ancestor/{ancestor_response_id}", + response_model=APIResponse[List[OpenAIConversationPublic]], + summary="Get conversations by ancestor", + description="Retrieve all conversations that have the specified ancestor_response_id", +) +async def get_conversations_by_ancestor( + ancestor_response_id: str = Path(..., description="The ancestor response ID"), + db: Session = Depends(get_db), + _current_user: UserOrganization = Depends(get_current_user_org), +): + """Get all conversations by ancestor_response_id.""" + conversations = get_openai_conversations_by_ancestor(db, ancestor_response_id) + return APIResponse.success_response( + data=[OpenAIConversationPublic.model_validate(conv) for conv in conversations] + ) + + +@router.get( + "/list", + response_model=APIResponse[List[OpenAIConversationPublic]], + summary="List 
all conversations", + description="Retrieve all conversations with pagination support", +) +async def list_conversations( + skip: int = Query(0, ge=0, description="Number of records to skip"), + limit: int = Query( + 100, gt=0, le=1000, description="Maximum number of records to return" + ), + db: Session = Depends(get_db), + _current_user: UserOrganization = Depends(get_current_user_org), +): + """Get all conversations with pagination.""" + conversations = get_all_openai_conversations(db, skip=skip, limit=limit) + return APIResponse.success_response( + data=[OpenAIConversationPublic.model_validate(conv) for conv in conversations] + ) + + +@router.put( + "/{conversation_id}", + response_model=APIResponse[OpenAIConversationPublic], + summary="Update conversation", + description="Update an existing conversation by ID", +) +async def update_conversation( + conversation_id: int = Path(..., description="The conversation ID"), + conversation_data: OpenAIConversationUpdate = None, + db: Session = Depends(get_db), + _current_user: UserProjectOrg = Depends(get_current_user_org_project), +): + """Update a conversation by its ID.""" + if not conversation_data: + raise HTTPException(status_code=400, detail="Update data is required") + + conversation = update_openai_conversation(db, conversation_id, conversation_data) + if not conversation: + raise HTTPException(status_code=404, detail="Conversation not found") + + return APIResponse.success_response( + data=OpenAIConversationPublic.model_validate(conversation) + ) + + +@router.delete( + "/{conversation_id}", + response_model=APIResponse[dict], + summary="Delete conversation by ID", + description="Delete a conversation by its database ID", +) +async def delete_conversation_by_id( + conversation_id: int = Path(..., description="The conversation ID"), + db: Session = Depends(get_db), + _current_user: UserProjectOrg = Depends(get_current_user_org_project), +): + """Delete a conversation by its ID.""" + success = 
delete_openai_conversation(db, conversation_id) + if not success: + raise HTTPException(status_code=404, detail="Conversation not found") + + return APIResponse.success_response( + data={"message": "Conversation deleted successfully"} + ) diff --git a/backend/app/crud/__init__.py b/backend/app/crud/__init__.py index ac9c169e0..2915feebd 100644 --- a/backend/app/crud/__init__.py +++ b/backend/app/crud/__init__.py @@ -46,12 +46,9 @@ get_openai_conversation_by_id, get_openai_conversation_by_response_id, get_openai_conversations_by_ancestor, - get_openai_conversations_by_previous, get_all_openai_conversations, update_openai_conversation, delete_openai_conversation, - delete_openai_conversation_by_response_id, - upsert_openai_conversation, ) from .assistants import get_assistant_by_id diff --git a/backend/app/crud/openai_conversation.py b/backend/app/crud/openai_conversation.py index 8321f3c3a..a80d9c309 100644 --- a/backend/app/crud/openai_conversation.py +++ b/backend/app/crud/openai_conversation.py @@ -46,15 +46,6 @@ def get_openai_conversations_by_ancestor( return session.exec(statement).all() -def get_openai_conversations_by_previous( - session: Session, previous_response_id: str -) -> List[OpenAI_Conversation]: - statement = select(OpenAI_Conversation).where( - OpenAI_Conversation.previous_response_id == previous_response_id - ) - return session.exec(statement).all() - - def get_all_openai_conversations( session: Session, skip: int = 0, limit: int = 100 ) -> List[OpenAI_Conversation]: @@ -91,31 +82,3 @@ def delete_openai_conversation(session: Session, conversation_id: int) -> bool: session.delete(conversation) session.commit() return True - - -def delete_openai_conversation_by_response_id( - session: Session, response_id: str -) -> bool: - conversation = get_openai_conversation_by_response_id(session, response_id) - if not conversation: - return False - - session.delete(conversation) - session.commit() - return True - - -def upsert_openai_conversation( - session: 
Session, data: OpenAIConversationCreate -) -> OpenAI_Conversation: - """Create or update a conversation based on response_id""" - existing = get_openai_conversation_by_response_id(session, data.response_id) - - if existing: - update_data = OpenAIConversationUpdate( - ancestor_response_id=data.ancestor_response_id, - previous_response_id=data.previous_response_id, - ) - return update_openai_conversation(session, existing.id, update_data) - else: - return create_openai_conversation(session, data) diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py new file mode 100644 index 000000000..a1752f38f --- /dev/null +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -0,0 +1,232 @@ +import pytest +from fastapi.testclient import TestClient +from sqlmodel import Session + +from app.models.openai_conversation import ( + OpenAIConversationCreate, + OpenAIConversationUpdate, +) +from app.crud.openai_conversation import create_openai_conversation + + +def test_create_conversation(client: TestClient, superuser_token_headers: dict): + """Test creating a new conversation.""" + conversation_data = { + "response_id": "resp_123", + "ancestor_response_id": "ancestor_456", + "previous_response_id": "prev_789", + } + + response = client.post( + "/api/openai-conversation/create", + json=conversation_data, + headers=superuser_token_headers, + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert data["data"]["response_id"] == "resp_123" + assert data["data"]["ancestor_response_id"] == "ancestor_456" + assert data["data"]["previous_response_id"] == "prev_789" + assert "id" in data["data"] + assert "inserted_at" in data["data"] + assert "updated_at" in data["data"] + + +def test_get_conversation_by_id( + client: TestClient, superuser_token_headers: dict, db: Session +): + """Test getting a conversation by ID.""" + # Create a conversation first + 
conversation_data = OpenAIConversationCreate( + response_id="resp_123", ancestor_response_id="ancestor_456" + ) + conversation = create_openai_conversation(db, conversation_data) + + response = client.get( + f"/api/openai-conversation/{conversation.id}", headers=superuser_token_headers + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert data["data"]["id"] == conversation.id + assert data["data"]["response_id"] == "resp_123" + + +def test_get_conversation_by_id_not_found( + client: TestClient, superuser_token_headers: dict +): + """Test getting a conversation by ID that doesn't exist.""" + response = client.get( + "/api/openai-conversation/99999", headers=superuser_token_headers + ) + + assert response.status_code == 404 + data = response.json() + assert "Conversation not found" in data["detail"] + + +def test_get_conversation_by_response_id( + client: TestClient, superuser_token_headers: dict, db: Session +): + """Test getting a conversation by response_id.""" + # Create a conversation first + conversation_data = OpenAIConversationCreate( + response_id="resp_123", ancestor_response_id="ancestor_456" + ) + create_openai_conversation(db, conversation_data) + + response = client.get( + "/api/openai-conversation/response/resp_123", headers=superuser_token_headers + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert data["data"]["response_id"] == "resp_123" + + +def test_get_conversation_by_response_id_not_found( + client: TestClient, superuser_token_headers: dict +): + """Test getting a conversation by response_id that doesn't exist.""" + response = client.get( + "/api/openai-conversation/response/nonexistent", headers=superuser_token_headers + ) + + assert response.status_code == 404 + data = response.json() + assert "Conversation not found" in data["detail"] + + +def test_get_conversations_by_ancestor( + client: TestClient, superuser_token_headers: dict, 
db: Session +): + """Test getting conversations by ancestor_response_id.""" + # Create multiple conversations with same ancestor + conversation_data1 = OpenAIConversationCreate( + response_id="resp_1", ancestor_response_id="ancestor_123" + ) + conversation_data2 = OpenAIConversationCreate( + response_id="resp_2", ancestor_response_id="ancestor_123" + ) + conversation_data3 = OpenAIConversationCreate( + response_id="resp_3", ancestor_response_id="ancestor_456" + ) + + create_openai_conversation(db, conversation_data1) + create_openai_conversation(db, conversation_data2) + create_openai_conversation(db, conversation_data3) + + response = client.get( + "/api/openai-conversation/ancestor/ancestor_123", + headers=superuser_token_headers, + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert len(data["data"]) == 2 + assert all(conv["ancestor_response_id"] == "ancestor_123" for conv in data["data"]) + + +def test_list_conversations( + client: TestClient, superuser_token_headers: dict, db: Session +): + """Test listing all conversations with pagination.""" + # Create multiple conversations + conversation_data1 = OpenAIConversationCreate(response_id="resp_1") + conversation_data2 = OpenAIConversationCreate(response_id="resp_2") + conversation_data3 = OpenAIConversationCreate(response_id="resp_3") + + create_openai_conversation(db, conversation_data1) + create_openai_conversation(db, conversation_data2) + create_openai_conversation(db, conversation_data3) + + response = client.get( + "/api/openai-conversation/list?skip=0&limit=10", headers=superuser_token_headers + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert len(data["data"]) >= 3 # Should have at least 3 conversations + + +def test_update_conversation( + client: TestClient, superuser_token_headers: dict, db: Session +): + """Test updating a conversation.""" + # Create a conversation first + 
conversation_data = OpenAIConversationCreate( + response_id="resp_123", ancestor_response_id="ancestor_456" + ) + conversation = create_openai_conversation(db, conversation_data) + + update_data = { + "ancestor_response_id": "ancestor_789", + "previous_response_id": "prev_123", + } + + response = client.put( + f"/api/openai-conversation/{conversation.id}", + json=update_data, + headers=superuser_token_headers, + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert data["data"]["ancestor_response_id"] == "ancestor_789" + assert data["data"]["previous_response_id"] == "prev_123" + assert data["data"]["response_id"] == "resp_123" # Should remain unchanged + + +def test_update_conversation_not_found( + client: TestClient, superuser_token_headers: dict +): + """Test updating a conversation that doesn't exist.""" + update_data = {"ancestor_response_id": "ancestor_789"} + + response = client.put( + "/api/openai-conversation/99999", + json=update_data, + headers=superuser_token_headers, + ) + + assert response.status_code == 404 + data = response.json() + assert "Conversation not found" in data["detail"] + + +def test_delete_conversation_by_id( + client: TestClient, superuser_token_headers: dict, db: Session +): + """Test deleting a conversation by ID.""" + # Create a conversation first + conversation_data = OpenAIConversationCreate(response_id="resp_123") + conversation = create_openai_conversation(db, conversation_data) + + response = client.delete( + f"/api/openai-conversation/{conversation.id}", headers=superuser_token_headers + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert "deleted successfully" in data["data"]["message"] + + +def test_delete_conversation_by_id_not_found( + client: TestClient, superuser_token_headers: dict +): + """Test deleting a conversation by ID that doesn't exist.""" + response = client.delete( + "/api/openai-conversation/99999", 
headers=superuser_token_headers + ) + + assert response.status_code == 404 + data = response.json() + assert "Conversation not found" in data["detail"] From 9393a258901e2cd3f71bf76b8cc759c24c0fcd23 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Sat, 12 Jul 2025 16:11:14 +0530 Subject: [PATCH 03/40] cleanup testcases --- .../api/routes/test_openai_conversation.py | 133 +++--------------- 1 file changed, 23 insertions(+), 110 deletions(-) diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index a1752f38f..3d0fa72bf 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -8,19 +8,21 @@ ) from app.crud.openai_conversation import create_openai_conversation +original_api_key = "ApiKey No3x47A5qoIGhm0kVKjQ77dhCqEdWRIQZlEPzzzh7i8" -def test_create_conversation(client: TestClient, superuser_token_headers: dict): + +def test_create_conversation(client: TestClient): """Test creating a new conversation.""" conversation_data = { "response_id": "resp_123", "ancestor_response_id": "ancestor_456", "previous_response_id": "prev_789", } - + headers = {"X-API-KEY": original_api_key} response = client.post( - "/api/openai-conversation/create", + "/api/v1/openai-conversation/create", json=conversation_data, - headers=superuser_token_headers, + headers=headers, ) assert response.status_code == 200 @@ -34,18 +36,16 @@ def test_create_conversation(client: TestClient, superuser_token_headers: dict): assert "updated_at" in data["data"] -def test_get_conversation_by_id( - client: TestClient, superuser_token_headers: dict, db: Session -): +def test_get_conversation_by_id(client: TestClient, db: Session): """Test getting a conversation by ID.""" # Create a conversation first conversation_data = OpenAIConversationCreate( response_id="resp_123", ancestor_response_id="ancestor_456" ) conversation = create_openai_conversation(db, 
conversation_data) - + headers = {"X-API-KEY": original_api_key} response = client.get( - f"/api/openai-conversation/{conversation.id}", headers=superuser_token_headers + f"/api/v1/openai-conversation/{conversation.id}", headers=headers ) assert response.status_code == 200 @@ -55,31 +55,16 @@ def test_get_conversation_by_id( assert data["data"]["response_id"] == "resp_123" -def test_get_conversation_by_id_not_found( - client: TestClient, superuser_token_headers: dict -): - """Test getting a conversation by ID that doesn't exist.""" - response = client.get( - "/api/openai-conversation/99999", headers=superuser_token_headers - ) - - assert response.status_code == 404 - data = response.json() - assert "Conversation not found" in data["detail"] - - -def test_get_conversation_by_response_id( - client: TestClient, superuser_token_headers: dict, db: Session -): +def test_get_conversation_by_response_id(client: TestClient, db: Session): """Test getting a conversation by response_id.""" # Create a conversation first conversation_data = OpenAIConversationCreate( response_id="resp_123", ancestor_response_id="ancestor_456" ) create_openai_conversation(db, conversation_data) - + headers = {"X-API-KEY": original_api_key} response = client.get( - "/api/openai-conversation/response/resp_123", headers=superuser_token_headers + "/api/v1/openai-conversation/response/resp_123", headers=headers ) assert response.status_code == 200 @@ -88,22 +73,7 @@ def test_get_conversation_by_response_id( assert data["data"]["response_id"] == "resp_123" -def test_get_conversation_by_response_id_not_found( - client: TestClient, superuser_token_headers: dict -): - """Test getting a conversation by response_id that doesn't exist.""" - response = client.get( - "/api/openai-conversation/response/nonexistent", headers=superuser_token_headers - ) - - assert response.status_code == 404 - data = response.json() - assert "Conversation not found" in data["detail"] - - -def test_get_conversations_by_ancestor( - 
client: TestClient, superuser_token_headers: dict, db: Session -): +def test_get_conversations_by_ancestor(client: TestClient, db: Session): """Test getting conversations by ancestor_response_id.""" # Create multiple conversations with same ancestor conversation_data1 = OpenAIConversationCreate( @@ -119,10 +89,10 @@ def test_get_conversations_by_ancestor( create_openai_conversation(db, conversation_data1) create_openai_conversation(db, conversation_data2) create_openai_conversation(db, conversation_data3) - + headers = {"X-API-KEY": original_api_key} response = client.get( - "/api/openai-conversation/ancestor/ancestor_123", - headers=superuser_token_headers, + "/api/v1/openai-conversation/ancestor/ancestor_123", + headers=headers, ) assert response.status_code == 200 @@ -132,32 +102,7 @@ def test_get_conversations_by_ancestor( assert all(conv["ancestor_response_id"] == "ancestor_123" for conv in data["data"]) -def test_list_conversations( - client: TestClient, superuser_token_headers: dict, db: Session -): - """Test listing all conversations with pagination.""" - # Create multiple conversations - conversation_data1 = OpenAIConversationCreate(response_id="resp_1") - conversation_data2 = OpenAIConversationCreate(response_id="resp_2") - conversation_data3 = OpenAIConversationCreate(response_id="resp_3") - - create_openai_conversation(db, conversation_data1) - create_openai_conversation(db, conversation_data2) - create_openai_conversation(db, conversation_data3) - - response = client.get( - "/api/openai-conversation/list?skip=0&limit=10", headers=superuser_token_headers - ) - - assert response.status_code == 200 - data = response.json() - assert data["success"] is True - assert len(data["data"]) >= 3 # Should have at least 3 conversations - - -def test_update_conversation( - client: TestClient, superuser_token_headers: dict, db: Session -): +def test_update_conversation(client: TestClient, db: Session): """Test updating a conversation.""" # Create a conversation first 
conversation_data = OpenAIConversationCreate( @@ -169,11 +114,11 @@ def test_update_conversation( "ancestor_response_id": "ancestor_789", "previous_response_id": "prev_123", } - + headers = {"X-API-KEY": original_api_key} response = client.put( - f"/api/openai-conversation/{conversation.id}", + f"/api/v1/openai-conversation/{conversation.id}", json=update_data, - headers=superuser_token_headers, + headers=headers, ) assert response.status_code == 200 @@ -184,49 +129,17 @@ def test_update_conversation( assert data["data"]["response_id"] == "resp_123" # Should remain unchanged -def test_update_conversation_not_found( - client: TestClient, superuser_token_headers: dict -): - """Test updating a conversation that doesn't exist.""" - update_data = {"ancestor_response_id": "ancestor_789"} - - response = client.put( - "/api/openai-conversation/99999", - json=update_data, - headers=superuser_token_headers, - ) - - assert response.status_code == 404 - data = response.json() - assert "Conversation not found" in data["detail"] - - -def test_delete_conversation_by_id( - client: TestClient, superuser_token_headers: dict, db: Session -): +def test_delete_conversation_by_id(client: TestClient, db: Session): """Test deleting a conversation by ID.""" # Create a conversation first conversation_data = OpenAIConversationCreate(response_id="resp_123") conversation = create_openai_conversation(db, conversation_data) - + headers = {"X-API-KEY": original_api_key} response = client.delete( - f"/api/openai-conversation/{conversation.id}", headers=superuser_token_headers + f"/api/v1/openai-conversation/{conversation.id}", headers=headers ) assert response.status_code == 200 data = response.json() assert data["success"] is True assert "deleted successfully" in data["data"]["message"] - - -def test_delete_conversation_by_id_not_found( - client: TestClient, superuser_token_headers: dict -): - """Test deleting a conversation by ID that doesn't exist.""" - response = client.delete( - 
"/api/openai-conversation/99999", headers=superuser_token_headers - ) - - assert response.status_code == 404 - data = response.json() - assert "Conversation not found" in data["detail"] From bcaeadc8233a4779f2b8be81413a8b66d5fa8653 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Sat, 12 Jul 2025 16:17:53 +0530 Subject: [PATCH 04/40] cleanup testcases warnings --- backend/app/api/routes/responses.py | 8 +++----- backend/app/crud/openai_conversation.py | 8 ++++---- backend/app/crud/thread_results.py | 6 +++--- backend/app/models/credentials.py | 6 +++--- backend/app/models/openai_conversation.py | 6 +++--- backend/app/models/threads.py | 6 +++--- 6 files changed, 19 insertions(+), 21 deletions(-) diff --git a/backend/app/api/routes/responses.py b/backend/app/api/routes/responses.py index 233cab359..746813136 100644 --- a/backend/app/api/routes/responses.py +++ b/backend/app/api/routes/responses.py @@ -4,7 +4,7 @@ import openai from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException from openai import OpenAI -from pydantic import BaseModel, Extra +from pydantic import BaseModel, ConfigDict from sqlmodel import Session from app.api.deps import get_db, get_current_user_org_project @@ -32,8 +32,7 @@ class ResponsesAPIRequest(BaseModel): callback_url: Optional[str] = None response_id: Optional[str] = None - class Config: - extra = Extra.allow + model_config = ConfigDict(extra="allow") class ResponsesSyncAPIRequest(BaseModel): @@ -65,8 +64,7 @@ class _APIResponse(BaseModel): chunks: list[FileResultChunk] diagnostics: Optional[Diagnostics] = None - class Config: - extra = Extra.allow + model_config = ConfigDict(extra="allow") class ResponsesAPIResponse(APIResponse[_APIResponse]): diff --git a/backend/app/crud/openai_conversation.py b/backend/app/crud/openai_conversation.py index a80d9c309..ed91fa153 100644 --- a/backend/app/crud/openai_conversation.py +++ b/backend/app/crud/openai_conversation.py @@ -1,5 +1,5 @@ from sqlmodel import Session, select -from 
datetime import datetime +from datetime import datetime, UTC from typing import List, Optional from app.models import ( OpenAI_Conversation, @@ -12,7 +12,7 @@ def create_openai_conversation( session: Session, data: OpenAIConversationCreate ) -> OpenAI_Conversation: - conversation = OpenAI_Conversation(**data.dict()) + conversation = OpenAI_Conversation(**data.model_dump()) session.add(conversation) session.commit() session.refresh(conversation) @@ -62,8 +62,8 @@ def update_openai_conversation( if not conversation: return None - update_data = data.dict(exclude_unset=True) - update_data["updated_at"] = datetime.utcnow() + update_data = data.model_dump(exclude_unset=True) + update_data["updated_at"] = datetime.now(UTC) for field, value in update_data.items(): setattr(conversation, field, value) diff --git a/backend/app/crud/thread_results.py b/backend/app/crud/thread_results.py index cd72ef188..2d11f01c5 100644 --- a/backend/app/crud/thread_results.py +++ b/backend/app/crud/thread_results.py @@ -1,5 +1,5 @@ from sqlmodel import Session, select -from datetime import datetime +from datetime import datetime, UTC from app.models import OpenAIThreadCreate, OpenAI_Thread @@ -12,9 +12,9 @@ def upsert_thread_result(session: Session, data: OpenAIThreadCreate): existing.response = data.response existing.status = data.status existing.error = data.error - existing.updated_at = datetime.utcnow() + existing.updated_at = datetime.now(UTC) else: - new_thread = OpenAI_Thread(**data.dict()) + new_thread = OpenAI_Thread(**data.model_dump()) session.add(new_thread) session.commit() diff --git a/backend/app/models/credentials.py b/backend/app/models/credentials.py index 0a05cff2d..b6e9e98a4 100644 --- a/backend/app/models/credentials.py +++ b/backend/app/models/credentials.py @@ -1,7 +1,7 @@ from typing import Dict, Any, Optional import sqlalchemy as sa from sqlmodel import Field, Relationship, SQLModel -from datetime import datetime +from datetime import datetime, UTC from app.core.util 
import now @@ -58,11 +58,11 @@ class Credential(CredsBase, table=True): ) inserted_at: datetime = Field( default_factory=now, - sa_column=sa.Column(sa.DateTime, default=datetime.utcnow), + sa_column=sa.Column(sa.DateTime, default=lambda: datetime.now(UTC)), ) updated_at: datetime = Field( default_factory=now, - sa_column=sa.Column(sa.DateTime, onupdate=datetime.utcnow), + sa_column=sa.Column(sa.DateTime, onupdate=lambda: datetime.now(UTC)), ) deleted_at: Optional[datetime] = Field( default=None, sa_column=sa.Column(sa.DateTime, nullable=True) diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py index d73ca6b9b..70c91cd28 100644 --- a/backend/app/models/openai_conversation.py +++ b/backend/app/models/openai_conversation.py @@ -1,6 +1,6 @@ from sqlmodel import SQLModel, Field from typing import Optional -from datetime import datetime +from datetime import datetime, UTC class OpenAIConversationBase(SQLModel): @@ -27,5 +27,5 @@ class OpenAIConversationPublic(OpenAIConversationBase): class OpenAI_Conversation(OpenAIConversationBase, table=True): id: int = Field(default=None, primary_key=True) - inserted_at: datetime = Field(default_factory=datetime.utcnow) - updated_at: datetime = Field(default_factory=datetime.utcnow) + inserted_at: datetime = Field(default_factory=lambda: datetime.now(UTC)) + updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC)) diff --git a/backend/app/models/threads.py b/backend/app/models/threads.py index e353c6760..61748ec2b 100644 --- a/backend/app/models/threads.py +++ b/backend/app/models/threads.py @@ -1,6 +1,6 @@ from sqlmodel import SQLModel, Field from typing import Optional -from datetime import datetime +from datetime import datetime, UTC class OpenAIThreadBase(SQLModel): @@ -17,5 +17,5 @@ class OpenAIThreadCreate(OpenAIThreadBase): class OpenAI_Thread(OpenAIThreadBase, table=True): id: int = Field(default=None, primary_key=True) - inserted_at: datetime = 
Field(default_factory=datetime.utcnow) - updated_at: datetime = Field(default_factory=datetime.utcnow) + inserted_at: datetime = Field(default_factory=lambda: datetime.now(UTC)) + updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC)) From b83b544e00cd22124b14692aa494a2eaee9dac07 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Sat, 12 Jul 2025 16:34:19 +0530 Subject: [PATCH 05/40] fixing list API --- backend/app/api/routes/openai_conversation.py | 42 +++++++++---------- .../api/routes/test_openai_conversation.py | 27 ++++++++++++ 2 files changed, 48 insertions(+), 21 deletions(-) diff --git a/backend/app/api/routes/openai_conversation.py b/backend/app/api/routes/openai_conversation.py index 5fbfe0703..2d861b5f2 100644 --- a/backend/app/api/routes/openai_conversation.py +++ b/backend/app/api/routes/openai_conversation.py @@ -47,6 +47,27 @@ async def create_conversation( ) +@router.get( + "/list", + response_model=APIResponse[List[OpenAIConversationPublic]], + summary="List all conversations", + description="Retrieve all conversations with pagination support", +) +async def list_conversations( + skip: int = Query(0, ge=0, description="Number of records to skip"), + limit: int = Query( + 100, gt=0, le=1000, description="Maximum number of records to return" + ), + db: Session = Depends(get_db), + _current_user: UserOrganization = Depends(get_current_user_org), +): + """Get all conversations with pagination.""" + conversations = get_all_openai_conversations(db, skip=skip, limit=limit) + return APIResponse.success_response( + data=[OpenAIConversationPublic.model_validate(conv) for conv in conversations] + ) + + @router.get( "/{conversation_id}", response_model=APIResponse[OpenAIConversationPublic], @@ -107,27 +128,6 @@ async def get_conversations_by_ancestor( ) -@router.get( - "/list", - response_model=APIResponse[List[OpenAIConversationPublic]], - summary="List all conversations", - description="Retrieve all conversations with pagination support", -) 
-async def list_conversations( - skip: int = Query(0, ge=0, description="Number of records to skip"), - limit: int = Query( - 100, gt=0, le=1000, description="Maximum number of records to return" - ), - db: Session = Depends(get_db), - _current_user: UserOrganization = Depends(get_current_user_org), -): - """Get all conversations with pagination.""" - conversations = get_all_openai_conversations(db, skip=skip, limit=limit) - return APIResponse.success_response( - data=[OpenAIConversationPublic.model_validate(conv) for conv in conversations] - ) - - @router.put( "/{conversation_id}", response_model=APIResponse[OpenAIConversationPublic], diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index 3d0fa72bf..5621d6070 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -143,3 +143,30 @@ def test_delete_conversation_by_id(client: TestClient, db: Session): data = response.json() assert data["success"] is True assert "deleted successfully" in data["data"]["message"] + + +def test_list_conversations(client: TestClient, db: Session): + """Test listing all conversations.""" + # Create multiple conversations + conversation_data1 = OpenAIConversationCreate( + response_id="resp_1", ancestor_response_id="ancestor_1" + ) + conversation_data2 = OpenAIConversationCreate( + response_id="resp_2", ancestor_response_id="ancestor_2" + ) + conversation1 = create_openai_conversation(db, conversation_data1) + conversation2 = create_openai_conversation(db, conversation_data2) + headers = {"X-API-KEY": original_api_key} + response = client.get( + "/api/v1/openai-conversation/list", + headers=headers, + params={"skip": 0, "limit": 100}, + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + # Should contain at least the two conversations we just created + response_ids = 
[conv["response_id"] for conv in data["data"]] + assert conversation1.response_id in response_ids + assert conversation2.response_id in response_ids From 2c129a37ec7bfb383718a3097dba3ade1e6d5651 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Mon, 14 Jul 2025 15:14:54 +0530 Subject: [PATCH 06/40] first draft of updating entry in db as well --- ...c972b10_add_conversation_content_fields.py | 69 ++++++++ backend/app/api/routes/responses.py | 55 ++++++- backend/app/models/openai_conversation.py | 28 ++++ .../api/routes/test_openai_conversation.py | 118 ++++++++++++-- .../app/tests/api/routes/test_responses.py | 153 +++++++++++++++++- 5 files changed, 410 insertions(+), 13 deletions(-) create mode 100644 backend/app/alembic/versions/ac721c972b10_add_conversation_content_fields.py diff --git a/backend/app/alembic/versions/ac721c972b10_add_conversation_content_fields.py b/backend/app/alembic/versions/ac721c972b10_add_conversation_content_fields.py new file mode 100644 index 000000000..553f897d1 --- /dev/null +++ b/backend/app/alembic/versions/ac721c972b10_add_conversation_content_fields.py @@ -0,0 +1,69 @@ +"""add_conversation_content_fields + +Revision ID: ac721c972b10 +Revises: f5628e3a9988 +Create Date: 2025-07-12 16:53:33.798070 + +""" +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes + + +# revision identifiers, used by Alembic. +revision = "ac721c972b10" +down_revision = "f5628e3a9988" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column( + "openai_conversation", + sa.Column("user_question", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + ) + op.add_column( + "openai_conversation", + sa.Column( + "assistant_response", sqlmodel.sql.sqltypes.AutoString(), nullable=False + ), + ) + op.add_column( + "openai_conversation", + sa.Column("model", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + ) + op.add_column( + "openai_conversation", sa.Column("input_tokens", sa.Integer(), nullable=True) + ) + op.add_column( + "openai_conversation", sa.Column("output_tokens", sa.Integer(), nullable=True) + ) + op.add_column( + "openai_conversation", sa.Column("total_tokens", sa.Integer(), nullable=True) + ) + op.add_column( + "openai_conversation", + sa.Column("assistant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + ) + op.add_column( + "openai_conversation", sa.Column("project_id", sa.Integer(), nullable=True) + ) + op.add_column( + "openai_conversation", sa.Column("organization_id", sa.Integer(), nullable=True) + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column("openai_conversation", "organization_id") + op.drop_column("openai_conversation", "project_id") + op.drop_column("openai_conversation", "assistant_id") + op.drop_column("openai_conversation", "total_tokens") + op.drop_column("openai_conversation", "output_tokens") + op.drop_column("openai_conversation", "input_tokens") + op.drop_column("openai_conversation", "model") + op.drop_column("openai_conversation", "assistant_response") + op.drop_column("openai_conversation", "user_question") + # ### end Alembic commands ### diff --git a/backend/app/api/routes/responses.py b/backend/app/api/routes/responses.py index 746813136..75ab49ad8 100644 --- a/backend/app/api/routes/responses.py +++ b/backend/app/api/routes/responses.py @@ -11,7 +11,8 @@ from app.api.routes.threads import send_callback from app.crud.assistants import get_assistant_by_id from app.crud.credentials import get_provider_credential -from app.models import UserProjectOrg +from app.crud.openai_conversation import create_openai_conversation +from app.models import UserProjectOrg, OpenAIConversationCreate from app.utils import APIResponse, mask_string from app.core.langfuse.langfuse import LangfuseTracer @@ -96,6 +97,8 @@ def process_response( assistant, tracer: LangfuseTracer, project_id: int, + organization_id: int, + session: Session, ): """Process a response and send callback with results, with Langfuse tracing.""" logger.info( @@ -141,6 +144,30 @@ def process_response( f"Successfully generated response: response_id={response.id}, assistant={mask_string(request.assistant_id)}, project_id={project_id}" ) + # Store conversation in database + try: + conversation_data = OpenAIConversationCreate( + response_id=response.id, + previous_response_id=request.response_id, + user_question=request.question, + assistant_response=response.output_text, + model=response.model, + input_tokens=response.usage.input_tokens, + output_tokens=response.usage.output_tokens, + 
total_tokens=response.usage.total_tokens, + assistant_id=request.assistant_id, + project_id=project_id, + organization_id=organization_id, + ) + create_openai_conversation(session, conversation_data) + logger.info( + f"Conversation stored in database: response_id={response.id}, project_id={project_id}" + ) + except Exception as e: + logger.error( + f"Failed to store conversation in database: {str(e)}, response_id={response.id}, project_id={project_id}" + ) + tracer.end_generation( output={ "response_id": response.id, @@ -262,6 +289,8 @@ async def responses( assistant, tracer, project_id, + organization_id, + _session, ) logger.info( @@ -344,6 +373,30 @@ async def responses_sync( response_chunks = get_file_search_results(response) + # Store conversation in database + try: + conversation_data = OpenAIConversationCreate( + response_id=response.id, + previous_response_id=request.response_id, + user_question=request.question, + assistant_response=response.output_text, + model=response.model, + input_tokens=response.usage.input_tokens, + output_tokens=response.usage.output_tokens, + total_tokens=response.usage.total_tokens, + assistant_id=None, # Not available in sync endpoint + project_id=project_id, + organization_id=organization_id, + ) + create_openai_conversation(_session, conversation_data) + logger.info( + f"Conversation stored in database: response_id={response.id}, project_id={project_id}" + ) + except Exception as e: + logger.error( + f"Failed to store conversation in database: {str(e)}, response_id={response.id}, project_id={project_id}" + ) + tracer.end_generation( output={ "response_id": response.id, diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py index 70c91cd28..304fef0c4 100644 --- a/backend/app/models/openai_conversation.py +++ b/backend/app/models/openai_conversation.py @@ -7,6 +7,25 @@ class OpenAIConversationBase(SQLModel): response_id: str = Field(index=True) ancestor_response_id: Optional[str] = 
Field(default=None, index=True) previous_response_id: Optional[str] = Field(default=None, index=True) + user_question: str = Field(description="The user's input question") + assistant_response: str = Field(description="The assistant's response") + model: str = Field(description="The model used for the response") + input_tokens: Optional[int] = Field( + default=None, description="Number of input tokens" + ) + output_tokens: Optional[int] = Field( + default=None, description="Number of output tokens" + ) + total_tokens: Optional[int] = Field( + default=None, description="Total number of tokens" + ) + assistant_id: Optional[str] = Field( + default=None, description="The assistant ID used" + ) + project_id: Optional[int] = Field(default=None, description="The project ID") + organization_id: Optional[int] = Field( + default=None, description="The organization ID" + ) class OpenAIConversationCreate(OpenAIConversationBase): @@ -17,6 +36,15 @@ class OpenAIConversationUpdate(SQLModel): response_id: Optional[str] = None ancestor_response_id: Optional[str] = None previous_response_id: Optional[str] = None + user_question: Optional[str] = None + assistant_response: Optional[str] = None + model: Optional[str] = None + input_tokens: Optional[int] = None + output_tokens: Optional[int] = None + total_tokens: Optional[int] = None + assistant_id: Optional[str] = None + project_id: Optional[int] = None + organization_id: Optional[int] = None class OpenAIConversationPublic(OpenAIConversationBase): diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index 5621d6070..d890c68c2 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -17,6 +17,15 @@ def test_create_conversation(client: TestClient): "response_id": "resp_123", "ancestor_response_id": "ancestor_456", "previous_response_id": "prev_789", + "user_question": "What is the 
capital of France?", + "assistant_response": "The capital of France is Paris.", + "model": "gpt-4o", + "input_tokens": 10, + "output_tokens": 5, + "total_tokens": 15, + "assistant_id": "asst_123", + "project_id": 1, + "organization_id": 1, } headers = {"X-API-KEY": original_api_key} response = client.post( @@ -40,7 +49,17 @@ def test_get_conversation_by_id(client: TestClient, db: Session): """Test getting a conversation by ID.""" # Create a conversation first conversation_data = OpenAIConversationCreate( - response_id="resp_123", ancestor_response_id="ancestor_456" + response_id="resp_123", + ancestor_response_id="ancestor_456", + user_question="What is the capital of France?", + assistant_response="The capital of France is Paris.", + model="gpt-4o", + input_tokens=10, + output_tokens=5, + total_tokens=15, + assistant_id="asst_123", + project_id=1, + organization_id=1, ) conversation = create_openai_conversation(db, conversation_data) headers = {"X-API-KEY": original_api_key} @@ -59,7 +78,17 @@ def test_get_conversation_by_response_id(client: TestClient, db: Session): """Test getting a conversation by response_id.""" # Create a conversation first conversation_data = OpenAIConversationCreate( - response_id="resp_123", ancestor_response_id="ancestor_456" + response_id="resp_123", + ancestor_response_id="ancestor_456", + user_question="What is the capital of France?", + assistant_response="The capital of France is Paris.", + model="gpt-4o", + input_tokens=10, + output_tokens=5, + total_tokens=15, + assistant_id="asst_123", + project_id=1, + organization_id=1, ) create_openai_conversation(db, conversation_data) headers = {"X-API-KEY": original_api_key} @@ -77,13 +106,43 @@ def test_get_conversations_by_ancestor(client: TestClient, db: Session): """Test getting conversations by ancestor_response_id.""" # Create multiple conversations with same ancestor conversation_data1 = OpenAIConversationCreate( - response_id="resp_1", ancestor_response_id="ancestor_123" + 
response_id="resp_1", + ancestor_response_id="ancestor_123", + user_question="What is the capital of France?", + assistant_response="The capital of France is Paris.", + model="gpt-4o", + input_tokens=10, + output_tokens=5, + total_tokens=15, + assistant_id="asst_123", + project_id=1, + organization_id=1, ) conversation_data2 = OpenAIConversationCreate( - response_id="resp_2", ancestor_response_id="ancestor_123" + response_id="resp_2", + ancestor_response_id="ancestor_123", + user_question="What is the capital of Spain?", + assistant_response="The capital of Spain is Madrid.", + model="gpt-4o", + input_tokens=10, + output_tokens=5, + total_tokens=15, + assistant_id="asst_123", + project_id=1, + organization_id=1, ) conversation_data3 = OpenAIConversationCreate( - response_id="resp_3", ancestor_response_id="ancestor_456" + response_id="resp_3", + ancestor_response_id="ancestor_456", + user_question="What is the capital of Italy?", + assistant_response="The capital of Italy is Rome.", + model="gpt-4o", + input_tokens=10, + output_tokens=5, + total_tokens=15, + assistant_id="asst_123", + project_id=1, + organization_id=1, ) create_openai_conversation(db, conversation_data1) @@ -106,7 +165,17 @@ def test_update_conversation(client: TestClient, db: Session): """Test updating a conversation.""" # Create a conversation first conversation_data = OpenAIConversationCreate( - response_id="resp_123", ancestor_response_id="ancestor_456" + response_id="resp_123", + ancestor_response_id="ancestor_456", + user_question="What is the capital of France?", + assistant_response="The capital of France is Paris.", + model="gpt-4o", + input_tokens=10, + output_tokens=5, + total_tokens=15, + assistant_id="asst_123", + project_id=1, + organization_id=1, ) conversation = create_openai_conversation(db, conversation_data) @@ -132,7 +201,18 @@ def test_update_conversation(client: TestClient, db: Session): def test_delete_conversation_by_id(client: TestClient, db: Session): """Test deleting a 
conversation by ID.""" # Create a conversation first - conversation_data = OpenAIConversationCreate(response_id="resp_123") + conversation_data = OpenAIConversationCreate( + response_id="resp_123", + user_question="What is the capital of France?", + assistant_response="The capital of France is Paris.", + model="gpt-4o", + input_tokens=10, + output_tokens=5, + total_tokens=15, + assistant_id="asst_123", + project_id=1, + organization_id=1, + ) conversation = create_openai_conversation(db, conversation_data) headers = {"X-API-KEY": original_api_key} response = client.delete( @@ -149,10 +229,30 @@ def test_list_conversations(client: TestClient, db: Session): """Test listing all conversations.""" # Create multiple conversations conversation_data1 = OpenAIConversationCreate( - response_id="resp_1", ancestor_response_id="ancestor_1" + response_id="resp_1", + ancestor_response_id="ancestor_1", + user_question="What is the capital of France?", + assistant_response="The capital of France is Paris.", + model="gpt-4o", + input_tokens=10, + output_tokens=5, + total_tokens=15, + assistant_id="asst_123", + project_id=1, + organization_id=1, ) conversation_data2 = OpenAIConversationCreate( - response_id="resp_2", ancestor_response_id="ancestor_2" + response_id="resp_2", + ancestor_response_id="ancestor_2", + user_question="What is the capital of Spain?", + assistant_response="The capital of Spain is Madrid.", + model="gpt-4o", + input_tokens=10, + output_tokens=5, + total_tokens=15, + assistant_id="asst_123", + project_id=1, + organization_id=1, ) conversation1 = create_openai_conversation(db, conversation_data1) conversation2 = create_openai_conversation(db, conversation_data2) diff --git a/backend/app/tests/api/routes/test_responses.py b/backend/app/tests/api/routes/test_responses.py index e24b7a4d0..1eb540200 100644 --- a/backend/app/tests/api/routes/test_responses.py +++ b/backend/app/tests/api/routes/test_responses.py @@ -5,7 +5,7 @@ from sqlmodel import select from 
app.api.routes.responses import router -from app.models import Project +from app.models import Project, OpenAI_Conversation # Wrap the router in a FastAPI app instance app = FastAPI() @@ -31,7 +31,7 @@ def test_responses_endpoint_success( # Setup the mock response object with real values for all used fields mock_response = MagicMock() mock_response.id = "mock_response_id" - mock_response.output_text = "Test output" + mock_response.output_text = "Test assistant response" mock_response.model = "gpt-4o" mock_response.usage.input_tokens = 10 mock_response.usage.output_tokens = 5 @@ -90,7 +90,7 @@ def test_responses_endpoint_without_vector_store( # Setup the mock response object mock_response = MagicMock() mock_response.id = "mock_response_id" - mock_response.output_text = "Test output" + mock_response.output_text = "Test assistant response" mock_response.model = "gpt-4" mock_response.usage.input_tokens = 10 mock_response.usage.output_tokens = 5 @@ -128,3 +128,150 @@ def test_responses_endpoint_without_vector_store( temperature=mock_assistant.temperature, input=[{"role": "user", "content": "What is Glific?"}], ) + + +@patch("app.api.routes.responses.OpenAI") +@patch("app.api.routes.responses.get_provider_credential") +@patch("app.api.routes.responses.get_assistant_by_id") +@patch("app.api.routes.responses.create_openai_conversation") +def test_responses_endpoint_stores_conversation( + mock_create_conversation, + mock_get_assistant, + mock_get_credential, + mock_openai, + db, +): + """Test that the /responses endpoint stores conversation in database.""" + # Setup mock credentials + mock_get_credential.return_value = {"api_key": "test_api_key"} + + # Setup mock assistant + mock_assistant = MagicMock() + mock_assistant.model = "gpt-4o" + mock_assistant.instructions = "Test instructions" + mock_assistant.temperature = 0.1 + mock_assistant.vector_store_id = "vs_test" + mock_assistant.max_num_results = 20 + mock_get_assistant.return_value = mock_assistant + + # Setup mock 
OpenAI client + mock_client = MagicMock() + mock_openai.return_value = mock_client + + # Setup the mock response object + mock_response = MagicMock() + mock_response.id = "mock_response_id" + mock_response.output_text = "Test assistant response" + mock_response.model = "gpt-4o" + mock_response.usage.input_tokens = 10 + mock_response.usage.output_tokens = 5 + mock_response.usage.total_tokens = 15 + mock_response.output = [] + mock_client.responses.create.return_value = mock_response + + # Get the Glific project ID + glific_project = db.exec(select(Project).where(Project.name == "Glific")).first() + if not glific_project: + pytest.skip("Glific project not found in the database") + + # Use the original API key from seed data + original_api_key = "ApiKey No3x47A5qoIGhm0kVKjQ77dhCqEdWRIQZlEPzzzh7i8" + + headers = {"X-API-KEY": original_api_key} + request_data = { + "assistant_id": "assistant_123", + "question": "What is Glific?", + "callback_url": "http://example.com/callback", + } + + response = client.post("/responses", json=request_data, headers=headers) + assert response.status_code == 200 + response_json = response.json() + assert response_json["success"] is True + assert response_json["data"]["status"] == "processing" + + # Verify that create_openai_conversation was called with correct data + mock_create_conversation.assert_called_once() + call_args = mock_create_conversation.call_args + conversation_data = call_args[0][1] # Second argument is the conversation data + + assert conversation_data.response_id == "mock_response_id" + assert conversation_data.user_question == "What is Glific?" 
+ assert conversation_data.assistant_response == "Test assistant response" + assert conversation_data.model == "gpt-4o" + assert conversation_data.input_tokens == 10 + assert conversation_data.output_tokens == 5 + assert conversation_data.total_tokens == 15 + assert conversation_data.assistant_id == "assistant_123" + assert conversation_data.project_id == glific_project.id + + +@patch("app.api.routes.responses.OpenAI") +@patch("app.api.routes.responses.get_provider_credential") +@patch("app.api.routes.responses.get_assistant_by_id") +def test_responses_sync_endpoint_stores_conversation( + mock_get_assistant, + mock_get_credential, + mock_openai, + db, +): + """Test that the /responses/sync endpoint stores conversation in database.""" + # Setup mock credentials + mock_get_credential.return_value = {"api_key": "test_api_key"} + + # Setup mock OpenAI client + mock_client = MagicMock() + mock_openai.return_value = mock_client + + # Setup the mock response object + mock_response = MagicMock() + mock_response.id = "mock_response_id" + mock_response.output_text = "Test assistant response" + mock_response.model = "gpt-4o" + mock_response.usage.input_tokens = 10 + mock_response.usage.output_tokens = 5 + mock_response.usage.total_tokens = 15 + mock_response.output = [] + mock_client.responses.create.return_value = mock_response + + # Get the Glific project ID + glific_project = db.exec(select(Project).where(Project.name == "Glific")).first() + if not glific_project: + pytest.skip("Glific project not found in the database") + + # Use the original API key from seed data + original_api_key = "ApiKey No3x47A5qoIGhm0kVKjQ77dhCqEdWRIQZlEPzzzh7i8" + + headers = {"X-API-KEY": original_api_key} + request_data = { + "model": "gpt-4o", + "instructions": "Test instructions", + "vector_store_ids": ["vs_test"], + "max_num_results": 20, + "temperature": 0.1, + "question": "What is Glific?", + } + + response = client.post("/responses/sync", json=request_data, headers=headers) + assert 
response.status_code == 200 + response_json = response.json() + assert response_json["success"] is True + assert response_json["data"]["status"] == "success" + + # Verify that conversation was stored in database + conversation = db.exec( + select(OpenAI_Conversation).where( + OpenAI_Conversation.response_id == "mock_response_id" + ) + ).first() + + assert conversation is not None + assert conversation.response_id == "mock_response_id" + assert conversation.user_question == "What is Glific?" + assert conversation.assistant_response == "Test assistant response" + assert conversation.model == "gpt-4o" + assert conversation.input_tokens == 10 + assert conversation.output_tokens == 5 + assert conversation.total_tokens == 15 + assert conversation.assistant_id == "assistant_123" + assert conversation.project_id == glific_project.id From 7a703b069ca804d1075bb47438292dfa4cc1fbaf Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Tue, 15 Jul 2025 12:56:56 +0530 Subject: [PATCH 07/40] cleanups --- ...c972b10_add_conversation_content_fields.py | 69 ------------------- ...28e3a9988_add_openai_conversation_table.py | 8 +++ backend/app/api/routes/responses.py | 6 -- backend/app/models/openai_conversation.py | 12 ---- 4 files changed, 8 insertions(+), 87 deletions(-) delete mode 100644 backend/app/alembic/versions/ac721c972b10_add_conversation_content_fields.py diff --git a/backend/app/alembic/versions/ac721c972b10_add_conversation_content_fields.py b/backend/app/alembic/versions/ac721c972b10_add_conversation_content_fields.py deleted file mode 100644 index 553f897d1..000000000 --- a/backend/app/alembic/versions/ac721c972b10_add_conversation_content_fields.py +++ /dev/null @@ -1,69 +0,0 @@ -"""add_conversation_content_fields - -Revision ID: ac721c972b10 -Revises: f5628e3a9988 -Create Date: 2025-07-12 16:53:33.798070 - -""" -from alembic import op -import sqlalchemy as sa -import sqlmodel.sql.sqltypes - - -# revision identifiers, used by Alembic. 
-revision = "ac721c972b10" -down_revision = "f5628e3a9988" -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column( - "openai_conversation", - sa.Column("user_question", sqlmodel.sql.sqltypes.AutoString(), nullable=False), - ) - op.add_column( - "openai_conversation", - sa.Column( - "assistant_response", sqlmodel.sql.sqltypes.AutoString(), nullable=False - ), - ) - op.add_column( - "openai_conversation", - sa.Column("model", sqlmodel.sql.sqltypes.AutoString(), nullable=False), - ) - op.add_column( - "openai_conversation", sa.Column("input_tokens", sa.Integer(), nullable=True) - ) - op.add_column( - "openai_conversation", sa.Column("output_tokens", sa.Integer(), nullable=True) - ) - op.add_column( - "openai_conversation", sa.Column("total_tokens", sa.Integer(), nullable=True) - ) - op.add_column( - "openai_conversation", - sa.Column("assistant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True), - ) - op.add_column( - "openai_conversation", sa.Column("project_id", sa.Integer(), nullable=True) - ) - op.add_column( - "openai_conversation", sa.Column("organization_id", sa.Integer(), nullable=True) - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_column("openai_conversation", "organization_id") - op.drop_column("openai_conversation", "project_id") - op.drop_column("openai_conversation", "assistant_id") - op.drop_column("openai_conversation", "total_tokens") - op.drop_column("openai_conversation", "output_tokens") - op.drop_column("openai_conversation", "input_tokens") - op.drop_column("openai_conversation", "model") - op.drop_column("openai_conversation", "assistant_response") - op.drop_column("openai_conversation", "user_question") - # ### end Alembic commands ### diff --git a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py index 1c8616a08..7f85ae49a 100644 --- a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py +++ b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py @@ -28,6 +28,14 @@ def upgrade(): sa.Column( "previous_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True ), + sa.Column("user_question", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column( + "assistant_response", sqlmodel.sql.sqltypes.AutoString(), nullable=False + ), + sa.Column("model", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("assistant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("project_id", sa.Integer(), nullable=True), + sa.Column("organization_id", sa.Integer(), nullable=True), sa.Column("id", sa.Integer(), nullable=False), sa.Column("inserted_at", sa.DateTime(), nullable=False), sa.Column("updated_at", sa.DateTime(), nullable=False), diff --git a/backend/app/api/routes/responses.py b/backend/app/api/routes/responses.py index 75ab49ad8..d6e570ca5 100644 --- a/backend/app/api/routes/responses.py +++ b/backend/app/api/routes/responses.py @@ -152,9 +152,6 @@ def process_response( user_question=request.question, assistant_response=response.output_text, model=response.model, - 
input_tokens=response.usage.input_tokens, - output_tokens=response.usage.output_tokens, - total_tokens=response.usage.total_tokens, assistant_id=request.assistant_id, project_id=project_id, organization_id=organization_id, @@ -381,9 +378,6 @@ async def responses_sync( user_question=request.question, assistant_response=response.output_text, model=response.model, - input_tokens=response.usage.input_tokens, - output_tokens=response.usage.output_tokens, - total_tokens=response.usage.total_tokens, assistant_id=None, # Not available in sync endpoint project_id=project_id, organization_id=organization_id, diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py index 304fef0c4..7ba6aef5d 100644 --- a/backend/app/models/openai_conversation.py +++ b/backend/app/models/openai_conversation.py @@ -10,15 +10,6 @@ class OpenAIConversationBase(SQLModel): user_question: str = Field(description="The user's input question") assistant_response: str = Field(description="The assistant's response") model: str = Field(description="The model used for the response") - input_tokens: Optional[int] = Field( - default=None, description="Number of input tokens" - ) - output_tokens: Optional[int] = Field( - default=None, description="Number of output tokens" - ) - total_tokens: Optional[int] = Field( - default=None, description="Total number of tokens" - ) assistant_id: Optional[str] = Field( default=None, description="The assistant ID used" ) @@ -39,9 +30,6 @@ class OpenAIConversationUpdate(SQLModel): user_question: Optional[str] = None assistant_response: Optional[str] = None model: Optional[str] = None - input_tokens: Optional[int] = None - output_tokens: Optional[int] = None - total_tokens: Optional[int] = None assistant_id: Optional[str] = None project_id: Optional[int] = None organization_id: Optional[int] = None From 2c51cdf9d82ac1842a2c0528cdb36b0e772429f2 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Tue, 15 Jul 2025 14:49:19 +0530 Subject: 
[PATCH 08/40] updating migration --- ...28e3a9988_add_openai_conversation_table.py | 97 ++----------------- 1 file changed, 10 insertions(+), 87 deletions(-) diff --git a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py index 7f85ae49a..d6842ac62 100644 --- a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py +++ b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py @@ -18,9 +18,9 @@ def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### op.create_table( "openai_conversation", + sa.Column("id", sa.Integer(), nullable=False), sa.Column("response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.Column( "ancestor_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True @@ -33,13 +33,16 @@ def upgrade(): "assistant_response", sqlmodel.sql.sqltypes.AutoString(), nullable=False ), sa.Column("model", sqlmodel.sql.sqltypes.AutoString(), nullable=False), - sa.Column("assistant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column("project_id", sa.Integer(), nullable=True), - sa.Column("organization_id", sa.Integer(), nullable=True), - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("assistant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("project_id", sa.Integer(), nullable=False), + sa.Column("organization_id", sa.Integer(), nullable=False), sa.Column("inserted_at", sa.DateTime(), nullable=False), sa.Column("updated_at", sa.DateTime(), nullable=False), sa.PrimaryKeyConstraint("id"), + sa.ForeignKeyConstraint( + ["organization_id"], ["organization.id"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"), ) op.create_index( op.f("ix_openai_conversation_ancestor_response_id"), @@ -59,94 +62,15 @@ def upgrade(): ["response_id"], unique=False, ) - op.alter_column( - "credential", 
"credential", existing_type=sa.VARCHAR(), nullable=True - ) - op.drop_constraint( - "credential_organization_id_fkey", "credential", type_="foreignkey" - ) - op.drop_constraint("credential_project_id_fkey", "credential", type_="foreignkey") - op.create_foreign_key(None, "credential", "project", ["project_id"], ["id"]) op.create_foreign_key( - None, "credential", "organization", ["organization_id"], ["id"] - ) - op.alter_column( - "openai_assistant", - "instructions", - existing_type=sa.TEXT(), - type_=sqlmodel.sql.sqltypes.AutoString(), - existing_nullable=False, + None, "openai_conversation", "project", ["project_id"], ["id"] ) - op.create_index( - op.f("ix_openai_assistant_assistant_id"), - "openai_assistant", - ["assistant_id"], - unique=True, - ) - op.drop_constraint( - "openai_assistant_organization_id_fkey", "openai_assistant", type_="foreignkey" - ) - op.drop_constraint( - "openai_assistant_project_id_fkey", "openai_assistant", type_="foreignkey" - ) - op.create_foreign_key(None, "openai_assistant", "project", ["project_id"], ["id"]) op.create_foreign_key( - None, "openai_assistant", "organization", ["organization_id"], ["id"] + None, "openai_conversation", "organization", ["organization_id"], ["id"] ) - # ### end Alembic commands ### def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_constraint(None, "openai_assistant", type_="foreignkey") - op.drop_constraint(None, "openai_assistant", type_="foreignkey") - op.create_foreign_key( - "openai_assistant_project_id_fkey", - "openai_assistant", - "project", - ["project_id"], - ["id"], - ondelete="CASCADE", - ) - op.create_foreign_key( - "openai_assistant_organization_id_fkey", - "openai_assistant", - "organization", - ["organization_id"], - ["id"], - ondelete="CASCADE", - ) - op.drop_index( - op.f("ix_openai_assistant_assistant_id"), table_name="openai_assistant" - ) - op.alter_column( - "openai_assistant", - "instructions", - existing_type=sqlmodel.sql.sqltypes.AutoString(), - type_=sa.TEXT(), - existing_nullable=False, - ) - op.drop_constraint(None, "credential", type_="foreignkey") - op.drop_constraint(None, "credential", type_="foreignkey") - op.create_foreign_key( - "credential_project_id_fkey", - "credential", - "project", - ["project_id"], - ["id"], - ondelete="SET NULL", - ) - op.create_foreign_key( - "credential_organization_id_fkey", - "credential", - "organization", - ["organization_id"], - ["id"], - ondelete="CASCADE", - ) - op.alter_column( - "credential", "credential", existing_type=sa.VARCHAR(), nullable=False - ) op.drop_index( op.f("ix_openai_conversation_response_id"), table_name="openai_conversation" ) @@ -159,4 +83,3 @@ def downgrade(): table_name="openai_conversation", ) op.drop_table("openai_conversation") - # ### end Alembic commands ### From ce3e455a21adf83d9adf5fa43bb6957dcb6010b3 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Tue, 15 Jul 2025 15:20:37 +0530 Subject: [PATCH 09/40] updating models --- backend/app/api/routes/openai_conversation.py | 5 +---- backend/app/models/openai_conversation.py | 10 ++++------ 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/backend/app/api/routes/openai_conversation.py b/backend/app/api/routes/openai_conversation.py index 2d861b5f2..22a385ea4 100644 --- a/backend/app/api/routes/openai_conversation.py +++ 
b/backend/app/api/routes/openai_conversation.py @@ -135,15 +135,12 @@ async def get_conversations_by_ancestor( description="Update an existing conversation by ID", ) async def update_conversation( + conversation_data: OpenAIConversationUpdate, conversation_id: int = Path(..., description="The conversation ID"), - conversation_data: OpenAIConversationUpdate = None, db: Session = Depends(get_db), _current_user: UserProjectOrg = Depends(get_current_user_org_project), ): """Update a conversation by its ID.""" - if not conversation_data: - raise HTTPException(status_code=400, detail="Update data is required") - conversation = update_openai_conversation(db, conversation_id, conversation_data) if not conversation: raise HTTPException(status_code=404, detail="Conversation not found") diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py index 7ba6aef5d..d48663704 100644 --- a/backend/app/models/openai_conversation.py +++ b/backend/app/models/openai_conversation.py @@ -13,10 +13,8 @@ class OpenAIConversationBase(SQLModel): assistant_id: Optional[str] = Field( default=None, description="The assistant ID used" ) - project_id: Optional[int] = Field(default=None, description="The project ID") - organization_id: Optional[int] = Field( - default=None, description="The organization ID" - ) + project_id: int = Field(foreign_key="project.id") + organization_id: int = Field(foreign_key="organization.id") class OpenAIConversationCreate(OpenAIConversationBase): @@ -31,8 +29,8 @@ class OpenAIConversationUpdate(SQLModel): assistant_response: Optional[str] = None model: Optional[str] = None assistant_id: Optional[str] = None - project_id: Optional[int] = None - organization_id: Optional[int] = None + project_id: Optional[int] = Field(default=None, foreign_key="project.id") + organization_id: Optional[int] = Field(default=None, foreign_key="organization.id") class OpenAIConversationPublic(OpenAIConversationBase): From 
6c9853ec36b7b846133891e4ac1449b1cf94deaf Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Mon, 21 Jul 2025 22:16:06 +0530 Subject: [PATCH 10/40] fixing migration --- .../f5628e3a9988_add_openai_conversation_table.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py index d6842ac62..04ba927b9 100644 --- a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py +++ b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py @@ -1,8 +1,8 @@ -"""add openai_conversation table +"""add openai_conversation table -Revision ID: f5628e3a9988 -Revises: 3389c67fdcb4 -Create Date: 2025-07-12 12:59:51.909268 +Revision ID: 8811e4d2fcf9 +Revises: 4aa1f48c6321 +Create Date: 2025-07-21 22:07:01.129760 """ from alembic import op @@ -11,8 +11,8 @@ # revision identifiers, used by Alembic. -revision = "f5628e3a9988" -down_revision = "3389c67fdcb4" +revision = "8811e4d2fcf9" +down_revision = "4aa1f48c6321" branch_labels = None depends_on = None From 529796260653c7287b794efe8b7ee84cfe613ee5 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Mon, 21 Jul 2025 22:54:07 +0530 Subject: [PATCH 11/40] fixing testcases --- backend/app/crud/api_key.py | 9 ++++---- .../api/routes/test_openai_conversation.py | 23 ++++++++----------- 2 files changed, 14 insertions(+), 18 deletions(-) diff --git a/backend/app/crud/api_key.py b/backend/app/crud/api_key.py index 7a8b7c166..a305612c0 100644 --- a/backend/app/crud/api_key.py +++ b/backend/app/crud/api_key.py @@ -168,11 +168,12 @@ def get_api_key_by_user_id(session: Session, user_id: int) -> APIKeyPublic | Non """ Retrieves the API key associated with a user by their user_id. 
""" - api_key = ( - session.query(APIKey) - .filter(APIKey.user_id == user_id, APIKey.is_deleted == False) - .first() + statement = ( + select(APIKey) + .where(APIKey.user_id == user_id, APIKey.is_deleted == False) + .limit(1) ) + api_key = session.exec(statement).first() if not api_key: return None diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index d890c68c2..0fbb943d1 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -2,17 +2,16 @@ from fastapi.testclient import TestClient from sqlmodel import Session -from app.models.openai_conversation import ( - OpenAIConversationCreate, - OpenAIConversationUpdate, -) +from app.models.openai_conversation import OpenAIConversationCreate from app.crud.openai_conversation import create_openai_conversation +from app.tests.utils.utils import get_project -original_api_key = "ApiKey No3x47A5qoIGhm0kVKjQ77dhCqEdWRIQZlEPzzzh7i8" - -def test_create_conversation(client: TestClient): +def test_create_conversation( + client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] +): """Test creating a new conversation.""" + project = get_project(db) conversation_data = { "response_id": "resp_123", "ancestor_response_id": "ancestor_456", @@ -20,18 +19,14 @@ def test_create_conversation(client: TestClient): "user_question": "What is the capital of France?", "assistant_response": "The capital of France is Paris.", "model": "gpt-4o", - "input_tokens": 10, - "output_tokens": 5, - "total_tokens": 15, "assistant_id": "asst_123", - "project_id": 1, - "organization_id": 1, + "project_id": project.id, + "organization_id": project.organization_id, } - headers = {"X-API-KEY": original_api_key} response = client.post( "/api/v1/openai-conversation/create", json=conversation_data, - headers=headers, + headers=normal_user_api_key_headers, ) assert response.status_code == 200 
From 025ccd4ecbebd74adf72d5e2de0cb4daec2ccc9b Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Mon, 21 Jul 2025 23:01:19 +0530 Subject: [PATCH 12/40] fixing testcases --- .../api/routes/test_openai_conversation.py | 87 +++++++++++-------- 1 file changed, 51 insertions(+), 36 deletions(-) diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index 0fbb943d1..c71d92a29 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -40,8 +40,11 @@ def test_create_conversation( assert "updated_at" in data["data"] -def test_get_conversation_by_id(client: TestClient, db: Session): +def test_get_conversation_by_id( + client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] +): """Test getting a conversation by ID.""" + project = get_project(db) # Create a conversation first conversation_data = OpenAIConversationCreate( response_id="resp_123", @@ -53,13 +56,13 @@ def test_get_conversation_by_id(client: TestClient, db: Session): output_tokens=5, total_tokens=15, assistant_id="asst_123", - project_id=1, - organization_id=1, + project_id=project.id, + organization_id=project.organization_id, ) conversation = create_openai_conversation(db, conversation_data) - headers = {"X-API-KEY": original_api_key} response = client.get( - f"/api/v1/openai-conversation/{conversation.id}", headers=headers + f"/api/v1/openai-conversation/{conversation.id}", + headers=normal_user_api_key_headers, ) assert response.status_code == 200 @@ -69,8 +72,11 @@ def test_get_conversation_by_id(client: TestClient, db: Session): assert data["data"]["response_id"] == "resp_123" -def test_get_conversation_by_response_id(client: TestClient, db: Session): +def test_get_conversation_by_response_id( + client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] +): """Test getting a conversation by response_id.""" + project = 
get_project(db) # Create a conversation first conversation_data = OpenAIConversationCreate( response_id="resp_123", @@ -82,13 +88,13 @@ def test_get_conversation_by_response_id(client: TestClient, db: Session): output_tokens=5, total_tokens=15, assistant_id="asst_123", - project_id=1, - organization_id=1, + project_id=project.id, + organization_id=project.organization_id, ) create_openai_conversation(db, conversation_data) - headers = {"X-API-KEY": original_api_key} response = client.get( - "/api/v1/openai-conversation/response/resp_123", headers=headers + "/api/v1/openai-conversation/response/resp_123", + headers=normal_user_api_key_headers, ) assert response.status_code == 200 @@ -97,8 +103,11 @@ def test_get_conversation_by_response_id(client: TestClient, db: Session): assert data["data"]["response_id"] == "resp_123" -def test_get_conversations_by_ancestor(client: TestClient, db: Session): +def test_get_conversations_by_ancestor( + client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] +): """Test getting conversations by ancestor_response_id.""" + project = get_project(db) # Create multiple conversations with same ancestor conversation_data1 = OpenAIConversationCreate( response_id="resp_1", @@ -110,8 +119,8 @@ def test_get_conversations_by_ancestor(client: TestClient, db: Session): output_tokens=5, total_tokens=15, assistant_id="asst_123", - project_id=1, - organization_id=1, + project_id=project.id, + organization_id=project.organization_id, ) conversation_data2 = OpenAIConversationCreate( response_id="resp_2", @@ -123,8 +132,8 @@ def test_get_conversations_by_ancestor(client: TestClient, db: Session): output_tokens=5, total_tokens=15, assistant_id="asst_123", - project_id=1, - organization_id=1, + project_id=project.id, + organization_id=project.organization_id, ) conversation_data3 = OpenAIConversationCreate( response_id="resp_3", @@ -136,17 +145,16 @@ def test_get_conversations_by_ancestor(client: TestClient, db: Session): 
output_tokens=5, total_tokens=15, assistant_id="asst_123", - project_id=1, - organization_id=1, + project_id=project.id, + organization_id=project.organization_id, ) create_openai_conversation(db, conversation_data1) create_openai_conversation(db, conversation_data2) create_openai_conversation(db, conversation_data3) - headers = {"X-API-KEY": original_api_key} response = client.get( "/api/v1/openai-conversation/ancestor/ancestor_123", - headers=headers, + headers=normal_user_api_key_headers, ) assert response.status_code == 200 @@ -156,8 +164,11 @@ def test_get_conversations_by_ancestor(client: TestClient, db: Session): assert all(conv["ancestor_response_id"] == "ancestor_123" for conv in data["data"]) -def test_update_conversation(client: TestClient, db: Session): +def test_update_conversation( + client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] +): """Test updating a conversation.""" + project = get_project(db) # Create a conversation first conversation_data = OpenAIConversationCreate( response_id="resp_123", @@ -169,8 +180,8 @@ def test_update_conversation(client: TestClient, db: Session): output_tokens=5, total_tokens=15, assistant_id="asst_123", - project_id=1, - organization_id=1, + project_id=project.id, + organization_id=project.organization_id, ) conversation = create_openai_conversation(db, conversation_data) @@ -178,11 +189,10 @@ def test_update_conversation(client: TestClient, db: Session): "ancestor_response_id": "ancestor_789", "previous_response_id": "prev_123", } - headers = {"X-API-KEY": original_api_key} response = client.put( f"/api/v1/openai-conversation/{conversation.id}", json=update_data, - headers=headers, + headers=normal_user_api_key_headers, ) assert response.status_code == 200 @@ -193,8 +203,11 @@ def test_update_conversation(client: TestClient, db: Session): assert data["data"]["response_id"] == "resp_123" # Should remain unchanged -def test_delete_conversation_by_id(client: TestClient, db: Session): +def 
test_delete_conversation_by_id( + client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] +): """Test deleting a conversation by ID.""" + project = get_project(db) # Create a conversation first conversation_data = OpenAIConversationCreate( response_id="resp_123", @@ -205,13 +218,13 @@ def test_delete_conversation_by_id(client: TestClient, db: Session): output_tokens=5, total_tokens=15, assistant_id="asst_123", - project_id=1, - organization_id=1, + project_id=project.id, + organization_id=project.organization_id, ) conversation = create_openai_conversation(db, conversation_data) - headers = {"X-API-KEY": original_api_key} response = client.delete( - f"/api/v1/openai-conversation/{conversation.id}", headers=headers + f"/api/v1/openai-conversation/{conversation.id}", + headers=normal_user_api_key_headers, ) assert response.status_code == 200 @@ -220,8 +233,11 @@ def test_delete_conversation_by_id(client: TestClient, db: Session): assert "deleted successfully" in data["data"]["message"] -def test_list_conversations(client: TestClient, db: Session): +def test_list_conversations( + client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] +): """Test listing all conversations.""" + project = get_project(db) # Create multiple conversations conversation_data1 = OpenAIConversationCreate( response_id="resp_1", @@ -233,8 +249,8 @@ def test_list_conversations(client: TestClient, db: Session): output_tokens=5, total_tokens=15, assistant_id="asst_123", - project_id=1, - organization_id=1, + project_id=project.id, + organization_id=project.organization_id, ) conversation_data2 = OpenAIConversationCreate( response_id="resp_2", @@ -246,15 +262,14 @@ def test_list_conversations(client: TestClient, db: Session): output_tokens=5, total_tokens=15, assistant_id="asst_123", - project_id=1, - organization_id=1, + project_id=project.id, + organization_id=project.organization_id, ) conversation1 = create_openai_conversation(db, conversation_data1) 
conversation2 = create_openai_conversation(db, conversation_data2) - headers = {"X-API-KEY": original_api_key} response = client.get( "/api/v1/openai-conversation/list", - headers=headers, + headers=normal_user_api_key_headers, params={"skip": 0, "limit": 100}, ) From f8c2176394491ace0d998f84951bc19ad87e0ea0 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Tue, 22 Jul 2025 15:57:21 +0530 Subject: [PATCH 13/40] fixing testcases --- .../app/tests/api/routes/test_responses.py | 92 +------------------ 1 file changed, 4 insertions(+), 88 deletions(-) diff --git a/backend/app/tests/api/routes/test_responses.py b/backend/app/tests/api/routes/test_responses.py index 5435cf777..599011283 100644 --- a/backend/app/tests/api/routes/test_responses.py +++ b/backend/app/tests/api/routes/test_responses.py @@ -135,6 +135,7 @@ def test_responses_endpoint_stores_conversation( mock_get_credential, mock_openai, db, + normal_user_api_key_headers, ): """Test that the /responses endpoint stores conversation in database.""" # Setup mock credentials @@ -158,28 +159,18 @@ def test_responses_endpoint_stores_conversation( mock_response.id = "mock_response_id" mock_response.output_text = "Test assistant response" mock_response.model = "gpt-4o" - mock_response.usage.input_tokens = 10 - mock_response.usage.output_tokens = 5 - mock_response.usage.total_tokens = 15 mock_response.output = [] mock_client.responses.create.return_value = mock_response - # Get the Glific project ID - glific_project = db.exec(select(Project).where(Project.name == "Glific")).first() - if not glific_project: - pytest.skip("Glific project not found in the database") - - # Use the original API key from seed data - original_api_key = "ApiKey No3x47A5qoIGhm0kVKjQ77dhCqEdWRIQZlEPzzzh7i8" - - headers = {"X-API-KEY": original_api_key} request_data = { "assistant_id": "assistant_123", "question": "What is Glific?", "callback_url": "http://example.com/callback", } - response = client.post("/responses", json=request_data, 
headers=headers) + response = client.post( + "/responses", json=request_data, headers=normal_user_api_key_headers + ) assert response.status_code == 200 response_json = response.json() assert response_json["success"] is True @@ -194,79 +185,4 @@ def test_responses_endpoint_stores_conversation( assert conversation_data.user_question == "What is Glific?" assert conversation_data.assistant_response == "Test assistant response" assert conversation_data.model == "gpt-4o" - assert conversation_data.input_tokens == 10 - assert conversation_data.output_tokens == 5 - assert conversation_data.total_tokens == 15 assert conversation_data.assistant_id == "assistant_123" - assert conversation_data.project_id == glific_project.id - - -@patch("app.api.routes.responses.OpenAI") -@patch("app.api.routes.responses.get_provider_credential") -@patch("app.api.routes.responses.get_assistant_by_id") -def test_responses_sync_endpoint_stores_conversation( - mock_get_assistant, - mock_get_credential, - mock_openai, - db, -): - """Test that the /responses/sync endpoint stores conversation in database.""" - # Setup mock credentials - mock_get_credential.return_value = {"api_key": "test_api_key"} - - # Setup mock OpenAI client - mock_client = MagicMock() - mock_openai.return_value = mock_client - - # Setup the mock response object - mock_response = MagicMock() - mock_response.id = "mock_response_id" - mock_response.output_text = "Test assistant response" - mock_response.model = "gpt-4o" - mock_response.usage.input_tokens = 10 - mock_response.usage.output_tokens = 5 - mock_response.usage.total_tokens = 15 - mock_response.output = [] - mock_client.responses.create.return_value = mock_response - - # Get the Glific project ID - glific_project = db.exec(select(Project).where(Project.name == "Glific")).first() - if not glific_project: - pytest.skip("Glific project not found in the database") - - # Use the original API key from seed data - original_api_key = "ApiKey 
No3x47A5qoIGhm0kVKjQ77dhCqEdWRIQZlEPzzzh7i8" - - headers = {"X-API-KEY": original_api_key} - request_data = { - "model": "gpt-4o", - "instructions": "Test instructions", - "vector_store_ids": ["vs_test"], - "max_num_results": 20, - "temperature": 0.1, - "question": "What is Glific?", - } - - response = client.post("/responses/sync", json=request_data, headers=headers) - assert response.status_code == 200 - response_json = response.json() - assert response_json["success"] is True - assert response_json["data"]["status"] == "success" - - # Verify that conversation was stored in database - conversation = db.exec( - select(OpenAI_Conversation).where( - OpenAI_Conversation.response_id == "mock_response_id" - ) - ).first() - - assert conversation is not None - assert conversation.response_id == "mock_response_id" - assert conversation.user_question == "What is Glific?" - assert conversation.assistant_response == "Test assistant response" - assert conversation.model == "gpt-4o" - assert conversation.input_tokens == 10 - assert conversation.output_tokens == 5 - assert conversation.total_tokens == 15 - assert conversation.assistant_id == "assistant_123" - assert conversation.project_id == glific_project.id From a5d07b3b36f18b3cddd9d460c5bc10c25a7953c1 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Tue, 22 Jul 2025 16:06:07 +0530 Subject: [PATCH 14/40] moving from assistant_response to response --- ...28e3a9988_add_openai_conversation_table.py | 4 +--- backend/app/api/routes/responses.py | 4 ++-- backend/app/models/openai_conversation.py | 4 ++-- .../api/routes/test_openai_conversation.py | 20 +++++++++---------- .../app/tests/api/routes/test_responses.py | 2 +- 5 files changed, 16 insertions(+), 18 deletions(-) diff --git a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py index 04ba927b9..34407acbd 100644 --- 
a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py +++ b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py @@ -29,9 +29,7 @@ def upgrade(): "previous_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True ), sa.Column("user_question", sqlmodel.sql.sqltypes.AutoString(), nullable=False), - sa.Column( - "assistant_response", sqlmodel.sql.sqltypes.AutoString(), nullable=False - ), + sa.Column("response", sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.Column("model", sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.Column("assistant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.Column("project_id", sa.Integer(), nullable=False), diff --git a/backend/app/api/routes/responses.py b/backend/app/api/routes/responses.py index 9a5eceeb0..0a07ea069 100644 --- a/backend/app/api/routes/responses.py +++ b/backend/app/api/routes/responses.py @@ -150,7 +150,7 @@ def process_response( response_id=response.id, previous_response_id=request.response_id, user_question=request.question, - assistant_response=response.output_text, + response=response.output_text, model=response.model, assistant_id=request.assistant_id, project_id=project_id, @@ -376,7 +376,7 @@ async def responses_sync( response_id=response.id, previous_response_id=request.response_id, user_question=request.question, - assistant_response=response.output_text, + response=response.output_text, model=response.model, assistant_id=None, # Not available in sync endpoint project_id=project_id, diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py index d48663704..f2dc43753 100644 --- a/backend/app/models/openai_conversation.py +++ b/backend/app/models/openai_conversation.py @@ -8,7 +8,7 @@ class OpenAIConversationBase(SQLModel): ancestor_response_id: Optional[str] = Field(default=None, index=True) previous_response_id: Optional[str] = Field(default=None, index=True) user_question: str = 
Field(description="The user's input question") - assistant_response: str = Field(description="The assistant's response") + response: str = Field(description="The assistant's response") model: str = Field(description="The model used for the response") assistant_id: Optional[str] = Field( default=None, description="The assistant ID used" @@ -26,7 +26,7 @@ class OpenAIConversationUpdate(SQLModel): ancestor_response_id: Optional[str] = None previous_response_id: Optional[str] = None user_question: Optional[str] = None - assistant_response: Optional[str] = None + response: Optional[str] = None model: Optional[str] = None assistant_id: Optional[str] = None project_id: Optional[int] = Field(default=None, foreign_key="project.id") diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index c71d92a29..c991dda46 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -17,7 +17,7 @@ def test_create_conversation( "ancestor_response_id": "ancestor_456", "previous_response_id": "prev_789", "user_question": "What is the capital of France?", - "assistant_response": "The capital of France is Paris.", + "response": "The capital of France is Paris.", "model": "gpt-4o", "assistant_id": "asst_123", "project_id": project.id, @@ -50,7 +50,7 @@ def test_get_conversation_by_id( response_id="resp_123", ancestor_response_id="ancestor_456", user_question="What is the capital of France?", - assistant_response="The capital of France is Paris.", + response="The capital of France is Paris.", model="gpt-4o", input_tokens=10, output_tokens=5, @@ -82,7 +82,7 @@ def test_get_conversation_by_response_id( response_id="resp_123", ancestor_response_id="ancestor_456", user_question="What is the capital of France?", - assistant_response="The capital of France is Paris.", + response="The capital of France is Paris.", model="gpt-4o", input_tokens=10, 
output_tokens=5, @@ -113,7 +113,7 @@ def test_get_conversations_by_ancestor( response_id="resp_1", ancestor_response_id="ancestor_123", user_question="What is the capital of France?", - assistant_response="The capital of France is Paris.", + response="The capital of France is Paris.", model="gpt-4o", input_tokens=10, output_tokens=5, @@ -126,7 +126,7 @@ def test_get_conversations_by_ancestor( response_id="resp_2", ancestor_response_id="ancestor_123", user_question="What is the capital of Spain?", - assistant_response="The capital of Spain is Madrid.", + response="The capital of Spain is Madrid.", model="gpt-4o", input_tokens=10, output_tokens=5, @@ -139,7 +139,7 @@ def test_get_conversations_by_ancestor( response_id="resp_3", ancestor_response_id="ancestor_456", user_question="What is the capital of Italy?", - assistant_response="The capital of Italy is Rome.", + response="The capital of Italy is Rome.", model="gpt-4o", input_tokens=10, output_tokens=5, @@ -174,7 +174,7 @@ def test_update_conversation( response_id="resp_123", ancestor_response_id="ancestor_456", user_question="What is the capital of France?", - assistant_response="The capital of France is Paris.", + response="The capital of France is Paris.", model="gpt-4o", input_tokens=10, output_tokens=5, @@ -212,7 +212,7 @@ def test_delete_conversation_by_id( conversation_data = OpenAIConversationCreate( response_id="resp_123", user_question="What is the capital of France?", - assistant_response="The capital of France is Paris.", + response="The capital of France is Paris.", model="gpt-4o", input_tokens=10, output_tokens=5, @@ -243,7 +243,7 @@ def test_list_conversations( response_id="resp_1", ancestor_response_id="ancestor_1", user_question="What is the capital of France?", - assistant_response="The capital of France is Paris.", + response="The capital of France is Paris.", model="gpt-4o", input_tokens=10, output_tokens=5, @@ -256,7 +256,7 @@ def test_list_conversations( response_id="resp_2", 
ancestor_response_id="ancestor_2", user_question="What is the capital of Spain?", - assistant_response="The capital of Spain is Madrid.", + response="The capital of Spain is Madrid.", model="gpt-4o", input_tokens=10, output_tokens=5, diff --git a/backend/app/tests/api/routes/test_responses.py b/backend/app/tests/api/routes/test_responses.py index 599011283..f5ef83943 100644 --- a/backend/app/tests/api/routes/test_responses.py +++ b/backend/app/tests/api/routes/test_responses.py @@ -183,6 +183,6 @@ def test_responses_endpoint_stores_conversation( assert conversation_data.response_id == "mock_response_id" assert conversation_data.user_question == "What is Glific?" - assert conversation_data.assistant_response == "Test assistant response" + assert conversation_data.response == "Test assistant response" assert conversation_data.model == "gpt-4o" assert conversation_data.assistant_id == "assistant_123" From 0f3c0807091c08d9c60a6d6b16d714bd19d5f66a Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Tue, 22 Jul 2025 16:07:52 +0530 Subject: [PATCH 15/40] fix pre commit changes --- backend/app/crud/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/app/crud/__init__.py b/backend/app/crud/__init__.py index 139e2b78d..f171aa573 100644 --- a/backend/app/crud/__init__.py +++ b/backend/app/crud/__init__.py @@ -53,7 +53,7 @@ get_all_openai_conversations, update_openai_conversation, delete_openai_conversation, -) +) from .assistants import ( get_assistant_by_id, From 56fa02beb84d46c8242f759923381e08a07a881a Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Tue, 22 Jul 2025 16:20:36 +0530 Subject: [PATCH 16/40] migration msg --- .../versions/f5628e3a9988_add_openai_conversation_table.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py index 34407acbd..098ccc9e7 100644 --- 
a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py +++ b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py @@ -1,4 +1,4 @@ -"""your migration message +"""add openai_conversation table Revision ID: 8811e4d2fcf9 Revises: 4aa1f48c6321 From c3d6fe14cc395955523294684d0d24c819badf93 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Tue, 22 Jul 2025 16:25:43 +0530 Subject: [PATCH 17/40] migration msg --- backend/app/api/routes/openai_conversation.py | 3 +-- backend/app/tests/api/routes/test_responses.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/backend/app/api/routes/openai_conversation.py b/backend/app/api/routes/openai_conversation.py index 22a385ea4..b638fa49b 100644 --- a/backend/app/api/routes/openai_conversation.py +++ b/backend/app/api/routes/openai_conversation.py @@ -1,5 +1,4 @@ -from typing import List, Optional -from uuid import UUID +from typing import List from fastapi import APIRouter, Depends, HTTPException, Query, Path from sqlmodel import Session diff --git a/backend/app/tests/api/routes/test_responses.py b/backend/app/tests/api/routes/test_responses.py index f5ef83943..2783a159b 100644 --- a/backend/app/tests/api/routes/test_responses.py +++ b/backend/app/tests/api/routes/test_responses.py @@ -146,7 +146,7 @@ def test_responses_endpoint_stores_conversation( mock_assistant.model = "gpt-4o" mock_assistant.instructions = "Test instructions" mock_assistant.temperature = 0.1 - mock_assistant.vector_store_id = "vs_test" + mock_assistant.vector_store_ids = "vs_test" mock_assistant.max_num_results = 20 mock_get_assistant.return_value = mock_assistant From 2622931f1ecea179943fdd9d47897824ac79547e Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Tue, 22 Jul 2025 16:31:21 +0530 Subject: [PATCH 18/40] using built in types --- backend/app/api/routes/openai_conversation.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/backend/app/api/routes/openai_conversation.py 
b/backend/app/api/routes/openai_conversation.py index b638fa49b..c7021a3f5 100644 --- a/backend/app/api/routes/openai_conversation.py +++ b/backend/app/api/routes/openai_conversation.py @@ -1,4 +1,3 @@ -from typing import List from fastapi import APIRouter, Depends, HTTPException, Query, Path from sqlmodel import Session @@ -48,7 +47,7 @@ async def create_conversation( @router.get( "/list", - response_model=APIResponse[List[OpenAIConversationPublic]], + response_model=APIResponse[list[OpenAIConversationPublic]], summary="List all conversations", description="Retrieve all conversations with pagination support", ) @@ -111,7 +110,7 @@ async def get_conversation_by_response_id( @router.get( "/ancestor/{ancestor_response_id}", - response_model=APIResponse[List[OpenAIConversationPublic]], + response_model=APIResponse[list[OpenAIConversationPublic]], summary="Get conversations by ancestor", description="Retrieve all conversations that have the specified ancestor_response_id", ) From 062d079778eaf8b6a4e0863614340860ac92ad3f Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Tue, 22 Jul 2025 16:33:44 +0530 Subject: [PATCH 19/40] cleanups --- .../api/routes/test_openai_conversation.py | 27 ------------------- 1 file changed, 27 deletions(-) diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index c991dda46..ac7e555d8 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -52,9 +52,6 @@ def test_get_conversation_by_id( user_question="What is the capital of France?", response="The capital of France is Paris.", model="gpt-4o", - input_tokens=10, - output_tokens=5, - total_tokens=15, assistant_id="asst_123", project_id=project.id, organization_id=project.organization_id, @@ -84,9 +81,6 @@ def test_get_conversation_by_response_id( user_question="What is the capital of France?", response="The capital of France is Paris.", 
model="gpt-4o", - input_tokens=10, - output_tokens=5, - total_tokens=15, assistant_id="asst_123", project_id=project.id, organization_id=project.organization_id, @@ -115,9 +109,6 @@ def test_get_conversations_by_ancestor( user_question="What is the capital of France?", response="The capital of France is Paris.", model="gpt-4o", - input_tokens=10, - output_tokens=5, - total_tokens=15, assistant_id="asst_123", project_id=project.id, organization_id=project.organization_id, @@ -128,9 +119,6 @@ def test_get_conversations_by_ancestor( user_question="What is the capital of Spain?", response="The capital of Spain is Madrid.", model="gpt-4o", - input_tokens=10, - output_tokens=5, - total_tokens=15, assistant_id="asst_123", project_id=project.id, organization_id=project.organization_id, @@ -141,9 +129,6 @@ def test_get_conversations_by_ancestor( user_question="What is the capital of Italy?", response="The capital of Italy is Rome.", model="gpt-4o", - input_tokens=10, - output_tokens=5, - total_tokens=15, assistant_id="asst_123", project_id=project.id, organization_id=project.organization_id, @@ -176,9 +161,6 @@ def test_update_conversation( user_question="What is the capital of France?", response="The capital of France is Paris.", model="gpt-4o", - input_tokens=10, - output_tokens=5, - total_tokens=15, assistant_id="asst_123", project_id=project.id, organization_id=project.organization_id, @@ -214,9 +196,6 @@ def test_delete_conversation_by_id( user_question="What is the capital of France?", response="The capital of France is Paris.", model="gpt-4o", - input_tokens=10, - output_tokens=5, - total_tokens=15, assistant_id="asst_123", project_id=project.id, organization_id=project.organization_id, @@ -245,9 +224,6 @@ def test_list_conversations( user_question="What is the capital of France?", response="The capital of France is Paris.", model="gpt-4o", - input_tokens=10, - output_tokens=5, - total_tokens=15, assistant_id="asst_123", project_id=project.id, 
organization_id=project.organization_id, @@ -258,9 +234,6 @@ def test_list_conversations( user_question="What is the capital of Spain?", response="The capital of Spain is Madrid.", model="gpt-4o", - input_tokens=10, - output_tokens=5, - total_tokens=15, assistant_id="asst_123", project_id=project.id, organization_id=project.organization_id, From 2171457aaff94ee290d99fb83085c80d1b4104f6 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Wed, 23 Jul 2025 11:34:47 +0530 Subject: [PATCH 20/40] fixing few review comments --- backend/app/api/routes/openai_conversation.py | 25 +------------- backend/app/crud/openai_conversation.py | 1 - backend/app/models/openai_conversation.py | 8 +++-- .../api/routes/test_openai_conversation.py | 33 ------------------- 4 files changed, 7 insertions(+), 60 deletions(-) diff --git a/backend/app/api/routes/openai_conversation.py b/backend/app/api/routes/openai_conversation.py index c7021a3f5..445767325 100644 --- a/backend/app/api/routes/openai_conversation.py +++ b/backend/app/api/routes/openai_conversation.py @@ -22,29 +22,6 @@ router = APIRouter(prefix="/openai-conversation", tags=["openai_conversation"]) -@router.post( - "/create", - response_model=APIResponse[OpenAIConversationPublic], - summary="Create a new OpenAI conversation", - description="Create a new conversation entry with response_id, ancestor_response_id, and previous_response_id", -) -async def create_conversation( - conversation_data: OpenAIConversationCreate, - db: Session = Depends(get_db), - _current_user: UserProjectOrg = Depends(get_current_user_org_project), -): - """Create a new OpenAI conversation entry.""" - try: - conversation = create_openai_conversation(db, conversation_data) - return APIResponse.success_response( - data=OpenAIConversationPublic.model_validate(conversation) - ) - except Exception as e: - raise HTTPException( - status_code=400, detail=f"Failed to create conversation: {str(e)}" - ) - - @router.get( "/list", 
response_model=APIResponse[list[OpenAIConversationPublic]], @@ -54,7 +31,7 @@ async def create_conversation( async def list_conversations( skip: int = Query(0, ge=0, description="Number of records to skip"), limit: int = Query( - 100, gt=0, le=1000, description="Maximum number of records to return" + 100, gt=0, le=100, description="Maximum number of records to return" ), db: Session = Depends(get_db), _current_user: UserOrganization = Depends(get_current_user_org), diff --git a/backend/app/crud/openai_conversation.py b/backend/app/crud/openai_conversation.py index ed91fa153..d4c8aeebf 100644 --- a/backend/app/crud/openai_conversation.py +++ b/backend/app/crud/openai_conversation.py @@ -5,7 +5,6 @@ OpenAI_Conversation, OpenAIConversationCreate, OpenAIConversationUpdate, - OpenAIConversationPublic, ) diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py index f2dc43753..e8f7ad3ba 100644 --- a/backend/app/models/openai_conversation.py +++ b/backend/app/models/openai_conversation.py @@ -13,8 +13,12 @@ class OpenAIConversationBase(SQLModel): assistant_id: Optional[str] = Field( default=None, description="The assistant ID used" ) - project_id: int = Field(foreign_key="project.id") - organization_id: int = Field(foreign_key="organization.id") + project_id: int = Field( + default=None, foreign_key="project.id", nullable=False, ondelete="CASCADE" + ) + organization_id: int = Field( + foreign_key="organization.id", nullable=False, ondelete="CASCADE" + ) class OpenAIConversationCreate(OpenAIConversationBase): diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index ac7e555d8..539d8b672 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -7,39 +7,6 @@ from app.tests.utils.utils import get_project -def test_create_conversation( - client: TestClient, db: Session, 
normal_user_api_key_headers: dict[str, str] -): - """Test creating a new conversation.""" - project = get_project(db) - conversation_data = { - "response_id": "resp_123", - "ancestor_response_id": "ancestor_456", - "previous_response_id": "prev_789", - "user_question": "What is the capital of France?", - "response": "The capital of France is Paris.", - "model": "gpt-4o", - "assistant_id": "asst_123", - "project_id": project.id, - "organization_id": project.organization_id, - } - response = client.post( - "/api/v1/openai-conversation/create", - json=conversation_data, - headers=normal_user_api_key_headers, - ) - - assert response.status_code == 200 - data = response.json() - assert data["success"] is True - assert data["data"]["response_id"] == "resp_123" - assert data["data"]["ancestor_response_id"] == "ancestor_456" - assert data["data"]["previous_response_id"] == "prev_789" - assert "id" in data["data"] - assert "inserted_at" in data["data"] - assert "updated_at" in data["data"] - - def test_get_conversation_by_id( client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] ): From 23e15b92439952a1d9d5940430f5d9e6dca80035 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Wed, 23 Jul 2025 12:11:23 +0530 Subject: [PATCH 21/40] updated testcases --- backend/app/models/openai_conversation.py | 10 ++++-- .../api/routes/test_openai_conversation.py | 36 ++++++++++--------- .../app/tests/api/routes/test_responses.py | 14 ++++---- 3 files changed, 34 insertions(+), 26 deletions(-) diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py index e8f7ad3ba..3370fc978 100644 --- a/backend/app/models/openai_conversation.py +++ b/backend/app/models/openai_conversation.py @@ -9,9 +9,15 @@ class OpenAIConversationBase(SQLModel): previous_response_id: Optional[str] = Field(default=None, index=True) user_question: str = Field(description="The user's input question") response: str = Field(description="The assistant's 
response") - model: str = Field(description="The model used for the response") + model: str = Field( + description="The model used for the response", min_length=5, max_length=40 + ) assistant_id: Optional[str] = Field( - default=None, description="The assistant ID used" + unique=True, + default=None, + description="The assistant ID used", + min_length=20, + max_length=40, ) project_id: int = Field( default=None, foreign_key="project.id", nullable=False, ondelete="CASCADE" diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index 539d8b672..63b565909 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -14,12 +14,12 @@ def test_get_conversation_by_id( project = get_project(db) # Create a conversation first conversation_data = OpenAIConversationCreate( - response_id="resp_123", + response_id="resp_test688080a1c52c819c937", ancestor_response_id="ancestor_456", user_question="What is the capital of France?", response="The capital of France is Paris.", model="gpt-4o", - assistant_id="asst_123", + assistant_id="asst_testXLnzQYrQlAEzrOA", project_id=project.id, organization_id=project.organization_id, ) @@ -33,7 +33,7 @@ def test_get_conversation_by_id( data = response.json() assert data["success"] is True assert data["data"]["id"] == conversation.id - assert data["data"]["response_id"] == "resp_123" + assert data["data"]["response_id"] == "resp_test688080a1c52c819c937" def test_get_conversation_by_response_id( @@ -43,25 +43,25 @@ def test_get_conversation_by_response_id( project = get_project(db) # Create a conversation first conversation_data = OpenAIConversationCreate( - response_id="resp_123", + response_id="resp_test688080a1c52c819c937", ancestor_response_id="ancestor_456", user_question="What is the capital of France?", response="The capital of France is Paris.", model="gpt-4o", - assistant_id="asst_123", + 
assistant_id="asst_testXLnzQYrQlAEzrOA", project_id=project.id, organization_id=project.organization_id, ) create_openai_conversation(db, conversation_data) response = client.get( - "/api/v1/openai-conversation/response/resp_123", + "/api/v1/openai-conversation/response/resp_test688080a1c52c819c937", headers=normal_user_api_key_headers, ) assert response.status_code == 200 data = response.json() assert data["success"] is True - assert data["data"]["response_id"] == "resp_123" + assert data["data"]["response_id"] == "resp_test688080a1c52c819c937" def test_get_conversations_by_ancestor( @@ -76,7 +76,7 @@ def test_get_conversations_by_ancestor( user_question="What is the capital of France?", response="The capital of France is Paris.", model="gpt-4o", - assistant_id="asst_123", + assistant_id="asst_testXLnzQYrQlAEzrOA", project_id=project.id, organization_id=project.organization_id, ) @@ -86,7 +86,7 @@ def test_get_conversations_by_ancestor( user_question="What is the capital of Spain?", response="The capital of Spain is Madrid.", model="gpt-4o", - assistant_id="asst_123", + assistant_id="asst_testXLnzQYrQlAEzrOA", project_id=project.id, organization_id=project.organization_id, ) @@ -96,7 +96,7 @@ def test_get_conversations_by_ancestor( user_question="What is the capital of Italy?", response="The capital of Italy is Rome.", model="gpt-4o", - assistant_id="asst_123", + assistant_id="asst_testXLnzQYrQlAEzrOA", project_id=project.id, organization_id=project.organization_id, ) @@ -123,12 +123,12 @@ def test_update_conversation( project = get_project(db) # Create a conversation first conversation_data = OpenAIConversationCreate( - response_id="resp_123", + response_id="resp_test688080a1c52c819c937", ancestor_response_id="ancestor_456", user_question="What is the capital of France?", response="The capital of France is Paris.", model="gpt-4o", - assistant_id="asst_123", + assistant_id="asst_testXLnzQYrQlAEzrOA", project_id=project.id, organization_id=project.organization_id, 
) @@ -149,7 +149,9 @@ def test_update_conversation( assert data["success"] is True assert data["data"]["ancestor_response_id"] == "ancestor_789" assert data["data"]["previous_response_id"] == "prev_123" - assert data["data"]["response_id"] == "resp_123" # Should remain unchanged + assert ( + data["data"]["response_id"] == "resp_test688080a1c52c819c937" + ) # Should remain unchanged def test_delete_conversation_by_id( @@ -159,11 +161,11 @@ def test_delete_conversation_by_id( project = get_project(db) # Create a conversation first conversation_data = OpenAIConversationCreate( - response_id="resp_123", + response_id="resp_test688080a1c52c819c937", user_question="What is the capital of France?", response="The capital of France is Paris.", model="gpt-4o", - assistant_id="asst_123", + assistant_id="asst_testXLnzQYrQlAEzrOA", project_id=project.id, organization_id=project.organization_id, ) @@ -191,7 +193,7 @@ def test_list_conversations( user_question="What is the capital of France?", response="The capital of France is Paris.", model="gpt-4o", - assistant_id="asst_123", + assistant_id="asst_testXLnzQYrQlAEzrOA", project_id=project.id, organization_id=project.organization_id, ) @@ -201,7 +203,7 @@ def test_list_conversations( user_question="What is the capital of Spain?", response="The capital of Spain is Madrid.", model="gpt-4o", - assistant_id="asst_123", + assistant_id="asst_testXLnzQYrQlAEzrOA", project_id=project.id, organization_id=project.organization_id, ) diff --git a/backend/app/tests/api/routes/test_responses.py b/backend/app/tests/api/routes/test_responses.py index 2783a159b..8d91b230a 100644 --- a/backend/app/tests/api/routes/test_responses.py +++ b/backend/app/tests/api/routes/test_responses.py @@ -28,7 +28,7 @@ def test_responses_endpoint_success( # Setup the mock response object with real values for all used fields mock_response = MagicMock() - mock_response.id = "mock_response_id" + mock_response.id = "resp_test688080a1c52c819c937" 
mock_response.output_text = "Test assistant response" mock_response.model = "gpt-4o" mock_response.usage.input_tokens = 10 @@ -86,7 +86,7 @@ def test_responses_endpoint_without_vector_store( # Setup the mock response object mock_response = MagicMock() - mock_response.id = "mock_response_id" + mock_response.id = "resp_test688080a1c52c819c937" mock_response.output_text = "Test assistant response" mock_response.model = "gpt-4" mock_response.usage.input_tokens = 10 @@ -101,7 +101,7 @@ def test_responses_endpoint_without_vector_store( pytest.skip("Glific project not found in the database") request_data = { - "assistant_id": "assistant_123", + "assistant_id": "asst_testXLnzQYrQlAEzrOA", "question": "What is Glific?", "callback_url": "http://example.com/callback", } @@ -156,14 +156,14 @@ def test_responses_endpoint_stores_conversation( # Setup the mock response object mock_response = MagicMock() - mock_response.id = "mock_response_id" + mock_response.id = "resp_test688080a1c52c819c937" mock_response.output_text = "Test assistant response" mock_response.model = "gpt-4o" mock_response.output = [] mock_client.responses.create.return_value = mock_response request_data = { - "assistant_id": "assistant_123", + "assistant_id": "asst_testXLnzQYrQlAEzrOA", "question": "What is Glific?", "callback_url": "http://example.com/callback", } @@ -181,8 +181,8 @@ def test_responses_endpoint_stores_conversation( call_args = mock_create_conversation.call_args conversation_data = call_args[0][1] # Second argument is the conversation data - assert conversation_data.response_id == "mock_response_id" + assert conversation_data.response_id == "resp_test688080a1c52c819c937" assert conversation_data.user_question == "What is Glific?" 
assert conversation_data.response == "Test assistant response" assert conversation_data.model == "gpt-4o" - assert conversation_data.assistant_id == "assistant_123" + assert conversation_data.assistant_id == "asst_testXLnzQYrQlAEzrOA" From 7b642c14e3e1ed44d9f00fdaee2087e7ccc1b740 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Wed, 23 Jul 2025 13:10:08 +0530 Subject: [PATCH 22/40] cleanups --- backend/app/api/routes/openai_conversation.py | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/app/api/routes/openai_conversation.py b/backend/app/api/routes/openai_conversation.py index 445767325..20a247367 100644 --- a/backend/app/api/routes/openai_conversation.py +++ b/backend/app/api/routes/openai_conversation.py @@ -9,7 +9,6 @@ OpenAIConversationPublic, ) from app.crud.openai_conversation import ( - create_openai_conversation, get_openai_conversation_by_id, get_openai_conversation_by_response_id, get_openai_conversations_by_ancestor, From 533a3f2440988f16cb5458d0d7e17402595e5d3b Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Thu, 24 Jul 2025 10:36:00 +0530 Subject: [PATCH 23/40] remove OpenAIConversationUpdate --- backend/app/api/routes/openai_conversation.py | 28 +------------------ backend/app/crud/openai_conversation.py | 27 +----------------- backend/app/models/openai_conversation.py | 12 -------- 3 files changed, 2 insertions(+), 65 deletions(-) diff --git a/backend/app/api/routes/openai_conversation.py b/backend/app/api/routes/openai_conversation.py index 20a247367..cd7134f8e 100644 --- a/backend/app/api/routes/openai_conversation.py +++ b/backend/app/api/routes/openai_conversation.py @@ -3,11 +3,7 @@ from app.api.deps import get_db, get_current_user_org, get_current_user_org_project from app.models import UserOrganization, UserProjectOrg -from app.models.openai_conversation import ( - OpenAIConversationCreate, - OpenAIConversationUpdate, - OpenAIConversationPublic, -) +from app.models.openai_conversation import OpenAIConversationPublic from 
app.crud.openai_conversation import ( get_openai_conversation_by_id, get_openai_conversation_by_response_id, @@ -102,28 +98,6 @@ async def get_conversations_by_ancestor( ) -@router.put( - "/{conversation_id}", - response_model=APIResponse[OpenAIConversationPublic], - summary="Update conversation", - description="Update an existing conversation by ID", -) -async def update_conversation( - conversation_data: OpenAIConversationUpdate, - conversation_id: int = Path(..., description="The conversation ID"), - db: Session = Depends(get_db), - _current_user: UserProjectOrg = Depends(get_current_user_org_project), -): - """Update a conversation by its ID.""" - conversation = update_openai_conversation(db, conversation_id, conversation_data) - if not conversation: - raise HTTPException(status_code=404, detail="Conversation not found") - - return APIResponse.success_response( - data=OpenAIConversationPublic.model_validate(conversation) - ) - - @router.delete( "/{conversation_id}", response_model=APIResponse[dict], diff --git a/backend/app/crud/openai_conversation.py b/backend/app/crud/openai_conversation.py index d4c8aeebf..fbdd008f8 100644 --- a/backend/app/crud/openai_conversation.py +++ b/backend/app/crud/openai_conversation.py @@ -1,11 +1,7 @@ from sqlmodel import Session, select from datetime import datetime, UTC from typing import List, Optional -from app.models import ( - OpenAI_Conversation, - OpenAIConversationCreate, - OpenAIConversationUpdate, -) +from app.models import OpenAI_Conversation, OpenAIConversationCreate def create_openai_conversation( @@ -52,27 +48,6 @@ def get_all_openai_conversations( return session.exec(statement).all() -def update_openai_conversation( - session: Session, - conversation_id: int, - data: OpenAIConversationUpdate, -) -> Optional[OpenAI_Conversation]: - conversation = get_openai_conversation_by_id(session, conversation_id) - if not conversation: - return None - - update_data = data.model_dump(exclude_unset=True) - 
update_data["updated_at"] = datetime.now(UTC) - - for field, value in update_data.items(): - setattr(conversation, field, value) - - session.add(conversation) - session.commit() - session.refresh(conversation) - return conversation - - def delete_openai_conversation(session: Session, conversation_id: int) -> bool: conversation = get_openai_conversation_by_id(session, conversation_id) if not conversation: diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py index 3370fc978..6d52acb49 100644 --- a/backend/app/models/openai_conversation.py +++ b/backend/app/models/openai_conversation.py @@ -31,18 +31,6 @@ class OpenAIConversationCreate(OpenAIConversationBase): pass # Used for requests, no `id` or timestamps -class OpenAIConversationUpdate(SQLModel): - response_id: Optional[str] = None - ancestor_response_id: Optional[str] = None - previous_response_id: Optional[str] = None - user_question: Optional[str] = None - response: Optional[str] = None - model: Optional[str] = None - assistant_id: Optional[str] = None - project_id: Optional[int] = Field(default=None, foreign_key="project.id") - organization_id: Optional[int] = Field(default=None, foreign_key="organization.id") - - class OpenAIConversationPublic(OpenAIConversationBase): id: int inserted_at: datetime From de51df09ec948cc46474e2afaacc76ba18362a5b Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Thu, 24 Jul 2025 10:55:03 +0530 Subject: [PATCH 24/40] cleanups --- backend/app/api/routes/openai_conversation.py | 1 - backend/app/crud/__init__.py | 1 - 2 files changed, 2 deletions(-) diff --git a/backend/app/api/routes/openai_conversation.py b/backend/app/api/routes/openai_conversation.py index cd7134f8e..5fb6e32ac 100644 --- a/backend/app/api/routes/openai_conversation.py +++ b/backend/app/api/routes/openai_conversation.py @@ -9,7 +9,6 @@ get_openai_conversation_by_response_id, get_openai_conversations_by_ancestor, get_all_openai_conversations, - 
update_openai_conversation, delete_openai_conversation, ) from app.utils import APIResponse diff --git a/backend/app/crud/__init__.py b/backend/app/crud/__init__.py index e799367ca..e517fdd3c 100644 --- a/backend/app/crud/__init__.py +++ b/backend/app/crud/__init__.py @@ -51,7 +51,6 @@ get_openai_conversation_by_response_id, get_openai_conversations_by_ancestor, get_all_openai_conversations, - update_openai_conversation, delete_openai_conversation, ) From 191ca69d26b4f5e63f78a4db6cf3ccb5ec69252a Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Thu, 24 Jul 2025 10:56:05 +0530 Subject: [PATCH 25/40] removing update conversation testcases and cleanups --- .../api/routes/test_openai_conversation.py | 38 ------------------- 1 file changed, 38 deletions(-) diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index 63b565909..f976c215b 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -116,44 +116,6 @@ def test_get_conversations_by_ancestor( assert all(conv["ancestor_response_id"] == "ancestor_123" for conv in data["data"]) -def test_update_conversation( - client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] -): - """Test updating a conversation.""" - project = get_project(db) - # Create a conversation first - conversation_data = OpenAIConversationCreate( - response_id="resp_test688080a1c52c819c937", - ancestor_response_id="ancestor_456", - user_question="What is the capital of France?", - response="The capital of France is Paris.", - model="gpt-4o", - assistant_id="asst_testXLnzQYrQlAEzrOA", - project_id=project.id, - organization_id=project.organization_id, - ) - conversation = create_openai_conversation(db, conversation_data) - - update_data = { - "ancestor_response_id": "ancestor_789", - "previous_response_id": "prev_123", - } - response = client.put( - 
f"/api/v1/openai-conversation/{conversation.id}", - json=update_data, - headers=normal_user_api_key_headers, - ) - - assert response.status_code == 200 - data = response.json() - assert data["success"] is True - assert data["data"]["ancestor_response_id"] == "ancestor_789" - assert data["data"]["previous_response_id"] == "prev_123" - assert ( - data["data"]["response_id"] == "resp_test688080a1c52c819c937" - ) # Should remain unchanged - - def test_delete_conversation_by_id( client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] ): From 8c78d68466fa834b8e0a292c64082d9663113f86 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Thu, 24 Jul 2025 11:00:04 +0530 Subject: [PATCH 26/40] cleanups --- backend/app/models/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index 7c5f23587..db0bbcfd9 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -58,7 +58,6 @@ OpenAI_Conversation, OpenAIConversationBase, OpenAIConversationCreate, - OpenAIConversationUpdate, OpenAIConversationPublic, ) From 2050db629e3ee900e93b4e8d26c9824a3e84c0b0 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Thu, 24 Jul 2025 11:06:46 +0530 Subject: [PATCH 27/40] cleaning up models and migrations --- .../f5628e3a9988_add_openai_conversation_table.py | 2 +- backend/app/models/openai_conversation.py | 15 ++++++++------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py index 098ccc9e7..df4c5dde2 100644 --- a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py +++ b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py @@ -29,7 +29,7 @@ def upgrade(): "previous_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True ), sa.Column("user_question", 
sqlmodel.sql.sqltypes.AutoString(), nullable=False), - sa.Column("response", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("response", sqlmodel.sql.sqltypes.AutoString(), nullable=True), sa.Column("model", sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.Column("assistant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.Column("project_id", sa.Integer(), nullable=False), diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py index 6d52acb49..2f928f170 100644 --- a/backend/app/models/openai_conversation.py +++ b/backend/app/models/openai_conversation.py @@ -4,20 +4,21 @@ class OpenAIConversationBase(SQLModel): - response_id: str = Field(index=True) + response_id: str = Field(index=True, min_length=10) ancestor_response_id: Optional[str] = Field(default=None, index=True) previous_response_id: Optional[str] = Field(default=None, index=True) - user_question: str = Field(description="The user's input question") - response: str = Field(description="The assistant's response") + user_question: str = Field(description="The user's input question", min_length=1) + response: Optional[str] = Field(description="The assistant's response") + # there are models with small name like o1 and usually fine tuned models have long names model: str = Field( - description="The model used for the response", min_length=5, max_length=40 + description="The model used for the response", min_length=1, max_length=150 ) + # usually follow the pattern of asst_WD9bumYqTtpSvxxxxx assistant_id: Optional[str] = Field( - unique=True, default=None, description="The assistant ID used", - min_length=20, - max_length=40, + min_length=10, + max_length=50, ) project_id: int = Field( default=None, foreign_key="project.id", nullable=False, ondelete="CASCADE" From 746b037187624a9b88ba6b458ed3210576759595 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Thu, 24 Jul 2025 11:30:42 +0530 Subject: [PATCH 28/40] cleanups --- 
backend/app/tests/api/routes/test_responses.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/app/tests/api/routes/test_responses.py b/backend/app/tests/api/routes/test_responses.py index 8b7ac997b..3cc8e1700 100644 --- a/backend/app/tests/api/routes/test_responses.py +++ b/backend/app/tests/api/routes/test_responses.py @@ -5,7 +5,7 @@ from sqlmodel import select from app.api.routes.responses import router -from app.models import Project, OpenAI_Conversation +from app.models import Project # Wrap the router in a FastAPI app instance app = FastAPI() From b69365ff3ce28da983c65e80956d0064ac0e9cfe Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Thu, 24 Jul 2025 12:18:11 +0530 Subject: [PATCH 29/40] updated migration heads --- .../f5628e3a9988_add_openai_conversation_table.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py index df4c5dde2..cc95bab98 100644 --- a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py +++ b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py @@ -1,8 +1,8 @@ """add openai_conversation table -Revision ID: 8811e4d2fcf9 -Revises: 4aa1f48c6321 -Create Date: 2025-07-21 22:07:01.129760 +Revision ID: ff579a9523c5 +Revises: e8ee93526b37 +Create Date: 2025-07-24 12:16:51.311014 """ from alembic import op @@ -11,8 +11,8 @@ # revision identifiers, used by Alembic. 
-revision = "8811e4d2fcf9" -down_revision = "4aa1f48c6321" +revision = "ff579a9523c5" +down_revision = "e8ee93526b37" branch_labels = None depends_on = None From 838a3d52431ff649355272aee0baf806be1cc596 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Thu, 24 Jul 2025 12:44:48 +0530 Subject: [PATCH 30/40] added soft delete and auth logic --- ...28e3a9988_add_openai_conversation_table.py | 2 + backend/app/api/routes/openai_conversation.py | 40 +++++++++++-------- backend/app/crud/openai_conversation.py | 15 ++++++- backend/app/models/openai_conversation.py | 2 + .../api/routes/test_openai_conversation.py | 16 ++++++++ 5 files changed, 57 insertions(+), 18 deletions(-) diff --git a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py index cc95bab98..4a6f870a5 100644 --- a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py +++ b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py @@ -34,8 +34,10 @@ def upgrade(): sa.Column("assistant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.Column("project_id", sa.Integer(), nullable=False), sa.Column("organization_id", sa.Integer(), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=False), sa.Column("inserted_at", sa.DateTime(), nullable=False), sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), sa.PrimaryKeyConstraint("id"), sa.ForeignKeyConstraint( ["organization_id"], ["organization.id"], ondelete="CASCADE" diff --git a/backend/app/api/routes/openai_conversation.py b/backend/app/api/routes/openai_conversation.py index 5fb6e32ac..62233be9a 100644 --- a/backend/app/api/routes/openai_conversation.py +++ b/backend/app/api/routes/openai_conversation.py @@ -20,18 +20,20 @@ "/list", response_model=APIResponse[list[OpenAIConversationPublic]], summary="List all conversations", - 
description="Retrieve all conversations with pagination support", + description="Retrieve all OpenAI conversations with pagination support", ) async def list_conversations( + session: Session = Depends(get_db), + current_user: UserProjectOrg = Depends(get_current_user_org_project), skip: int = Query(0, ge=0, description="Number of records to skip"), limit: int = Query( 100, gt=0, le=100, description="Maximum number of records to return" ), - db: Session = Depends(get_db), - _current_user: UserOrganization = Depends(get_current_user_org), ): - """Get all conversations with pagination.""" - conversations = get_all_openai_conversations(db, skip=skip, limit=limit) + """Get all conversations with pagination for project and organization""" + conversations = get_all_openai_conversations( + session=session, project_id=current_user.project_id, skip=skip, limit=limit + ) return APIResponse.success_response( data=[OpenAIConversationPublic.model_validate(conv) for conv in conversations] ) @@ -46,11 +48,11 @@ async def list_conversations( async def get_conversation_by_id( conversation_id: int = Path(..., description="The conversation ID"), db: Session = Depends(get_db), - _current_user: UserOrganization = Depends(get_current_user_org), + current_user: UserProjectOrg = Depends(get_current_user_org_project), ): - """Get a conversation by its ID.""" + """Get a conversation by its ID, only if it belongs to the user's project.""" conversation = get_openai_conversation_by_id(db, conversation_id) - if not conversation: + if not conversation or conversation.project_id != current_user.project_id: raise HTTPException(status_code=404, detail="Conversation not found") return APIResponse.success_response( @@ -67,11 +69,11 @@ async def get_conversation_by_id( async def get_conversation_by_response_id( response_id: str = Path(..., description="The response ID"), db: Session = Depends(get_db), - _current_user: UserOrganization = Depends(get_current_user_org), + current_user: UserProjectOrg = 
Depends(get_current_user_org_project), ): - """Get a conversation by its response_id.""" + """Get a conversation by its response_id, only if it belongs to the user's project.""" conversation = get_openai_conversation_by_response_id(db, response_id) - if not conversation: + if not conversation or conversation.project_id != current_user.project_id: raise HTTPException(status_code=404, detail="Conversation not found") return APIResponse.success_response( @@ -88,12 +90,15 @@ async def get_conversation_by_response_id( async def get_conversations_by_ancestor( ancestor_response_id: str = Path(..., description="The ancestor response ID"), db: Session = Depends(get_db), - _current_user: UserOrganization = Depends(get_current_user_org), + current_user: UserProjectOrg = Depends(get_current_user_org_project), ): - """Get all conversations by ancestor_response_id.""" + """Get all conversations by ancestor_response_id, only for the user's project.""" conversations = get_openai_conversations_by_ancestor(db, ancestor_response_id) + filtered = [ + conv for conv in conversations if conv.project_id == current_user.project_id + ] return APIResponse.success_response( - data=[OpenAIConversationPublic.model_validate(conv) for conv in conversations] + data=[OpenAIConversationPublic.model_validate(conv) for conv in filtered] ) @@ -106,9 +111,12 @@ async def get_conversations_by_ancestor( async def delete_conversation_by_id( conversation_id: int = Path(..., description="The conversation ID"), db: Session = Depends(get_db), - _current_user: UserProjectOrg = Depends(get_current_user_org_project), + current_user: UserProjectOrg = Depends(get_current_user_org_project), ): - """Delete a conversation by its ID.""" + """Delete a conversation by its ID, only if it belongs to the user's project.""" + conversation = get_openai_conversation_by_id(db, conversation_id) + if not conversation or conversation.project_id != current_user.project_id: + raise HTTPException(status_code=404, detail="Conversation 
not found") success = delete_openai_conversation(db, conversation_id) if not success: raise HTTPException(status_code=404, detail="Conversation not found") diff --git a/backend/app/crud/openai_conversation.py b/backend/app/crud/openai_conversation.py index fbdd008f8..4e98d0e6f 100644 --- a/backend/app/crud/openai_conversation.py +++ b/backend/app/crud/openai_conversation.py @@ -42,9 +42,20 @@ def get_openai_conversations_by_ancestor( def get_all_openai_conversations( - session: Session, skip: int = 0, limit: int = 100 + session: Session, project_id: int, skip: int = 0, limit: int = 100 ) -> List[OpenAI_Conversation]: - statement = select(OpenAI_Conversation).offset(skip).limit(limit) + """ + Return all openai conversations for a given project and organization, with optional pagination. + """ + statement = ( + select(OpenAI_Conversation) + .where( + OpenAI_Conversation.project_id == project_id, + OpenAI_Conversation.is_deleted == False, + ) + .offset(skip) + .limit(limit) + ) return session.exec(statement).all() diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py index 2f928f170..5bffd6567 100644 --- a/backend/app/models/openai_conversation.py +++ b/backend/app/models/openai_conversation.py @@ -26,6 +26,8 @@ class OpenAIConversationBase(SQLModel): organization_id: int = Field( foreign_key="organization.id", nullable=False, ondelete="CASCADE" ) + is_deleted: bool = Field(default=False, nullable=False) + deleted_at: Optional[datetime] = Field(default=None, nullable=True) class OpenAIConversationCreate(OpenAIConversationBase): diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index f976c215b..aea259d16 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -34,6 +34,8 @@ def test_get_conversation_by_id( assert data["success"] is True assert data["data"]["id"] == 
conversation.id assert data["data"]["response_id"] == "resp_test688080a1c52c819c937" + assert data["data"]["is_deleted"] is False + assert data["data"]["deleted_at"] is None def test_get_conversation_by_response_id( @@ -62,6 +64,8 @@ def test_get_conversation_by_response_id( data = response.json() assert data["success"] is True assert data["data"]["response_id"] == "resp_test688080a1c52c819c937" + assert data["data"]["is_deleted"] is False + assert data["data"]["deleted_at"] is None def test_get_conversations_by_ancestor( @@ -114,12 +118,17 @@ def test_get_conversations_by_ancestor( assert data["success"] is True assert len(data["data"]) == 2 assert all(conv["ancestor_response_id"] == "ancestor_123" for conv in data["data"]) + for conv in data["data"]: + assert conv["is_deleted"] is False + assert conv["deleted_at"] is None def test_delete_conversation_by_id( client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] ): """Test deleting a conversation by ID.""" + from app.crud.openai_conversation import get_openai_conversation_by_id + project = get_project(db) # Create a conversation first conversation_data = OpenAIConversationCreate( @@ -141,6 +150,10 @@ def test_delete_conversation_by_id( data = response.json() assert data["success"] is True assert "deleted successfully" in data["data"]["message"] + # Fetch from DB and check is_deleted and deleted_at + deleted_conv = get_openai_conversation_by_id(db, conversation.id) + assert deleted_conv.is_deleted is True + assert deleted_conv.deleted_at is not None def test_list_conversations( @@ -184,3 +197,6 @@ def test_list_conversations( response_ids = [conv["response_id"] for conv in data["data"]] assert conversation1.response_id in response_ids assert conversation2.response_id in response_ids + for conv in data["data"]: + assert conv["is_deleted"] is False + assert conv["deleted_at"] is None From 7e5ad21569da4f6bc20f6089bc410a3cb38675d9 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Thu, 24 Jul 2025 
14:53:46 +0530 Subject: [PATCH 31/40] cleanups --- backend/app/models/openai_conversation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py index 5bffd6567..af0dce460 100644 --- a/backend/app/models/openai_conversation.py +++ b/backend/app/models/openai_conversation.py @@ -21,7 +21,7 @@ class OpenAIConversationBase(SQLModel): max_length=50, ) project_id: int = Field( - default=None, foreign_key="project.id", nullable=False, ondelete="CASCADE" + foreign_key="project.id", nullable=False, ondelete="CASCADE" ) organization_id: int = Field( foreign_key="organization.id", nullable=False, ondelete="CASCADE" From 6e796640361985d54d2e18b23235b2b9c2cbd8c0 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Fri, 25 Jul 2025 10:55:43 +0530 Subject: [PATCH 32/40] fixing testcases --- backend/app/crud/openai_conversation.py | 3 +- .../api/routes/test_openai_conversation.py | 88 +++++++++---------- 2 files changed, 46 insertions(+), 45 deletions(-) diff --git a/backend/app/crud/openai_conversation.py b/backend/app/crud/openai_conversation.py index 4e98d0e6f..7aa2b9ea9 100644 --- a/backend/app/crud/openai_conversation.py +++ b/backend/app/crud/openai_conversation.py @@ -56,7 +56,8 @@ def get_all_openai_conversations( .offset(skip) .limit(limit) ) - return session.exec(statement).all() + results = session.exec(statement).all() + return results def delete_openai_conversation(session: Session, conversation_id: int) -> bool: diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index aea259d16..439c7f150 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -2,16 +2,15 @@ from fastapi.testclient import TestClient from sqlmodel import Session -from app.models.openai_conversation import OpenAIConversationCreate from 
app.crud.openai_conversation import create_openai_conversation -from app.tests.utils.utils import get_project +from app.models.openai_conversation import OpenAIConversationCreate +from app.models import APIKeyPublic def test_get_conversation_by_id( - client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] + client: TestClient, db: Session, user_api_key: APIKeyPublic ): """Test getting a conversation by ID.""" - project = get_project(db) # Create a conversation first conversation_data = OpenAIConversationCreate( response_id="resp_test688080a1c52c819c937", @@ -20,13 +19,13 @@ def test_get_conversation_by_id( response="The capital of France is Paris.", model="gpt-4o", assistant_id="asst_testXLnzQYrQlAEzrOA", - project_id=project.id, - organization_id=project.organization_id, + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, ) conversation = create_openai_conversation(db, conversation_data) response = client.get( f"/api/v1/openai-conversation/{conversation.id}", - headers=normal_user_api_key_headers, + headers={"X-API-KEY": user_api_key.key}, ) assert response.status_code == 200 @@ -39,10 +38,9 @@ def test_get_conversation_by_id( def test_get_conversation_by_response_id( - client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] + client: TestClient, db: Session, user_api_key: APIKeyPublic ): """Test getting a conversation by response_id.""" - project = get_project(db) # Create a conversation first conversation_data = OpenAIConversationCreate( response_id="resp_test688080a1c52c819c937", @@ -51,13 +49,13 @@ def test_get_conversation_by_response_id( response="The capital of France is Paris.", model="gpt-4o", assistant_id="asst_testXLnzQYrQlAEzrOA", - project_id=project.id, - organization_id=project.organization_id, + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, ) create_openai_conversation(db, conversation_data) response = client.get( 
"/api/v1/openai-conversation/response/resp_test688080a1c52c819c937", - headers=normal_user_api_key_headers, + headers={"X-API-KEY": user_api_key.key}, ) assert response.status_code == 200 @@ -69,49 +67,53 @@ def test_get_conversation_by_response_id( def test_get_conversations_by_ancestor( - client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] + client: TestClient, db: Session, user_api_key: APIKeyPublic ): """Test getting conversations by ancestor_response_id.""" - project = get_project(db) # Create multiple conversations with same ancestor conversation_data1 = OpenAIConversationCreate( - response_id="resp_1", - ancestor_response_id="ancestor_123", + response_id="resp_test688080a1c52c819c937", + ancestor_response_id="resp_test688080a1c52c819c937", user_question="What is the capital of France?", response="The capital of France is Paris.", model="gpt-4o", assistant_id="asst_testXLnzQYrQlAEzrOA", - project_id=project.id, - organization_id=project.organization_id, + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, ) conversation_data2 = OpenAIConversationCreate( - response_id="resp_2", - ancestor_response_id="ancestor_123", + response_id="resp_test688080a1c52c819c937_2", + ancestor_response_id="resp_test688080a1c52c819c937", user_question="What is the capital of Spain?", response="The capital of Spain is Madrid.", model="gpt-4o", assistant_id="asst_testXLnzQYrQlAEzrOA", - project_id=project.id, - organization_id=project.organization_id, + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, ) conversation_data3 = OpenAIConversationCreate( - response_id="resp_3", - ancestor_response_id="ancestor_456", + response_id="resp_test688080a1c52c819c937_3", + ancestor_response_id="resp_test688080a1c52c819c937", user_question="What is the capital of Italy?", response="The capital of Italy is Rome.", model="gpt-4o", assistant_id="asst_testXLnzQYrQlAEzrOA", - project_id=project.id, - 
organization_id=project.organization_id, + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, ) - create_openai_conversation(db, conversation_data1) - create_openai_conversation(db, conversation_data2) - create_openai_conversation(db, conversation_data3) + conv_1 = create_openai_conversation(db, conversation_data1) + conv_2 = create_openai_conversation(db, conversation_data2) + conv_3 = create_openai_conversation(db, conversation_data3) + + print(conv_1) + print(conv_2) + print(conv_3) response = client.get( "/api/v1/openai-conversation/ancestor/ancestor_123", - headers=normal_user_api_key_headers, + headers={"X-API-KEY": user_api_key.key}, ) + print(response.json()) assert response.status_code == 200 data = response.json() @@ -124,12 +126,11 @@ def test_get_conversations_by_ancestor( def test_delete_conversation_by_id( - client: TestClient, db: Session, normal_user_api_key_headers: dict[str, str] + client: TestClient, db: Session, user_api_key: APIKeyPublic ): """Test deleting a conversation by ID.""" from app.crud.openai_conversation import get_openai_conversation_by_id - project = get_project(db) # Create a conversation first conversation_data = OpenAIConversationCreate( response_id="resp_test688080a1c52c819c937", @@ -137,13 +138,13 @@ def test_delete_conversation_by_id( response="The capital of France is Paris.", model="gpt-4o", assistant_id="asst_testXLnzQYrQlAEzrOA", - project_id=project.id, - organization_id=project.organization_id, + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, ) conversation = create_openai_conversation(db, conversation_data) response = client.delete( f"/api/v1/openai-conversation/{conversation.id}", - headers=normal_user_api_key_headers, + headers={"X-API-KEY": user_api_key.key}, ) assert response.status_code == 200 @@ -157,36 +158,35 @@ def test_delete_conversation_by_id( def test_list_conversations( - client: TestClient, db: Session, normal_user_api_key_headers: 
dict[str, str] + client: TestClient, db: Session, user_api_key: APIKeyPublic ): """Test listing all conversations.""" - project = get_project(db) # Create multiple conversations conversation_data1 = OpenAIConversationCreate( - response_id="resp_1", + response_id="resp_test688080a1c52c819c937", ancestor_response_id="ancestor_1", user_question="What is the capital of France?", response="The capital of France is Paris.", model="gpt-4o", assistant_id="asst_testXLnzQYrQlAEzrOA", - project_id=project.id, - organization_id=project.organization_id, + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, ) conversation_data2 = OpenAIConversationCreate( - response_id="resp_2", + response_id="resp_test688080a1c52c819c937_2", ancestor_response_id="ancestor_2", user_question="What is the capital of Spain?", response="The capital of Spain is Madrid.", model="gpt-4o", assistant_id="asst_testXLnzQYrQlAEzrOA", - project_id=project.id, - organization_id=project.organization_id, + project_id=user_api_key.project_id, + organization_id=user_api_key.organization_id, ) conversation1 = create_openai_conversation(db, conversation_data1) conversation2 = create_openai_conversation(db, conversation_data2) response = client.get( "/api/v1/openai-conversation/list", - headers=normal_user_api_key_headers, + headers={"X-API-KEY": user_api_key.key}, params={"skip": 0, "limit": 100}, ) From d1143f2af39aa21dec800122bd9be70f6fe34d78 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Fri, 25 Jul 2025 12:32:01 +0530 Subject: [PATCH 33/40] fixing few more testcases and API --- backend/app/api/routes/openai_conversation.py | 39 +++++++++++-------- backend/app/crud/openai_conversation.py | 21 +++++++--- 2 files changed, 37 insertions(+), 23 deletions(-) diff --git a/backend/app/api/routes/openai_conversation.py b/backend/app/api/routes/openai_conversation.py index 62233be9a..e781ac10d 100644 --- a/backend/app/api/routes/openai_conversation.py +++ 
b/backend/app/api/routes/openai_conversation.py @@ -47,17 +47,18 @@ async def list_conversations( ) async def get_conversation_by_id( conversation_id: int = Path(..., description="The conversation ID"), - db: Session = Depends(get_db), + session: Session = Depends(get_db), current_user: UserProjectOrg = Depends(get_current_user_org_project), ): """Get a conversation by its ID, only if it belongs to the user's project.""" - conversation = get_openai_conversation_by_id(db, conversation_id) - if not conversation or conversation.project_id != current_user.project_id: - raise HTTPException(status_code=404, detail="Conversation not found") - - return APIResponse.success_response( - data=OpenAIConversationPublic.model_validate(conversation) + conversation = get_openai_conversation_by_id( + session, conversation_id, current_user.project_id ) + if not conversation: + raise HTTPException( + status_code=404, detail=f"Conversation with ID {conversation_id} not found." + ) + return APIResponse.success_response(conversation) @router.get( @@ -68,17 +69,19 @@ async def get_conversation_by_id( ) async def get_conversation_by_response_id( response_id: str = Path(..., description="The response ID"), - db: Session = Depends(get_db), + session: Session = Depends(get_db), current_user: UserProjectOrg = Depends(get_current_user_org_project), ): """Get a conversation by its response_id, only if it belongs to the user's project.""" - conversation = get_openai_conversation_by_response_id(db, response_id) - if not conversation or conversation.project_id != current_user.project_id: - raise HTTPException(status_code=404, detail="Conversation not found") - - return APIResponse.success_response( - data=OpenAIConversationPublic.model_validate(conversation) + conversation = get_openai_conversation_by_response_id( + session, response_id, current_user.project_id ) + if not conversation: + raise HTTPException( + status_code=404, + detail=f"Conversation with response ID {response_id} not found.", + ) 
+ return APIResponse.success_response(conversation) @router.get( @@ -110,14 +113,16 @@ async def get_conversations_by_ancestor( ) async def delete_conversation_by_id( conversation_id: int = Path(..., description="The conversation ID"), - db: Session = Depends(get_db), + session: Session = Depends(get_db), current_user: UserProjectOrg = Depends(get_current_user_org_project), ): """Delete a conversation by its ID, only if it belongs to the user's project.""" - conversation = get_openai_conversation_by_id(db, conversation_id) + conversation = get_openai_conversation_by_id( + session, conversation_id, current_user.project_id + ) if not conversation or conversation.project_id != current_user.project_id: raise HTTPException(status_code=404, detail="Conversation not found") - success = delete_openai_conversation(db, conversation_id) + success = delete_openai_conversation(session, conversation_id) if not success: raise HTTPException(status_code=404, detail="Conversation not found") diff --git a/backend/app/crud/openai_conversation.py b/backend/app/crud/openai_conversation.py index 7aa2b9ea9..2bd3ca3a4 100644 --- a/backend/app/crud/openai_conversation.py +++ b/backend/app/crud/openai_conversation.py @@ -1,5 +1,4 @@ -from sqlmodel import Session, select -from datetime import datetime, UTC +from sqlmodel import Session, and_, select from typing import List, Optional from app.models import OpenAI_Conversation, OpenAIConversationCreate @@ -15,19 +14,29 @@ def create_openai_conversation( def get_openai_conversation_by_id( - session: Session, conversation_id: int + session: Session, openai_conversation_id: str, project_id: int ) -> Optional[OpenAI_Conversation]: + """Get an openai_conversation by its OpenAI openai_conversation ID and project ID.""" statement = select(OpenAI_Conversation).where( - OpenAI_Conversation.id == conversation_id + and_( + OpenAI_Conversation.id == openai_conversation_id, + OpenAI_Conversation.project_id == project_id, + OpenAI_Conversation.is_deleted == 
False, + ) ) return session.exec(statement).first() def get_openai_conversation_by_response_id( - session: Session, response_id: str + session: Session, response_id: str, project_id: int ) -> Optional[OpenAI_Conversation]: + """Get an openai_conversation by its OpenAI response ID and project ID.""" statement = select(OpenAI_Conversation).where( - OpenAI_Conversation.response_id == response_id + and_( + OpenAI_Conversation.response_id == response_id, + OpenAI_Conversation.project_id == project_id, + OpenAI_Conversation.is_deleted == False, + ) ) return session.exec(statement).first() From 9cd927b65bc80266d655aa4066cb4c44c65fa5a7 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Fri, 25 Jul 2025 12:54:03 +0530 Subject: [PATCH 34/40] fixing ancestor testcases --- backend/app/api/routes/openai_conversation.py | 20 ++++++++++--------- backend/app/crud/openai_conversation.py | 11 +++++++--- .../api/routes/test_openai_conversation.py | 19 +++++++++--------- 3 files changed, 28 insertions(+), 22 deletions(-) diff --git a/backend/app/api/routes/openai_conversation.py b/backend/app/api/routes/openai_conversation.py index e781ac10d..ddc1884e7 100644 --- a/backend/app/api/routes/openai_conversation.py +++ b/backend/app/api/routes/openai_conversation.py @@ -91,18 +91,20 @@ async def get_conversation_by_response_id( description="Retrieve all conversations that have the specified ancestor_response_id", ) async def get_conversations_by_ancestor( - ancestor_response_id: str = Path(..., description="The ancestor response ID"), - db: Session = Depends(get_db), + ancestor_response_id: str = Path(..., description="The ancestor ID"), + session: Session = Depends(get_db), current_user: UserProjectOrg = Depends(get_current_user_org_project), ): - """Get all conversations by ancestor_response_id, only for the user's project.""" - conversations = get_openai_conversations_by_ancestor(db, ancestor_response_id) - filtered = [ - conv for conv in conversations if conv.project_id == 
current_user.project_id - ] - return APIResponse.success_response( - data=[OpenAIConversationPublic.model_validate(conv) for conv in filtered] + """Get a conversation by its response_id, only if it belongs to the user's project.""" + conversation = get_openai_conversations_by_ancestor( + session, ancestor_response_id, current_user.project_id ) + if not conversation: + raise HTTPException( + status_code=404, + detail=f"Conversation with ancestor ID {ancestor_response_id} not found.", + ) + return APIResponse.success_response(conversation) @router.delete( diff --git a/backend/app/crud/openai_conversation.py b/backend/app/crud/openai_conversation.py index 2bd3ca3a4..a5ff6e4a2 100644 --- a/backend/app/crud/openai_conversation.py +++ b/backend/app/crud/openai_conversation.py @@ -42,10 +42,15 @@ def get_openai_conversation_by_response_id( def get_openai_conversations_by_ancestor( - session: Session, ancestor_response_id: str -) -> List[OpenAI_Conversation]: + session: Session, ancestor_response_id: str, project_id: int +) -> list[OpenAI_Conversation]: + """Get all openai_conversations by ancestor_response_id.""" statement = select(OpenAI_Conversation).where( - OpenAI_Conversation.ancestor_response_id == ancestor_response_id + and_( + OpenAI_Conversation.ancestor_response_id == ancestor_response_id, + OpenAI_Conversation.project_id == project_id, + OpenAI_Conversation.is_deleted == False, + ) ) return session.exec(statement).all() diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index 439c7f150..4b6963b4b 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -102,24 +102,23 @@ def test_get_conversations_by_ancestor( organization_id=user_api_key.organization_id, ) - conv_1 = create_openai_conversation(db, conversation_data1) - conv_2 = create_openai_conversation(db, conversation_data2) - conv_3 = 
create_openai_conversation(db, conversation_data3) + create_openai_conversation(db, conversation_data1) + create_openai_conversation(db, conversation_data2) + create_openai_conversation(db, conversation_data3) - print(conv_1) - print(conv_2) - print(conv_3) response = client.get( - "/api/v1/openai-conversation/ancestor/ancestor_123", + "/api/v1/openai-conversation/ancestor/resp_test688080a1c52c819c937", headers={"X-API-KEY": user_api_key.key}, ) - print(response.json()) assert response.status_code == 200 data = response.json() assert data["success"] is True - assert len(data["data"]) == 2 - assert all(conv["ancestor_response_id"] == "ancestor_123" for conv in data["data"]) + assert len(data["data"]) == 3 + assert all( + conv["ancestor_response_id"] == "resp_test688080a1c52c819c937" + for conv in data["data"] + ) for conv in data["data"]: assert conv["is_deleted"] is False assert conv["deleted_at"] is None From 306040b7b3a3a72698bdeff463d04fc774949982 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Fri, 25 Jul 2025 15:28:35 +0530 Subject: [PATCH 35/40] updated testcases --- backend/app/api/routes/openai_conversation.py | 29 ++++++-------- backend/app/crud/openai_conversation.py | 38 ++++++++++++++++--- .../api/routes/test_openai_conversation.py | 13 +++---- 3 files changed, 49 insertions(+), 31 deletions(-) diff --git a/backend/app/api/routes/openai_conversation.py b/backend/app/api/routes/openai_conversation.py index ddc1884e7..91044b6dc 100644 --- a/backend/app/api/routes/openai_conversation.py +++ b/backend/app/api/routes/openai_conversation.py @@ -1,5 +1,5 @@ -from fastapi import APIRouter, Depends, HTTPException, Query, Path from sqlmodel import Session +from fastapi import APIRouter, Depends, HTTPException, Query, Path from app.api.deps import get_db, get_current_user_org, get_current_user_org_project from app.models import UserOrganization, UserProjectOrg @@ -107,27 +107,20 @@ async def get_conversations_by_ancestor( return 
APIResponse.success_response(conversation) -@router.delete( - "/{conversation_id}", - response_model=APIResponse[dict], - summary="Delete conversation by ID", - description="Delete a conversation by its database ID", -) -async def delete_conversation_by_id( +@router.delete("/{conversation_id}", response_model=APIResponse) +def delete_conversation_by_id( conversation_id: int = Path(..., description="The conversation ID"), session: Session = Depends(get_db), current_user: UserProjectOrg = Depends(get_current_user_org_project), ): - """Delete a conversation by its ID, only if it belongs to the user's project.""" - conversation = get_openai_conversation_by_id( - session, conversation_id, current_user.project_id + """ + Soft delete an conversation by updating flag is_deleted. + """ + delete_openai_conversation( + session=session, + conversation_id=conversation_id, + project_id=current_user.project_id, ) - if not conversation or conversation.project_id != current_user.project_id: - raise HTTPException(status_code=404, detail="Conversation not found") - success = delete_openai_conversation(session, conversation_id) - if not success: - raise HTTPException(status_code=404, detail="Conversation not found") - return APIResponse.success_response( - data={"message": "Conversation deleted successfully"} + data={"message": "Conversation deleted successfully."} ) diff --git a/backend/app/crud/openai_conversation.py b/backend/app/crud/openai_conversation.py index a5ff6e4a2..1f01f69f8 100644 --- a/backend/app/crud/openai_conversation.py +++ b/backend/app/crud/openai_conversation.py @@ -1,7 +1,14 @@ +import logging + +from fastapi import HTTPException from sqlmodel import Session, and_, select from typing import List, Optional + +from app.core.util import now from app.models import OpenAI_Conversation, OpenAIConversationCreate +logger = logging.getLogger(__name__) + def create_openai_conversation( session: Session, data: OpenAIConversationCreate @@ -74,11 +81,30 @@ def 
get_all_openai_conversations( return results -def delete_openai_conversation(session: Session, conversation_id: int) -> bool: - conversation = get_openai_conversation_by_id(session, conversation_id) - if not conversation: - return False +def delete_openai_conversation( + session: Session, + conversation_id: int, + project_id: int, +) -> OpenAI_Conversation: + """ + Soft delete an conversation by updating is_deleted flag. + """ + existing_conversation = get_openai_conversation_by_id( + session, conversation_id, project_id + ) + if not existing_conversation: + logger.warning( + f"[delete_openai_conversation] Conversation {conversation_id} not found | project_id: {project_id}" + ) + raise HTTPException(status_code=404, detail="Conversation not found.") - session.delete(conversation) + existing_conversation.is_deleted = True + existing_conversation.deleted_at = now() + session.add(existing_conversation) session.commit() - return True + session.refresh(existing_conversation) + + logger.info( + f"[delete_openai_conversation] Conversation {conversation_id} soft deleted successfully. 
| project_id: {project_id}" + ) + return existing_conversation diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index 4b6963b4b..930a1a9e7 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -2,8 +2,12 @@ from fastapi.testclient import TestClient from sqlmodel import Session -from app.crud.openai_conversation import create_openai_conversation +from app.crud.openai_conversation import ( + create_openai_conversation, + get_openai_conversation_by_id, +) from app.models.openai_conversation import OpenAIConversationCreate + from app.models import APIKeyPublic @@ -128,8 +132,6 @@ def test_delete_conversation_by_id( client: TestClient, db: Session, user_api_key: APIKeyPublic ): """Test deleting a conversation by ID.""" - from app.crud.openai_conversation import get_openai_conversation_by_id - # Create a conversation first conversation_data = OpenAIConversationCreate( response_id="resp_test688080a1c52c819c937", @@ -141,6 +143,7 @@ def test_delete_conversation_by_id( organization_id=user_api_key.organization_id, ) conversation = create_openai_conversation(db, conversation_data) + print(conversation) response = client.delete( f"/api/v1/openai-conversation/{conversation.id}", headers={"X-API-KEY": user_api_key.key}, @@ -150,10 +153,6 @@ def test_delete_conversation_by_id( data = response.json() assert data["success"] is True assert "deleted successfully" in data["data"]["message"] - # Fetch from DB and check is_deleted and deleted_at - deleted_conv = get_openai_conversation_by_id(db, conversation.id) - assert deleted_conv.is_deleted is True - assert deleted_conv.deleted_at is not None def test_list_conversations( From abcda9c0a0a71a1eb65e25ec8a5e1c9de5c19930 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Fri, 25 Jul 2025 15:36:44 +0530 Subject: [PATCH 36/40] updated conf --- 
backend/app/tests/api/routes/test_responses.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/backend/app/tests/api/routes/test_responses.py b/backend/app/tests/api/routes/test_responses.py index 06513f7a7..9a96d3130 100644 --- a/backend/app/tests/api/routes/test_responses.py +++ b/backend/app/tests/api/routes/test_responses.py @@ -132,7 +132,7 @@ def test_responses_endpoint_stores_conversation( mock_get_credential, mock_openai, db, - normal_user_api_key_headers, + user_api_key_header, ): """Test that the /responses endpoint stores conversation in database.""" # Setup mock credentials @@ -165,9 +165,7 @@ def test_responses_endpoint_stores_conversation( "callback_url": "http://example.com/callback", } - response = client.post( - "/responses", json=request_data, headers=normal_user_api_key_headers - ) + response = client.post("/responses", json=request_data, headers=user_api_key_header) assert response.status_code == 200 response_json = response.json() assert response_json["success"] is True From 0d2545832a849c0e1bef8d111ba7a19bf1d23b2f Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Fri, 25 Jul 2025 15:47:13 +0530 Subject: [PATCH 37/40] updated minor comments from coderabbit --- .../versions/f5628e3a9988_add_openai_conversation_table.py | 2 +- backend/app/models/openai_conversation.py | 3 ++- backend/app/tests/api/routes/test_responses.py | 3 +-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py index 4a6f870a5..d34cee5fa 100644 --- a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py +++ b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py @@ -23,7 +23,7 @@ def upgrade(): sa.Column("id", sa.Integer(), nullable=False), sa.Column("response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.Column( - 
"ancestor_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True + "ancestor_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False ), sa.Column( "previous_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py index af0dce460..171af933d 100644 --- a/backend/app/models/openai_conversation.py +++ b/backend/app/models/openai_conversation.py @@ -5,7 +5,8 @@ class OpenAIConversationBase(SQLModel): response_id: str = Field(index=True, min_length=10) - ancestor_response_id: Optional[str] = Field(default=None, index=True) + # ancestor_response_id of first response will be itself + ancestor_response_id: str = Field(index=True) previous_response_id: Optional[str] = Field(default=None, index=True) user_question: str = Field(description="The user's input question", min_length=1) response: Optional[str] = Field(description="The assistant's response") diff --git a/backend/app/tests/api/routes/test_responses.py b/backend/app/tests/api/routes/test_responses.py index 9a96d3130..f32032c40 100644 --- a/backend/app/tests/api/routes/test_responses.py +++ b/backend/app/tests/api/routes/test_responses.py @@ -131,7 +131,6 @@ def test_responses_endpoint_stores_conversation( mock_get_assistant, mock_get_credential, mock_openai, - db, user_api_key_header, ): """Test that the /responses endpoint stores conversation in database.""" @@ -143,7 +142,7 @@ def test_responses_endpoint_stores_conversation( mock_assistant.model = "gpt-4o" mock_assistant.instructions = "Test instructions" mock_assistant.temperature = 0.1 - mock_assistant.vector_store_ids = "vs_test" + mock_assistant.vector_store_ids = ["vs_test"] mock_assistant.max_num_results = 20 mock_get_assistant.return_value = mock_assistant From b551c154299ecdf73021703555c6196dcd779a1b Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Fri, 25 Jul 2025 16:34:51 +0530 Subject: [PATCH 38/40] updated testcase --- 
backend/app/tests/api/routes/test_openai_conversation.py | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/app/tests/api/routes/test_openai_conversation.py b/backend/app/tests/api/routes/test_openai_conversation.py index 930a1a9e7..3895e9e12 100644 --- a/backend/app/tests/api/routes/test_openai_conversation.py +++ b/backend/app/tests/api/routes/test_openai_conversation.py @@ -135,6 +135,7 @@ def test_delete_conversation_by_id( # Create a conversation first conversation_data = OpenAIConversationCreate( response_id="resp_test688080a1c52c819c937", + ancestor_response_id="resp_test688080a1c52c819c937", user_question="What is the capital of France?", response="The capital of France is Paris.", model="gpt-4o", From 81a7d131daada3f57305846f0951ad7908f1acd9 Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Fri, 25 Jul 2025 16:43:11 +0530 Subject: [PATCH 39/40] reverting nullable false --- .../versions/f5628e3a9988_add_openai_conversation_table.py | 2 +- backend/app/models/openai_conversation.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py index d34cee5fa..4a6f870a5 100644 --- a/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py +++ b/backend/app/alembic/versions/f5628e3a9988_add_openai_conversation_table.py @@ -23,7 +23,7 @@ def upgrade(): sa.Column("id", sa.Integer(), nullable=False), sa.Column("response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.Column( - "ancestor_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False + "ancestor_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True ), sa.Column( "previous_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py index 171af933d..af0dce460 100644 --- 
a/backend/app/models/openai_conversation.py +++ b/backend/app/models/openai_conversation.py @@ -5,8 +5,7 @@ class OpenAIConversationBase(SQLModel): response_id: str = Field(index=True, min_length=10) - # ancestor_response_id of first response will be itself - ancestor_response_id: str = Field(index=True) + ancestor_response_id: Optional[str] = Field(default=None, index=True) previous_response_id: Optional[str] = Field(default=None, index=True) user_question: str = Field(description="The user's input question", min_length=1) response: Optional[str] = Field(description="The assistant's response") From 056dc08d6227eaff624a58fbf80c671d63bc453d Mon Sep 17 00:00:00 2001 From: Akhilesh Negi Date: Fri, 25 Jul 2025 17:57:10 +0530 Subject: [PATCH 40/40] cleanups --- backend/app/crud/openai_conversation.py | 46 +++++++++++------------ backend/app/models/__init__.py | 2 +- backend/app/models/openai_conversation.py | 2 +- 3 files changed, 25 insertions(+), 25 deletions(-) diff --git a/backend/app/crud/openai_conversation.py b/backend/app/crud/openai_conversation.py index 1f01f69f8..bd59f5fea 100644 --- a/backend/app/crud/openai_conversation.py +++ b/backend/app/crud/openai_conversation.py @@ -5,15 +5,15 @@ from typing import List, Optional from app.core.util import now -from app.models import OpenAI_Conversation, OpenAIConversationCreate +from app.models import OpenAIConversation, OpenAIConversationCreate logger = logging.getLogger(__name__) def create_openai_conversation( session: Session, data: OpenAIConversationCreate -) -> OpenAI_Conversation: - conversation = OpenAI_Conversation(**data.model_dump()) +) -> OpenAIConversation: + conversation = OpenAIConversation(**data.model_dump()) session.add(conversation) session.commit() session.refresh(conversation) @@ -22,13 +22,13 @@ def create_openai_conversation( def get_openai_conversation_by_id( session: Session, openai_conversation_id: str, project_id: int -) -> Optional[OpenAI_Conversation]: +) -> Optional[OpenAIConversation]: 
"""Get an openai_conversation by its OpenAI openai_conversation ID and project ID.""" - statement = select(OpenAI_Conversation).where( + statement = select(OpenAIConversation).where( and_( - OpenAI_Conversation.id == openai_conversation_id, - OpenAI_Conversation.project_id == project_id, - OpenAI_Conversation.is_deleted == False, + OpenAIConversation.id == openai_conversation_id, + OpenAIConversation.project_id == project_id, + OpenAIConversation.is_deleted == False, ) ) return session.exec(statement).first() @@ -36,13 +36,13 @@ def get_openai_conversation_by_id( def get_openai_conversation_by_response_id( session: Session, response_id: str, project_id: int -) -> Optional[OpenAI_Conversation]: +) -> Optional[OpenAIConversation]: """Get an openai_conversation by its OpenAI response ID and project ID.""" - statement = select(OpenAI_Conversation).where( + statement = select(OpenAIConversation).where( and_( - OpenAI_Conversation.response_id == response_id, - OpenAI_Conversation.project_id == project_id, - OpenAI_Conversation.is_deleted == False, + OpenAIConversation.response_id == response_id, + OpenAIConversation.project_id == project_id, + OpenAIConversation.is_deleted == False, ) ) return session.exec(statement).first() @@ -50,13 +50,13 @@ def get_openai_conversation_by_response_id( def get_openai_conversations_by_ancestor( session: Session, ancestor_response_id: str, project_id: int -) -> list[OpenAI_Conversation]: +) -> list[OpenAIConversation]: """Get all openai_conversations by ancestor_response_id.""" - statement = select(OpenAI_Conversation).where( + statement = select(OpenAIConversation).where( and_( - OpenAI_Conversation.ancestor_response_id == ancestor_response_id, - OpenAI_Conversation.project_id == project_id, - OpenAI_Conversation.is_deleted == False, + OpenAIConversation.ancestor_response_id == ancestor_response_id, + OpenAIConversation.project_id == project_id, + OpenAIConversation.is_deleted == False, ) ) return session.exec(statement).all() @@ -64,15 
+64,15 @@ def get_openai_conversations_by_ancestor( def get_all_openai_conversations( session: Session, project_id: int, skip: int = 0, limit: int = 100 -) -> List[OpenAI_Conversation]: +) -> List[OpenAIConversation]: """ Return all openai conversations for a given project and organization, with optional pagination. """ statement = ( - select(OpenAI_Conversation) + select(OpenAIConversation) .where( - OpenAI_Conversation.project_id == project_id, - OpenAI_Conversation.is_deleted == False, + OpenAIConversation.project_id == project_id, + OpenAIConversation.is_deleted == False, ) .offset(skip) .limit(limit) @@ -85,7 +85,7 @@ def delete_openai_conversation( session: Session, conversation_id: int, project_id: int, -) -> OpenAI_Conversation: +) -> OpenAIConversation: """ Soft delete an conversation by updating is_deleted flag. """ diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index db0bbcfd9..08fd27b2c 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -55,7 +55,7 @@ from .threads import OpenAI_Thread, OpenAIThreadBase, OpenAIThreadCreate from .openai_conversation import ( - OpenAI_Conversation, + OpenAIConversation, OpenAIConversationBase, OpenAIConversationCreate, OpenAIConversationPublic, diff --git a/backend/app/models/openai_conversation.py b/backend/app/models/openai_conversation.py index af0dce460..750609037 100644 --- a/backend/app/models/openai_conversation.py +++ b/backend/app/models/openai_conversation.py @@ -40,7 +40,7 @@ class OpenAIConversationPublic(OpenAIConversationBase): updated_at: datetime -class OpenAI_Conversation(OpenAIConversationBase, table=True): +class OpenAIConversation(OpenAIConversationBase, table=True): id: int = Field(default=None, primary_key=True) inserted_at: datetime = Field(default_factory=lambda: datetime.now(UTC)) updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC))