Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
46 commits
Select commit Hold shift + click to select a range
4f5333d
first stab at responseAPI conversation memory
Jul 12, 2025
f01225a
cleanup unnecessary endpoints
Jul 12, 2025
9393a25
cleanup testcases
Jul 12, 2025
bcaeadc
cleanup testcases warnings
Jul 12, 2025
b83b544
fixing list API
Jul 12, 2025
2c129a3
first draft of updating entry in db as well
Jul 14, 2025
7a703b0
cleanups
Jul 15, 2025
2c51cdf
updating migration
Jul 15, 2025
ce3e455
updating models
Jul 15, 2025
acb8392
Merge branch 'main' into feature/response-api-conversation-memory
AkhileshNegi Jul 16, 2025
7bf27c8
Merge branch 'main' into feature/response-api-conversation-memory
AkhileshNegi Jul 21, 2025
6c9853e
fixing migration
Jul 21, 2025
5297962
fixing testcases
Jul 21, 2025
025ccd4
fixing testcases
Jul 21, 2025
f8c2176
fixing testcases
Jul 22, 2025
a5d07b3
moving from assistant_response to response
Jul 22, 2025
0f3c080
fix pre commit changes
Jul 22, 2025
56fa02b
migration msg
Jul 22, 2025
c3d6fe1
migration msg
Jul 22, 2025
2622931
using built in types
Jul 22, 2025
062d079
cleanups
Jul 22, 2025
2171457
fixing few review comments
Jul 23, 2025
23e15b9
updated testcases
Jul 23, 2025
7b642c1
cleanups
Jul 23, 2025
ec92552
Merge branch 'main' into feature/response-api-conversation-memory
AkhileshNegi Jul 24, 2025
533a3f2
remove OpenAIConversationUpdate
Jul 24, 2025
de51df0
cleanups
Jul 24, 2025
191ca69
removing update conversation testcases and cleanups
Jul 24, 2025
8c78d68
cleanups
Jul 24, 2025
2050db6
cleaning up models and migrations
Jul 24, 2025
746b037
cleanups
Jul 24, 2025
b69365f
updated migration heads
Jul 24, 2025
838a3d5
added soft delete and auth logic
Jul 24, 2025
7e5ad21
cleanups
Jul 24, 2025
2075d5f
Merge branch 'main' into feature/response-api-conversation-memory
AkhileshNegi Jul 25, 2025
6e79664
fixing testcases
Jul 25, 2025
d1143f2
fixing few more testcases and API
Jul 25, 2025
9cd927b
fixing ancestor testcases
Jul 25, 2025
306040b
updated testcases
Jul 25, 2025
abcda9c
updated conf
Jul 25, 2025
0d25458
updated minor comments from coderabbit
Jul 25, 2025
b551c15
updated testcase
Jul 25, 2025
81a7d13
reverting nullable false
Jul 25, 2025
e20ce07
Merge branch 'main' into feature/response-api-conversation-memory
AkhileshNegi Jul 25, 2025
056dc08
cleanups
Jul 25, 2025
02d3831
Merge branch 'feature/response-api-conversation-memory' of github.com…
Jul 25, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
"""add openai_conversation table

Revision ID: ff579a9523c5
Revises: e8ee93526b37
Create Date: 2025-07-24 12:16:51.311014

"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes


# revision identifiers, used by Alembic.
revision = "ff579a9523c5"
down_revision = "e8ee93526b37"
branch_labels = None
depends_on = None


def upgrade():
    """Create the ``openai_conversation`` table and its lookup indexes.

    Foreign keys to ``project`` and ``organization`` are declared inline on
    the table with ``ondelete="CASCADE"`` so conversations are removed when
    their parent project/organization rows are deleted.
    """
    op.create_table(
        "openai_conversation",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "ancestor_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column(
            "previous_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column("user_question", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("response", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column("model", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("assistant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("project_id", sa.Integer(), nullable=False),
        sa.Column("organization_id", sa.Integer(), nullable=False),
        sa.Column("is_deleted", sa.Boolean(), nullable=False),
        sa.Column("inserted_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(
            ["organization_id"], ["organization.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"),
    )
    # Non-unique indexes on the response-id columns used for CRUD lookups.
    op.create_index(
        op.f("ix_openai_conversation_ancestor_response_id"),
        "openai_conversation",
        ["ancestor_response_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_openai_conversation_previous_response_id"),
        "openai_conversation",
        ["previous_response_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_openai_conversation_response_id"),
        "openai_conversation",
        ["response_id"],
        unique=False,
    )
    # NOTE: earlier drafts also called op.create_foreign_key() here for
    # project_id/organization_id.  Those calls created a second, duplicate
    # FK constraint per column (and without ON DELETE CASCADE), so they
    # have been removed — the inline ForeignKeyConstraints above suffice.


def downgrade():
    """Drop the ``openai_conversation`` indexes and table (reverse of upgrade)."""
    # Drop the three lookup indexes first, then the table itself.
    for index_name in (
        "ix_openai_conversation_response_id",
        "ix_openai_conversation_previous_response_id",
        "ix_openai_conversation_ancestor_response_id",
    ):
        op.drop_index(op.f(index_name), table_name="openai_conversation")
    op.drop_table("openai_conversation")
2 changes: 2 additions & 0 deletions backend/app/api/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
utils,
onboarding,
credentials,
openai_conversation,
)
from app.core.config import settings

Expand All @@ -27,6 +28,7 @@
api_router.include_router(documents.router)
api_router.include_router(login.router)
api_router.include_router(onboarding.router)
api_router.include_router(openai_conversation.router)
api_router.include_router(organization.router)
api_router.include_router(project.router)
api_router.include_router(project_user.router)
Expand Down
126 changes: 126 additions & 0 deletions backend/app/api/routes/openai_conversation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
from sqlmodel import Session
from fastapi import APIRouter, Depends, HTTPException, Query, Path

from app.api.deps import get_db, get_current_user_org, get_current_user_org_project
from app.models import UserOrganization, UserProjectOrg
from app.models.openai_conversation import OpenAIConversationPublic
from app.crud.openai_conversation import (
get_openai_conversation_by_id,
get_openai_conversation_by_response_id,
get_openai_conversations_by_ancestor,
get_all_openai_conversations,
delete_openai_conversation,
)
from app.utils import APIResponse

# Router exposing read/delete endpoints for stored OpenAI conversations;
# included by the application's main API router.
router = APIRouter(prefix="/openai-conversation", tags=["openai_conversation"])


@router.get(
    "/list",
    response_model=APIResponse[list[OpenAIConversationPublic]],
    summary="List all conversations",
    description="Retrieve all OpenAI conversations with pagination support",
)
async def list_conversations(
    session: Session = Depends(get_db),
    current_user: UserProjectOrg = Depends(get_current_user_org_project),
    skip: int = Query(0, ge=0, description="Number of records to skip"),
    limit: int = Query(
        100, gt=0, le=100, description="Maximum number of records to return"
    ),
):
    """Return the current project's conversations as a paginated list.

    Records are fetched scoped to the caller's project and converted to the
    public schema via ``model_validate`` before being wrapped in the standard
    API envelope.
    """
    records = get_all_openai_conversations(
        session=session, project_id=current_user.project_id, skip=skip, limit=limit
    )
    public_records = [
        OpenAIConversationPublic.model_validate(record) for record in records
    ]
    return APIResponse.success_response(data=public_records)


@router.get(
    "/{conversation_id}",
    response_model=APIResponse[OpenAIConversationPublic],
    summary="Get conversation by ID",
    description="Retrieve a conversation by its database ID",
)
async def get_conversation_by_id(
    conversation_id: int = Path(..., description="The conversation ID"),
    session: Session = Depends(get_db),
    current_user: UserProjectOrg = Depends(get_current_user_org_project),
):
    """Get a conversation by its ID, only if it belongs to the user's project.

    Raises:
        HTTPException: 404 if no conversation with this ID exists in the
            caller's project.
    """
    conversation = get_openai_conversation_by_id(
        session, conversation_id, current_user.project_id
    )
    if not conversation:
        raise HTTPException(
            status_code=404, detail=f"Conversation with ID {conversation_id} not found."
        )
    # Convert the ORM object to the public schema, matching the other
    # endpoints in this module that already apply model_validate().
    return APIResponse.success_response(
        OpenAIConversationPublic.model_validate(conversation)
    )
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Apply proper model validation for consistency.

The endpoint returns the conversation directly without applying model_validate() like other endpoints do. This inconsistency could lead to serialization issues.

Apply model validation for consistency:

-    return APIResponse.success_response(conversation)
+    return APIResponse.success_response(
+        OpenAIConversationPublic.model_validate(conversation)
+    )
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
return APIResponse.success_response(conversation)
return APIResponse.success_response(
OpenAIConversationPublic.model_validate(conversation)
)
🤖 Prompt for AI Agents
In backend/app/api/routes/openai_conversation.py at line 61, the code returns
the conversation object directly without applying model validation, which is
inconsistent with other endpoints and may cause serialization issues. To fix
this, apply the model_validate() method to the conversation object before
returning it in the APIResponse.success_response call to ensure proper
serialization and consistency.



@router.get(
    "/response/{response_id}",
    response_model=APIResponse[OpenAIConversationPublic],
    summary="Get conversation by response ID",
    description="Retrieve a conversation by its response_id",
)
async def get_conversation_by_response_id(
    response_id: str = Path(..., description="The response ID"),
    session: Session = Depends(get_db),
    current_user: UserProjectOrg = Depends(get_current_user_org_project),
):
    """Get a conversation by its response_id, only if it belongs to the user's project.

    Raises:
        HTTPException: 404 if no conversation with this response_id exists in
            the caller's project.
    """
    conversation = get_openai_conversation_by_response_id(
        session, response_id, current_user.project_id
    )
    if not conversation:
        raise HTTPException(
            status_code=404,
            detail=f"Conversation with response ID {response_id} not found.",
        )
    # Convert the ORM object to the public schema, matching the other
    # endpoints in this module that already apply model_validate().
    return APIResponse.success_response(
        OpenAIConversationPublic.model_validate(conversation)
    )
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Apply proper model validation for consistency.

Same issue as the previous endpoint - missing model validation.

-    return APIResponse.success_response(conversation)
+    return APIResponse.success_response(
+        OpenAIConversationPublic.model_validate(conversation)
+    )
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
return APIResponse.success_response(conversation)
return APIResponse.success_response(
OpenAIConversationPublic.model_validate(conversation)
)
🤖 Prompt for AI Agents
In backend/app/api/routes/openai_conversation.py at line 84, the return
statement lacks proper model validation for the conversation object. To fix
this, ensure that the conversation data is validated against the appropriate
Pydantic model or schema before returning it in the
APIResponse.success_response. This will maintain consistency with other
endpoints and guarantee the response adheres to the expected data structure.



@router.get(
    "/ancestor/{ancestor_response_id}",
    response_model=APIResponse[list[OpenAIConversationPublic]],
    summary="Get conversations by ancestor",
    description="Retrieve all conversations that have the specified ancestor_response_id",
)
async def get_conversations_by_ancestor(
    ancestor_response_id: str = Path(..., description="The ancestor ID"),
    session: Session = Depends(get_db),
    current_user: UserProjectOrg = Depends(get_current_user_org_project),
):
    """Get all conversations with the given ancestor_response_id in the user's project.

    Raises:
        HTTPException: 404 if no conversations with this ancestor exist in
            the caller's project.
    """
    # CRUD helper returns a list; name the local accordingly.
    conversations = get_openai_conversations_by_ancestor(
        session, ancestor_response_id, current_user.project_id
    )
    if not conversations:
        raise HTTPException(
            status_code=404,
            detail=f"Conversation with ancestor ID {ancestor_response_id} not found.",
        )
    # Validate each ORM object into the public schema, consistent with the
    # list endpoint above.
    return APIResponse.success_response(
        [OpenAIConversationPublic.model_validate(conv) for conv in conversations]
    )
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Apply proper model validation for consistency.

The endpoint returns a list of conversations without model validation. For consistency with the list endpoint, apply validation.

-    return APIResponse.success_response(conversation)
+    return APIResponse.success_response(
+        [OpenAIConversationPublic.model_validate(conv) for conv in conversation]
+    )

Committable suggestion skipped: line range outside the PR's diff.

🤖 Prompt for AI Agents
In backend/app/api/routes/openai_conversation.py at line 107, the return
statement sends a list of conversations without applying model validation. To
fix this, wrap the conversation list with the appropriate Pydantic model or
schema used for validation in the list endpoint, ensuring the response data is
validated and consistent across endpoints.



@router.delete("/{conversation_id}", response_model=APIResponse)
def delete_conversation_by_id(
    conversation_id: int = Path(..., description="The conversation ID"),
    session: Session = Depends(get_db),
    current_user: UserProjectOrg = Depends(get_current_user_org_project),
):
    """Soft delete a conversation by setting its ``is_deleted`` flag."""
    delete_openai_conversation(
        session=session,
        conversation_id=conversation_id,
        project_id=current_user.project_id,
    )
    payload = {"message": "Conversation deleted successfully."}
    return APIResponse.success_response(data=payload)
57 changes: 51 additions & 6 deletions backend/app/api/routes/responses.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,15 @@
import openai
from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException
from openai import OpenAI
from pydantic import BaseModel, Extra
from pydantic import BaseModel, ConfigDict
from sqlmodel import Session

from app.api.deps import get_db, get_current_user_org_project
from app.api.routes.threads import send_callback
from app.crud.assistants import get_assistant_by_id
from app.crud.credentials import get_provider_credential
from app.models import UserProjectOrg
from app.crud.openai_conversation import create_openai_conversation
from app.models import UserProjectOrg, OpenAIConversationCreate
from app.utils import APIResponse, mask_string
from app.core.langfuse.langfuse import LangfuseTracer

Expand All @@ -32,8 +33,7 @@ class ResponsesAPIRequest(BaseModel):
callback_url: Optional[str] = None
response_id: Optional[str] = None

class Config:
extra = Extra.allow
model_config = ConfigDict(extra="allow")


class ResponsesSyncAPIRequest(BaseModel):
Expand Down Expand Up @@ -65,8 +65,7 @@ class _APIResponse(BaseModel):
chunks: list[FileResultChunk]
diagnostics: Optional[Diagnostics] = None

class Config:
extra = Extra.allow
model_config = ConfigDict(extra="allow")


class ResponsesAPIResponse(APIResponse[_APIResponse]):
Expand Down Expand Up @@ -98,6 +97,8 @@ def process_response(
assistant,
tracer: LangfuseTracer,
project_id: int,
organization_id: int,
session: Session,
):
"""Process a response and send callback with results, with Langfuse tracing."""
logger.info(
Expand Down Expand Up @@ -143,6 +144,27 @@ def process_response(
f"Successfully generated response: response_id={response.id}, assistant={mask_string(request.assistant_id)}, project_id={project_id}"
)

# Store conversation in database
try:
conversation_data = OpenAIConversationCreate(
response_id=response.id,
previous_response_id=request.response_id,
user_question=request.question,
response=response.output_text,
model=response.model,
assistant_id=request.assistant_id,
project_id=project_id,
organization_id=organization_id,
)
create_openai_conversation(session, conversation_data)
logger.info(
f"Conversation stored in database: response_id={response.id}, project_id={project_id}"
)
except Exception as e:
logger.error(
f"Failed to store conversation in database: {str(e)}, response_id={response.id}, project_id={project_id}"
)

tracer.end_generation(
output={
"response_id": response.id,
Expand Down Expand Up @@ -264,6 +286,8 @@ async def responses(
assistant,
tracer,
project_id,
organization_id,
_session,
)

logger.info(
Expand Down Expand Up @@ -346,6 +370,27 @@ async def responses_sync(

response_chunks = get_file_search_results(response)

# Store conversation in database
try:
conversation_data = OpenAIConversationCreate(
response_id=response.id,
previous_response_id=request.response_id,
user_question=request.question,
response=response.output_text,
model=response.model,
assistant_id=None, # Not available in sync endpoint
project_id=project_id,
organization_id=organization_id,
)
create_openai_conversation(_session, conversation_data)
logger.info(
f"Conversation stored in database: response_id={response.id}, project_id={project_id}"
)
except Exception as e:
logger.error(
f"Failed to store conversation in database: {str(e)}, response_id={response.id}, project_id={project_id}"
)

tracer.end_generation(
output={
"response_id": response.id,
Expand Down
9 changes: 9 additions & 0 deletions backend/app/crud/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,15 @@

from .thread_results import upsert_thread_result, get_thread_result

from .openai_conversation import (
create_openai_conversation,
get_openai_conversation_by_id,
get_openai_conversation_by_response_id,
get_openai_conversations_by_ancestor,
get_all_openai_conversations,
delete_openai_conversation,
)

from .assistants import (
get_assistant_by_id,
fetch_assistant_from_openai,
Expand Down
9 changes: 5 additions & 4 deletions backend/app/crud/api_key.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,11 +168,12 @@ def get_api_key_by_user_id(session: Session, user_id: int) -> APIKeyPublic | Non
"""
Retrieves the API key associated with a user by their user_id.
"""
api_key = (
session.query(APIKey)
.filter(APIKey.user_id == user_id, APIKey.is_deleted == False)
.first()
statement = (
select(APIKey)
.where(APIKey.user_id == user_id, APIKey.is_deleted == False)
.limit(1)
)
api_key = session.exec(statement).first()

if not api_key:
return None
Expand Down
Loading
Loading