-
Notifications
You must be signed in to change notification settings - Fork 10
Response API: Conversation Memory CRUD #281
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
4f5333d
f01225a
9393a25
bcaeadc
b83b544
2c129a3
7a703b0
2c51cdf
ce3e455
acb8392
7bf27c8
6c9853e
5297962
025ccd4
f8c2176
a5d07b3
0f3c080
56fa02b
c3d6fe1
2622931
062d079
2171457
23e15b9
7b642c1
ec92552
533a3f2
de51df0
191ca69
8c78d68
2050db6
746b037
b69365f
838a3d5
7e5ad21
2075d5f
6e79664
d1143f2
9cd927b
306040b
abcda9c
0d25458
b551c15
81a7d13
e20ce07
056dc08
02d3831
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,85 @@ | ||
| """add openai_conversation table | ||
|
|
||
| Revision ID: ff579a9523c5 | ||
| Revises: e8ee93526b37 | ||
| Create Date: 2025-07-24 12:16:51.311014 | ||
|
|
||
| """ | ||
| from alembic import op | ||
| import sqlalchemy as sa | ||
| import sqlmodel.sql.sqltypes | ||
|
|
||
|
|
||
| # revision identifiers, used by Alembic. | ||
| revision = "ff579a9523c5" | ||
| down_revision = "e8ee93526b37" | ||
| branch_labels = None | ||
| depends_on = None | ||
|
|
||
|
|
||
def upgrade():
    """Create the openai_conversation table plus its lookup indexes.

    The foreign keys to ``organization`` and ``project`` are declared inline
    via ``ForeignKeyConstraint`` (with ``ondelete="CASCADE"``) inside
    ``create_table``.  The original revision additionally issued two
    ``op.create_foreign_key(None, ...)`` calls for the same columns, which
    created duplicate constraints (without CASCADE) that ``downgrade()``
    never removed — those redundant calls are dropped here.
    """
    op.create_table(
        "openai_conversation",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "ancestor_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column(
            "previous_response_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column("user_question", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("response", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column("model", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("assistant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("project_id", sa.Integer(), nullable=False),
        sa.Column("organization_id", sa.Integer(), nullable=False),
        sa.Column("is_deleted", sa.Boolean(), nullable=False),
        sa.Column("inserted_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(
            ["organization_id"], ["organization.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"),
    )
    # Non-unique indexes on the response-chain columns used for lookups.
    op.create_index(
        op.f("ix_openai_conversation_ancestor_response_id"),
        "openai_conversation",
        ["ancestor_response_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_openai_conversation_previous_response_id"),
        "openai_conversation",
        ["previous_response_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_openai_conversation_response_id"),
        "openai_conversation",
        ["response_id"],
        unique=False,
    )
|
|
||
|
|
||
def downgrade():
    """Drop the openai_conversation indexes, then the table itself."""
    # Indexes are dropped in the reverse order of their creation in upgrade().
    for column in ("response_id", "previous_response_id", "ancestor_response_id"):
        op.drop_index(
            op.f(f"ix_openai_conversation_{column}"),
            table_name="openai_conversation",
        )
    op.drop_table("openai_conversation")
| Original file line number | Diff line number | Diff line change | ||||||||
|---|---|---|---|---|---|---|---|---|---|---|
| @@ -0,0 +1,126 @@ | ||||||||||
| from sqlmodel import Session | ||||||||||
| from fastapi import APIRouter, Depends, HTTPException, Query, Path | ||||||||||
|
|
||||||||||
| from app.api.deps import get_db, get_current_user_org, get_current_user_org_project | ||||||||||
| from app.models import UserOrganization, UserProjectOrg | ||||||||||
| from app.models.openai_conversation import OpenAIConversationPublic | ||||||||||
| from app.crud.openai_conversation import ( | ||||||||||
| get_openai_conversation_by_id, | ||||||||||
| get_openai_conversation_by_response_id, | ||||||||||
| get_openai_conversations_by_ancestor, | ||||||||||
| get_all_openai_conversations, | ||||||||||
| delete_openai_conversation, | ||||||||||
| ) | ||||||||||
| from app.utils import APIResponse | ||||||||||
|
|
||||||||||
| router = APIRouter(prefix="/openai-conversation", tags=["openai_conversation"]) | ||||||||||
|
|
||||||||||
|
|
||||||||||
@router.get(
    "/list",
    response_model=APIResponse[list[OpenAIConversationPublic]],
    summary="List all conversations",
    description="Retrieve all OpenAI conversations with pagination support",
)
async def list_conversations(
    session: Session = Depends(get_db),
    current_user: UserProjectOrg = Depends(get_current_user_org_project),
    skip: int = Query(0, ge=0, description="Number of records to skip"),
    limit: int = Query(
        100, gt=0, le=100, description="Maximum number of records to return"
    ),
):
    """Return a paginated list of conversations scoped to the caller's project."""
    records = get_all_openai_conversations(
        session=session,
        project_id=current_user.project_id,
        skip=skip,
        limit=limit,
    )
    # Convert ORM rows to the public schema before serializing the response.
    payload = [OpenAIConversationPublic.model_validate(record) for record in records]
    return APIResponse.success_response(data=payload)
|
|
||||||||||
|
|
||||||||||
@router.get(
    "/{conversation_id}",
    response_model=APIResponse[OpenAIConversationPublic],
    summary="Get conversation by ID",
    description="Retrieve a conversation by its database ID",
)
async def get_conversation_by_id(
    conversation_id: int = Path(..., description="The conversation ID"),
    session: Session = Depends(get_db),
    current_user: UserProjectOrg = Depends(get_current_user_org_project),
):
    """Get a conversation by its ID, only if it belongs to the user's project.

    Raises:
        HTTPException: 404 if no conversation with this ID exists in the project.
    """
    conversation = get_openai_conversation_by_id(
        session, conversation_id, current_user.project_id
    )
    if not conversation:
        raise HTTPException(
            status_code=404, detail=f"Conversation with ID {conversation_id} not found."
        )
    # Validate the ORM object into the public schema, matching the list endpoint.
    return APIResponse.success_response(
        OpenAIConversationPublic.model_validate(conversation)
    )
|
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. 🛠️ Refactor suggestion Apply proper model validation for consistency. The endpoint returns the conversation directly without applying Apply model validation for consistency: - return APIResponse.success_response(conversation)
+ return APIResponse.success_response(
+ OpenAIConversationPublic.model_validate(conversation)
+ )📝 Committable suggestion
Suggested change
🤖 Prompt for AI Agents |
||||||||||
|
|
||||||||||
|
|
||||||||||
@router.get(
    "/response/{response_id}",
    response_model=APIResponse[OpenAIConversationPublic],
    summary="Get conversation by response ID",
    description="Retrieve a conversation by its response_id",
)
async def get_conversation_by_response_id(
    response_id: str = Path(..., description="The response ID"),
    session: Session = Depends(get_db),
    current_user: UserProjectOrg = Depends(get_current_user_org_project),
):
    """Get a conversation by its response_id, only if it belongs to the user's project.

    Raises:
        HTTPException: 404 if no conversation with this response_id exists
            in the project.
    """
    conversation = get_openai_conversation_by_response_id(
        session, response_id, current_user.project_id
    )
    if not conversation:
        raise HTTPException(
            status_code=404,
            detail=f"Conversation with response ID {response_id} not found.",
        )
    # Validate the ORM object into the public schema, matching the list endpoint.
    return APIResponse.success_response(
        OpenAIConversationPublic.model_validate(conversation)
    )
|
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. 🛠️ Refactor suggestion Apply proper model validation for consistency. Same issue as the previous endpoint - missing model validation. - return APIResponse.success_response(conversation)
+ return APIResponse.success_response(
+ OpenAIConversationPublic.model_validate(conversation)
+ )📝 Committable suggestion
Suggested change
🤖 Prompt for AI Agents |
||||||||||
|
|
||||||||||
|
|
||||||||||
@router.get(
    "/ancestor/{ancestor_response_id}",
    response_model=APIResponse[list[OpenAIConversationPublic]],
    summary="Get conversations by ancestor",
    description="Retrieve all conversations that have the specified ancestor_response_id",
)
async def get_conversations_by_ancestor(
    ancestor_response_id: str = Path(..., description="The ancestor ID"),
    session: Session = Depends(get_db),
    current_user: UserProjectOrg = Depends(get_current_user_org_project),
):
    """Get all conversations with the given ancestor_response_id in the user's project.

    Raises:
        HTTPException: 404 if no conversations with this ancestor exist
            in the project.
    """
    # This helper returns a collection, so use a plural name (the original
    # bound it to a singular `conversation`, which was misleading).
    conversations = get_openai_conversations_by_ancestor(
        session, ancestor_response_id, current_user.project_id
    )
    if not conversations:
        raise HTTPException(
            status_code=404,
            detail=f"Conversation with ancestor ID {ancestor_response_id} not found.",
        )
    # Validate each ORM object into the public schema, matching the list endpoint.
    return APIResponse.success_response(
        [OpenAIConversationPublic.model_validate(conv) for conv in conversations]
    )
|
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. 🛠️ Refactor suggestion Apply proper model validation for consistency. The endpoint returns a list of conversations without model validation. For consistency with the list endpoint, apply validation. - return APIResponse.success_response(conversation)
+ return APIResponse.success_response(
+ [OpenAIConversationPublic.model_validate(conv) for conv in conversation]
+ )
🤖 Prompt for AI Agents |
||||||||||
|
|
||||||||||
|
|
||||||||||
@router.delete("/{conversation_id}", response_model=APIResponse)
def delete_conversation_by_id(
    conversation_id: int = Path(..., description="The conversation ID"),
    session: Session = Depends(get_db),
    current_user: UserProjectOrg = Depends(get_current_user_org_project),
):
    """Soft-delete a conversation by setting its is_deleted flag."""
    # Deletion is scoped to the caller's project via project_id.
    delete_openai_conversation(
        session=session,
        conversation_id=conversation_id,
        project_id=current_user.project_id,
    )
    return APIResponse.success_response(
        data={"message": "Conversation deleted successfully."}
    )
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Are there cases where the objects returned from the DB might not match the expected schema, hence the need for model_validate() here, and also in almost every endpoint?
Uh oh!
There was an error while loading. Please reload this page.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
It's common practice to use `model_validate` to convert ORM objects into Pydantic models in FastAPI. We are already using it in other routes as well.