File tree — 1 file changed: +8 −15 lines changed
litellm/proxy/response_api_endpoints — 1 file changed: +8 −15 lines changed
Original file line number | Diff line number | Diff line change
6  6   from litellm.proxy._types import *
7  7   from litellm.proxy.auth.user_api_key_auth import UserAPIKeyAuth, user_api_key_auth
8  8   from litellm.proxy.common_request_processing import ProxyBaseLLMRequestProcessing
   9 + from litellm.types.responses.main import DeleteResponseResult
9  10
10 11  router = APIRouter()
11 12
@@ -113,7 +114,7 @@ async def responses_api(
113 114      polling_id = ResponsePollingHandler.generate_polling_id()
114 115
115 116      # Create initial state in Redis
116     -     await polling_handler.create_initial_state(
    117 +     initial_state = await polling_handler.create_initial_state(
117 118          polling_id=polling_id,
118 119          request_data=data,
119 120      )
@@ -143,15 +144,7 @@ async def responses_api(
143 144
144 145      # Return OpenAI Response object format (initial state)
145 146      # https://platform.openai.com/docs/api-reference/responses/object
146     -     return {
147     -         "id": polling_id,
148     -         "object": "response",
149     -         "status": "queued",
150     -         "output": [],
151     -         "usage": None,
152     -         "metadata": data.get("metadata", {}),
153     -         "created_at": int(datetime.now(timezone.utc).timestamp()),
154     -     }
    147 +     return initial_state
155 148
156 149      # Normal response flow
157 150      processor = ProxyBaseLLMRequestProcessing(data=data)
@@ -372,11 +365,11 @@ async def delete_response(
372 365      success = await polling_handler.delete_polling(response_id)
373 366
374 367      if success:
375     -         return {
376     -             "id": response_id,
377     -             "object": "response",
378     -             "deleted": True
379     -         }
    368 +         return DeleteResponseResult(
    369 +             id=response_id,
    370 +             object="response",
    371 +             deleted=True
    372 +         )
380 373      else:
381 374          raise HTTPException(
382 375              status_code=500,
You can’t perform that action at this time.
0 commit comments