Update main.py
main.py CHANGED
@@ -220,12 +220,13 @@ async def chat(request: ChatRequest, background_tasks: BackgroundTasks, api_key:
             async for chunk in stream_llm_request(api_key, llm_request):
                 full_response += chunk
                 yield chunk
+            logger.info(f"Finished chat response generation for user: {request.user_id} Full response{full_response}")
 
         # Here you might want to add logic to save the conversation or perform other background tasks
         # For example:
         # background_tasks.add_task(save_conversation, request.user_id, conversation_id, request.query, full_response)
 
-
+
         return StreamingResponse(response_generator(), media_type="text/event-stream")
 
     except Exception as e:
@@ -239,4 +240,4 @@ async def startup_event():
 
 if __name__ == "__main__":
     import uvicorn
-    uvicorn.run(app, host="0.0.0.0", port=
+    uvicorn.run(app, host="0.0.0.0", port=7860)
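For context, a minimal self-contained sketch of the endpoint pattern this commit touches. The ChatRequest model, stream_llm_request helper, llm_request shape, and exception handling are stand-ins for the real definitions elsewhere in main.py, and the api_key dependency visible in the hunk header is simplified to a plain string:

import logging

from fastapi import BackgroundTasks, FastAPI
from fastapi.responses import StreamingResponse
from pydantic import BaseModel

logger = logging.getLogger(__name__)
app = FastAPI()

class ChatRequest(BaseModel):
    user_id: str
    query: str

async def stream_llm_request(api_key: str, llm_request: dict):
    # Stand-in for the real upstream LLM call; yields response chunks.
    for chunk in ("Hello", ", ", "world"):
        yield chunk

@app.post("/chat")
async def chat(request: ChatRequest, background_tasks: BackgroundTasks):
    try:
        llm_request = {"prompt": request.query}  # assumed request shape

        async def response_generator():
            full_response = ""
            async for chunk in stream_llm_request("dummy-api-key", llm_request):
                full_response += chunk
                yield chunk
            # The line added in this commit: it runs only after the stream
            # is exhausted, so full_response holds the complete reply.
            logger.info(f"Finished chat response generation for user: {request.user_id} Full response{full_response}")

        # As the diff's comment suggests, persistence could be queued here,
        # e.g. background_tasks.add_task(save_conversation, ...) (hypothetical helper).

        return StreamingResponse(response_generator(), media_type="text/event-stream")
    except Exception as e:
        logger.exception(f"Error in chat endpoint: {e}")
        raise

Because the added logger.info call sits after the async for loop inside response_generator, it fires only once the client has consumed the final chunk; likewise, any tasks registered on background_tasks run after the StreamingResponse finishes sending.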