fix: improve handling of OpenAI responses and simplify MCP response wrapping
@@ -37,10 +37,12 @@ def handle_user_input():
         response = st.session_state.client.get_chat_response(st.session_state.messages)

         # Handle both MCP and standard OpenAI responses
-        if hasattr(response, "__iter__"):
+        # Check if it's NOT a dict (assuming stream is not a dict)
+        if not isinstance(response, dict):
             # Standard OpenAI streaming response
             for chunk in response:
-                if chunk.choices[0].delta.content:
+                # Ensure chunk has choices and delta before accessing
+                if chunk.choices and chunk.choices[0].delta and chunk.choices[0].delta.content:
                     full_response += chunk.choices[0].delta.content
                     response_placeholder.markdown(full_response + "▌")
         else:
@@ -63,10 +63,6 @@ class OpenAIClient:
             raise Exception(error_msg)

     def _wrap_mcp_response(self, response: dict):
-        """Convert MCP response to OpenAI-compatible format"""
-        # Create a generator to simulate streaming response
-        def response_generator():
-            yield {"choices": [{"delta": {"content": response.get("assistant_text", "")}}]}
-
-        return response_generator()
-
+        """Return the MCP response dictionary directly (for non-streaming)."""
+        # No conversion needed if app.py handles dicts separately
+        return response
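
Note: a minimal, hypothetical sketch (not code from this repository) of how the two response shapes above can be consumed side by side. The helper name render_response and its parameters are illustrative only; the "assistant_text" key comes from the wrapper this commit simplifies.

def render_response(response, response_placeholder):
    # Hypothetical helper mirroring the branching in handle_user_input()
    full_response = ""
    if not isinstance(response, dict):
        # Standard OpenAI streaming response: accumulate delta chunks
        for chunk in response:
            if chunk.choices and chunk.choices[0].delta and chunk.choices[0].delta.content:
                full_response += chunk.choices[0].delta.content
                response_placeholder.markdown(full_response + "▌")
    else:
        # MCP response: _wrap_mcp_response now returns the dict unchanged
        full_response = response.get("assistant_text", "")
    response_placeholder.markdown(full_response)
    return full_response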