fix: improve handling of OpenAI responses and simplify MCP response wrapping

This commit is contained in:
2025-03-26 08:11:51 +00:00
parent 845f2e77dd
commit a7d5a4cb33
2 changed files with 7 additions and 9 deletions

View File

@@ -37,10 +37,12 @@ def handle_user_input():
    response = st.session_state.client.get_chat_response(st.session_state.messages)
    # Handle both MCP and standard OpenAI responses
-   if hasattr(response, "__iter__"):
+   # Check if it's NOT a dict (assuming stream is not a dict)
+   if not isinstance(response, dict):
        # Standard OpenAI streaming response
        for chunk in response:
-           if chunk.choices[0].delta.content:
+           # Ensure chunk has choices and delta before accessing
+           if chunk.choices and chunk.choices[0].delta and chunk.choices[0].delta.content:
                full_response += chunk.choices[0].delta.content
                response_placeholder.markdown(full_response + "")
    else:

View File

@@ -63,10 +63,6 @@ class OpenAIClient:
            raise Exception(error_msg)

    def _wrap_mcp_response(self, response: dict):
-       """Convert MCP response to OpenAI-compatible format"""
-       # Create a generator to simulate streaming response
-       def response_generator():
-           yield {"choices": [{"delta": {"content": response.get("assistant_text", "")}}]}
-       return response_generator()
+       """Return the MCP response dictionary directly (for non-streaming)."""
+       # No conversion needed if app.py handles dicts separately
+       return response