feat: implement custom MCP client and integrate with OpenAI API for enhanced chat functionality
This commit is contained in:
42
src/app.py
42
src/app.py
@@ -1,46 +1,68 @@
|
||||
import atexit
|
||||
|
||||
import streamlit as st
|
||||
|
||||
from openai_client import OpenAIClient
|
||||
|
||||
|
||||
def init_session_state():
    """Ensure the chat history and the OpenAI client exist in session state.

    Idempotent: safe to call on every Streamlit rerun. On first run it also
    registers an atexit hook so MCP server processes (if the client spawned
    any) are shut down when the Python process exits.
    """
    st.session_state.setdefault("messages", [])
    if "client" not in st.session_state:
        st.session_state.client = OpenAIClient()
        # Clean up MCP servers on interpreter exit, if the client has them.
        mcp_manager = getattr(st.session_state.client, "mcp_manager", None)
        if mcp_manager is not None:
            atexit.register(mcp_manager.shutdown)
|
||||
|
||||
|
||||
def display_chat_messages():
    """Render every message stored in the session chat history."""
    for entry in st.session_state.messages:
        role, content = entry["role"], entry["content"]
        with st.chat_message(role):
            st.markdown(content)
|
||||
|
||||
|
||||
def handle_user_input():
    """Read a chat prompt, query the model once, and display the reply.

    Fixes over the previous version:
      * The prompt used to trigger TWO API calls — one through a throwaway
        ``OpenAIClient()`` and a second through the session client — so the
        assistant text was requested and accumulated twice, and the extra
        client's MCP servers were never registered for shutdown. Now the
        single session-scoped client from ``init_session_state`` is used.
      * Dispatch on the response type used ``hasattr(response, "__iter__")``,
        but a ``dict`` is iterable, so MCP responses were mis-routed into the
        streaming branch. ``isinstance(response, dict)`` is checked instead.
      * Stray ``print`` debug logs removed.
    """
    prompt = st.chat_input("Type your message...")
    if not prompt:
        return

    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    try:
        with st.chat_message("assistant"):
            response_placeholder = st.empty()
            full_response = ""

            # Single call via the client created in init_session_state —
            # do NOT construct a fresh OpenAIClient here.
            response = st.session_state.client.get_chat_response(
                st.session_state.messages
            )

            if isinstance(response, dict):
                # MCP non-streaming response: the full reply arrives at once.
                full_response = response.get("assistant_text", "")
            else:
                # Standard OpenAI streaming response: accumulate deltas and
                # show a cursor while text is still arriving.
                for chunk in response:
                    delta = chunk.choices[0].delta.content
                    if delta:
                        full_response += delta
                        response_placeholder.markdown(full_response + "▌")

            # Final render without the streaming cursor.
            response_placeholder.markdown(full_response)

        st.session_state.messages.append(
            {"role": "assistant", "content": full_response}
        )
    except Exception as e:
        # Surface the failure in the UI; the user's prompt stays in history
        # so they can retry without retyping.
        st.error(f"Error processing message: {str(e)}")
        print(f"Error details: {str(e)}")  # Debug log
|
||||
|
||||
|
||||
def main():
    """Entry point: draw the page title, then run the chat pipeline."""
    st.title("Streamlit Chat App")
    # Order matters: state must exist before history renders and input runs.
    for step in (init_session_state, display_chat_messages, handle_user_input):
        step()
|
||||
|
||||
|
||||
# Run the app when executed directly (e.g. `streamlit run src/app.py`).
if __name__ == "__main__":
    main()
|
||||
|
||||
Reference in New Issue
Block a user