fix: improve error handling and logging in OpenAI client and chat message processing
src/app.py | 27
@@ -12,22 +12,29 @@ def display_chat_messages():
 def handle_user_input():
     if prompt := st.chat_input("Type your message..."):
+        print(f"User input received: {prompt}")  # Debug log
         st.session_state.messages.append({"role": "user", "content": prompt})
         with st.chat_message("user"):
             st.markdown(prompt)
 
-        with st.chat_message("assistant"):
-            response_placeholder = st.empty()
-            full_response = ""
-
-            client = OpenAIClient()
-            for chunk in client.get_chat_response(st.session_state.messages):
-                if chunk.choices[0].delta.content:
-                    full_response += chunk.choices[0].delta.content
-                    response_placeholder.markdown(full_response + "▌")
-
-            response_placeholder.markdown(full_response)
-            st.session_state.messages.append({"role": "assistant", "content": full_response})
+        try:
+            with st.chat_message("assistant"):
+                response_placeholder = st.empty()
+                full_response = ""
+
+                client = OpenAIClient()
+                print("Calling OpenAI API...")  # Debug log
+                for chunk in client.get_chat_response(st.session_state.messages):
+                    if chunk.choices[0].delta.content:
+                        full_response += chunk.choices[0].delta.content
+                        response_placeholder.markdown(full_response + "▌")
+
+                response_placeholder.markdown(full_response)
+                st.session_state.messages.append({"role": "assistant", "content": full_response})
+                print("API call completed successfully")  # Debug log
+        except Exception as e:
+            st.error(f"Error processing message: {str(e)}")
+            print(f"Error details: {str(e)}")  # Debug log
 
 
 def main():
     st.title("Streamlit Chat App")
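For context, handle_user_input() reads and appends to st.session_state.messages, and the hunk header references display_chat_messages(), which this diff does not touch. A minimal sketch of how that surrounding code typically looks in a Streamlit chat app follows; it is assumed for illustration, not taken from this commit.

import streamlit as st

# Assumed surrounding code in src/app.py -- not part of this diff.
if "messages" not in st.session_state:
    st.session_state.messages = []  # chat history read by handle_user_input()

def display_chat_messages():
    # Re-render the stored conversation on each Streamlit rerun.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

Because the new try block also wraps the OpenAIClient() construction, configuration errors raised by the client (see below) surface in the UI via st.error rather than as an unhandled exception.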
@@ -1,4 +1,3 @@
-import os
 import configparser
 from openai import OpenAI
 
@@ -7,29 +6,34 @@ class OpenAIClient:
         self.config = configparser.ConfigParser()
         self.config.read('config/config.ini')
 
-        # Configure OpenAI client with OpenRouter-specific headers
+        # Validate configuration
+        if not self.config.has_section('openai'):
+            raise Exception("Missing [openai] section in config.ini")
+        if not self.config['openai'].get('api_key'):
+            raise Exception("Missing api_key in config.ini")
+
+        # Configure OpenAI client
         self.client = OpenAI(
             api_key=self.config['openai']['api_key'],
             base_url=self.config['openai']['base_url'],
             default_headers={
-                "HTTP-Referer": "https://streamlit-chat-app.com",  # Required by OpenRouter
-                "X-Title": "Streamlit Chat App"  # Optional, helps with analytics
+                "HTTP-Referer": "https://streamlit-chat-app.com",
+                "X-Title": "Streamlit Chat App"
            }
         )
 
     def get_chat_response(self, messages):
         try:
-            # Ensure messages are correctly formatted
-            formatted_messages = [{"role": msg["role"], "content": msg["content"]} for msg in messages]
+            print(f"Sending request to {self.config['openai']['base_url']}")  # Debug log
+            print(f"Using model: {self.config['openai']['model']}")  # Debug log
 
-            # Make API request
             response = self.client.chat.completions.create(
                 model=self.config['openai']['model'],
-                messages=formatted_messages,
+                messages=messages,
                 stream=True
             )
             return response
         except Exception as e:
-            # Enhanced error logging
-            print(f"Error details: {e}")
-            raise Exception(f"OpenAI API error: {str(e)}")
+            error_msg = f"API Error (Code: {getattr(e, 'code', 'N/A')}): {str(e)}"
+            print(error_msg)  # Debug log
+            raise Exception(error_msg)
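The client reads config/config.ini, and the new validation requires an [openai] section with a non-empty api_key; base_url and model are also read when building the client and the request. A small helper sketch that writes a config in that shape is shown below; the values are placeholders and assumptions, not taken from the repository.

import configparser
import os

# Hypothetical helper -- writes the config/config.ini layout that
# OpenAIClient.__init__ reads and the new checks validate.
config = configparser.ConfigParser()
config["openai"] = {
    "api_key": "sk-or-...",                      # placeholder; must be non-empty to pass the new check
    "base_url": "https://openrouter.ai/api/v1",  # assumed OpenRouter-style endpoint
    "model": "openai/gpt-4o-mini",               # assumed model id accepted by that endpoint
}
os.makedirs("config", exist_ok=True)
with open("config/config.ini", "w") as f:
    config.write(f)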
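The new __init__ checks can be exercised without network access, since they raise before the OpenAI client is constructed. A hedged test sketch follows, assuming the class is importable from a module named openai_client (the file name is not shown above) and that tests run under pytest.

import pytest
from openai_client import OpenAIClient  # assumed module name

def test_missing_openai_section(tmp_path, monkeypatch):
    # The class reads the relative path config/config.ini, so point the
    # working directory at a temp tree whose config lacks an [openai] section.
    (tmp_path / "config").mkdir()
    (tmp_path / "config" / "config.ini").write_text("[other]\nkey = value\n")
    monkeypatch.chdir(tmp_path)
    with pytest.raises(Exception, match=r"Missing \[openai\] section"):
        OpenAIClient()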