refactor: remove OpenAIClient implementation to streamline codebase
This commit is contained in:
@@ -1,68 +0,0 @@
|
||||
"""OpenAI client with custom MCP integration."""
|
||||
|
||||
import configparser
|
||||
import logging # Import logging
|
||||
|
||||
from openai import OpenAI
|
||||
|
||||
from mcp_manager import SyncMCPManager
|
||||
|
||||
# Get a logger for this module
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class OpenAIClient:
    """Chat client for the OpenAI API with optional MCP tool integration.

    Reads settings from ``config/config.ini``; when an ``[mcp]`` section is
    present, queries are first routed through a :class:`SyncMCPManager` and
    only fall back to the plain OpenAI chat-completions API on failure.
    """

    def __init__(self):
        """Load and validate configuration, then build the OpenAI client.

        Raises:
            Exception: if the ``[openai]`` section or its ``api_key`` entry
                is missing from ``config/config.ini``.
        """
        logger.debug("Initializing OpenAIClient...")
        self.config = configparser.ConfigParser()
        self.config.read("config/config.ini")

        # Validate configuration before touching any key directly.
        if not self.config.has_section("openai"):
            raise Exception("Missing [openai] section in config.ini")
        if not self.config["openai"].get("api_key"):
            raise Exception("Missing api_key in config.ini")

        # Configure OpenAI client. base_url is read with .get() so a missing
        # key yields None (library default endpoint) instead of raising a
        # bare KeyError — get_chat_response already treats it as optional.
        self.client = OpenAI(
            api_key=self.config["openai"]["api_key"],
            base_url=self.config["openai"].get("base_url"),
            default_headers={
                "HTTP-Referer": "https://streamlit-chat-app.com",
                "X-Title": "Streamlit Chat App",
            },
        )

        # Initialize the MCP manager only when an [mcp] section is configured.
        self.mcp_manager = None
        if self.config.has_section("mcp"):
            mcp_config_path = self.config["mcp"].get(
                "servers_json", "config/mcp_config.json"
            )
            self.mcp_manager = SyncMCPManager(mcp_config_path)

    def get_chat_response(self, messages):
        """Return a chat response for *messages*.

        Tries MCP first when configured and initializable; on MCP error (or
        when MCP is unavailable) falls back to a streaming OpenAI completion.

        Args:
            messages: OpenAI-style list of ``{"role": ..., "content": ...}``
                dicts; the last entry's content is used as the MCP query.

        Returns:
            Either the MCP response dict (non-streaming) or a streaming
            OpenAI chat-completion object.

        Raises:
            Exception: wrapping any underlying API error, with the original
                exception chained as the cause.
        """
        try:
            # Try using MCP if available.
            if self.mcp_manager and self.mcp_manager.initialize():
                logger.info("Using MCP with tools...")
                last_message = messages[-1]["content"]
                # Pass API key and base URL from config.ini.
                response = self.mcp_manager.process_query(
                    query=last_message,
                    model_name=self.config["openai"]["model"],
                    api_key=self.config["openai"]["api_key"],
                    base_url=self.config["openai"].get("base_url"),
                )

                if "error" not in response:
                    logger.debug("MCP processing successful, wrapping response.")
                    # Convert to OpenAI-compatible response format.
                    return self._wrap_mcp_response(response)

            # Fall back to standard OpenAI (lazy %-args avoid formatting
            # cost when INFO is disabled).
            logger.info(
                "Falling back to standard OpenAI API with model: %s",
                self.config["openai"]["model"],
            )
            return self.client.chat.completions.create(
                model=self.config["openai"]["model"],
                messages=messages,
                stream=True,
            )

        except Exception as e:
            error_msg = f"API Error (Code: {getattr(e, 'code', 'N/A')}): {str(e)}"
            logger.error(error_msg, exc_info=True)
            # Chain the original exception so the real cause stays visible.
            raise Exception(error_msg) from e

    def _wrap_mcp_response(self, response: dict):
        """Return the MCP response dictionary directly (for non-streaming)."""
        # No conversion needed if app.py handles dicts separately.
        return response
|
||||
Reference in New Issue
Block a user