fix: format JSON and markdown files for consistency

This commit is contained in:
2025-03-25 18:25:36 +00:00
parent 38ba3fa69b
commit 9c0dca7b15
3 changed files with 32 additions and 32 deletions

View File

@@ -29,27 +29,27 @@ The OpenAIClient will parse this into a format Dolphin MCP can use:
def parse_mcp_config(self):
if not self.config.has_section('mcp'):
return None
mcp_config = {"mcpServers": {}, "models": []}
# Check if MCP is enabled
if not self.config['mcp'].getboolean('enabled', False):
return None
# Parse server configurations
for key in self.config['mcp']:
if key.startswith('server.') and '.' in key[7:]:
parts = key.split('.')
server_name = parts[1]
config_key = '.'.join(parts[2:])
if server_name not in mcp_config["mcpServers"]:
mcp_config["mcpServers"][server_name] = {
"command": "",
"args": [],
"env": {}
}
if config_key == 'command':
mcp_config["mcpServers"][server_name]["command"] = self.config['mcp'][key]
elif config_key == 'args':
@@ -57,7 +57,7 @@ def parse_mcp_config(self):
elif config_key.startswith('env.'):
env_key = config_key[4:]
mcp_config["mcpServers"][server_name]["env"][env_key] = self.config['mcp'][key]
# Add model configuration from existing OpenAI settings
model_config = {
"model": self.config['openai']['model'],
@@ -67,7 +67,7 @@ def parse_mcp_config(self):
"systemMessage": "You are a helpful assistant that can use tools."
}
mcp_config["models"].append(model_config)
return mcp_config
```
@@ -92,7 +92,7 @@ def __init__(self):
```python
async def get_chat_response(self, messages):
mcp_config = self.parse_mcp_config()
if not mcp_config:
# Fall back to standard OpenAI if MCP not enabled
return self.client.chat.completions.create(
@@ -100,7 +100,7 @@ async def get_chat_response(self, messages):
messages=messages,
stream=True
)
# Use Dolphin MCP with our parsed config
return run_interaction(
user_query=messages[-1]["content"],
@@ -121,33 +121,33 @@ from dolphin_mcp import MCPClient, run_interaction
class SyncMCPManager:
"""Synchronous wrapper for MCP server management"""
def __init__(self, config):
self.config = config
self.servers = {}
self.initialized = False
self._lock = threading.Lock()
def initialize(self):
    """Initialize and start all MCP servers synchronously.

    Returns:
        bool: True when the servers are (or already were) started,
        False when the config is missing/has no "mcpServers" section
        or a server failed to start.
    """
    if self.initialized:  # fast path: already started, no lock needed
        return True
    with self._lock:
        if self.initialized:  # double-check after acquiring lock
            return True
        if not self.config or "mcpServers" not in self.config:
            return False
        # Run the async initialization on a private event loop.
        # Close the loop in a finally so it is not leaked if
        # _async_initialize raises.
        loop = asyncio.new_event_loop()
        try:
            success = loop.run_until_complete(self._async_initialize())
        finally:
            loop.close()
        self.initialized = success
        return success
async def _async_initialize(self):
"""Async implementation of server initialization"""
success = True
@@ -158,7 +158,7 @@ class SyncMCPManager:
args=server_config.get("args", []),
env=server_config.get("env", {})
)
ok = await client.start()
if ok:
# Get available tools
@@ -170,44 +170,44 @@ class SyncMCPManager:
else:
success = False
print(f"Failed to start MCP server: {server_name}")
return success
def shutdown(self):
"""Shut down all MCP servers synchronously"""
if not self.initialized:
return
with self._lock:
if not self.initialized:
return
loop = asyncio.new_event_loop()
loop.run_until_complete(self._async_shutdown())
loop.close()
self.servers = {}
self.initialized = False
async def _async_shutdown(self):
"""Async implementation of server shutdown"""
for server_info in self.servers.values():
await server_info["client"].stop()
def process_query(self, query, model_name=None):
"""Process a query using MCP tools synchronously"""
if not self.initialized:
self.initialize()
if not self.initialized:
return {"error": "Failed to initialize MCP servers"}
loop = asyncio.new_event_loop()
result = loop.run_until_complete(self._async_process_query(query, model_name))
loop.close()
return result
async def _async_process_query(self, query, model_name=None):
"""Async implementation of query processing"""
return await run_interaction(
@@ -249,7 +249,7 @@ def handle_user_input():
if prompt := st.chat_input("Type your message..."):
if mcp_manager and mcp_manager.initialized:
response = mcp_manager.process_query(
prompt,
prompt,
model_name=client.config['openai']['model']
)
# Handle response...