- Added `process.py` for managing MCP server subprocesses with async capabilities.
- Introduced `protocol.py` for handling JSON-RPC communication over streams.
- Created `llm_client.py` to support chat completion requests to various LLM providers, integrating with MCP tools.
- Defined model configurations in `llm_models.py` for different LLM providers.
- Removed the synchronous `mcp_manager.py` in favor of a more modular approach.
- Established a provider framework in the `providers` directory with a base class and specific implementations.
- Implemented `OpenAIProvider` for interacting with OpenAI's API, including streaming support and tool-call handling.
141 lines · 4.3 KiB · Python
# src/providers/base.py
|
|
import abc
|
|
from collections.abc import Generator
|
|
from typing import Any
|
|
|
|
|
|
class BaseProvider(abc.ABC):
|
|
"""
|
|
Abstract base class for LLM providers.
|
|
|
|
Defines the common interface for interacting with different LLM APIs,
|
|
including handling chat completions and tool usage.
|
|
"""
|
|
|
|
def __init__(self, api_key: str, base_url: str | None = None):
|
|
"""
|
|
Initialize the provider.
|
|
|
|
Args:
|
|
api_key: The API key for the provider.
|
|
base_url: Optional base URL for the provider's API.
|
|
"""
|
|
self.api_key = api_key
|
|
self.base_url = base_url
|
|
|
|
@abc.abstractmethod
|
|
def create_chat_completion(
|
|
self,
|
|
messages: list[dict[str, str]],
|
|
model: str,
|
|
temperature: float = 0.4,
|
|
max_tokens: int | None = None,
|
|
stream: bool = True,
|
|
tools: list[dict[str, Any]] | None = None,
|
|
) -> Any:
|
|
"""
|
|
Send a chat completion request to the LLM provider.
|
|
|
|
Args:
|
|
messages: List of message dictionaries with 'role' and 'content'.
|
|
model: Model identifier.
|
|
temperature: Sampling temperature (0-1).
|
|
max_tokens: Maximum tokens to generate.
|
|
stream: Whether to stream the response.
|
|
tools: Optional list of tools in the provider-specific format.
|
|
|
|
Returns:
|
|
Provider-specific response object (e.g., API response, stream object).
|
|
"""
|
|
pass
|
|
|
|
@abc.abstractmethod
|
|
def get_streaming_content(self, response: Any) -> Generator[str, None, None]:
|
|
"""
|
|
Extracts and yields content chunks from a streaming response object.
|
|
|
|
Args:
|
|
response: The streaming response object returned by create_chat_completion.
|
|
|
|
Yields:
|
|
String chunks of the response content.
|
|
"""
|
|
pass
|
|
|
|
@abc.abstractmethod
|
|
def get_content(self, response: Any) -> str:
|
|
"""
|
|
Extracts the complete content from a non-streaming response object.
|
|
|
|
Args:
|
|
response: The non-streaming response object.
|
|
|
|
Returns:
|
|
The complete response content as a string.
|
|
"""
|
|
pass
|
|
|
|
@abc.abstractmethod
|
|
def has_tool_calls(self, response: Any) -> bool:
|
|
"""
|
|
Checks if the response object contains tool calls.
|
|
|
|
Args:
|
|
response: The response object (streaming or non-streaming).
|
|
|
|
Returns:
|
|
True if tool calls are present, False otherwise.
|
|
"""
|
|
pass
|
|
|
|
@abc.abstractmethod
|
|
def parse_tool_calls(self, response: Any) -> list[dict[str, Any]]:
|
|
"""
|
|
Parses tool calls from the response object.
|
|
|
|
Args:
|
|
response: The response object containing tool calls.
|
|
|
|
Returns:
|
|
A list of dictionaries, each representing a tool call with details
|
|
like 'id', 'function_name', 'arguments'. The exact structure might
|
|
vary slightly based on provider needs but should contain enough
|
|
info for execution.
|
|
"""
|
|
pass
|
|
|
|
@abc.abstractmethod
|
|
def format_tool_results(self, tool_call_id: str, result: Any) -> dict[str, Any]:
|
|
"""
|
|
Formats the result of a tool execution into the structure expected
|
|
by the provider for follow-up requests.
|
|
|
|
Args:
|
|
tool_call_id: The unique ID of the tool call (from parse_tool_calls).
|
|
result: The data returned by the tool execution.
|
|
|
|
Returns:
|
|
A dictionary representing the tool result in the provider's format.
|
|
"""
|
|
pass
|
|
|
|
@abc.abstractmethod
|
|
def convert_tools(self, tools: list[dict[str, Any]]) -> list[dict[str, Any]]:
|
|
"""
|
|
Converts a list of tools from the standard internal format to the
|
|
provider-specific format required for the API call.
|
|
|
|
Args:
|
|
tools: List of tool definitions in the standard internal format.
|
|
Each dict contains 'server_name', 'name', 'description', 'input_schema'.
|
|
|
|
Returns:
|
|
List of tool definitions in the provider-specific format.
|
|
"""
|
|
pass
|
|
|
|
# Optional: Add a method for follow-up completions if the provider API
|
|
# requires a specific structure different from just appending messages.
|
|
# def create_follow_up_completion(...) -> Any:
|
|
# pass
|