Fix intermediate tool-call chain: pass tool definitions through to follow-up AI completions

Committed: 2025-03-02 14:27:52 +00:00
parent 5491ba71aa
commit cd284e8de4
2 changed files with 16 additions and 4 deletions

View File

@@ -100,7 +100,7 @@ class LLMClient:
# Create a follow-up completion with the tool results
logger.info("Making follow-up request with tool results")
follow_up_response = self.provider.create_follow_up_completion(
messages=messages, model=model, temperature=temperature, max_tokens=max_tokens, tool_results=tool_results, original_response=response
messages=messages, model=model, temperature=temperature, max_tokens=max_tokens, tool_results=tool_results, original_response=response, tools=provider_tools
)
content = self.provider.get_content(follow_up_response)
@@ -184,8 +184,18 @@ class LLMClient:
# Only stream on the final iteration
should_stream = (iteration == max_iterations) or not self.provider.has_tool_calls(current_response)
# Get provider-specific tool definitions from Airflow tools
provider_tools = self.provider.convert_tools(self.airflow_tools)
follow_up_response = self.provider.create_follow_up_completion(
messages=messages, model=model, temperature=temperature, max_tokens=max_tokens, tool_results=tool_results, original_response=current_response, stream=should_stream
messages=messages,
model=model,
temperature=temperature,
max_tokens=max_tokens,
tool_results=tool_results,
original_response=current_response,
stream=should_stream,
tools=provider_tools,
)
# Check if this follow-up response has more tool calls

View File

@@ -104,7 +104,7 @@ class AnthropicProvider(BaseLLMProvider):
if callable(response.json):
# If json is a method, call it
try:
logger.info(f"Anthropic response json: {json.dumps(response.json())}")
logger.info(f"Anthropic response json: {json.dumps(response.model_dump_json())}")
except Exception as json_err:
logger.warning(f"Could not serialize response.json(): {str(json_err)}")
else:
@@ -274,6 +274,7 @@ class AnthropicProvider(BaseLLMProvider):
tool_results: dict[str, Any] = None,
original_response: Any = None,
stream: bool = True,
tools: list[dict[str, Any]] | None = None,
) -> Any:
"""
Create a follow-up completion with tool results.
@@ -286,6 +287,7 @@ class AnthropicProvider(BaseLLMProvider):
tool_results: Results of tool executions
original_response: Original response with tool calls
stream: Whether to stream the response
tools: List of tool definitions in Anthropic format
Returns:
Anthropic response object or generator if streaming
@@ -327,7 +329,7 @@ class AnthropicProvider(BaseLLMProvider):
temperature=temperature,
max_tokens=max_tokens,
stream=stream,
tools=None, # No tools needed for follow-up
tools=tools,
)
def get_content(self, response: Any) -> str: