Chat history and growing context
airflow-wingman/src/airflow_wingman/mcp_tools.py (new file, 7 lines)
@@ -0,0 +1,7 @@
+import asyncio
+
+from airflow_mcp_server.tools.tool_manager import get_airflow_tools
+
+# Get tools with their parameters
+tools = asyncio.run(get_airflow_tools(mode="safe"))
+TOOLS = {tool.name: {"description": tool.description, "parameters": tool.inputSchema} for tool in tools}
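For orientation, a rough sketch of the shape TOOLS takes once this module is imported; the tool name, description, and schema below are invented placeholders, since the real entries come from whatever airflow_mcp_server exposes in safe mode:

# Hypothetical entry; actual names, descriptions, and JSON schemas come from airflow_mcp_server.
EXAMPLE_TOOLS = {
    "get_dags": {
        "description": "List DAGs registered in this Airflow instance.",
        "parameters": {"type": "object", "properties": {"limit": {"type": "integer"}}},
    },
}

for name, spec in EXAMPLE_TOOLS.items():
    print(f"{name}: {spec['description']}")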
airflow-wingman/src/airflow_wingman/notes.py (new file, 13 lines)
@@ -0,0 +1,13 @@
+INTERFACE_MESSAGES = {
+    "model_recommendation": {"title": "Note", "content": "For best results with function/tool calling capabilities, we recommend using models like Claude-3.5 Sonnet or GPT-4."},
+    "security_note": {
+        "title": "Security",
+        "content": "For your security, API keys are required for each session and are never stored. If you refresh the page or close the browser, you'll need to enter your API key again.",
+    },
+    "context_window": {
+        "title": "Context Window",
+        "content": "Each model has a maximum context window size that determines how much text it can process. "
+        "For long conversations or large code snippets, consider using models with larger context windows like Claude-3 Opus (200K tokens) or GPT-4 Turbo (128K tokens). "
+        "For better results, try to keep the context size as low as possible; start new chats instead of reusing the same one.",
+    },
+}
airflow-wingman/src/airflow_wingman/prompt_engineering.py (new file, 40 lines)
@@ -0,0 +1,40 @@
+"""
+Prompt engineering for the Airflow Wingman plugin.
+Contains prompts and instructions for the AI assistant.
+"""
+
+import json
+
+from airflow_wingman.mcp_tools import TOOLS
+
+INSTRUCTIONS = {
+    "default": f"""You are Airflow Wingman, a helpful AI assistant integrated into Apache Airflow.
+You have deep knowledge of Apache Airflow's architecture, DAGs, operators, and best practices.
+The Airflow version being used is >=2.10.
+
+You have access to the following Airflow API tools:
+
+{json.dumps(TOOLS, indent=2)}
+
+You can use these tools to fetch information and help users understand and manage their Airflow environment.
+"""
+}
+
+
+def prepare_messages(messages: list[dict[str, str]], instruction_key: str = "default") -> list[dict[str, str]]:
+    """Prepare messages for the chat completion request.
+
+    Args:
+        messages: List of messages in the conversation
+        instruction_key: Key for the instruction template to use
+
+    Returns:
+        List of message dictionaries ready for the chat completion API
+    """
+    instruction = INSTRUCTIONS.get(instruction_key, INSTRUCTIONS["default"])
+
+    # Add instruction as first system message if not present
+    if not messages or messages[0].get("role") != "system":
+        messages.insert(0, {"role": "system", "content": instruction})
+
+    return messages
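A minimal usage sketch for prepare_messages, with an invented conversation; because mcp_tools.py runs asyncio.run(get_airflow_tools(...)) at import time, this assumes an environment where that import succeeds:

from airflow_wingman.prompt_engineering import prepare_messages

history = [
    {"role": "user", "content": "Which DAGs failed today?"},
    {"role": "assistant", "content": "Checking the most recent DAG runs."},
    {"role": "user", "content": "Only the ones owned by the data team."},
]

prepared = prepare_messages(history)

# The system instruction is inserted once at the front; the accumulated
# history follows unchanged. The list is mutated in place, so a second
# call on the same list will not add another system message.
assert prepared[0]["role"] == "system"
assert prepared[1]["content"] == "Which DAGs failed today?"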
wingman_chat.html
@@ -15,9 +15,11 @@
         <h3 class="panel-title">Airflow Wingman</h3>
     </div>
     <div class="alert alert-info" style="margin: 15px;">
-        <p><strong>Note:</strong> For best results with function/tool calling capabilities, we recommend using models like Claude-3.5 Sonnet or GPT-4o. These models excel at understanding and using complex tools effectively.</p>
+        <p><strong>{{ interface_messages.model_recommendation.title }}:</strong> {{ interface_messages.model_recommendation.content }}</p>
         <hr style="margin: 10px 0;">
-        <p><strong>Security:</strong> For your security, API keys are required for each session and are never stored. If you refresh the page or close the browser, you'll need to enter your API key again. This ensures your API keys remain secure in shared environments.</p>
+        <p><strong>{{ interface_messages.security_note.title }}:</strong> {{ interface_messages.security_note.content }}</p>
+        <hr style="margin: 10px 0;">
+        <p><strong>{{ interface_messages.context_window.title }}:</strong> {{ interface_messages.context_window.content }}</p>
     </div>
 </div>
 </div>
@@ -104,6 +106,13 @@
             <!-- Messages will be dynamically added here -->
         </div>
         <div class="panel-footer" style="padding: 15px; background-color: white;">
+            <div class="row">
+                <div class="col-md-2">
+                    <button class="btn btn-default btn-block" type="button" id="refresh-button" title="Start a new chat">
+                        <i class="fa fa-refresh"></i> New Chat
+                    </button>
+                </div>
+                <div class="col-md-10">
             <div class="input-group">
                 <input type="text" class="form-control" id="message-input" placeholder="Type your message...">
                 <span class="input-group-btn">
@@ -117,6 +126,8 @@
+                </div>
+            </div>
         </div>
     </div>
 </div>

 <style>
     .message {
@@ -218,9 +229,23 @@ document.addEventListener('DOMContentLoaded', function() {
 
     const messageInput = document.getElementById('message-input');
     const sendButton = document.getElementById('send-button');
+    const refreshButton = document.getElementById('refresh-button');
     const chatMessages = document.getElementById('chat-messages');
 
     let currentMessageDiv = null;
+    let messageHistory = [];
+
+    function clearChat() {
+        // Clear the chat messages
+        chatMessages.innerHTML = '';
+        // Reset message history
+        messageHistory = [];
+        // Clear the input field
+        messageInput.value = '';
+        // Enable input if it was disabled
+        messageInput.disabled = false;
+        sendButton.disabled = false;
+    }
 
     function addMessage(content, isUser) {
         const messageDiv = document.createElement('div');
@@ -250,17 +275,14 @@ document.addEventListener('DOMContentLoaded', function() {
             addMessage(message, true);
 
             try {
-                // Create messages array with system message
-                const messages = [
-                    {
-                        role: 'system',
-                        content: 'You are a helpful AI assistant integrated into Apache Airflow.'
-                    },
-                    {
+                // Add user message to history
+                messageHistory.push({
                     role: 'user',
                     content: message
-                    }
-                ];
+                });
+
+                // Use full message history for the request
+                const messages = [...messageHistory];
 
                 // Create assistant message div
                 currentMessageDiv = addMessage('', false);
@@ -314,6 +336,7 @@ document.addEventListener('DOMContentLoaded', function() {
                 // Handle streaming response
                 const reader = response.body.getReader();
                 const decoder = new TextDecoder();
+                let fullResponse = '';
 
                 while (true) {
                     const { value, done } = await reader.read();
@@ -327,11 +350,20 @@ document.addEventListener('DOMContentLoaded', function() {
                         const content = line.slice(6);
                         if (content) {
                             currentMessageDiv.textContent += content;
+                            fullResponse += content;
                             chatMessages.scrollTop = chatMessages.scrollHeight;
                         }
                     }
                 }
             }
+
+            // Add assistant's response to history
+            if (fullResponse) {
+                messageHistory.push({
+                    role: 'assistant',
+                    content: fullResponse
+                });
+            }
         } catch (error) {
             console.error('Error:', error);
             currentMessageDiv.textContent = `Error: ${error.message}`;
@@ -345,6 +377,8 @@ document.addEventListener('DOMContentLoaded', function() {
                 sendMessage();
             }
         });
+
+        refreshButton.addEventListener('click', clearChat);
     });
 </script>
 {% endblock %}
@@ -6,6 +6,8 @@ from flask_appbuilder import BaseView as AppBuilderBaseView, expose
 
 from airflow_wingman.llm_client import LLMClient
 from airflow_wingman.llms_models import MODELS
+from airflow_wingman.notes import INTERFACE_MESSAGES
+from airflow_wingman.prompt_engineering import prepare_messages
 
 
 class WingmanView(AppBuilderBaseView):
@@ -18,7 +20,7 @@ class WingmanView(AppBuilderBaseView):
     def chat(self):
         """Render chat interface."""
         providers = {provider: info["name"] for provider, info in MODELS.items()}
-        return self.render_template("wingman_chat.html", title="Airflow Wingman", models=MODELS, providers=providers)
+        return self.render_template("wingman_chat.html", title="Airflow Wingman", models=MODELS, providers=providers, interface_messages=INTERFACE_MESSAGES)
 
     @expose("/chat", methods=["POST"])
     def chat_completion(self):
@@ -49,10 +51,14 @@ class WingmanView(AppBuilderBaseView):
         if missing:
             raise ValueError(f"Missing required fields: {', '.join(missing)}")
 
+        # Prepare messages with system instruction while maintaining history
+        messages = data["messages"]
+        messages = prepare_messages(messages)
+
         return {
             "provider": data["provider"],
             "model": data["model"],
-            "messages": data["messages"],
+            "messages": messages,
             "api_key": data["api_key"],
             "stream": data.get("stream", False),
             "temperature": data.get("temperature", 0.7),