Compare commits

2 Commits

6 changed files with 1416 additions and 10 deletions

README.md

@@ -67,7 +67,7 @@ servers_json = config/mcp_config.json
 Start the application:
 ```bash
-streamlit run src/app.py
+uv run mcpapp
 ```
 The app will be available at `http://localhost:8501`
@@ -82,9 +82,6 @@ Key components:
 ## Development
-### Running Tests
-```bash
-pytest
 ```
 ### Code Formatting
@@ -94,7 +91,7 @@ ruff check . --fix
 ### Building
 ```bash
-python -m build
+uv build
 ```
 ## License

config/config.ini

@@ -1,6 +1,7 @@
 [base]
 # provider can be [ openai|openrouter|anthropic|google]
 provider = openrouter
+streamlit_headless = true
 [openrouter]
 api_key = YOUR_API_KEY
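The new `streamlit_headless` key is optional: a launcher can read it with `configparser` and fall back to non-headless mode when the key (or the whole `[base]` section) is missing. A minimal sketch, assuming the same `config/config.ini` layout as above:

```python
import configparser

# Minimal sketch: read the optional streamlit_headless flag from config/config.ini.
# If the key or the [base] section is missing, stay in non-headless mode.
config = configparser.ConfigParser()
config.read("config/config.ini")

headless = False
if config.has_section("base"):
    # getboolean accepts true/false, yes/no, on/off, 1/0 (case-insensitive).
    headless = config.getboolean("base", "streamlit_headless", fallback=False)

print(f"headless={headless}")
```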

pyproject.toml

@@ -1,5 +1,5 @@
 [project]
-name = "streamlit-chat-app"
+name = "macpapp"
 version = "0.1.0"
 description = "Streamlit chat app with MCP"
 readme = "README.md"
@@ -27,6 +27,9 @@ license-files = ["LICEN[CS]E*"]
 GitHub = "https://git.bhakat.dev/abhishekbhakat/mcpapp"
 Issues = "https://git.bhakat.dev/abhishekbhakat/mcpapp/issues"
+[project.scripts]
+mcpapp = "run_app:run_streamlit_app"
 [project.optional-dependencies]
 dev = [
     "build>=1.2.2",

run_app.py (new file, 57 additions)

@@ -0,0 +1,57 @@
import configparser
import os
import subprocess
import sys


def run_streamlit_app():
    """
    Reads the configuration file and launches the Streamlit app,
    optionally in headless mode.
    """
    config_path = "config/config.ini"
    headless = False

    try:
        if os.path.exists(config_path):
            config = configparser.ConfigParser()
            config.read(config_path)
            if config.has_section("base"):
                headless = config.getboolean("base", "streamlit_headless", fallback=False)
                if headless:
                    print(f"INFO: Headless mode enabled via {config_path}.")
                else:
                    print(f"INFO: Headless mode disabled via {config_path}.")
            else:
                print(f"WARNING: [base] section not found in {config_path}. Defaulting to non-headless.")
        else:
            print(f"WARNING: Configuration file not found at {config_path}. Defaulting to non-headless.")
    except Exception as e:
        print(f"ERROR: Could not read headless config from {config_path}: {e}. Defaulting to non-headless.")
        headless = False  # Ensure default on error

    # Construct the Streamlit launch command
    command = [sys.executable, "-m", "streamlit", "run", "src/app.py"]
    if headless:
        command.extend(["--server.headless", "true"])

    print(f"Running command: {' '.join(command)}")

    try:
        # Launch Streamlit as a subprocess and block until it exits
        process = subprocess.Popen(command)
        process.wait()  # Wait for the Streamlit process to exit
        print(f"Streamlit process finished with exit code: {process.returncode}")
    except FileNotFoundError:
        print("ERROR: 'streamlit' command not found. Make sure Streamlit is installed and in your PATH.")
        sys.exit(1)
    except Exception as e:
        print(f"ERROR: Failed to run Streamlit: {e}")
        sys.exit(1)


if __name__ == "__main__":
    run_streamlit_app()
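The launcher uses `subprocess.Popen` followed by `wait()`, which streams Streamlit's output straight to the terminal and blocks until the server exits. A roughly equivalent sketch using `subprocess.run` is shown below; propagating the child's exit code via `sys.exit` is an assumption added for illustration, not something `run_app.py` does.

```python
import subprocess
import sys

# Sketch: an alternative launcher body using subprocess.run instead of Popen/wait.
# subprocess.run blocks until the child exits and returns a CompletedProcess;
# forwarding returncode via sys.exit is an assumption, not part of run_app.py.
command = [sys.executable, "-m", "streamlit", "run", "src/app.py"]
result = subprocess.run(command)
print(f"Streamlit process finished with exit code: {result.returncode}")
sys.exit(result.returncode)
```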

src/app.py

@@ -1,13 +1,12 @@
 import atexit
 import configparser
-import json  # For handling potential error JSON in stream
+import json
 import logging
 import streamlit as st
-# Updated imports
 from llm_client import LLMClient
-from src.custom_mcp.manager import SyncMCPManager  # Updated import path
+from src.custom_mcp.manager import SyncMCPManager
 # Configure logging for the app
 logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
@@ -86,6 +85,7 @@ def init_session_state():
             base_url=base_url,
         )
         st.session_state.model_name = model_name
+        st.session_state.provider_name = provider_name  # Store provider name
         logger.info("LLMClient initialized successfully.")
     except Exception as e:
@@ -183,9 +183,34 @@ def handle_user_input():
 def main():
     """Main function to run the Streamlit app."""
-    st.title("MCP Chat App")  # Updated title
     try:
         init_session_state()
+        # --- Display Enhanced Header ---
+        provider_name = st.session_state.get("provider_name", "Unknown Provider")
+        model_name = st.session_state.get("model_name", "Unknown Model")
+        mcp_manager = st.session_state.client.mcp_manager  # Get the manager
+        server_count = 0
+        tool_count = 0
+        if mcp_manager and mcp_manager.initialized:
+            server_count = len(mcp_manager.servers)
+            try:
+                # Get tool count (might be slightly slow if many tools/servers)
+                tool_count = len(mcp_manager.list_all_tools())
+            except Exception as e:
+                logger.warning(f"Could not retrieve tool count for header: {e}")
+                tool_count = "N/A"  # Display N/A if listing fails
+        # Display the new header format
+        st.markdown(f"# Say Hi to **{provider_name.capitalize()}**!")
+        st.write(f"MCP Servers: **{server_count}** | Tools: **{tool_count}**")
+        st.write(f"Model: **{model_name}**")
+        st.divider()
+        # -----------------------------
+        # Removed the previous caption display
         display_chat_messages()
         handle_user_input()
     except Exception as e:
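The header logic above degrades gracefully: counts default to 0 when the MCP manager is missing or uninitialized, and the tool count falls back to "N/A" if listing tools raises. A hypothetical helper capturing the same behaviour (the name `get_mcp_stats` is illustrative and not part of this commit) might look like:

```python
# Hypothetical helper (not in the commit): collect the header stats in one place.
def get_mcp_stats(mcp_manager):
    """Return (server_count, tool_count); tool_count is "N/A" if listing fails."""
    server_count = 0
    tool_count = 0
    if mcp_manager and mcp_manager.initialized:
        server_count = len(mcp_manager.servers)
        try:
            tool_count = len(mcp_manager.list_all_tools())
        except Exception:
            tool_count = "N/A"  # degrade gracefully instead of breaking the header
    return server_count, tool_count
```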

uv.lock (generated, new file, 1323 additions)

File diff suppressed because it is too large.