Add initial project structure with .gitignore, pyproject.toml, and main application files
.gitignore (new file, 17 lines)
@@ -0,0 +1,17 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+
+# Virtual environment
+env/
+
+# Configuration
+config/config.ini
+
+# IDE
+.vscode/
+.idea/
+
+# Streamlit
+.streamlit/
pyproject.toml (new file, 15 lines)
@@ -0,0 +1,15 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "streamlit-chat-app"
+version = "0.1.0"
+dependencies = [
+    "streamlit",
+    "python-dotenv",
+    "openai"
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src"]
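A likely setup step (not part of this commit) is an editable install from the repository root, e.g. pip install -e ., which would pull in the streamlit, python-dotenv, and openai dependencies through the hatchling build backend.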
src/app.py (new file, 39 lines)
@@ -0,0 +1,39 @@
+import streamlit as st
+from openai_client import OpenAIClient
+
+def init_session_state():
+    if "messages" not in st.session_state:
+        st.session_state.messages = []
+
+def display_chat_messages():
+    for message in st.session_state.messages:
+        with st.chat_message(message["role"]):
+            st.markdown(message["content"])
+
+def handle_user_input():
+    if prompt := st.chat_input("Type your message..."):
+        st.session_state.messages.append({"role": "user", "content": prompt})
+        with st.chat_message("user"):
+            st.markdown(prompt)
+
+        with st.chat_message("assistant"):
+            response_placeholder = st.empty()
+            full_response = ""
+
+            client = OpenAIClient()
+            for chunk in client.get_chat_response(st.session_state.messages):
+                if chunk.choices[0].delta.content:
+                    full_response += chunk.choices[0].delta.content
+                    response_placeholder.markdown(full_response + "▌")
+
+            response_placeholder.markdown(full_response)
+            st.session_state.messages.append({"role": "assistant", "content": full_response})
+
+def main():
+    st.title("Streamlit Chat App")
+    init_session_state()
+    display_chat_messages()
+    handle_user_input()
+
+if __name__ == "__main__":
+    main()
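Assuming the dependencies are installed and a config/config.ini exists (see the sketch at the end of this diff), the app would typically be launched with streamlit run src/app.py from the repository root; the flat "from openai_client import OpenAIClient" import assumes src/ ends up on the import path at runtime.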
src/openai_client.py (new file, 23 lines)
@@ -0,0 +1,23 @@
+import os
+import configparser
+from openai import OpenAI
+
+class OpenAIClient:
+    def __init__(self):
+        self.config = configparser.ConfigParser()
+        self.config.read('config/config.ini')
+        self.client = OpenAI(
+            api_key=self.config['openai']['api_key'],
+            base_url=self.config['openai']['base_url']
+        )
+
+    def get_chat_response(self, messages):
+        try:
+            response = self.client.chat.completions.create(
+                model=self.config['openai']['model'],
+                messages=messages,
+                stream=True
+            )
+            return response
+        except Exception as e:
+            raise Exception(f"OpenAI API error: {str(e)}")
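openai_client.py reads api_key, base_url, and model from an [openai] section of config/config.ini, which is gitignored and therefore not included in this commit. Below is a minimal sketch of a helper script that writes such a file with placeholder values; the endpoint URL and model name are assumptions, not taken from the commit.

import configparser
import os

# Hypothetical helper (not part of the commit): writes the config file
# that OpenAIClient expects, using placeholder values.
config = configparser.ConfigParser()
config["openai"] = {
    "api_key": "YOUR_API_KEY",                # replace with a real key
    "base_url": "https://api.openai.com/v1",  # assumed endpoint; any OpenAI-compatible URL
    "model": "gpt-4o-mini",                   # assumed model name
}

os.makedirs("config", exist_ok=True)
with open("config/config.ini", "w") as f:
    config.write(f)

Since ConfigParser.read resolves the path relative to the working directory, both this script and streamlit run should be invoked from the repository root.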