Add initial project structure with .gitignore, pyproject.toml, and main application files

This commit is contained in:
2025-03-25 17:12:45 +00:00
commit d1ef966e65
4 changed files with 94 additions and 0 deletions

17
.gitignore vendored Normal file
View File

@@ -0,0 +1,17 @@
# Python
__pycache__/
*.py[cod]
*$py.class
# Virtual environment
env/
# Configuration
config/config.ini
# IDE
.vscode/
.idea/
# Streamlit
.streamlit/

15
pyproject.toml Normal file
View File

@@ -0,0 +1,15 @@
# Packaging metadata (PEP 621) built with hatchling.
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "streamlit-chat-app"
version = "0.1.0"
# Runtime dependencies: UI framework, .env loading, and the OpenAI SDK.
dependencies = [
    "streamlit",
    "python-dotenv",
    "openai"
]

# Package the src/ directory into the wheel.
[tool.hatch.build.targets.wheel]
packages = ["src"]

39
src/app.py Normal file
View File

@@ -0,0 +1,39 @@
import streamlit as st
from openai_client import OpenAIClient
def init_session_state():
    """Ensure the chat-history list exists in Streamlit session state."""
    # session_state is a MutableMapping, so setdefault creates the list
    # only on the first run and leaves existing history untouched.
    st.session_state.setdefault("messages", [])
def display_chat_messages():
    """Render every stored chat turn in order, styled by its role."""
    for entry in st.session_state.messages:
        with st.chat_message(entry["role"]):
            st.markdown(entry["content"])
def handle_user_input():
    """Read a prompt from the chat input, stream the assistant's reply,
    and append both turns to the session history.

    Does nothing when the user has not submitted a prompt this rerun.
    """
    prompt = st.chat_input("Type your message...")
    if not prompt:
        return
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)
    with st.chat_message("assistant"):
        response_placeholder = st.empty()
        full_response = ""
        client = OpenAIClient()
        for chunk in client.get_chat_response(st.session_state.messages):
            # Some stream chunks carry no text (e.g. role-only deltas);
            # skip them instead of concatenating None.
            if chunk.choices[0].delta.content:
                full_response += chunk.choices[0].delta.content
                # Fix: the original appended an empty string here — the
                # standard "typing cursor" suffix was lost; restore it.
                response_placeholder.markdown(full_response + "▌")
        # Final render without the cursor.
        response_placeholder.markdown(full_response)
    st.session_state.messages.append({"role": "assistant", "content": full_response})
def main():
    """Application entry point: set the page title and render the chat UI."""
    st.title("Streamlit Chat App")
    init_session_state()
    display_chat_messages()
    handle_user_input()


if __name__ == "__main__":
    main()

23
src/openai_client.py Normal file
View File

@@ -0,0 +1,23 @@
import os
import configparser
from openai import OpenAI
class OpenAIClient:
    """Thin wrapper around the OpenAI SDK, configured from config/config.ini.

    Expects an ``[openai]`` section providing ``api_key``, ``base_url``
    and ``model`` keys.
    """

    def __init__(self):
        self.config = configparser.ConfigParser()
        # Fix: ConfigParser.read() silently ignores a missing file, which
        # previously surfaced as an opaque KeyError below — fail fast with
        # a clear error instead.
        if not self.config.read('config/config.ini'):
            raise FileNotFoundError("config/config.ini not found or unreadable")
        self.client = OpenAI(
            api_key=self.config['openai']['api_key'],
            base_url=self.config['openai']['base_url'],
        )

    def get_chat_response(self, messages):
        """Return a streaming chat-completion iterator for *messages*.

        Args:
            messages: list of ``{"role": ..., "content": ...}`` dicts.

        Raises:
            Exception: wraps any error raised while creating the request;
                the original exception is preserved as ``__cause__``.
        """
        try:
            return self.client.chat.completions.create(
                model=self.config['openai']['model'],
                messages=messages,
                stream=True,
            )
        except Exception as e:
            # Fix: chain the original exception so its traceback and type
            # are not discarded.
            raise Exception(f"OpenAI API error: {str(e)}") from e