# app.py
import streamlit as st
from models import load_model
# Load the model once; st.cache_resource keeps it cached across
# Streamlit's script reruns instead of reloading it on every interaction
demo = st.cache_resource(load_model)()
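# models.py is not shown on this page. A minimal sketch of what
# load_model() might return, assuming it wraps a Hugging Face
# text-generation pipeline (an illustration, not the actual module):
#
#     from transformers import pipeline
#
#     def load_model():
#         pipe = pipeline("text-generation", model="deepseek-ai/...")  # placeholder model id
#         def generate(prompt, max_length=512, temperature=0.7, top_p=0.9):
#             out = pipe(prompt, max_length=max_length,
#                        temperature=temperature, top_p=top_p)
#             return out[0]["generated_text"]
#         return generate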
# Page configuration
st.set_page_config(
    page_title="DeepSeek Chatbot - ruslanmv.com",
    page_icon="🤖",
    layout="centered"
)
# Initialize session state for chat history
if "messages" not in st.session_state:
    st.session_state.messages = []
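# st.session_state persists across Streamlit's script reruns, so the
# message list accumulates for the lifetime of the browser session.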
# Sidebar for model parameters
with st.sidebar:
    st.header("Model Configuration")

    # System message
    system_message = st.text_area(
        "System Message",
        value="You are a friendly Chatbot created by ruslanmv.com",
        height=100
    )

    # Generation parameters
    max_tokens = st.slider(
        "Max Tokens",
        min_value=1,
        max_value=4000,
        value=512,
        step=10
    )
    temperature = st.slider(
        "Temperature",
        min_value=0.1,
        max_value=4.0,
        value=0.7,
        step=0.1
    )
    top_p = st.slider(
        "Top-p (nucleus sampling)",
        min_value=0.1,
        max_value=1.0,
        value=0.9,
        step=0.1
    )
# Main chat interface
st.title("🤖 DeepSeek Chatbot")
st.caption("Powered by ruslanmv.com - Configure parameters in the sidebar")
# Display chat messages
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
# Chat input
if prompt := st.chat_input("Type your message..."):
    # Add user message to chat history
    st.session_state.messages.append({"role": "user", "content": prompt})

    # Display user message
    with st.chat_message("user"):
        st.markdown(prompt)

    try:
        # Generate response using the model
        with st.spinner("Generating response..."):
            # Compose a single prompt string and pass the generation
            # parameters as keyword arguments; this assumes the callable
            # returned by load_model() accepts this signature
            response = demo(
                f"{system_message}\n\nUser: {prompt}\nAssistant:",
                max_length=max_tokens,
                temperature=temperature,
                top_p=top_p
            )

        # Display assistant response
        with st.chat_message("assistant"):
            st.markdown(response)

        # Add assistant response to chat history
        st.session_state.messages.append({"role": "assistant", "content": response})
    except Exception as e:
        st.error(f"An error occurred: {e}")
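# To run the app locally with the standard Streamlit CLI:
#     streamlit run app.py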