import streamlit as st
import requests
import logging
import time
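# Note: this app reads the Hugging Face API token from Streamlit secrets as
# st.secrets["HF_TOKEN"]; locally that typically means an entry in
# .streamlit/secrets.toml, on a hosted deployment the platform's secrets configuration.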
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Set page configuration
st.set_page_config(
    page_title="DeepSeek Chatbot - NextGenWebAI",
    page_icon="🤖",
    layout="wide"
)
# Custom CSS for UI Enhancements
st.markdown("""
<style>
    body {
        font-family: 'Arial', sans-serif;
    }
    .stChatMessage {
        border-radius: 10px;
        padding: 10px;
        margin: 5px 0;
    }
    .user {
        background-color: #007BFF;
        color: white;
        text-align: right;
        border-radius: 12px 12px 0px 12px;
    }
    .assistant {
        background-color: #F1F1F1;
        color: black;
        text-align: left;
        border-radius: 12px 12px 12px 0px;
    }
    .sidebar .sidebar-content {
        background-color: #222;
        color: white;
    }
</style>
""", unsafe_allow_html=True)
# Initialize session state for chat history
if "messages" not in st.session_state:
    st.session_state.messages = []
# Sidebar Configuration
with st.sidebar:
    st.image("https://huggingface.co/front/thumbnails/hf-logo.png", width=150)
    st.header("⚙️ Model Configuration")
    st.markdown("[🔑 Get HuggingFace Token](https://huggingface.co/settings/tokens)")
    # Model selection
    model_options = [
        "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
    ]
    selected_model = st.selectbox("🔍 Select AI Model", model_options, index=0)
    # System prompt
    system_message = st.text_area(
        "📜 System Instructions",
        value="You are a friendly chatbot. Provide clear and engaging responses.",
        height=80
    )
    # Chat settings
    max_tokens = st.slider("🔒 Max Tokens", 10, 4000, 300)
    temperature = st.slider("🔥 Temperature", 0.1, 2.0, 0.7)
    top_p = st.slider("🎯 Top-p", 0.1, 1.0, 0.9)
# Function to query the Hugging Face API
def query(payload, api_url):
    headers = {"Authorization": f"Bearer {st.secrets['HF_TOKEN']}"}
    try:
        response = requests.post(api_url, headers=headers, json=payload)
        response.raise_for_status()  # Raise HTTP errors if any
        return response.json()
    except requests.exceptions.RequestException as e:
        logger.error(f"Request Error: {e}")
        return None
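# Note: the Inference API's text-generation endpoint usually returns a list like
# [{"generated_text": "..."}]; the response handling below assumes that shape.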
# Main Chat Interface
st.title("🤖 DeepSeek Chatbot")
st.caption("🚀 AI-powered chatbot using Hugging Face API")
# Display chat history with enhanced UI
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        class_name = "user" if message["role"] == "user" else "assistant"
        st.markdown(f"<div class='{class_name}'>{message['content']}</div>", unsafe_allow_html=True)
# Handle user input
if prompt := st.chat_input("💬 Type your message..."):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(f"<div class='user'>{prompt}</div>", unsafe_allow_html=True)
    try:
        with st.spinner("🤖 Thinking..."):
            time.sleep(1)  # Simulate processing time
            full_prompt = f"{system_message}\n\nUser: {prompt}\nAssistant:"
            payload = {
                "inputs": full_prompt,
                "parameters": {
                    "max_new_tokens": max_tokens,
                    "temperature": temperature,
                    "top_p": top_p,
                    "return_full_text": False
                }
            }
            api_url = f"https://api-inference.huggingface.co/models/{selected_model}"
            output = query(payload, api_url)
            if output and isinstance(output, list) and 'generated_text' in output[0]:
                assistant_response = output[0]['generated_text'].strip()
                with st.chat_message("assistant"):
                    st.markdown(f"<div class='assistant'>{assistant_response}</div>", unsafe_allow_html=True)
                st.session_state.messages.append({"role": "assistant", "content": assistant_response})
            else:
                st.error("⚠️ Unable to generate a response. Please try again.")
    except Exception as e:
        logger.error(f"Application Error: {str(e)}", exc_info=True)
        st.error(f"⚠️ Error: {str(e)}")
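# To run locally (assuming Streamlit and requests are installed and HF_TOKEN is set in secrets):
#   streamlit run app.py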