ajaynagotha committed
Commit 6afcb0b · verified · Parent(s): 23bfc7b

Update app.py

Files changed (1):
  1. app.py +28 -68
app.py CHANGED
@@ -1,7 +1,6 @@
 import streamlit as st
 import requests
 import logging
-import time
 
 # Configure logging
 logging.basicConfig(level=logging.INFO)
@@ -9,75 +8,40 @@ logger = logging.getLogger(__name__)
 
 # Set page configuration
 st.set_page_config(
-    page_title="DeepSeek Chatbot - NextGenWebAI",
+    page_title="DeepSeek Chatbot",
     page_icon="🤖",
     layout="wide"
 )
 
-# Custom CSS for UI Enhancements
-st.markdown("""
-    <style>
-    body {
-        font-family: 'Arial', sans-serif;
-    }
-    .stChatMessage {
-        border-radius: 10px;
-        padding: 10px;
-        margin: 5px 0;
-    }
-    .user {
-        background-color: #007BFF;
-        color: white;
-        text-align: right;
-        border-radius: 12px 12px 0px 12px;
-    }
-    .assistant {
-        background-color: #F1F1F1;
-        color: black;
-        text-align: left;
-        border-radius: 12px 12px 12px 0px;
-    }
-    .sidebar .sidebar-content {
-        background-color: #222;
-        color: white;
-    }
-    </style>
-""", unsafe_allow_html=True)
-
 # Initialize session state for chat history
 if "messages" not in st.session_state:
     st.session_state.messages = []
 
-# Sidebar Configuration
-with st.sidebar:
-    st.image("https://huggingface.co/front/thumbnails/hf-logo.png", width=150)
-    st.header("⚙️ Model Configuration")
-    st.markdown("[🔑 Get HuggingFace Token](https://huggingface.co/settings/tokens)")
+# Sidebar for model configuration
+st.sidebar.title("⚙️ Settings")
 
-    # Model selection
-    model_options = [
-        "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
-    ]
-    selected_model = st.selectbox("🔍 Select AI Model", model_options, index=0)
+# Model selection
+model_options = ["deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"]
+selected_model = st.sidebar.selectbox("Select AI Model", model_options)
 
-    # System prompt
-    system_message = st.text_area(
-        "📜 System Instructions",
-        value="You are a friendly chatbot. Provide clear and engaging responses.",
-        height=80
-    )
+# System message input
+system_message = st.sidebar.text_area(
+    "System Message",
+    value="You are a friendly chatbot. Provide clear and engaging responses.",
+    height=80
+)
 
-    # Chat settings
-    max_tokens = st.slider("🔒 Max Tokens", 10, 4000, 300)
-    temperature = st.slider("🔥 Temperature", 0.1, 2.0, 0.7)
-    top_p = st.slider("🎯 Top-p", 0.1, 1.0, 0.9)
+# Chat configuration settings
+max_tokens = st.sidebar.slider("Max Tokens", 10, 4000, 300)
+temperature = st.sidebar.slider("Temperature", 0.1, 2.0, 0.7)
+top_p = st.sidebar.slider("Top-p", 0.1, 1.0, 0.9)
 
 # Function to query the Hugging Face API
 def query(payload, api_url):
     headers = {"Authorization": f"Bearer {st.secrets['HF_TOKEN']}"}
     try:
         response = requests.post(api_url, headers=headers, json=payload)
-        response.raise_for_status()  # Raise HTTP errors if any
+        response.raise_for_status()
         return response.json()
     except requests.exceptions.RequestException as e:
         logger.error(f"Request Error: {e}")
@@ -85,24 +49,20 @@ def query(payload, api_url):
 
 # Main Chat Interface
 st.title("🤖 DeepSeek Chatbot")
-st.caption("🚀 AI-powered chatbot using Hugging Face API")
+st.write("Chat with an AI-powered assistant.")
 
-# Display chat history with enhanced UI
+# Display chat history
 for message in st.session_state.messages:
-    with st.chat_message(message["role"]):
-        class_name = "user" if message["role"] == "user" else "assistant"
-        st.markdown(f"<div class='{class_name}'>{message['content']}</div>", unsafe_allow_html=True)
+    role = "🧑‍💻 You" if message["role"] == "user" else "🤖 AI"
+    st.markdown(f"**{role}:** {message['content']}")
 
 # Handle user input
-if prompt := st.chat_input("💬 Type your message..."):
+if prompt := st.chat_input("Type your message..."):
     st.session_state.messages.append({"role": "user", "content": prompt})
-
-    with st.chat_message("user"):
-        st.markdown(f"<div class='user'>{prompt}</div>", unsafe_allow_html=True)
+    st.markdown(f"**🧑‍💻 You:** {prompt}")
 
     try:
-        with st.spinner("🤖 Thinking..."):
-            time.sleep(1)  # Simulate processing time
+        with st.spinner("Generating response..."):
             full_prompt = f"{system_message}\n\nUser: {prompt}\nAssistant:"
             payload = {
                 "inputs": full_prompt,
@@ -119,11 +79,11 @@ if prompt := st.chat_input("💬 Type your message..."):
 
             if output and isinstance(output, list) and 'generated_text' in output[0]:
                 assistant_response = output[0]['generated_text'].strip()
-                with st.chat_message("assistant"):
-                    st.markdown(f"<div class='assistant'>{assistant_response}</div>", unsafe_allow_html=True)
+                st.markdown(f"**🤖 AI:** {assistant_response}")
                 st.session_state.messages.append({"role": "assistant", "content": assistant_response})
             else:
-                st.error("⚠️ Unable to generate a response. Please try again.")
+                st.error("Unable to generate a response. Please try again.")
+
     except Exception as e:
         logger.error(f"Application Error: {str(e)}", exc_info=True)
-        st.error(f"⚠️ Error: {str(e)}")
+        st.error(f"Error: {str(e)}")
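For reference, below is a minimal sketch of how the retained query() helper might be called. The api_url construction, the payload parameter names, and the except-branch return value sit in unchanged lines that are not shown in the hunks above, so everything here beyond the query() signature is an assumption based on common Hugging Face Inference API usage, not part of this commit.

# Hypothetical call site for query(); the endpoint URL, the "parameters" names,
# and the fallback return value are assumptions, not lines from app.py.
import requests

HF_TOKEN = "hf_..."  # in the app this comes from st.secrets['HF_TOKEN']

def query(payload, api_url):
    # Same shape as the helper in app.py: POST the payload with a bearer token.
    headers = {"Authorization": f"Bearer {HF_TOKEN}"}
    try:
        response = requests.post(api_url, headers=headers, json=payload)
        response.raise_for_status()
        return response.json()
    except requests.exceptions.RequestException as e:
        print(f"Request Error: {e}")
        return None  # assumed fallback; the real error branch is not visible in the diff

selected_model = "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"
api_url = f"https://api-inference.huggingface.co/models/{selected_model}"  # assumed URL construction

payload = {
    "inputs": "You are a friendly chatbot.\n\nUser: Hello!\nAssistant:",
    "parameters": {  # assumed names, mirroring the sidebar sliders
        "max_new_tokens": 300,
        "temperature": 0.7,
        "top_p": 0.9,
        "return_full_text": False,
    },
}

output = query(payload, api_url)
# The app checks for a list like [{"generated_text": "..."}] before rendering.
if output and isinstance(output, list) and "generated_text" in output[0]:
    print(output[0]["generated_text"].strip())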