ruslanmv committed · Commit 2da3f27 · verified · 1 Parent(s): 10f0d10

Update app.py

Files changed (1): app.py (+38, −42)
app.py CHANGED
@@ -1,9 +1,12 @@
1
  # app.py
 
 
2
  import streamlit as st
3
- from models import load_model
4
 
5
- # Load the model once
6
- generator = load_model()
 
7
 
8
  # Page configuration
9
  st.set_page_config(
@@ -16,82 +19,75 @@ st.set_page_config(
16
  if "messages" not in st.session_state:
17
  st.session_state.messages = []
18
 
19
- # Sidebar for model parameters
20
  with st.sidebar:
21
  st.header("Model Configuration")
 
22
 
23
- # System message
24
  system_message = st.text_area(
25
  "System Message",
26
  value="You are a friendly Chatbot created by ruslanmv.com",
27
  height=100
28
  )
29
 
30
- # Generation parameters
31
  max_tokens = st.slider(
32
  "Max Tokens",
33
- min_value=1,
34
- max_value=4000,
35
- value=512,
36
- step=10
37
  )
38
 
39
  temperature = st.slider(
40
  "Temperature",
41
- min_value=0.1,
42
- max_value=4.0,
43
- value=0.7,
44
- step=0.1
45
  )
46
 
47
  top_p = st.slider(
48
- "Top-p (nucleus sampling)",
49
- min_value=0.1,
50
- max_value=1.0,
51
- value=0.9,
52
- step=0.1
53
  )
54
 
55
- # Main chat interface
56
  st.title("🤖 DeepSeek Chatbot")
57
- st.caption("Powered by ruslanmv.com - Configure parameters in the sidebar")
58
 
59
- # Display chat messages
60
  for message in st.session_state.messages:
61
  with st.chat_message(message["role"]):
62
  st.markdown(message["content"])
63
 
64
- # Chat input
65
  if prompt := st.chat_input("Type your message..."):
66
- # Add user message to chat history
67
  st.session_state.messages.append({"role": "user", "content": prompt})
68
 
69
- # Display user message
70
  with st.chat_message("user"):
71
  st.markdown(prompt)
72
-
73
  try:
74
- # Generate response using the model
75
  with st.spinner("Generating response..."):
76
  full_prompt = f"{system_message}\n\nUser: {prompt}\nAssistant:"
77
- response = generator(
78
- full_prompt,
79
- max_length=max_tokens,
80
- temperature=temperature,
81
- top_p=top_p,
82
- do_sample=True,
83
- num_return_sequences=1
84
- )[0]['generated_text']
85
 
86
- # Extract only the assistant's response
87
- assistant_response = response.split("Assistant:")[-1].strip()
88
-
89
- # Display assistant response
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
90
  with st.chat_message("assistant"):
91
  st.markdown(assistant_response)
92
 
93
- # Add assistant response to chat history
94
  st.session_state.messages.append({"role": "assistant", "content": assistant_response})
95
-
96
  except Exception as e:
97
- st.error(f"An error occurred: {str(e)}")
 
1
  # app.py
2
+ import os
3
+ import requests
4
  import streamlit as st
5
+ from models import get_hf_api
6
 
7
+ # Configure API
8
+ API_URL = get_hf_api()
9
+ headers = {"Authorization": f"Bearer {st.secrets['HF_TOKEN']}"}
10
 
11
  # Page configuration
12
  st.set_page_config(
 
19
  if "messages" not in st.session_state:
20
  st.session_state.messages = []
21
 
22
+ # Sidebar configuration
23
  with st.sidebar:
24
  st.header("Model Configuration")
25
+ st.markdown("[Get HuggingFace Token](https://huggingface.co/settings/tokens)")
26
 
 
27
  system_message = st.text_area(
28
  "System Message",
29
  value="You are a friendly Chatbot created by ruslanmv.com",
30
  height=100
31
  )
32
 
 
33
  max_tokens = st.slider(
34
  "Max Tokens",
35
+ 1, 4000, 512
 
 
 
36
  )
37
 
38
  temperature = st.slider(
39
  "Temperature",
40
+ 0.1, 4.0, 0.7
 
 
 
41
  )
42
 
43
  top_p = st.slider(
44
+ "Top-p",
45
+ 0.1, 1.0, 0.9
 
 
 
46
  )
47
 
48
+ # Chat interface
49
  st.title("🤖 DeepSeek Chatbot")
50
+ st.caption("Powered by Hugging Face Inference API - Configure in sidebar")
51
 
52
+ # Display chat history
53
  for message in st.session_state.messages:
54
  with st.chat_message(message["role"]):
55
  st.markdown(message["content"])
56
 
57
+ # Handle input
58
  if prompt := st.chat_input("Type your message..."):
 
59
  st.session_state.messages.append({"role": "user", "content": prompt})
60
 
 
61
  with st.chat_message("user"):
62
  st.markdown(prompt)
63
+
64
  try:
 
65
  with st.spinner("Generating response..."):
66
  full_prompt = f"{system_message}\n\nUser: {prompt}\nAssistant:"
 
 
 
 
 
 
 
 
67
 
68
+ response = requests.post(
69
+ API_URL,
70
+ headers=headers,
71
+ json={
72
+ "inputs": full_prompt,
73
+ "parameters": {
74
+ "max_new_tokens": max_tokens,
75
+ "temperature": temperature,
76
+ "top_p": top_p,
77
+ "return_full_text": False
78
+ }
79
+ }
80
+ ).json()
81
+
82
+ if isinstance(response, list) and len(response) > 0:
83
+ assistant_response = response[0].get('generated_text', '')
84
+ else:
85
+ assistant_response = "Error: Unexpected API response"
86
+
87
  with st.chat_message("assistant"):
88
  st.markdown(assistant_response)
89
 
 
90
  st.session_state.messages.append({"role": "assistant", "content": assistant_response})
91
+
92
  except Exception as e:
93
+ st.error(f"API Error: {str(e)}")