ruslanmv committed · verified
Commit dbeaecd · 1 Parent(s): dfdfe2f

Update app.py

Files changed (1): app.py +23 -9
app.py CHANGED
@@ -1,13 +1,13 @@
 import gradio as gr
+import spaces
 from functools import lru_cache
 
 # Cache model loading to optimize performance
 @lru_cache(maxsize=3)
 def load_hf_model(model_name):
-    # Use the Gradio-built huggingface loader instead of transformers_gradio
     return gr.load(
         name=f"deepseek-ai/{model_name}",
-        src="huggingface",  # Changed from transformers_gradio.registry
+        src="huggingface",
         api_name="/chat"
     )
 
@@ -20,11 +20,13 @@ MODELS = {
 
 # --- Chatbot function ---
 def chatbot(input_text, history, model_choice, system_message, max_new_tokens, temperature, top_p):
-    history = history or []
-
-    # Get the selected model component
+    # If history is empty, initialize it as a list
+    if history is None:
+        history = []
+
+    # Select the model
     model_component = MODELS[model_choice]
-
+
     # Create payload for the model
     payload = {
         "messages": [{"role": "user", "content": input_text}],
@@ -33,16 +35,22 @@ def chatbot(input_text, history, model_choice, system_message, max_new_tokens, t
         "temperature": temperature,
         "top_p": top_p
     }
-
+
     # Run inference using the selected model
     try:
         response = model_component(payload)
         assistant_response = response[-1]["content"]
     except Exception as e:
         assistant_response = f"Error: {str(e)}"
-
+
+    # Append user and assistant messages in the new format
     history.append({"role": "user", "content": input_text})
     history.append({"role": "assistant", "content": assistant_response})
+
+    # Return the updated conversation to display and store
+    # 1) chatbot_output = updated history of messages
+    # 2) chat_history = same updated history (as state)
+    # 3) "" to clear the input textbox
     return history, history, ""
 
 # --- Gradio Interface ---
@@ -58,7 +66,8 @@ with gr.Blocks(theme=gr.themes.Soft(), title="DeepSeek Chatbot") as demo:
 
     with gr.Row():
         with gr.Column():
-            chatbot_output = gr.Chatbot(label="DeepSeek Chatbot", height=500, type='messages')
+            # Use type='messages' for OpenAI-style messages
+            chatbot_output = gr.Chatbot(label="DeepSeek Chatbot", height=500, type="messages")
             msg = gr.Textbox(label="Your Message", placeholder="Type your message here...")
             with gr.Row():
                 submit_btn = gr.Button("Submit", variant="primary")
@@ -101,5 +110,10 @@ with gr.Blocks(theme=gr.themes.Soft(), title="DeepSeek Chatbot") as demo:
         [chatbot_output, chat_history, msg]
     )
 
+# (Optional) Remove or modify references to spaces.GPU() if you do not need GPU management
 if __name__ == "__main__":
+    # Just launch regularly if you don't need spaces.GPU() for hardware acceleration
     demo.launch()
+
+    # If you require GPU on Hugging Face Spaces, you can wrap demo.launch like so instead:
+    # spaces.GPU()(demo.launch)()
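
Note for reviewers: with gr.Chatbot(type="messages"), the component consumes OpenAI-style role/content dicts rather than [user, assistant] pairs, which is why chatbot() now appends message dicts and returns history, history, "". A minimal, self-contained sketch of that flow, with the remote gr.load(...) model component replaced by a hypothetical echo_model stub (not part of this commit) so it runs without loading any Space:

import gradio as gr

# Hypothetical stand-in for the gr.load(...) model component used in app.py.
def echo_model(payload):
    return [{"role": "assistant", "content": f"Echo: {payload['messages'][-1]['content']}"}]

def chat(user_text, history):
    # Same history handling as the updated chatbot(): a list of role/content dicts.
    if history is None:
        history = []
    payload = {"messages": history + [{"role": "user", "content": user_text}]}
    reply = echo_model(payload)[-1]["content"]
    history.append({"role": "user", "content": user_text})
    history.append({"role": "assistant", "content": reply})
    # Update the Chatbot display, the stored state, and clear the textbox.
    return history, history, ""

with gr.Blocks() as demo:
    chatbot_output = gr.Chatbot(type="messages")
    chat_history = gr.State([])
    msg = gr.Textbox(label="Your Message")
    msg.submit(chat, [msg, chat_history], [chatbot_output, chat_history, msg])

if __name__ == "__main__":
    demo.launch()

The three-output pattern (display, state, cleared textbox) mirrors the return history, history, "" line introduced in this commit.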