ruslanmv committed
Commit a709247 · verified · 1 parent: 2ee149a

Update app.py

Files changed (1): app.py (+5 -4)
app.py CHANGED
@@ -9,7 +9,7 @@ client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 # Define a maximum context length (tokens). Check your model's documentation!
 MAX_CONTEXT_LENGTH = 4096  # Example: Adjust this based on your model!
 
-nvc_prompt_template = r"""<|system|>You are Roos, an NVC (Nonviolent Communication) Chatbot. Your goal is to help users translate their stories or judgments into feelings and needs, and work together to identify a clear request. Follow these steps:
+default_nvc_prompt_template = r"""<|system|>You are Roos, an NVC (Nonviolent Communication) Chatbot. Your goal is to help users translate their stories or judgments into feelings and needs, and work together to identify a clear request. Follow these steps:
 1. **Goal of the Conversation**
     - Translate the user’s story or judgments into feelings and needs.
     - Work together to identify a clear request, following these steps:
@@ -112,7 +112,7 @@ def truncate_history(history: list[tuple[str, str]], system_message: str, max_le
 def respond(
     message,
     history: list[tuple[str, str]],
-    system_message,
+    system_message,  # System message is now an argument
     max_tokens,
     temperature,
     top_p,
@@ -122,7 +122,7 @@ def respond(
     if message.lower() == "clear memory":  # Check for the clear memory command
         return "", []  # Return empty message and empty history to reset the chat
 
-    formatted_system_message = nvc_prompt_template
+    formatted_system_message = system_message  # Use the system_message argument
     truncated_history = truncate_history(history, formatted_system_message, MAX_CONTEXT_LENGTH - max_tokens - 100)  # Reserve space for the new message and some generation
 
     messages = [{"role": "system", "content": formatted_system_message}]  # Start with system message as before
@@ -154,7 +154,7 @@ def respond(
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
-        gr.Textbox(value=nvc_prompt_template, label="System message", visible=False),  # Set the NVC prompt as default and hide the system message box
+        gr.Textbox(value=default_nvc_prompt_template, label="System message", visible=True),  # System message is now visible
         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
         gr.Slider(
@@ -164,6 +164,7 @@ demo = gr.ChatInterface(
             step=0.05,
             label="Top-p (nucleus sampling)",
         ),
+        gr.Button("Clear Memory"),  # Keep the Clear Memory Button (as text command trigger)
     ],
 )
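
For context, the sketch below shows how the changed pieces fit together: gr.ChatInterface passes the value of each component in additional_inputs to the callback as extra positional arguments after (message, history), so the now-visible "System message" textbox arrives in respond as system_message. This is a minimal sketch, not the repository's full app.py: the prompt template is shortened to a placeholder, the body of truncate_history and the top-p slider bounds are not part of this diff and are filled in here as rough assumptions (a simple characters-per-token budget and placeholder values), the streaming InferenceClient call is replaced by an echo, and the Clear Memory button is omitted because the typed "clear memory" command already resets the chat.

import gradio as gr

MAX_CONTEXT_LENGTH = 4096  # tokens; adjust to the model actually being served

# Shortened placeholder; the real default is the long NVC system prompt shown in the diff.
default_nvc_prompt_template = "You are Roos, an NVC (Nonviolent Communication) chatbot."


def truncate_history(history, system_message, max_length):
    """Hypothetical stand-in for the app's helper: drop the oldest (user, assistant)
    turns until a rough character budget (~4 characters per token) is respected."""
    budget = max_length * 4 - len(system_message)
    kept, used = [], 0
    for user_msg, bot_msg in reversed(history):  # history is (user, assistant) tuples, per the diff's type hints
        turn_len = len(user_msg or "") + len(bot_msg or "")
        if used + turn_len > budget:
            break
        kept.append((user_msg, bot_msg))
        used += turn_len
    return list(reversed(kept))


def respond(message, history, system_message, max_tokens, temperature, top_p):
    # Parameters after (message, history) are filled from additional_inputs, in order.
    if message.lower() == "clear memory":
        return ""  # typed-command path; the real app also returns an empty history

    truncated_history = truncate_history(
        history, system_message, MAX_CONTEXT_LENGTH - max_tokens - 100
    )
    messages = [{"role": "system", "content": system_message}]
    for user_msg, bot_msg in truncated_history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    # The real app streams a reply from the InferenceClient here; echo a summary instead.
    return f"(would send {len(messages)} messages, temperature={temperature}, top_p={top_p})"


demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value=default_nvc_prompt_template, label="System message", visible=True),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.05, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
)

if __name__ == "__main__":
    demo.launch()

The MAX_CONTEXT_LENGTH - max_tokens - 100 budget mirrors the call site in the diff: it reserves room for the model's reply (max_tokens) plus a small margin for the new user message before older turns are trimmed.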