rodrisouza committed on
Commit
fd95420
·
verified ·
1 Parent(s): 65d7049

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -11
app.py CHANGED
@@ -85,6 +85,15 @@ def interact(user_input, history, interaction_count):
85
 
86
  prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
87
 
 
 
 
 
 
 
 
 
 
88
  # Generate response using selected model
89
  input_ids = tokenizer(prompt, return_tensors='pt').input_ids.to("cuda")
90
  chat_history_ids = model.generate(input_ids, max_new_tokens=100, pad_token_id=tokenizer.eos_token_id) # Increase max_new_tokens
@@ -94,13 +103,6 @@ def interact(user_input, history, interaction_count):
94
  history.append({"role": "user", "content": user_input})
95
  history.append({"role": "assistant", "content": response})
96
 
97
- # Check if the maximum number of interactions has been reached
98
- interaction_count += 1
99
- print(f"Interaction count: {interaction_count}") # Print the interaction count
100
- if interaction_count >= MAX_INTERACTIONS:
101
- farewell_message = "Thank you for the conversation! Have a great day!"
102
- history.append({"role": "assistant", "content": farewell_message})
103
-
104
  formatted_history = [(entry["content"], None) if entry["role"] == "user" else (None, entry["content"]) for entry in history if entry["role"] in ["user", "assistant"]]
105
  return "", formatted_history, history, interaction_count
106
  except Exception as e:
@@ -111,7 +113,9 @@ def interact(user_input, history, interaction_count):
111
 
112
  # Function to send selected story and initial message
113
  def send_selected_story(title, model_name, system_prompt):
114
- global chat_history, selected_story, data
 
 
115
  data = [] # Reset data for new story
116
  tokenizer, model = load_model(model_name)
117
  selected_story = title
@@ -212,10 +216,10 @@ with gr.Blocks() as demo:
212
  data_table = gr.DataFrame(headers=["User Input", "Chat Response", "Score", "Comment"])
213
 
214
  chat_history_json = gr.JSON(value=[], visible=False)
215
- interaction_state = gr.State(0)
216
 
217
- send_story_button.click(fn=send_selected_story, inputs=[story_dropdown, model_dropdown, system_prompt_dropdown], outputs=[chatbot_output, chat_history_json, data_table, selected_story_textbox, interaction_state])
218
- send_message_button.click(fn=interact, inputs=[chatbot_input, chat_history_json, interaction_state], outputs=[chatbot_input, chatbot_output, chat_history_json, interaction_state])
219
  save_button.click(fn=save_comment_score, inputs=[chatbot_output, score_input, comment_input, story_dropdown, user_dropdown, system_prompt_dropdown], outputs=[data_table, comment_input])
220
 
221
  demo.launch()
 
85
 
86
  prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
87
 
88
+ # Check if the maximum number of interactions has been reached
89
+ interaction_count += 1
90
+ print(f"Interaction count: {interaction_count}") # Print the interaction count
91
+ if interaction_count >= MAX_INTERACTIONS:
92
+ farewell_message = "Thank you for the conversation! Have a great day!"
93
+ history.append({"role": "assistant", "content": farewell_message})
94
+ formatted_history = [(entry["content"], None) if entry["role"] == "user" else (None, entry["content"]) for entry in history if entry["role"] in ["user", "assistant"]]
95
+ return "", formatted_history, history, interaction_count
96
+
97
  # Generate response using selected model
98
  input_ids = tokenizer(prompt, return_tensors='pt').input_ids.to("cuda")
99
  chat_history_ids = model.generate(input_ids, max_new_tokens=100, pad_token_id=tokenizer.eos_token_id) # Increase max_new_tokens
 
103
  history.append({"role": "user", "content": user_input})
104
  history.append({"role": "assistant", "content": response})
105
 
 
 
 
 
 
 
 
106
  formatted_history = [(entry["content"], None) if entry["role"] == "user" else (None, entry["content"]) for entry in history if entry["role"] in ["user", "assistant"]]
107
  return "", formatted_history, history, interaction_count
108
  except Exception as e:
 
113
 
114
  # Function to send selected story and initial message
115
  def send_selected_story(title, model_name, system_prompt):
116
+ global chat_history
117
+ global selected_story
118
+ global data # Ensure data is reset
119
  data = [] # Reset data for new story
120
  tokenizer, model = load_model(model_name)
121
  selected_story = title
 
216
  data_table = gr.DataFrame(headers=["User Input", "Chat Response", "Score", "Comment"])
217
 
218
  chat_history_json = gr.JSON(value=[], visible=False)
219
+ interaction_count_state = gr.State(0)
220
 
221
+ send_story_button.click(fn=send_selected_story, inputs=[story_dropdown, model_dropdown, system_prompt_dropdown], outputs=[chatbot_output, chat_history_json, data_table, selected_story_textbox, interaction_count_state])
222
+ send_message_button.click(fn=interact, inputs=[chatbot_input, chat_history_json, interaction_count_state], outputs=[chatbot_input, chatbot_output, chat_history_json, interaction_count_state])
223
  save_button.click(fn=save_comment_score, inputs=[chatbot_output, score_input, comment_input, story_dropdown, user_dropdown, system_prompt_dropdown], outputs=[data_table, comment_input])
224
 
225
  demo.launch()