rodrisouza committed on
Commit
000aa75
·
verified ·
1 Parent(s): 9f44b5a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -13
app.py CHANGED
@@ -39,9 +39,6 @@ tokenizer, model = None, None
39
  # Initialize the data list
40
  data = []
41
 
42
- # Initialize interaction count
43
- interaction_count = 0
44
-
45
  # Load the model and tokenizer once at the beginning
46
  def load_model(model_name):
47
  global tokenizer, model, selected_model
@@ -73,8 +70,8 @@ chat_history = []
73
 
74
  # Function to handle interaction with model
75
  @spaces.GPU
76
- def interact(user_input, history):
77
- global tokenizer, model, interaction_count
78
  try:
79
  if tokenizer is None or model is None:
80
  raise ValueError("Tokenizer or model is not initialized.")
@@ -88,7 +85,7 @@ def interact(user_input, history):
88
  farewell_message = "Thank you for the conversation! Have a great day!"
89
  history.append({"role": "assistant", "content": farewell_message})
90
  formatted_history = [(entry["content"], None) if entry["role"] == "user" else (None, entry["content"]) for entry in history if entry["role"] in ["user", "assistant"]]
91
- return "", formatted_history, history
92
 
93
  messages = history + [{"role": "user", "content": user_input}]
94
 
@@ -109,7 +106,7 @@ def interact(user_input, history):
109
  history.append({"role": "assistant", "content": response})
110
 
111
  formatted_history = [(entry["content"], None) if entry["role"] == "user" else (None, entry["content"]) for entry in history if entry["role"] in ["user", "assistant"]]
112
- return "", formatted_history, history
113
  except Exception as e:
114
  if torch.cuda.is_available():
115
  torch.cuda.empty_cache()
@@ -118,9 +115,8 @@ def interact(user_input, history):
118
 
119
  # Function to send selected story and initial message
120
  def send_selected_story(title, model_name, system_prompt):
121
- global chat_history, selected_story, data, interaction_count
122
  data = [] # Reset data for new story
123
- interaction_count = 0 # Reset interaction count
124
  tokenizer, model = load_model(model_name)
125
  selected_story = title
126
  story_text = ""
@@ -141,9 +137,9 @@ Here is the story:
141
 
142
  # Generate the first question based on the story
143
  question_prompt = "Please ask a simple question about the story to encourage interaction."
144
- _, formatted_history, chat_history = interact(question_prompt, chat_history)
145
 
146
- return formatted_history, chat_history, gr.update(value=[]), gr.update(value=story_text) # Reset the data table and update the selected story textbox
147
  else:
148
  print("Combined message is empty.")
149
  else:
@@ -220,9 +216,10 @@ with gr.Blocks() as demo:
220
  data_table = gr.DataFrame(headers=["User Input", "Chat Response", "Score", "Comment"])
221
 
222
  chat_history_json = gr.JSON(value=[], visible=False)
 
223
 
224
- send_story_button.click(fn=send_selected_story, inputs=[story_dropdown, model_dropdown, system_prompt_dropdown], outputs=[chatbot_output, chat_history_json, data_table, selected_story_textbox])
225
- send_message_button.click(fn=interact, inputs=[chatbot_input, chat_history_json], outputs=[chatbot_input, chatbot_output, chat_history_json])
226
  save_button.click(fn=save_comment_score, inputs=[chatbot_output, score_input, comment_input, story_dropdown, user_dropdown, system_prompt_dropdown], outputs=[data_table, comment_input])
227
 
228
  demo.launch()
 
39
  # Initialize the data list
40
  data = []
41
 
 
 
 
42
  # Load the model and tokenizer once at the beginning
43
  def load_model(model_name):
44
  global tokenizer, model, selected_model
 
70
 
71
  # Function to handle interaction with model
72
  @spaces.GPU
73
+ def interact(user_input, history, interaction_count):
74
+ global tokenizer, model
75
  try:
76
  if tokenizer is None or model is None:
77
  raise ValueError("Tokenizer or model is not initialized.")
 
85
  farewell_message = "Thank you for the conversation! Have a great day!"
86
  history.append({"role": "assistant", "content": farewell_message})
87
  formatted_history = [(entry["content"], None) if entry["role"] == "user" else (None, entry["content"]) for entry in history if entry["role"] in ["user", "assistant"]]
88
+ return "", formatted_history, history, interaction_count
89
 
90
  messages = history + [{"role": "user", "content": user_input}]
91
 
 
106
  history.append({"role": "assistant", "content": response})
107
 
108
  formatted_history = [(entry["content"], None) if entry["role"] == "user" else (None, entry["content"]) for entry in history if entry["role"] in ["user", "assistant"]]
109
+ return "", formatted_history, history, interaction_count
110
  except Exception as e:
111
  if torch.cuda.is_available():
112
  torch.cuda.empty_cache()
 
115
 
116
  # Function to send selected story and initial message
117
  def send_selected_story(title, model_name, system_prompt):
118
+ global chat_history, selected_story, data
119
  data = [] # Reset data for new story
 
120
  tokenizer, model = load_model(model_name)
121
  selected_story = title
122
  story_text = ""
 
137
 
138
  # Generate the first question based on the story
139
  question_prompt = "Please ask a simple question about the story to encourage interaction."
140
+ _, formatted_history, chat_history, interaction_count = interact(question_prompt, chat_history, 0)
141
 
142
+ return formatted_history, chat_history, gr.update(value=[]), gr.update(value=story_text), interaction_count # Reset the data table and update the selected story textbox
143
  else:
144
  print("Combined message is empty.")
145
  else:
 
216
  data_table = gr.DataFrame(headers=["User Input", "Chat Response", "Score", "Comment"])
217
 
218
  chat_history_json = gr.JSON(value=[], visible=False)
219
+ interaction_state = gr.State(0)
220
 
221
+ send_story_button.click(fn=send_selected_story, inputs=[story_dropdown, model_dropdown, system_prompt_dropdown], outputs=[chatbot_output, chat_history_json, data_table, selected_story_textbox, interaction_state])
222
+ send_message_button.click(fn=interact, inputs=[chatbot_input, chat_history_json, interaction_state], outputs=[chatbot_input, chatbot_output, chat_history_json, interaction_state])
223
  save_button.click(fn=save_comment_score, inputs=[chatbot_output, score_input, comment_input, story_dropdown, user_dropdown, system_prompt_dropdown], outputs=[data_table, comment_input])
224
 
225
  demo.launch()