Tonic committed on
Commit
5fbed6e
·
1 Parent(s): baace61

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -5
app.py CHANGED
@@ -127,16 +127,15 @@ def add_file(history, task_history, file):
127
  def predict(_chatbot, task_history) -> tuple:
128
  print("predict called")
129
  chat_query, chat_response = _chatbot[-1]
130
-
131
  if isinstance(chat_query, tuple):
132
- query = [{'image': chat_query[0]}]
 
133
  else:
134
  query = [{'text': _parse_text(chat_query)}]
135
-
136
  inputs = tokenizer.from_list_format(query)
137
  tokenized_inputs = tokenizer(inputs, return_tensors='pt')
138
  tokenized_inputs = tokenized_inputs.to(model.device)
139
-
140
  pred = model.generate(**tokenized_inputs)
141
  response = tokenizer.decode(pred.cpu()[0], skip_special_tokens=False)
142
 
@@ -149,7 +148,7 @@ def predict(_chatbot, task_history) -> tuple:
149
  image_path = save_image(image, uploaded_file_dir)
150
  _chatbot[-1] = (chat_query, (image_path,))
151
  else:
152
- _chatbot[-1] = (chat_query, "No image to display.")
153
  else:
154
  _chatbot[-1] = (chat_query, response)
155
 
 
127
  def predict(_chatbot, task_history) -> tuple:
128
  print("predict called")
129
  chat_query, chat_response = _chatbot[-1]
 
130
  if isinstance(chat_query, tuple):
131
+ chat_query = chat_query[0]
132
+ query = [{'image': chat_query}]
133
  else:
134
  query = [{'text': _parse_text(chat_query)}]
135
+
136
  inputs = tokenizer.from_list_format(query)
137
  tokenized_inputs = tokenizer(inputs, return_tensors='pt')
138
  tokenized_inputs = tokenized_inputs.to(model.device)
 
139
  pred = model.generate(**tokenized_inputs)
140
  response = tokenizer.decode(pred.cpu()[0], skip_special_tokens=False)
141
 
 
148
  image_path = save_image(image, uploaded_file_dir)
149
  _chatbot[-1] = (chat_query, (image_path,))
150
  else:
151
+ _chatbot[-1] = (chat_query, response) # Make sure this is a tuple of two elements
152
  else:
153
  _chatbot[-1] = (chat_query, response)
154