KaiChen1998 committed on
Commit
dfd1066
·
1 Parent(s): 0b75061
Files changed (1) hide show
  1. app.py +20 -0
app.py CHANGED
@@ -182,7 +182,27 @@ def http_bot(state):
182
  state.messages[-1][-1] = server_error_msg
183
  yield (state, state.to_gradio_chatbot_public()) + (enable_btn,) * 2
184
  return
 
 
185
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
186
  # caption_text = run_mllm_caption(image_tensor, cap_prompt, qa_prompt)
187
  # state.append_message(state.roles[1], "# Caption\n\n" + caption_text)
188
  # logging.info("# Caption\n\n" + caption_text)
 
182
  state.messages[-1][-1] = server_error_msg
183
  yield (state, state.to_gradio_chatbot_public()) + (enable_btn,) * 2
184
  return
185
+ state.messages[-1][-1] = state.messages[-1][-1][:-1]
186
+ yield (state, state.to_gradio_chatbot_public()) + (disable_btn,) * 2
187
 
188
+ # Step 2: Query-conditioned Caption
189
+ state.append_message(state.roles[1], "# Query-conditioned Caption\n\n▌")
190
+ try:
191
+ for generated_text in stream_response(mllm, cap_inputs, mllm_streamer, cap_prompt, mllm_sampling):
192
+ output = generated_text[len(cap_prompt):].strip()
193
+ state.messages[-1][-1] = "# Query-conditioned Caption\n\n" + output + "▌"
194
+ yield (state, state.to_gradio_chatbot_public()) + (disable_btn,) * 2
195
+ except Exception as e:
196
+ os.system("nvidia-smi")
197
+ logging.info(traceback.print_exc())
198
+ state.messages[-1][-1] = server_error_msg
199
+ yield (state, state.to_gradio_chatbot_public()) + (enable_btn,) * 2
200
+ return
201
+ state.messages[-1][-1] = state.messages[-1][-1][:-1]
202
+ yield (state, state.to_gradio_chatbot_public()) + (disable_btn,) * 2
203
+
204
+
205
+
206
  # caption_text = run_mllm_caption(image_tensor, cap_prompt, qa_prompt)
207
  # state.append_message(state.roles[1], "# Caption\n\n" + caption_text)
208
  # logging.info("# Caption\n\n" + caption_text)