BenkHel committed on
Commit aaacb87 · verified · 1 Parent(s): 090efb5

Update app.py

Files changed (1)
  1. app.py +14 -6
app.py CHANGED
@@ -55,7 +55,9 @@ tokenizer, model, image_processor, context_len = load_pretrained_model(
 model.config.training = False
 
 # FIXED PROMPT
-FIXED_PROMPT = "<image>\nWhat material is this item and how to dispose of it?"
+FIXED_PROMPT = "What type of waste is this item and how to dispose of it?"
+INTERNAL_PROMPT = "<image>\nWhat type of waste is this item and how to dispose of it?"
+
 
 def clear_history():
     state = default_conversation.copy()
@@ -66,13 +68,15 @@ def add_text(state, imagebox, textbox, image_process_mode):
         state = conv_templates[conv_mode].copy()
 
     if imagebox is not None:
-        textbox = FIXED_PROMPT
         image = Image.open(imagebox).convert('RGB')
-        textbox = (textbox, image, image_process_mode)
-        state.append_message(state.roles[0], textbox)
-        state.append_message(state.roles[1], None)
+        visible_text = FIXED_PROMPT
+        model_prompt = (INTERNAL_PROMPT, image, image_process_mode)
+        state.append_message(state.roles[0], visible_text)
+        state.append_message(state.roles[1], None)
+        state.messages[-2].append(model_prompt)
     yield (state, state.to_gradio_chatbot(), "", None) + (disable_btn, disable_btn, disable_btn, enable_btn, enable_btn)
 
+
 def delete_text(state, image_process_mode):
     state.messages[-1][-1] = None
     prev_human_msg = state.messages[-2]
@@ -90,7 +94,11 @@ def regenerate(state, image_process_mode):
 
 @spaces.GPU
 def generate(state, imagebox, textbox, image_process_mode, temperature, top_p, max_output_tokens):
-    prompt = FIXED_PROMPT
+    hidden_data = state.messages[-2]
+    if len(hidden_data) == 3 and isinstance(hidden_data[2], tuple):
+        prompt, image, image_process_mode = hidden_data[2]
+    else:
+        prompt = FIXED_PROMPT
     images = state.get_images(return_pil=True)
 
     ori_prompt = prompt
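The point of the change is to decouple what the chat UI displays from what the model receives: add_text appends the visible FIXED_PROMPT, attaches the (INTERNAL_PROMPT, image, image_process_mode) tuple as a third element of the same user message, and generate unpacks that tuple when it is present. Below is a minimal standalone sketch of that round trip, assuming the diff above reflects the final code; the FakeState class and sample values are illustrative stand-ins for the real conversation object in app.py, not part of the Space.

```python
from PIL import Image

FIXED_PROMPT = "What type of waste is this item and how to dispose of it?"
INTERNAL_PROMPT = "<image>\nWhat type of waste is this item and how to dispose of it?"

class FakeState:
    """Illustrative stand-in for the conversation state used in app.py."""
    def __init__(self):
        self.roles = ("USER", "ASSISTANT")
        self.messages = []

    def append_message(self, role, message):
        self.messages.append([role, message])

def add_text(state, image, image_process_mode="Default"):
    # Visible chat text shown in the Gradio chatbot.
    state.append_message(state.roles[0], FIXED_PROMPT)
    state.append_message(state.roles[1], None)
    # Hidden model prompt: appended as a third element of the user message.
    state.messages[-2].append((INTERNAL_PROMPT, image, image_process_mode))

def pick_prompt(state):
    # Mirrors the lookup in generate(): use the hidden tuple if present,
    # otherwise fall back to the plain fixed prompt.
    hidden_data = state.messages[-2]
    if len(hidden_data) == 3 and isinstance(hidden_data[2], tuple):
        prompt, _image, _mode = hidden_data[2]
    else:
        prompt = FIXED_PROMPT
    return prompt

state = FakeState()
add_text(state, Image.new("RGB", (8, 8)))
print(pick_prompt(state))  # prints the "<image>\n..." internal prompt
```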