prateekbh committed on
Commit edf9cec · verified · 1 Parent(s): 450cedd

Update app.py

Files changed (1):
  1. app.py +5 -11
app.py CHANGED
@@ -28,8 +28,10 @@ class StopOnTokens(StoppingCriteria):
         return False
 
 @torch.no_grad()
-def response(message, history, image):
+def response(history, image):
+    gr.Info('Starting...')
     stop = StopOnTokens()
+    message = "please describe the image as an ecommerce product made out of wood"
 
     messages = [{"role": "system", "content": "You are a helpful assistant."}]
 
@@ -37,9 +39,6 @@ def response(message, history, image):
         messages.append({"role": "user", "content": user_msg})
         messages.append({"role": "assistant", "content": assistant_msg})
 
-    if len(messages) == 1:
-        message = f" <image>{message}"
-
     messages.append({"role": "user", "content": message})
 
     model_inputs = processor.tokenizer.apply_chat_template(
@@ -82,20 +81,15 @@ def response(message, history, image):
         history[-1][1] = partial_response
         yield history, gr.Button(visible=False), gr.Button(visible=True, interactive=True)
 
-def hello_world():
-    gr.Warning('This is a warning message.')
-    return "hello world"
-
 with gr.Blocks(css=css) as demo:
     with gr.Column(elem_id="col-container"):
         gr.HTML(title)
         image = gr.Image(type="pil")
-        submit = gr.Button(value="Upload", variant="primary")
         chat = gr.Chatbot(show_label=False)
-        message = gr.Textbox(interactive=True, show_label=False, container=False)
+        submit = gr.Button(value="Upload", variant="primary")
         response_handler = (
             response,
-            [message, chat, image],
+            [chat, image],
            [submit]
         )
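
For context, this commit drops the free-form textbox and hardcodes the prompt, so the handler now takes only the chat history and the uploaded image. The event wiring that consumes response_handler sits outside the changed hunks; the sketch below is a hypothetical, simplified reconstruction of how the new (fn, inputs, outputs) tuple could be bound to the Upload button. The stub response yields only the chat history and omits the model call and the button toggling from the real app.

import gradio as gr

# Stand-in for the updated handler signature: no user-typed message,
# only the chat history and the uploaded PIL image.
def response(history, image):
    gr.Info('Starting...')
    # Prompt hardcoded by this commit in place of the removed textbox input.
    message = "please describe the image as an ecommerce product made out of wood"
    history = (history or []) + [[message, "(generated description streams here)"]]
    yield history

with gr.Blocks() as demo:
    image = gr.Image(type="pil")
    chat = gr.Chatbot(show_label=False)
    submit = gr.Button(value="Upload", variant="primary")
    # Hypothetical binding: app.py builds response_handler = (fn, inputs, outputs)
    # and presumably unpacks it into a click event roughly like this.
    fn, inputs, outputs = response, [chat, image], [chat]
    submit.click(fn, inputs=inputs, outputs=outputs)

demo.launch()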