darknoon committed
Commit 85c421c · 1 Parent(s): 8c8fdb8

misc, idk why this space isn't showing up!

Files changed (1)
1. app.py (+7 -3)
app.py CHANGED
@@ -1,6 +1,5 @@
 import gradio as gr
 import spaces
-from huggingface_hub import InferenceClient
 import torch
 from transformers import AutoModelForCausalLM, ChameleonProcessor, AutoTokenizer, StoppingCriteria, StoppingCriteriaList, TextIteratorStreamer
 from threading import Thread
@@ -15,7 +14,12 @@ model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.bfloa
 model.eval()
 processor = ChameleonProcessor.from_pretrained(model_path, use_auth_token=True)
 tokenizer = processor.tokenizer
-image = Image.open(requests.get("https://uploads4.wikiart.org/images/paul-klee/death-for-the-idea-1915.jpg!Large.jpg", stream=True).raw)
+
+def load_example_image():
+    global image
+    if not image:
+        image = Image.open(requests.get("https://uploads4.wikiart.org/images/paul-klee/death-for-the-idea-1915.jpg!Large.jpg", stream=True).raw)
+    return image
 
 @spaces.GPU(duration=90)
 def respond(
@@ -39,6 +43,7 @@ def respond(
     response = ""
 
     prompt = "I'm very intrigued by this work of art:<image>Please tell me about the artist."
+    image = load_example_image()
 
     inputs = processor(prompt, images=[image], return_tensors="pt").to(model.device, dtype=torch.bfloat16)
 
@@ -60,7 +65,6 @@ For information on how to customize the ChatInterface, peruse the gradio docs: h
 """
 demo = gr.ChatInterface(
     respond,
-    multimodal=True,
     additional_inputs=[
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
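
One observation on the new helper (not part of the commit, possibly related to the "space isn't showing up" message): load_example_image() declares global image, but the module-level image = ... assignment was removed in this same commit, so the first call hits if not image: with image undefined and raises NameError. A minimal sketch of a lazy loader that avoids this, assuming PIL's Image and requests are already imported in app.py; the names EXAMPLE_IMAGE_URL and _example_image are mine, not from the commit:

import requests
from PIL import Image

EXAMPLE_IMAGE_URL = "https://uploads4.wikiart.org/images/paul-klee/death-for-the-idea-1915.jpg!Large.jpg"

# Hypothetical cache variable (the commit reuses the name `image`); initializing
# it to None means the "not loaded yet" check never touches an undefined name.
_example_image = None


def load_example_image():
    """Download the example artwork once and reuse it on later calls."""
    global _example_image
    if _example_image is None:
        # Same download pattern as the committed code: stream the response and
        # hand the raw file-like object to PIL.
        _example_image = Image.open(
            requests.get(EXAMPLE_IMAGE_URL, stream=True).raw
        )
    return _example_image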