burhan112 committed on
Commit c4d212f · verified · 1 Parent(s): 8df60f2

Update qa.py

Files changed (1):
  qa.py +13 -0
qa.py CHANGED
@@ -1,3 +1,16 @@
+import torch
+from PIL import Image
+from transformers import Blip2Processor, Blip2ForConditionalGeneration, BitsAndBytesConfig
+
+# Load model and processor
+device = "cuda" if torch.cuda.is_available() else "cpu"
+quantization_config = BitsAndBytesConfig(load_in_8bit=True)
+
+processor = Blip2Processor.from_pretrained("Salesforce/blip2-flan-t5-xl")
+model = Blip2ForConditionalGeneration.from_pretrained(
+    "Salesforce/blip2-flan-t5-xl", device_map="auto"
+)
+
 def get_image_answer(image: Image.Image, question: str) -> str:
     if image.mode != "RGB":
         image = image.convert("RGB")
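Note: as committed, quantization_config is defined but never passed to from_pretrained, so the model is not actually loaded in 8-bit. The commit also cuts off before the rest of get_image_answer. Below is a minimal sketch of how the added processor and model would typically be used, assuming the standard transformers BLIP-2 generate/decode pattern; the quantization_config hookup and the max_new_tokens value are assumptions, not part of this commit.

# Sketch only — not part of the commit.
# To actually load the model in 8-bit, the config defined above would
# typically be passed when loading it:
# model = Blip2ForConditionalGeneration.from_pretrained(
#     "Salesforce/blip2-flan-t5-xl",
#     device_map="auto",
#     quantization_config=quantization_config,
# )

def get_image_answer(image: Image.Image, question: str) -> str:
    if image.mode != "RGB":
        image = image.convert("RGB")
    # Prepare the image and question for the model
    inputs = processor(images=image, text=question, return_tensors="pt").to(device)
    # Generate and decode the answer (max_new_tokens=50 is an assumed value)
    output_ids = model.generate(**inputs, max_new_tokens=50)
    return processor.decode(output_ids[0], skip_special_tokens=True).strip()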