Spaces: Running
File size: 379 Bytes — commit 5ee37e6
# app.py
import gradio as gr
from colpali_engine.models import ColQwen2, ColQwen2Processor
import torch
import base64
from PIL import Image
import io
# Initialize the ColQwen2 retrieval model and its processor once at module
# load time so downstream Gradio handlers can reuse them across requests.
# (Removed a stray " |" extraction artifact after `model.eval()` that made
# the script a syntax error.)
model = ColQwen2.from_pretrained(
    "vidore/colqwen2-v0.1",
    torch_dtype=torch.bfloat16,  # halves memory vs fp32; needs bf16-capable hardware
    device_map="auto",           # place weights on GPU when available, else CPU
)
processor = ColQwen2Processor.from_pretrained("vidore/colqwen2-v0.1")
# Inference only: switch off dropout / training-mode behavior.
model = model.eval()