# Hugging Face Spaces app: ColQwen2 image-embedding API served via Gradio.
import gradio as gr
from colpali_engine.models import ColQwen2, ColQwen2Processor
import torch
import base64
from PIL import Image
import io
import logging

# --- Logging setup ---
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("colqwen-api")

# --- Model initialization (runs once, eagerly, at import time) ---
logger.info("Loading ColQwen2 model...")
model = ColQwen2.from_pretrained(
    "vidore/colqwen2-v0.1",
    torch_dtype=torch.bfloat16,        # half-precision weights to cut memory
    device_map="auto",                 # place weights on GPU(s) automatically when available
)
processor = ColQwen2Processor.from_pretrained("vidore/colqwen2-v0.1")
model = model.eval()                   # inference mode: disables dropout etc.
logger.info("Model loaded successfully")
def process_image(image_data):
    """Compute ColQwen2 multi-vector embeddings for a single image.

    Args:
        image_data: Image supplied by the Gradio input component. Gradio's
            default ``gr.Image()`` delivers a numpy array while the ColPali
            processor expects PIL images, so both forms are accepted here.

    Returns:
        dict: ``{"embeddings": [...]}`` — the embedding tensor converted to
        nested Python lists so it is JSON-serializable.

    Raises:
        Exception: Any processing/inference error is logged with a traceback
        and re-raised so Gradio surfaces it to the client.
    """
    try:
        logger.info("Processing image")
        # Normalize to PIL: gr.Image() defaults to numpy arrays, but
        # processor.process_images expects PIL images.
        if not isinstance(image_data, Image.Image):
            image_data = Image.fromarray(image_data)
        processed = processor.process_images([image_data])
        # Move inputs to the model's device: device_map="auto" may have placed
        # the weights on GPU while the processor returns CPU tensors.
        processed = processed.to(model.device)
        logger.info("Image processed")
        with torch.no_grad():
            embeddings = model(**processed)
        # Lazy %-style args avoid formatting work when INFO is disabled.
        logger.info("Embeddings generated: %s", embeddings.shape)
        # cpu() before tolist(); float() upcasts bfloat16 to a JSON-friendly dtype.
        return {"embeddings": embeddings.cpu().float().tolist()}
    except Exception as e:
        logger.error("Error: %s", e, exc_info=True)
        raise
# Gradio UI / API surface: one image in, JSON embeddings out.
interface = gr.Interface(
    fn=process_image,
    # type="pil" hands the handler a PIL image, matching what
    # ColQwen2Processor.process_images expects.
    inputs=gr.Image(type="pil"),
    outputs="json",
    title="ColQwen2 Embedding API",
)

# Guard the launch so importing this module (e.g. from tests) does not
# start the server as a side effect.
if __name__ == "__main__":
    interface.launch()