zb9 committed on
Commit
ab1f8dc
·
verified ·
1 Parent(s): 5ee37e6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +32 -2
app.py CHANGED
@@ -1,16 +1,46 @@
1
- # app.py
2
  import gradio as gr
3
  from colpali_engine.models import ColQwen2, ColQwen2Processor
4
  import torch
5
  import base64
6
  from PIL import Image
7
  import io
 
 
 
 
 
8
 
9
  # Initialize model
 
10
  model = ColQwen2.from_pretrained(
11
  "vidore/colqwen2-v0.1",
12
  torch_dtype=torch.bfloat16,
13
  device_map="auto",
14
  )
15
  processor = ColQwen2Processor.from_pretrained("vidore/colqwen2-v0.1")
16
- model = model.eval()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Standard library
import base64
import io
import logging

# Third-party
import gradio as gr
import torch
from colpali_engine.models import ColQwen2, ColQwen2Processor
from PIL import Image

# Configure application-wide logging once at import time.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("colqwen-api")

# Load the ColQwen2 vision-retrieval model and its paired processor a single
# time at startup; `device_map="auto"` lets HF place weights on GPU if present.
logger.info("Loading ColQwen2 model...")
model = ColQwen2.from_pretrained(
    "vidore/colqwen2-v0.1",
    torch_dtype=torch.bfloat16,
    device_map="auto",
)
processor = ColQwen2Processor.from_pretrained("vidore/colqwen2-v0.1")
model = model.eval()
logger.info("Model loaded successfully")
23
+
24
def process_image(image_data):
    """Compute ColQwen2 multi-vector embeddings for a single image.

    Parameters
    ----------
    image_data :
        The image delivered by the Gradio input component (presumably a
        PIL image or numpy array accepted by ``processor.process_images``
        — TODO confirm against the Gradio component's output type).

    Returns
    -------
    dict
        ``{"embeddings": nested list}`` — the embedding tensor converted
        to plain Python lists so it is JSON-serializable.

    Raises
    ------
    Exception
        Any preprocessing or inference error is logged with a traceback
        and re-raised so Gradio surfaces it to the caller.
    """
    try:
        logger.info("Processing image")
        processed = processor.process_images([image_data])
        # FIX: move inputs to the model's device. With device_map="auto"
        # the model may live on GPU while the processor returns CPU
        # tensors, which would otherwise raise a device-mismatch error.
        processed = processed.to(model.device)
        logger.info("Image processed")

        with torch.no_grad():
            embeddings = model(**processed)
        # Lazy %-args avoid paying the formatting cost when INFO is off.
        logger.info("Embeddings generated: %s", embeddings.shape)

        # Cast bfloat16 -> float32 on CPU before tolist() so the values
        # serialize cleanly to JSON.
        return {"embeddings": embeddings.cpu().float().tolist()}
    except Exception as e:
        logger.error("Error: %s", str(e), exc_info=True)
        raise
38
+
39
# Expose process_image through a minimal Gradio web UI / API:
# one image in, a JSON payload of its embeddings out.
interface = gr.Interface(
    fn=process_image,
    inputs=gr.Image(),
    outputs="json",
    title="ColQwen2 Embedding API",
)

# Start the HTTP server (blocks until shutdown).
interface.launch()