AnkitS1997 committed on
Commit
70cde98
·
1 Parent(s): e9cb74e

updated port

Browse files
.ipynb_checkpoints/app-checkpoint.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import FastAPI, File, UploadFile
2
+ from PIL import Image
3
+ from transformers import AutoProcessor, Blip2ForConditionalGeneration
4
+ import torch
5
+ import io
6
+
7
+ app = FastAPI()
8
+
9
+ # Load the model and processor
10
+ model = Blip2ForConditionalGeneration.from_pretrained("ybelkada/blip2-opt-2.7b-fp16-sharded")
11
+ model.load_adapter('blip-cpu-model')
12
+ processor = AutoProcessor.from_pretrained("Salesforce/blip2-opt-2.7b")
13
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
14
+ model.to(device)
15
+
16
+ @app.post("/generate-caption/")
17
+ async def generate_caption(file: UploadFile = File(...)):
18
+ image = Image.open(io.BytesIO(await file.read()))
19
+ inputs = processor(images=image, return_tensors="pt").to(device, torch.float16)
20
+
21
+ with torch.no_grad():
22
+ caption_ids = model.generate(**inputs, max_length=128)
23
+ caption = processor.decode(caption_ids[0], skip_special_tokens=True)
24
+
25
+ return {"caption": caption}
.ipynb_checkpoints/start-checkpoint.sh ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ #!/bin/bash
2
+
3
+ # Start FastAPI
4
+ uvicorn app:app --host 0.0.0.0 --port 8502 &
5
+
6
+ # Start Streamlit
7
+ streamlit run streamlit_app.py --server.port=8501 --server.address=0.0.0.0
.ipynb_checkpoints/streamlit_app-checkpoint.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import requests
3
+ from PIL import Image
4
+
5
+ st.title("Image Captioning with Fine-Tuned BLiPv2 Model")
6
+
7
+ uploaded_file = st.file_uploader("Choose an image...", type=["jpg", "jpeg", "png"])
8
+
9
+ if uploaded_file is not None:
10
+ image = Image.open(uploaded_file)
11
+ st.image(image, caption="Uploaded Image", use_column_width=True)
12
+
13
+ files = {"file": uploaded_file.getvalue()}
14
+ response = requests.post("http://localhost:8502/generate-caption/", files=files)
15
+ caption = response.json().get("caption")
16
+
17
+ st.write("Generated Caption:")
18
+ st.write(f"**{caption}**")
start.sh CHANGED
@@ -1,7 +1,7 @@
1
  #!/bin/bash
2
 
3
  # Start FastAPI
4
- uvicorn app:app --host 0.0.0.0 --port 8501 &
5
 
6
  # Start Streamlit
7
- streamlit run streamlit_app.py --server.port=8502 --server.address=0.0.0.0
 
1
  #!/bin/bash
2
 
3
  # Start FastAPI
4
+ uvicorn app:app --host 0.0.0.0 --port 8502 &
5
 
6
  # Start Streamlit
7
+ streamlit run streamlit_app.py --server.port=8501 --server.address=0.0.0.0
streamlit_app.py CHANGED
@@ -11,7 +11,7 @@ if uploaded_file is not None:
11
  st.image(image, caption="Uploaded Image", use_column_width=True)
12
 
13
  files = {"file": uploaded_file.getvalue()}
14
- response = requests.post("http://localhost:8501/generate-caption/", files=files)
15
  caption = response.json().get("caption")
16
 
17
  st.write("Generated Caption:")
 
11
  st.image(image, caption="Uploaded Image", use_column_width=True)
12
 
13
  files = {"file": uploaded_file.getvalue()}
14
+ response = requests.post("http://localhost:8502/generate-caption/", files=files)
15
  caption = response.json().get("caption")
16
 
17
  st.write("Generated Caption:")