Update app.py
app.py
CHANGED
@@ -4,18 +4,18 @@ from transformers import pipeline
 from PIL import Image
 import io, os, traceback
 
-#
+# Ensure Hugging Face cache directories are writable
 os.environ["HF_HOME"] = "/app/cache"
 os.environ["TRANSFORMERS_CACHE"] = "/app/cache"
 os.environ["HF_HUB_CACHE"] = "/app/cache/hub"
 
 app = FastAPI()
 
-# Load
+# Load BLIP (stable image captioning model)
 pipe = pipeline(
     "image-to-text",
-    model="
-    device=-1 # CPU
+    model="Salesforce/blip-image-captioning-base",
+    device=-1 # CPU (works on free tier)
 )
 
 @app.get("/")
@@ -45,11 +45,11 @@ async def predict_gender(file: UploadFile = File(...)):
     image_bytes = await file.read()
     image = Image.open(io.BytesIO(image_bytes)).convert("RGB")
 
-    # Run
+    # Run BLIP captioning
     result = pipe(image, max_new_tokens=32)
     caption = result[0]["generated_text"].strip()
 
-    #
+    # Simple heuristic for gender detection
     gender = "unknown"
     lower_caption = caption.lower()
     if "male" in lower_caption or "man" in lower_caption:
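For reference, a minimal standalone sketch of the new captioning step and keyword heuristic, runnable outside the FastAPI app. The model id, max_new_tokens=32, and device=-1 mirror the diff; everything else is an assumption, since the female branch and the rest of the endpoint are not shown in the hunks. The sketch uses word-boundary matching rather than the diff's plain substring check, because "man" is a substring of "woman" and "male" of "female".

# Standalone sketch (assumptions noted above): BLIP captioning plus a
# word-boundary keyword heuristic, mirroring the pipeline call in the diff.
import re

from PIL import Image
from transformers import pipeline

pipe = pipeline(
    "image-to-text",
    model="Salesforce/blip-image-captioning-base",
    device=-1,  # CPU, as in the diff
)

def caption_to_gender(caption: str) -> str:
    """Map a caption to 'male' / 'female' / 'unknown' via keyword search."""
    text = caption.lower()
    # \b avoids matching "man" inside "woman" or "male" inside "female".
    if re.search(r"\b(woman|female)\b", text):
        return "female"
    if re.search(r"\b(man|male)\b", text):
        return "male"
    return "unknown"

if __name__ == "__main__":
    image = Image.open("example.jpg").convert("RGB")  # hypothetical local file
    caption = pipe(image, max_new_tokens=32)[0]["generated_text"].strip()
    print(caption, "->", caption_to_gender(caption))

The upload route and response shape of predict_gender are not part of the hunks shown, so they are not reproduced here.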