import io
import os
import traceback

# Ensure Hugging Face cache directories are writable.
# These must be set before importing transformers, which reads them at import time.
os.environ["HF_HOME"] = "/app/cache"
os.environ["TRANSFORMERS_CACHE"] = "/app/cache"
os.environ["HF_HUB_CACHE"] = "/app/cache/hub"

from fastapi import FastAPI, File, UploadFile
from fastapi.responses import JSONResponse, HTMLResponse
from PIL import Image
from transformers import pipeline

app = FastAPI()

# Load BLIP (stable image captioning model)
pipe = pipeline(
    "image-to-text",
    model="Salesforce/blip-image-captioning-base",
    device=-1,  # CPU (works on free tier)
)


@app.get("/")
def home():
    return {
        "message": "API is running. Use POST /predict with an image, or visit /upload to test in browser."
    }


@app.get("/upload", response_class=HTMLResponse)
def upload_form():
    return """