from fastapi import FastAPI, File, UploadFile, HTTPException
import cv2
import numpy as np
from PIL import Image
import io
import base64

app = FastAPI()


@app.post("/detect/")
async def detect_face(file: UploadFile = File(...)):
    try:
        # Read the uploaded file and decode it with Pillow.
        image_bytes = await file.read()
        image = Image.open(io.BytesIO(image_bytes))
        img_np = np.array(image)

        # Pillow yields RGB (or RGBA) arrays; normalize to the BGR channel
        # order that OpenCV expects, dropping the alpha channel if present.
        if img_np.ndim == 2:
            img_np = cv2.cvtColor(img_np, cv2.COLOR_GRAY2BGR)
        elif img_np.shape[2] == 4:
            img_np = cv2.cvtColor(img_np, cv2.COLOR_RGBA2BGR)
        else:
            img_np = cv2.cvtColor(img_np, cv2.COLOR_RGB2BGR)

        # Run the bundled frontal-face Haar cascade on a grayscale copy.
        face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')
        gray = cv2.cvtColor(img_np, cv2.COLOR_BGR2GRAY)
        faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30))

        if len(faces) == 0:
            raise HTTPException(status_code=404, detail="No faces were detected in the image.")

        # Draw a blue rectangle (BGR order) around each detected face.
        for (x, y, w, h) in faces:
            cv2.rectangle(img_np, (x, y), (x + w, y + h), (255, 0, 0), 2)

        # Convert back to RGB for Pillow and re-encode the annotated image as JPEG.
        result_image = Image.fromarray(cv2.cvtColor(img_np, cv2.COLOR_BGR2RGB))
        img_byte_arr = io.BytesIO()
        result_image.save(img_byte_arr, format='JPEG')
        img_byte_arr = img_byte_arr.getvalue()

        return {
            "message": "Faces detected",
            "rostros": len(faces),
            "imagen_base64": base64.b64encode(img_byte_arr).decode('utf-8')
        }

    except HTTPException:
        # Let the 404 raised above propagate instead of being remapped to a 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
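
For completeness, here is a minimal client sketch for exercising the endpoint. It assumes the code above lives in a file named main.py and is served locally with `uvicorn main:app --reload` (file uploads also require the python-multipart package to be installed); the host, port, and file paths below are placeholders.

import base64
import requests

# Hypothetical local URL; adjust host/port to wherever uvicorn is serving the app.
URL = "http://127.0.0.1:8000/detect/"

# "photo.jpg" is a placeholder path to any local image containing a face.
with open("photo.jpg", "rb") as f:
    response = requests.post(URL, files={"file": ("photo.jpg", f, "image/jpeg")})

response.raise_for_status()
data = response.json()
print("Faces found:", data["rostros"])

# Decode the base64-encoded annotated image from the response and save it to disk.
with open("annotated.jpg", "wb") as out:
    out.write(base64.b64decode(data["imagen_base64"]))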