from fastapi import FastAPI, File, UploadFile, HTTPException
import cv2
import numpy as np
from PIL import Image
import io
import base64

app = FastAPI()

@app.post("/detect/")
async def detect_face(file: UploadFile = File(...)):
    try:
        # Read the upload and normalize it to a 3-channel RGB array
        # (this also handles grayscale and RGBA inputs).
        image_bytes = await file.read()
        image = Image.open(io.BytesIO(image_bytes)).convert("RGB")
        img_np = np.array(image)

        # Load the bundled frontal-face Haar cascade and run detection
        # on the grayscale version of the image.
        face_cascade = cv2.CascadeClassifier(
            cv2.data.haarcascades + 'haarcascade_frontalface_default.xml'
        )
        gray = cv2.cvtColor(img_np, cv2.COLOR_RGB2GRAY)
        faces = face_cascade.detectMultiScale(
            gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30)
        )

        if len(faces) == 0:
            raise HTTPException(status_code=404, detail="No faces were detected in the image.")

        # Draw a red bounding box around each detected face
        # (the array is RGB, so (255, 0, 0) is red).
        for (x, y, w, h) in faces:
            cv2.rectangle(img_np, (x, y), (x + w, y + h), (255, 0, 0), 2)

        # Encode the annotated image as JPEG and return it as base64.
        result_image = Image.fromarray(img_np)
        img_byte_arr = io.BytesIO()
        result_image.save(img_byte_arr, format='JPEG')
        img_bytes_out = img_byte_arr.getvalue()

        return {
            "message": "Faces detected",
            "rostros": len(faces),
            "imagen_base64": base64.b64encode(img_bytes_out).decode('utf-8')
        }

    except HTTPException:
        # Re-raise the 404 above so it is not swallowed and turned
        # into a 500 by the generic handler below.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
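

# A minimal sketch of how the service could be run and exercised locally,
# assuming uvicorn is installed; the host, port, and file path below are
# illustrative and not part of the original file.
#
# Example request once the server is up:
#   curl -F "file=@face.jpg" http://localhost:8000/detect/
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)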