from __future__ import annotations
from fastapi import FastAPI, File, UploadFile
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
import shutil
import torch
import numpy as np
from vtoonify_model import Model
app = FastAPI()

# Initialise the VToonify model wrapper once at startup; use the GPU when available.
model = Model(device='cuda' if torch.cuda.is_available() else 'cpu')

@app.post("/upload/")
async def process_image(file: UploadFile = File(...)):
    # Save the uploaded image locally
    with open("uploaded_image.jpg", "wb") as buffer:
        shutil.copyfileobj(file.file, buffer)

    # Load the style model (the 'cartoon1' style is always used here)
    exstyle, load_info = model.load_model('cartoon1')

    # Detect and align the face in the uploaded image, with padding on each side
    top, bottom, left, right = 200, 200, 200, 200
    aligned_face, _, input_info = model.detect_and_align_image("uploaded_image.jpg", top, bottom, left, right)

    # Run the style transfer on the aligned face
    processed_image, message = model.image_toonify(aligned_face, exstyle, style_degree=0.5, style_type='cartoon1')

    # Save the processed image returned by the model
    with open("result_image.jpg", "wb") as result_buffer:
        result_buffer.write(processed_image)

    # Return the processed image as a downloadable file
    return FileResponse(
        "result_image.jpg",
        media_type="image/jpeg",
        headers={"Content-Disposition": "attachment; filename=result_image.jpg"},
    )
@app.get("/")
def index() -> FileResponse:
    return FileResponse(path="/app/AB/index.html", media_type="text/html")

# Mount the static frontend last so it does not shadow the API routes defined above.
app.mount("/", StaticFiles(directory="AB", html=True), name="static")
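
# A minimal local entry point, assuming uvicorn is installed (it is not imported
# in the original file); in deployment the app is typically started via the
# uvicorn CLI instead. Example request once the server is running:
#   curl -F "file=@face.jpg" http://localhost:8000/upload/ -o result_image.jpg
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)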