Ashrafb committed
Commit 8bec0c9 · verified · 1 Parent(s): 1e68cd0

Update main.py

Files changed (1)
  1. main.py +9 -11
main.py CHANGED
@@ -1,16 +1,10 @@
 from fastapi import FastAPI, File, UploadFile, Form
 from fastapi.responses import StreamingResponse
 from fastapi.staticfiles import StaticFiles
+
 import shutil
 import cv2
 import numpy as np
-import dlib
-from torchvision import transforms
-import torch.nn.functional as F
-import gradio as gr
-import os
-import torch
-import io
 from io import BytesIO
 
 app = FastAPI()
@@ -23,8 +17,7 @@ def load_model():
     from vtoonify_model import Model
     model = Model(device='cuda' if torch.cuda.is_available() else 'cpu')
     model.load_model('cartoon1')
-
-# Define endpoints
+
 @app.post("/upload/")
 async def process_image(file: UploadFile = File(...), top: int = Form(...), bottom: int = Form(...), left: int = Form(...), right: int = Form(...)):
     global model
@@ -42,11 +35,15 @@ async def process_image(file: UploadFile = File(...), top: int = Form(...), bottom: int = Form(...), left: int = Form(...), right: int = Form(...)):
     aligned_face, instyle, message = model.detect_and_align_image(frame_rgb, top, bottom, left, right)
     processed_image, message = model.image_toonify(aligned_face, instyle, model.exstyle, style_degree=0.5, style_type='cartoon1')
 
+    # Convert BGR to RGB
+    processed_image_rgb = cv2.cvtColor(processed_image, cv2.COLOR_BGR2RGB)
+
     # Convert processed image to bytes
-    _, encoded_image = cv2.imencode('.jpg', processed_image)
+    _, encoded_image = cv2.imencode('.jpg', processed_image_rgb)
 
     # Return the processed image as a streaming response
-    return StreamingResponse(io.BytesIO(encoded_image.tobytes()), media_type="image/jpeg")
+    return StreamingResponse(BytesIO(encoded_image.tobytes()), media_type="image/jpeg")
+
 
 # Mount static files directory
 app.mount("/", StaticFiles(directory="AB", html=True), name="static")
@@ -55,3 +52,4 @@ app.mount("/", StaticFiles(directory="AB", html=True), name="static")
 @app.get("/")
 def index():
     return FileResponse(path="/app/AB/index.html", media_type="text/html")
+
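
For reference, a minimal client sketch for the /upload/ endpoint changed above. It assumes the FastAPI app is reachable at http://localhost:7860 and that a local test.jpg exists; the host, port, and file names are placeholders, not part of this commit.

import requests  # client-side dependency, assumed to be installed separately

# Hypothetical local URL; adjust host/port to wherever the app is actually served.
url = "http://localhost:7860/upload/"

# The endpoint expects a multipart file plus the four crop margins as form fields.
with open("test.jpg", "rb") as f:  # "test.jpg" is a placeholder input image
    response = requests.post(
        url,
        files={"file": f},
        data={"top": 0, "bottom": 0, "left": 0, "right": 0},
    )

# The response body is the JPEG produced by cv2.imencode, so it can be written straight to disk.
with open("toonified.jpg", "wb") as out:
    out.write(response.content)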