KingNish committed (verified)
Commit 1fc20a4 · Parent: 7f8ad9a

Update app.py

Files changed (1): app.py (+3, -7)
app.py CHANGED
@@ -1,15 +1,14 @@
 from __future__ import annotations
 import os
 import random
-import uuid
 import gradio as gr
 import spaces
 import numpy as np
 import uuid
+from optimum.quanto import freeze, qfloat8, quantize, qint4
 from diffusers import PixArtAlphaPipeline, LCMScheduler
 import torch
 from typing import Tuple
-from datetime import datetime
 
 
 DESCRIPTION = """ # Instant Image
@@ -81,6 +80,8 @@ pipe = PixArtAlphaPipeline.from_pretrained(
     use_safetensors=True,
 ).to("cuda:0")
 pipe.text_encoder.to_bettertransformer()
+quantize(pipe.transformer, weights=qint4, exclude="proj_out")
+freeze(pipe.transformer)
 
 def apply_style(style_name: str, positive: str, negative: str = "") -> Tuple[str, str]:
     p, n = styles.get(style_name, styles[DEFAULT_STYLE_NAME])
@@ -92,11 +93,6 @@ def apply_style(style_name: str, positive: str, negative: str = "") -> Tuple[str
 pipe.transformer = torch.compile(pipe.transformer, mode="reduce-overhead", fullgraph=True)
 print("Model Compiled!")
 
-def save_image(img):
-    unique_name = str(uuid.uuid4()) + ".png"
-    img.save(unique_name)
-    return unique_name
-
 def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
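
For context, a minimal standalone sketch of what the added step does: load the PixArt-α pipeline, quantize the transformer's weights to 4-bit with optimum-quanto while leaving the output projection unquantized, then freeze the result. The checkpoint ID, dtype, and prompt below are illustrative assumptions and are not taken from this repository's app.py.

# Standalone sketch of the quantization introduced by this commit.
# Assumptions: the PixArt-alpha/PixArt-XL-2-1024-MS checkpoint, fp16 weights,
# and a single CUDA device; the Space's actual model ID and dtype may differ.
import torch
from diffusers import PixArtAlphaPipeline
from optimum.quanto import freeze, qint4, quantize

pipe = PixArtAlphaPipeline.from_pretrained(
    "PixArt-alpha/PixArt-XL-2-1024-MS",  # assumed checkpoint
    torch_dtype=torch.float16,
    use_safetensors=True,
).to("cuda:0")

# Quantize the transformer's weights to int4, leaving the final projection
# ("proj_out") at full precision, then freeze to replace the original float
# weights with their quantized versions.
quantize(pipe.transformer, weights=qint4, exclude="proj_out")
freeze(pipe.transformer)

image = pipe("An astronaut riding a green horse").images[0]
image.save("sample.png")

Excluding proj_out keeps the layer that produces the final output at higher precision, presumably to limit the quality loss that int4 weights would introduce there, while freeze() makes the quantization permanent so the transformer can then be compiled and served as usual.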