ovi054 committed · verified
Commit 2fbfe8d · Parent(s): f761af5

Update app.py

Files changed (1):
  1. app.py +0 -51
app.py CHANGED

@@ -82,57 +82,6 @@ def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, guidan
     if lora_id:
         pipe.unload_lora_weights()
 
-# def query(lora_id, prompt, steps=28, cfg_scale=3.5, randomize_seed=True, seed=-1, width=1024, height=1024):
-#     if prompt == "" or prompt == None:
-#         return None
-
-#     if lora_id.strip() == "" or lora_id == None:
-#         lora_id = "black-forest-labs/FLUX.1-dev"
-
-#     key = random.randint(0, 999)
-
-#     API_URL = "https://api-inference.huggingface.co/models/"+ lora_id.strip()
-
-#     API_TOKEN = random.choice([os.getenv("HF_READ_TOKEN")])
-#     headers = {"Authorization": f"Bearer {API_TOKEN}"}
-
-#     # prompt = GoogleTranslator(source='ru', target='en').translate(prompt)
-#     # print(f'\033[1mGeneration {key} translation:\033[0m {prompt}')
-
-#     prompt = f"{prompt} | ultra detail, ultra elaboration, ultra quality, perfect."
-#     # print(f'\033[1mGeneration {key}:\033[0m {prompt}')
-
-#     # If seed is -1, generate a random seed and use it
-#     if randomize_seed:
-#         seed = random.randint(1, 4294967296)
-
-#     payload = {
-#         "inputs": prompt,
-#         "steps": steps,
-#         "cfg_scale": cfg_scale,
-#         "seed": seed,
-#         "parameters": {
-#             "width": width,  # Pass the width to the API
-#             "height": height  # Pass the height to the API
-#         }
-#     }
-
-#     response = requests.post(API_URL, headers=headers, json=payload, timeout=timeout)
-#     if response.status_code != 200:
-#         print(f"Error: Failed to get image. Response status: {response.status_code}")
-#         print(f"Response content: {response.text}")
-#         if response.status_code == 503:
-#             raise gr.Error(f"{response.status_code} : The model is being loaded")
-#         raise gr.Error(f"{response.status_code}")
-
-#     try:
-#         image_bytes = response.content
-#         image = Image.open(io.BytesIO(image_bytes))
-#         print(f'\033[1mGeneration {key} completed!\033[0m ({prompt})')
-#         return image, seed, seed
-#     except Exception as e:
-#         print(f"Error when trying to open the image: {e}")
-#         return None
 
 
 examples = [
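
For reference, the block this commit deletes was an already commented-out helper that called the hosted Inference API instead of driving the local pipe object used by infer (note the pipe.unload_lora_weights() context line), so it was dead code. Below is a minimal, runnable sketch of that removed path, assuming requests and Pillow are installed and HF_READ_TOKEN is set in the environment. The function name query_inference_api and the timeout=300 default are illustrative additions (the removed code referenced an undefined timeout variable), and the payload layout simply mirrors the deleted comment block rather than a documented API schema.

import io
import os
import random

import requests
from PIL import Image

API_BASE = "https://api-inference.huggingface.co/models/"

def query_inference_api(lora_id, prompt, steps=28, cfg_scale=3.5,
                        randomize_seed=True, seed=-1,
                        width=1024, height=1024, timeout=300):
    # Nothing to generate without a prompt.
    if not prompt:
        return None
    # Fall back to the base FLUX model when no LoRA repo id is given.
    if not lora_id or not lora_id.strip():
        lora_id = "black-forest-labs/FLUX.1-dev"

    headers = {"Authorization": f"Bearer {os.getenv('HF_READ_TOKEN')}"}
    if randomize_seed or seed == -1:
        seed = random.randint(1, 4294967296)

    # Payload layout mirrors the deleted comment block, not a documented schema.
    payload = {
        "inputs": prompt,
        "steps": steps,
        "cfg_scale": cfg_scale,
        "seed": seed,
        "parameters": {"width": width, "height": height},
    }
    response = requests.post(API_BASE + lora_id.strip(),
                             headers=headers, json=payload, timeout=timeout)
    # The API returns 503 while the model is still loading.
    response.raise_for_status()
    return Image.open(io.BytesIO(response.content)), seed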
 