RanM committed
Commit ff2ee2b · verified · Parent: 192bf4d

Update app.py

Files changed (1)
  1. app.py +5 -2
app.py CHANGED
@@ -20,19 +20,20 @@ def generate_image(prompt):
         raise ValueError("Model not loaded properly.")
 
     print(f"Generating image with prompt: {prompt}")
-    output = model(prompt=prompt, num_inference_steps=1, guidance_scale=0.0)
+    output = model(prompt=prompt, num_inference_steps=50, guidance_scale=7.5)
     print(f"Model output: {output}")
 
     if output is None:
         raise ValueError("Model returned None")
 
     if hasattr(output, 'images') and output.images:
-        print(f"Image generated")
+        print(f"Image generated successfully")
         image = output.images[0]
         buffered = BytesIO()
         image.save(buffered, format="JPEG")
         image_bytes = buffered.getvalue()
         img_str = base64.b64encode(image_bytes).decode("utf-8")
+        print("Image encoded to base64")
         print(f'img_str: {img_str}')
         return img_str, None
     else:
@@ -55,6 +56,7 @@ def inference(sentence_mapping, character_dict, selected_style):
         for paragraph_number, sentences in sentence_mapping.items():
             combined_sentence = " ".join(sentences)
             prompt = f"Make an illustration in {selected_style} style from: {combined_sentence}"
+            print(f"Generated prompt for paragraph {paragraph_number}: {prompt}")
             img_str, error = generate_image(prompt)
             if error:
                 images[paragraph_number] = f"Error: {error}"
@@ -62,6 +64,7 @@ def inference(sentence_mapping, character_dict, selected_style):
             images[paragraph_number] = img_str
         return images
     except Exception as e:
+        print(f"An error occurred during inference: {e}")
         return {"error": str(e)}
 
 gradio_interface = gr.Interface(
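
Note on the change: the generation call moves from a single denoising step with guidance disabled (num_inference_steps=1, guidance_scale=0.0, the settings typically used with one-step "turbo" checkpoints) to 50 steps with guidance 7.5, the conventional settings for a standard Stable Diffusion pipeline. Below is a minimal sketch of the updated generate_image path, assuming model is a diffusers text-to-image pipeline (the pipeline class and checkpoint name are illustrative, not taken from this repository):

import base64
from io import BytesIO

import torch
from diffusers import AutoPipelineForText2Image

# Assumption: checkpoint and device are illustrative; the commit does not show
# how `model` is loaded in app.py.
model = AutoPipelineForText2Image.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
).to("cuda")

def generate_image(prompt):
    print(f"Generating image with prompt: {prompt}")
    # 50 denoising steps with classifier-free guidance scale 7.5, as in the updated call.
    output = model(prompt=prompt, num_inference_steps=50, guidance_scale=7.5)
    image = output.images[0]
    # Encode the PIL image as a base64 JPEG string, mirroring the app's return value.
    buffered = BytesIO()
    image.save(buffered, format="JPEG")
    return base64.b64encode(buffered.getvalue()).decode("utf-8"), None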
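
On the caller side, inference returns a dict keyed by paragraph number whose values are either base64-encoded JPEG strings or "Error: ..." messages. A usage sketch for turning those strings back into images (the helper name decode_image is hypothetical):

import base64
from io import BytesIO

from PIL import Image

def decode_image(img_str):
    # Reverse of the app's encoding: base64 string -> raw JPEG bytes -> PIL image.
    return Image.open(BytesIO(base64.b64decode(img_str)))

# Example, assuming `results` came from inference(sentence_mapping, character_dict, selected_style):
# for paragraph_number, value in results.items():
#     if value.startswith("Error"):
#         print(f"Paragraph {paragraph_number} failed: {value}")
#     else:
#         decode_image(value).save(f"paragraph_{paragraph_number}.jpg")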