K00B404 committed (verified)
Commit c351ba2 · Parent(s): 3c00ab6

Update app.py

Files changed (1): app.py (+30, -31)
app.py CHANGED
@@ -4,31 +4,31 @@ from PIL import Image
 from io import BytesIO
 import os
 from huggingface_hub import InferenceClient
+
 API_TOKEN = os.getenv("HF_API_TOKEN")  # Ensure you've set this environment variable
 API_URL = "https://api-inference.huggingface.co/models/enhanceaiteam/Flux-uncensored"
 
-
-def enhanche_prompt(prompt, system_prompt='You are a prompt enhancer', model="meta-llama/Llama-3.2-1B-Instruct", max_tokens=512, stream=False ):
-    enhancer = InferenceClient(api_key = API_TOKEN)
+def enhance_prompt(prompt, system_prompt='You are a prompt enhancer', model="meta-llama/Llama-3.2-1B-Instruct", max_tokens=512, stream=False):
+    enhancer = InferenceClient(api_key=API_TOKEN)
+    response = ""
+
     for message in enhancer.chat_completion(
-        model=model,
-        messages=[
+        model=model,
+        messages=[
             {"role": "system", "content": system_prompt},
             {"role": "user", "content": prompt}
         ],
-        max_tokens=max_tokens,
-        stream=stream,
+        max_tokens=max_tokens,
+        stream=stream,
     ):
-        result = message.choices[0].delta.content
-        print(result, end="")
-        return result
-
+        response += message.choices[0].delta.content
+
+    return response.strip()  # Ensure trailing whitespace is removed
 
-# Function to call Hugging Face API and get the generated image
 def generate_image(prompt, enhance=False):
     if enhance:
-        prompt = enhanche_prompt(prompt)
-
+        prompt = enhance_prompt(prompt)
+
     headers = {"Authorization": f"Bearer {API_TOKEN}"}
     data = {"inputs": prompt}
 
@@ -40,8 +40,8 @@ def generate_image(prompt, enhance=False):
         return image
     else:
         return f"Error: {response.status_code}, {response.text}"
-
-title_html="""
+
+title_html = """
 <center>
   <div id="title-container">
     <h1 id="title-text">FLUX Capacitor</h1>
@@ -56,17 +56,17 @@ css = """
     background-repeat: no-repeat;
     background-position: center;
     background-attachment: fixed;
-    color:#000;
+    color: #000;
 }
-.dark\:bg-gray-950:is(.dark *) {
-    --tw-bg-opacity: 1;
-    background-color: rgb(157, 17, 142);
+.dark\\:bg-gray-950:is(.dark *) {
+    --tw-bg-opacity: 1;
+    background-color: rgb(157, 17, 142);
 }
 .gradio-container-4-41-0 .prose :last-child {
-    margin-top: 8px !important;
+    margin-top: 8px !important;
 }
 .gradio-container-4-41-0 .prose :last-child {
-    margin-bottom: -7px !important;
+    margin-bottom: -7px !important;
 }
 .dark {
     --button-primary-background-fill: #09e60d70;
@@ -75,29 +75,29 @@ css = """
     --background-fill-secondary: #000;
 }
 .hide-container {
-    margin-top;-2px;
+    margin-top: -2px;
 }
 #app-container3 {
-    background-color: rgba(255, 255, 255, 0.001); /* Corrected to make semi-transparent */
+    background-color: rgba(255, 255, 255, 0.001);
     max-width: 300px;
     margin-left: auto;
     margin-right: auto;
     margin-bottom: 10px;
     border-radius: 125px;
-    box-shadow: 0 0 10px rgba(0,0,0,0.1); /* Adjusted shadow opacity */
+    box-shadow: 0 0 10px rgba(0,0,0,0.1);
 }
 #app-container {
-    background-color: rgba(255, 255, 255, 0.001); /* Semi-transparent background */
+    background-color: rgba(255, 255, 255, 0.001);
     max-width: 300px;
-    margin: 0 auto; /* Center horizontally */
+    margin: 0 auto;
     padding-bottom: 10px;
     border-radius: 25px;
-    box-shadow: 0 0 10px rgba(0, 0, 0, 0.1); /* Adjusted shadow opacity */
+    box-shadow: 0 0 10px rgba(0, 0, 0, 0.1);
 }
 #title-container {
     display: flex;
-    align-items: center
-    margin-bottom:10px;
+    align-items: center; /* Added semicolon */
+    margin-bottom: 10px;
     justify-content: center;
 }
 #title-icon {
@@ -112,7 +112,6 @@ css = """
 }
 """
 
-
 # Create Gradio interface
 def create_ui():
     with gr.Blocks(theme='Nymbo/Nymbo_Theme', css=css) as ui:
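
The hunk at @@ -40,8 +40,8 @@ only shows the tail of generate_image; the request that produces response lies outside the changed lines. For context, a minimal sketch of how that omitted portion presumably looks, assuming a plain requests.post call against API_URL and PIL decoding of the returned bytes (the requests usage and exact payload handling are assumptions, not shown in this commit):

import requests
from io import BytesIO
from PIL import Image

def generate_image(prompt, enhance=False):
    if enhance:
        prompt = enhance_prompt(prompt)

    headers = {"Authorization": f"Bearer {API_TOKEN}"}
    data = {"inputs": prompt}

    # Assumed middle section: POST the prompt to the hosted inference endpoint.
    response = requests.post(API_URL, headers=headers, json=data)

    if response.status_code == 200:
        # The endpoint returns raw image bytes; decode them into a PIL image.
        image = Image.open(BytesIO(response.content))
        return image
    else:
        return f"Error: {response.status_code}, {response.text}"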
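
The diff cuts off just inside create_ui(). For orientation only, a hypothetical sketch of how the Blocks UI might wire a prompt box and an enhance checkbox into generate_image; the component names and layout below are assumptions, not part of this commit:

import gradio as gr

def create_ui():
    with gr.Blocks(theme='Nymbo/Nymbo_Theme', css=css) as ui:
        with gr.Column(elem_id="app-container"):
            gr.HTML(title_html)
            prompt_box = gr.Textbox(label="Prompt")
            enhance_box = gr.Checkbox(label="Enhance prompt", value=False)
            generate_btn = gr.Button("Generate")
            output_image = gr.Image(label="Result")

            # Route the click through generate_image, passing the enhance flag.
            generate_btn.click(
                fn=generate_image,
                inputs=[prompt_box, enhance_box],
                outputs=output_image,
            )
    return ui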