Commit af7831d · Andre committed
Parent: 9e5d569
"Update"

Files changed:
- app.py +1 -0
- config/config.py +1 -11
- config/config_colab.py +1 -7
- config/models.py +5 -0
- {src → config}/prompts.py +0 -0
- src/gradio_interface.py +0 -9
- src/{img_gen_logic.py → img_gen.py} +9 -2
app.py
CHANGED
@@ -3,6 +3,7 @@ import sys
 import os
 
 # Add the src folder to the Python path
+# Solves all problems w subfolders - option2
 src_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "src"))
 if src_path not in sys.path:
     sys.path.append(src_path)
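For context on the idiom this hunk annotates: appending src/ to sys.path lets modules under src/ be imported by bare name without installing the repo as a package. A minimal standalone sketch of the same pattern (not part of the commit; the example import is illustrative):

import os
import sys

# Resolve src/ relative to this file, so imports work no matter
# which directory the app is launched from
src_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "src"))

# Append only once to keep sys.path free of duplicates on re-import
if src_path not in sys.path:
    sys.path.append(src_path)

# Modules directly under src/ are now importable by bare name,
# e.g. `import img_gen` after the rename in this commit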
config/config.py
CHANGED
@@ -1,13 +1,9 @@
 # config.py
 import os
 from src.prompts import prompts  # Import prompts from prompts.py
+from config.models import models
 
 # Retrieve the Hugging Face token
-#try:
-#    # Try to get the token from Colab secrets
-#    api_token = userdata.get("HF_CTB_TOKEN")
-#except ImportError:
-#    # Fall back to environment variable (for local execution)
 api_token = os.getenv("HF_CTB_TOKEN")
 
 # Debugging: Check if the Hugging Face token is available
@@ -17,12 +13,6 @@ else:
     print("Hugging Face token loaded successfully.")
 
 
-# List of models with aliases
-models = [
-    {"alias": "FLUX.1-dev", "name": "black-forest-labs/FLUX.1-dev"},
-    {"alias": "Midjourney", "name": "strangerzonehf/Flux-Midjourney-Mix2-LoRA"}
-]
-
 # Debugging: Print prompt and model options
 print("Prompt Options:", [p["alias"] for p in prompts])
 print("Model Options:", [m["alias"] for m in models])
config/config_colab.py
CHANGED
@@ -1,6 +1,6 @@
 # config_colab.py
 from google.colab import userdata
-from
+from config.prompts import prompts  # Import prompts from prompts.py
 
 # Retrieve the Hugging Face token from Colab secrets
 api_token = userdata.get("HF_CTB_TOKEN")
@@ -13,12 +13,6 @@ else:
     print("=== Debug: Success ===")
     print("Hugging Face token loaded successfully.")
 
-# List of models with aliases
-models = [
-    {"alias": "FLUX.1-dev", "name": "black-forest-labs/FLUX.1-dev"},
-    {"alias": "Midjourney", "name": "strangerzonehf/Flux-Midjourney-Mix2-LoRA"}
-]
-
 # Debugging: Print prompt and model options
 print("=== Debug: Available Options ===")
 print("Prompt Options:", [p["alias"] for p in prompts])
config/models.py
ADDED
@@ -0,0 +1,5 @@
+# List of models with aliases
+models = [
+    {"alias": "FLUX.1-dev", "name": "black-forest-labs/FLUX.1-dev"},
+    {"alias": "Midjourney", "name": "strangerzonehf/Flux-Midjourney-Mix2-LoRA"}
+]
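As a usage sketch (not part of the commit): resolving a repo id from this list by its alias could look like the following; the helper name find_model is hypothetical.

# Mirrors the list added in config/models.py
models = [
    {"alias": "FLUX.1-dev", "name": "black-forest-labs/FLUX.1-dev"},
    {"alias": "Midjourney", "name": "strangerzonehf/Flux-Midjourney-Mix2-LoRA"}
]

def find_model(alias):
    # Return the full repo id for an alias, or raise if it is unknown
    for m in models:
        if m["alias"] == alias:
            return m["name"]
    raise ValueError(f"Unknown model alias: {alias}")

print(find_model("Midjourney"))  # strangerzonehf/Flux-Midjourney-Mix2-LoRA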
{src → config}/prompts.py
RENAMED
File without changes
src/gradio_interface.py
CHANGED
@@ -1,16 +1,7 @@
 # gradio_interface.py (HuggingFace Spaces)
 import gradio as gr
-from src.img_gen_logic import generate_image  # Direct import
 from config.config import prompts, models  # Direct import
 
-def generate(prompt_alias, team, model_alias, custom_prompt, height=360, width=640, num_inference_steps=20, guidance_scale=2.0, seed=-1):
-    try:
-        # Generate the image
-        image_path, message = generate_image(prompt_alias, team, model_alias, custom_prompt, height, width, num_inference_steps, guidance_scale, seed)
-        return image_path, message
-    except Exception as e:
-        return None, f"An error occurred: {e}"
-
 # Gradio Interface
 with gr.Blocks() as demo:
     gr.Markdown("# CtB AI Image Generator")
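The generate wrapper removed here reappears in src/img_gen.py below, so this file keeps only the UI. A hedged sketch of how the remaining Blocks interface could call it; the components and choices below are illustrative assumptions, not the Space's actual layout:

import gradio as gr
from img_gen import generate  # wrapper moved into src/img_gen.py by this commit

with gr.Blocks() as demo:
    gr.Markdown("# CtB AI Image Generator")
    # Hypothetical inputs; the real interface defines its own controls
    prompt_alias = gr.Dropdown(choices=["castle siege"], label="Prompt")
    team = gr.Radio(choices=["red", "blue"], label="Team")
    model_alias = gr.Dropdown(choices=["FLUX.1-dev", "Midjourney"], label="Model")
    custom_prompt = gr.Textbox(label="Custom prompt")
    output_image = gr.Image(label="Result")
    status = gr.Textbox(label="Status")
    run = gr.Button("Generate")
    # generate returns (image_path, message), matching the two outputs
    run.click(
        fn=generate,
        inputs=[prompt_alias, team, model_alias, custom_prompt],
        outputs=[output_image, status],
    )

demo.launch()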
src/{img_gen_logic.py → img_gen.py}
RENAMED
@@ -1,10 +1,17 @@
-#
+# img_gen.py
 import random
 from huggingface_hub import InferenceClient
-from PIL import Image
 from datetime import datetime
 from config.config import api_token, models, prompts  # Direct import
 
+def generate(prompt_alias, team, model_alias, custom_prompt, height=360, width=640, num_inference_steps=20, guidance_scale=2.0, seed=-1):
+    try:
+        # Generate the image
+        image_path, message = generate_image(prompt_alias, team, model_alias, custom_prompt, height, width, num_inference_steps, guidance_scale, seed)
+        return image_path, message
+    except Exception as e:
+        return None, f"An error occurred: {e}"
+
 
 def generate_image(prompt_alias, team, model_alias, custom_prompt, height=360, width=640, num_inference_steps=20, guidance_scale=2.0, seed=-1):
     # Debugging: Check if the token is available
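The body of generate_image is cut off by the hunk, so the following is only a guess at the core call it wraps, based on the imports shown (huggingface_hub's InferenceClient) and the function's parameters; the prompt text and output path are made up:

import os
from huggingface_hub import InferenceClient

# Token and repo id as config.config loads them
api_token = os.getenv("HF_CTB_TOKEN")
client = InferenceClient(model="black-forest-labs/FLUX.1-dev", token=api_token)

# text_to_image returns a PIL image; the keyword arguments mirror
# generate_image's signature
image = client.text_to_image(
    "a castle under siege at dawn",  # hypothetical prompt
    height=360,
    width=640,
    num_inference_steps=20,
    guidance_scale=2.0,
)
image.save("output.png")  # hypothetical output path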
|