Spaces: Running on Zero

Update app.py

app.py CHANGED
@@ -22,7 +22,8 @@ from torch import Tensor, nn
 from transformers import CLIPTextModel, CLIPTokenizer
 from transformers import T5EncoderModel, T5Tokenizer
 # from optimum.quanto import freeze, qfloat8, quantize
-
+
+from transformers import AutoModelForSeq2SeqGeneration, AutoTokenizer, pipeline
 
 class HFEmbedder(nn.Module):
     def __init__(self, version: str, max_length: int, **hf_kwargs):
@@ -779,31 +780,36 @@ TRANSLATORS = {
     "Austroasiatic": "Helsinki-NLP/opus-mt-aav-en"
 }
 
-# Translator cache dictionary
 translators_cache = {}
 
-def
-
-
-
-
+def get_translator(lang):
+    """Initialize and return a single translator."""
+    if lang == "English":
+        return None
+
+    if lang not in translators_cache:
         try:
+            model_name = TRANSLATORS[lang]
             translator = pipeline(
-
+                "translation",
                 model=model_name,
-                device="cpu"
+                device="cpu",
+                framework="pt"
             )
             translators_cache[lang] = translator
             print(f"Successfully loaded translator for {lang}")
         except Exception as e:
             print(f"Error loading translator for {lang}: {e}")
             translators_cache[lang] = None
+
+    return translators_cache[lang]
 
 def translate_prompt(prompt, source_lang):
+    """Translate the prompt."""
     if source_lang == "English":
         return prompt
-
-    translator =
+
+    translator = get_translator(source_lang)
     if translator is None:
         print(f"No translator available for {source_lang}, using original prompt")
         return prompt
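
The object that get_translator() caches above is a plain transformers translation pipeline. For reference, a standalone sketch of what such a call returns (not part of the commit; the checkpoint name is the Austroasiatic entry from the TRANSLATORS table, and the input string is only a placeholder):

from transformers import pipeline

# Same arguments as the pipeline() call in the diff: a CPU-only PyTorch
# translation pipeline backed by a Helsinki-NLP OPUS-MT checkpoint.
translator = pipeline(
    "translation",
    model="Helsinki-NLP/opus-mt-aav-en",
    device="cpu",
    framework="pt",
)

# A translation pipeline returns a list with one dict per input string;
# the translated text sits under the "translation_text" key.
result = translator("a prompt written in the source language")
print(result[0]["translation_text"])

Keeping the pipeline in translators_cache means each checkpoint is loaded at most once per process, and the None entry written on failure prevents repeated load attempts for the same language.
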
@@ -817,6 +823,8 @@ def translate_prompt(prompt, source_lang):
     except Exception as e:
         print(f"Translation error for {source_lang}: {e}")
         return prompt
+
+
 
 def get_translator(lang):
     if lang == "English":
@@ -869,12 +877,16 @@ def generate_image(
 ):
     # Handle translation
     try:
-
-
+        if source_lang != "English":
+            translated_prompt = translate_prompt(prompt, source_lang)
+            print(f"Using translated prompt: {translated_prompt}")
+        else:
+            translated_prompt = prompt
     except Exception as e:
         print(f"Translation failed: {e}")
         translated_prompt = prompt
-
+
+
 
     if seed == 0:
         seed = int(random.random() * 1000000)
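
Because translate_prompt() returns the original text for English input and whenever get_translator() yields None, the try/except above leaves translated_prompt defined in every branch. A small hypothetical check of that contract (not part of the commit, assuming the helpers defined earlier in app.py are in scope):

# English prompts bypass translation entirely.
assert translate_prompt("A beautiful landscape", "English") == "A beautiful landscape"

# When a model fails to load, get_translator() caches None for that language,
# and translate_prompt() returns the prompt unchanged.
translators_cache["Austroasiatic"] = None  # simulate a failed load
assert translate_prompt("a prompt in another language", "Austroasiatic") == "a prompt in another language"
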
@@ -931,10 +943,11 @@ footer {
     visibility: hidden;
 }
 """
+
 def create_demo():
     with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
-        gr.Markdown("# Multilingual
-        gr.Markdown("Supported languages: " + ", ".join(["English"] + sorted(list(TRANSLATORS.keys()))))
+        gr.Markdown("# Multilingual FLUX")
+        gr.Markdown("### Supported languages: " + ", ".join(["English"] + sorted(list(TRANSLATORS.keys()))))
 
         with gr.Row():
             with gr.Column():
@@ -948,6 +961,7 @@ def create_demo():
                     label="Prompt",
                     value="A beautiful landscape"
                 )
+
 
 
                 width = gr.Slider(minimum=128, maximum=2048, step=64, label="Width", value=768)
@@ -975,6 +989,7 @@ def create_demo():
                 output_seed = gr.Text(label="Used Seed")
                 translated_prompt = gr.Text(label="Translated Prompt")
 
+        # Multilingual examples
         examples = [
             # English
             ["A beautiful sunset over mountains", "English", 768, 768, 3.5, 30, 0, False, None, 0.8, True],
@@ -988,8 +1003,6 @@ def create_demo():
             ["Un hermoso atardecer en la playa", "Spanish", 768, 768, 3.5, 30, 0, False, None, 0.8, True]
         ]
 
-
-
         gr.Examples(
             examples=examples,
             inputs=[
@@ -998,7 +1011,7 @@ def create_demo():
             ],
             outputs=[output_image, output_seed, translated_prompt],
             fn=generate_image,
-            cache_examples=
+            cache_examples=True
         )
 
 
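
With cache_examples=True, Gradio pre-computes the wired-in fn for every example row (by default when the app starts) and serves the stored outputs on later clicks, so with generate_image as fn each example row costs one full generation up front. A minimal toy sketch of the flag (illustrative only; echo stands in for generate_image):

import gradio as gr

def echo(text):
    # Stand-in for generate_image: just return the prompt.
    return text

with gr.Blocks() as demo:
    inp = gr.Textbox(label="Prompt")
    out = gr.Textbox(label="Echo")
    gr.Examples(
        examples=[["A beautiful sunset over mountains"]],
        inputs=[inp],
        outputs=[out],
        fn=echo,
        cache_examples=True,  # run fn on each example up front and reuse the results
    )

demo.launch()
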
@@ -1021,8 +1034,6 @@ def create_demo():
     return demo
 
 if __name__ == "__main__":
-    print("Initializing translators...")
-    initialize_translators() # initialize translators
     print("Starting demo...")
     demo = create_demo()
     demo.launch(share=True)