John6666 committed (verified)
Commit 9bd99fd · Parent(s): 58b212d

Upload 4 files

Files changed (4):
  1. app.py +7 -4
  2. modutils.py +11 -0
  3. requirements.txt +1 -3
  4. tagger.py +12 -21
app.py CHANGED
@@ -168,7 +168,7 @@ from modutils import (list_uniq, download_private_repo, get_model_id_list, get_t
     set_textual_inversion_prompt, get_model_pipeline, change_interface_mode, get_t2i_model_info,
     get_tupled_model_list, save_gallery_images, set_optimization, set_sampler_settings,
     set_quick_presets, process_style_prompt, optimization_list,
-    preset_styles, preset_quality, preset_sampler_setting)
+    preset_styles, preset_quality, preset_sampler_setting, translate_to_en)
 from env import (hf_token, CIVITAI_API_KEY, HF_LORA_ESSENTIAL_PRIVATE_REPO, HF_VAE_PRIVATE_REPO,
     HF_SDXL_EMBEDS_NEGATIVE_PRIVATE_REPO, HF_SDXL_EMBEDS_POSITIVE_PRIVATE_REPO,
     directory_models, directory_loras, directory_vaes, directory_embeds, directory_embeds_sdxl,
@@ -822,9 +822,10 @@ with gr.Blocks(theme='NoCrypt/miku@>=1.2.2', elem_id="main", fill_width=True, cs
     rating_dbt = gr.Radio(label="Rating", choices=list(V2_RATING_OPTIONS), value="sfw")
     generate_db_random_button = gr.Button(value="EXTEND PROMPT 🎲")
     with gr.Row():
-        set_random_seed = gr.Button(value="Seed 🎲", size="sm")
-        set_params_gui = gr.Button(value="Params ↙️", size="sm")
-        clear_prompt_gui = gr.Button(value="Clear 🗑️", size="sm")
+        translate_prompt_gui = gr.Button(value="Translate Prompt 📝", variant="secondary", size="sm")
+        set_random_seed = gr.Button(value="Seed 🎲", variant="secondary", size="sm")
+        set_params_gui = gr.Button(value="Params ↙️", variant="secondary", size="sm")
+        clear_prompt_gui = gr.Button(value="Clear 🗑️", variant="secondary", size="sm")
 
     generate_button = gr.Button(value="GENERATE IMAGE", size="lg", variant="primary")
 
@@ -1344,6 +1345,8 @@ with gr.Blocks(theme='NoCrypt/miku@>=1.2.2', elem_id="main", fill_width=True, cs
         queue=False,
     )
     model_name_gui.change(get_t2i_model_info, [model_name_gui], [model_info_gui], queue=False)
+    translate_prompt_gui.click(translate_to_en, [prompt_gui], [prompt_gui], queue=False)\
+        .then(translate_to_en, [neg_prompt_gui], [neg_prompt_gui], queue=False)
 
     gr.on(
         triggers=[quick_model_type_gui.change, quick_genre_gui.change, quick_speed_gui.change, quick_aspect_gui.change],
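
For readers unfamiliar with the chained-event pattern wired up above, here is a minimal, self-contained Gradio sketch of the same idea. Component names such as demo, prompt_box, and neg_box are placeholders for illustration, not names from app.py; only the translate_to_en helper mirrors the one added to modutils.py in this commit.

# Minimal sketch (assumed names) of one button translating two textboxes in sequence.
import gradio as gr
from translatepy import Translator

translator = Translator()

def translate_to_en(text: str) -> str:
    # Same fallback behaviour as the helper added in modutils.py: return the input on any error.
    try:
        return str(translator.translate(text, "English"))
    except Exception as e:
        print(e)
        return text

with gr.Blocks() as demo:
    prompt_box = gr.Textbox(label="Prompt")
    neg_box = gr.Textbox(label="Negative prompt")
    translate_btn = gr.Button("Translate Prompt 📝", variant="secondary", size="sm")
    # .click() translates the positive prompt; .then() chains a second call for the negative prompt.
    translate_btn.click(translate_to_en, [prompt_box], [prompt_box], queue=False)\
        .then(translate_to_en, [neg_box], [neg_box], queue=False)

demo.launch()

Chaining with .then() keeps the two calls sequential, so the negative prompt is only translated after the positive prompt has been updated.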
modutils.py CHANGED
@@ -27,6 +27,17 @@ def list_sub(a, b):
     return [e for e in a if e not in b]
 
 
+from translatepy import Translator
+translator = Translator()
+def translate_to_en(input: str):
+    try:
+        output = str(translator.translate(input, 'English'))
+    except Exception as e:
+        output = input
+        print(e)
+    return output
+
+
 def get_local_model_list(dir_path):
     model_list = []
     valid_extensions = ('.ckpt', '.pt', '.pth', '.safetensors', '.bin')
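
A quick usage note on the new helper: translatepy's Translator.translate() returns a result object rather than a plain string, which is why translate_to_en wraps the call in str(). A minimal check, assuming translatepy is installed:

from translatepy import Translator

translator = Translator()
result = translator.translate("雨の中を歩く少女", "English")
print(str(result))   # an English rendering of the Japanese input
print(type(result))  # a translatepy result object, not a plain str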
requirements.txt CHANGED
@@ -8,9 +8,7 @@ transformers
 optimum[onnxruntime]
 dartrs
 huggingface_hub
-httpx==0.13.3
-httpcore
-googletrans==4.0.0rc1
+translatepy
 timm
 rapidfuzz
 sentencepiece
tagger.py CHANGED
@@ -1,11 +1,8 @@
+import spaces
 from PIL import Image
 import torch
 import gradio as gr
-import spaces
-from transformers import (
-    AutoImageProcessor,
-    AutoModelForImageClassification,
-)
+from transformers import AutoImageProcessor, AutoModelForImageClassification
 from pathlib import Path
 
 
@@ -190,18 +187,16 @@ def convert_danbooru_to_e621_prompt(input_prompt: str = "", prompt_type: str = "
     return output_prompt
 
 
+from translatepy import Translator
+translator = Translator()
 def translate_prompt(prompt: str = ""):
-    def translate_to_english(prompt):
-        import httpcore
-        setattr(httpcore, 'SyncHTTPTransport', 'AsyncHTTPProxy')
-        from googletrans import Translator
-        translator = Translator()
+    def translate_to_english(input: str):
         try:
-            translated_prompt = translator.translate(prompt, src='auto', dest='en').text
-            return translated_prompt
+            output = str(translator.translate(input, 'English'))
         except Exception as e:
+            output = input
             print(e)
-            return prompt
+        return output
 
     def is_japanese(s):
         import unicodedata
@@ -224,17 +219,13 @@ def translate_prompt(prompt: str = ""):
 
 
 def translate_prompt_to_ja(prompt: str = ""):
-    def translate_to_japanese(prompt):
-        import httpcore
-        setattr(httpcore, 'SyncHTTPTransport', 'AsyncHTTPProxy')
-        from googletrans import Translator
-        translator = Translator()
+    def translate_to_japanese(input: str):
         try:
-            translated_prompt = translator.translate(prompt, src='en', dest='ja').text
-            return translated_prompt
+            output = str(translator.translate(input, 'Japanese'))
         except Exception as e:
+            output = input
             print(e)
-            return prompt
+        return output
 
     def is_japanese(s):
         import unicodedata
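
One consequence of this refactor is that a single module-level Translator instance now serves both translation directions, replacing the per-call googletrans setup and the httpcore monkey-patch. The is_japanese helper referenced in the context lines above is not part of this diff; the sketch below is an assumption about what a typical unicodedata-based check of that kind looks like, not the repository's actual code.

import unicodedata

def is_japanese(s: str) -> bool:
    # Hypothetical heuristic: treat the string as Japanese if any character's
    # Unicode name marks it as hiragana, katakana, or a CJK ideograph.
    for ch in s:
        name = unicodedata.name(ch, "")
        if "HIRAGANA" in name or "KATAKANA" in name or "CJK UNIFIED" in name:
            return True
    return False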