xMcLovinx committed on
Commit df9f949 · verified · 1 Parent(s): 10703e4

Update app.py

Files changed (1)
  1. app.py +3 -2
app.py CHANGED
@@ -3,15 +3,16 @@ from transformers import MBartForConditionalGeneration, MBart50TokenizerFast
 model = MBartForConditionalGeneration.from_pretrained("SnypzZz/Llama2-13b-Language-translate")
 tokenizer = MBart50TokenizerFast.from_pretrained("SnypzZz/Llama2-13b-Language-translate", src_lang="en_XX")
 
+iface = gr.Interface(fn=d, inputs=["textbox", gr.Dropdown(["de_DE", "es_XX"], label="Choose Output Language")], outputs="textbox")
+
 def d(input):
     model_inputs = tokenizer(input, return_tensors="pt")
     generated_tokens = model.generate(
         **model_inputs,
-        forced_bos_token_id=tokenizer.lang_code_to_id["de_DE"]
+        forced_bos_token_id=tokenizer.lang_code_to_id[gr.Dropdown.value]
     )
     output_DE = tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)[0]
     output_DE = output_DE.strip("[]' ")
     return output_DE
 
-iface = gr.Interface(fn=d, inputs="textbox", outputs="textbox")
 iface.launch()
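
For reference, a minimal runnable sketch of the pattern this commit appears to be aiming for. It is not the committed code: in Gradio, the dropdown selection is normally passed into the wrapped function as an extra argument rather than read from gr.Dropdown.value, and the Interface is built after the function it wraps. The names translate and target_lang are illustrative only, and the import gradio as gr line is assumed to sit above the hunk shown in the diff.

import gradio as gr
from transformers import MBartForConditionalGeneration, MBart50TokenizerFast

model = MBartForConditionalGeneration.from_pretrained("SnypzZz/Llama2-13b-Language-translate")
tokenizer = MBart50TokenizerFast.from_pretrained("SnypzZz/Llama2-13b-Language-translate", src_lang="en_XX")

def translate(text, target_lang):
    # target_lang receives the dropdown selection, e.g. "de_DE" or "es_XX"
    model_inputs = tokenizer(text, return_tensors="pt")
    generated_tokens = model.generate(
        **model_inputs,
        forced_bos_token_id=tokenizer.lang_code_to_id[target_lang],
    )
    return tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)[0]

iface = gr.Interface(
    fn=translate,
    inputs=["textbox", gr.Dropdown(["de_DE", "es_XX"], label="Choose Output Language")],
    outputs="textbox",
)
iface.launch()

With this wiring, selecting "es_XX" in the dropdown forces Spanish as the first decoded token via forced_bos_token_id, while "de_DE" forces German.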