Dmytro Vodianytskyi committed
Commit 0c69b28 · Parent: 5ab728f

space updated

Files changed (1): app.py (+2 −2)
app.py CHANGED
@@ -7,7 +7,7 @@ TOKENIZER = T5Tokenizer.from_pretrained('werent4/mt5TranslatorLT')
 MODEL = MT5ForConditionalGeneration.from_pretrained("werent4/mt5TranslatorLT")
 MODEL.to(DEVICE)
 
-def translate(text, model,device, translation_way = "en-lt"):
+def translate(text, device,translation_way = "en-lt"):
     translations_ways = {
         "en-lt": "<EN2LT>",
         "lt-en": "<LT2EN>"
@@ -17,7 +17,7 @@ def translate(text, model,device, translation_way = "en-lt"):
     text = f"{translations_ways[translation_way]} {text}"
     encoded_input = TOKENIZER(input_text, return_tensors="pt", padding=True, truncation=True, max_length=128).to(device)
     with torch.no_grad():
-        output_tokens = model.generate(
+        output_tokens = MODEL.generate(
             **encoded_input,
             max_length=128,
             num_beams=5,
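
For context, a minimal self-contained sketch of what the translate function could look like after this commit: the caller no longer passes a model argument and generation uses the module-level MODEL. The decoding step, the skip_special_tokens flag, and the use of text instead of the undefined input_text are assumptions filled in for illustration, not code shown in this diff.

# Sketch of app.py after this commit; see assumptions noted above.
import torch
from transformers import T5Tokenizer, MT5ForConditionalGeneration

DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
TOKENIZER = T5Tokenizer.from_pretrained("werent4/mt5TranslatorLT")
MODEL = MT5ForConditionalGeneration.from_pretrained("werent4/mt5TranslatorLT")
MODEL.to(DEVICE)

def translate(text, device, translation_way="en-lt"):
    # Prefix the input with the task token the fine-tuned model expects.
    translations_ways = {
        "en-lt": "<EN2LT>",
        "lt-en": "<LT2EN>",
    }
    text = f"{translations_ways[translation_way]} {text}"
    # Tokenize on the requested device. `text` is used here; the committed
    # code references `input_text`, assumed to refer to the same value.
    encoded_input = TOKENIZER(
        text, return_tensors="pt", padding=True, truncation=True, max_length=128
    ).to(device)
    with torch.no_grad():
        # After this commit the module-level MODEL is used instead of a
        # model argument supplied by the caller.
        output_tokens = MODEL.generate(
            **encoded_input,
            max_length=128,
            num_beams=5,
        )
    # Decoding is not shown in the diff; this return is an assumption.
    return TOKENIZER.decode(output_tokens[0], skip_special_tokens=True)

# Hypothetical usage:
# print(translate("Good morning", DEVICE, translation_way="en-lt"))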