Dddixyy committed on
Commit b2a650f · verified · 1 Parent(s): e7a45f0

Update app.py

Files changed (1)
  1. app.py +9 -10
app.py CHANGED
@@ -1,22 +1,16 @@
 import gradio as gr
-import torch
 from transformers import MarianMTModel, MarianTokenizer
 
-# Load the MarianMT model and tokenizer
+# Load the model and tokenizer from the Hub
 model_name = "Dddixyy/latin-italian-translator"
 tokenizer = MarianTokenizer.from_pretrained(model_name)
 model = MarianMTModel.from_pretrained(model_name)
 
 # Translation function
 def translate_latin_to_italian(latin_text):
-    # Truncate input to a maximum length of 512 tokens to avoid overload
-    inputs = tokenizer(latin_text, return_tensors="pt", padding=True, truncation=True, max_length=512)
-
-    # Use torch.no_grad() to speed up inference by not calculating gradients
+    inputs = tokenizer(latin_text, return_tensors="pt", padding=True, truncation=True)
     with torch.no_grad():
         generated_ids = model.generate(inputs["input_ids"])
-
-    # Decode the generated ids into a readable translation
     translation = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
     return translation[0]
 
@@ -27,9 +21,14 @@ interface = gr.Interface(
     outputs="text",
     title="Latin to Italian Translator",
     description="Translate Latin sentences to Italian using a fine-tuned MarianMT model.",
-    examples=[["Amor vincit omnia."], ["Veni, vidi, vici."], ["Carpe diem."], ["Alea iacta est."]]
+    examples=[
+        ["Amor vincit omnia."],
+        ["Veni, vidi, vici."],
+        ["Carpe diem."],
+        ["Alea iacta est."]
+    ]
 )
 
 # Launch the app
 if __name__ == "__main__":
-    interface.launch(server_name="0.0.0.0", server_port=7860)
+    interface.launch()
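Note on the resulting file: the first hunk removes "import torch", but the new version of translate_latin_to_italian still calls torch.no_grad(), which would raise a NameError the first time a translation runs. Below is a minimal sketch of how app.py would read after this commit with the import retained. The fn=translate_latin_to_italian and inputs="text" arguments to gr.Interface are assumptions, since those lines fall outside the diff hunks; everything else mirrors the new side of the diff.

    import gradio as gr
    import torch  # still needed for torch.no_grad(); the commit drops this import
    from transformers import MarianMTModel, MarianTokenizer

    # Load the model and tokenizer from the Hub
    model_name = "Dddixyy/latin-italian-translator"
    tokenizer = MarianTokenizer.from_pretrained(model_name)
    model = MarianMTModel.from_pretrained(model_name)

    # Translation function
    def translate_latin_to_italian(latin_text):
        # Tokenize with truncation so long inputs stay within the model's limit
        inputs = tokenizer(latin_text, return_tensors="pt", padding=True, truncation=True)
        # Disable gradient tracking for faster inference
        with torch.no_grad():
            generated_ids = model.generate(inputs["input_ids"])
        # Decode the generated ids into readable text
        translation = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
        return translation[0]

    # Gradio interface; fn and inputs are assumed, the rest follows the diff
    interface = gr.Interface(
        fn=translate_latin_to_italian,
        inputs="text",
        outputs="text",
        title="Latin to Italian Translator",
        description="Translate Latin sentences to Italian using a fine-tuned MarianMT model.",
        examples=[
            ["Amor vincit omnia."],
            ["Veni, vidi, vici."],
            ["Carpe diem."],
            ["Alea iacta est."]
        ]
    )

    # Launch the app
    if __name__ == "__main__":
        interface.launch()

The simplified interface.launch() relies on Gradio's defaults for host and port, which is typically sufficient when the app is run on a hosted Space rather than bound explicitly to 0.0.0.0:7860.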