maulanayyy committed on
Commit
1720554
·
verified ·
1 Parent(s): da289a3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -5
app.py CHANGED
@@ -1,11 +1,11 @@
1
  import gradio as gr
2
  import torch
3
- from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
4
 
5
  # Load the model and tokenizer
6
- model_name = "maulanayyy/code_translation_codet5"
7
- tokenizer = AutoTokenizer.from_pretrained(model_name)
8
- model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
9
 
10
  # Function to perform inference
11
  def translate_code(input_code):
@@ -13,7 +13,7 @@ def translate_code(input_code):
13
  input_text = f"translate Java to C#: {input_code}"
14
 
15
  # Tokenize the input
16
- input_ids = tokenizer(input_text, return_tensors="pt").input_ids
17
 
18
  # Generate the output
19
  with torch.no_grad():
 
1
  import gradio as gr
2
  import torch
3
+ from transformers import T5Tokenizer, T5ForConditionalGeneration
4
 
5
  # Load the model and tokenizer
6
+ model_name = "maulanayyy/code_translation_codet5" # Replace with the correct model name
7
+ tokenizer = T5Tokenizer.from_pretrained(model_name)
8
+ model = T5ForConditionalGeneration.from_pretrained(model_name).to("cuda")
9
 
10
  # Function to perform inference
11
  def translate_code(input_code):
 
13
  input_text = f"translate Java to C#: {input_code}"
14
 
15
  # Tokenize the input
16
+ input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to("cuda") # Ensure input_ids are on the GPU
17
 
18
  # Generate the output
19
  with torch.no_grad():