import gradio as gr
import torch

from germanToEnglish import Seq2SeqTransformer, translate
# Assumption: germanToEnglish also exposes the hyperparameters the checkpoint
# was trained with (names follow the PyTorch translation tutorial that
# Seq2SeqTransformer is based on); adjust these imports to match the module.
from germanToEnglish import (
    NUM_ENCODER_LAYERS, NUM_DECODER_LAYERS, EMB_SIZE, NHEAD,
    SRC_VOCAB_SIZE, TGT_VOCAB_SIZE, FFN_HID_DIM,
)

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Rebuild the architecture, then load the saved weights into it.
# load_state_dict() returns a status object rather than the model,
# so its result must not be assigned back to `model`.
model = Seq2SeqTransformer(NUM_ENCODER_LAYERS, NUM_DECODER_LAYERS, EMB_SIZE,
                           NHEAD, SRC_VOCAB_SIZE, TGT_VOCAB_SIZE, FFN_HID_DIM)
model.load_state_dict(torch.load('./transformer_model.pth', map_location=device))
model = model.to(device)
model.eval()

if __name__ == "__main__":
    # Create the Gradio interface
    iface = gr.Interface(
        fn=translate,                     # Translation function exposed to the UI
        inputs=gr.Textbox(label="Text"),  # German source text
        outputs=["text"],                 # Translated English text
        cache_examples=False,             # Disable caching of examples
        title="German to English",        # Title shown at the top of the interface
    )

    # Launch the interface with a public share link
    iface.launch(share=True)
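
# Note (assumption): if translate() in germanToEnglish follows the PyTorch
# translation tutorial and has the signature translate(model, src_sentence),
# Gradio cannot call it directly with a single textbox value. In that case,
# wrap it in a hypothetical adapter and pass that as fn instead, e.g.:
#
#     def translate_text(text: str) -> str:
#         """Adapter so the Gradio textbox maps onto translate(model, text)."""
#         return translate(model, text)
#
#     iface = gr.Interface(fn=translate_text, ...)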