# Hugging Face Space "Bajiyo/Malayalam_transliteration" — app.py
# (revision efee0cf; header metadata from the file page preserved as a comment)
import gradio as gr
from huggingface_hub import from_pretrained_keras
from transformers import AutoTokenizer
# Download and load the Keras model from the Hugging Face Hub.
# NOTE(review): requires network access on first run; cached locally afterwards.
model = from_pretrained_keras("Bajiyo/Malayalam_transliteration")
# Load the tokenizer from the same repository so token ids match the
# model's vocabulary.
tokenizer = AutoTokenizer.from_pretrained("Bajiyo/Malayalam_transliteration")
# Define a function to make predictions
def predict(text: str) -> str:
    """Transliterate Malayalam *text* to English with the pretrained model.

    Parameters
    ----------
    text : str
        Malayalam input string.

    Returns
    -------
    str
        The decoded transliteration.
    """
    # Tokenize the input text into TensorFlow tensors.
    inputs = tokenizer(text, return_tensors="tf", padding=True, truncation=True)
    # Run the model. BUG FIX: keras Model.predict returns raw numpy arrays,
    # not an object with a `.logits` attribute — the original
    # `outputs.logits` would raise AttributeError. Handle both shapes.
    outputs = model.predict(inputs)
    logits = getattr(outputs, "logits", outputs)
    # BUG FIX: tokenizer.decode expects token ids, not raw logits — pick the
    # highest-scoring vocabulary entry at each position first.
    predicted_ids = logits[0].argmax(axis=-1)
    predicted_text = tokenizer.decode(predicted_ids, skip_special_tokens=True)
    return predicted_text
# Build the Gradio UI. BUG FIX: the `gr.inputs` / `gr.outputs` namespaces were
# deprecated in Gradio 3.x and removed in 4.x — use `gr.Textbox` directly.
inputs = gr.Textbox(label="Enter Malayalam Text")
outputs = gr.Textbox(label="Transliteration to English")
interface = gr.Interface(
    fn=predict,
    inputs=inputs,
    outputs=outputs,
    title="Malayalam to English Transliteration",
)
# Start the Gradio server (blocks until the app is stopped).
interface.launch()