# Hugging Face Space: multilingual text-classification demo (XLM-R + Gradio)
import gradio as gr
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Load pre-trained model & tokenizer (XLM-R for multilingual text classification).
# NOTE(review): "xlm-roberta-base" has no fine-tuned classification head — with
# num_labels=2 the head is randomly initialized, so predictions are meaningless
# until the model is fine-tuned. Swap in a fine-tuned checkpoint for real use.
model_name = "xlm-roberta-base"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name, num_labels=2)
# Inference-only script: disable dropout etc. for deterministic predictions.
model.eval()
# Define prediction function
def classify_text(text: str) -> str:
    """Classify *text* as "Correct" or "Incorrect" with the module-level model.

    Args:
        text: Input string in any language supported by XLM-R.

    Returns:
        "Correct" if the model predicts label 1, otherwise "Incorrect".
    """
    # Truncate to the model's maximum length so arbitrarily long UI input
    # cannot overflow the position embeddings and raise at inference time.
    inputs = tokenizer(text, return_tensors="pt", truncation=True)
    with torch.no_grad():  # inference only — skip autograd bookkeeping
        output = model(**inputs)
    label = torch.argmax(output.logits, dim=1).item()
    return "Correct" if label == 1 else "Incorrect"
# Gradio UI: a single textbox feeding classify_text, result shown as plain text.
gradio_app = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(label="Enter Text"),
    outputs="text",
    title="Multi-Language RL Model",
)

# Launch only when run as a script, so the module can be imported
# (e.g. by tests or a WSGI wrapper) without starting a server.
if __name__ == "__main__":
    gradio_app.launch()