import os

import gradio as gr
import torch
from huggingface_hub import login
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# HF_TOKEN should be set in your environment
token = os.getenv("HF_TOKEN")
login(token=token)

# Load the fine-tuned classification model and its tokenizer from the Hub
model_path = "SivaMallikarjun/multi-lang-rl-model"
model = AutoModelForSequenceClassification.from_pretrained(model_path)
tokenizer = AutoTokenizer.from_pretrained(model_path)


def classify_text(text):
    # Tokenize the input and run a forward pass; the highest logit is the predicted class
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        outputs = model(**inputs)
    prediction = torch.argmax(outputs.logits, dim=1).item()
    return "Correct" if prediction == 1 else "Incorrect"


iface = gr.Interface(
    fn=classify_text,
    inputs="text",
    outputs="text",
    title="Multi-Language RL Text Classifier",
)

if __name__ == "__main__":
    iface.launch()