import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch
import torch.nn.functional as F

# Load the fine-tuned classifier and its tokenizer from the Hugging Face Hub
model_path = "mjpsm/excuses-classifier-model"
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForSequenceClassification.from_pretrained(model_path)
model.eval()  # inference only: disable dropout
id2label = model.config.id2label

# Prediction function: tokenize the input, run the model, and return
# the predicted label together with its softmax confidence
def classify_excuse(text):
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        outputs = model(**inputs)
    probs = F.softmax(outputs.logits, dim=-1)
    pred_id = torch.argmax(probs, dim=-1).item()
    confidence = probs[0][pred_id].item()
    label = id2label[pred_id]
    return f"Prediction: {label}", f"Confidence: {confidence:.4f}"

# Gradio Interface
interface = gr.Interface(
    fn=classify_excuse,
    inputs=gr.Textbox(lines=4, placeholder="Enter your Zoom excuse here..."),
    outputs=["text", "text"],
    title="🧠 Zoom Excuse Classifier",
    description="Classify a Zoom excuse as either 'reasonable' or 'unreasonable' using a fine-tuned AI model."
)

# Launch app
if __name__ == "__main__":
    interface.launch()