|
import torch |
|
from transformers import AutoTokenizer, AutoModelForSequenceClassification |
|
|
|
|
|
# Hugging Face Hub checkpoint to load. NOTE(review): "AI" is almost certainly
# a placeholder — replace with a real model id fine-tuned to map
# (student count, temperature) text to a room-count label. TODO confirm.
model_name = "AI"

# Load tokenizer and classifier from the SAME checkpoint.
# Bug fix: the original re-hard-coded the literal "AI" here instead of
# using `model_name`, so changing the model required three separate edits.
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)
model.eval()  # inference mode: disables dropout for deterministic outputs

# Collect the scenario to score. Temperature is parsed as a float so
# non-integer readings (e.g. "21.5") are accepted; integer input still works.
new_students = int(input("Enter the number of students in the new scenario: "))
new_temperature = float(input("Enter the temperature in the new scenario: "))

# Encode the scenario as one text sequence, padded/truncated to a fixed
# length of 64 tokens. `tokenizer(...)` replaces the deprecated
# `encode_plus`; `return_tensors="pt"` yields PyTorch tensors for the model.
inputs = tokenizer(
    f"Number of students: {new_students}, Temperature: {new_temperature}",
    padding="max_length",
    truncation=True,
    max_length=64,
    return_tensors="pt",
)

# Run the classifier without gradient tracking (saves memory; inference only).
with torch.no_grad():
    outputs = model(**inputs)
    logits = outputs.logits
    # argmax over the class dimension -> index of the predicted room label.
    predicted_rooms = torch.argmax(logits, dim=1).item()

print("Number of students:", new_students)
print("Temperature:", new_temperature)
print("Predicted label for number of rooms:", predicted_rooms)
|
|