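"""Predict how many rooms are needed for a given number of students and a
given temperature, using a fine-tuned sequence-classification model."""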
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Load the model and tokenizer
model_name = "AI"  # Replace with the name or path of the model you want to use
tokenizer = AutoTokenizer.from_pretrained("AI")
model = AutoModelForSequenceClassification.from_pretrained("AI")
# Values for the new scenario
new_students = int(input("Enter the number of students in the new scenario: "))
new_temperature = int(input("Enter the temperature in the new scenario: "))

# Tokenize the input (returns input_ids plus an attention mask for the padding)
inputs = tokenizer(
    f"Number of students: {new_students}, Temperature: {new_temperature}",
    padding="max_length",
    truncation=True,
    max_length=64,
    return_tensors="pt"
)

# Make the prediction
with torch.no_grad():
    outputs = model(**inputs)
    logits = outputs.logits
    predicted_rooms = torch.argmax(logits, dim=1).item()
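# Note: the predicted class index is read as the number of rooms, which assumes
# the model was fine-tuned with room counts as its label ids.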

# Print the results
print("Number of students:", new_students)
print("Temperature:", new_temperature)
print("Predicted label for number of rooms:", predicted_rooms)