durrani committed on
Commit
ab6decf
·
1 Parent(s): 6efbfeb
Files changed (1) hide show
  1. app.py +9 -14
app.py CHANGED
@@ -2,21 +2,16 @@ import torch
2
  from transformers import AutoTokenizer, AutoModelForSequenceClassification
3
 
4
  # Load the model and tokenizer
5
- model_name = "your_model_name" # Replace with the name of the model you want to use
6
- tokenizer = AutoTokenizer.from_pretrained(model_name)
7
- model = AutoModelForSequenceClassification.from_pretrained(model_name)
8
-
9
  # Values for the new scenario
10
- new_students = int(input("Enter new number of students: ")) # Number of students in the new scenario
11
- new_temperature = int(input("Enter new temperature: ")) # Temperature in the new scenario
12
-
13
- # Create the feature array for the new scenario
14
- new_scenario = [1, new_students, new_temperature]
15
 
16
  # Convert the input to tokens
17
- inputs = tokenizer.encode_plus(
18
  "Number of students: {}, Temperature: {}".format(new_students, new_temperature),
19
- add_special_tokens=True,
20
  padding="max_length",
21
  truncation=True,
22
  max_length=64,
@@ -25,11 +20,11 @@ inputs = tokenizer.encode_plus(
25
 
26
  # Make the prediction
27
  with torch.no_grad():
28
- outputs = model(**inputs)
29
  logits = outputs.logits
30
- predicted_rooms = torch.argmax(logits).item()
31
 
32
  # Print the results
33
  print("Number of students:", new_students)
34
  print("Temperature:", new_temperature)
35
- print("Predicted number of rooms:", predicted_rooms)
 
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Load the model and tokenizer.
# NOTE(review): "AI" is a placeholder — replace with a real Hub model id or a
# local path before running. Use model_name consistently instead of repeating
# the literal in each from_pretrained call.
model_name = "AI"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)

# Values for the new scenario (read interactively from the user).
new_students = int(input("Enter the number of students in the new scenario: "))
new_temperature = int(input("Enter the temperature in the new scenario: "))

# Convert the input to tokens.
# BUG FIX: tokenizer.encode() returns a plain list of token ids, which cannot
# be fed to the model below. Calling the tokenizer directly with
# return_tensors="pt" yields a BatchEncoding of tensors suitable for
# model(**inputs).
inputs = tokenizer(
    "Number of students: {}, Temperature: {}".format(new_students, new_temperature),
    padding="max_length",
    truncation=True,
    max_length=64,
    return_tensors="pt",
)

# Make the prediction (inference only: eval mode, no gradient tracking).
model.eval()
with torch.no_grad():
    outputs = model(**inputs)  # BUG FIX: unpack the encoding as keyword args
    logits = outputs.logits  # shape (1, num_labels)
    predicted_rooms = torch.argmax(logits, dim=1).item()

# Print the results
print("Number of students:", new_students)
print("Temperature:", new_temperature)
print("Predicted label for number of rooms:", predicted_rooms)