jennasparks committed
Commit f663404 · verified · 1 Parent(s): d1557bb

input Jacqueline's code

Files changed (1):
  tasks/text.py +9 -12
tasks/text.py CHANGED
@@ -70,19 +70,16 @@ async def evaluate_text(request: TextEvaluationRequest):
     # Update the code below to replace the random baseline by your model inference within the inference pass where the energy consumption and emissions are tracked.
     #--------------------------------------------------------------------------------------------
 
-    try:
-        # Tokenize the input texts
-        encoded_input = tokenizer(test_dataset["quote"], truncation=True, padding=True, return_tensors="tf")
-
-        # Make predictions
-        outputs = model(encoded_input["input_ids"], attention_mask=encoded_input["attention_mask"], training=False)
-        predictions = tf.argmax(outputs.logits, axis=1).numpy()
+    #make predictions
+    predictions = []
+
+    for i in range(len(test_dataset["quote"])):
+        encoded_input = tokenizer(test_dataset["quote"][i], truncation=True, padding=True, return_tensors="tf")
+        outputs = model(encoded_input["input_ids"], attention_mask=encoded_input["attention_mask"], training=False)
+        predictions.append(tf.argmax(outputs.logits, axis=1))
 
-        # Get true labels
-        true_labels = test_dataset["label"]
-    except Exception as e:
-        print(f"An error occurred during prediction: {str(e)}")
-        raise
+    # Get true labels
+    true_labels = test_dataset["label"]
 
     #--------------------------------------------------------------------------------------------
     # YOUR MODEL INFERENCE STOPS HERE
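
For reference, here is a minimal self-contained sketch of the per-example inference loop this commit introduces. It assumes a Hugging Face TensorFlow sequence-classification checkpoint; the checkpoint name and the sample quotes below are placeholders, since the actual model and tokenizer are loaded elsewhere in tasks/text.py.

```python
import tensorflow as tf
from transformers import AutoTokenizer, TFAutoModelForSequenceClassification

# Placeholder checkpoint; the Space loads its own model/tokenizer elsewhere.
checkpoint = "distilbert-base-uncased-finetuned-sst-2-english"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = TFAutoModelForSequenceClassification.from_pretrained(checkpoint)

# Stand-in for test_dataset["quote"]
quotes = ["The climate has always changed.", "Heat waves are becoming more frequent."]

predictions = []
for quote in quotes:
    # Tokenize one quote at a time, as in the committed loop
    encoded_input = tokenizer(quote, truncation=True, padding=True, return_tensors="tf")
    # Forward pass in inference mode (training=False disables dropout)
    outputs = model(encoded_input["input_ids"],
                    attention_mask=encoded_input["attention_mask"],
                    training=False)
    # argmax over the label axis; unwrap the batch-of-one tensor to a plain int
    predictions.append(int(tf.argmax(outputs.logits, axis=1).numpy()[0]))

print(predictions)  # list of predicted label ids
```

Note that the committed loop appends the rank-1 tensor returned by tf.argmax directly; unwrapping to a Python int, as sketched above, is one way to keep predictions a flat list of label ids for downstream metrics.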
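
The surrounding comments refer to an inference pass where energy consumption and emissions are tracked. That tracker is set up outside this hunk; as a rough illustration of the pattern (not the template's actual code), here is how an inference block is commonly wrapped with codecarbon, with run_inference standing in for the loop above:

```python
from codecarbon import EmissionsTracker

def run_inference(quotes):
    # Hypothetical stand-in for the per-example loop in the diff
    return [0 for _ in quotes]

tracker = EmissionsTracker()
tracker.start()                           # begin measuring energy use
predictions = run_inference(["example quote"])
emissions_kg = tracker.stop()             # estimated kg CO2-equivalent
print(predictions, emissions_kg)
```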