Update mmlu_pro_eval_adapted.py
mmlu_pro_eval_adapted.py CHANGED (+2 -2)
@@ -146,7 +146,7 @@ def extract_final(text):
     return None
 
 
-def batch_inference(llm, sampling_params, inference_batch):
+def batch_inference(llm, sampling_params, inference_batch, tokenizer):
     start = time.time()
     outputs = llm.generate(inference_batch, sampling_params)
     logging.info("Batch of size: ", str(len(inference_batch)) + ". Time taken: " + str(time.time() - start))
@@ -296,7 +296,7 @@ def eval_cot(subject, model, tokenizer, val_df, test_df, num_shots=5, debug_mode
 
 
     batch_fn = batch_inference_debug_mode if debug_mode else batch_inference
-    pred_batch, response_batch = batch_fn(llm, sampling_params, inference_batches)
+    pred_batch, response_batch = batch_fn(llm, sampling_params, inference_batches, tokenizer)
 
     # Add predictions to test DataFrame
     results_df = test_df.copy()
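The change itself is small: batch_inference gains a tokenizer parameter, and the call site in eval_cot passes it through. Because batch_fn can also resolve to batch_inference_debug_mode, that function must accept the same fourth parameter or the debug path will fail with a TypeError. The diff shows only the first three lines of batch_inference's body, so how the tokenizer is actually used is not visible here; the sketch below is one plausible reading, assuming vLLM-style llm.generate outputs, a Hugging Face tokenizer, and the file's own extract_final helper, not the committed implementation. It also repairs the logging.info call visible in the context lines, which passes two positional arguments with no %-placeholder in the message, so the batch size and timing never actually reach the log.

import logging
import time

def batch_inference(llm, sampling_params, inference_batch, tokenizer):
    start = time.time()
    outputs = llm.generate(inference_batch, sampling_params)
    # Lazy %-formatting; the committed call passes the message and the data
    # as two separate positional arguments, which logging cannot format.
    logging.info("Batch of size: %d. Time taken: %.2fs",
                 len(inference_batch), time.time() - start)
    response_batch = [out.outputs[0].text for out in outputs]  # vLLM RequestOutput
    # Hypothetical use of the new tokenizer argument: flag responses that
    # likely hit the max_tokens limit before emitting a final answer.
    truncated = sum(
        len(tokenizer.encode(text)) >= sampling_params.max_tokens
        for text in response_batch
    )
    if truncated:
        logging.info("%d of %d responses may be truncated.",
                     truncated, len(response_batch))
    # extract_final is the answer parser defined earlier in this file.
    pred_batch = [extract_final(text) for text in response_batch]
    return pred_batch, response_batch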