removing tqdm logging
perplexity.py  +1 -2
@@ -20,7 +20,6 @@ from torch.nn import CrossEntropyLoss
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 import evaluate
-from evaluate import logging
 
 
 _CITATION = """\
@@ -161,7 +160,7 @@ class Perplexity(evaluate.Metric):
         ppls = []
         loss_fct = CrossEntropyLoss(reduction="none")
 
-        for start_index in logging.tqdm(range(0, len(encoded_texts), batch_size)):
+        for start_index in range(0, len(encoded_texts), batch_size):
             end_index = min(start_index + batch_size, len(encoded_texts))
             encoded_batch = encoded_texts[start_index:end_index]
             attn_mask = attn_masks[start_index:end_index]
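The change is behavior-preserving aside from the progress bar: logging.tqdm only wraps range to report progress, so iterating over the plain range visits the same batch start indices. For context, here is a condensed sketch of what each iteration of that loop computes, following the shape of the surrounding code; the helper name batch_perplexities is ours, and model, encoded_batch, and attn_mask stand in for the objects the diff slices out.

import torch
from torch.nn import CrossEntropyLoss

def batch_perplexities(model, encoded_batch, attn_mask):
    """Per-sequence perplexity for one batch fed to a causal LM.

    Sketch of the loop body in perplexity.py: `encoded_batch` and
    `attn_mask` are the padded token ids and attention mask sliced
    out by the `start_index:end_index` window above.
    """
    loss_fct = CrossEntropyLoss(reduction="none")
    with torch.no_grad():
        logits = model(encoded_batch, attention_mask=attn_mask).logits

    # Shift so that position t predicts token t+1.
    shift_logits = logits[..., :-1, :].contiguous()
    shift_labels = encoded_batch[..., 1:].contiguous()
    shift_mask = attn_mask[..., 1:].contiguous()

    # Perplexity per sequence: exp of the masked mean negative
    # log-likelihood over the non-padding positions.
    nll = loss_fct(shift_logits.transpose(1, 2), shift_labels) * shift_mask
    return torch.exp(nll.sum(1) / shift_mask.sum(1)).tolist()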
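For completeness, a minimal usage sketch of the metric this file implements, assuming the standard evaluate.load API; "gpt2" and the input strings are placeholders, and depending on the library version the text argument may be named input_texts rather than predictions.

import evaluate

# Load the perplexity metric implemented by perplexity.py above.
perplexity = evaluate.load("perplexity", module_type="metric")

# "gpt2" and the inputs below are illustrative placeholders.
results = perplexity.compute(
    model_id="gpt2",
    predictions=["Hello world.", "The movie was great."],
    batch_size=2,
)
print(results["mean_perplexity"])  # aggregate over the per-input perplexities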