Datasets:
OALL
/

Modalities:
Text
Formats:
json
Size:
< 1K
Libraries:
Datasets
Dask
amztheory committed on
Commit
75e9ea4
·
verified ·
1 Parent(s): 0d75201

Upload arcee-ai/Arcee-Spark/results_2025-01-19T14-44-12.613580.json with huggingface_hub

Browse files
arcee-ai/Arcee-Spark/results_2025-01-19T14-44-12.613580.json CHANGED
@@ -443,12 +443,12 @@
443
  "acc_norm_stderr": 0.038342347441649924
444
  },
445
  "community|madinah_qa:Arabic Language (General)|0": {
446
- "acc_norm": 0.3480392156862745,
447
- "acc_norm_stderr": 0.019270998708223974
448
  },
449
  "community|madinah_qa:Arabic Language (Grammar)|0": {
450
- "acc_norm": 0.36164383561643837,
451
- "acc_norm_stderr": 0.02518380627086876
452
  },
453
  "community|aratrust:Ethics|0": {
454
  "acc_norm": 0.6666666666666666,
@@ -495,16 +495,16 @@
495
  "acc_norm_stderr": 0.033175029527802366
496
  },
497
  "community|madinah_qa:_average|0": {
498
- "acc_norm": 0.3548415256513564,
499
- "acc_norm_stderr": 0.02222740248954637
500
  },
501
  "community|aratrust:_average|0": {
502
  "acc_norm": 0.8260871312518197,
503
  "acc_norm_stderr": 0.043875875496117905
504
  },
505
  "all": {
506
- "acc_norm": 0.5562164535275852,
507
- "acc_norm_stderr": 0.033593016416660876,
508
  "llm_as_judge": 0.7073599240265892,
509
  "llm_as_judge_stderr": 0.00012967914776479275
510
  },
@@ -6714,29 +6714,29 @@
6714
  },
6715
  "community|madinah_qa:Arabic Language (General)|0": {
6716
  "hashes": {
6717
- "hash_examples": "bef69fb8b3b75f28",
6718
- "hash_full_prompts": "667946e2e4505274",
6719
- "hash_input_tokens": "b32f19a5179cf04c",
6720
  "hash_cont_tokens": "05d3f2bc980e6cbb"
6721
  },
6722
  "truncated": 0,
6723
  "non_truncated": 612,
6724
- "padded": 2346,
6725
- "non_padded": 57,
6726
  "effective_few_shots": 0.0,
6727
  "num_truncated_few_shots": 0
6728
  },
6729
  "community|madinah_qa:Arabic Language (Grammar)|0": {
6730
  "hashes": {
6731
- "hash_examples": "bd066a9e6a140a4b",
6732
- "hash_full_prompts": "21a84be325d051ca",
6733
- "hash_input_tokens": "104af0b295a7c205",
6734
  "hash_cont_tokens": "ac1327c8a93a78f2"
6735
  },
6736
  "truncated": 0,
6737
  "non_truncated": 365,
6738
- "padded": 1521,
6739
- "non_padded": 67,
6740
  "effective_few_shots": 0.0,
6741
  "num_truncated_few_shots": 0
6742
  },
 
443
  "acc_norm_stderr": 0.038342347441649924
444
  },
445
  "community|madinah_qa:Arabic Language (General)|0": {
446
+ "acc_norm": 0.42483660130718953,
447
+ "acc_norm_stderr": 0.01999797303545834
448
  },
449
  "community|madinah_qa:Arabic Language (Grammar)|0": {
450
+ "acc_norm": 0.3589041095890411,
451
+ "acc_norm_stderr": 0.025142011474008363
452
  },
453
  "community|aratrust:Ethics|0": {
454
  "acc_norm": 0.6666666666666666,
 
495
  "acc_norm_stderr": 0.033175029527802366
496
  },
497
  "community|madinah_qa:_average|0": {
498
+ "acc_norm": 0.3918703554481153,
499
+ "acc_norm_stderr": 0.02256999225473335
500
  },
501
  "community|aratrust:_average|0": {
502
  "acc_norm": 0.8260871312518197,
503
  "acc_norm_stderr": 0.043875875496117905
504
  },
505
  "all": {
506
+ "acc_norm": 0.5568494249771023,
507
+ "acc_norm_stderr": 0.03359887265196322,
508
  "llm_as_judge": 0.7073599240265892,
509
  "llm_as_judge_stderr": 0.00012967914776479275
510
  },
 
6714
  },
6715
  "community|madinah_qa:Arabic Language (General)|0": {
6716
  "hashes": {
6717
+ "hash_examples": "25bf94d05f737b63",
6718
+ "hash_full_prompts": "f5e1a1a107741cf1",
6719
+ "hash_input_tokens": "cec0d4a846b0586f",
6720
  "hash_cont_tokens": "05d3f2bc980e6cbb"
6721
  },
6722
  "truncated": 0,
6723
  "non_truncated": 612,
6724
+ "padded": 2394,
6725
+ "non_padded": 9,
6726
  "effective_few_shots": 0.0,
6727
  "num_truncated_few_shots": 0
6728
  },
6729
  "community|madinah_qa:Arabic Language (Grammar)|0": {
6730
  "hashes": {
6731
+ "hash_examples": "e65fe4df843f4380",
6732
+ "hash_full_prompts": "4a89b1c3f69e4c32",
6733
+ "hash_input_tokens": "4aa26f8e283c8675",
6734
  "hash_cont_tokens": "ac1327c8a93a78f2"
6735
  },
6736
  "truncated": 0,
6737
  "non_truncated": 365,
6738
+ "padded": 1581,
6739
+ "non_padded": 7,
6740
  "effective_few_shots": 0.0,
6741
  "num_truncated_few_shots": 0
6742
  },