Datasets:
OALL
/

Modalities:
Text
Formats:
json
Size:
< 1K
Libraries:
Datasets
Dask
amztheory committed on
Commit
0d75201
·
verified ·
1 Parent(s): 96d98a4

Upload airev-ai/emirati-14b-v2/results_2025-01-19T15-03-37.580700.json with huggingface_hub

Browse files
airev-ai/emirati-14b-v2/results_2025-01-19T15-03-37.580700.json CHANGED
@@ -443,12 +443,12 @@
443
  "acc_norm_stderr": 0.037867207062342145
444
  },
445
  "community|madinah_qa:Arabic Language (General)|0": {
446
- "acc_norm": 0.36437908496732024,
447
- "acc_norm_stderr": 0.019469518221573688
448
  },
449
  "community|madinah_qa:Arabic Language (Grammar)|0": {
450
- "acc_norm": 0.3287671232876712,
451
- "acc_norm_stderr": 0.024622384500627866
452
  },
453
  "community|aratrust:Ethics|0": {
454
  "acc_norm": 0.7333333333333333,
@@ -495,16 +495,16 @@
495
  "acc_norm_stderr": 0.032522499835172955
496
  },
497
  "community|madinah_qa:_average|0": {
498
- "acc_norm": 0.34657310412749576,
499
- "acc_norm_stderr": 0.022045951361100775
500
  },
501
  "community|aratrust:_average|0": {
502
  "acc_norm": 0.817660316101802,
503
  "acc_norm_stderr": 0.04730004378760604
504
  },
505
  "all": {
506
- "acc_norm": 0.5434848734348674,
507
- "acc_norm_stderr": 0.03402627669081204,
508
  "llm_as_judge": 0.7723171889838458,
509
  "llm_as_judge_stderr": 0.00010093858300484214
510
  },
@@ -6714,29 +6714,29 @@
6714
  },
6715
  "community|madinah_qa:Arabic Language (General)|0": {
6716
  "hashes": {
6717
- "hash_examples": "bef69fb8b3b75f28",
6718
- "hash_full_prompts": "defb906f381cfcd9",
6719
- "hash_input_tokens": "cfd6f557e92b800e",
6720
  "hash_cont_tokens": "05d3f2bc980e6cbb"
6721
  },
6722
  "truncated": 0,
6723
  "non_truncated": 612,
6724
- "padded": 2346,
6725
- "non_padded": 57,
6726
  "effective_few_shots": 0.0,
6727
  "num_truncated_few_shots": 0
6728
  },
6729
  "community|madinah_qa:Arabic Language (Grammar)|0": {
6730
  "hashes": {
6731
- "hash_examples": "bd066a9e6a140a4b",
6732
- "hash_full_prompts": "070068c411fb3997",
6733
- "hash_input_tokens": "f92e93204cd1dcfd",
6734
  "hash_cont_tokens": "ac1327c8a93a78f2"
6735
  },
6736
  "truncated": 0,
6737
  "non_truncated": 365,
6738
- "padded": 1521,
6739
- "non_padded": 67,
6740
  "effective_few_shots": 0.0,
6741
  "num_truncated_few_shots": 0
6742
  },
 
443
  "acc_norm_stderr": 0.037867207062342145
444
  },
445
  "community|madinah_qa:Arabic Language (General)|0": {
446
+ "acc_norm": 0.46568627450980393,
447
+ "acc_norm_stderr": 0.020180144843307296
448
  },
449
  "community|madinah_qa:Arabic Language (Grammar)|0": {
450
+ "acc_norm": 0.3452054794520548,
451
+ "acc_norm_stderr": 0.0249195770390091
452
  },
453
  "community|aratrust:Ethics|0": {
454
  "acc_norm": 0.7333333333333333,
 
495
  "acc_norm_stderr": 0.032522499835172955
496
  },
497
  "community|madinah_qa:_average|0": {
498
+ "acc_norm": 0.4054458769809294,
499
+ "acc_norm_stderr": 0.022549860941158198
500
  },
501
  "community|aratrust:_average|0": {
502
  "acc_norm": 0.817660316101802,
503
  "acc_norm_stderr": 0.04730004378760604
504
  },
505
  "all": {
506
+ "acc_norm": 0.5444912456203963,
507
+ "acc_norm_stderr": 0.034034890529787386,
508
  "llm_as_judge": 0.7723171889838458,
509
  "llm_as_judge_stderr": 0.00010093858300484214
510
  },
 
6714
  },
6715
  "community|madinah_qa:Arabic Language (General)|0": {
6716
  "hashes": {
6717
+ "hash_examples": "25bf94d05f737b63",
6718
+ "hash_full_prompts": "5c56043c8155627b",
6719
+ "hash_input_tokens": "449ebe5b21f264e0",
6720
  "hash_cont_tokens": "05d3f2bc980e6cbb"
6721
  },
6722
  "truncated": 0,
6723
  "non_truncated": 612,
6724
+ "padded": 2394,
6725
+ "non_padded": 9,
6726
  "effective_few_shots": 0.0,
6727
  "num_truncated_few_shots": 0
6728
  },
6729
  "community|madinah_qa:Arabic Language (Grammar)|0": {
6730
  "hashes": {
6731
+ "hash_examples": "e65fe4df843f4380",
6732
+ "hash_full_prompts": "454a1d8530051200",
6733
+ "hash_input_tokens": "9744219e87e67cbb",
6734
  "hash_cont_tokens": "ac1327c8a93a78f2"
6735
  },
6736
  "truncated": 0,
6737
  "non_truncated": 365,
6738
+ "padded": 1581,
6739
+ "non_padded": 7,
6740
  "effective_few_shots": 0.0,
6741
  "num_truncated_few_shots": 0
6742
  },