Datasets:
OALL
/

Modalities:
Text
Formats:
json
Size:
< 1K
Libraries:
Datasets
Dask
amztheory committed on
Commit
e33c1b9
·
verified ·
1 Parent(s): 2552dd1

Upload Qwen/Qwen2.5-7B-Instruct/results_2025-01-18T04-09-53.206264.json with huggingface_hub

Browse files
Qwen/Qwen2.5-7B-Instruct/results_2025-01-18T04-09-53.206264.json CHANGED
@@ -443,12 +443,12 @@
443
  "acc_norm_stderr": 0.037229657413855394
444
  },
445
  "community|madinah_qa:Arabic Language (General)|0": {
446
- "acc_norm": 0.3758169934640523,
447
- "acc_norm_stderr": 0.01959402113657745
448
  },
449
  "community|madinah_qa:Arabic Language (Grammar)|0": {
450
- "acc_norm": 0.3315068493150685,
451
- "acc_norm_stderr": 0.024674254621621143
452
  },
453
  "community|aratrust:Ethics|0": {
454
  "acc_norm": 0.6833333333333333,
@@ -495,16 +495,16 @@
495
  "acc_norm_stderr": 0.032889701188178425
496
  },
497
  "community|madinah_qa:_average|0": {
498
- "acc_norm": 0.35366192138956043,
499
- "acc_norm_stderr": 0.0221341378790993
500
  },
501
  "community|aratrust:_average|0": {
502
  "acc_norm": 0.8067993068334484,
503
  "acc_norm_stderr": 0.045361759584688265
504
  },
505
  "all": {
506
- "acc_norm": 0.47618260536191287,
507
- "acc_norm_stderr": 0.03400413672713291,
508
  "llm_as_judge": 0.7736942070275339,
509
  "llm_as_judge_stderr": 9.85322699325955e-05
510
  },
@@ -6714,29 +6714,29 @@
6714
  },
6715
  "community|madinah_qa:Arabic Language (General)|0": {
6716
  "hashes": {
6717
- "hash_examples": "bef69fb8b3b75f28",
6718
- "hash_full_prompts": "defb906f381cfcd9",
6719
- "hash_input_tokens": "cfd6f557e92b800e",
6720
  "hash_cont_tokens": "05d3f2bc980e6cbb"
6721
  },
6722
  "truncated": 0,
6723
  "non_truncated": 612,
6724
- "padded": 2346,
6725
- "non_padded": 57,
6726
  "effective_few_shots": 0.0,
6727
  "num_truncated_few_shots": 0
6728
  },
6729
  "community|madinah_qa:Arabic Language (Grammar)|0": {
6730
  "hashes": {
6731
- "hash_examples": "bd066a9e6a140a4b",
6732
- "hash_full_prompts": "070068c411fb3997",
6733
- "hash_input_tokens": "f92e93204cd1dcfd",
6734
  "hash_cont_tokens": "ac1327c8a93a78f2"
6735
  },
6736
  "truncated": 0,
6737
  "non_truncated": 365,
6738
- "padded": 1521,
6739
- "non_padded": 67,
6740
  "effective_few_shots": 0.0,
6741
  "num_truncated_few_shots": 0
6742
  },
 
443
  "acc_norm_stderr": 0.037229657413855394
444
  },
445
  "community|madinah_qa:Arabic Language (General)|0": {
446
+ "acc_norm": 0.4035947712418301,
447
+ "acc_norm_stderr": 0.01984828016840116
448
  },
449
  "community|madinah_qa:Arabic Language (Grammar)|0": {
450
+ "acc_norm": 0.34794520547945207,
451
+ "acc_norm_stderr": 0.024965874481689573
452
  },
453
  "community|aratrust:Ethics|0": {
454
  "acc_norm": 0.6833333333333333,
 
495
  "acc_norm_stderr": 0.032889701188178425
496
  },
497
  "community|madinah_qa:_average|0": {
498
+ "acc_norm": 0.37576998836064107,
499
+ "acc_norm_stderr": 0.022407077325045367
500
  },
501
  "community|aratrust:_average|0": {
502
  "acc_norm": 0.8067993068334484,
503
  "acc_norm_stderr": 0.045361759584688265
504
  },
505
  "all": {
506
+ "acc_norm": 0.4765605210366322,
507
+ "acc_norm_stderr": 0.03400880235868754,
508
  "llm_as_judge": 0.7736942070275339,
509
  "llm_as_judge_stderr": 9.85322699325955e-05
510
  },
 
6714
  },
6715
  "community|madinah_qa:Arabic Language (General)|0": {
6716
  "hashes": {
6717
+ "hash_examples": "25bf94d05f737b63",
6718
+ "hash_full_prompts": "5c56043c8155627b",
6719
+ "hash_input_tokens": "449ebe5b21f264e0",
6720
  "hash_cont_tokens": "05d3f2bc980e6cbb"
6721
  },
6722
  "truncated": 0,
6723
  "non_truncated": 612,
6724
+ "padded": 2394,
6725
+ "non_padded": 9,
6726
  "effective_few_shots": 0.0,
6727
  "num_truncated_few_shots": 0
6728
  },
6729
  "community|madinah_qa:Arabic Language (Grammar)|0": {
6730
  "hashes": {
6731
+ "hash_examples": "e65fe4df843f4380",
6732
+ "hash_full_prompts": "454a1d8530051200",
6733
+ "hash_input_tokens": "9744219e87e67cbb",
6734
  "hash_cont_tokens": "ac1327c8a93a78f2"
6735
  },
6736
  "truncated": 0,
6737
  "non_truncated": 365,
6738
+ "padded": 1581,
6739
+ "non_padded": 7,
6740
  "effective_few_shots": 0.0,
6741
  "num_truncated_few_shots": 0
6742
  },