amztheory committed · verified · Commit 7cdee70 · 1 Parent(s): b331f0f

Upload SeaLLMs/SeaLLM-7B-v2.5/results_2025-01-23T06-09-59.797286.json with huggingface_hub
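The upload above is performed with the huggingface_hub client. Below is a minimal sketch of such an upload, assuming the standard HfApi.upload_file call; the local filename, the token setup, and the exact dataset repo id (truncated in the page header) are placeholders, not taken from this commit.

from huggingface_hub import HfApi

# Sketch of pushing a results file into the leaderboard's results dataset.
# repo_id is a placeholder: the exact OALL dataset name is truncated on this page.
# Authentication is assumed to come from `huggingface-cli login` or the HF_TOKEN env var.
api = HfApi()
api.upload_file(
    path_or_fileobj="results_2025-01-23T06-09-59.797286.json",  # assumed local copy of the file
    path_in_repo="SeaLLMs/SeaLLM-7B-v2.5/results_2025-01-23T06-09-59.797286.json",
    repo_id="OALL/<results-dataset>",  # placeholder repo id
    repo_type="dataset",
    commit_message="Upload SeaLLMs/SeaLLM-7B-v2.5/results_2025-01-23T06-09-59.797286.json with huggingface_hub",
)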

SeaLLMs/SeaLLM-7B-v2.5/results_2025-01-23T06-09-59.797286.json CHANGED
@@ -443,12 +443,12 @@
  "acc_norm_stderr": 0.03786720706234215
  },
  "community|madinah_qa:Arabic Language (General)|0": {
- "acc_norm": 0.4362745098039216,
- "acc_norm_stderr": 0.020062874243539135
+ "acc_norm": 0.5261437908496732,
+ "acc_norm_stderr": 0.020200164564804588
  },
  "community|madinah_qa:Arabic Language (Grammar)|0": {
- "acc_norm": 0.4136986301369863,
- "acc_norm_stderr": 0.025813791864794256
+ "acc_norm": 0.4520547945205479,
+ "acc_norm_stderr": 0.026086355811749208
  },
  "community|aratrust:Ethics|0": {
  "acc_norm": 0.7333333333333333,
@@ -495,16 +495,16 @@
  "acc_norm_stderr": 0.03325678163195883
  },
  "community|madinah_qa:_average|0": {
- "acc_norm": 0.4249865699704539,
- "acc_norm_stderr": 0.022938333054166697
+ "acc_norm": 0.48909929268511054,
+ "acc_norm_stderr": 0.0231432601882769
  },
  "community|aratrust:_average|0": {
  "acc_norm": 0.8074843882714251,
  "acc_norm_stderr": 0.04804316120171808
  },
  "all": {
- "acc_norm": 0.5416778919090178,
- "acc_norm_stderr": 0.0340902500478433,
+ "acc_norm": 0.5427738358870462,
+ "acc_norm_stderr": 0.034093753075776806,
  "llm_as_judge": 0.6792022792022778,
  "llm_as_judge_stderr": 0.00015812708387234105
  },
@@ -6714,29 +6714,29 @@
  },
  "community|madinah_qa:Arabic Language (General)|0": {
  "hashes": {
- "hash_examples": "bef69fb8b3b75f28",
- "hash_full_prompts": "dbd441fa7521b979",
- "hash_input_tokens": "48d2d2b97e1c7a7d",
+ "hash_examples": "25bf94d05f737b63",
+ "hash_full_prompts": "ddd29160c14f29f6",
+ "hash_input_tokens": "da0c51f65d6be823",
  "hash_cont_tokens": "3052f3111aefe0bf"
  },
  "truncated": 0,
  "non_truncated": 612,
- "padded": 2362,
- "non_padded": 41,
+ "padded": 2392,
+ "non_padded": 11,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|madinah_qa:Arabic Language (Grammar)|0": {
  "hashes": {
- "hash_examples": "bd066a9e6a140a4b",
- "hash_full_prompts": "049b75042e583cfb",
- "hash_input_tokens": "10341843047a4069",
+ "hash_examples": "e65fe4df843f4380",
+ "hash_full_prompts": "567e5f484271f133",
+ "hash_input_tokens": "8dce158a8b61ad63",
  "hash_cont_tokens": "3f57c831d5d970fc"
  },
  "truncated": 0,
  "non_truncated": 365,
- "padded": 1536,
- "non_padded": 52,
+ "padded": 1577,
+ "non_padded": 11,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
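As a sanity check on the new numbers, the updated "community|madinah_qa:_average|0" entry is the unweighted mean of the two madinah_qa subtask scores. A minimal sketch of recomputing it, assuming the standard lighteval layout where per-task metrics sit under a top-level "results" key:

import json

# Recompute the madinah_qa average from the per-subtask acc_norm values in this file.
# The top-level "results" key is an assumption about the lighteval output layout.
with open("results_2025-01-23T06-09-59.797286.json") as f:
    results = json.load(f)["results"]

general = results["community|madinah_qa:Arabic Language (General)|0"]["acc_norm"]  # 0.5261437908496732
grammar = results["community|madinah_qa:Arabic Language (Grammar)|0"]["acc_norm"]  # 0.4520547945205479
print((general + grammar) / 2)  # 0.48909929268511054, matching the _average entry above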