from huggingface_hub import HfApi
from functools import lru_cache


def get_leaderboard_models():
    """Return the models that have result datasets on the Open LLM Leaderboard."""
    api = HfApi()

    # List all datasets in the open-llm-leaderboard organization
    datasets = api.list_datasets(author="open-llm-leaderboard")

    models = []
    for dataset in datasets:
        if dataset.id.endswith("-details"):
            # Dataset ids follow "open-llm-leaderboard/<provider>__<model_name>-details"
            model_part = dataset.id.split("/")[-1].removesuffix("-details")
            if "__" not in model_part:
                continue  # skip ids that do not follow the provider__model convention
            provider, model = model_part.split("__", 1)
            models.append(f"{provider}/{model}")

    return sorted(models)



@lru_cache(maxsize=1)
def get_leaderboard_models_cached():
    """Cached wrapper around get_leaderboard_models(); the Hub is queried at most once per process."""
    return get_leaderboard_models()


def get_leaderboard_datasets():
    """Return the names of the supported leaderboard benchmark datasets."""
    return [
        "ai2_arc",
        "hellaswag",
        "mmlu",
        "truthful_qa",
        "winogrande",
        "gsm8k"
    ]
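

# Minimal usage sketch, assuming network access to the Hugging Face Hub:
# list the discovered models once (via the cached helper) and print the
# supported benchmark datasets.
if __name__ == "__main__":
    for model in get_leaderboard_models_cached():
        print(model)
    print("Datasets:", ", ".join(get_leaderboard_datasets()))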