File size: 4,361 Bytes
d4d998a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3c01baa
d4d998a
3c01baa
d4d998a
 
 
 
 
3c01baa
d4d998a
 
3c01baa
 
 
 
 
 
 
d4d998a
 
 
 
 
 
3c01baa
d4d998a
 
 
 
 
3c01baa
d4d998a
 
3c01baa
d4d998a
 
 
 
 
 
 
3c01baa
d4d998a
 
 
 
3c01baa
 
 
 
d4d998a
 
3c01baa
d4d998a
3c01baa
 
 
d4d998a
 
 
 
 
3c01baa
d4d998a
3c01baa
d4d998a
 
3c01baa
 
d4d998a
 
 
 
3c01baa
d4d998a
 
3c01baa
 
 
 
 
d4d998a
 
 
 
 
3c01baa
 
 
 
d4d998a
3c01baa
d4d998a
 
3c01baa
d4d998a
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
"""
Handle submissions to the GuardBench leaderboard.
"""

import json
import os
import tempfile
import uuid
from datetime import datetime
from typing import Dict, List, Tuple

from huggingface_hub import HfApi
from datasets import load_dataset, Dataset

from src.display.formatting import styled_error, styled_message, styled_warning
from src.envs import API, RESULTS_DATASET_ID, TOKEN
from src.leaderboard.processor import process_jsonl_submission, add_entries_to_leaderboard, load_leaderboard_data


def validate_submission(file_path: str) -> Tuple[bool, str]:
    """Check whether a submission file parses into at least one entry.

    Args:
        file_path: Path to the candidate JSONL submission file.

    Returns:
        A ``(is_valid, message)`` pair: ``True`` with a success message when
        the file yields entries, otherwise ``False`` with the reason.
    """
    try:
        parsed_entries, parse_message = process_jsonl_submission(file_path)
        if not parsed_entries:
            return False, parse_message
    except Exception as e:
        return False, f"Error validating submission: {e}"

    # Hook for future schema/content checks on parsed_entries.
    return True, "Submission is valid"


def submit_to_hub(file_path: str, metadata: Dict, dataset_id: str, token: str, version="v0") -> Tuple[bool, str]:
    """
    Submit results to a HuggingFace dataset repository as individual files.

    Args:
        file_path: Path to the submission file
        metadata: Metadata to include with the submission
        dataset_id: The dataset repository ID
        token: HuggingFace API token
        version: The version of the benchmark used (e.g., "v0", "v1")

    Returns:
        A ``(success, message)`` pair describing the outcome.
    """
    try:
        # Process the submission file to validate
        entries, message = process_jsonl_submission(file_path)
        if not entries:
            return False, message

        # Generate a unique, filesystem-safe submission ID from model + time.
        model_name = metadata.get("model_name", "unknown")
        model_name_safe = model_name.replace("/", "_").replace(" ", "_")
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        submission_id = f"{model_name_safe}_{timestamp}"

        # Create an API instance
        api = HfApi(token=token)

        # Write a temporary JSONL file with metadata merged into each entry.
        with tempfile.NamedTemporaryFile(mode='w', suffix='.jsonl', delete=False) as temp_file:
            for entry in entries:
                # If the entry already has a model_name, don't override it
                if "model_name" not in entry:
                    entry["model_name"] = metadata.get("model_name")

                # Add other metadata keys only where the entry lacks them.
                for key, value in metadata.items():
                    if key != "model_name" and key not in entry:
                        entry[key] = value

                # Version is always stamped, even if the entry carried one.
                entry["version"] = version

                temp_file.write(json.dumps(entry) + "\n")

            temp_path = temp_file.name

        # Upload to the version-specific directory; v0 keeps the legacy
        # unversioned "submissions/" layout for backward compatibility.
        submission_path = f"submissions_{version}/{submission_id}_{version}.jsonl" if version != "v0" else f"submissions/{submission_id}.jsonl"
        try:
            api.upload_file(
                path_or_fileobj=temp_path,
                path_in_repo=submission_path,
                repo_id=dataset_id,
                repo_type="dataset",
                commit_message=f"Add submission for {model_name} (version {version})"
            )
        finally:
            # BUG FIX: previously the temp file was unlinked only on the
            # success path, leaking a file on every failed upload.
            os.unlink(temp_path)

        return True, f"Successfully uploaded submission for {model_name} to {dataset_id} (version {version})"
    except Exception as e:
        # Boundary handler: convert any failure into an error tuple for the UI.
        return False, f"Error submitting to dataset: {e}"


def process_submission(file_path: str, metadata: Dict, version="v0") -> str:
    """
    Process a submission to the GuardBench leaderboard.

    Validates the file, then uploads it to the results dataset repository.

    Args:
        file_path: Path to the submission file
        metadata: Metadata to include with the submission
        version: The version of the benchmark used (e.g., "v0", "v1")

    Returns:
        A styled HTML message describing success or the failure reason.
    """
    # Validate submission file
    is_valid, validation_message = validate_submission(file_path)
    if not is_valid:
        return styled_error(validation_message)

    # BUG FIX: copy instead of mutating the caller's dict in place when
    # stamping the version.
    metadata = {**metadata, "version": version}

    # Submit to HuggingFace dataset repository
    success, message = submit_to_hub(file_path, metadata, RESULTS_DATASET_ID, TOKEN, version=version)
    if not success:
        return styled_error(message)

    return styled_message(f"Submission successful! {message}")