Update app.py
Browse files
app.py
CHANGED
@@ -21,7 +21,7 @@ scheduler = CommitScheduler(
|
|
21 |
repo_id="stabletoolbench/StableToolBench_data", # 替换为你的用户名和数据集仓库名
|
22 |
repo_type="dataset",
|
23 |
folder_path=dataset_dir,
|
24 |
-
path_in_repo="
|
25 |
)
|
26 |
|
27 |
# 根据排名生成表格数据
|
@@ -51,40 +51,39 @@ protected_methods = ["GPT-4-Turbo-Preview (DFS)", "GPT-3.5-Turbo-1106 (DFS)", "G
|
|
51 |
def merge_data(uploaded_data_json):
|
52 |
# No need to call json.loads here because uploaded_data is already a Python dict
|
53 |
new_data = uploaded_data_json
|
54 |
-
|
55 |
-
|
56 |
-
|
57 |
-
|
58 |
-
|
59 |
-
|
60 |
-
|
61 |
-
|
62 |
-
|
63 |
-
|
64 |
-
|
65 |
-
|
66 |
-
|
67 |
-
|
68 |
-
|
69 |
-
|
70 |
-
|
71 |
-
|
72 |
-
|
73 |
-
|
74 |
-
|
75 |
-
|
76 |
-
|
77 |
-
|
78 |
-
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
-
|
83 |
-
|
84 |
-
|
85 |
-
|
86 |
-
|
87 |
-
json.dump(existing_data, file, indent=4)
|
88 |
|
89 |
# No need to sort here since we're doing it in the generate_table function
|
90 |
return existing_data
|
@@ -97,7 +96,7 @@ def process_file(file_info):
|
|
97 |
uploaded_data_json = json.loads(data_content)
|
98 |
# Merge the uploaded data
|
99 |
merge_data(uploaded_data_json)
|
100 |
-
#
|
101 |
# If we don't return anything here, gradio will not update the table
|
102 |
# We need to return the new data for the table if it's interactive
|
103 |
pass_rate_table = generate_table(existing_data["SolvablePassRateScores"])[1]
|
@@ -245,3 +244,4 @@ with gr.Blocks() as app:
|
|
245 |
|
246 |
if __name__ == "__main__":
|
247 |
app.launch()
|
|
|
|
21 |
repo_id="stabletoolbench/StableToolBench_data", # 替换为你的用户名和数据集仓库名
|
22 |
repo_type="dataset",
|
23 |
folder_path=dataset_dir,
|
24 |
+
path_in_repo="data",
|
25 |
)
|
26 |
|
27 |
# 根据排名生成表格数据
|
|
|
def merge_data(uploaded_data_json):
    """Merge uploaded leaderboard scores into the module-level ``existing_data``.

    Entries whose "Method" appears in ``protected_methods`` are never
    modified by an upload.  The merged result is persisted to
    ``leaderboard_data.json`` while holding ``scheduler.lock`` so the
    CommitScheduler never snapshots a half-written file.

    Args:
        uploaded_data_json: dict already parsed from the uploaded JSON file
            (no ``json.loads`` needed here).  Expected to contain
            "SolvablePassRateScores" and "SolvableWinRateScores" lists of
            ``{"Method": str, "Scores": dict}`` entries — TODO confirm
            schema against the upload form.

    Returns:
        The (mutated) module-level ``existing_data`` dict.  No sorting is
        done here since ``generate_table`` sorts for display.
    """
    new_data = uploaded_data_json

    def _merge_entry_list(existing_entries, new_entries):
        # Merge one score list in place: update scores for known,
        # non-protected methods; append methods not seen before.
        for new_entry in new_entries:
            if new_entry["Method"] in protected_methods:
                # Never let an upload clobber the reference baselines.
                continue
            existing_entry = next(
                (item for item in existing_entries
                 if item["Method"] == new_entry["Method"]),
                None)
            if existing_entry:
                # Known method: overwrite/extend its individual scores.
                existing_entry["Scores"].update(new_entry["Scores"])
            else:
                # New method: add it to the leaderboard.
                existing_entries.append(new_entry)

    with scheduler.lock:  # thread-safe file update while the scheduler runs
        _merge_entry_list(existing_data["SolvablePassRateScores"],
                          new_data["SolvablePassRateScores"])
        _merge_entry_list(existing_data["SolvableWinRateScores"],
                          new_data["SolvableWinRateScores"])

        # Persist the merged leaderboard so the next scheduled commit
        # pushes it to the dataset repo.
        data_file_path = dataset_dir / "leaderboard_data.json"
        with open(data_file_path, 'w') as file:
            json.dump(existing_data, file, indent=4)

    # No need to sort here since we're doing it in the generate_table function
    return existing_data
|
|
|
96 |
uploaded_data_json = json.loads(data_content)
|
97 |
# Merge the uploaded data
|
98 |
merge_data(uploaded_data_json)
|
99 |
+
scheduler.commit() # Trigger manual commit after processing
|
100 |
# If we don't return anything here, gradio will not update the table
|
101 |
# We need to return the new data for the table if it's interactive
|
102 |
pass_rate_table = generate_table(existing_data["SolvablePassRateScores"])[1]
|
|
|
244 |
|
245 |
if __name__ == "__main__":
|
246 |
app.launch()
|
247 |
+
scheduler.commit() # Ensure all changes are committed on exit
|