Spaces:
Running
on
CPU Upgrade
Running
on
CPU Upgrade
Commit
·
805b1ed
1
Parent(s):
b3de191
Refactor submit_model and submit_gradio_module to accept model_param_limit as a parameter for improved flexibility in model size validation
Browse files
utils.py
CHANGED
@@ -73,7 +73,7 @@ def load_requests(status_folder, task_type=None):
|
|
73 |
|
74 |
return df
|
75 |
|
76 |
-
def submit_model(model_name, revision, precision, params, license, task):
|
77 |
# Load pending and finished requests from the dataset repository
|
78 |
df_pending = load_requests('pending', task_type=task)
|
79 |
df_finished = load_requests('finished', task_type=task)
|
@@ -84,8 +84,8 @@ def submit_model(model_name, revision, precision, params, license, task):
|
|
84 |
return "I think the auto-fetch feature couldn't fetch model info. If your model is not suitable for this task evaluation then this is expected, but if it's suitable and this behavior happened with you then please open a community discussion in the leaderboard discussion section and we will fix it ASAP.", df_pending
|
85 |
|
86 |
# Check if model size is in valid range
|
87 |
-
if float(params) >
|
88 |
-
return "Model size should be less than
|
89 |
|
90 |
# Handle 'Missing' precision
|
91 |
if precision == 'Missing':
|
@@ -163,8 +163,9 @@ def submit_model(model_name, revision, precision, params, license, task):
|
|
163 |
return f"Model {model_name} has been submitted successfully as a {task} 🚀", df_pending
|
164 |
|
165 |
|
166 |
-
def submit_gradio_module(task_type):
|
167 |
var = gr.State(value=task_type)
|
|
|
168 |
with gr.Row(equal_height=True):
|
169 |
model_name_input = gr.Textbox(
|
170 |
label="Model",
|
@@ -241,6 +242,6 @@ def submit_gradio_module(task_type):
|
|
241 |
|
242 |
submit_button.click(
|
243 |
submit_model,
|
244 |
-
inputs=[model_name_input, revision_input, precision_input, params_input, license_input, var],
|
245 |
outputs=[submission_result, pending_gradio_df],
|
246 |
)
|
|
|
73 |
|
74 |
return df
|
75 |
|
76 |
+
def submit_model(model_name, revision, precision, params, license, task, model_param_limit=5000):
|
77 |
# Load pending and finished requests from the dataset repository
|
78 |
df_pending = load_requests('pending', task_type=task)
|
79 |
df_finished = load_requests('finished', task_type=task)
|
|
|
84 |
return "I think the auto-fetch feature couldn't fetch model info. If your model is not suitable for this task evaluation then this is expected, but if it's suitable and this behavior happened with you then please open a community discussion in the leaderboard discussion section and we will fix it ASAP.", df_pending
|
85 |
|
86 |
# Check if model size is in valid range
|
87 |
+
if float(params) > model_param_limit:
|
88 |
+
return f"Model size should be less than {model_param_limit} million parameters. Please check the model size and try again.", df_pending
|
89 |
|
90 |
# Handle 'Missing' precision
|
91 |
if precision == 'Missing':
|
|
|
163 |
return f"Model {model_name} has been submitted successfully as a {task} 🚀", df_pending
|
164 |
|
165 |
|
166 |
+
def submit_gradio_module(task_type, model_param_limit=5000):
|
167 |
var = gr.State(value=task_type)
|
168 |
+
model_param_limit = gr.State(value=model_param_limit)
|
169 |
with gr.Row(equal_height=True):
|
170 |
model_name_input = gr.Textbox(
|
171 |
label="Model",
|
|
|
242 |
|
243 |
submit_button.click(
|
244 |
submit_model,
|
245 |
+
inputs=[model_name_input, revision_input, precision_input, params_input, license_input, var, model_param_limit],
|
246 |
outputs=[submission_result, pending_gradio_df],
|
247 |
)
|