SondosMB committed on
Commit
97d602f
·
verified ·
1 Parent(s): 5d7850e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -25
app.py CHANGED
@@ -408,15 +408,15 @@ with gr.Blocks(css=css_tech_theme) as demo:
408
  <div class="submission-section">
409
  <h2>Submit Your Predictions</h2>
410
  <p>Upload your prediction file and provide your model name to evaluate and submit to the leaderboard.</p> </div>"""):
411
- with gr.Row(elem_id="submission-fields"):
412
- file_input = gr.File(label="Upload Prediction CSV", file_types=[".csv"], interactive=True)
413
- model_name_input = gr.Textbox(label="Model Name", placeholder="Enter your model name")
414
- with gr.Row(elem_id="submission-results"):
415
- overall_accuracy_display = gr.Number(label="Overall Accuracy", interactive=False)
416
- with gr.Row(elem_id="submission-buttons"):
417
- eval_button = gr.Button("Evaluate")
418
- submit_button = gr.Button("Prove and Submit to Leaderboard", visible=False)
419
- eval_status = gr.Textbox(label="Evaluation Status", interactive=False)
420
  def handle_evaluation(file, model_name):
421
  # Check if required inputs are provided
422
  if not file:
@@ -429,22 +429,22 @@ with gr.Blocks(css=css_tech_theme) as demo:
429
  overall_accuracy = 0
430
  else:
431
  overall_accuracy = leaderboard.iloc[-1]["Overall Accuracy"]
432
- # Show the submit button after evaluation
433
- return status, overall_accuracy, gr.update(visible=True)
434
- def handle_submission(file, model_name):
435
- # Handle leaderboard submission
436
- status, _ = evaluate_predictions(file, model_name, add_to_leaderboard=True)
437
- return f"Submission to leaderboard completed: {status}"
438
- eval_button.click(
439
- handle_evaluation,
440
- inputs=[file_input, model_name_input],
441
- outputs=[eval_status, overall_accuracy_display, submit_button],
442
- )
443
- submit_button.click(
444
- handle_submission,
445
- inputs=[file_input, model_name_input],
446
- outputs=[eval_status],
447
- )
448
 
449
 
450
 
 
408
  <div class="submission-section">
409
  <h2>Submit Your Predictions</h2>
410
  <p>Upload your prediction file and provide your model name to evaluate and submit to the leaderboard.</p> </div>"""):
411
+ with gr.Row(elem_id="submission-fields"):
412
+ file_input = gr.File(label="Upload Prediction CSV", file_types=[".csv"], interactive=True)
413
+ model_name_input = gr.Textbox(label="Model Name", placeholder="Enter your model name")
414
+ with gr.Row(elem_id="submission-results"):
415
+ overall_accuracy_display = gr.Number(label="Overall Accuracy", interactive=False)
416
+ with gr.Row(elem_id="submission-buttons"):
417
+ eval_button = gr.Button("Evaluate")
418
+ submit_button = gr.Button("Prove and Submit to Leaderboard", visible=False)
419
+ eval_status = gr.Textbox(label="Evaluation Status", interactive=False)
420
  def handle_evaluation(file, model_name):
421
  # Check if required inputs are provided
422
  if not file:
 
429
  overall_accuracy = 0
430
  else:
431
  overall_accuracy = leaderboard.iloc[-1]["Overall Accuracy"]
432
+ # Show the submit button after evaluation
433
+ return status, overall_accuracy, gr.update(visible=True)
434
+ def handle_submission(file, model_name):
435
+ # Handle leaderboard submission
436
+ status, _ = evaluate_predictions(file, model_name, add_to_leaderboard=True)
437
+ return f"Submission to leaderboard completed: {status}"
438
+ eval_button.click(
439
+ handle_evaluation,
440
+ inputs=[file_input, model_name_input],
441
+ outputs=[eval_status, overall_accuracy_display, submit_button],
442
+ )
443
+ submit_button.click(
444
+ handle_submission,
445
+ inputs=[file_input, model_name_input],
446
+ outputs=[eval_status],
447
+ )
448
 
449
 
450