Spaces:
Runtime error
Runtime error
File size: 2,102 Bytes
08ae6c5 95c19d6 0811d37 95c19d6 2a5f9fb 8b88d2c 0811d37 8b88d2c 95c19d6 8c49cb6 8b88d2c 95c19d6 2a73469 1ffc326 8b88d2c 95c19d6 8b88d2c d084b26 0811d37 95c19d6 8b88d2c 95c19d6 8b88d2c 95c19d6 0811d37 95c19d6 6bc96ff 8c49cb6 95c19d6 0811d37 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 |
import logging

# Project logging helpers (src.logging is this repo's module, not stdlib logging).
from src.logging import configure_root_logger

# Quiet chatty third-party libraries so the rendered log view stays readable.
logging.getLogger("httpx").setLevel(logging.WARNING)
logging.getLogger("numexpr").setLevel(logging.WARNING)
logging.getLogger("absl").setLevel(logging.WARNING)

# Configure the root logger BEFORE importing the rest of the app, so loggers
# created at import time in those modules inherit the configuration.
configure_root_logger()

from functools import partial
import gradio as gr
from main_backend_harness import run_auto_eval
from src.display.log_visualizer import log_file_to_html_string
from src.display.css_html_js import dark_mode_gradio_js
from src.envs import REFRESH_RATE, REPO_ID, QUEUE_REPO, RESULTS_REPO
from src.logging import setup_logger, log_file

# NOTE(review): basicConfig() is a no-op if configure_root_logger() already
# attached handlers to the root logger — presumably redundant; confirm intent.
logging.basicConfig(level=logging.INFO)

# Module logger for this app.
logger = setup_logger(__name__)
# --- Static markdown shown in the UI ---

# Plain string: it interpolates nothing, so the f-prefix was spurious (ruff F541).
intro_md = """
# Intro
This is a visual for the auto evaluator.
"""

# Table of links to the leaderboard Space and its queue/results dataset repos,
# built from the repo identifiers configured in src.envs.
links_md = f"""
# Important links
| Description | Link |
|-----------------|------|
| Leaderboard | [{REPO_ID}](https://huggingface.co/spaces/{REPO_ID}) |
| Queue Repo | [{QUEUE_REPO}](https://huggingface.co/datasets/{QUEUE_REPO}) |
| Results Repo | [{RESULTS_REPO}](https://huggingface.co/datasets/{RESULTS_REPO}) |
"""
def button_auto_eval():
    """Click handler for the manual-run button.

    Logs that the run was triggered by hand, then performs one full
    auto-evaluation pass synchronously via run_auto_eval().
    """
    logger.info("Manually triggering Auto Eval")
    run_auto_eval()
# Created eagerly (outside the Blocks context) so it can be captured in the
# partial below; it is placed into the layout later via .render().
reverse_order_checkbox = gr.Checkbox(label="Reverse Order", value=True)

with gr.Blocks(js=dark_mode_gradio_js) as demo:
    gr.Markdown(intro_md)
    with gr.Tab("Application"):
        # Re-render the log file as HTML every second (every=1).
        # NOTE(review): `reverse` receives the Checkbox component object itself,
        # not its boolean value — presumably log_file_to_html_string resolves or
        # truth-tests it; confirm against that helper.
        output_html = gr.HTML(partial(log_file_to_html_string, reverse=reverse_order_checkbox), every=1)
        with gr.Row():
            download_button = gr.DownloadButton("Download Log File", value=log_file)
            with gr.Accordion("Log View Configuration", open=False):
                reverse_order_checkbox.render()
        # Add a button that when pressed, triggers run_auto_eval
        button = gr.Button("Manually Run Evaluation")
        gr.Markdown(links_md)

    # Hidden component abused as a timer: its value callable (run_auto_eval) is
    # re-invoked every REFRESH_RATE seconds, giving a periodic evaluation loop.
    dummy = gr.Markdown(run_auto_eval, every=REFRESH_RATE, visible=False)

    # Wire the manual-run button; the handler takes no inputs and updates nothing.
    button.click(fn=button_auto_eval, inputs=[], outputs=[])

if __name__ == "__main__":
    # Bind all interfaces on port 7860 (the conventional Hugging Face Spaces port).
    demo.queue(default_concurrency_limit=40).launch(server_name="0.0.0.0", show_error=True, server_port=7860)
|