Spaces:
Running
on
CPU Upgrade
Running
on
CPU Upgrade
File size: 3,506 Bytes
3119795 4a9b060 23510fc 3119795 d1ed69b 5289522 6454c0e d1ed69b 6454c0e 3119795 23510fc 4a9b060 a54ba55 23510fc 816857c 23510fc d1ed69b 23510fc 3119795 d1ed69b 23510fc 4a9b060 23510fc 4a9b060 d1ed69b 23510fc 5289522 23510fc 816857c d1ed69b 23510fc d1ed69b 3119795 b975b7b d1ed69b 6454c0e |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 |
import sys
from huggingface_hub import HfApi, whoami
import requests
import yaml
import gradio as gr
from loguru import logger
from yourbench.pipeline import run_pipeline
from yourbench_space.config import generate_base_config, save_config
from yourbench_space.utils import CONFIG_PATH, UPLOAD_DIRECTORY, SubprocessManager, save_files
# Make sure the upload target exists before the UI starts accepting files.
UPLOAD_DIRECTORY.mkdir(parents=True, exist_ok=True)

# Swap loguru's default sink for a single INFO-level stderr sink.
logger.remove()
logger.add(sys.stderr, level="INFO")

# Command the SubprocessManager uses to launch the yourbench pipeline.
command = ["uv", "run", "yourbench", "--config={}".format(CONFIG_PATH)]
manager = SubprocessManager(command)
def update_hf_org_dropdown(oauth_token: gr.OAuthToken | None) -> "gr.Dropdown | list":
    """Populate the organization dropdown for the logged-in Hugging Face user.

    Args:
        oauth_token: OAuth token injected by Gradio on Spaces; ``None`` when
            the user is not logged in (or the app runs outside Spaces).

    Returns:
        A ``gr.Dropdown`` listing the user's own name first, followed by the
        organizations they belong to, or an empty list when no token is
        available.
    """
    if oauth_token is None:
        # Use the app's configured loguru logger instead of a bare print().
        logger.warning("Please deploy this on Spaces and log in to list organizations.")
        return []
    user_info = whoami(oauth_token.token)
    org_names = [org["name"] for org in user_info["orgs"]]
    user_name = user_info["name"]
    # Personal account goes first so it becomes the default selection.
    org_names.insert(0, user_name)
    return gr.Dropdown(org_names, value=user_name, label="Organization")
# Shared YAML editor: created before the Blocks context so the
# "Configuration" tab's click handler can target it; the widget itself is
# render()-ed inside the "Raw Configuration" tab.
config_output = gr.Code(label="Generated Config", language="yaml")

with gr.Blocks() as app:
    gr.Markdown("## YourBench Configuration")
    with gr.Row():
        login_btn = gr.LoginButton()

    # --- Tab 1: build a config from form fields ------------------------
    with gr.Tab("Configuration"):
        model_name = gr.Textbox(label="Model Name")
        hf_org_dropdown = gr.Dropdown(list(), label="Organization", allow_custom_value=True)
        # Fill in the user's organizations on page load; Gradio supplies the
        # OAuth token to update_hf_org_dropdown via its type annotation.
        app.load(update_hf_org_dropdown, inputs=None, outputs=hf_org_dropdown)
        provider = gr.Dropdown(["openrouter", "openai", "huggingface"], value="huggingface", label="Provider", allow_custom_value=True)
        base_url = gr.Textbox(label="Base URL")
        api_key = gr.Textbox(label="API Key")
        max_concurrent_requests = gr.Dropdown([8, 16, 32], value=16, label="Max Concurrent Requests")
        preview_button = gr.Button("Generate Config")
        # Generate the YAML from the form and show it in the shared editor.
        preview_button.click(
            generate_base_config,
            inputs=[hf_org_dropdown, model_name, provider, base_url, api_key, max_concurrent_requests],
            outputs=config_output
        )
        save_button = gr.Button("Save Config")
        save_button.click(save_config, inputs=[config_output], outputs=[gr.Textbox(label="Save Status")])

    # --- Tab 2: raw YAML view with its own save button ------------------
    with gr.Tab("Raw Configuration"):
        # Place the shared editor created above into this tab.
        config_output.render()
        # NOTE(review): rebinding save_button shadows the tab-1 button
        # object; both click handlers stay wired to their own buttons, so
        # this works, but the shadowing looks unintentional — confirm.
        save_button = gr.Button("Save Config")
        save_button.click(save_config, inputs=[config_output], outputs=[gr.Textbox(label="Save Status")])

    # --- Tab 3: upload source documents ---------------------------------
    with gr.Tab("Files"):
        file_input = gr.File(label="Upload text files", file_count="multiple", file_types=[".txt", ".md", ".html"])
        output = gr.Textbox(label="Log")
        file_input.upload(save_files, file_input, output)

    # --- Tab 4: run the pipeline subprocess and stream its output -------
    with gr.Tab("Run Generation"):
        log_output = gr.Code(label="Log Output", language=None, lines=20, interactive=False)
        # Poll the subprocess stdout buffer every 50 ms.
        log_timer = gr.Timer(0.05, active=True)
        log_timer.tick(manager.read_and_get_output, outputs=log_output)
        with gr.Row():
            process_status = gr.Checkbox(label="Process Status", interactive=False)
            # Separate 50 ms timer mirrors the liveness of the subprocess.
            status_timer = gr.Timer(0.05, active=True)
            status_timer.tick(manager.is_running, outputs=process_status)
        with gr.Row():
            # Lifecycle controls delegate directly to the SubprocessManager.
            start_button = gr.Button("Start Task")
            start_button.click(manager.start_process)
            stop_button = gr.Button("Stop Task")
            stop_button.click(manager.stop_process)
            kill_button = gr.Button("Kill Task")
            kill_button.click(manager.kill_process)

app.launch()
|