# NOTE: "Spaces: Sleeping" is a Hugging Face Spaces page-header artifact left
# over from export — it is not part of the program.
import gradio as gr

from test_prompt_generator import generate_prompt
# Tokenizers offered in the dropdown. The dropdown also accepts custom values,
# so any model_id from the Hugging Face Hub works at runtime.
tokenizers = [
    "google/gemma-7b",
    "mistralai/Mistral-7B-v0.1",
    "facebook/opt-2.7b",
    "microsoft/phi-2",
    "THUDM/chatglm3-6b",
    "Qwen/Qwen1.5-7B-Chat",
    "bigscience/bloom-560m",
    "ise-uiuc/Magicoder-S-DS-6.7B",
    "google/flan-t5-base",
    "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
    "google-bert/bert-base-uncased",
]
def generate(model_id, num_tokens, prefix=None, source_text=None):
    """Generate a test prompt with a target token count and save it to a JSONL file.

    Args:
        model_id: Hugging Face Hub id of the tokenizer used to count tokens.
        num_tokens: Target number of tokens. May arrive as a float, since
            gr.Number delivers floats.
        prefix: Optional text placed at the start of the prompt. A blank
            Gradio textbox passes "", which is treated as "not given".
        source_text: Optional text to generate the prompt from instead of the
            default source; "" is treated as "not given".

    Returns:
        Tuple of (generated prompt string, path of the JSONL output file).
    """
    # Cast once up front: without it a float from gr.Number produces a
    # filename like "prompt_32.0.jsonl" instead of "prompt_32.jsonl".
    num_tokens = int(num_tokens)
    # Gradio textboxes pass "" when left blank; normalize to None so the
    # generator sees "no prefix/source" rather than an empty string.
    prefix = prefix or None
    source_text = source_text or None
    output_file = f"prompt_{num_tokens}.jsonl"
    prompt = generate_prompt(model_id, num_tokens, prefix=prefix, source_text=source_text, output_file=output_file)
    return prompt, output_file
# Gradio UI: pick (or paste) a tokenizer, choose a token count, optionally
# supply a prefix and/or custom source text; get back the prompt and a JSONL file.
tokenizer_dropdown = gr.Dropdown(
    label="Tokenizer",
    choices=tokenizers,
    value="mistralai/Mistral-7B-v0.1",
    allow_custom_value=True,
    info="Select a tokenizer from this list or paste a model_id from a model on the Hugging Face Hub",
)
token_count = gr.Number(
    label="Number of Tokens", minimum=4, maximum=2048, value=32, info="Enter a number between 4 and 2048."
)
prefix_box = gr.Textbox(
    label="Prefix (optional)",
    info="If given, the start of the prompt will be this prefix. Example: 'Summarize the following text:'",
)
source_box = gr.Textbox(
    label="Source text (optional)",
    info="By default, prompts will be generated from Alice in Wonderland. Enter text here to use that instead.",
)

demo = gr.Interface(
    fn=generate,
    title="Test Prompt Generator",
    description=(
        "Generate prompts with a given number of tokens for testing transformer models. "
        "Prompt source: https://archive.org/stream/alicesadventures19033gut/19033.txt"
    ),
    inputs=[tokenizer_dropdown, token_count, prefix_box, source_box],
    outputs=[gr.Textbox(label="prompt", show_copy_button=True), gr.File(label="Json file")],
    # One clickable example row per common benchmark length.
    examples=[["mistralai/Mistral-7B-v0.1", n] for n in (32, 64, 128, 512, 1024, 2048)],
    cache_examples=False,
    allow_flagging=False,
)
demo.launch()