# Hugging Face Space app: export a Hub model to OpenVINO and open a PR.
# NOTE(review): the original paste carried page-viewer chrome here (Space
# status, file size, commit hashes, a line-number gutter) — not Python
# source; replaced with this comment so the file parses.
import csv
import os
from datetime import datetime
from typing import Optional, Union
import gradio as gr
from huggingface_hub import HfApi, Repository
from export import convert
DATASET_REPO_URL = "https://huggingface.co/datasets/optimum/exporters"
DATA_FILENAME = "data.csv"
DATA_FILE = os.path.join("openvino", DATA_FILENAME)
HF_TOKEN = os.environ.get("HF_WRITE_TOKEN")
DATA_DIR = "exporters_data"
repo = None
if HF_TOKEN:
repo = Repository(local_dir=DATA_DIR, clone_from=DATASET_REPO_URL, token=HF_TOKEN)
def export(token: str, model_id: str, task: str) -> str:
    """Export ``model_id`` to OpenVINO and open a PR against the source repo.

    Args:
        token: Hugging Face token used to call the Hub API on the user's behalf.
        model_id: Repository id of the model to export.
        task: Task used to load the model before export; ``"auto"`` lets the
            exporter infer it.

    Returns:
        A Markdown string: a success message with the PR link, the error
        reported by ``convert``, or a validation/error message.
    """
    # Truthiness also rejects None, not only the empty string.
    if not token or not model_id:
        return """
### Invalid input π
Please fill a token and model name.
"""
    try:
        api = HfApi(token=token)
        # convert() signals success with error == "0"; anything else is an
        # error message to surface directly.
        error, commit_info = convert(api=api, model_id=model_id, task=task, force=False)
        if error != "0":
            return error
        print("[commit_info]", commit_info)
        # Log the export in a private dataset (skipped when no write token).
        if repo is not None:
            repo.git_pull(rebase=True)
            # newline="" is required by the csv module (avoids blank rows on
            # Windows); encoding pinned so appends are byte-stable.
            with open(os.path.join(DATA_DIR, DATA_FILE), "a", newline="", encoding="utf-8") as csvfile:
                writer = csv.DictWriter(csvfile, fieldnames=["model_id", "pr_url", "time"])
                writer.writerow(
                    {
                        "model_id": model_id,
                        "pr_url": commit_info.pr_url,
                        "time": str(datetime.now()),
                    }
                )
            commit_url = repo.push_to_hub()
            print("[dataset]", commit_url)
        return f"#### Success π₯ Yay! This model was successfully exported and a PR was open using your token, here: [{commit_info.pr_url}]({commit_info.pr_url})"
    except Exception as e:
        # Surface any failure to the UI instead of crashing the Space.
        return f"#### Error: {e}"
# HTML snippet rendering the Space's header image, horizontally centered.
# NOTE(review): the name is misspelled ("TTILE" vs "TITLE") but is referenced
# as-is below (gr.HTML(TTILE_IMAGE)); renaming requires updating that usage.
TTILE_IMAGE = """
<div
style="
display: block;
margin-left: auto;
margin-right: auto;
width: 50%;
"
>
<img src="https://huggingface.co/spaces/echarlaix/openvino-export/resolve/main/header.png"/>
</div>
"""
# HTML heading shown at the top of the page.
TITLE = """
<div
style="
display: inline-flex;
align-items: center;
text-align: center;
max-width: 1400px;
gap: 0.8rem;
font-size: 2.2rem;
"
>
<h1 style="font-weight: 900; margin-bottom: 10px; margin-top: 10px;">
Export your model to OpenVINO with Optimum Intel
</h1>
</div>
"""
# Markdown shown in the left column of the UI: prerequisites and what the
# Space does with the user's token and model id.
DESCRIPTION = """
This Space allows you to automatically export your model to the OpenVINO format.
To export your model you need:
- A read-access token from [https://huggingface.co/settings/tokens](https://huggingface.co/settings/tokens).
Read access is enough given that we will open a PR against the source repo.
- A model id of the model you'd like to export (for example: [distilbert-base-uncased-finetuned-sst-2-english](https://huggingface.co/distilbert-base-uncased-finetuned-sst-2-english))
- A [task](https://huggingface.co/docs/optimum/main/en/exporters/task_manager#pytorch) that will be used to load the model before exporting it. If set to "auto", the task will be automatically inferred.
That's it ! π₯
After the model conversion, we will open a PR against the source repo.
You will then be able to load the resulting model and run inference using [Optimum Intel](https://huggingface.co/docs/optimum/intel/inference).
"""
# Assemble the Gradio UI: description on the left, the export form on the
# right; the Export button feeds export() and renders its Markdown result.
with gr.Blocks() as demo:
    gr.HTML(TTILE_IMAGE)
    gr.HTML(TITLE)
    with gr.Row():
        with gr.Column(scale=50):
            gr.Markdown(DESCRIPTION)
        with gr.Column(scale=50):
            token_box = gr.Textbox(max_lines=1, label="Hugging Face token")
            model_box = gr.Textbox(
                max_lines=1,
                label="Model name",
                placeholder="distilbert-base-uncased-finetuned-sst-2-english",
            )
            task_box = gr.Textbox(
                value="auto",
                max_lines=1,
                label='Task (can be left to "auto", will be automatically inferred)',
            )
            export_button = gr.Button("Export")
            result_markdown = gr.Markdown(label="Output")
            export_button.click(
                fn=export,
                inputs=[token_box, model_box, task_box],
                outputs=result_markdown,
            )
demo.launch()