import csv
import os
from datetime import datetime
import gradio as gr
from huggingface_hub import HfApi, Repository
from export import convert


DATASET_REPO_URL = "https://huggingface.co/datasets/optimum/exporters"
DATA_FILENAME = "data.csv"
DATA_FILE = os.path.join("openvino", DATA_FILENAME)
HF_TOKEN = os.environ.get("HF_WRITE_TOKEN")
DATA_DIR = "exporters_data"

# Clone the dataset repo used to log exports (only when a write token is available)
repo = None
if HF_TOKEN:
    repo = Repository(local_dir=DATA_DIR, clone_from=DATASET_REPO_URL, token=HF_TOKEN)


def export(token: str, model_id: str, task: str) -> str:
    """Export `model_id` to OpenVINO, open a PR on the Hub and return a Markdown status message."""
    if token == "" or model_id == "":
        return """
        ### Invalid input 🐞
        Please fill in a token and a model name.
        """
    try:
        api = HfApi(token=token)

        # convert() returns an error message ("0" when no error occurred) and the commit info of the opened PR
        error, commit_info = convert(api=api, model_id=model_id, task=task, force=False)
        if error != "0":
            return error

        print("[commit_info]", commit_info)

        # save in a private dataset
        if repo is not None:
            repo.git_pull(rebase=True)
            with open(os.path.join(DATA_DIR, DATA_FILE), "a") as csvfile:
                writer = csv.DictWriter(csvfile, fieldnames=["model_id", "pr_url", "time"])
                writer.writerow(
                    {
                        "model_id": model_id,
                        "pr_url": commit_info.pr_url,
                        "time": str(datetime.now()),
                    }
                )
            commit_url = repo.push_to_hub()
            print("[dataset]", commit_url)

        return f"#### Success 🔥 Yay! This model was successfully exported and a PR was open using your token, here: [{commit_info.pr_url}]({commit_info.pr_url})"
    except Exception as e:
        return f"#### Error: {e}"


TITLE_IMAGE = """
<div
    style="
        display: block;
        margin-left: auto;
        margin-right: auto;
        width: 50%;
    "
>
<img src="https://huggingface.co/spaces/echarlaix/openvino-export/resolve/main/header.png"/>
</div>
"""

TITLE = """
<div
    style="
        display: inline-flex;
        align-items: center;
        text-align: center;
        max-width: 1400px;
        gap: 0.8rem;
        font-size: 2.2rem;
    "
>
<h1 style="font-weight: 900; margin-bottom: 10px; margin-top: 10px;">
    Export your Transformers and Diffusers model to OpenVINO with 🤗 Optimum Intel (experimental)
</h1>
</div>
"""

DESCRIPTION = """
This Space allows you to automatically export 🤗 Transformers and Diffusers PyTorch models hosted on the Hugging Face Hub to the OpenVINO format.

Once exported, you will be able to load the resulting model with the [🤗 Optimum Intel](https://huggingface.co/docs/optimum/intel/inference) library.

To export your model, the steps are as follows:
- Paste a read-access token from [https://huggingface.co/settings/tokens](https://huggingface.co/settings/tokens). Read access is enough, given that the export opens a PR against the source repo.
- Input a model id from the Hub (for example: [distilbert-base-uncased-finetuned-sst-2-english](https://huggingface.co/distilbert-base-uncased-finetuned-sst-2-english))
- Click "Export"
- That’s it! You’ll get feedback on whether the export worked, and if it did, the URL of the opened PR 🔥
"""

with gr.Blocks() as demo:
    gr.HTML(TITLE_IMAGE)
    gr.HTML(TITLE)

    with gr.Row():
        with gr.Column(scale=50):
            gr.Markdown(DESCRIPTION)

        with gr.Column(scale=50):
            input_token = gr.Textbox(
                max_lines=1,
                label="Hugging Face token",
            )
            input_model = gr.Textbox(
                max_lines=1,
                label="Model name",
                placeholder="distilbert-base-uncased-finetuned-sst-2-english",
            )
            input_task = gr.Textbox(
                value="auto",
                max_lines=1,
                label='Task (can be left to "auto", will be automatically inferred)',
            )

            btn = gr.Button("Export")
            output = gr.Markdown(label="Output")

    btn.click(
        fn=export,
        inputs=[input_token, input_model, input_task],
        outputs=output,
    )


demo.launch()