import os

import gradio as gr

# Read from the environment (e.g. a Space secret); unused in this snippet.
HF_TOKEN = os.environ.get("HF_TOKEN")

DESCRIPTION = """
The steps are as follows:

- Paste a read-access token from hf.co/settings/tokens. Read access is enough, since we only open a PR against the source repo.
- Input a model id from the Hub
- Click "Submit"
- That's it! You'll get feedback on whether it worked and, if it did, the URL of the opened PR 🔥

⚠️ For now only `pytorch_model.bin` files are supported, but we'll extend support in the future.
"""

title = "Convert any model to Safetensors and open a PR"

def token_text(visible=False):
    # Returning a fresh gr.Text from an event handler updates the existing
    # component in place (used below to toggle the token field's visibility).
    return gr.Text(max_lines=1, label="your_hf_token", visible=visible)


def run(model_id, is_private, token):
    # Placeholder that just echoes its inputs; see the illustrative sketch below
    # for what the actual conversion / PR step could look like.
    return f"{model_id}, {is_private}, {token}"
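

# Illustrative sketch of what `run` could do for the actual conversion: download
# `pytorch_model.bin`, re-serialize it with safetensors, and open a PR on the
# source repo. `convert_to_safetensors` is a hypothetical helper name; it assumes
# `torch` and `safetensors` are installed alongside `huggingface_hub`.
def convert_to_safetensors(model_id, token=None):
    import tempfile

    import torch
    from huggingface_hub import HfApi, hf_hub_download
    from safetensors.torch import save_file

    # Fetch the original PyTorch checkpoint and load it on CPU.
    bin_path = hf_hub_download(model_id, "pytorch_model.bin", token=token)
    state_dict = torch.load(bin_path, map_location="cpu")

    # safetensors requires contiguous tensors.
    tensors = {name: t.contiguous() for name, t in state_dict.items()}

    with tempfile.TemporaryDirectory() as tmp_dir:
        sf_path = os.path.join(tmp_dir, "model.safetensors")
        save_file(tensors, sf_path)

        # Upload as a pull request against the source repo rather than pushing to main.
        commit = HfApi().upload_file(
            path_or_fileobj=sf_path,
            path_in_repo="model.safetensors",
            repo_id=model_id,
            token=token,
            create_pr=True,
            commit_message="Convert pytorch_model.bin to model.safetensors",
        )
    return str(commit)
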

with gr.Blocks(title=title) as demo:
    gr.Markdown(f"# {title}")
    gr.Markdown(DESCRIPTION)

    with gr.Row():
        with gr.Column():
            model_id = gr.Text(max_lines=1, label="model_id")
            is_private = gr.Checkbox(label="Private model")
            token = token_text()
            with gr.Row():
                clean = gr.ClearButton()
                submit = gr.Button("Submit", variant="primary")

        with gr.Column():
            output = gr.Markdown()

    # Show the token field only when the "Private model" box is ticked.
    is_private.change(token_text, inputs=is_private, outputs=token)
    # Handle one conversion at a time; extra requests wait in the queue.
    submit.click(run, inputs=[model_id, is_private, token], outputs=output, concurrency_limit=1)

demo.queue(max_size=10).launch(show_api=True)
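
# Usage sketch: with show_api=True the endpoint stays callable programmatically.
# The URL and example inputs below are placeholders; the endpoint name defaults
# to the wired function's name ("/run") when api_name is not set explicitly.
#
#   from gradio_client import Client
#
#   client = Client("http://127.0.0.1:7860")  # or the public Space URL
#   result = client.predict("some-user/some-model", False, "", api_name="/run")
#   print(result)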