Spaces:
Runtime error
Runtime error
add gradio authentication token
Browse files
app.py
CHANGED
@@ -19,7 +19,12 @@ if HF_TOKEN:
|
|
19 |
repo = Repository(local_dir=DATA_DIR, clone_from=DATASET_REPO_URL, token=HF_TOKEN)
|
20 |
|
21 |
|
22 |
-
def export(model_id: str,
|
|
|
|
|
|
|
|
|
|
|
23 |
if model_id == "" or token == "":
|
24 |
return """
|
25 |
### Invalid input 🐞
|
@@ -91,40 +96,38 @@ After the model conversion, we will open a PR against the source repo to add the
|
|
91 |
|
92 |
To export your model you need:
|
93 |
- A Model ID from the Hub
|
94 |
-
- A read-access token from [https://huggingface.co/settings/tokens](https://huggingface.co/settings/tokens).
|
95 |
|
96 |
That's it ! 🔥
|
97 |
"""
|
98 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
99 |
interface = gr.Interface(
|
100 |
fn=export,
|
101 |
inputs=[
|
102 |
-
|
103 |
-
|
104 |
-
placeholder="Search for model id on Huggingface",
|
105 |
-
search_type="model",
|
106 |
-
),
|
107 |
-
gr.Textbox(
|
108 |
-
max_lines=1,
|
109 |
-
label="Hugging Face token",
|
110 |
-
),
|
111 |
-
gr.Textbox(
|
112 |
-
label="Task",
|
113 |
-
info="Can be left to auto, will be automatically inferred",
|
114 |
-
placeholder="auto",
|
115 |
-
max_lines=1,
|
116 |
-
),
|
117 |
],
|
118 |
outputs=[
|
119 |
gr.Markdown(label="output"),
|
120 |
],
|
121 |
-
submit_btn=gr.Button("Export"),
|
122 |
title=TITLE,
|
123 |
description=DESCRIPTION,
|
|
|
124 |
)
|
|
|
125 |
with gr.Blocks() as demo:
|
126 |
-
|
127 |
-
|
128 |
interface.render()
|
129 |
|
130 |
demo.launch()
|
|
|
19 |
repo = Repository(local_dir=DATA_DIR, clone_from=DATASET_REPO_URL, token=HF_TOKEN)
|
20 |
|
21 |
|
22 |
+
def export(model_id: str, task: str, oauth_token: gr.OAuthToken) -> str:
|
23 |
+
|
24 |
+
if oauth_token.token is None:
|
25 |
+
raise ValueError("You must be logged in to use this space")
|
26 |
+
token = oauth_token.token
|
27 |
+
|
28 |
if model_id == "" or token == "":
|
29 |
return """
|
30 |
### Invalid input 🐞
|
|
|
96 |
|
97 |
To export your model you need:
|
98 |
- A Model ID from the Hub
|
|
|
99 |
|
100 |
That's it ! 🔥
|
101 |
"""
|
102 |
|
103 |
+
|
104 |
+
|
105 |
+
model_id = HuggingfaceHubSearch(
|
106 |
+
label="Hub Model ID",
|
107 |
+
placeholder="Search for model id on the hub",
|
108 |
+
search_type="model",
|
109 |
+
)
|
110 |
+
task = gr.Textbox(
|
111 |
+
value="auto",
|
112 |
+
label="Task : can be left to auto, will be automatically inferred",
|
113 |
+
)
|
114 |
interface = gr.Interface(
|
115 |
fn=export,
|
116 |
inputs=[
|
117 |
+
model_id,
|
118 |
+
task,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
119 |
],
|
120 |
outputs=[
|
121 |
gr.Markdown(label="output"),
|
122 |
],
|
|
|
123 |
title=TITLE,
|
124 |
description=DESCRIPTION,
|
125 |
+
api_name=False,
|
126 |
)
|
127 |
+
|
128 |
with gr.Blocks() as demo:
|
129 |
+
gr.Markdown("You must be logged in to use this space")
|
130 |
+
gr.LoginButton(min_width=250)
|
131 |
interface.render()
|
132 |
|
133 |
demo.launch()
|
export.py
CHANGED
@@ -7,7 +7,9 @@ from typing import List, Optional, Tuple
|
|
7 |
import torch
|
8 |
|
9 |
from huggingface_hub import (
|
|
|
10 |
CommitOperationAdd,
|
|
|
11 |
HfApi,
|
12 |
get_repo_discussions,
|
13 |
hf_hub_download,
|
|
|
7 |
import torch
|
8 |
|
9 |
from huggingface_hub import (
|
10 |
+
CommitInfo,
|
11 |
CommitOperationAdd,
|
12 |
+
Discussion,
|
13 |
HfApi,
|
14 |
get_repo_discussions,
|
15 |
hf_hub_download,
|