app.py CHANGED
@@ -45,26 +45,39 @@ def calculate_memory(model_name:str, library:str, options:list):
             "Total Size": dtype_total_size,
             "Training using Adam": dtype_training_size
         })
-    return pd.DataFrame(data)
-    # return f"## {title}\n\n" + markdown_table(data).set_params(
-    #     row_sep="markdown", quote=False,
-    # ).get_markdown()
+    return f'## {title}', pd.DataFrame(data)

+with gr.Blocks() as demo:
+    gr.Markdown(
+        """# Model Memory Calculator

-
-
-    )
+    This tool will help you calculate how much vRAM is needed to train and perform big model inference
+    on a model hosted on the :hugging_face: Hugging Face Hub. The minimum recommended vRAM needed for a model
+    is denoted as the size of the "largest layer", and training of a model is roughly 4x its size (for Adam).
+
+    Currently this tool supports all models hosted that use `transformers` and `timm`.

-
+    To use this tool pass in the URL or model name of the model you want to calculate the memory usage for,
+    select which framework it originates from ("auto" will try and detect it from the model metadata), and
+    what precisions you want to use.
+    """
+    )
+    out_text = gr.Markdown()
+    out = gr.DataFrame(
+        headers=["dtype", "Largest Layer", "Total Size", "Training using Adam"],
+    )

-
-
-
-
-
-
-
-
-    )
+    inp = gr.Textbox(label="Model Name or URL")
+    with gr.Row():
+        library = gr.Radio(["auto", "transformers", "timm"], label="Library", value="auto")
+        options = gr.CheckboxGroup(
+            ["float32", "float16", "int8", "int4"],
+            value="float32"
+        )
+    btn = gr.Button("Calculate Memory Usage", scale=0.5)

-
+    btn.click(
+        calculate_memory, inputs=[inp, library, options], outputs=[out_text, out],
+    )
+
+demo.launch()
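A note on the wiring in this change: a Gradio event handler that lists several components in outputs= expects its callback to return one value per component, in order, which is why calculate_memory now returns both the markdown heading and the DataFrame. The sketch below shows the same two-output pattern in isolation; it is not this Space's code, and the report function and its sample data are hypothetical.

import gradio as gr
import pandas as pd

def report(model_name: str):
    # Returning a tuple: the first element fills the Markdown component,
    # the second fills the DataFrame component, matching the order in outputs=[...].
    df = pd.DataFrame([{"dtype": "float32", "Total Size (GB)": 1.2}])
    return f"## Memory report for `{model_name}`", df

with gr.Blocks() as demo:
    inp = gr.Textbox(label="Model Name or URL")
    out_text = gr.Markdown()
    out = gr.DataFrame(headers=["dtype", "Total Size (GB)"])
    btn = gr.Button("Calculate Memory Usage")
    btn.click(report, inputs=[inp], outputs=[out_text, out])

if __name__ == "__main__":
    demo.launch()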