import gradio as gr
from transformers import AutoModel
import torch


def count_parameters(model_path):
    try:
        # Load model on CPU
        model = AutoModel.from_pretrained(model_path, device_map="cpu")

        # Count trainable parameters
        trainable_params = sum(p.numel() for p in model.parameters() if p.requires_grad)

        # Count total parameters
        total_params = sum(p.numel() for p in model.parameters())

        # Format numbers with commas for readability
        return f"""
Total Parameters: {total_params:,}
Trainable Parameters: {trainable_params:,}
"""
    except Exception as e:
        return f"Error loading model: {str(e)}"


# Create Gradio interface
demo = gr.Interface(
    fn=count_parameters,
    inputs=gr.Textbox(
        label="Enter Hugging Face Model Path",
        placeholder="e.g., bert-base-uncased",
    ),
    outputs=gr.Textbox(label="Parameter Count"),
    title="Hugging Face Model Parameter Counter",
    description="Enter a Hugging Face model path to see its parameter count.",
    examples=[
        ["bert-base-uncased"],
        ["gpt2"],
        ["roberta-base"],
    ],
)

if __name__ == "__main__":
    demo.launch()
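
# A minimal sketch of the dependencies this Space assumes, written here as a
# commented-out requirements.txt. The package set is inferred from the imports
# above; "accelerate" is an assumption, included because from_pretrained is
# called with device_map, which requires it. Version pins are left out.
#
#     gradio
#     transformers
#     torch
#     accelerate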