import gradio as gr
from transformers import AutoModel
import torch
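# Note: passing `device_map` to `from_pretrained` relies on the `accelerate` package,
# so the Space's requirements presumably include it alongside transformers and torch
# (an assumption about this Space's setup, not something stated in the file itself).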
def count_parameters(model_path):
    try:
        # Load the model weights on CPU
        model = AutoModel.from_pretrained(model_path, device_map="cpu", trust_remote_code=True)

        # Count trainable parameters, de-duplicating tied weights by their
        # underlying data pointer so shared tensors are only counted once
        unique_params = {}
        for name, p in model.named_parameters():
            if p.requires_grad:
                unique_params[p.data_ptr()] = (name, p.numel())
        trainable_params = sum(numel for _, numel in unique_params.values())

        # Count total parameters, again accounting for weight tying
        unique_params = {}
        for name, p in model.named_parameters():
            unique_params[p.data_ptr()] = (name, p.numel())
        total_params = sum(numel for _, numel in unique_params.values())

        # Format numbers with thousands separators for readability
        return f"""
Total Parameters: {total_params:,}
Trainable Parameters: {trainable_params:,}
"""
    except Exception as e:
        return f"Error loading model: {str(e)}"
# Create the Gradio interface
demo = gr.Interface(
    fn=count_parameters,
    inputs=gr.Textbox(
        label="Enter Hugging Face Model Path",
        placeholder="e.g., bert-base-uncased"
    ),
    outputs=gr.Textbox(label="Parameter Count"),
    title="Hugging Face Model Parameter Counter",
    description="Enter a Hugging Face model path to see its parameter count.",
    examples=[
        ["bert-base-uncased"],
        ["gpt2"],
        ["roberta-base"]
    ]
)
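# Minimal local smoke test (a sketch, assuming network access to the Hugging Face Hub):
# uncomment to print the counts for one of the example models before launching the UI.
#
# print(count_parameters("bert-base-uncased"))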
if __name__ == "__main__":
    demo.launch()