def print_trainable_parameters(model):
    """
    Prints the names of the trainable parameters and a summary line of how
    many parameters are trainable, then returns the counts as a dict.
    """
    trainable_params = 0
    all_param = 0
    all_param_names = []
    trainable_param_names = []
    # Separate tallies for prompt-tuning parameters: the soft-prompt
    # embedding table (PEFT-style parameter naming) and any
    # prompt-normalizer layers.
    prompt_weights = 0
    prompt_normalizer = 0
    prompt_normalizer_layer = []
    soft_prompt_layers = []
    for name, param in model.named_parameters():
        all_param += param.numel()
        all_param_names.append(name)
        if param.requires_grad:
            print(name)
            if 'prompt_encoder.default.embedding' in name:
                prompt_weights += param.numel()
                soft_prompt_layers.append(param)
            if 'prompt_normalizer' in name:
                prompt_normalizer += param.numel()
                prompt_normalizer_layer.append(param)
            trainable_params += param.numel()
            trainable_param_names.append(name)
    print(
        f"trainable params: {trainable_params} || all params: {all_param} || "
        f"trainable%: {100 * trainable_params / all_param}"
    )
    return {
        "trainable": trainable_params,
        "all": all_param,
        "trainable%": 100 * trainable_params / all_param,
    }