Fill-Mask · Transformers · PyTorch · Safetensors · bert · custom_code
robinzixuan committed (verified)
Commit 68311fa · 1 Parent(s): 2e772b2

Update modeling_bert.py

Files changed (1): modeling_bert.py (+2 −2)
modeling_bert.py CHANGED
@@ -261,8 +261,8 @@ class BertSelfAttention(nn.Module):
         super().__init__()
         if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"):
             raise ValueError(
-                f"The hidden size ({
-                    config.hidden_size}) is not a multiple of the number of attention "
+                f'''The hidden size ({
+                    config.hidden_size}) is not a multiple of the number of attention '''
                 f"heads ({config.num_attention_heads})"
             )
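Why the quoting change matters: before Python 3.12 (PEP 701), a replacement field in a single- or double-quoted f-string could not span a physical line, so the removed lines are a SyntaxError on older interpreters; triple-quoted f-strings accept embedded newlines, including inside {...}. A minimal, runnable sketch of that behavior, assuming the model code must parse on pre-3.12 Python; `hidden_size` and `num_attention_heads` below are hypothetical stand-ins for the config attributes used in modeling_bert.py:

    # Hypothetical stand-ins for config.hidden_size / config.num_attention_heads.
    hidden_size = 768
    num_attention_heads = 12

    # The removed form fails to parse before Python 3.12, because the newline
    # falls inside a single-quoted f-string's replacement field:
    #
    #     f"The hidden size ({
    #         hidden_size}) is not a multiple of the number of attention "
    #
    # A triple-quoted f-string may contain newlines, even inside {...},
    # so the added form parses on older interpreters as well:
    message = (
        f'''The hidden size ({
            hidden_size}) is not a multiple of the number of attention '''
        f"heads ({num_attention_heads})"
    )
    print(message)
    # -> The hidden size (768) is not a multiple of the number of attention heads (12)

The newline sits inside the braces (the expression part), not in the literal text, so the rendered message stays on one line either way; only the source-level parsing differs.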