Johannes committed on
Commit
8d07ee2
·
1 Parent(s): f95745a

updated model

Browse files
Files changed (2) hide show
  1. config.json +1 -1
  2. configuration_rotarybert.py +0 -2
config.json CHANGED
@@ -15,7 +15,7 @@
15
  "intermediate_size": 3072,
16
  "layer_norm_eps": 1e-12,
17
  "max_position_embeddings": 8192,
18
- "model_type": "bert",
19
  "num_attention_heads": 12,
20
  "num_hidden_layers": 12,
21
  "pad_token_id": 0,
 
15
  "intermediate_size": 3072,
16
  "layer_norm_eps": 1e-12,
17
  "max_position_embeddings": 8192,
18
+ "model_type": "rotarybert",
19
  "num_attention_heads": 12,
20
  "num_hidden_layers": 12,
21
  "pad_token_id": 0,
configuration_rotarybert.py CHANGED
@@ -1,7 +1,5 @@
1
  from transformers.models.bert.configuration_bert import BertConfig
2
 
3
- print ("loading this")
4
-
5
  class RotaryBertConfig(BertConfig):
6
  def __init__(self, **kwargs):
7
  super().__init__(**kwargs)
 
1
  from transformers.models.bert.configuration_bert import BertConfig
2
 
 
 
3
class RotaryBertConfig(BertConfig):
    """Configuration for a RotaryBERT model.

    Identical to :class:`BertConfig` except that it declares its own
    ``model_type`` so that the ``"model_type": "rotarybert"`` entry in
    ``config.json`` resolves to this class via ``AutoConfig`` /
    ``register_for_auto_class`` instead of falling back to plain BERT.
    """

    # Must match the "model_type" value written in config.json.
    model_type = "rotarybert"

    def __init__(self, **kwargs):
        # No extra hyperparameters yet; forward everything to BertConfig.
        super().__init__(**kwargs)