fix ignore_keys
modeling_lsg_xlm_roberta.py CHANGED
@@ -1040,9 +1040,6 @@ class LSGXLMRobertaForCausalLM(LSGRobertaPreTrainedModel, RobertaForCausalLM):
         self.roberta = LSGXLMRobertaModel(config, add_pooling_layer=False)
         self.lm_head = RobertaLMHead(config)
 
-        # The LM head weights require special treatment only when they are tied with the word embeddings
-        self.update_keys_to_ignore(config, ["lm_head.decoder.weight"])
-
         # Initialize weights and apply final processing
         self.post_init()
 
@@ -1070,9 +1067,6 @@ class LSGXLMRobertaForMaskedLM(LSGRobertaPreTrainedModel, RobertaForMaskedLM):
 
         self.roberta = LSGXLMRobertaModel(config, add_pooling_layer=False)
         self.lm_head = RobertaLMHead(config)
-
-        # The LM head weights require special treatment only when they are tied with the word embeddings
-        self.update_keys_to_ignore(config, ["lm_head.decoder.weight"])
 
         # Initialize weights and apply final processing
         self.post_init()
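For context on the removed calls: update_keys_to_ignore was a PreTrainedModel helper that, when config.tie_word_embeddings is false, removed the listed keys from the model's save/load ignore lists; with tied embeddings, lm_head.decoder.weight duplicates the word-embedding weights and can safely be skipped on save. Recent transformers releases appear to no longer provide this helper, which is presumably why the calls are dropped here. A minimal, hypothetical compatibility sketch (the hasattr guard is an assumption for illustration, not part of this commit):

    # Hypothetical guard, not part of this commit: call the helper only if
    # the installed transformers release still defines it on PreTrainedModel.
    if hasattr(self, "update_keys_to_ignore"):
        # With tied embeddings, lm_head.decoder.weight mirrors the word
        # embeddings and stays on the save-ignore list; otherwise the helper
        # removes it from the list so the untied weight is actually saved.
        self.update_keys_to_ignore(config, ["lm_head.decoder.weight"])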