Johannes committed
Commit edb161a · 1 Parent(s): 690bf20

updated model

README.md CHANGED
@@ -1,3 +1,11 @@
- ---
- license: mit
- ---
+ ---
+ license: mit
+ ---
+ ```python
+ # Load the model
+ from transformers import AutoModelForMaskedLM, AutoTokenizer
+ model_name = "johahi/specieslm-metazoa-upstream-k6"
+ model = AutoModelForMaskedLM.from_pretrained(model_name, trust_remote_code=True)
+ # Load the corresponding tokenizer
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ ```
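
For context on how the loaded objects are typically used, here is a minimal masked-LM inference sketch. The input string is purely illustrative: the species token and 6-mer formatting expected by this tokenizer are not documented in this commit, so treat the preprocessing below as an assumption.

```python
# Minimal sketch of a masked-token prediction pass (illustrative only).
# The "homo_sapiens" species token and the 6-mer spacing are assumptions,
# not taken from this commit.
import torch

sequence = "homo_sapiens ACGTAC CGTACG " + tokenizer.mask_token  # hypothetical input format
inputs = tokenizer(sequence, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (batch, seq_len, vocab_size)

# Report the highest-scoring token at the masked position
mask_pos = (inputs["input_ids"] == tokenizer.mask_token_id).nonzero(as_tuple=True)[1]
predicted_id = logits[0, mask_pos].argmax(dim=-1)
print(tokenizer.convert_ids_to_tokens(predicted_id.tolist()))
```
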
config.json CHANGED
@@ -1,7 +1,9 @@
  {
- "_name_or_path": "/s/project/denovo-prosit/JohannesHingerl/BERTADN/final_models/huggingface_compatible/rotary_bert_huggingface",
+ "_name_or_path": "johahi/specieslm-metazoa-upstream-k6",
  "auto_map":
- {"AutoModel": "modeling_rotarybert.RotaryBertModel",
+ {
+ "AutoConfig": "configuration_rotarybert.RotaryBertConfig",
+ "AutoModel": "modeling_rotarybert.RotaryBertModel",
  "AutoModelForMaskedLM": "modeling_rotarybert.RotaryBertForMaskedLM"
  },
  "attention_probs_dropout_prob": 0.1,
configuration_rotarybert.py ADDED
@@ -0,0 +1,8 @@
+ from transformers.models.bert.configuration_bert import BertConfig
+
+ print ("loading this")
+
+ class RotaryBertConfig(BertConfig):
+     def __init__(self, **kwargs):
+         super().__init__(**kwargs)
+
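
Since `RotaryBertConfig` only forwards its keyword arguments to `BertConfig`, it behaves like a standard BERT configuration for construction and (de)serialization. A small sketch with made-up hyperparameter values:

```python
# Sketch: RotaryBertConfig round-trips like any BertConfig subclass.
# The values below are examples, not the checkpoint's actual hyperparameters.
from configuration_rotarybert import RotaryBertConfig  # assumes the file is importable locally

cfg = RotaryBertConfig(vocab_size=4101, hidden_size=768, num_hidden_layers=12)
cfg.save_pretrained("./rotary_bert_cfg")
reloaded = RotaryBertConfig.from_pretrained("./rotary_bert_cfg")
assert reloaded.hidden_size == cfg.hidden_size
```
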
generation_config.json DELETED
@@ -1,5 +0,0 @@
- {
- "_from_model_config": true,
- "pad_token_id": 0,
- "transformers_version": "4.45.2"
- }
modeling_rotarybert.py CHANGED
@@ -5,6 +5,7 @@ from typing import List, Optional, Tuple, Union
  from packaging import version
  import torch
  import torch.nn as nn
+ from .configuration_rotarybert import RotaryBertConfig

  def rotate_half(x, interleaved=False):
      if not interleaved:
@@ -157,6 +158,7 @@ class RotaryBertModel(BertModel):
          self.encoder = RotaryBertEncoder(config)

  class RotaryBertForMaskedLM(BertForMaskedLM):
+     config_class = RotaryBertConfig
      def __init__(self, config):
          super().__init__(config)
          self.bert = RotaryBertModel(config)
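
Setting `config_class = RotaryBertConfig` on the masked-LM class, together with the new `AutoConfig` entry in `config.json`, is what makes `from_pretrained` deserialize the checkpoint's configuration as the custom class. A minimal check, sketched under the assumption that the commit is applied as shown:

```python
# Sketch: after this change, the loaded model should carry a RotaryBertConfig
# instead of a plain BertConfig.
from transformers import AutoModelForMaskedLM

model = AutoModelForMaskedLM.from_pretrained(
    "johahi/specieslm-metazoa-upstream-k6",
    trust_remote_code=True,
)
print(type(model.config).__name__)  # expected: RotaryBertConfig
```
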