mjschock committed
Commit 61c6716 · verified · Parent: 3fc4851

Upload model

Files changed (3):
  1. config.json +6 -1
  2. model.safetensors +2 -2
  3. modeling_mamba.py +4 -4
config.json CHANGED
@@ -1,6 +1,10 @@
 {
+  "architectures": [
+    "MambaModelForCausalLM"
+  ],
   "auto_map": {
-    "AutoConfig": "configuration_mamba.MambaConfig"
+    "AutoConfig": "configuration_mamba.MambaConfig",
+    "AutoModelForCausalLM": "modeling_mamba.MambaModelForCausalLM"
   },
   "bias": false,
   "conv_bias": true,
@@ -14,6 +18,7 @@
   "model_type": "mamba",
   "n_layer": 24,
   "pad_vocab_size_multiple": 8,
+  "torch_dtype": "float32",
   "transformers_version": "4.37.2",
   "vocab_size": 50280
 }
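For context on what these config changes do: "architectures" records the class the checkpoint was saved with, and the new "auto_map" entry tells transformers' Auto classes where to find the custom MambaModelForCausalLM inside the repo. A minimal loading sketch follows; the repo id "mjschock/mamba" is a placeholder assumption (not stated in this commit), and trust_remote_code=True is required because the referenced classes live in the repo rather than in the transformers library:

from transformers import AutoModelForCausalLM

# "mjschock/mamba" is a placeholder repo id (assumption, not from the commit).
model = AutoModelForCausalLM.from_pretrained(
    "mjschock/mamba",
    trust_remote_code=True,  # follow auto_map to modeling_mamba.MambaModelForCausalLM
)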
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:287cad4048030ae246aeda26c0e703b838c50422fe89f19099298c034b25e7b5
-size 516565384
+oid sha256:699ed6f59fb948186f449c5031e0dc659d504c90d7e018302aa1e190cdb40220
+size 516567560
modeling_mamba.py CHANGED
@@ -227,13 +227,13 @@ class MambaModel(MambaPreTrainedModel):
         config: MambaConfig
         """
         super().__init__(config)
-        self.config = config
+        # self.config = config
 
-        self.embedding = nn.Embedding(config.vocab_size, config.d_model)
+        self.embedding = nn.Embedding(self.config.vocab_size, self.config.d_model)
         self.layers = nn.ModuleList(
-            [MambaBlock(config, layer_idx) for layer_idx in range(config.n_layer)]
+            [MambaBlock(self.config, layer_idx) for layer_idx in range(self.config.n_layer)]
         )
-        self.norm_f = MambaRMSNorm(config.d_model)
+        self.norm_f = MambaRMSNorm(self.config.d_model)
 
         self.gradient_checkpointing = False
         self.post_init()
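A note on why `self.config = config` could be commented out: PreTrainedModel.__init__ (reached via super().__init__(config)) already stores the config on the instance, so self.config is valid before the submodules are built. A minimal sketch of the same pattern outside transformers; the config values below are illustrative, not taken from this checkpoint:

from types import SimpleNamespace

import torch.nn as nn

class TinyBase(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config  # mirrors what PreTrainedModel.__init__ does

class TinyModel(TinyBase):
    def __init__(self, config):
        super().__init__(config)
        # self.config is already set by the base class, so reads like the
        # ones in this commit need no explicit `self.config = config`.
        self.embedding = nn.Embedding(self.config.vocab_size, self.config.d_model)

cfg = SimpleNamespace(vocab_size=50280, d_model=768)  # d_model is illustrative
model = TinyModel(cfg)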