ssaka committed
Commit
4bfda0a
1 Parent(s): b40c01e

Upload LlamaForCausalLM

Files changed (2):
  1. config.json +0 -1
  2. generation_config.json +1 -1
config.json CHANGED
@@ -14,7 +14,6 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 32,
-  "pad_token_id": 0,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
generation_config.json CHANGED
@@ -3,7 +3,7 @@
   "eos_token_id": 2,
   "max_length": 4096,
   "max_memory": {
-    "cpu": 35495591936
+    "cpu": 27833966592
   },
   "no_split_module_classes": [
     "LlamaDecoderLayer"