Upload LlamaForCausalLM
- config.json +3 -2
- generation_config.json +1 -2
- pytorch_model-00001-of-00002.bin +1 -1
- pytorch_model-00002-of-00002.bin +1 -1
config.json
CHANGED
@@ -3,6 +3,7 @@
   "architectures": [
     "LlamaForCausalLM"
   ],
+  "attention_bias": false,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
@@ -21,7 +22,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.
-  "use_cache":
+  "transformers_version": "4.34.0",
+  "use_cache": true,
   "vocab_size": 32000
 }
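For reference, the new keys surface directly on the loaded config object. A minimal sketch, assuming transformers >= 4.34.0 (the release pinned above, and to my knowledge the first to recognize `attention_bias` on `LlamaConfig`) and a placeholder repo id:

```python
from transformers import AutoConfig

# Placeholder repo id; substitute the actual Hub path of this model.
config = AutoConfig.from_pretrained("your-org/your-llama-model")

print(config.attention_bias)  # False: no bias terms in the attention projections
print(config.use_cache)       # True: reuse past key/values during generation
print(config.vocab_size)      # 32000
```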
generation_config.json
CHANGED
@@ -3,6 +3,5 @@
   "bos_token_id": 1,
   "eos_token_id": 2,
   "pad_token_id": 0,
-  "transformers_version": "4.
-  "use_cache": false
+  "transformers_version": "4.34.0"
 }
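Since `"use_cache": false` is dropped here rather than updated, generation now falls back to the `"use_cache": true` set in config.json above, i.e. the KV cache is enabled by default. A minimal sketch of reading the cleaned-up file, again with a placeholder repo id:

```python
from transformers import GenerationConfig

# Placeholder repo id; substitute the actual Hub path of this model.
gen_config = GenerationConfig.from_pretrained("your-org/your-llama-model")

print(gen_config.bos_token_id)  # 1
print(gen_config.eos_token_id)  # 2
print(gen_config.pad_token_id)  # 0
```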
pytorch_model-00001-of-00002.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8e78e022c9dc2237d5ee9d7a308ffbd07549e88d8dcd67a3c66845d23d5fed53
 size 9976623130
pytorch_model-00002-of-00002.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:67e49c03622f9b0eff43b7b1308082a4f67d6758742fd40b0afc73c149abd861
 size 3500311811
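Both weight files are Git LFS pointers: `version` names the pointer spec, `oid sha256:` carries the digest of the real payload, and `size` is its byte count. A downloaded shard can therefore be checked against the values in this commit; a minimal sketch, assuming the shards sit in the current directory:

```python
import hashlib
import os

# Expected (sha256, size) pairs, copied from the LFS pointers in this commit.
EXPECTED = {
    "pytorch_model-00001-of-00002.bin": (
        "8e78e022c9dc2237d5ee9d7a308ffbd07549e88d8dcd67a3c66845d23d5fed53",
        9976623130,
    ),
    "pytorch_model-00002-of-00002.bin": (
        "67e49c03622f9b0eff43b7b1308082a4f67d6758742fd40b0afc73c149abd861",
        3500311811,
    ),
}

for name, (oid, size) in EXPECTED.items():
    assert os.path.getsize(name) == size, f"{name}: size mismatch"
    digest = hashlib.sha256()
    with open(name, "rb") as f:
        # Hash in 1 MiB chunks so the ~10 GB shard never sits in memory at once.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    assert digest.hexdigest() == oid, f"{name}: hash mismatch"

print("both shards match their LFS pointers")
```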