semran1 committed (verified) · Commit 23c23ff · 1 parent: 95baacb

Upload LlamaForCausalLM
config.json CHANGED

```diff
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "meta-llama/Llama-3.2-3B",
+  "_name_or_path": "output_3b/stage_finetuning",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -28,8 +28,7 @@
       "registry_requires_subclass": false,
       "sparsity_structure": "0:0",
       "targets": []
-    },
-    "version": "0.8.1"
+    }
   },
   "rms_norm_eps": 1e-05,
   "rope_scaling": {
@@ -41,8 +40,8 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": true,
-  "torch_dtype": "bfloat16",
+  "torch_dtype": "float16",
   "transformers_version": "4.47.1",
   "use_cache": true,
   "vocab_size": 128256
-}
+}
```
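Net effect of this config change: `_name_or_path` now points at the fine-tuning output directory instead of the base `meta-llama/Llama-3.2-3B`, the `"version": "0.8.1"` key is dropped from the compression-config block, and `torch_dtype` flips from `bfloat16` to `float16`. As a minimal sketch (the repo id below is a placeholder, not taken from this page), passing `torch_dtype="auto"` is enough for `transformers` to pick up the new dtype from config.json at load time:

```python
from transformers import AutoModelForCausalLM

repo_id = "your-org/your-model"  # placeholder: the repo id is not shown on this page

# torch_dtype="auto" reads the "torch_dtype" field from config.json,
# so the weights are materialized in fp16 rather than the previous bf16.
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype="auto")
print(model.dtype)  # torch.float16 for this revision
```

The dtype swap is a trade-off rather than a strict upgrade: fp16 keeps more mantissa bits than bf16 but has a much narrower exponent range, so it suits pre-Ampere GPUs without native bf16 support while being more prone to overflow.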
model-00001-of-00002.safetensors CHANGED

```diff
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7b594353ab63527420015e922d5b183eedca772fd537493519c0f9670f681b10
-size 4965799096
+oid sha256:f43ccecb9aaf86b39956a79df25d6e3ea4231ab76edf7a9dc7cdcfaec3367ecc
+size 4965798912
```
model-00002-of-00002.safetensors CHANGED

```diff
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f1584bdafefe77d92afd776bc37ad189100c155571de707ebaa495c811d6f707
-size 2247734992
+oid sha256:ae07f73d2eb03b1f6b204294fe125d5c663be80e285a22246f1f4859cdce720d
+size 1459729880
```
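Both shard entries are Git LFS pointer files rather than the tensors themselves: each records only the LFS spec version, the sha256 `oid` of the actual blob, and its byte `size`. Shard 1 barely changes, while shard 2 shrinks from 2,247,734,992 to 1,459,729,880 bytes, which is almost exactly the size of the `lm_head.weight` tensor removed from the index below. A small sketch for verifying a downloaded shard against its pointer (the expected values are copied from the new pointer above; the file is assumed to sit in the current directory):

```python
import hashlib
import os

# Expected oid/size taken from the new LFS pointer for shard 2.
expected = {
    "model-00002-of-00002.safetensors": (
        "ae07f73d2eb03b1f6b204294fe125d5c663be80e285a22246f1f4859cdce720d",
        1459729880,
    ),
}

for name, (oid, size) in expected.items():
    h = hashlib.sha256()
    with open(name, "rb") as f:
        # Hash in 1 MiB chunks to avoid loading multi-GB shards into memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    assert os.path.getsize(name) == size, f"{name}: size mismatch"
    assert h.hexdigest() == oid, f"{name}: sha256 mismatch"
    print(f"{name}: OK")
```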
model.safetensors.index.json CHANGED

```diff
@@ -1,9 +1,8 @@
 {
   "metadata": {
-    "total_size": 7213504512
+    "total_size": 6425499648
   },
   "weight_map": {
-    "lm_head.weight": "model-00002-of-00002.safetensors",
     "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
     "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
```