PyTorch
English
llama
instruct
values
ethics
oklingefjord committed on
Commit
59467b2
1 Parent(s): b2cc996

Upload folder using huggingface_hub

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "/runs/sft__3k-2024-09-19-19-54-34-6e93/lora-out/merged",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
@@ -31,5 +31,5 @@
31
  "torch_dtype": "bfloat16",
32
  "transformers_version": "4.44.0",
33
  "use_cache": false,
34
- "vocab_size": 128256
35
  }
 
1
  {
2
+ "_name_or_path": "/runs/sft__v9-2024-10-04-10-27-31-c0d1/lora-out/merged",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
 
31
  "torch_dtype": "bfloat16",
32
  "transformers_version": "4.44.0",
33
  "use_cache": false,
34
+ "vocab_size": 128259
35
  }
pytorch_model-00001-of-00004.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e125635ae4619926f8b00fc885587b6e8743cf5199e00c44daf5e15745c4072f
3
- size 4976717314
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:69097205f39d07b7b0634f429732f69ad0b5e8b1119595c41985af2e934f38f8
3
+ size 4976741890
pytorch_model-00002-of-00004.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:1cd8f81133286451ec2303948d71e1b295e39a43dec8308d98fadceb96c8db1a
3
  size 4999826246
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:10754f22eebb5897bc57e101d0b34de7e529f1400cf299a01cfe8d3b097408cd
3
  size 4999826246
pytorch_model-00003-of-00004.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:f1c73261ae6a52c32cbfce746281878783589402ea910b66e95b21c70b02d47e
3
  size 4915938762
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b6fbb75e24b85ffa5ae4e3e41e3111acd319356b1fa17e750254938bcb032ae0
3
  size 4915938762
pytorch_model-00004-of-00004.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:24d451c70585b322ea2ed4996abd0e949f8edeb3704f45349eb63e899de688c3
3
- size 1168140873
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:758fb6839b72de9083f80e27076abaf83a9b1d552a96bddba37925c45249bab9
3
+ size 1168165449
pytorch_model.bin.index.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "metadata": {
3
- "total_size": 16060522496
4
  },
5
  "weight_map": {
6
  "lm_head.weight": "pytorch_model-00004-of-00004.bin",
 
1
  {
2
  "metadata": {
3
+ "total_size": 16060571648
4
  },
5
  "weight_map": {
6
  "lm_head.weight": "pytorch_model-00004-of-00004.bin",
tokenizer.json CHANGED
@@ -3,6 +3,15 @@
3
  "truncation": null,
4
  "padding": null,
5
  "added_tokens": [
 
 
 
 
 
 
 
 
 
6
  {
7
  "id": 128000,
8
  "content": "<|begin_of_text|>",
@@ -2306,6 +2315,33 @@
2306
  "rstrip": false,
2307
  "normalized": false,
2308
  "special": true
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2309
  }
2310
  ],
2311
  "normalizer": null,
 
3
  "truncation": null,
4
  "padding": null,
5
  "added_tokens": [
6
+ {
7
+ "id": 760,
8
+ "content": "\">",
9
+ "single_word": false,
10
+ "lstrip": false,
11
+ "rstrip": false,
12
+ "normalized": false,
13
+ "special": false
14
+ },
15
  {
16
  "id": 128000,
17
  "content": "<|begin_of_text|>",
 
2315
  "rstrip": false,
2316
  "normalized": false,
2317
  "special": true
2318
+ },
2319
+ {
2320
+ "id": 128256,
2321
+ "content": "<value choice-type=\"",
2322
+ "single_word": false,
2323
+ "lstrip": false,
2324
+ "rstrip": false,
2325
+ "normalized": false,
2326
+ "special": false
2327
+ },
2328
+ {
2329
+ "id": 128257,
2330
+ "content": "\" consideration=\"",
2331
+ "single_word": false,
2332
+ "lstrip": false,
2333
+ "rstrip": false,
2334
+ "normalized": false,
2335
+ "special": false
2336
+ },
2337
+ {
2338
+ "id": 128258,
2339
+ "content": "</value>",
2340
+ "single_word": false,
2341
+ "lstrip": false,
2342
+ "rstrip": false,
2343
+ "normalized": false,
2344
+ "special": false
2345
  }
2346
  ],
2347
  "normalizer": null,
tokenizer_config.json CHANGED
@@ -1,5 +1,13 @@
1
  {
2
  "added_tokens_decoder": {
 
 
 
 
 
 
 
 
3
  "128000": {
4
  "content": "<|begin_of_text|>",
5
  "lstrip": false,
@@ -2047,6 +2055,30 @@
2047
  "rstrip": false,
2048
  "single_word": false,
2049
  "special": true
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2050
  }
2051
  },
2052
  "bos_token": "<|begin_of_text|>",
 
1
  {
2
  "added_tokens_decoder": {
3
+ "760": {
4
+ "content": "\">",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": false
10
+ },
11
  "128000": {
12
  "content": "<|begin_of_text|>",
13
  "lstrip": false,
 
2055
  "rstrip": false,
2056
  "single_word": false,
2057
  "special": true
2058
+ },
2059
+ "128256": {
2060
+ "content": "<value choice-type=\"",
2061
+ "lstrip": false,
2062
+ "normalized": false,
2063
+ "rstrip": false,
2064
+ "single_word": false,
2065
+ "special": false
2066
+ },
2067
+ "128257": {
2068
+ "content": "\" consideration=\"",
2069
+ "lstrip": false,
2070
+ "normalized": false,
2071
+ "rstrip": false,
2072
+ "single_word": false,
2073
+ "special": false
2074
+ },
2075
+ "128258": {
2076
+ "content": "</value>",
2077
+ "lstrip": false,
2078
+ "normalized": false,
2079
+ "rstrip": false,
2080
+ "single_word": false,
2081
+ "special": false
2082
  }
2083
  },
2084
  "bos_token": "<|begin_of_text|>",