Initial commit
- logs.txt +0 -0
- merges.txt +0 -0
- mlc-chat-config.json +81 -0
- ndarray-cache-b16.json +0 -0
- ndarray-cache.json +0 -0
- params_shard_0.bin +3 -0
- params_shard_1.bin +3 -0
- params_shard_10.bin +3 -0
- params_shard_11.bin +3 -0
- params_shard_12.bin +3 -0
- params_shard_13.bin +3 -0
- params_shard_14.bin +3 -0
- params_shard_15.bin +3 -0
- params_shard_16.bin +3 -0
- params_shard_17.bin +3 -0
- params_shard_18.bin +3 -0
- params_shard_19.bin +3 -0
- params_shard_2.bin +3 -0
- params_shard_20.bin +3 -0
- params_shard_21.bin +3 -0
- params_shard_22.bin +3 -0
- params_shard_23.bin +3 -0
- params_shard_24.bin +3 -0
- params_shard_25.bin +3 -0
- params_shard_26.bin +3 -0
- params_shard_27.bin +3 -0
- params_shard_28.bin +3 -0
- params_shard_29.bin +3 -0
- params_shard_3.bin +3 -0
- params_shard_4.bin +3 -0
- params_shard_5.bin +3 -0
- params_shard_6.bin +3 -0
- params_shard_7.bin +3 -0
- params_shard_8.bin +3 -0
- params_shard_9.bin +3 -0
- tokenizer.json +0 -0
- tokenizer_config.json +40 -0
- vocab.json +0 -0
logs.txt
ADDED
The diff for this file is too large to render.
merges.txt
ADDED
The diff for this file is too large to render.
mlc-chat-config.json
ADDED
@@ -0,0 +1,81 @@
{
  "version": "0.1.0",
  "model_type": "qwen2",
  "quantization": "q4f32_1",
  "model_config": {
    "hidden_act": "silu",
    "hidden_size": 1536,
    "intermediate_size": 8960,
    "num_attention_heads": 12,
    "num_hidden_layers": 28,
    "num_key_value_heads": 2,
    "rms_norm_eps": 1e-06,
    "rope_theta": 1000000.0,
    "vocab_size": 151936,
    "tie_word_embeddings": true,
    "context_window_size": 32768,
    "prefill_chunk_size": 2048,
    "tensor_parallel_shards": 1,
    "head_dim": 128,
    "dtype": "float32",
    "max_batch_size": 80
  },
  "vocab_size": 151936,
  "context_window_size": 32768,
  "sliding_window_size": -1,
  "prefill_chunk_size": 2048,
  "attention_sink_size": -1,
  "tensor_parallel_shards": 1,
  "temperature": 0.7,
  "presence_penalty": 0.0,
  "frequency_penalty": 0.0,
  "repetition_penalty": 1.1,
  "top_p": 0.8,
  "tokenizer_files": [
    "tokenizer.json",
    "vocab.json",
    "merges.txt",
    "tokenizer_config.json"
  ],
  "tokenizer_info": {
    "token_postproc_method": "byte_level",
    "prepend_space_in_encode": false,
    "strip_space_in_decode": false
  },
  "conv_template": {
    "name": "chatml",
    "system_template": "<|im_start|>system\n{system_message}",
    "system_message": "A conversation between a user and an LLM-based AI assistant. The assistant gives helpful and honest answers.",
    "system_prefix_token_ids": null,
    "add_role_after_system_message": true,
    "roles": {
      "user": "<|im_start|>user",
      "assistant": "<|im_start|>assistant"
    },
    "role_templates": {
      "user": "{user_message}",
      "assistant": "{assistant_message}",
      "tool": "{tool_message}"
    },
    "messages": [],
    "seps": [
      "<|im_end|>\n"
    ],
    "role_content_sep": "\n",
    "role_empty_sep": "\n",
    "stop_str": [
      "<|im_end|>"
    ],
    "stop_token_ids": [
      2
    ],
    "function_string": "",
    "use_function_calling": false
  },
  "pad_token_id": 151643,
  "bos_token_id": 151643,
  "eos_token_id": [
    151645,
    151643
  ]
}
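
This config is what the MLC-LLM runtime reads at load time: it supplies the chatml conversation template, the sampling defaults (temperature 0.7, top_p 0.8, repetition_penalty 1.1), and the stop tokens. A minimal sketch of serving the compiled model through the mlc_llm Python package's MLCEngine API; the local path is a hypothetical placeholder for a clone of this repo, not a name confirmed by the commit:

    from mlc_llm import MLCEngine

    # Hypothetical local path to a clone of this repo; the engine picks up
    # mlc-chat-config.json from this directory.
    model = "./Qwen2-1.5B-Instruct-q4f32_1-MLC"
    engine = MLCEngine(model)

    # OpenAI-style chat completion; the conv_template above wraps each turn
    # in <|im_start|>/<|im_end|> markers before generation.
    for response in engine.chat.completions.create(
        messages=[{"role": "user", "content": "Hello!"}],
        model=model,
        stream=True,
    ):
        for choice in response.choices:
            print(choice.delta.content or "", end="", flush=True)

    engine.terminate()
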
ndarray-cache-b16.json
ADDED
The diff for this file is too large to render.
ndarray-cache.json
ADDED
The diff for this file is too large to render.
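
The two ndarray-cache manifests index the params_shard_*.bin files for the runtime. A sketch of tallying shard sizes from ndarray-cache.json, assuming the standard MLC NDArray-cache layout (a top-level "records" list whose entries carry a "dataPath" and "nbytes"); these field names are assumptions, since the diff is too large to render here:

    import json

    # Assumed MLC NDArray-cache manifest structure; not confirmed by this diff.
    with open("ndarray-cache.json") as f:
        cache = json.load(f)

    total = 0
    for record in cache["records"]:
        total += record["nbytes"]
        print(record["dataPath"], record["nbytes"])
    print(f"total parameter bytes: {total}")
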
params_shard_0.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8f9dc3f548b41bb4dd588249e07db9ba4be5c4fea1e4d3e8b0a66b1e82d9bd72
size 116686848
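
Each params_shard_*.bin entry in this commit is a Git LFS pointer: the weight blob itself lives in LFS storage, and the pointer records only its SHA-256 digest and byte size. A minimal sketch for checking a downloaded shard against its pointer; the helper and file names are illustrative, not part of any tool:

    import hashlib

    def verify_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
        """Illustrative helper: compare a blob's sha256 digest and size
        against the oid/size lines of its Git LFS pointer file."""
        fields = {}
        with open(pointer_path) as f:
            for line in f:
                key, _, value = line.strip().partition(" ")
                fields[key] = value
        expected_oid = fields["oid"].split(":", 1)[1]  # strip "sha256:" prefix
        expected_size = int(fields["size"])

        h = hashlib.sha256()
        size = 0
        with open(blob_path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):
                h.update(chunk)
                size += len(chunk)
        return h.hexdigest() == expected_oid and size == expected_size

    # e.g. verify_lfs_pointer("params_shard_0.bin.pointer", "params_shard_0.bin")
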
params_shard_1.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b3519c3fe1d2a52e522af73959d4d0ec64900fdae7add3ba1faef1ebb3b3e9f5
size 22330368
params_shard_10.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:488e9c8c8d3406b453fb78a8aa7570e721910ba60a23fd0b19a69a40b4ed64b9
size 26331136
params_shard_11.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f0b39b8566d4c0e3c11aacce9cde6491219271b54c8c38259031e4f0cabc0329
size 26331136
params_shard_12.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a6d3e0c9f6560819494df53635a943fd05789b1cbf9415c038e69b452f833283
size 26331136
params_shard_13.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:235743856cc83906eda0956179733820e3dce772b87f7e9620e9de9edf30dc77
size 26331136
params_shard_14.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4fe713959137bf25547ca81b00c7caecab0e48e7f95dfed3c2557127d79081b9
size 26331136
params_shard_15.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ef4c29c1bfbdd7e6bb4c810cdefff276e61a3ee015f357e03662037e8032ec75
size 26331136
params_shard_16.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:de42173d910b01e1a93dc33cc11440d54903b4159511f20ee5241f714a6adece
size 26331136
params_shard_17.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fa34e7e990d6af2ec42090f625ca2320b9c026273e71075419b491abf1844f72
size 26331136
params_shard_18.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:00001012bf7486ddab57143a50d3da4abfdd09d6691bc8dcc1a0f25bfac7bab4
size 26331136
params_shard_19.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:87498b373bbd26119ba1982346facbca2562538b88d0d05996890da6169b8d54
size 26331136
params_shard_2.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b72729649a803c41f4660bdd19bf417119b3c1c5ed68541ff42768bd6c43a786
size 26331136
params_shard_20.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:81e7f236e9713dff7ed3d95af20c323f63e2b858ea83cbfa48e34af5350c9930
size 26331136
params_shard_21.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:12ab780cd495212ca898521040cbd4abae75cea4a9c1fa5090259d303c64ac6d
size 26331136
params_shard_22.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9097afd0bffb4583b3bb1d6ed9c5102d7f498b4385bd02119c60a2cf50efae3d
size 26331136
params_shard_23.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f4b39e011c020b4867d15a45584c8a90301e2806e3e47b8f271171aed62ec87e
size 26331136
params_shard_24.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f64a63c8f5cf40ba95d43d3a6754f2857a10846e8916a01ba0adcc330fa10b4b
size 26331136
params_shard_25.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:765e9decc546082cebc7cff0913963d48de3e6c8854e8b8130cf0c2f2c1043eb
size 26331136
params_shard_26.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fc70fa77616b2a268d2ca5772a40996438ffa0fe6c1777f75e8b19d0d7242332
size 26331136
params_shard_27.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e415c43faf5ece73f21d89b80c0e362537328a883e658386c9f919034dde52c6
size 26331136
params_shard_28.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:49c728d186b8b075a5841fe7da9d60614c6288198c7e30f62478e3f048e59cf6
size 26331136
params_shard_29.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:33e131ad43648dd1d2a2166768b70d4f506a84d8db49bec816cc124c810e85df
size 18589696
params_shard_3.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c4f3d880c0d460b2b6604657257338641883c7804a1cd090e3a59ed08465c46e
size 26331136
params_shard_4.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3ef2f51e2c0f41cd53e87668e863de4b652ff7f9583b7faea8c5c3ae336e9fde
size 26331136
params_shard_5.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4f1994c8739213b6f03278daf047c4829a4481ccb0b3bd7cbfc9daf36e595f03
size 26331136
params_shard_6.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3340fa2e4b9c4bb4ccefe7eb281ad487f48a343ee983d14aa225f20e24e3eeed
size 26331136
params_shard_7.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9e0d9a7c097d67707741b6193c7520bb407871abe72a2acdb7807bbeb7368795
size 26331136
params_shard_8.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:29519b85b0a21363578430121596a72809916e1e7f2945db7c423fd7076c17d5
size 26331136
params_shard_9.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b58317b40b1e52b30731a5f62cef3aca8e724dcc2578740926229631c1e8340a
size 26331136
tokenizer.json
ADDED
The diff for this file is too large to render.
tokenizer_config.json
ADDED
@@ -0,0 +1,40 @@
{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "151643": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151644": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151645": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": ["<|im_start|>", "<|im_end|>"],
  "bos_token": null,
  "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "errors": "replace",
  "model_max_length": 32768,
  "pad_token": "<|endoftext|>",
  "split_special_tokens": false,
  "tokenizer_class": "Qwen2Tokenizer",
  "unk_token": null
}
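
The chat_template above is a Jinja template in the standard Hugging Face tokenizers format. A short sketch of rendering it with transformers' apply_chat_template; the model path is a hypothetical placeholder for any local directory containing this tokenizer_config.json plus tokenizer.json, vocab.json, and merges.txt:

    from transformers import AutoTokenizer

    # Hypothetical local path to a clone of this repo.
    tok = AutoTokenizer.from_pretrained("./Qwen2-1.5B-Instruct-q4f32_1-MLC")

    prompt = tok.apply_chat_template(
        [{"role": "user", "content": "Hello!"}],
        tokenize=False,
        add_generation_prompt=True,
    )
    print(prompt)
    # Expected rendering (the default system message is injected because
    # no system turn was supplied):
    # <|im_start|>system
    # You are a helpful assistant.<|im_end|>
    # <|im_start|>user
    # Hello!<|im_end|>
    # <|im_start|>assistant
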
vocab.json
ADDED
The diff for this file is too large to render.