Upload model
- README.md +1 -1
- adapter_config.json +5 -5
- adapter_model.bin +2 -2
README.md CHANGED
@@ -1,6 +1,6 @@
 ---
 library_name: peft
-base_model:
+base_model: ai-forever/mgpt
 ---
 
 # Model Card for Model ID
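The new base_model field is machine-readable Hub metadata. A minimal sketch of reading it back with huggingface_hub, assuming a placeholder repository id for wherever this adapter is hosted:

# Sketch: reading the base_model metadata added above; the repo id is a
# placeholder, not the actual repository name.
from huggingface_hub import ModelCard

card = ModelCard.load("your-username/mgpt-prefix-tuning")  # hypothetical repo id
metadata = card.data.to_dict()
print(metadata.get("library_name"))  # "peft"
print(metadata.get("base_model"))    # "ai-forever/mgpt"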
adapter_config.json CHANGED
@@ -1,15 +1,15 @@
 {
   "auto_mapping": null,
-  "base_model_name_or_path": "
-  "encoder_hidden_size":
+  "base_model_name_or_path": "ai-forever/mgpt",
+  "encoder_hidden_size": 2048,
   "inference_mode": true,
-  "num_attention_heads":
-  "num_layers":
+  "num_attention_heads": 16,
+  "num_layers": 24,
   "num_transformer_submodules": 1,
   "num_virtual_tokens": 30,
   "peft_type": "PREFIX_TUNING",
   "prefix_projection": false,
   "revision": null,
   "task_type": "CAUSAL_LM",
-  "token_dim":
+  "token_dim": 2048
 }
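A minimal sketch of how a prefix-tuning adapter with this configuration could be loaded and run with the peft library; the repository id below is a placeholder and the prompt is illustrative only:

# Minimal loading sketch; the repo id is a placeholder, not the actual repo name.
from peft import PeftConfig, PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

ADAPTER_ID = "your-username/mgpt-prefix-tuning"  # hypothetical repo id

# The adapter config records the base model ("ai-forever/mgpt") and the
# prefix-tuning settings (PREFIX_TUNING, CAUSAL_LM, 30 virtual tokens).
peft_config = PeftConfig.from_pretrained(ADAPTER_ID)

tokenizer = AutoTokenizer.from_pretrained(peft_config.base_model_name_or_path)
base_model = AutoModelForCausalLM.from_pretrained(peft_config.base_model_name_or_path)

# Attach the trained prefix on top of the frozen base model.
model = PeftModel.from_pretrained(base_model, ADAPTER_ID)
model.eval()

inputs = tokenizer("The weather today is", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=30)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))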
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:a76c50fa5026fcd61fb6e176ad9e08093cd11a2d97c0ed2ec69972423cb57d15
+size 11797754
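adapter_model.bin is stored as a Git LFS pointer: oid is the SHA-256 of the actual weights file and size is its byte count. A small sketch for checking a downloaded copy against those values, again with a placeholder repository id:

# Verify a downloaded adapter_model.bin against the LFS pointer above.
import hashlib
import os

from huggingface_hub import hf_hub_download

REPO_ID = "your-username/mgpt-prefix-tuning"  # hypothetical repo id

path = hf_hub_download(repo_id=REPO_ID, filename="adapter_model.bin")

# Hash the file in 1 MiB chunks to avoid loading it all into memory.
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

print("oid sha256:", digest.hexdigest())  # expected: a76c50fa...cb57d15
print("size:", os.path.getsize(path))     # expected: 11797754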