Upload 4 files

Files changed:
- README.md (+7 -5)
- special_tokens_map.json (+1 -7)
- tokenizer_config.json (+2 -2)

README.md (CHANGED)
@@ -1,7 +1,6 @@
 ---
-
-
-- unsloth
+base_model: unsloth/meta-llama-3.1-8b-bnb-4bit
+library_name: peft
 ---
 
 # Model Card for Model ID
@@ -16,7 +15,7 @@ tags:
 
 <!-- Provide a longer summary of what this model is. -->
 
-
+
 
 - **Developed by:** [More Information Needed]
 - **Funded by [optional]:** [More Information Needed]
@@ -197,4 +196,7 @@ Carbon emissions can be estimated using the [Machine Learning Impact calculator]
 
 ## Model Card Contact
 
-[More Information Needed]
+[More Information Needed]
+### Framework versions
+
+- PEFT 0.12.0
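The updated front matter declares `library_name: peft` and points `base_model` at `unsloth/meta-llama-3.1-8b-bnb-4bit`, and the appended "Framework versions" section records PEFT 0.12.0. A minimal loading sketch consistent with that metadata is below; the adapter repo id is a placeholder (the commit does not name the repository), and loading the 4-bit base assumes `bitsandbytes` is available.

```python
# Sketch only: load the quantized base model named in `base_model` and attach
# the PEFT adapter stored in this repository.
# "your-user/your-adapter" is a placeholder repo id, not taken from the commit.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

BASE_ID = "unsloth/meta-llama-3.1-8b-bnb-4bit"  # from the README front matter
ADAPTER_ID = "your-user/your-adapter"           # placeholder

tokenizer = AutoTokenizer.from_pretrained(ADAPTER_ID)
base = AutoModelForCausalLM.from_pretrained(BASE_ID, device_map="auto")
model = PeftModel.from_pretrained(base, ADAPTER_ID)  # README records PEFT 0.12.0
model.eval()
```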
special_tokens_map.json (CHANGED)

@@ -13,11 +13,5 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "<|finetune_right_pad_id|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "pad_token": "<|end_of_text|>"
 }
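The pad token entry changes from a full added-token specification for `<|finetune_right_pad_id|>` to the bare-string form `"<|end_of_text|>"`. As a rough illustration (an assumption about usage, not part of the commit), the same string-valued pad token can be set on a loaded tokenizer like this; because the token already exists in the Llama 3.1 vocabulary, nothing new is added to the vocab.

```python
# Sketch (assumption): assign "<|end_of_text|>" as the pad token on a tokenizer
# loaded from the base model named in the README. The token is already in the
# vocabulary, so this only records it as the pad token.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("unsloth/meta-llama-3.1-8b-bnb-4bit")
tok.add_special_tokens({"pad_token": "<|end_of_text|>"})
print(tok.pad_token, tok.pad_token_id)
```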
tokenizer_config.json (CHANGED)

@@ -2057,7 +2057,7 @@
     "attention_mask"
   ],
   "model_max_length": 131072,
-  "pad_token": "<|
-  "padding_side": "
+  "pad_token": "<|end_of_text|>",
+  "padding_side": "right",
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
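With `pad_token` and `padding_side` now written into tokenizer_config.json, a freshly loaded tokenizer should pad batches on the right with `<|end_of_text|>`. A quick check is sketched below; the repo id is a placeholder for wherever these files were uploaded, and `return_tensors="pt"` assumes PyTorch is installed.

```python
# Sketch: confirm the uploaded tokenizer config is picked up at load time.
# "your-user/your-adapter" is a placeholder repo id.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-user/your-adapter")
assert tok.pad_token == "<|end_of_text|>"
assert tok.padding_side == "right"

batch = tok(["short prompt", "a noticeably longer prompt"],
            padding=True, return_tensors="pt")
# The shorter row is padded on the right with tok.pad_token_id.
print(batch["input_ids"])
print(batch["attention_mask"])
```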