JordiBayarri committed
Commit da51463 • 1 Parent(s): ca43645
Update README.md
README.md CHANGED
@@ -203,7 +203,7 @@ input_ids = tokenizer.apply_chat_template(
 
 terminators = [
     tokenizer.eos_token_id,
-    tokenizer.convert_tokens_to_ids("<|
+    tokenizer.convert_tokens_to_ids("<|im_end|>")
 ]
 
 outputs = model.generate(
@@ -211,8 +211,10 @@ outputs = model.generate(
     max_new_tokens=256,
     eos_token_id=terminators,
     do_sample=True,
-    temperature=0.
-    top_p=0.
+    temperature=0.7,
+    top_p=0.8,
+    top_k=20,
+    repetition_penalty=1.05
 )
 response = outputs[0][input_ids.shape[-1]:]
 print(tokenizer.decode(response, skip_special_tokens=True))
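For context, here is a minimal sketch of how the generation snippet reads after this commit. The diff only covers the `terminators` list and the `model.generate(...)` call, so everything else here (the model ID, imports, and the example prompt) is an assumption based on standard `transformers` usage, not content from the README:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical model ID; substitute the repository this README belongs to.
model_id = "your-org/your-model"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, device_map="auto"
)

# Placeholder prompt; the README's actual example messages are not in the diff.
messages = [{"role": "user", "content": "Hello, how are you?"}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

# Stop generation on the model's EOS token or the ChatML end-of-turn token
# "<|im_end|>", the terminator this commit switches to.
terminators = [
    tokenizer.eos_token_id,
    tokenizer.convert_tokens_to_ids("<|im_end|>"),
]

outputs = model.generate(
    input_ids,
    max_new_tokens=256,
    eos_token_id=terminators,
    do_sample=True,
    temperature=0.7,        # sampling settings introduced by this commit
    top_p=0.8,
    top_k=20,
    repetition_penalty=1.05,
)

# Decode only the newly generated tokens, skipping the prompt.
response = outputs[0][input_ids.shape[-1]:]
print(tokenizer.decode(response, skip_special_tokens=True))
```

The switch to `<|im_end|>` suggests a ChatML-style chat template, and the added `top_k` and `repetition_penalty` values make the recommended sampling configuration explicit rather than leaving it to library defaults.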