VictorSanh committed
Commit 1c86537
1 Parent(s): 0f909ad
fix - thanks @joaogante !
README.md CHANGED
@@ -96,7 +96,7 @@ inputs = processor(prompts, return_tensors="pt").to(device)
 # inputs = processor(prompts[0], return_tensors="pt").to(device)
 
 # Generation args
-bad_words_ids = tokenizer(["<image>", "<fake_token_around_image>"], add_special_tokens=False).input_ids
+bad_words_ids = processor.tokenizer(["<image>", "<fake_token_around_image>"], add_special_tokens=False).input_ids
 
 generated_ids = model.generate(**inputs, bad_words_ids=bad_words_ids, max_length=100)
 generated_text = processor.batch_decode(generated_ids, skip_special_tokens=True)
@@ -141,7 +141,7 @@ inputs = processor(prompts, add_end_of_utterance_token=False, return_tensors="pt").to(device)
 
 # Generation args
 exit_condition = processor.tokenizer("<end_of_utterance>", add_special_tokens=False).input_ids
-bad_words_ids = tokenizer(["<image>", "<fake_token_around_image>"], add_special_tokens=False).input_ids
+bad_words_ids = processor.tokenizer(["<image>", "<fake_token_around_image>"], add_special_tokens=False).input_ids
 
 generated_ids = model.generate(**inputs, eos_token_id=exit_condition, bad_words_ids=bad_words_ids, max_length=100)
 generated_text = processor.batch_decode(generated_ids, skip_special_tokens=True)
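For context, here is how the corrected lines slot into the README's generation example. This is a minimal runnable sketch, not part of the commit: the checkpoint name, dtype, and example prompt are assumptions modeled on the IDEFICS model card, and only the two `processor.tokenizer(...)` calls come from this diff.

```python
# Minimal sketch of the corrected generation snippet.
# Assumptions (not in the diff): checkpoint name, bfloat16 dtype, example prompt.
import torch
from transformers import IdeficsForVisionText2Text, AutoProcessor

device = "cuda" if torch.cuda.is_available() else "cpu"
checkpoint = "HuggingFaceM4/idefics-9b-instruct"  # assumed checkpoint

model = IdeficsForVisionText2Text.from_pretrained(checkpoint, torch_dtype=torch.bfloat16).to(device)
processor = AutoProcessor.from_pretrained(checkpoint)

# IDEFICS prompts interleave text with images (URLs or PIL images).
prompts = [
    [
        "User:",
        "https://upload.wikimedia.org/wikipedia/commons/8/86/Id%C3%A9fix.JPG",
        "Describe this image.<end_of_utterance>",
        "\nAssistant:",
    ],
]
inputs = processor(prompts, add_end_of_utterance_token=False, return_tensors="pt").to(device)

# Generation args. The commit's point: the token ids must come from
# processor.tokenizer, since the snippet never creates a standalone `tokenizer`.
exit_condition = processor.tokenizer("<end_of_utterance>", add_special_tokens=False).input_ids
bad_words_ids = processor.tokenizer(["<image>", "<fake_token_around_image>"], add_special_tokens=False).input_ids

generated_ids = model.generate(**inputs, eos_token_id=exit_condition, bad_words_ids=bad_words_ids, max_length=100)
generated_text = processor.batch_decode(generated_ids, skip_special_tokens=True)
print(generated_text[0])
```

Banning `<image>` and `<fake_token_around_image>` via `bad_words_ids` keeps the model from emitting its image-placeholder tokens as text, and `eos_token_id=exit_condition` stops generation at the assistant's `<end_of_utterance>`.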