Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -17,7 +17,7 @@ tokenizer = MT5TokenizerFast.from_pretrained(
|
|
17 |
|
18 |
def generate_text(input_text):
|
19 |
input_ids = tokenizer.encode(input_text, return_tensors="pt")
|
20 |
-
output = model.generate(input_ids, max_new_tokens = len(input_text), num_beams =
|
21 |
generated_text = tokenizer.decode(output[0], skip_special_tokens=True, clean_up_tokenization_spaces=True)
|
22 |
return generated_text
|
23 |
|
|
|
17 |
|
18 |
def generate_text(input_text):
    """Run beam-search generation on *input_text* and return the decoded string.

    The new-token budget is twice the whitespace-separated word count of
    the input; `early_stopping` together with a strongly negative
    `length_penalty` biases beam search toward shorter outputs.
    """
    # Tokenize the prompt into a PyTorch tensor of input IDs.
    encoded = tokenizer.encode(input_text, return_tensors="pt")

    # Cap generation at roughly twice the input's word count.
    token_budget = len(input_text.split(' ')) * 2

    # 2-beam search; the negative length_penalty favors short sequences.
    beam_output = model.generate(
        encoded,
        max_new_tokens=token_budget,
        num_beams=2,
        early_stopping=True,
        length_penalty=-5.0,
    )

    # Decode the best beam back into plain text.
    return tokenizer.decode(
        beam_output[0],
        skip_special_tokens=True,
        clean_up_tokenization_spaces=True,
    )
|
23 |
|