Vladislawoo committed
Commit af40b26 · 1 Parent(s): 683cfcc

Update app.py

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -54,7 +54,7 @@ def predict_text(text):
 
 
 def generate_text(model, prompt, max_length=150, temperature=1.0, num_beams=10, top_k=600, top_p=0.75, no_repeat_ngram_size=1, num_return_sequences=1):
-    input_ids = tokenizer.encode(prompt, return_tensors='pt').to(device)
+    input_ids = tok.encode(prompt, return_tensors='pt').to(device)
 
     with torch.inference_mode():
         output = model.generate(
@@ -68,7 +68,7 @@ def generate_text(model, prompt, max_length=150, temperature=1.0, num_beams=10,
             no_repeat_ngram_size=no_repeat_ngram_size,
             num_return_sequences=num_return_sequences
         )
-    texts = [textwrap.fill(tokenizer.decode(out), 60) for out in output]
+    texts = [textwrap.fill(tok.decode(out), 60) for out in output]
     return "\n------------------\n".join(texts)
 
 
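For context (not part of the commit): the change renames the tokenizer reference from tokenizer to tok inside generate_text, presumably to match the module-level variable name used elsewhere in app.py. Below is a minimal sketch of how the updated function could be wired up end to end. The checkpoint name, the do_sample flag, the generate() arguments elided between the two hunks, and the module-level tok / model / device assignments are assumptions for illustration, not taken from this repository.

# Hypothetical surrounding code for the updated generate_text(); names marked
# "assumed" are not shown in the diff and are placeholders for illustration.
import textwrap

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

device = "cuda" if torch.cuda.is_available() else "cpu"
checkpoint = "gpt2"  # assumed; the actual model used by app.py is not shown in this diff
tok = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint).to(device)

def generate_text(model, prompt, max_length=150, temperature=1.0, num_beams=10,
                  top_k=600, top_p=0.75, no_repeat_ngram_size=1, num_return_sequences=1):
    # Encode the prompt with the module-level tokenizer `tok` (the name this commit switches to).
    input_ids = tok.encode(prompt, return_tensors='pt').to(device)

    with torch.inference_mode():
        output = model.generate(
            input_ids,
            max_length=max_length,          # arguments between the two hunks are assumed
            temperature=temperature,
            num_beams=num_beams,
            top_k=top_k,
            top_p=top_p,
            do_sample=True,                 # assumed, so temperature/top_k/top_p take effect
            no_repeat_ngram_size=no_repeat_ngram_size,
            num_return_sequences=num_return_sequences
        )
    # Wrap each decoded sequence to 60 columns and join them with a visual separator.
    texts = [textwrap.fill(tok.decode(out), 60) for out in output]
    return "\n------------------\n".join(texts)

# Example call (hypothetical prompt):
print(generate_text(model, "Once upon a time", max_length=60, num_return_sequences=2))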