Remove forgotten print
app.py CHANGED

@@ -28,7 +28,6 @@ def sortu_testua(alderdia, testua, max_tokens, conf, ngram, beams, top_k, top_p)
     tokens = tokenizer(prompt, return_tensors="pt").to(device)
     generation = model.generate(inputs=tokens['input_ids'], attention_mask = tokens['attention_mask'], **options)[0]
     text = tokenizer.decode(generation)
-    print(re.split("\[(.*?)\] ", text))
     return re.split("\[(.*?)\] ", text)[-1]
 
 def sortu_testu_guztiak(testua, max_tokens, conf, ngram, beams, top_k, top_p):
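
For reference, a minimal sketch of what the removed debug print exposed. The tag format and the sample string below are assumptions for illustration only; the Space's actual prompt labels are not shown in this diff.

import re

# Hypothetical generated text with "[TAG] " prefixes (placeholder tags, not the
# Space's real labels).
text = "[PROMPT] some input text [ANSWER] generated answer text"

# With a capturing group, re.split keeps the captured tags in the result:
# ['', 'PROMPT', 'some input text ', 'ANSWER', 'generated answer text']
print(re.split(r"\[(.*?)\] ", text))

# sortu_testua returns only the last segment, i.e. the text after the final tag:
print(re.split(r"\[(.*?)\] ", text)[-1])  # 'generated answer text'

The deleted line was only the debug print of the full split result; the returned value is unchanged by this commit.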