lIlBrother committed
Commit acb6b59 • 1 Parent(s): 52007f1
Update: clean up usage examples
README.md CHANGED

```diff
@@ -87,19 +87,15 @@ Just using `evaluate-metric/bleu` and `evaluate-metric/rouge` in huggingface `evaluate`
 from transformers.pipelines import Text2TextGenerationPipeline
 from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
 texts = ["그러게 누가 여섯시까지 술을 마시래?"]
-tokenizer = AutoTokenizer.from_pretrained(
-
-)
-model = AutoModelForSeq2SeqLM.from_pretrained(
-    args.model_name_or_path,
-)
+tokenizer = AutoTokenizer.from_pretrained("lIlBrother/ko-TextNumbarT")
+model = AutoModelForSeq2SeqLM.from_pretrained("lIlBrother/ko-TextNumbarT")
 seq2seqlm_pipeline = Text2TextGenerationPipeline(model=model, tokenizer=tokenizer)
 kwargs = {
-    "min_length":
-    "max_length":
-    "num_beams":
-    "do_sample":
-    "num_beam_groups":
+    "min_length": 0,
+    "max_length": 1206,
+    "num_beams": 100,
+    "do_sample": False,
+    "num_beam_groups": 1,
 }
 pred = seq2seqlm_pipeline(texts, **kwargs)
 print(pred)
```
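For reference, here is a minimal sketch of the usage example as it reads after this commit, consolidated into one runnable snippet with comments. The note about the output shape reflects the general behavior of `Text2TextGenerationPipeline` (a list with one `generated_text` dict per input) and is an assumption on my part, not something stated in the diff.

```python
# Consolidated from the updated README lines (87-101); comments added.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
from transformers.pipelines import Text2TextGenerationPipeline

texts = ["그러게 누가 여섯시까지 술을 마시래?"]

# Load the published checkpoint named in the diff.
tokenizer = AutoTokenizer.from_pretrained("lIlBrother/ko-TextNumbarT")
model = AutoModelForSeq2SeqLM.from_pretrained("lIlBrother/ko-TextNumbarT")

seq2seqlm_pipeline = Text2TextGenerationPipeline(model=model, tokenizer=tokenizer)

# These generation arguments are forwarded by the pipeline to model.generate().
kwargs = {
    "min_length": 0,
    "max_length": 1206,
    "num_beams": 100,
    "do_sample": False,
    "num_beam_groups": 1,
}

pred = seq2seqlm_pipeline(texts, **kwargs)
# Typically a list with one dict per input text, e.g. [{"generated_text": "..."}].
print(pred)
```

With `do_sample` set to `False` and `num_beam_groups` at 1, the call performs plain beam search, so the output is deterministic for a given checkpoint.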