Update README.md
README.md
CHANGED
@@ -28,3 +28,25 @@ generated_quote = pipe(prompt,top_k=2, temperature=2.0,repetition_penalty=2.0)[0
 print('\n\n', generated_quote)
 </code>
 </pre>
+
+# Streaming option
+
+<pre>
+<code>
+<span style="color:#4CAF50">from</span> transformers <span style="color:#4CAF50">import</span> AutoModelForCausalLM, AutoTokenizer, TextStreamer, pipeline
+streamer = TextStreamer(tokenizer, skip_prompt=True)
+pipe = pipeline(
+    "text-generation",
+    model=model,
+    tokenizer=tokenizer,
+    max_length=40,
+    temperature=0.6,
+    pad_token_id=tokenizer.eos_token_id,
+    top_p=0.95,
+    repetition_penalty=1.2,
+    streamer=streamer
+)
+pipe("write a quote based on war and business")
+
+</code>
+</pre>
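
The added snippet refers to `model` and `tokenizer` objects created earlier in the README, which this diff does not show. As a point of reference, a minimal self-contained sketch of the same streaming setup might look like the following; the `gpt2` checkpoint is only a placeholder, not the model the README actually uses:

<pre>
<code>
from transformers import AutoModelForCausalLM, AutoTokenizer, TextStreamer, pipeline

# Placeholder checkpoint; substitute the model and tokenizer loaded earlier in the README.
model = AutoModelForCausalLM.from_pretrained("gpt2")
tokenizer = AutoTokenizer.from_pretrained("gpt2")

# TextStreamer prints tokens to stdout as they are generated;
# skip_prompt=True keeps the input prompt out of the streamed output.
streamer = TextStreamer(tokenizer, skip_prompt=True)

pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_length=40,
    temperature=0.6,
    pad_token_id=tokenizer.eos_token_id,
    top_p=0.95,
    repetition_penalty=1.2,
    streamer=streamer,
)

pipe("write a quote based on war and business")
</code>
</pre>

Passing the streamer through the pipeline forwards it to `model.generate()`, so the quote is printed token by token instead of only after generation finishes. Note that `temperature` and `top_p` only influence the output when sampling is enabled (`do_sample=True`), which the snippet leaves at its default.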