MultiTrickFox committed
Commit 25ebbf6
1 Parent(s): 7f25e3b
Update README.md
README.md CHANGED
@@ -13,12 +13,17 @@ Bloom (2.5 B) Scientific Model fine-tuned on Zen knowledge
 
 
 ```python
+
+
+cuda = False
+
+
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
 tokenizer = AutoTokenizer.from_pretrained("MultiTrickFox/bloom-2b5_Zen")
 model = AutoModelForCausalLM.from_pretrained("MultiTrickFox/bloom-2b5_Zen")
 
-model
+model.cuda() if cuda else None
 tokenizer.pad_token_id = tokenizer.eos_token_id
 
 generator = pipeline('text-generation', model=model, tokenizer=tokenizer)
@@ -26,7 +31,7 @@ generator = pipeline('text-generation', model=model, tokenizer=tokenizer)
 inp = [ """Today""", """Yesterday""" ]
 
 out = generator(
-inp, do_sample=True,
+inp.cuda() if cuda else inp, do_sample=True,
 
 temperature=.6,
 typical_p=.7,