Update README.md
---
license: apache-2.0
---

Load the model and tokenizer from the Hugging Face Hub:

````
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("RootYuan/opt-1.3b-alpaca")
model = AutoModelForCausalLM.from_pretrained("RootYuan/opt-1.3b-alpaca")
````
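
Optionally, the model can be loaded in half precision on a CUDA GPU to reduce memory use. This is a sketch of a standard 🤗 Transformers option, not part of the original card; it assumes a CUDA device is available, and the encoded inputs must then be moved to the same device before generation.

````
import torch
from transformers import AutoModelForCausalLM

# Optional: load the weights in float16 on GPU (assumes a CUDA device is available).
model = AutoModelForCausalLM.from_pretrained(
    "RootYuan/opt-1.3b-alpaca", torch_dtype=torch.float16
).to("cuda")
````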

Usage:
````
instruction = "Classify the following into animals, plants, and minerals"
input = "Oak tree, copper ore, elephant"  # set to None when the instruction needs no extra input

# Alpaca-style prompt templates, with and without an input field.
prompts_no_input = f"### Instruction:\n{instruction}\n\n### Response:"
prompts_with_input = f"### Instruction:\n{instruction}\n\n### Input:\n{input}\n\n### Response:"
prompts = prompts_no_input if input is None else prompts_with_input

inputs = tokenizer.encode(prompts, return_tensors="pt")
outputs = model.generate(inputs, max_new_tokens=64)
# Decode without special tokens and drop the echoed prompt, keeping only the response.
ans = tokenizer.decode(outputs[0], skip_special_tokens=True)[len(prompts):]
if input is None:
    print(f"Human: {instruction}")
else:
    print(f"Human: {instruction}\nInput: {input}")
print(f"Assistant: {ans}")
````

Output:

````
Human: Classify the following into animals, plants, and minerals
Input: Oak tree, copper ore, elephant
Assistant: Oak tree: Plant
Copper ore: Mineral
Elephant: Animal
````
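
For repeated prompts, the prompt construction and decoding above can be folded into a small helper. The `generate_response` function below is a hypothetical convenience wrapper (its name and defaults are assumptions, not part of the original card), reusing the `tokenizer` and `model` loaded earlier:

````
def generate_response(instruction, input=None, max_new_tokens=64):
    # Build the Alpaca-style prompt, with or without an input field.
    if input is None:
        prompt = f"### Instruction:\n{instruction}\n\n### Response:"
    else:
        prompt = f"### Instruction:\n{instruction}\n\n### Input:\n{input}\n\n### Response:"
    ids = tokenizer.encode(prompt, return_tensors="pt")
    out = model.generate(ids, max_new_tokens=max_new_tokens)
    # Strip special tokens and the echoed prompt, returning only the model's answer.
    return tokenizer.decode(out[0], skip_special_tokens=True)[len(prompt):].strip()

print(generate_response("Classify the following into animals, plants, and minerals",
                        "Oak tree, copper ore, elephant"))
````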