Commit
•
c8263f5
1
Parent(s):
50ac599
Update README.md
Browse files
README.md
CHANGED
@@ -53,12 +53,12 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
|
|
53 |
|
54 |
# load base LLM model and tokenizer
|
55 |
model = AutoModelForCausalLM.from_pretrained(
|
56 |
-
"philschmid/llama-7b-instruction-generator",
|
57 |
low_cpu_mem_usage=True,
|
58 |
torch_dtype=torch.float16,
|
59 |
load_in_4bit=True,
|
60 |
)
|
61 |
-
tokenizer = AutoTokenizer.from_pretrained("philschmid/llama-7b-instruction-generator")
|
62 |
|
63 |
prompt = f"""### Instruction:
|
64 |
Use the Input below to create an instruction, which could have been used to generate the input using an LLM.
|
|
|
53 |
|
54 |
# load base LLM model and tokenizer
|
55 |
model = AutoModelForCausalLM.from_pretrained(
|
56 |
+
"philschmid/llama-2-7b-instruction-generator",
|
57 |
low_cpu_mem_usage=True,
|
58 |
torch_dtype=torch.float16,
|
59 |
load_in_4bit=True,
|
60 |
)
|
61 |
+
tokenizer = AutoTokenizer.from_pretrained("philschmid/llama-2-7b-instruction-generator")
|
62 |
|
63 |
prompt = f"""### Instruction:
|
64 |
Use the Input below to create an instruction, which could have been used to generate the input using an LLM.
|