---
library_name: peft
---
## Inference
```python
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
from peft import PeftModel

# Load the base model first; the LoRA adapter is applied on top of it
base_model = AutoModelForCausalLM.from_pretrained(
    'meta-llama/Llama-2-7b-chat-hf',
    trust_remote_code=True,
    device_map="auto",
    torch_dtype=torch.float16,  # optional if you have enough VRAM
)
tokenizer = AutoTokenizer.from_pretrained('meta-llama/Llama-2-7b-chat-hf')

# PeftModel.from_pretrained takes the base model plus the adapter repo id
model = PeftModel.from_pretrained(base_model, 'FinGPT/fingpt-forecaster_dow30_llama2-7b_lora')
model = model.eval()
```
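With the adapter loaded, generation follows the standard `generate` workflow. The sketch below is illustrative: the prompt string is a placeholder (the forecaster expects its own prompt format, not shown here) and `max_new_tokens=256` is an assumed value, not one taken from this repository.

```python
# Hypothetical prompt; replace with a properly formatted forecaster prompt
prompt = "[INST] What is the price outlook for AAPL next week? [/INST]"

inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
with torch.no_grad():
    output = model.generate(**inputs, max_new_tokens=256)  # illustrative length
print(tokenizer.decode(output[0], skip_special_tokens=True))
```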
### Framework versions

- PEFT 0.5.0