Menouar commited on
Commit
b1d5b7e
1 Parent(s): cd0927a

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +7 -3
README.md CHANGED
@@ -45,7 +45,6 @@ import torch
45
  from peft import AutoPeftModelForCausalLM
46
  from transformers import AutoTokenizer, pipeline
47
 
48
-
49
  # Specify the model ID
50
  peft_model_id = "Menouar/falcon7b-linear-equations"
51
 
@@ -62,8 +61,13 @@ pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
62
 
63
  equation = "Solve for y: 10 + 4y -9y +5 = 4 +8y - 2y + 8 ."
64
 
65
- outputs = pipe(equation, max_new_tokens=172, do_sample=True, temperature=0.1, top_k=50, top_p=0.1,
66
- eos_token_id=pipe.tokenizer.eos_token_id, pad_token_id=pipe.tokenizer.pad_token_id)
67
 
68
  for seq in outputs:
69
  print(f"{seq['generated_text']}")
 
45
  from peft import AutoPeftModelForCausalLM
46
  from transformers import AutoTokenizer, pipeline
47
 
48
  # Specify the model ID
49
  peft_model_id = "Menouar/falcon7b-linear-equations"
50
 
61
 
62
  equation = "Solve for y: 10 + 4y -9y +5 = 4 +8y - 2y + 8 ."
63
 
64
+ outputs = pipe(equation,
65
+ max_new_tokens=172,
66
+ do_sample=True,
67
+ temperature=0.1,
68
+ top_k=50, top_p=0.1,
69
+ eos_token_id=pipe.tokenizer.eos_token_id,
70
+ pad_token_id=pipe.tokenizer.pad_token_id)
71
 
72
  for seq in outputs:
73
  print(f"{seq['generated_text']}")