frankaging committed
Commit 232f846 · Parent: 4e359c2

remove gated llama2

Files changed (1): app.py (+1, -1)
app.py CHANGED
@@ -28,7 +28,7 @@ if not torch.cuda.is_available():
 
 
 if torch.cuda.is_available():
-    model_id = "meta-llama/Llama-2-7b-chat-hf"
+    model_id = "NousResearch/Llama-2-7b-chat-hf" # not gated version.
     model = AutoModelForCausalLM.from_pretrained(
         model_id, device_map="auto", torch_dtype=torch.bfloat16
     )
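
For context, a minimal self-contained sketch of the loading path this one-line change touches. Only the model_id assignment and the from_pretrained call appear in the commit; the imports and the tokenizer line are assumptions based on a typical transformers chat demo, not part of the diff.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

if torch.cuda.is_available():
    # NousResearch/Llama-2-7b-chat-hf mirrors the same chat weights as
    # meta-llama/Llama-2-7b-chat-hf but is not gated, so the Space can
    # download it without a Hub token or an approved access request.
    model_id = "NousResearch/Llama-2-7b-chat-hf"  # not gated version.
    model = AutoModelForCausalLM.from_pretrained(
        model_id, device_map="auto", torch_dtype=torch.bfloat16
    )
    tokenizer = AutoTokenizer.from_pretrained(model_id)  # assumed companion line, not in the diff

The swap matters because the original meta-llama repository is gated behind Meta's license acceptance on the Hub, so an unauthenticated Space typically fails at download time when calling from_pretrained on it; the ungated mirror avoids needing credentials.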