herMaster committed on
Commit
c315a78
1 Parent(s): b76f5cf

changing model and adding temperature parameters

Files changed (1)
  app.py  +2 -2
app.py CHANGED
@@ -34,11 +34,11 @@ print("loading the LLM......................................")
  # )

  llm = AutoModelForCausalLM.from_pretrained("TheBloke/Llama-2-7B-Chat-GGUF",
- model_file="llama-2-7b-chat.Q3_K_S.gguf",
+ model_file="llama-2-7b-chat.Q3_K_L.gguf",
  model_type="llama",
  # config = ctransformers.hub.AutoConfig,
  # hf = True
- # temperature = 0.2,
+ temperature = 0.2,
  # max_new_tokens = 1024,
  # stop = ['\n']
  )
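For reference, a minimal sketch of loading and calling the model with the parameters introduced by this commit, assuming the ctransformers AutoModelForCausalLM import used by app.py (the import and prompt text are not part of this hunk):

# Sketch only: mirrors the arguments added in this commit.
from ctransformers import AutoModelForCausalLM

# Load the Q3_K_L quantised Llama-2 7B chat model with a low sampling temperature.
llm = AutoModelForCausalLM.from_pretrained(
    "TheBloke/Llama-2-7B-Chat-GGUF",
    model_file="llama-2-7b-chat.Q3_K_L.gguf",
    model_type="llama",
    temperature=0.2,
)

# ctransformers models are callable with a prompt string and return generated text.
print(llm("Hello, how are you?"))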