Ashhar committed on
Commit
be41821
·
1 Parent(s): ceec376

reduced temp

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -136,7 +136,7 @@ def predict(prompt):
136
  response = client.chat.completions.create(
137
  model="llama-3.1-70b-versatile",
138
  messages=historyFormatted,
139
- temperature=1.2,
140
  max_tokens=4000,
141
  stream=True
142
  )
 
136
  response = client.chat.completions.create(
137
  model="llama-3.1-70b-versatile",
138
  messages=historyFormatted,
139
+ temperature=0.8,
140
  max_tokens=4000,
141
  stream=True
142
  )