Ashhar
committed on
Commit
·
be41821
1
Parent(s):
ceec376
reduced temp
Browse files
app.py
CHANGED
@@ -136,7 +136,7 @@ def predict(prompt):
|
|
136 |
response = client.chat.completions.create(
|
137 |
model="llama-3.1-70b-versatile",
|
138 |
messages=historyFormatted,
|
139 |
-
temperature=
|
140 |
max_tokens=4000,
|
141 |
stream=True
|
142 |
)
|
|
|
136 |
response = client.chat.completions.create(
|
137 |
model="llama-3.1-70b-versatile",
|
138 |
messages=historyFormatted,
|
139 |
+
temperature=0.8,
|
140 |
max_tokens=4000,
|
141 |
stream=True
|
142 |
)
|