KingNish committed on
Commit
e5c7201
1 Parent(s): cb9089e

Update chatbot.py

Browse files
Files changed (1) hide show
  1. chatbot.py +2 -1
chatbot.py CHANGED
@@ -218,7 +218,7 @@ image_extensions = Image.registered_extensions()
218
  video_extensions = ("avi", "mp4", "mov", "mkv", "flv", "wmv", "mjpeg", "wav", "gif", "webm", "m4v", "3gp")
219
 
220
  # Initialize inference clients for different models
221
- client_mistral = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")
222
  client_mixtral = InferenceClient("NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO")
223
  client_llama = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
224
  client_mistral_nemo = InferenceClient("mistralai/Mistral-Nemo-Instruct-2407")
@@ -251,6 +251,7 @@ def model_inference(user_prompt, chat_history):
251
 
252
  response = client_mistral.chat_completion(func_caller, max_tokens=200)
253
  response = str(response)
 
254
  try:
255
  response = response[response.find("{"):response.index("</")]
256
  except:
 
218
  video_extensions = ("avi", "mp4", "mov", "mkv", "flv", "wmv", "mjpeg", "wav", "gif", "webm", "m4v", "3gp")
219
 
220
  # Initialize inference clients for different models
221
+ client_mistral = InferenceClient("NousResearch/Hermes-3-Llama-3.1-8B")
222
  client_mixtral = InferenceClient("NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO")
223
  client_llama = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
224
  client_mistral_nemo = InferenceClient("mistralai/Mistral-Nemo-Instruct-2407")
 
251
 
252
  response = client_mistral.chat_completion(func_caller, max_tokens=200)
253
  response = str(response)
254
+ print(response)
255
  try:
256
  response = response[response.find("{"):response.index("</")]
257
  except: