KingNish committed
Commit 166c44c
1 Parent(s): b7d0207

Update chatbot.py

Files changed (1)
  1. chatbot.py +2 -2
chatbot.py CHANGED
@@ -207,7 +207,7 @@ def llava(user_prompt, chat_history):
     return inputs
 
 # Initialize inference clients for different models
-client_gemma = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")
+client_mistral = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")
 client_mixtral = InferenceClient("NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO")
 client_llama = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
 client_mistral_nemo = InferenceClient("mistralai/Mistral-Nemo-Instruct-2407")
@@ -247,7 +247,7 @@ def model_inference( user_prompt, chat_history):
     message_text = message["text"]
     func_caller.append({"role": "user", "content": f'[SYSTEM]You are a helpful assistant. You have access to the following functions: \n {str(functions_metadata)}\n\nTo use these functions respond with:\n<functioncall> {{ "name": "function_name", "arguments": {{ "arg_1": "value_1", "arg_1": "value_1", ... }} }} </functioncall> [USER] {message_text}'})
 
-    response = client_mistral_nemo.chat_completion(func_caller, max_tokens=200)
+    response = client_mistral.chat_completion(func_caller, max_tokens=200)
     response = str(response)
     try:
         response = response[response.find("{"):response.index("</")]
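The net effect of the commit: the handle client_gemma, which already pointed at mistralai/Mistral-7B-Instruct-v0.3, is renamed to client_mistral, and the function-calling request is routed to that client instead of client_mistral_nemo (Mistral-Nemo-Instruct-2407). A minimal sketch of the resulting call pattern, assuming huggingface_hub is installed and a Hugging Face token is available from the environment; the example message below is illustrative and not taken from chatbot.py:

from huggingface_hub import InferenceClient

# Model ID copied from the diff; authentication is assumed to come from the
# environment (for example a token configured via `huggingface-cli login`).
client_mistral = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")

# Placeholder chat history; in chatbot.py this list (func_caller) also carries
# a [SYSTEM] prompt describing the available functions.
func_caller = [{"role": "user", "content": "What is the capital of France?"}]

# Same call shape as in the diff: a messages list plus max_tokens.
response = client_mistral.chat_completion(func_caller, max_tokens=200)
print(str(response))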