NCTCMumbai committed on
Commit
74a4f1f
·
verified ·
1 Parent(s): 0c44612

Update backend/query_llm.py

Browse files
Files changed (1) hide show
  1. backend/query_llm.py +2 -2
backend/query_llm.py CHANGED
@@ -211,7 +211,7 @@ def generate_gemini(prompt: str, history: str, temperature: float = 0.9, max_new
211
  except Exception as e:
212
  if "Too Many Requests" in str(e):
213
  print("ERROR: Too many requests on Mistral client")
214
- gr.Warning("Unfortunately Mistral is unable to process")
215
  return "Unfortunately, I am not able to process your request now."
216
  elif "Authorization header is invalid" in str(e):
217
  print("Authetification error:", str(e))
@@ -219,7 +219,7 @@ def generate_gemini(prompt: str, history: str, temperature: float = 0.9, max_new
219
  return "Authentication error"
220
  else:
221
  print("Unhandled Exception:", str(e))
222
- gr.Warning("Unfortunately Mistral is unable to process")
223
  return "I do not know what happened, but I couldn't understand you."
224
 
225
 
 
211
  except Exception as e:
212
  if "Too Many Requests" in str(e):
213
  print("ERROR: Too many requests on Mistral client")
214
+ gr.Warning("Unfortunately Gemini is unable to process..Too many requests")
215
  return "Unfortunately, I am not able to process your request now."
216
  elif "Authorization header is invalid" in str(e):
217
  print("Authetification error:", str(e))
 
219
  return "Authentication error"
220
  else:
221
  print("Unhandled Exception:", str(e))
222
+ gr.Warning("Unfortunately Gemini is unable to process")
223
  return "I do not know what happened, but I couldn't understand you."
224
 
225