hadadrjt committed on
Commit
4943e2d
·
1 Parent(s): 6cd97cb

ai: Prepare stable version for 2.1.1-ft-QwQ-32B.

Browse files
Files changed (1) hide show
  1. jarvis.py +21 -16
jarvis.py CHANGED
@@ -147,21 +147,28 @@ def process_ai_response(ai_text):
147
  return ai_text
148
 
149
  async def fetch_response_async(host, provider_key, selected_model, messages, model_config, session_id):
150
- try:
151
- async with httpx.AsyncClient(timeout=5) as client:
152
- data = {"model": selected_model, "messages": messages, **model_config}
153
- extra = {"optillm_approach": "rto|re2|cot_reflection|self_consistency|plansearch|leap|z3|bon|moa|mcts|mcp|router|privacy|executecode|json", "session_id": session_id}
154
- response = await client.post(f"{host}", json={**data, "extra_body": extra, "session_id": session_id}, headers={"Authorization": f"Bearer {provider_key}"})
155
- response.raise_for_status()
156
- try:
157
- resp_json = response.json()
158
- except json.JSONDecodeError:
 
 
 
 
 
 
 
 
159
  return RESPONSES["RESPONSE_2"]
160
- ai_text = resp_json["choices"][0]["message"]["content"] if resp_json.get("choices") and resp_json["choices"][0].get("message") and resp_json["choices"][0]["message"].get("content") else RESPONSES["RESPONSE_2"]
161
- return process_ai_response(ai_text)
162
- except Exception:
163
- marked_item(provider_key, LINUX_SERVER_PROVIDER_KEYS_MARKED, LINUX_SERVER_PROVIDER_KEYS_ATTEMPTS)
164
- return RESPONSES["RESPONSE_2"]
165
 
166
  async def chat_with_model_async(history, user_input, selected_model_display, sess):
167
  if not get_available_items(LINUX_SERVER_PROVIDER_KEYS, LINUX_SERVER_PROVIDER_KEYS_MARKED) or not get_available_items(LINUX_SERVER_HOSTS, LINUX_SERVER_HOSTS_MARKED):
@@ -208,8 +215,6 @@ async def respond_async(multi_input, history, selected_model_display, sess):
208
  if message["text"]:
209
  combined_input += message["text"]
210
  history.append([combined_input, ""])
211
- if len(history) > 3:
212
- history[:] = history[-3:]
213
  ai_response = await chat_with_model_async(history, combined_input, selected_model_display, sess)
214
  history[-1][1] = ""
215
  def convert_to_string(data):
 
147
  return ai_text
148
 
149
async def fetch_response_async(host, provider_key, selected_model, messages, model_config, session_id):
    """Query the model endpoint, retrying with progressively longer timeouts.

    Each timeout in the ladder gets one attempt; any transport/HTTP error
    advances to the next attempt. A 2xx response whose body is not valid JSON,
    or that lacks a string ``choices[0].message.content``, yields the fallback
    reply immediately. Only when every attempt has failed is the provider key
    marked as exhausted before returning the fallback.
    """
    # Request payload is invariant across attempts, so build it once up front.
    payload = {"model": selected_model, "messages": messages, **model_config}
    extra = {
        "optillm_approach": "rto|re2|cot_reflection|self_consistency|plansearch|leap|z3|bon|moa|mcts|mcp|router|privacy|executecode|json",
        "session_id": session_id,
    }
    request_body = {**payload, "extra_body": extra, "session_id": session_id}
    auth_headers = {"Authorization": f"Bearer {provider_key}"}
    for attempt_timeout in (60, 80, 120, 240):
        try:
            async with httpx.AsyncClient(timeout=attempt_timeout) as client:
                reply = await client.post(host, json=request_body, headers=auth_headers)
                reply.raise_for_status()
                try:
                    parsed = reply.json()
                except json.JSONDecodeError:
                    # Server answered successfully but with a non-JSON body:
                    # no point retrying, hand back the canned fallback.
                    return RESPONSES["RESPONSE_2"]
                # Defensively dig out choices[0].message.content, requiring a
                # string at the end; any structural mismatch means no content.
                content = None
                if isinstance(parsed, dict):
                    choices = parsed.get("choices")
                    if isinstance(choices, list) and choices and isinstance(choices[0], dict):
                        message = choices[0].get("message")
                        if isinstance(message, dict) and isinstance(message.get("content"), str):
                            content = message["content"]
                if content is not None:
                    return process_ai_response(content)
                return RESPONSES["RESPONSE_2"]
        except Exception:
            # Network / HTTP-status failure: retry with the next, longer timeout.
            continue
    # Every attempt failed; flag this key so the provider rotation can skip it.
    marked_item(provider_key, LINUX_SERVER_PROVIDER_KEYS_MARKED, LINUX_SERVER_PROVIDER_KEYS_ATTEMPTS)
    return RESPONSES["RESPONSE_2"]
 
172
 
173
  async def chat_with_model_async(history, user_input, selected_model_display, sess):
174
  if not get_available_items(LINUX_SERVER_PROVIDER_KEYS, LINUX_SERVER_PROVIDER_KEYS_MARKED) or not get_available_items(LINUX_SERVER_HOSTS, LINUX_SERVER_HOSTS_MARKED):
 
215
  if message["text"]:
216
  combined_input += message["text"]
217
  history.append([combined_input, ""])
 
 
218
  ai_response = await chat_with_model_async(history, combined_input, selected_model_display, sess)
219
  history[-1][1] = ""
220
  def convert_to_string(data):