Ocillus committed · verified
Commit 44febf2 · 1 Parent(s): d1e3749

Update app.py

Files changed (1)
  1. app.py +5 -4
app.py CHANGED
@@ -169,13 +169,14 @@ def openai_api_call(messages, retries=3, delay=5, online=True):
                 messages[-1]['content'] = "[System: SEARCH when the user ASKED A QUESTION & remember to CITE(the source is the first tag). Otherwise do not search];" + messages[-1]['content']
 
                 completion = client.chat.completions.create(
-                    model="meta-llama/llama-3.2-11b-vision-instruct:free",
+                    model="cognitivecomputations/dolphin3.0-r1-mistral-24b:free",
                     messages=messages,
                     functions=function_list,
                     function_call='auto',
                     timeout=10
                 )
-                response_message = completion.choices[0].message
+                print(completion.choices[0].message.content)
+                response_message = completion.choices[0].message.content
 
                 # Check if the model wants to call a function
                 if response_message.function_call:
@@ -199,7 +200,7 @@ def openai_api_call(messages, retries=3, delay=5, online=True):
                 if attempt < retries - 1:
                     time.sleep(delay)
                 else:
-                    return "Sorry, I am having trouble connecting to the server. Please try again later."
+                    return "Sorry, I am having trouble connecting to the server. Please try again later.--Or you can go to Settings > Online and then choose Offline Version by using Ollama and run locally."
 
         return "Failed to get a response after multiple attempts."
     else:
@@ -472,7 +473,7 @@ async () => {
        };
    }
    """) as demo:
-    gr.Markdown("# ArcanaUI v0.8")
+    gr.Markdown("# ArcanaUI v1")
     with gr.Tabs():
         with gr.TabItem("Welcome Page"):
             with open('introduction.txt',mode='r') as file:
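
For readers skimming the hunks above, here is a minimal, self-contained sketch of the pattern this part of app.py appears to follow: a retry loop around an OpenAI-compatible chat completion that uses legacy function calling (functions plus function_call='auto'). The endpoint URL, API key, function schema, and the helper name call_model below are illustrative assumptions, not values from this repository; only the model slug and the fallback strings come from the diff.

    import time

    from openai import OpenAI

    # Assumed OpenAI-compatible endpoint and placeholder key -- not from this repo.
    client = OpenAI(base_url="https://openrouter.ai/api/v1", api_key="YOUR_API_KEY")

    # Hypothetical schema standing in for the repository's function_list.
    function_list = [{
        "name": "search",
        "description": "Search indexed documents and return passages to cite.",
        "parameters": {
            "type": "object",
            "properties": {"query": {"type": "string"}},
            "required": ["query"],
        },
    }]

    def call_model(messages, retries=3, delay=5):
        for attempt in range(retries):
            try:
                completion = client.chat.completions.create(
                    model="cognitivecomputations/dolphin3.0-r1-mistral-24b:free",  # slug introduced by this commit
                    messages=messages,
                    functions=function_list,
                    function_call="auto",
                    timeout=10,
                )
                message = completion.choices[0].message
                if message.function_call:
                    # The model asked for a tool call: name plus JSON-encoded arguments.
                    return f"{message.function_call.name}({message.function_call.arguments})"
                # Plain text reply; .content is what the commit now stores in response_message.
                return message.content
            except Exception:
                if attempt < retries - 1:
                    time.sleep(delay)  # back off before retrying
                else:
                    return ("Sorry, I am having trouble connecting to the server. "
                            "Please try again later.")
        return "Failed to get a response after multiple attempts."

    print(call_model([{"role": "user", "content": "What does introduction.txt cover?"}]))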