Ocillus committed on
Commit
428cef3
·
verified ·
1 Parent(s): c27ff7d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +63 -42
app.py CHANGED
@@ -10,10 +10,13 @@ from nylon import *
10
  import pandas as pd
11
  import json
12
  import fiber
 
13
  print(f"Gradio version: {gr.__version__}")
14
 
15
  foldername = 'Celsiaaa'
16
  dbmsmode = 'Fiber'
 
 
17
 
18
  try:
19
  with open('settings.arcana',mode='r') as file:
@@ -138,49 +141,67 @@ def execute_function(function_name, function_args):
138
 
139
  mapsearchmode = ['always', 'auto', 'none']
140
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
141
  def openai_api_call(messages, retries=3, delay=5):
142
- global search_mode # Declare search_mode as a global variable
143
-
144
- for attempt in range(retries):
145
- try:
146
- # Modify the user's message if search_mode is 0
147
- if search_mode == 0:
148
- messages[-1]['content'] = "[System: SEARCH when the user ASKED A QUESTION & remember to CITE(the source is the first tag). Otherwise do not search];" + messages[-1]['content']
149
-
150
- completion = client.chat.completions.create(
151
- model="gpt-3.5-turbo",
152
- messages=messages,
153
- functions=function_list,
154
- function_call='auto',
155
- timeout=10
156
- )
157
- response_message = completion.choices[0].message
158
-
159
- # Check if the model wants to call a function
160
- if response_message.function_call:
161
- function_name = response_message.function_call.name
162
- function_args = json.loads(response_message.function_call.arguments)
163
- function_response = execute_function(function_name, function_args)
164
- # Add the function response to the conversation
165
- messages.append(response_message.model_dump()) # The model's request to call the function
166
- messages.append({
167
- "role": "function",
168
- "name": function_name,
169
- "content": json.dumps(function_response)
170
- })
171
- # Make a follow-up call to the model with the function response
172
- return openai_api_call(messages)
173
- else:
174
- return response_message.content
175
-
176
- except Exception as e:
177
- print(f"Attempt {attempt + 1} failed: {e}")
178
- if attempt < retries - 1:
179
- time.sleep(delay)
180
- else:
181
- return "Sorry, I am having trouble connecting to the server. Please try again later."
182
-
183
- return "Failed to get a response after multiple attempts."
 
 
 
184
 
185
 
186
  def handle_search_mode(mode):
 
10
  import pandas as pd
11
  import json
12
  import fiber
13
+ import ollama
14
  print(f"Gradio version: {gr.__version__}")
15
 
16
  foldername = 'Celsiaaa'
17
  dbmsmode = 'Fiber'
18
+ chatmodel = 'llama3.2'
19
+ visionmodel = 'llava'
20
 
21
  try:
22
  with open('settings.arcana',mode='r') as file:
 
141
 
142
  mapsearchmode = ['always', 'auto', 'none']
143
 
144
def ollama_api_call(messages, vision=False):
    """Send the chat history to a local Ollama model and return the reply text.

    Parameters
    ----------
    messages : list[dict]
        Conversation history in the Ollama/OpenAI message format
        ({"role": ..., "content": ...}; a vision turn may additionally
        carry an "images" key with image file paths).
    vision : bool
        When True, route the request to the vision model (`visionmodel`)
        instead of the plain chat model (`chatmodel`).

    Returns
    -------
    str
        The assistant's reply content.
    """
    # Bug fix: the original called ollama.chat() WITHOUT the `messages`
    # argument (so the model never saw the conversation) and then returned
    # the unchanged input `messages` instead of the model's reply.
    model = visionmodel if vision else chatmodel  # module-level model names
    response = ollama.chat(model=model, messages=messages)
    # ollama.chat returns a mapping whose assistant turn lives under
    # "message" -> "content".
    return response['message']['content']
159
def openai_api_call(messages, retries=3, delay=5):
    """Route a chat request to OpenAI when online, else to the local Ollama model.

    Parameters
    ----------
    messages : list[dict]
        Conversation history in OpenAI chat format; the last entry is the
        current user turn and may be rewritten to inject search guidance.
    retries : int
        Number of attempts against the OpenAI API before giving up.
    delay : int
        Seconds to sleep between failed attempts.

    Returns
    -------
    str
        The assistant's reply, or a human-readable error message on failure.
    """
    global search_mode  # toggled elsewhere in the UI; read (not set) here

    if online:
        for attempt in range(retries):
            try:
                # When search_mode == 0, prepend a system hint so the model
                # only searches for genuine questions and cites its source.
                if search_mode == 0:
                    messages[-1]['content'] = "[System: SEARCH when the user ASKED A QUESTION & remember to CITE(the source is the first tag). Otherwise do not search];" + messages[-1]['content']

                completion = client.chat.completions.create(
                    model="gpt-3.5-turbo",
                    messages=messages,
                    functions=function_list,
                    function_call='auto',
                    timeout=10
                )
                response_message = completion.choices[0].message

                # If the model asked to call a tool, execute it and feed the
                # result back via a follow-up call.
                if response_message.function_call:
                    function_name = response_message.function_call.name
                    function_args = json.loads(response_message.function_call.arguments)
                    function_response = execute_function(function_name, function_args)
                    # Record the model's tool request, then the tool's answer.
                    messages.append(response_message.model_dump())
                    messages.append({
                        "role": "function",
                        "name": function_name,
                        "content": json.dumps(function_response)
                    })
                    return openai_api_call(messages)
                return response_message.content

            except Exception as e:
                print(f"Attempt {attempt + 1} failed: {e}")
                if attempt < retries - 1:
                    time.sleep(delay)
                else:
                    return "Sorry, I am having trouble connecting to the server. Please try again later."

        return "Failed to get a response after multiple attempts."

    # Offline: fall back to the local Ollama model.
    # Bug fix: the original did `return ollama_api_call()` with no arguments,
    # which raises TypeError because `messages` is required.
    return ollama_api_call(messages)
205
 
206
 
207
  def handle_search_mode(mode):