Delete app-ss.py
app-ss.py
DELETED
@@ -1,211 +0,0 @@
import threading
import queue
import os
import json   # used for building the SSE payloads below
import time   # used for the keep-alive timing below

from flask import Flask, url_for, redirect
from flask import request as req
from flask_cors import CORS
from werkzeug.utils import secure_filename
from PIL import Image

import helpers.helper as helper
from helpers.provider import *
from utils.llms import gpt4, gpt4stream
from utils.functions import allocate

app = Flask(__name__)
CORS(app)

# find . -maxdepth 1 -type f -mmin -10
# docker run dezsh/inlets client --url=wss://inlets-testing-secret.onrender.com --upstream=http://192.168.1.8:1331 --token=secret --insecure
# QxYciPJQwwfb1zBu
# corcel nineteen AI

app.config['UPLOAD_FOLDER'] = "static"


# OpenAI-style chat completions endpoint (also mounted at the root path).
@app.route("/v1/chat/completions", methods=['POST'])
@app.route("/chat/completions", methods=['POST'])
@app.route("/", methods=['POST'])
def chat_completions2():
    # Collect everything the client sent, mainly for debugging.
    all_request_data = {}
    all_request_data['json'] = req.get_json(silent=True) or {}
    all_request_data['headers'] = dict(req.headers)
    all_request_data['args'] = req.args.to_dict(flat=False)
    all_request_data['form'] = req.form.to_dict(flat=False)
    try:
        all_request_data['raw_data'] = req.data.decode('utf-8')
    except Exception:
        all_request_data['raw_data'] = f"Could not decode raw data (length: {len(req.data)})"

    # # --- Now you can access your original values from this dict ---
    # print("--- Consolidated Request Data ---")
    # print(json.dumps(all_request_data, indent=2))
    # print("--------------------------------")

    streaming = req.json.get('stream', False)
    model = req.json.get('model', 'gpt-4-turbo')
    messages = req.json.get('messages')
    api_keys = (req.headers.get('Authorization') or '').replace('Bearer ', '')  # tolerate a missing header
    functions = req.json.get('functions')
    tools = req.json.get('tools')
    response_format = req.json.get('response_format')

    if streaming:
        helper.stopped = True

    if tools is not None:
        allocate(messages, api_keys, model, tools)
    else:
        allocate(messages, api_keys, model, [])

    t = time.time()

    def stream_response(messages, model, api_keys="", functions=[], tools=[]):
        helper.q = queue.Queue()  # queue the worker thread fills with response lines
        if helper.stopped:
            helper.stopped = False

        # Run the generation in a background thread and drain the queue as SSE events.
        threading.Thread(target=gpt4stream, args=(messages, model, api_keys)).start()

        started = False
        while True:  # loop until the worker signals the end of the stream
            try:
                # Emit keep-alive text while waiting for the first real token.
                if 20 > time.time() - t > 18 and not started:
                    yield 'data: %s\n\n' % json.dumps(helper.streamer("> Thinking"), separators=(',', ':'))
                    time.sleep(2)
                elif time.time() - t > 20 and not started:
                    yield 'data: %s\n\n' % json.dumps(helper.streamer("."), separators=(',', ':'))
                    time.sleep(1)
                if time.time() - t > 100 and not started:
                    yield 'data: %s\n\n' % json.dumps(helper.streamer("Still Thinking...Do not terminate"), separators=(',', ':'))
                    break

                line = helper.q.get(block=False)

                if "RESULT: " in line:
                    line = line.replace("RESULT: ", "")
                    if tools is not None:
                        yield f'data: {json.dumps(helper.stream_func(line, "tools"))}\n\n'
                    else:
                        yield f'data: {json.dumps(helper.end())}\n\n'
                    break

                if line == "END":
                    yield f'data: {json.dumps(helper.end())}\n\n'
                    break

                if not started:
                    started = True
                    yield 'data: %s\n\n' % json.dumps(helper.streamer("\n\n"), separators=(',', ':'))

                yield 'data: %s\n\n' % json.dumps(helper.streamer(line), separators=(',', ':'))

                helper.q.task_done()  # mark the task as done

            except queue.Empty:
                pass
            except Exception as e:
                print(e)

    if not streaming:
        if functions is not None:
            k = gpt4(messages, None, model)
            return helper.func_output(k, "functions")
        elif tools is not None:
            k = gpt4(messages, None, model)
            return helper.func_output(k, "tools")
        else:
            print("USING GPT_4 NO STREAM")
            print(model)
            k = gpt4(messages, response_format, model)
            return helper.output(k)

    elif streaming:
        return app.response_class(stream_response(messages, model, api_keys, functions, tools), mimetype='text/event-stream')


@app.route('/upload', methods=['GET', 'POST'])
def index():
    # On POST, handle the file upload.
    if req.method == 'POST':
        if 'file' not in req.files:
            return redirect('/')

        file = req.files['file']

        if file:
            filename = secure_filename(file.filename)
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            # Downscale obvious photos and screenshots before serving them back.
            if ("camera" in file.filename or "capture" in file.filename
                    or "IMG" in file.filename or "Screenshot" in file.filename):
                img = Image.open(f"static/{filename}")
                img.thumbnail((512, 512), Image.Resampling.LANCZOS)
                img.save(f"static/{filename}")

            return filename

    # List the files in the upload directory as links.
    file_list = ''
    for f in os.listdir(app.config['UPLOAD_FOLDER']):
        link = url_for("static", filename=f)
        file_list = file_list + '<li><a href="%s">%s</a></li>' % (link, f)

    # Return HTML that allows file upload and lists all available files.
    return_html = '''
    <!doctype html>
    <title>Upload File</title>
    <h1>Upload File</h1>
    <form method=post enctype=multipart/form-data>
    <input type=file name=file><br>
    <input type=submit value=Upload>
    </form>
    <hr>
    <h1>Files</h1>
    <ol>%s</ol>
    ''' % file_list

    return return_html


@app.route('/')
def yellow_name():
    return 'Hello world!'


# OpenAI-style model listing.
@app.route("/v1/models")
@app.route("/models")
def models():
    print("Models")
    return helper.model


if __name__ == '__main__':
    config = {
        'host': '0.0.0.0',
        'port': 1337,
        'debug': True,
    }

    app.run(**config)
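
For reference, a minimal client sketch for the chat-completions route the deleted file exposed. This is not part of the file above: the base URL, the placeholder API key, and the `requests` dependency are assumptions, with the URL matching the app.run() config (host 0.0.0.0, port 1337).

import requests

BASE_URL = "http://localhost:1337"                # assumption: server running locally with the config above
HEADERS = {"Authorization": "Bearer YOUR_KEY"}    # placeholder token; the route strips the "Bearer " prefix

payload = {
    "model": "gpt-4-turbo",                       # default the route falls back to if omitted
    "messages": [{"role": "user", "content": "Hello"}],
    "stream": False,
}

# Non-streaming call: the route returns a single completion payload.
resp = requests.post(f"{BASE_URL}/v1/chat/completions", json=payload, headers=HEADERS)
print(resp.text)

# Streaming call: the route answers with text/event-stream ("data: {...}" lines).
with requests.post(f"{BASE_URL}/v1/chat/completions",
                   json={**payload, "stream": True},
                   headers=HEADERS, stream=True) as r:
    for line in r.iter_lines():
        if line:
            print(line.decode("utf-8"))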
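Similarly, a minimal sketch for the /upload route; the host/port and local file name are placeholders, and the multipart field must be named "file" as in the route above.

import requests

with open("example.png", "rb") as fh:  # placeholder local file
    resp = requests.post("http://localhost:1337/upload", files={"file": fh})

# On success the route responds with the sanitized filename, which is then
# served from the static/ upload folder.
print(resp.text)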