# NOTE(review): stripped non-Python residue (file-browser metadata, commit
# hashes, and a line-number gutter) that had been pasted into the file and
# made it unparseable.
from flask import Flask, request, Response, stream_with_context
import requests

app = Flask(__name__)

# Base URL of the upstream API every request is forwarded to.
TARGET_API = "https://meow.cablyai.com"

# Inbound-path prefix -> upstream-path prefix rewrites (first match wins).
# Currently identity mappings, kept as the hook for future rerouting.
PATH_MAPPINGS = {
    "/v1/chat": "/v1/chat",
    "/v1/models": "/v1/models",
}
@app.route('/<path:path>', methods=['GET', 'POST'])
def proxy(path):
    """Forward an incoming GET/POST request to TARGET_API and stream back the reply.

    The inbound path is optionally rewritten via PATH_MAPPINGS, the original
    query string and headers (minus hop-by-hop/length headers) are preserved,
    and the upstream body is relayed chunk-by-chunk so streaming responses
    are not buffered.

    Args:
        path: The URL path captured by the catch-all route (no leading slash).

    Returns:
        A Flask ``Response`` streaming the upstream body with the upstream
        status code and (filtered) headers.
    """
    full_path = f"/{path}"

    # Rewrite the first matching path prefix only (first match wins).
    for original_path, new_path in PATH_MAPPINGS.items():
        if full_path.startswith(original_path):
            full_path = full_path.replace(original_path, new_path, 1)
            break

    target_url = f"{TARGET_API}{full_path}"

    # Drop 'Host' so requests sets it for the upstream server, and drop
    # 'Content-Length' so it is recomputed for the body we actually send.
    # (Case-insensitive compare: HTTP header names are case-insensitive.)
    headers = {
        key: value
        for key, value in request.headers
        if key.lower() not in ('host', 'content-length')
    }

    if request.method == 'POST':
        # Forward the raw body bytes unchanged. The previous version used
        # json=request.get_json(silent=True), which silently dropped any
        # non-JSON body and re-serialized JSON payloads (while still
        # forwarding the client's original Content-Length header).
        response = requests.post(
            target_url,
            headers=headers,
            data=request.get_data(),
            params=request.args,
            stream=True,
        )
    else:  # GET — the route only permits GET and POST.
        response = requests.get(
            target_url,
            headers=headers,
            params=request.args,
            stream=True,
        )

    def generate():
        # Relay the upstream body in 8 KiB chunks (keeps SSE/chat streams live).
        for chunk in response.iter_content(chunk_size=8192):
            yield chunk

    proxy_response = Response(
        stream_with_context(generate()),
        status=response.status_code,
    )

    # Copy upstream headers, excluding length/hop-by-hop headers that the
    # WSGI layer must manage itself for the re-chunked stream.
    for key, value in response.headers.items():
        if key.lower() not in ('content-length', 'transfer-encoding', 'connection'):
            proxy_response.headers[key] = value

    return proxy_response
@app.route('/', methods=['GET'])
def index():
    """Health-check endpoint confirming the proxy process is up."""
    # NOTE(review): the message mentions DeepInfra but TARGET_API points at
    # cablyai.com — confirm which upstream this proxy is actually for.
    return "API Proxy for DeepInfra is running."
if __name__ == '__main__':
    # Listen on all interfaces; port 7860 is the conventional Spaces port.
    app.run(host='0.0.0.0', port=7860, debug=False)