gnilets committed
Commit ce42a46 · verified · 1 Parent(s): 4372e49

Create app.py

Files changed (1)
  1. app.py +169 -0
app.py ADDED
@@ -0,0 +1,169 @@
+ from functools import partial
+ from os import environ
+ from typing import Callable, Coroutine
+
+ from anyio import create_task_group
+ from fastapi import FastAPI, HTTPException, Request
+ from fastapi.responses import HTMLResponse
+ from fastapi.responses import StreamingResponse
+ from httpx import AsyncClient, RequestError, Timeout
+ from starlette.types import Receive, Scope, Send
+ from datetime import datetime, timedelta
+
+ API_KEYS = [line for line in environ['API_KEYS'].strip().split('\n') if line and line.startswith('sk-')]
+ COMPLETIONS_URL = 'https://openrouter.ai/api/v1/chat/completions'
+ app = FastAPI(title='reverse-proxy')
+
+
+ class Cache:
+     def __init__(self, expire: timedelta):
+         self.expire = expire
+         self.cache = {}
+         self.timestamp = datetime.utcnow()
+
+     async def get(self, key):
+         if datetime.utcnow() - self.timestamp > self.expire:
+             self.cache.clear()
+             self.timestamp = datetime.utcnow()
+         return self.cache.get(key)
+
+     async def set(self, key, value):
+         self.cache[key] = value
+
+
+ cache = Cache(expire=timedelta(hours=1))
+
+
+ def cache_results(func):
+     async def wrapper(*args, **kwargs):
+         cache_key = f"{func.__name__}:{args}:{kwargs}"
+         cached_result = await cache.get(cache_key)
+         if cached_result is not None:
+             return cached_result
+         result = await func(*args, **kwargs)
+         await cache.set(cache_key, result)
+         return result
+
+     return wrapper
+
+
+ class AuthError(Exception):
+     pass
+
+
+ class CensoredError(Exception):
+     pass
+
+
+ @app.middleware('http')
+ async def add_cors_headers(request: Request, call_next):
+     response = await call_next(request)
+     response.headers['Access-Control-Allow-Origin'] = '*'
+     response.headers['Access-Control-Allow-Methods'] = 'GET, POST, PUT, DELETE, PATCH, OPTIONS'
+     response.headers['Access-Control-Allow-Headers'] = 'Content-Type, Authorization'
+     return response
+
+
+ @app.get('/')
+ async def root():
+     return HTMLResponse('well, prolapse, so what')
+
+
+ class OverrideStreamResponse(StreamingResponse):
+     async def stream_response(self, send: Send) -> None:
+         first_chunk = True
+         async for chunk in self.body_iterator:
+             if first_chunk:
+                 await self.send_request_header(send)
+                 first_chunk = False
+             if not isinstance(chunk, bytes):
+                 chunk = chunk.encode(self.charset)
+             await send({'type': 'http.response.body', 'body': chunk, 'more_body': True})
+
+         if first_chunk:
+             await self.send_request_header(send)
+         await send({'type': 'http.response.body', 'body': b'', 'more_body': False})
+
+     async def send_request_header(self, send: Send) -> None:
+         await send({'type': 'http.response.start', 'status': self.status_code, 'headers': self.raw_headers})
+
+     async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
+         async with create_task_group() as task_group:
+             async def wrap(func: Callable[[], Coroutine]) -> None:
+                 await func()
+                 task_group.cancel_scope.cancel()
+
+             task_group.start_soon(wrap, partial(self.stream_response, send))
+             await wrap(partial(self.listen_for_disconnect, receive))
+
+         if self.background is not None:
+             await self.background()
+
+
+ async def proxy_openai_api(request: Request):
+     headers = {k: v for k, v in request.headers.items() if k not in {'host', 'content-length', 'x-forwarded-for', 'x-real-ip', 'connection'}}
+
+     def update_authorization_header(api_key):
+         auth_header_key = next((k for k in headers.keys() if k.lower() == 'authorization'), 'Authorization')
+         headers[auth_header_key] = f'Bearer {api_key}'
+
+     client = AsyncClient(verify=False, follow_redirects=True, timeout=Timeout(connect=10, read=90, write=10, pool=10))
+
+     request_body = await request.json() if request.method in {'POST', 'PUT'} else None
+
+     async def stream_api_response(api_key: str):
+         update_authorization_header(api_key)
+         try:
+             streaming = client.stream(request.method, COMPLETIONS_URL, headers=headers, params=request.query_params, json=request_body)
+             async with streaming as stream_response:
+                 if stream_response.status_code in {401, 402, 429}:
+                     raise AuthError('API key is invalid or the request rate limit has been exceeded')
+                 if stream_response.status_code == 403:
+                     raise CensoredError('rejected due to censorship')  # this error is specific to OpenRouter!
+                 response.init_headers({k: v for k, v in stream_response.headers.items() if k not in {'content-length', 'content-encoding', 'alt-svc'}})
+
+                 content = bytearray()
+                 async for chunk in stream_response.aiter_bytes():
+                     yield chunk
+                     content.extend(chunk)
+
+         except RequestError as exc:
+             raise HTTPException(status_code=500, detail=f'an error occurred while making the request: {exc}')
+
+     for api_key in API_KEYS:
+         try:
+             response_generator = stream_api_response(api_key)
+             response = OverrideStreamResponse(response_generator)
+             return response
+         except AuthError:
+             print(f'API key {api_key} is invalid or the request rate limit has been exceeded')
+             continue
+     raise HTTPException(status_code=401, detail='all API keys have been exhausted, access denied.')
+
+
+ @cache_results
+ async def get_free_models():
+     async with AsyncClient(follow_redirects=True, timeout=Timeout(10.0, read=30.0, write=10.0, pool=10.0)) as client:
+         response = await client.get('https://openrouter.ai/api/v1/models')
+         response.raise_for_status()
+         data = response.json()
+         filtered_models = [model for model in data.get('data', []) if model.get('id', '').endswith(':free')]
+         return {'data': filtered_models, 'object': 'list'}
+
+
+ @app.api_route('/v1/models', methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'])
+ @app.api_route('/models', methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'])
+ async def get_models():
+     return await get_free_models()
+
+
+ @app.api_route('/v1/chat/completions', methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'])
+ @app.api_route('/chat/completions', methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'])
+ async def proxy_handler(request: Request):
+     return await proxy_openai_api(request)
+
+
+ if __name__ == '__main__':
+     from uvicorn import run
+
+     run(app, host='0.0.0.0', port=7860)