Update main.py
main.py CHANGED
@@ -90,105 +90,104 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
         return cls.default_model
 
     @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: List[Dict[str, str]],
+        proxy: Optional[str] = None,
+        image: Optional[Any] = None,
+        image_name: Optional[str] = None,
+        **kwargs
+    ) -> Any:
+        model = cls.get_model(model)
 
+        # Check if the model is working
+        if not cls.working or model not in cls.models:
+            raise ModelNotWorkingException(model)
 
+        headers = {
+            "accept": "*/*",
+            "accept-language": "en-US,en;q=0.9",
+            "cache-control": "no-cache",
+            "content-type": "application/json",
+            "origin": cls.url,
+            "pragma": "no-cache",
+            "referer": f"{cls.url}/",
+            "sec-ch-ua": '"Not;A=Brand";v="24", "Chromium";v="128"',
+            "sec-ch-ua-mobile": "?0",
+            "sec-ch-ua-platform": '"Linux"',
+            "sec-fetch-dest": "empty",
+            "sec-fetch-mode": "cors",
+            "sec-fetch-site": "same-origin",
+            "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
+        }
 
+        if model in cls.userSelectedModel:
+            prefix = f"@{cls.userSelectedModel[model]}"
+            if not messages[0]['content'].startswith(prefix):
+                messages[0]['content'] = f"{prefix} {messages[0]['content']}"
 
+        async with ClientSession(headers=headers) as session:
+            if image is not None:
+                messages[-1]["data"] = {
+                    "fileText": image_name,
+                    "imageBase64": to_data_uri(image)
+                }
+
+            random_id = ''.join(random.choices(string.ascii_letters + string.digits, k=7))
 
+            data = {
+                "messages": messages,
+                "id": random_id,
+                "previewToken": None,
+                "userId": None,
+                "codeModelMode": True,
+                "agentMode": {},
+                "trendingAgentMode": {},
+                "userSelectedModel": None,
+                "userSystemPrompt": None,
+                "isMicMode": False,
+                "maxTokens": 99999999,
+                "playgroundTopP": 0.9,
+                "playgroundTemperature": 0.5,
+                "isChromeExt": False,
+                "githubToken": None,
+                "clickedAnswer2": False,
+                "clickedAnswer3": False,
+                "clickedForceWebSearch": False,
+                "visitFromDelta": False,
+                "mobileClient": False,
+                "webSearchMode": False,
+            }
 
+            if model in cls.agentMode:
+                data["agentMode"] = cls.agentMode[model]
+            elif model in cls.trendingAgentMode:
+                data["trendingAgentMode"] = cls.trendingAgentMode[model]
+            elif model in cls.userSelectedModel:
+                data["userSelectedModel"] = cls.userSelectedModel[model]
 
+            async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
+                response.raise_for_status()
+                response_text = await response.text()
+
+                if not response_text: # Check for empty response
+                    raise ModelNotWorkingException(model)
 
+                if model == 'ImageGenerationLV45LJp':
+                    url_match = re.search(r'https://storage\.googleapis\.com/[^\s\)]+', response_text)
+                    if url_match:
+                        image_url = url_match.group(0)
+                        yield ImageResponse(image_url, alt=messages[-1]['content'])
+                    else:
+                        raise Exception("Image URL not found in the response")
                 else:
+                    async for chunk in response.content.iter_any():
+                        if chunk:
+                            decoded_chunk = chunk.decode(errors='ignore') # Handle decoding errors
+                            decoded_chunk = re.sub(r'\$@\$v=[^$]+\$@\$', '', decoded_chunk)
+                            if decoded_chunk.strip():
+                                yield decoded_chunk
 
 # FastAPI app setup
 app = FastAPI()
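
For reference, the generator added above streams plain text chunks, or an ImageResponse when the image model is selected. A minimal consumption sketch, assuming the module's own imports plus asyncio, and assuming "blackbox" stands in for some entry of Blackbox.models (a hypothetical name, not taken from this diff):

import asyncio

# Illustrative only: iterate the async generator and print streamed text chunks.
# "blackbox" is a placeholder model name; substitute any entry from Blackbox.models.
async def demo():
    messages = [{"role": "user", "content": "Hello"}]
    async for chunk in Blackbox.create_async_generator("blackbox", messages):
        if isinstance(chunk, str):
            print(chunk, end="", flush=True)

asyncio.run(demo())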

@@ -276,4 +275,4 @@ async def chat_completions(request: ChatRequest):
 
 @app.get("/niansuhai/v1/models")
 async def get_models():
+    return {"models": Blackbox.models}
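
And a minimal client-side sketch for the models route touched in the second hunk, assuming the FastAPI app is reachable at http://localhost:7860 (the host and port are an assumption, not part of this file):

import requests

# Illustrative only: the base URL is an assumption; point it at wherever main.py is served.
resp = requests.get("http://localhost:7860/niansuhai/v1/models")
resp.raise_for_status()
print(resp.json()["models"])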