Niansuh committed on
Commit
142d11a
·
verified ·
1 Parent(s): 187715b

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +41 -17
main.py CHANGED
@@ -20,7 +20,7 @@ class ImageResponse:
20
 
21
  def to_data_uri(image: Any) -> str:
22
  # Placeholder for actual image encoding
23
- return "data:image/png;base64,..."
24
 
25
  class AsyncGeneratorProvider:
26
  pass
@@ -98,36 +98,65 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
98
  headers = {
99
  "accept": "*/*",
100
  "accept-language": "en-US,en;q=0.9",
 
101
  "content-type": "application/json",
102
  "origin": cls.url,
103
- "user-agent": "Mozilla/5.0 ..."
 
 
 
 
 
 
 
 
104
  }
105
 
106
  if model in cls.userSelectedModel:
107
  prefix = f"@{cls.userSelectedModel[model]}"
108
  if not messages[0]['content'].startswith(prefix):
109
  messages[0]['content'] = f"{prefix} {messages[0]['content']}"
110
-
111
  async with ClientSession(headers=headers) as session:
112
- if model == 'ImageGenerationLV45LJp' and image is not None:
113
  messages[-1]["data"] = {
114
  "fileText": image_name,
115
  "imageBase64": to_data_uri(image)
116
  }
117
-
118
  random_id = ''.join(random.choices(string.ascii_letters + string.digits, k=7))
119
 
120
  data = {
121
  "messages": messages,
122
  "id": random_id,
123
- "agentMode": cls.agentMode.get(model, {}),
124
- "trendingAgentMode": cls.trendingAgentMode.get(model, {}),
125
- "userSelectedModel": cls.userSelectedModel.get(model, None),
 
 
 
 
 
126
  "maxTokens": 1024,
127
  "playgroundTopP": 0.9,
128
  "playgroundTemperature": 0.5,
 
 
 
 
 
 
 
 
129
  }
130
 
 
 
 
 
 
 
 
131
  async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
132
  response.raise_for_status()
133
  if model == 'ImageGenerationLV45LJp':
@@ -141,7 +170,7 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
141
  else:
142
  async for chunk in response.content.iter_any():
143
  if chunk:
144
- decoded_chunk = chunk.decode(errors='ignore')
145
  decoded_chunk = re.sub(r'\$@\$v=[^$]+\$@\$', '', decoded_chunk)
146
  if decoded_chunk.strip():
147
  yield decoded_chunk
@@ -180,25 +209,20 @@ async def chat_completions(request: ChatRequest):
180
 
181
  async_generator = Blackbox.create_async_generator(
182
  model=request.model,
183
- messages=messages,
184
- image=None, # Pass the image if required
185
- image_name=None # Pass image name if required
186
  )
187
 
188
  if request.stream:
189
  async def generate():
190
  async for chunk in async_generator:
191
- yield f"data: {json.dumps(create_response(chunk.url if isinstance(chunk, ImageResponse) else chunk, request.model))}\n\n"
192
  yield "data: [DONE]\n\n"
193
 
194
  return StreamingResponse(generate(), media_type="text/event-stream")
195
  else:
196
  response_content = ""
197
  async for chunk in async_generator:
198
- if isinstance(chunk, ImageResponse):
199
- response_content += f"Image URL: {chunk.url}\n"
200
- else:
201
- response_content += chunk # Concatenate text responses
202
 
203
  return {
204
  "id": f"chatcmpl-{uuid.uuid4()}",
 
20
 
21
  def to_data_uri(image: Any) -> str:
22
  # Placeholder for actual image encoding
23
+ return "data:image/png;base64,..." # Replace with actual base64 data
24
 
25
  class AsyncGeneratorProvider:
26
  pass
 
98
  headers = {
99
  "accept": "*/*",
100
  "accept-language": "en-US,en;q=0.9",
101
+ "cache-control": "no-cache",
102
  "content-type": "application/json",
103
  "origin": cls.url,
104
+ "pragma": "no-cache",
105
+ "referer": f"{cls.url}/",
106
+ "sec-ch-ua": '"Not;A=Brand";v="24", "Chromium";v="128"',
107
+ "sec-ch-ua-mobile": "?0",
108
+ "sec-ch-ua-platform": '"Linux"',
109
+ "sec-fetch-dest": "empty",
110
+ "sec-fetch-mode": "cors",
111
+ "sec-fetch-site": "same-origin",
112
+ "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
113
  }
114
 
115
  if model in cls.userSelectedModel:
116
  prefix = f"@{cls.userSelectedModel[model]}"
117
  if not messages[0]['content'].startswith(prefix):
118
  messages[0]['content'] = f"{prefix} {messages[0]['content']}"
119
+
120
  async with ClientSession(headers=headers) as session:
121
+ if image is not None:
122
  messages[-1]["data"] = {
123
  "fileText": image_name,
124
  "imageBase64": to_data_uri(image)
125
  }
126
+
127
  random_id = ''.join(random.choices(string.ascii_letters + string.digits, k=7))
128
 
129
  data = {
130
  "messages": messages,
131
  "id": random_id,
132
+ "previewToken": None,
133
+ "userId": None,
134
+ "codeModelMode": True,
135
+ "agentMode": {},
136
+ "trendingAgentMode": {},
137
+ "userSelectedModel": None,
138
+ "userSystemPrompt": None,
139
+ "isMicMode": False,
140
  "maxTokens": 1024,
141
  "playgroundTopP": 0.9,
142
  "playgroundTemperature": 0.5,
143
+ "isChromeExt": False,
144
+ "githubToken": None,
145
+ "clickedAnswer2": False,
146
+ "clickedAnswer3": False,
147
+ "clickedForceWebSearch": False,
148
+ "visitFromDelta": False,
149
+ "mobileClient": False,
150
+ "webSearchMode": False,
151
  }
152
 
153
+ if model in cls.agentMode:
154
+ data["agentMode"] = cls.agentMode[model]
155
+ elif model in cls.trendingAgentMode:
156
+ data["trendingAgentMode"] = cls.trendingAgentMode[model]
157
+ elif model in cls.userSelectedModel:
158
+ data["userSelectedModel"] = cls.userSelectedModel[model]
159
+
160
  async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
161
  response.raise_for_status()
162
  if model == 'ImageGenerationLV45LJp':
 
170
  else:
171
  async for chunk in response.content.iter_any():
172
  if chunk:
173
+ decoded_chunk = chunk.decode(errors='ignore') # Handle decoding errors
174
  decoded_chunk = re.sub(r'\$@\$v=[^$]+\$@\$', '', decoded_chunk)
175
  if decoded_chunk.strip():
176
  yield decoded_chunk
 
209
 
210
  async_generator = Blackbox.create_async_generator(
211
  model=request.model,
212
+ messages=messages
 
 
213
  )
214
 
215
  if request.stream:
216
  async def generate():
217
  async for chunk in async_generator:
218
+ yield f"data: {json.dumps(create_response(chunk.content if isinstance(chunk, ImageResponse) else chunk, request.model))}\n\n"
219
  yield "data: [DONE]\n\n"
220
 
221
  return StreamingResponse(generate(), media_type="text/event-stream")
222
  else:
223
  response_content = ""
224
  async for chunk in async_generator:
225
+ response_content += chunk.content if isinstance(chunk, ImageResponse) else chunk # Concatenate response
 
 
 
226
 
227
  return {
228
  "id": f"chatcmpl-{uuid.uuid4()}",