Niansuh commited on
Commit
5ac87b7
·
verified ·
1 Parent(s): b27d93f

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +11 -24
main.py CHANGED
@@ -22,8 +22,7 @@ class ModelNotWorkingException(Exception):
22
  # Mock implementations for ImageResponse and to_data_uri
23
  class ImageResponse:
24
  def __init__(self, url: str, alt: str):
25
- self.url = url
26
- self.alt = alt
27
 
28
  def to_data_uri(image: Any) -> str:
29
  return "data:image/png;base64,..." # Replace with actual base64 data
@@ -38,8 +37,7 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
38
  url = "https://www.blackbox.ai"
39
  api_endpoint = "https://www.blackbox.ai/api/chat"
40
  working = True
41
- supports_stream = True
42
- supports_system_message = True
43
  supports_message_history = True
44
 
45
  default_model = 'blackbox'
@@ -78,27 +76,20 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
78
  "flux": "ImageGenerationLV45LJp",
79
  }
80
 
81
- @classmethod
82
- def get_model(cls, model: str) -> str:
83
  if model in cls.models:
84
- return model
85
- elif model in cls.userSelectedModel:
86
- return model
87
- elif model in cls.model_aliases:
88
  return cls.model_aliases[model]
89
  else:
90
- return cls.default_model
91
-
92
- @classmethod
93
- async def create_async_generator(
94
  cls,
95
  model: str,
96
  messages: List[Dict[str, str]],
97
  proxy: Optional[str] = None,
98
  image: Optional[Any] = None,
99
  image_name: Optional[str] = None,
100
- **kwargs
101
- ) -> Any:
102
  model = cls.get_model(model)
103
 
104
  # Check if the model is working
@@ -166,8 +157,7 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
166
  data["trendingAgentMode"] = cls.trendingAgentMode[model]
167
  elif model in cls.userSelectedModel:
168
  data["userSelectedModel"] = cls.userSelectedModel[model]
169
-
170
- async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
171
  response.raise_for_status()
172
  if model == 'ImageGenerationLV45LJp':
173
  response_text = await response.text()
@@ -180,8 +170,7 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
180
  else:
181
  async for chunk in response.content.iter_any():
182
  if chunk:
183
- decoded_chunk = chunk.decode(errors='ignore') # Handle decoding errors
184
- decoded_chunk = re.sub(r'\$@\$v=[^$]+\$@\$', '', decoded_chunk)
185
  if decoded_chunk.strip():
186
  yield decoded_chunk
187
 
@@ -249,9 +238,7 @@ async def chat_completions(request: ChatRequest):
249
  if isinstance(chunk, ImageResponse):
250
  response_content += f"![image]({chunk.url})\n"
251
  else:
252
- response_content += chunk
253
-
254
- return {
255
  "id": f"chatcmpl-{uuid.uuid4()}",
256
  "object": "chat.completion",
257
  "created": int(datetime.now().timestamp()),
@@ -271,4 +258,4 @@ async def chat_completions(request: ChatRequest):
271
 
272
  @app.get("/niansuhai/v1/models")
273
  async def get_models():
274
- return {"models": Blackbox.models}
 
22
  # Mock implementations for ImageResponse and to_data_uri
23
  class ImageResponse:
24
  def __init__(self, url: str, alt: str):
25
+ self.url = url
+ self.alt = alt
 
26
 
27
  def to_data_uri(image: Any) -> str:
28
  return "data:image/png;base64,..." # Replace with actual base64 data
 
37
  url = "https://www.blackbox.ai"
38
  api_endpoint = "https://www.blackbox.ai/api/chat"
39
  working = True
40
+ supports_stream = True
+ supports_system_message = True
 
41
  supports_message_history = True
42
 
43
  default_model = 'blackbox'
 
76
  "flux": "ImageGenerationLV45LJp",
77
  }
78
 
79
+ @classmethod
+ def get_model(cls, model: str) -> str:
 
80
  if model in cls.models:
81
+ return model
+ elif model in cls.userSelectedModel:
82
+ return model
+ elif model in cls.model_aliases:
 
 
83
  return cls.model_aliases[model]
84
  else:
85
+ return cls.default_model
+
+ @classmethod
+ async def create_async_generator(
 
 
 
86
  cls,
87
  model: str,
88
  messages: List[Dict[str, str]],
89
  proxy: Optional[str] = None,
90
  image: Optional[Any] = None,
91
  image_name: Optional[str] = None,
92
+ **kwargs
+ ) -> Any:
 
93
  model = cls.get_model(model)
94
 
95
  # Check if the model is working
 
157
  data["trendingAgentMode"] = cls.trendingAgentMode[model]
158
  elif model in cls.userSelectedModel:
159
  data["userSelectedModel"] = cls.userSelectedModel[model]
160
+ async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
 
161
  response.raise_for_status()
162
  if model == 'ImageGenerationLV45LJp':
163
  response_text = await response.text()
 
170
  else:
171
  async for chunk in response.content.iter_any():
172
  if chunk:
173
+ decoded_chunk = chunk.decode(errors='ignore')  # Handle decoding errors
+ decoded_chunk = re.sub(r'\$@\$v=[^$]+\$@\$', '', decoded_chunk)
 
174
  if decoded_chunk.strip():
175
  yield decoded_chunk
176
 
 
238
  if isinstance(chunk, ImageResponse):
239
  response_content += f"![image]({chunk.url})\n"
240
  else:
241
+ response_content += chunk
+
+ return {
 
 
242
  "id": f"chatcmpl-{uuid.uuid4()}",
243
  "object": "chat.completion",
244
  "created": int(datetime.now().timestamp()),
 
258
 
259
  @app.get("/niansuhai/v1/models")
260
  async def get_models():
261
+ return {"models": Blackbox.models}