Niansuh committed on
Commit
80dc124
·
verified ·
1 Parent(s): c7b1ae2

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +148 -9
main.py CHANGED
@@ -1,12 +1,14 @@
1
- from fastapi import FastAPI, HTTPException
2
- from pydantic import BaseModel
3
- from typing import List, Dict, Any
4
  import re
5
  import random
6
  import string
7
  from aiohttp import ClientSession
 
 
 
8
 
9
- # Mocking the ImageResponse and to_data_uri functions
10
  class ImageResponse:
11
  def __init__(self, url: str, alt: str):
12
  self.url = url
@@ -25,14 +27,151 @@ class ProviderModelMixin:
25
  class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
26
  url = "https://www.blackbox.ai"
27
  api_endpoint = "https://www.blackbox.ai/api/chat"
 
 
 
 
 
28
  default_model = 'blackbox'
29
- models = ['blackbox', 'gemini-1.5-flash', "llama-3.1-8b"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
30
 
31
  @classmethod
32
- async def create_async_generator(cls, model: str, messages: List[Dict[str, str]]) -> Any:
33
- # Mock response for demonstration
34
- return {"content": "This is a mock response from the model."}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
35
 
 
36
  app = FastAPI()
37
 
38
  class Message(BaseModel):
@@ -61,7 +200,7 @@ async def chat_completions(request: ChatRequest):
61
  {
62
  "message": {
63
  "role": "assistant",
64
- "content": response['content']
65
  },
66
  "finish_reason": "stop",
67
  "index": 0
 
1
+ from __future__ import annotations
2
+
 
3
  import re
4
  import random
5
  import string
6
  from aiohttp import ClientSession
7
+ from fastapi import FastAPI, HTTPException
8
+ from pydantic import BaseModel
9
+ from typing import List, Dict, Any, Optional
10
 
11
+ # Mock implementations for ImageResponse and to_data_uri
12
  class ImageResponse:
13
  def __init__(self, url: str, alt: str):
14
  self.url = url
 
27
class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
    """Provider for the blackbox.ai chat API.

    Streams text completions for chat models and yields an ``ImageResponse``
    for the image-generation model. Capability flags and model tables mirror
    what the blackbox.ai web client sends.
    """

    url = "https://www.blackbox.ai"
    api_endpoint = "https://www.blackbox.ai/api/chat"
    working = True
    supports_stream = True
    supports_system_message = True
    supports_message_history = True

    default_model = 'blackbox'
    models = [
        'blackbox',
        'gemini-1.5-flash',
        "llama-3.1-8b",
        'llama-3.1-70b',
        'llama-3.1-405b',
        'ImageGenerationLV45LJp',
        'gpt-4o',
        'gemini-pro',
        'claude-sonnet-3.5',
    ]

    # Models routed through the dedicated image-generation agent.
    agentMode = {
        'ImageGenerationLV45LJp': {'mode': True, 'id': "ImageGenerationLV45LJp", 'name': "Image Generation"},
    }

    # Models routed via the "trending agent" request field.
    trendingAgentMode = {
        "blackbox": {},
        "gemini-1.5-flash": {'mode': True, 'id': 'Gemini'},
        "llama-3.1-8b": {'mode': True, 'id': "llama-3.1-8b"},
        'llama-3.1-70b': {'mode': True, 'id': "llama-3.1-70b"},
        'llama-3.1-405b': {'mode': True, 'id': "llama-3.1-405b"},
    }

    # Models selected by name in the request payload; the value is also used
    # as an "@model" prefix on the first message.
    userSelectedModel = {
        "gpt-4o": "gpt-4o",
        "gemini-pro": "gemini-pro",
        'claude-sonnet-3.5': "claude-sonnet-3.5",
    }

    # Friendly alias -> canonical model id.
    model_aliases = {
        "gemini-flash": "gemini-1.5-flash",
        "flux": "ImageGenerationLV45LJp",
    }

    @classmethod
    def get_model(cls, model: str) -> str:
        """Resolve *model* to a supported id, via aliases, else the default.

        Note: the original had a separate ``userSelectedModel`` branch; it is
        merged here (same result — those keys are also present in ``models``).
        """
        if model in cls.models or model in cls.userSelectedModel:
            return model
        return cls.model_aliases.get(model, cls.default_model)

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: List[Dict[str, str]],
        proxy: Optional[str] = None,
        image: Optional[Any] = None,
        image_name: Optional[str] = None,
        **kwargs
    ) -> Any:
        """Yield streamed text chunks, or a single ImageResponse for image models.

        Args:
            model: Requested model name or alias (resolved via ``get_model``).
            messages: Chat messages as dicts with at least a 'content' key.
            proxy: Optional HTTP proxy passed to aiohttp.
            image: Optional image payload, attached to the last message via
                ``to_data_uri`` — TODO confirm expected type (bytes/file-like).
            image_name: Filename reported alongside the image payload.

        Raises:
            Exception: when the image model's response contains no storage URL.
            aiohttp.ClientResponseError: on non-2xx HTTP status.
        """
        # Incremental UTF-8 decoder: iter_any() can split a multi-byte
        # character across chunks, which would make bytes.decode() raise.
        import codecs
        utf8_decoder = codecs.getincrementaldecoder("utf-8")(errors="replace")

        model = cls.get_model(model)

        headers = {
            "accept": "*/*",
            "accept-language": "en-US,en;q=0.9",
            "cache-control": "no-cache",
            "content-type": "application/json",
            "origin": cls.url,
            "pragma": "no-cache",
            "referer": f"{cls.url}/",
            "sec-ch-ua": '"Not;A=Brand";v="24", "Chromium";v="128"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"Linux"',
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "same-origin",
            "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
        }

        # Work on shallow copies so the caller's message dicts are never
        # mutated (the original wrote the "@model" prefix and image payload
        # straight into the caller's objects).
        messages = [dict(m) for m in messages]

        if model in cls.userSelectedModel:
            prefix = f"@{cls.userSelectedModel[model]}"
            if messages and not messages[0]['content'].startswith(prefix):
                messages[0]['content'] = f"{prefix} {messages[0]['content']}"

        async with ClientSession(headers=headers) as session:
            if image is not None:
                messages[-1]["data"] = {
                    "fileText": image_name,
                    "imageBase64": to_data_uri(image)
                }

            # Random conversation id expected by the endpoint.
            random_id = ''.join(random.choices(string.ascii_letters + string.digits, k=7))

            data = {
                "messages": messages,
                "id": random_id,
                "previewToken": None,
                "userId": None,
                "codeModelMode": True,
                "agentMode": {},
                "trendingAgentMode": {},
                "userSelectedModel": None,
                "userSystemPrompt": None,
                "isMicMode": False,
                "maxTokens": 1024,
                "playgroundTopP": 0.9,
                "playgroundTemperature": 0.5,
                "isChromeExt": False,
                "githubToken": None,
                "clickedAnswer2": False,
                "clickedAnswer3": False,
                "clickedForceWebSearch": False,
                "visitFromDelta": False,
                "mobileClient": False,
                "webSearchMode": False,
            }

            # Exactly one routing field is populated, matching the web client.
            if model in cls.agentMode:
                data["agentMode"] = cls.agentMode[model]
            elif model in cls.trendingAgentMode:
                data["trendingAgentMode"] = cls.trendingAgentMode[model]
            elif model in cls.userSelectedModel:
                data["userSelectedModel"] = cls.userSelectedModel[model]

            async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
                response.raise_for_status()
                if model == 'ImageGenerationLV45LJp':
                    # Image model: response body contains a storage URL, not a stream.
                    response_text = await response.text()
                    url_match = re.search(r'https://storage\.googleapis\.com/[^\s\)]+', response_text)
                    if url_match:
                        image_url = url_match.group(0)
                        yield ImageResponse(image_url, alt=messages[-1]['content'])
                    else:
                        raise Exception("Image URL not found in the response")
                else:
                    async for chunk in response.content.iter_any():
                        if chunk:
                            decoded_chunk = utf8_decoder.decode(chunk)
                            # Strip the "$@$v=...$@$" version marker. NOTE(review):
                            # a marker split across two chunks would survive this
                            # per-chunk filter — acceptable per original behavior.
                            decoded_chunk = re.sub(r'\$@\$v=[^$]+\$@\$', '', decoded_chunk)
                            if decoded_chunk.strip():
                                yield decoded_chunk
173
 
174
+ # FastAPI app setup
175
  app = FastAPI()
176
 
177
  class Message(BaseModel):
 
200
  {
201
  "message": {
202
  "role": "assistant",
203
+ "content": response['content'] if isinstance(response, dict) else response
204
  },
205
  "finish_reason": "stop",
206
  "index": 0