Update main.py
main.py
CHANGED
@@ -11,12 +11,12 @@ import asyncio
 import time
 from collections import defaultdict
 from typing import List, Dict, Any, Optional, Union, AsyncGenerator
+from datetime import datetime # Essential for timestamping
 
 from aiohttp import ClientSession, ClientResponseError
 from fastapi import FastAPI, HTTPException, Request, Depends, Header
 from fastapi.responses import JSONResponse, StreamingResponse
 from pydantic import BaseModel
-from datetime import datetime # Fix for 'datetime' not defined
 
 # Configure logging
 logging.basicConfig(
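The hunk above only relocates the `datetime` import into the standard-library block and re-labels it as being for timestamping; the `logging.basicConfig(` call itself is cut off by the diff context. For readers following along, a minimal sketch of a timestamped logging setup is shown below; the format string and level are assumptions, not the values committed in main.py.

import logging
from datetime import datetime

# Assumed settings for illustration; main.py may configure logging differently.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
)

logger = logging.getLogger(__name__)
logger.info("Service started at %s", datetime.now().isoformat())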
@@ -148,20 +148,6 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
         "gpt-4o": "/?model=gpt-4o",
         "gemini-pro": "/?model=gemini-pro",
         "claude-sonnet-3.5": "/?model=claude-sonnet-3.5",
-        "PythonAgent": "/?model=PythonAgent",
-        "JavaAgent": "/?model=JavaAgent",
-        "JavaScriptAgent": "/?model=JavaScriptAgent",
-        "HTMLAgent": "/?model=HTMLAgent",
-        "GoogleCloudAgent": "/?model=GoogleCloudAgent",
-        "AndroidDeveloper": "/?model=AndroidDeveloper",
-        "SwiftDeveloper": "/?model=SwiftDeveloper",
-        "Next.jsAgent": "/?model=Next.jsAgent",
-        "MongoDBAgent": "/?model=MongoDBAgent",
-        "PyTorchAgent": "/?model=PyTorchAgent",
-        "ReactAgent": "/?model=ReactAgent",
-        "XcodeAgent": "/?model=XcodeAgent",
-        "AngularJSAgent": "/?model=AngularJSAgent",
-        "ImageGeneration": "/?model=ImageGeneration",
     }
 
     model_aliases = {
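This hunk drops fourteen agent entries (PythonAgent, ReactAgent, ImageGeneration, and so on) from the model-to-referer mapping, so a request that still names one of them would no longer find a match. A small sketch of a defensive lookup follows; the MODEL_PATHS and resolve_model_path names are hypothetical stand-ins, not identifiers from main.py.

# Hypothetical names for illustration only; the real mapping lives inside the Blackbox class.
MODEL_PATHS = {
    "gpt-4o": "/?model=gpt-4o",
    "gemini-pro": "/?model=gemini-pro",
    "claude-sonnet-3.5": "/?model=claude-sonnet-3.5",
}
DEFAULT_MODEL = "gpt-4o"

def resolve_model_path(model: str) -> str:
    """Return the referer path for a known model, falling back to the default."""
    return MODEL_PATHS.get(model, MODEL_PATHS[DEFAULT_MODEL])

print(resolve_model_path("PythonAgent"))  # falls back to "/?model=gpt-4o" after the removal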
@@ -346,6 +332,7 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
         Yields:
             Union[str, ImageResponse]: Segments of the generated response or ImageResponse objects.
         """
+        logger.debug("Starting async generator for model: %s", model)
         model = cls.get_model(model)
 
         chat_id = cls.generate_random_string()
@@ -438,6 +425,7 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
 
         async with ClientSession(headers=common_headers) as session:
             try:
+                logger.debug("Sending POST request to Blackbox API at %s", cls.api_endpoint)
                 async with session.post(
                     cls.api_endpoint,
                     headers=headers_api_chat_combined,
@@ -447,15 +435,19 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
                     response_api_chat.raise_for_status()
                     text = await response_api_chat.text()
                     cleaned_response = cls.clean_response(text)
+                    logger.debug("Received response from Blackbox API: %s", cleaned_response)
+
+                    # Test yield to verify streaming works
+                    yield "Streaming response started...\n"
 
                     if model in cls.image_models:
                         match = re.search(r'!\[.*?\]\((https?://[^\)]+)\)', cleaned_response)
                         if match:
                             image_url = match.group(1)
                             image_response = ImageResponse(images=image_url, alt="Generated Image")
-                            yield image_response
+                            yield image_response.json() + "\n"
                         else:
-                            yield cleaned_response
+                            yield cleaned_response + "\n"
                     else:
                         if websearch:
                             match = re.search(r'\$~~~\$(.*?)\$~~~\$', cleaned_response, re.DOTALL)
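With this change every branch yields a newline-terminated str (the image branch serializes its ImageResponse via .json()), which is the shape FastAPI's StreamingResponse, already imported at the top of the file, expects from an async generator. A rough sketch of that wiring follows; the route path, model, and messages are invented for the example, and the real handler presumably does more.

from fastapi import FastAPI
from fastapi.responses import StreamingResponse

app = FastAPI()

@app.post("/v1/chat/completions-sketch")
async def chat_completions_sketch():
    # Assumes the Blackbox class from this file is in scope; inputs are hard-coded
    # here purely for illustration.
    generator = Blackbox.create_async_generator(
        model="gpt-4o",
        messages=[{"role": "user", "content": "Hello"}],
    )
    # Every chunk yielded by the generator is already a plain, newline-terminated string.
    return StreamingResponse(generator, media_type="text/plain")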
@@ -481,22 +473,29 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
                         else:
                             final_response = cleaned_response
 
-                        yield final_response
+                        yield final_response + "\n"
+
                 except ClientResponseError as e:
                     error_text = f"Error {e.status}: {e.message}"
+                    logger.error("ClientResponseError: %s", error_text)
                     try:
                         error_response = await e.response.text()
                         cleaned_error = cls.clean_response(error_response)
                         error_text += f" - {cleaned_error}"
                     except Exception:
                         pass
-                    yield error_text
+                    yield f"{error_text}\n"
                 except Exception as e:
-
+                    error_text = f"Unexpected error during /api/chat request: {str(e)}"
+                    logger.error("Exception: %s", error_text)
+                    yield f"{error_text}\n"
 
-
+            # Test yield after API call
+            yield "Streaming response ended.\n"
 
+            chat_url = f'{cls.url}/chat/{chat_id}?model={model}'
             try:
+                logger.debug("Sending POST request to Chat URL: %s", chat_url)
                 async with session.post(
                     chat_url,
                     headers=headers_chat_combined,
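Note that both except blocks now yield their error text into the stream instead of raising, so the client still receives a 200 response and has to detect failures from the chunk contents. A hedged sketch of what consuming the stream might look like with aiohttp; the URL, path, and payload are placeholders.

import asyncio
from aiohttp import ClientSession

async def consume_stream():
    async with ClientSession() as session:
        async with session.post(
            "http://localhost:8000/v1/chat/completions",  # placeholder deployment URL
            json={"model": "gpt-4o", "messages": [{"role": "user", "content": "Hi"}]},
        ) as resp:
            # Iterating the response body yields newline-delimited chunks.
            async for raw in resp.content:
                line = raw.decode("utf-8", errors="replace")
                if line.startswith("Error") or line.startswith("Unexpected error"):
                    print("server reported a failure inline:", line.strip())
                else:
                    print(line, end="")

asyncio.run(consume_stream())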
@@ -504,18 +503,15 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
                     proxy=proxy
                 ) as response_chat:
                     response_chat.raise_for_status()
-
+                    logger.debug("Chat POST request successful.")
             except ClientResponseError as e:
                 error_text = f"Error {e.status}: {e.message}"
-
-
-                    cleaned_error = cls.clean_response(error_response)
-                    error_text += f" - {cleaned_error}"
-                except Exception:
-                    pass
-                yield error_text
+                logger.error("ClientResponseError on chat POST: %s", error_text)
+                yield f"{error_text}\n"
             except Exception as e:
-
+                error_text = f"Unexpected error during /chat/{chat_id} request: {str(e)}"
+                logger.error("Exception on chat POST: %s", error_text)
+                yield f"{error_text}\n"
 
 # Custom exception for model not working
 class ModelNotWorkingException(Exception):
@@ -633,8 +629,8 @@ async def chat_completions(request: ChatRequest, req: Request, api_key: str = De
         generator = Blackbox.create_async_generator(
             model=request.model,
             messages=[{"role": msg.role, "content": msg.content} for msg in request.messages],
-
-
+            proxy=request.headers.get('Proxy'), # Assuming proxy info is passed via headers
+            websearch=request.query_params.get('websearch', 'false').lower() == 'true'
         )
 
         logger.info(f"Started streaming response for API key: {api_key} | IP: {client_ip}")
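One caveat on this final hunk: the hunk header shows the handler signature as async def chat_completions(request: ChatRequest, req: Request, ...), and a Pydantic body model such as ChatRequest does not normally expose .headers or .query_params; those attributes live on the Starlette Request bound to req. The sketch below shows how the header and query-string lookups read on the Request object, with a stripped-down handler and models assumed for the example rather than copied from main.py.

from typing import List

from fastapi import FastAPI, Request
from pydantic import BaseModel

app = FastAPI()

class Message(BaseModel):
    role: str
    content: str

class ChatRequest(BaseModel):
    model: str
    messages: List[Message]

@app.post("/v1/chat/completions-sketch")
async def chat_completions_sketch(request: ChatRequest, req: Request):
    # Header and query-string values come from the HTTP Request object (`req`),
    # not from the Pydantic body model (`request`).
    proxy = req.headers.get("Proxy")
    websearch = req.query_params.get("websearch", "false").lower() == "true"
    return {"proxy": proxy, "websearch": websearch}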