# test24 / main.py
# NOTE: the lines below were Hugging Face file-viewer UI residue captured in
# the scrape, preserved here as comments so the module remains valid Python:
#   Niansuh's picture · Update main.py · 110efbd verified · raw · history · blame · 2.43 kB
import json
import random
import re
import string
from collections.abc import AsyncGenerator

from aiohttp import ClientSession
from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
# Pydantic models for request
class Message(BaseModel):
    """A single chat message in the OpenAI-style request body."""
    # Speaker role, e.g. "user", "assistant", or "system" — not validated here.
    role: str
    # The message text.
    content: str
class ChatRequest(BaseModel):
    """Request body for POST /v1/chat/completions (OpenAI-compatible shape)."""
    # Requested model name; unknown names fall back to 'blackbox' downstream.
    model: str
    # Conversation history, oldest first.
    messages: list[Message]
# Blackbox class
class Blackbox:
    """Minimal async client for the blackbox.ai chat API."""

    url = "https://www.blackbox.ai"
    api_endpoint = "https://www.blackbox.ai/api/chat"

    # Model identifiers this wrapper accepts; anything else falls back
    # to the default 'blackbox' model in get_model().
    models = [
        'blackbox',
        'gemini-1.5-flash',
        "llama-3.1-8b",
        'llama-3.1-70b',
        'llama-3.1-405b',
        'ImageGenerationLV45LJp',
        'gpt-4o',
        'gemini-pro',
        'claude-sonnet-3.5',
    ]

    @classmethod
    def get_model(cls, model: str) -> str:
        """Return *model* if it is in cls.models, else the 'blackbox' default."""
        return model if model in cls.models else 'blackbox'

    @classmethod
    async def create_async_generator(
        cls, model: str, messages: list
    ) -> AsyncGenerator[str, None]:
        """Stream cleaned text chunks from the blackbox.ai chat endpoint.

        Args:
            model: Requested model name; normalized via get_model().
            messages: List of {"role": ..., "content": ...} dicts sent as-is.

        Yields:
            Decoded response chunks with the "$@$v=...$@$" marker stripped
            and surrounding whitespace removed.

        Raises:
            aiohttp.ClientResponseError: on a non-2xx HTTP status.
        """
        model = cls.get_model(model)
        headers = {
            "accept": "*/*",
            "content-type": "application/json",
            "user-agent": "Mozilla/5.0"
        }
        async with ClientSession(headers=headers) as session:
            # Short alphanumeric conversation id expected by the upstream API.
            random_id = ''.join(random.choices(string.ascii_letters + string.digits, k=7))
            data = {
                "messages": messages,
                "id": random_id,
                "maxTokens": 1024,
                # NOTE(review): the validated `model` is never included in this
                # payload, so the upstream presumably always serves its default
                # model — confirm the expected field name against the
                # blackbox.ai request format before wiring it in.
            }
            async with session.post(cls.api_endpoint, json=data) as response:
                response.raise_for_status()
                async for chunk in response.content.iter_any():
                    if chunk:
                        # NOTE(review): iter_any() can split a multi-byte UTF-8
                        # character (or the marker itself) across chunks, which
                        # would make decode()/the regex misbehave — confirm the
                        # upstream framing before hardening.
                        decoded_chunk = chunk.decode()
                        # Strip the version banner the API embeds in responses.
                        decoded_chunk = re.sub(r'\$@\$v=[^$]+\$@\$', '', decoded_chunk)
                        yield decoded_chunk.strip()
# FastAPI app
app = FastAPI()

@app.post("/v1/chat/completions")
async def chat_completions(request: ChatRequest):
    """OpenAI-compatible chat endpoint that proxies blackbox.ai.

    Streams Server-Sent Events; each event's data is a JSON object with a
    'choices' list mirroring the OpenAI response shape.
    """
    messages = [{"role": msg.role, "content": msg.content} for msg in request.messages]
    async_generator = Blackbox.create_async_generator(
        model=request.model,
        messages=messages
    )

    async def event_stream():
        async for chunk in async_generator:
            # Build the JSON payload separately — the original inline f-string
            # had an unbalanced '}' that made this line a syntax error.
            payload = json.dumps(
                {'choices': [{'message': {'role': 'assistant', 'content': chunk}}]}
            )
            yield f"data: {payload}\n\n"

    return StreamingResponse(event_stream(), media_type="text/event-stream")