from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
import json
import re
import random
import string
from typing import AsyncGenerator
from aiohttp import ClientSession

# Pydantic models for request
class Message(BaseModel):
    role: str
    content: str

class ChatRequest(BaseModel):
    model: str
    messages: list[Message]
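
# Example request body accepted by ChatRequest (illustrative values only):
#
#     {
#         "model": "gpt-4o",
#         "messages": [{"role": "user", "content": "Hello"}]
#     }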

# Blackbox class
class Blackbox:
    url = "https://www.blackbox.ai"
    api_endpoint = "https://www.blackbox.ai/api/chat"
    
    models = [
        'blackbox',
        'gemini-1.5-flash',
        'llama-3.1-8b',
        'llama-3.1-70b',
        'llama-3.1-405b',
        'ImageGenerationLV45LJp',
        'gpt-4o',
        'gemini-pro',
        'claude-sonnet-3.5',
    ]

    @classmethod
    def get_model(cls, model: str) -> str:
        # Fall back to the default 'blackbox' model when the requested name is unknown.
        return model if model in cls.models else 'blackbox'

    @classmethod
    async def create_async_generator(cls, model: str, messages: list) -> AsyncGenerator[str, None]:
        # Resolve the requested model name; note that the resolved value is not
        # forwarded in the request payload below.
        model = cls.get_model(model)

        headers = {
            "accept": "*/*",
            "content-type": "application/json",
            "user-agent": "Mozilla/5.0"
        }

        async with ClientSession(headers=headers) as session:
            # Random 7-character id sent as the "id" field of the payload.
            random_id = ''.join(random.choices(string.ascii_letters + string.digits, k=7))
            data = {
                "messages": messages,
                "id": random_id,
                "maxTokens": 1024,
            }

            async with session.post(cls.api_endpoint, json=data) as response:
                response.raise_for_status()
                async for chunk in response.content.iter_any():
                    if chunk:
                        decoded_chunk = chunk.decode()
                        # Strip the version marker ($@$v=...$@$) that the API prepends to the stream.
                        decoded_chunk = re.sub(r'\$@\$v=[^$]+\$@\$', '', decoded_chunk)
                        yield decoded_chunk.strip()
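
# Example of consuming the generator directly, outside FastAPI
# (a sketch; the event-loop setup shown here is an assumption):
#
#     import asyncio
#
#     async def demo():
#         async for text in Blackbox.create_async_generator(
#             model="gpt-4o",
#             messages=[{"role": "user", "content": "Hello"}],
#         ):
#             print(text, end="")
#
#     asyncio.run(demo())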

# FastAPI app
app = FastAPI()

@app.post("/v1/chat/completions")
async def chat_completions(request: ChatRequest):
    messages = [{"role": msg.role, "content": msg.content} for msg in request.messages]

    async_generator = Blackbox.create_async_generator(
        model=request.model,
        messages=messages
    )

    async def event_stream():
        async for chunk in async_generator:
            yield f"data: {json.dumps({'choices': [{'message': {'role': 'assistant', 'content': chunk}}]}})}\n\n"

    return StreamingResponse(event_stream(), media_type="text/event-stream")
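
# A minimal way to serve this app locally, assuming uvicorn is installed and
# this file is saved as main.py (both assumptions, not stated in the source):
#
#     uvicorn main:app --host 0.0.0.0 --port 8000
#
# Example streaming request against the endpoint:
#
#     curl -N http://localhost:8000/v1/chat/completions \
#         -H "Content-Type: application/json" \
#         -d '{"model": "gpt-4o", "messages": [{"role": "user", "content": "Hello"}]}'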