Update main.py
main.py
CHANGED
@@ -5,13 +5,18 @@ import random
 import string
 import uuid
 import json
-
+import logging
+from aiohttp import ClientSession, ClientError
 from fastapi import FastAPI, HTTPException
 from pydantic import BaseModel
 from typing import List, Dict, Any, Optional
 from datetime import datetime
 from fastapi.responses import StreamingResponse
 
+# Set up logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
 # Custom exception for model not working
 class ModelNotWorkingException(Exception):
     def __init__(self, model: str):

@@ -167,28 +172,32 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
         elif model in cls.userSelectedModel:
             data["userSelectedModel"] = cls.userSelectedModel[model]
 
-
-
-
-
-
-
-
+        try:
+            async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
+                response.raise_for_status()
+                response_text = await response.text()
+
+                # Check if the response is empty
+                if not response_text.strip():
+                    raise ModelNotWorkingException(model)
 
-
-
-
-
-
+                if model == 'ImageGenerationLV45LJp':
+                    url_match = re.search(r'https://storage\.googleapis\.com/[^\s\)]+', response_text)
+                    if url_match:
+                        image_url = url_match.group(0)
+                        yield ImageResponse(image_url, alt=messages[-1]['content'])
+                    else:
+                        raise Exception("Image URL not found in the response")
                 else:
-
-
-
-
-
-
-
-
+                    async for chunk in response.content.iter_any():
+                        if chunk:
+                            decoded_chunk = chunk.decode(errors='ignore')  # Handle decoding errors
+                            decoded_chunk = re.sub(r'\$@\$v=[^$]+\$@\$', '', decoded_chunk)
+                            if decoded_chunk.strip():
+                                yield decoded_chunk
+        except ClientError as e:
+            logger.error(f"HTTP request failed: {e}")
+            raise HTTPException(status_code=503, detail="Service is unavailable. Please try again later.")
 
 # FastAPI app setup
 app = FastAPI()
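Both new failure paths surface as exceptions: an empty upstream response raises ModelNotWorkingException inside the provider's async generator, and an aiohttp ClientError is logged and re-raised as HTTPException(503). Below is a minimal illustrative sketch, not part of this commit, of how the FastAPI app could translate the custom exception into an HTTP error; only the class and app names come from the diff above, the exception body and handler are assumed, and it presumes the generator is consumed before a StreamingResponse begins so the exception can still reach FastAPI's handlers.

# Illustrative sketch only -- not in this commit.
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse

app = FastAPI()

class ModelNotWorkingException(Exception):
    def __init__(self, model: str):
        self.model = model
        super().__init__(f"Model '{model}' returned an empty response")  # assumed message

@app.exception_handler(ModelNotWorkingException)
async def handle_model_not_working(request: Request, exc: ModelNotWorkingException):
    # Mirror the 503 status used by the new ClientError branch
    return JSONResponse(status_code=503, content={"detail": str(exc)})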