Upload folder using huggingface_hub
app.py CHANGED
@@ -106,23 +106,30 @@ async def on_message(message):
             '.help : Show this message'
         )
     else:
-        [old lines 109-125: previous handler body, not rendered in this diff view]
+        try:
+            if len(keys) == 0:
+                raise openai.RateLimitError("All API keys have been used up.")
+            key = random.choice(keys)
+            response = openai.OpenAI(
+                api_key=key,
+                base_url=config["url"],
+                default_headers={
+                    "User-Agent": config.get("user_agent", ""),
+                    "Cookie": config.get("cookie", "")
+                }
+            ).chat.completions.create(
+                model=config["model"],
+                messages=[
+                    {"role": "user", "content": message.content}
+                ],
+                stream=False
+            ).choices[0].message.content
+            await message.channel.send(response)
+        except openai.RateLimitError as e:
+            if str(e).find("You've reached the upper limit for today's usage.") != -1:
+                print(f"[Rate Limited] Remove key {key[:8]}...{key[-8:]}")
+                keys.remove(key)
+            raise
 
 # print(os.getenv("TOKEN"))
 client.run(os.getenv('TOKEN'))