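"""Model routing for the chat backend.

Maps friendly model names onto hosted providers (g4f providers, Together AI,
Groq, Chutes, GitHub Models, and Gemini via litellm) and streams each
response chunk by chunk into helper.q for a consumer to read.
"""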
import json
import os
import random

import helpers.helper as helper
from g4f.client import Client
from g4f.Provider import DeepInfraChat, Glider, LambdaChat, TypeGPT
from litellm import completion
from openai import OpenAI

# from dotenv import load_dotenv
# load_dotenv()
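# Key pools come from the environment: GEMINI_KEY_LIST and GROQ_API_KEYS must
# be JSON arrays of keys (e.g. '["key-1", "key-2"]'); the Chutes and GitHub
# keys are plain strings.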
gemini_api_keys = json.loads(os.environ.get("GEMINI_KEY_LIST"))
groq_api_keys = json.loads(os.environ.get("GROQ_API_KEYS"))
chutes_key = os.environ.get("CHUTES_API_KEY")
github_key = os.environ.get("GITHUB_API_KEY")

DeepInfraChat.models = [
    "google/gemma-3-27b-it", "deepseek-ai/DeepSeek-R1-Turbo", "Qwen/QwQ-32B",
    "deepseek-ai/DeepSeek-R1", "deepseek-ai/DeepSeek-V3-0324",
    "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
    "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    "microsoft/Phi-4-multimodal-instruct",
]
deepinframodels = [
    "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
    "microsoft/Phi-4-multimodal-instruct", "google/gemma-3-27b-it",
    "meta-llama/Llama-4-Scout-17B-16E-Instruct",
]
# Friendly model name -> provider-side model id.
chutes_models = {
    "MAI-DS-R1-FP8": "microsoft/MAI-DS-R1-FP8",
    "DeepSeek-V3-0324": "deepseek-ai/DeepSeek-V3-0324",
    "deepseek-reasoner": "deepseek-ai/DeepSeek-R1",
    "GLM-4-32B-0414": "THUDM/GLM-4-32B-0414",
    "GLM-Z1-32B-0414": "THUDM/GLM-Z1-32B-0414",
}
github_models = {"gpt4.1": "gpt-4.1", "gpt-4o": "gpt-4o", "o4-mini": "o4-mini"}

# g4f provider fallback chains, tried in insertion order.
REASONING_CORRESPONDANCE = {"DeepSeek-R1-Glider": Glider, "DeepSeekR1-LAMBDA": LambdaChat,
                            "DeepSeekR1": DeepInfraChat, "deepseek-slow": TypeGPT}
# Pick a Gemini key up front; gpt4() below relies on it.
os.environ["GEMINI_API_KEY"] = random.choice(gemini_api_keys)
REASONING_QWQ = {"qwq-32b": DeepInfraChat}
# OpenAI-compatible client pointed at OpenRouter.
clienty = OpenAI(
    base_url="https://openrouter.ai/api/v1",
    api_key=os.environ.get("OPENROUTER_API_KEY"),
)
CHAT_CORRESPONDANCE = {"DeepSeek-V3": DeepInfraChat}
client = Client()  # g4f client used for the provider-based routes below
def clear():
    """Reset the stop flag and drain any chunks still queued from a cancelled run."""
    helper.stopped = False
    helper.q.put_nowait("END")
    while not helper.q.empty():
        try:
            helper.q.get(block=False)
        except Exception:
            continue
        helper.q.task_done()
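# helper.q is assumed to be a shared queue.Queue and helper.stopped a flag the
# consumer sets to cancel an in-flight stream; both live in helpers.helper.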
def gpt4(messages, response_format, model="gpt-4"):
    """Non-streaming helper. Note: the `model` argument is currently ignored;
    every call goes to Gemini 2.0 Flash via litellm."""
    if len(messages) == 1:
        messages[0]["role"] = "user"
    response = completion(
        model="gemini/gemini-2.0-flash",
        messages=messages,
        response_format=response_format,
    )
    return str(response.choices[0].message.content)
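# Example call (illustrative only; not invoked anywhere in this module, and
# the OpenAI-style JSON-mode value shown is an assumption about the caller):
#   answer = gpt4(
#       [{"content": "Reply with a JSON list of three colors."}],
#       response_format={"type": "json_object"},
#   )
# With a single message, gpt4 forces its role to "user" before the call.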
def gpt4stream(messages, model, api_keys):
    """Stream a completion for `model`, pushing incremental text chunks onto
    helper.q, followed by a final "RESULT: <full text>" sentinel."""
    print(f"-------{model}--------")
    chunk = ""
    if model in deepinframodels:
        try:
            response = client.chat.completions.create(
                provider=DeepInfraChat,
                model=model,
                messages=messages,
                stream=True,
            )
            for part in response:
                chunk += part.choices[0].delta.content or ""
                # Stop streaming chunks to the consumer once a ```json fence
                # appears; the full text still goes out in RESULT below.
                if "```json" not in chunk:
                    helper.q.put_nowait(part.choices[0].delta.content or "")
                if helper.stopped:
                    clear()
                    break
        except Exception:
            pass
        helper.q.put_nowait("RESULT: " + chunk)
elif model == "DeepSeekR1-togetherAI": | |
response = completion(model="together_ai/deepseek-ai/DeepSeek-R1", messages=messages, stream=True) | |
cunk="" | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
if "```json" not in cunk: | |
helper.q.put_nowait(part.choices[0].delta.content or "") | |
if helper.stopped: | |
clear() | |
break | |
helper.q.put_nowait("RESULT: "+cunk) | |
elif model == "DeepSeekV3-togetherAI": | |
response = completion(model="together_ai/deepseek-ai/DeepSeek-V3", messages=messages, stream=True) | |
cunk="" | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
if "```json" not in cunk: | |
helper.q.put_nowait(part.choices[0].delta.content or "") | |
if helper.stopped: | |
clear() | |
break | |
helper.q.put_nowait("RESULT: "+cunk) | |
elif model=="groq/deepseek-r1-distill-llama-70b": | |
os.environ["GROQ_API_KEY"] =random.choice(groq_api_keys) | |
response = completion(model="groq/deepseek-r1-distill-llama-70b", messages=messages, stream=True) | |
cunk="" | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
if "```json" not in cunk: | |
helper.q.put_nowait(part.choices[0].delta.content or "") | |
if helper.stopped: | |
clear() | |
break | |
helper.q.put_nowait("RESULT: "+cunk) | |
elif model=="groq/qwq-32b": | |
os.environ["GROQ_API_KEY"] =random.choice(groq_api_keys) | |
response = completion(model="groq/qwen-qwq-32b", messages=messages, stream=True) | |
cunk="" | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
if "```json" not in cunk: | |
helper.q.put_nowait(part.choices[0].delta.content or "") | |
if helper.stopped: | |
clear() | |
break | |
helper.q.put_nowait("RESULT: "+cunk) | |
elif model=="llama-3.3-70b-versatile": | |
response = completion(model="groq/llama-3.3-70b-versatile", messages=messages, stream=True) | |
cunk="" | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
if "```json" not in cunk: | |
helper.q.put_nowait(part.choices[0].delta.content or "") | |
if helper.stopped: | |
clear() | |
break | |
helper.q.put_nowait("RESULT: "+cunk) | |
    elif model in chutes_models:
        if model == "MAI-DS-R1-FP8" or model == "GLM-Z1-32B-0414":
            # These models stream their reasoning first; open a think block.
            helper.q.put_nowait("<think>")
        response = completion(
            model=f"openai/{chutes_models[model]}",
            api_key=chutes_key,
            base_url="https://llm.chutes.ai/v1",
            messages=messages,
            stream=True,
        )
        chunk = ""
        for part in response:
            text = part.choices[0].delta.content or ""
            chunk += text
            # Crude end-of-reasoning handling: any '>' in the stream is
            # rewritten into a closing </think> tag for the consumer.
            text = text.replace(">", "\n </think> \n ")
            if "```json" not in chunk:
                helper.q.put_nowait(text)
            if helper.stopped:
                clear()
                break
        helper.q.put_nowait("RESULT: " + chunk)
    elif model in github_models:
        response = completion(
            model=f"github/{github_models[model]}",
            api_key=github_key,
            messages=messages,
            stream=True,
        )
        chunk = ""
        for part in response:
            chunk += part.choices[0].delta.content or ""
            if "```json" not in chunk:
                helper.q.put_nowait(part.choices[0].delta.content or "")
            if helper.stopped:
                clear()
                break
        helper.q.put_nowait("RESULT: " + chunk)
elif "gemini" in model: | |
for key in gemini_api_keys: | |
try: | |
os.environ["GEMINI_API_KEY"] =key | |
response = completion(model=f"gemini/{model}", messages=messages, stream=True) | |
cunk="" | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
print(part.choices[0].delta.content or "", end="") | |
if "```json" not in cunk: | |
helper.q.put_nowait(part.choices[0].delta.content or "") | |
if helper.stopped: | |
clear() | |
break | |
break | |
except: | |
pass | |
print("STOPPING") | |
helper.q.put_nowait("RESULT: "+cunk) | |
elif model=="deepseek.r1" or model=="deepseek-chat": | |
cunk="" | |
if "chat" in model: | |
providers = CHAT_CORRESPONDANCE | |
model_name="deepseek-ai/DeepSeek-V3-0324" | |
else: | |
providers = REASONING_CORRESPONDANCE | |
model_name="deepseek-r1" | |
for provider in providers: | |
try: | |
response = client.chat.completions.create( | |
provider=providers[provider], | |
model=model_name, | |
messages=messages, | |
stream=True | |
# Add any other necessary parameters | |
) | |
for part in response: | |
# print(part) | |
cunk=cunk+(str(part.choices[0].delta.content) or "") | |
if helper.stopped: | |
clear() | |
break | |
if ("```json" not in cunk or "```" not in cunk) and (str(part.choices[0].delta.content) != "None"): | |
helper.q.put_nowait(str(part.choices[0].delta.content) or "") | |
break | |
except Exception as e: | |
#helper.q.put_nowait(str(e)) | |
print(e) | |
pass | |
print("STOPPING") | |
helper.q.put_nowait("RESULT: "+cunk) | |
elif model=="qwq-32b" : | |
helper.q.put_nowait("<think>") | |
cunk="" | |
providers=REASONING_QWQ | |
for provider in providers: | |
try: | |
response = client.chat.completions.create( | |
provider=providers[provider], | |
model="Qwen/QwQ-32B", | |
messages=messages, | |
stream=True | |
# Add any other necessary parameters | |
) | |
for part in response: | |
cunk=cunk+(str(part.choices[0].delta.content) or "") | |
if "```json" not in cunk or "```" not in cunk: | |
helper.q.put_nowait(str(part.choices[0].delta.content) or "") | |
if helper.stopped: | |
clear() | |
break | |
break | |
except Exception as e: | |
pass | |
helper.q.put_nowait("RESULT: "+cunk) | |
elif "DeepSeek" in model and "dev" in model: | |
cunk="" | |
if "V3" in model: | |
providers = CHAT_CORRESPONDANCE | |
else: | |
providers = REASONING_CORRESPONDANCE | |
for provider in providers: | |
try: | |
response = client.chat.completions.create( | |
provider=providers[provider], | |
model="deepseek-r1", | |
messages=messages, | |
stream=True | |
# Add any other necessary parameters | |
) | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
break | |
except Exception as e: | |
pass | |
print("STOPPING") | |
helper.q.put_nowait("RESULT: "+cunk) | |
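
if __name__ == "__main__":
    # Minimal smoke test: a sketch only, not part of the app's real entry
    # point. It assumes the environment variables above are set and that
    # helper.q is a queue.Queue-like object shared with this consumer loop.
    import threading

    msgs = [{"role": "user", "content": "Say hello in one word."}]
    worker = threading.Thread(target=gpt4stream, args=(msgs, "gemini-2.0-flash", None))
    worker.start()
    while True:
        item = helper.q.get()
        if isinstance(item, str) and item.startswith("RESULT: "):
            print("\nFULL RESULT:", item[len("RESULT: "):])
            break
        print(item, end="", flush=True)
    worker.join()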