import json
import os
import random

from g4f.client import Client
from g4f.Provider import DeepInfraChat, Glider, LambdaChat, TypeGPT
from litellm import completion

# API keys are read from the environment at import time. A missing
# GEMINI_KEY_LIST / GROQ_API_KEYS raises immediately (json.loads(None))
# rather than failing later mid-request.
gemini_api_keys = json.loads(os.environ.get("GEMINI_KEY_LIST"))
groq_api_keys = json.loads(os.environ.get("GROQ_API_KEYS"))
chutes_key = os.environ.get("CHUTES_API_KEY")
github_key = os.environ.get("GITHUB_API_KEY")

# Restrict the DeepInfra provider to the models this module routes to.
DeepInfraChat.models = [
    "Qwen/Qwen3-235B-A22B",
    "Qwen/Qwen3-30B-A3B",
    "Qwen/Qwen3-32B",
    "google/gemma-3-27b-it",
    "deepseek-ai/DeepSeek-R1-Turbo",
    "Qwen/QwQ-32B",
    "deepseek-ai/DeepSeek-R1",
    "deepseek-ai/DeepSeek-V3-0324",
    "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
    "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    "microsoft/Phi-4-multimodal-instruct",
]

# Models served directly through DeepInfra under their upstream names.
deepinframodels = [
    "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
    "microsoft/Phi-4-multimodal-instruct",
    "google/gemma-3-27b-it",
    "meta-llama/Llama-4-Scout-17B-16E-Instruct",
]

# Friendly name -> chutes.ai model identifier.
chutes_models = {
    "Qwen3-235B": "Qwen/Qwen3-235B-A22B",
    "MAI-DS-R1-FP8": "microsoft/MAI-DS-R1-FP8",
    "DeepSeek-V3-0324": "deepseek-ai/DeepSeek-V3-0324",
    "deepseek-reasoner": "deepseek-ai/DeepSeek-R1",
    "GLM-4-32B-0414": "THUDM/GLM-4-32B-0414",
    "GLM-Z1-32B-0414": "THUDM/GLM-Z1-32B-0414",
    "DeepSeek-R1T-Chimera": "tngtech/DeepSeek-R1T-Chimera",
    "DeepSeek-R1-Zero": "deepseek-ai/DeepSeek-R1-Zero",
}

# Friendly name -> GitHub Models identifier.
github_models = {
    "gpt4.1": "gpt-4.1",
    "gpt-4o": "gpt-4o",
    "o4-mini": "o4-mini",
}

# Ordered fallback chains of g4f providers for the DeepSeek routes.
REASONING_CORRESPONDANCE = {
    "DeepSeek-R1-Glider": Glider,
    "DeepSeekR1-LAMBDA": LambdaChat,
    "DeepSeekR1": DeepInfraChat,
    "deepseek-slow": TypeGPT,
}
REASONING_QWQ = {"qwq-32b": DeepInfraChat}
CHAT_CORRESPONDANCE = {"DeepSeek-V3": DeepInfraChat}

# Pick one Gemini key for the non-streaming chat() path.
os.environ["GEMINI_API_KEY"] = random.choice(gemini_api_keys)

client = Client()


def chat(messages, response_format, model="gpt-4"):
    """Run a non-streaming completion against Gemini 2.0 Flash.

    `model` is accepted for interface compatibility but unused: every
    request goes to gemini/gemini-2.0-flash. Returns the response
    content as a string.
    """
    if len(messages) == 1:
        # A single-message conversation must be attributed to the user.
        messages[0]["role"] = "user"
    response = completion(
        model="gemini/gemini-2.0-flash",
        messages=messages,
        response_format=response_format,
    )
    return str(response.choices[0].message.content)


def _stream_with_result(response):
    """Relay delta chunks from a streaming completion response.

    Chunks are yielded live until a ```json fence appears in the
    accumulated text (the JSON payload is withheld from the live
    stream); a final "RESULT: <full text>" sentinel is always yielded
    last. None deltas are treated as empty instead of being
    stringified into the transcript.
    """
    buffer = ""
    for part in response:
        piece = part.choices[0].delta.content or ""
        buffer += piece
        # NOTE: the original `"```json" not in buf or "```" not in buf`
        # is logically equivalent to this single test, since "```json"
        # always contains "```".
        if "```json" not in buffer:
            yield piece
    yield "RESULT: " + buffer


def chatstream(messages, model, api_keys):
    """Stream a chat completion, routing `model` to the right backend.

    Yields text chunks as they arrive and always finishes with a
    "RESULT: <full text>" sentinel carrying the complete response.
    `api_keys` is accepted for interface compatibility but unused;
    keys come from the module-level configuration above.
    """
    print(f"-------{model}--------")
    cunk = ""

    if model in deepinframodels:
        # Best effort: provider errors are swallowed and whatever text
        # was accumulated so far is still reported via the sentinel.
        try:
            response = client.chat.completions.create(
                provider=DeepInfraChat,
                model=model,
                messages=messages,
                stream=True,
            )
            for part in response:
                piece = part.choices[0].delta.content or ""
                cunk += piece
                if "```json" not in cunk:
                    yield piece
        except Exception:
            pass
        yield "RESULT: " + cunk

    elif model in ("Qwen3-235B-A22B", "Qwen3-30B-A3B", "Qwen3-32B"):
        # Fix: the original stringified None deltas ("None") and appended
        # them to the transcript; empty deltas are now skipped.
        response = client.chat.completions.create(
            provider=DeepInfraChat,
            model=f"Qwen/{model}",
            messages=messages,
            stream=True,
        )
        yield from _stream_with_result(response)

    elif model == "DeepSeekR1-togetherAI":
        yield from _stream_with_result(
            completion(model="together_ai/deepseek-ai/DeepSeek-R1", messages=messages, stream=True)
        )

    elif model == "DeepSeekV3-togetherAI":
        yield from _stream_with_result(
            completion(model="together_ai/deepseek-ai/DeepSeek-V3", messages=messages, stream=True)
        )

    elif model == "groq/deepseek-r1-distill-llama-70b":
        # Rotate the Groq key on every call to spread quota usage.
        os.environ["GROQ_API_KEY"] = random.choice(groq_api_keys)
        yield from _stream_with_result(
            completion(model="groq/deepseek-r1-distill-llama-70b", messages=messages, stream=True)
        )

    elif model == "groq/qwq-32b":
        os.environ["GROQ_API_KEY"] = random.choice(groq_api_keys)
        yield from _stream_with_result(
            completion(model="groq/qwen-qwq-32b", messages=messages, stream=True)
        )

    elif model == "llama-3.3-70b-versatile":
        # NOTE(review): unlike the other Groq routes this one does not
        # rotate GROQ_API_KEY — presumably relies on a previously set
        # key. Confirm whether that omission is intentional.
        yield from _stream_with_result(
            completion(model="groq/llama-3.3-70b-versatile", messages=messages, stream=True)
        )

    elif model in chutes_models:
        response = completion(
            model=f"openai/{chutes_models[model]}",
            api_key=chutes_key,
            base_url="https://llm.chutes.ai/v1",
            messages=messages,
            stream=True,
        )
        if model in ("MAI-DS-R1-FP8", "GLM-Z1-32B-0414", "DeepSeek-R1T-Chimera", "QwQ-32B-ArliAI-RpR-v1"):
            # Reasoning models: lead with a near-blank line so the
            # thinking section renders separately downstream.
            yield " \n"
        for part in response:
            # Fix: the original filtered with `"None" not in x`, which
            # also dropped legitimate chunks containing the substring
            # "None"; None deltas are now handled explicitly instead.
            piece = part.choices[0].delta.content or ""
            cunk += piece
            if "```json" not in cunk:
                yield piece
                print(piece, end="")
        yield "RESULT: " + cunk

    elif model in github_models:
        yield from _stream_with_result(
            completion(
                model=f"github/{github_models[model]}",
                api_key=github_key,
                messages=messages,
                stream=True,
            )
        )

    elif "gemini" in model:
        # Try each key in the pool; the first key that completes the
        # whole stream wins. NOTE(review): a key that fails mid-stream
        # falls through to the next key and the transcript restarts, so
        # consumers may see duplicated chunks — confirm acceptable.
        for key in gemini_api_keys:
            try:
                os.environ["GEMINI_API_KEY"] = key
                response = completion(model=f"gemini/{model}", messages=messages, stream=True)
                cunk = ""
                for part in response:
                    piece = part.choices[0].delta.content or ""
                    cunk += piece
                    print(piece, end="")
                    if "```json" not in cunk:
                        yield piece
                break
            except Exception:
                pass
        print("STOPPING")
        yield "RESULT: " + cunk

    elif model in ("deepseek.r1", "deepseek-chat"):
        # Walk the provider fallback chain until one streams successfully.
        if "chat" in model:
            providers = CHAT_CORRESPONDANCE
            model_name = "deepseek-ai/DeepSeek-V3-0324"
        else:
            providers = REASONING_CORRESPONDANCE
            model_name = "deepseek-r1"
        for provider in providers:
            try:
                response = client.chat.completions.create(
                    provider=providers[provider],
                    model=model_name,
                    messages=messages,
                    stream=True,
                )
                for part in response:
                    # Fix: None deltas are skipped instead of appending
                    # the literal string "None" to the transcript.
                    piece = part.choices[0].delta.content or ""
                    cunk += piece
                    if "```json" not in cunk:
                        yield piece
                break
            except Exception as e:
                print(e)
        print("STOPPING")
        yield "RESULT: " + cunk

    elif model == "qwq-32b":
        yield ""
        for provider in REASONING_QWQ:
            try:
                response = client.chat.completions.create(
                    provider=REASONING_QWQ[provider],
                    model="Qwen/QwQ-32B",
                    messages=messages,
                    stream=True,
                )
                for part in response:
                    piece = part.choices[0].delta.content or ""
                    cunk += piece
                    if "```json" not in cunk:
                        yield piece
                break
            except Exception:
                pass
        yield "RESULT: " + cunk

    elif "DeepSeek" in model and "dev" in model:
        # Dev route: consume the stream silently and report only the
        # aggregated RESULT sentinel.
        providers = CHAT_CORRESPONDANCE if "V3" in model else REASONING_CORRESPONDANCE
        for provider in providers:
            try:
                response = client.chat.completions.create(
                    provider=providers[provider],
                    model="deepseek-r1",
                    messages=messages,
                    stream=True,
                )
                for part in response:
                    cunk += part.choices[0].delta.content or ""
                break
            except Exception:
                pass
        print("STOPPING")
        yield "RESULT: " + cunk