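# Disabled experiment: stream one chat completion through g4f, trying the
# reasoning providers in REASONING_CORRESPONDANCE in order and stopping at
# the first one that responds.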
# from g4f.Provider import DeepInfraChat, Glider, Liaobots, Blackbox, ChatGptEs, LambdaChat, TypeGPT
# DeepInfraChat.models = [
#     "google/gemma-3-27b-it", "deepseek-ai/DeepSeek-R1-Turbo", "Qwen/QwQ-32B",
#     "deepseek-ai/DeepSeek-R1", "deepseek-ai/DeepSeek-V3-0324",
#     "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
#     "meta-llama/Llama-4-Scout-17B-16E-Instruct", "microsoft/Phi-4-multimodal-instruct",
# ]
# deepinframodels = [
#     "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8", "microsoft/Phi-4-multimodal-instruct",
#     "google/gemma-3-27b-it", "meta-llama/Llama-4-Scout-17B-16E-Instruct",
# ]
# REASONING_CORRESPONDANCE = {"DeepSeek-R1-Glider": Glider, "DeepSeekR1-LAMBDA": LambdaChat,
#                             "DeepSeekR1": DeepInfraChat, "deepseek-slow": TypeGPT}
# REASONING_QWQ = {"qwq-32b": DeepInfraChat}
# from g4f.client import Client
# client = Client()
# cunk=""
# providers=REASONING_CORRESPONDANCE
# model_name="deepseek-r1"
# for provider in providers:
# try:
# response = client.chat.completions.create(
# provider=providers[provider],
# model=model_name,
# messages=[{"role": "user", "content": f"Hi!"}],
# stream=True
# # Add any other necessary parameters
# )
# for part in response:
# # print(part)
# cunk=cunk+(str(part.choices[0].delta.content) or "")
# print(str(part.choices[0].delta.content),end="")
# break
# except Exception as e:
# print(f"Error with {provider}: {e}")
# pass
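# The same fallback loop should also cover the QwQ mapping defined above; a
# minimal sketch, assuming g4f accepts "qwq-32b" as the model id for
# DeepInfraChat, just as "deepseek-r1" is used with the providers above:
# providers = REASONING_QWQ
# model_name = "qwq-32b"
# # ...then rerun the provider loop unchanged.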