|
from openai import OpenAI |
|
from params import OPENAI_MODEL, OPENAI_API_KEY |
|
import llamanet |
|
|
|
|
|
# Boot the local llamanet OpenAI-compatible server, pointing it at the GGUF
# model hosted on Hugging Face (llamanet handles download/caching).
llamanet.run("start", "https://huggingface.co/arcee-ai/Arcee-Spark-GGUF/blob/main/Arcee-Spark-IQ4_XS.gguf")




# OpenAI SDK client used by send_to_chatgpt below.
# NOTE(review): with llamanet started above, this presumably targets the
# local server rather than api.openai.com — confirm how params/llamanet
# configure the base URL; OPENAI_API_KEY may just be a placeholder.
client = OpenAI(api_key=OPENAI_API_KEY)
|
|
|
def send_to_chatgpt(msg_list):
    """Stream a chat completion for ``msg_list`` and return the full text.

    Args:
        msg_list: list of chat messages (dicts with "role"/"content") passed
            straight through to the chat completions API.

    Returns:
        A ``(text, None)`` tuple. On failure the first element is an
        ``"Error: ..."`` string — errors are reported in-band because
        callers consume this tuple shape rather than catching exceptions.
    """
    try:
        completion = client.chat.completions.create(
            model=OPENAI_MODEL,
            messages=msg_list,
            temperature=0.6,
            stream=True,
        )

        # Collect streamed deltas in a list and join once at the end:
        # avoids quadratic string += accumulation on long responses.
        parts = []
        for chunk in completion:
            # Some OpenAI-compatible backends emit keep-alive chunks with an
            # empty `choices` list; guard before indexing to avoid IndexError.
            if chunk.choices and chunk.choices[0].delta.content is not None:
                parts.append(chunk.choices[0].delta.content)

        return "".join(parts), None

    except Exception as e:
        # Broad catch is deliberate: any SDK/network failure is converted to
        # an in-band error string so the caller's tuple contract holds.
        print(f"Error in send_to_chatgpt: {str(e)}")
        return f"Error: {str(e)}", None
|
|
|
def send_to_llm(provider, msg_list):
    """Route ``msg_list`` to the backend selected by ``provider``.

    Only the "llamanet" provider is recognized; any other value raises
    ``ValueError``.
    """
    if provider != "llamanet":
        raise ValueError(f"Unknown provider: {provider}")
    return send_to_chatgpt(msg_list)
|
|