# SwitchAI / models.py
import os

import openai
import pandas as pd
from dotenv import load_dotenv

# Load environment variables from a local .env file.
load_dotenv()

# Path to the CSV file used to log every query/answer pair (set via the DATA env var).
path = os.getenv("DATA")
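
# Illustrative .env layout (placeholder values, not taken from this repo;
# BASE_URL is assumed to be OpenRouter's OpenAI-compatible endpoint):
#   DATA=data/history.csv
#   OPENROUTER_API_KEY=sk-or-...
#   BASE_URL=https://openrouter.ai/api/v1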
def add_new(model, name, query, prompt, answer):
    """Append a single query/answer record to the CSV log at `path`."""
    data = {"name": name, "model": model, "query": query, "prompt": prompt, "answer": answer}
    row = pd.DataFrame([data])
    if not os.path.isfile(path):
        # Create the data directory (if needed) and an empty CSV containing only the header row.
        os.makedirs(os.path.dirname(path) or ".", exist_ok=True)
        with open(path, "w") as file:
            file.write("name,model,query,prompt,answer\n")
        print("File Created")
    old_df = pd.read_csv(path)
    new_df = pd.concat([old_df, row], ignore_index=True)
    new_df.to_csv(path, index=False)
    print("Saved.")
def get_data():
    """Return the full CSV log rendered as a plain-text table."""
    df = pd.read_csv(path)
    return df.to_string()

def get_text():
    """Return the raw text contents of the CSV log."""
    with open(path, "r", encoding="utf-8") as file:
        text = file.read()
    return text
class ModelChain:
    """Wrapper around an OpenAI-compatible client (OpenRouter) with per-model message history lists."""

    def __init__(self):
        # Build the client from the OpenRouter credentials in the environment.
        self.client = self.generate_client(os.getenv("OPENROUTER_API_KEY"), os.getenv("BASE_URL"))
        # Conversation history buffers for the DeepSeek and Gemini models.
        self.deepseek_messages = []
        self.gemini_messages = []

    def generate_client(self, api_key, url):
        # The OpenAI SDK talks to any OpenAI-compatible endpoint when given a custom base_url.
        return openai.OpenAI(
            api_key=api_key,
            base_url=url,
        )
    def generate_response(self, model, name, query, prompt):
        """Send `query` to `model` with `prompt` as the system message, log the exchange, and return the answer."""
        messages = [{"role": "system", "content": prompt},
                    {"role": "user", "content": query}]
        try:
            result = self.client.chat.completions.create(
                model=model,
                messages=messages,
            )
            answer = result.choices[0].message.content
            # Persist the exchange to the CSV log before returning.
            add_new(model, name, query, prompt, answer)
            return answer
        except Exception as e:
            print(f"Response Error : {e}")
            # Compare against the exception text, not the exception object itself.
            if "'NoneType' object is not subscriptable" in str(e):
                return "This AI Model might be busy at the moment, try another AI Model"
            else:
                return f"Response Error : {e}"

def main():
    # Smoke test: construct the chain with credentials from the environment.
    chain = ModelChain()


if __name__ == "__main__":
    main()