File size: 1,373 Bytes
e636070 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 |
from .BaseLLM import BaseLLM
from openai import OpenAI
import os
class DeepSeek(BaseLLM):
    """Chat wrapper for the DeepSeek API (OpenAI-compatible endpoint).

    Accumulates a conversation in ``self.messages`` and sends it via the
    ``openai`` client pointed at https://api.deepseek.com. The API key is
    read from the ``DEEPSEEK_API_KEY`` environment variable.
    """

    def __init__(self, model="deepseek-chat"):
        super().__init__()
        # DeepSeek exposes an OpenAI-compatible API, so the stock OpenAI
        # client works with only a base_url override.
        self.client = OpenAI(
            api_key=os.getenv("DEEPSEEK_API_KEY"),
            base_url="https://api.deepseek.com",
        )
        self.model_name = model
        self.messages = []

    def initialize_message(self):
        """Clear the accumulated conversation history."""
        self.messages = []

    def ai_message(self, payload):
        """Append a model turn.

        BUGFIX: the OpenAI-compatible API's role for model output is
        "assistant"; the previous "ai" role is not a valid role name.
        """
        self.messages.append({"role": "assistant", "content": payload})

    def system_message(self, payload):
        """Append a system instruction to the conversation."""
        self.messages.append({"role": "system", "content": payload})

    def user_message(self, payload):
        """Append a user turn to the conversation."""
        self.messages.append({"role": "user", "content": payload})

    def get_response(self, temperature=0.8):
        """Send the accumulated conversation and return the reply text.

        BUGFIX: previously this method ignored ``self.messages``,
        ``self.model_name`` and ``temperature`` entirely, sending a
        hardcoded "Hello" exchange to the hardcoded "deepseek-chat"
        model — so chat() always returned a reply to "Hello".
        """
        response = self.client.chat.completions.create(
            model=self.model_name,
            messages=self.messages,
            temperature=temperature,
            stream=False,
        )
        return response.choices[0].message.content

    def chat(self, text):
        """One-shot chat: reset history, send ``text``, return the reply."""
        self.initialize_message()
        self.user_message(text)
        return self.get_response()

    def print_prompt(self):
        """Print each accumulated message dict, one per line (debug aid)."""
        for message in self.messages:
            print(message)