omkar56 committed
Commit f524765 · Parent: 0093082

Update main.py

Files changed (1):
  1. main.py +2 -2
main.py CHANGED
@@ -27,7 +27,7 @@ max_new_tokens = 2048
 def generater(message, history, temperature, top_p, top_k):
     prompt = "<s>"
     for user_message, assistant_message in history:
-        prompt += model.config["promptTemplate"].format(user_message)
+        prompt += model.config["systemPrompt"] + model.config["promptTemplate"].format(user_message)
         prompt += assistant_message + "</s>"
     prompt += model.config["promptTemplate"].format(message)
     print("[prompt]",prompt)
@@ -65,4 +65,4 @@ def generate_text(
     history = [] # You might need to handle this based on your actual usage
     generatedOutput = generater(message, history, temperature, top_p, top_k)
 
-    return {"generated_text": generatedOutput}
+    return {"generated_text": generatedOutput[-1]}