Delta-Vector committed on
Commit
620ce20
·
verified ·
1 Parent(s): 44a5426

Upload dan-chat-advanced.py

Browse files
Files changed (1) hide show
  1. dan-chat-advanced.py +13 -5
dan-chat-advanced.py CHANGED
@@ -20,6 +20,7 @@ turn_separator = "\n"
20
  system_prefix = "<|im_start|>system\n"
21
  user_prefix = "<|im_start|>user\n"
22
  assistant_prefix = "<|im_start|>assistant\n"
 
23
 
24
  class DanChatMLPromptTokenizingStrategy(PromptTokenizingStrategy):
25
  def __init__(self, prompter, tokenizer, train_on_inputs, sequence_len, *args, **kwargs):
@@ -41,12 +42,19 @@ class DanChatMLPromptTokenizingStrategy(PromptTokenizingStrategy):
41
  not_first_turn = False
42
 
43
  for role, message, loss, prefix in prompt_parts:
44
- prefix = prefix or ""
45
- message = prefix + message
 
46
 
47
- if role in ["system", "user", "human"]:
48
- role_prefix = system_prefix if role == "system" else user_prefix
49
- res = self._tokenize_with_turn(role_prefix, message, not_first_turn)
 
 
 
 
 
 
50
  labels = [IGNORE_TOKEN_ID] * len(res["input_ids"])
51
 
52
  elif role in ["model", "gpt"]:
 
20
  system_prefix = "<|im_start|>system\n"
21
  user_prefix = "<|im_start|>user\n"
22
  assistant_prefix = "<|im_start|>assistant\n"
23
+ tool_prefix = "<|im_start|>tool\n"
24
 
25
  class DanChatMLPromptTokenizingStrategy(PromptTokenizingStrategy):
26
  def __init__(self, prompter, tokenizer, train_on_inputs, sequence_len, *args, **kwargs):
 
42
  not_first_turn = False
43
 
44
  for role, message, loss, prefix in prompt_parts:
45
+ # If prefix is not defined, set it to an empty string
46
+ if prefix is None:
47
+ prefix = ""
48
 
49
+ if role in ["system", "user", "human", "tool"]:
50
+ # Set the role prefix based on the role
51
+ if role == "system":
52
+ role_prefix = system_prefix
53
+ elif role == "user" or role == "human":
54
+ role_prefix = user_prefix
55
+ elif role == "tool":
56
+ role_prefix = tool_prefix
57
+ res = self._tokenize_with_turn(role_prefix, prefix + message, not_first_turn)
58
  labels = [IGNORE_TOKEN_ID] * len(res["input_ids"])
59
 
60
  elif role in ["model", "gpt"]: