Hieucyber2208 committed on
Commit 4af5fd9 · verified · 1 Parent(s): 93bc332

Update src/generation/llm.py

Files changed (1)
  1. src/generation/llm.py +1 -5
src/generation/llm.py CHANGED
@@ -99,12 +99,8 @@ class LLM:
             )
             # Decode the generated tokens
             response = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
-            response = response.strip()
-            keyword = "assistant"
-            index = response.find(keyword)
-            output = response[index + len(keyword):].strip() if index != -1 else response
             print("Response generated successfully!")
-            return output
+            return response.split('assistant')[2]
         except Exception as e:
             raise RuntimeError(f"Failed to generate response: {str(e)}")

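Note on the new one-liner: response.split('assistant')[2] returns the model's reply only if the literal string "assistant" occurs exactly twice in the decoded text before the reply and never inside the reply itself (for example, once in the system prompt and once as the reply's role header). A minimal sketch of that assumption, using a made-up chat transcript rather than this repository's actual chat template:

# Illustrative only: the decoded text below is an assumed chat transcript,
# not the real output format of this project's tokenizer/template.
decoded = (
    "system\nYou are a helpful assistant.\n"   # 1st occurrence of "assistant"
    "user\nWhat is 2 + 2?\n"
    "assistant\n4"                             # 2nd occurrence: the reply's role header
)

parts = decoded.split("assistant")   # three segments -> indices 0, 1, 2
print(parts[2])                      # "\n4": the reply, no longer stripped of whitespace

If "assistant" appears a different number of times, index 2 points at the wrong segment or raises an IndexError, whereas the removed find()-based code fell back to the full response when the keyword was missing.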