Hieucyber2208 committed (verified)
Commit 64c47c6 · 1 parent: b14ef5c

Update src/generation/llm.py

Files changed (1): src/generation/llm.py (+2 -5)
src/generation/llm.py CHANGED
@@ -93,12 +93,9 @@ class LLM:
         outputs = self.llm.generate(
             **inputs,
             max_new_tokens=max_length,
-            temperature=0.3,
-            do_sample=False,
-            top_p=1.0,
-            top_k=1,
+            temperature=0.7,
+            do_sample=True,
             pad_token_id=self.tokenizer.eos_token_id,
-            eos_token_id=self.tokenizer.eos_token_id,
         )
         # Decode the generated tokens
         response = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
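
In effect, the commit switches the generate() call from deterministic greedy decoding (do_sample=False, top_k=1) to stochastic sampling at temperature 0.7, and drops the explicit eos_token_id argument, relying on the model's default instead. Below is a minimal standalone sketch of the updated call using the Hugging Face transformers API; the "gpt2" checkpoint, the prompt, and the max_length value are illustrative placeholders, not values taken from this repository.

# Minimal sketch of the updated generation call (placeholders: "gpt2",
# prompt text, and max_length are not from this repo).
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

prompt = "Explain retrieval-augmented generation in one sentence."
inputs = tokenizer(prompt, return_tensors="pt")
max_length = 128  # passed as max_new_tokens, mirroring the diff

outputs = model.generate(
    **inputs,
    max_new_tokens=max_length,
    temperature=0.7,  # added in this commit: softens the token distribution
    do_sample=True,   # added in this commit: sample instead of greedy decoding
    pad_token_id=tokenizer.eos_token_id,
)

# Decode the generated tokens
response = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(response)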