Hieucyber2208 committed
Commit 034a76b (verified) · Parent: 8072d6c

Update src/generation/llm.py

Files changed (1):
  1. src/generation/llm.py (+1, -1)
src/generation/llm.py CHANGED
@@ -100,7 +100,7 @@ class LLM:
             # Decode the generated tokens
             response = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
             print("Response generated successfully!")
-            return response
+            return response.strip()
         except Exception as e:
             raise RuntimeError(f"Failed to generate response: {str(e)}")
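
For context, a minimal sketch of how the surrounding generate method might look, assuming a Hugging Face transformers-style model and tokenizer. The method name, parameters, and generation settings below are assumptions for illustration; only the decode, print, return, and except lines appear in the actual diff.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

class LLM:
    def __init__(self, model_name: str):
        # Hypothetical constructor: the real llm.py may load the model differently
        self.tokenizer = AutoTokenizer.from_pretrained(model_name)
        self.model = AutoModelForCausalLM.from_pretrained(model_name)

    def generate(self, prompt: str, max_new_tokens: int = 256) -> str:
        try:
            # Tokenize the prompt and run generation (assumed settings)
            inputs = self.tokenizer(prompt, return_tensors="pt")
            with torch.no_grad():
                outputs = self.model.generate(**inputs, max_new_tokens=max_new_tokens)
            # Decode the generated tokens
            response = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
            print("Response generated successfully!")
            # The change in this commit: strip leading/trailing whitespace
            # that the decoder may leave around the generated text
            return response.strip()
        except Exception as e:
            raise RuntimeError(f"Failed to generate response: {str(e)}")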