Update src/generation/llm.py
src/generation/llm.py  +1 -1
@@ -100,7 +100,7 @@ class LLM:
             # Decode the generated tokens
             response = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
             print("Response generated successfully!")
-            return response
+            return response.strip()
         except Exception as e:
             raise RuntimeError(f"Failed to generate response: {str(e)}")
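The one-line change replaces `return response` with `return response.strip()`, so the decoded text is returned without the leading/trailing whitespace or newlines that `tokenizer.decode()` often leaves around the generated text. Below is a minimal sketch of how the surrounding `generate()` method might look, assuming a Hugging Face transformers-style model and tokenizer; everything outside the hunk shown above (constructor, parameter names, the placeholder checkpoint) is an assumption, not taken from the repository.

# Sketch only: class internals outside the diff hunk are assumed, not confirmed.
from transformers import AutoModelForCausalLM, AutoTokenizer


class LLM:
    def __init__(self, model_name: str = "gpt2"):
        # "gpt2" is a placeholder checkpoint for illustration only.
        self.tokenizer = AutoTokenizer.from_pretrained(model_name)
        self.model = AutoModelForCausalLM.from_pretrained(model_name)

    def generate(self, prompt: str, max_new_tokens: int = 128) -> str:
        try:
            inputs = self.tokenizer(prompt, return_tensors="pt")
            outputs = self.model.generate(**inputs, max_new_tokens=max_new_tokens)
            # Decode the generated tokens
            response = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
            print("Response generated successfully!")
            # .strip() drops the surrounding whitespace/newlines the decoder can emit,
            # which is the behavior introduced by this commit.
            return response.strip()
        except Exception as e:
            raise RuntimeError(f"Failed to generate response: {str(e)}")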