max token setting
app.py
CHANGED
@@ -90,7 +90,7 @@ def generate_translation(system_prompt, prompt):
     inputs = tokenizer(full_prompt, return_tensors="pt").to(device)
     outputs = model.generate(
         **inputs,
-        max_new_tokens=
+        max_new_tokens=2048,
         temperature=0.7,
         top_p=0.9,
         do_sample=True
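For context, here is a minimal sketch of how the updated `generate()` call fits into `generate_translation()`. The model checkpoint, the prompt assembly, and the decoding step are assumptions (they are not shown in this diff); only the generation parameters come from the change above.

```python
# Hedged sketch: "your-model-id" is a placeholder, not the Space's actual checkpoint.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

device = "cuda" if torch.cuda.is_available() else "cpu"
tokenizer = AutoTokenizer.from_pretrained("your-model-id")                  # placeholder
model = AutoModelForCausalLM.from_pretrained("your-model-id").to(device)    # placeholder

def generate_translation(system_prompt, prompt):
    # Prompt assembly is assumed; the real app may use a chat template instead.
    full_prompt = f"{system_prompt}\n\n{prompt}"
    inputs = tokenizer(full_prompt, return_tensors="pt").to(device)
    outputs = model.generate(
        **inputs,
        max_new_tokens=2048,   # raised cap so long translations are not truncated
        temperature=0.7,
        top_p=0.9,
        do_sample=True,
    )
    # Decode only the newly generated tokens, skipping the echoed prompt.
    new_tokens = outputs[0][inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)
```

Note that `max_new_tokens` bounds only the generated continuation, not the prompt, so a value of 2048 leaves room for long translations while the sampling settings (`temperature=0.7`, `top_p=0.9`) are unchanged.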