cahya committed on
Commit 08000e6 · 1 Parent(s): a0df823

update max_length

Files changed (1)
  app/app.py +2 -2
app/app.py CHANGED
@@ -70,7 +70,7 @@ def get_generator(model_name: str):
 # Disable the st.cache for this function due to issue on newer version of streamlit
 # @st.cache(suppress_st_warning=True, hash_funcs={tokenizers.Tokenizer: id})
 def process(text_generator, tokenizer, title: str, keywords: str, text: str,
-            max_length: int = 200, do_sample: bool = True, top_k: int = 50, top_p: float = 0.95,
+            max_length: int = 250, do_sample: bool = True, top_k: int = 50, top_p: float = 0.95,
             temperature: float = 1.0, max_time: float = 120.0, seed=42, repetition_penalty=1.0,
             penalty_alpha = 0.6):
     # st.write("Cache miss: process")
@@ -142,7 +142,7 @@ if prompt_group_name in ["Indonesian Newspaper"]:
 
     max_length = st.sidebar.number_input(
         "Maximum length",
-        value=200,
+        value=250,
         max_value=512,
         help="The maximum length of the sequence to be generated."
     )
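
For context, a minimal sketch of how a sidebar-controlled max_length typically feeds a Hugging Face text-generation pipeline in a Streamlit app like this one. The model name, prompt widget, and generate call below are illustrative assumptions, not the app's actual code; the real app builds its generator via get_generator(model_name) and passes the value through process().

import streamlit as st
from transformers import pipeline

# Placeholder model for illustration; the actual app loads its own generator.
generator = pipeline("text-generation", model="gpt2")

# Mirrors the sidebar control touched in this commit: default raised from 200 to 250.
max_length = st.sidebar.number_input(
    "Maximum length",
    value=250,
    max_value=512,
    help="The maximum length of the sequence to be generated."
)

text = st.text_area("Text", "Jakarta - ")
if st.button("Generate"):
    # The sidebar value caps the generated sequence length, matching the new
    # max_length default in process().
    output = generator(
        text,
        max_length=int(max_length),
        do_sample=True,
        top_k=50,
        top_p=0.95,
        temperature=1.0,
        repetition_penalty=1.0,
    )
    st.write(output[0]["generated_text"])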