acecalisto3 committed
Commit 86a0af2 · verified · 1 Parent(s): d0c838a

Update app.py

Files changed (1)
  1. app.py +4 -3
app.py CHANGED
@@ -295,7 +295,7 @@ agents =[
     "PYTHON_CODE_DEV"
 ]
 def generate(
-    prompt, history, agent_name=agents[0], sys_prompt="", temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
+    prompt, history, agent_name=agents[0], sys_prompt="", temperature=0.9, max_new_tokens=2048, top_p=0.95, repetition_penalty=1.0,
 ):
     seed = random.randint(1,1111111111111111)
 
@@ -314,14 +314,14 @@ def generate(
 
     generate_kwargs = dict(
         temperature=temperature,
-        max_new_tokens=max_new_tokens,
+        max_new_tokens=max_new_tokens,  # Default raised from 256 to 2048 in the signature above
         top_p=top_p,
         repetition_penalty=repetition_penalty,
         do_sample=True,
         seed=seed,
     )
 
-    formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
+    formatted_prompt = format_prompt(prompt, history, max_history_turns=5)  # Truncated history
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
 
@@ -331,6 +331,7 @@ def generate(
     return output
 
 
+
 additional_inputs=[
     gr.Dropdown(
         label="Agents",
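
The updated call site assumes a format_prompt helper that accepts a max_history_turns keyword and keeps only the most recent conversation turns; that helper is not shown in this diff. A minimal sketch of what it might look like, assuming the Mixtral-style [INST] ... [/INST] chat format these Spaces typically use (only the function name and call signature come from the diff above, everything else is an assumption):

    def format_prompt(message, history, max_history_turns=5):
        # Keep only the most recent turns so the prompt stays short even though
        # max_new_tokens now defaults to 2048 for the completion.
        recent_history = history[-max_history_turns:] if max_history_turns else history
        prompt = "<s>"
        for user_turn, bot_turn in recent_history:
            prompt += f"[INST] {user_turn} [/INST] {bot_turn}</s> "
        prompt += f"[INST] {message} [/INST]"
        return prompt

Truncating the history and raising max_new_tokens pull in the same direction: fewer prompt tokens leave more of the model's context window for the longer completions the new default allows.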