Bofandra committed on
Commit
5906656
·
verified ·
1 Parent(s): 8b25e9d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -6
app.py CHANGED
@@ -16,6 +16,7 @@ def get_detailed_instruct(task_description: str, query: str) -> str:
16
  return f'Instruct: {task_description}\nQuery: {query}'
17
 
18
  def respond(message,
 
19
  max_tokens = 2048,
20
  temperature = 0.7,
21
  top_p = 0.95,
@@ -37,13 +38,11 @@ def respond(message,
37
  print(time.time())
38
 
39
  #history from chat session
40
- """
41
  for val in history:
42
  if val[0]:
43
  messages.append({"role": "user", "content": val[0]})
44
  if val[1]:
45
  messages.append({"role": "assistant", "content": val[1]})
46
- """
47
 
48
  #latest user question
49
  from googletrans import Translator
@@ -118,7 +117,7 @@ def respond(message,
118
  print("An error occurred:", error)
119
  yield response
120
  """
121
- demo = gr.Interface(
122
  fn=respond,
123
  additional_inputs=[
124
  gr.Slider(minimum=1, maximum=2048, value=2048, step=1, label="Max new tokens"),
@@ -130,9 +129,7 @@ demo = gr.Interface(
130
  step=0.05,
131
  label="Top-p (nucleus sampling)",
132
  ),
133
- ],
134
- inputs="textbox",
135
- outputs="textbox",
136
  cache_examples="lazy",
137
  examples=[
138
  ["Why is men created?"],
 
16
  return f'Instruct: {task_description}\nQuery: {query}'
17
 
18
  def respond(message,
19
+ history: list[tuple[str, str]],
20
  max_tokens = 2048,
21
  temperature = 0.7,
22
  top_p = 0.95,
 
38
  print(time.time())
39
 
40
  #history from chat session
 
41
  for val in history:
42
  if val[0]:
43
  messages.append({"role": "user", "content": val[0]})
44
  if val[1]:
45
  messages.append({"role": "assistant", "content": val[1]})
 
46
 
47
  #latest user question
48
  from googletrans import Translator
 
117
  print("An error occurred:", error)
118
  yield response
119
  """
120
+ demo = gr.ChatInterface(
121
  fn=respond,
122
  additional_inputs=[
123
  gr.Slider(minimum=1, maximum=2048, value=2048, step=1, label="Max new tokens"),
 
129
  step=0.05,
130
  label="Top-p (nucleus sampling)",
131
  ),
132
+ ],
 
 
133
  cache_examples="lazy",
134
  examples=[
135
  ["Why is men created?"],