rafaa committed
Commit b0d2e92 · 1 Parent(s): e7ed220

Update app.py

Files changed (1)
  1. app.py +15 -26
app.py CHANGED
@@ -3,13 +3,19 @@ import gradio as gr
 
 client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
 
-def format_prompt(message, history):
-    prompt = "<s>"
-    for user_prompt, bot_response in history:
-        prompt += f"[INST] {user_prompt} [/INST]"
-        prompt += f" {bot_response}</s> "
-    prompt += f"[INST] {message} [/INST]"
-    return prompt
+def format_prompt(message, history, system_prompt=None):
+    prompt = "<s>"
+
+    # Adding system prompt if provided
+    if system_prompt:
+        prompt += f"[SYS] {system_prompt} [/SYS]"
+
+    for user_prompt, bot_response in history:
+        prompt += f"[INST] {user_prompt} [/INST]"
+        prompt += f" {bot_response}</s> "
+
+    prompt += f"[INST] {message} [/INST]"
+    return prompt
 
 def generate(
     prompt, history, temperature=0.2, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
@@ -28,7 +34,7 @@ def generate(
         seed=42,
     )
 
-    formatted_prompt = format_prompt(prompt, history)
+    formatted_prompt = format_prompt(prompt, history, system_prompt="Hello there! I'm Stella, your friendly AI companion. How can I assist you today?")
 
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
@@ -40,24 +46,7 @@ def generate(
 
 
 mychatbot = gr.Chatbot(
-    avatar_images=["./user.png", "./botm.png"],
-    bubble_full_width=False,
-    show_label=False,
-    show_copy_button=True,
-    likeable=True,
-)
-
-def echo(message, history, system_prompt):
-    # Use the system prompt to modify the model's behavior
-    # For example, you can prepend the system prompt to the message
-    # Or you can use it to control some parameters of the model
-    # Here we just return the system prompt and the message as a simple example
-    return f"{system_prompt}: {message}"
-
-with gr.Blocks() as demo:
-    system_prompt = gr.Textbox("Your name is Stella. You're a helpful AI who helps the user.", label="System Prompt", render=False)
-    gr.ChatInterface(echo, additional_inputs=[system_prompt], chatbot=mychatbot).launch()
-
+    avatar_images=["./user.png", "./botm.png"], bubble_full_width=False, show_label=False, show_copy_button=True, likeable=True,)
 
 demo = gr.ChatInterface(fn=generate,
     chatbot=mychatbot,
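
For a quick sanity check of the new prompt builder, the updated format_prompt can be exercised on its own. The snippet below copies the function exactly as committed above; the sample message, history, and system prompt passed to it are illustrative values, not part of this commit.

    def format_prompt(message, history, system_prompt=None):
        prompt = "<s>"

        # Adding system prompt if provided
        if system_prompt:
            prompt += f"[SYS] {system_prompt} [/SYS]"

        for user_prompt, bot_response in history:
            prompt += f"[INST] {user_prompt} [/INST]"
            prompt += f" {bot_response}</s> "

        prompt += f"[INST] {message} [/INST]"
        return prompt

    # Illustrative inputs only; any history and system prompt are handled the same way.
    print(format_prompt(
        "What can you do?",
        history=[("Hi", "Hello!")],
        system_prompt="Hello there! I'm Stella, your friendly AI companion. How can I assist you today?",
    ))
    # -> <s>[SYS] Hello there! I'm Stella, your friendly AI companion. How can I assist you today? [/SYS][INST] Hi [/INST] Hello!</s> [INST] What can you do? [/INST]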
 
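
The body of the streaming loop inside generate() falls outside these hunks, so it is not shown here. The sketch below is only a typical pattern for consuming client.text_generation(..., stream=True, details=True) with huggingface_hub's InferenceClient, assuming the app accumulates response.token.text chunk by chunk; the prompt string is an illustrative stand-in for the output of format_prompt().

    from huggingface_hub import InferenceClient

    client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

    # Illustrative prompt; in the app this would come from format_prompt().
    formatted_prompt = "<s>[INST] Say hello in one short sentence. [/INST]"

    stream = client.text_generation(
        formatted_prompt,
        max_new_tokens=64,
        stream=True,
        details=True,
        return_full_text=False,
    )

    # With stream=True and details=True the client yields token-level objects;
    # concatenating response.token.text rebuilds the generated text incrementally.
    output = ""
    for response in stream:
        output += response.token.text
    print(output)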