TheMaisk committed on
Commit
42c8aea
·
verified ·
1 Parent(s): 2d100fa

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +29 -9
app.py CHANGED
@@ -3,18 +3,18 @@ from huggingface_hub import InferenceClient
3
  import gradio as gr
4
  import os
5
 
6
- # Laden der Prompts aus der JSON-Datei
7
def load_prompts_from_json(file_path):
    """Load the prompt templates from a JSON file.

    Args:
        file_path: Path to a JSON file mapping prompt-type names to templates.

    Returns:
        The parsed JSON content (expected: dict of prompt-type -> template).
    """
    # Explicit UTF-8: the platform default encoding is not guaranteed,
    # and JSON files are UTF-8 by convention (RFC 8259).
    with open(file_path, 'r', encoding='utf-8') as file:
        return json.load(file)
10
 
11
- # Laden der Prompts aus 'prompts.json'
12
  prompts = load_prompts_from_json('prompts.json')
13
 
14
  # Inference client
15
  client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
16
 
17
- # Secret prompt from environment variable
18
  secret_prompt = os.getenv("SECRET_PROMPT")
19
 
20
  def format_prompt(new_message, history, prompt_type='default'):
@@ -26,7 +26,7 @@ def format_prompt(new_message, history, prompt_type='default'):
26
  return prompt
27
 
28
  def generate(prompt, history, temperature=0.25, max_new_tokens=512, top_p=0.95, repetition_penalty=1.0, prompt_type='default'):
29
- # Konfiguration der Parameter
30
  temperature = float(temperature)
31
  if temperature < 1e-2:
32
  temperature = 1e-2
@@ -45,20 +45,40 @@ def generate(prompt, history, temperature=0.25, max_new_tokens=512, top_p=0.95,
45
  for response in stream:
46
  output += response.token.text
47
  yield output
48
- return output, history + [(prompt, output)] # Speichere die Konversation
49
 
50
- # Chatbot ohne Avatare und mit transparentem Design
51
  samir_chatbot = gr.Chatbot(bubble_full_width=True, show_label=False, show_copy_button=False, likeable=False)
52
 
53
- # Dropdown für Prompt-Typen
54
  prompt_type_dropdown = gr.Dropdown(choices=list(prompts.keys()), label="Prompt Type", value='default')
55
 
56
- # Minimalistisches Theme und Gradio Demo Konfiguration
57
  theme = 'syddharth/gray-minimal'
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58
  demo = gr.Interface(
59
  fn=generate,
60
  inputs=[
61
- gr.Textbox(lines=2, label="Eingabe"),
62
  "state" # State input for conversation history
63
  ],
64
  outputs=[samir_chatbot],
 
3
  import gradio as gr
4
  import os
5
 
6
+ # Load prompts from JSON file
7
def load_prompts_from_json(file_path):
    """Load the prompt templates from a JSON file.

    Args:
        file_path: Path to a JSON file mapping prompt-type names to templates.

    Returns:
        The parsed JSON content (expected: dict of prompt-type -> template).
    """
    # Explicit UTF-8: the platform default encoding is not guaranteed,
    # and JSON files are UTF-8 by convention (RFC 8259).
    with open(file_path, 'r', encoding='utf-8') as file:
        return json.load(file)
10
 
11
+ # Load prompts from 'prompts.json'
12
  prompts = load_prompts_from_json('prompts.json')
13
 
14
  # Inference client
15
  client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
16
 
17
+ # Secret prompt from environment variable (if needed)
18
  secret_prompt = os.getenv("SECRET_PROMPT")
19
 
20
  def format_prompt(new_message, history, prompt_type='default'):
 
26
  return prompt
27
 
28
  def generate(prompt, history, temperature=0.25, max_new_tokens=512, top_p=0.95, repetition_penalty=1.0, prompt_type='default'):
29
+ # Configuration of parameters
30
  temperature = float(temperature)
31
  if temperature < 1e-2:
32
  temperature = 1e-2
 
45
  for response in stream:
46
  output += response.token.text
47
  yield output
48
+ return output, history + [(prompt, output)] # Store conversation history
49
 
50
+ # Chatbot without avatars and with transparent design
51
  samir_chatbot = gr.Chatbot(bubble_full_width=True, show_label=False, show_copy_button=False, likeable=False)
52
 
53
+ # Dropdown for prompt types
54
  prompt_type_dropdown = gr.Dropdown(choices=list(prompts.keys()), label="Prompt Type", value='default')
55
 
56
+ # Minimalistic theme and Gradio demo configuration
57
  theme = 'syddharth/gray-minimal'
58
+
59
+ # Choose how you want to handle state:
60
+
61
# Option 1: stateless interface (no conversation-history round-tripping).
# Fix: `generate(prompt, history, temperature, max_new_tokens, top_p,
# repetition_penalty, prompt_type)` takes `history` as its SECOND positional
# argument, so a "state" input must sit between the textbox and the sliders —
# otherwise the Temperature slider value would be passed as `history`.
# NOTE(review): `demo` is rebound by the Option 2 Interface further down this
# file, so only the last assignment takes effect — keep exactly one of them.
demo = gr.Interface(
    fn=generate,
    inputs=[
        gr.Textbox(lines=2, label="Input"),
        "state",  # maps to generate()'s `history` parameter
        gr.Slider(0, 1, value=0.25, label="Temperature"),
        gr.Slider(1, 2048, value=512, step=1, label="Max Tokens"),
        gr.Slider(0, 1, value=0.95, label="Top P"),
        gr.Slider(1, 2, value=1.0, label="Repetition Penalty"),
        prompt_type_dropdown
    ],
    outputs=[samir_chatbot],
    title="Tutorial Master",
    theme=theme
)
76
+
77
+ # Option 2: State Management for Conversation History
78
  demo = gr.Interface(
79
  fn=generate,
80
  inputs=[
81
+ gr.Textbox(lines=2, label="Input"),
82
  "state" # State input for conversation history
83
  ],
84
  outputs=[samir_chatbot],