aidevhund committed
Commit 6045172 · verified · 1 Parent(s): 8dad82c

Update app.py

Files changed (1): app.py (+11 -59)
app.py CHANGED
@@ -13,19 +13,18 @@ print("OpenAI client initialized.")
 
 def respond(
     message,
-    history: list[tuple[str, str]],
-    system_message="You are a helpful assistant.",
-    max_tokens=512,
-    temperature=0.7,
-    top_p=0.95,
-    frequency_penalty=0.0,
-    seed=-1
+    history: list[tuple[str, str]]
 ):
     print(f"Received message: {message}")
     print(f"History: {history}")
-    print(f"System message: {system_message}")
-    print(f"Max tokens: {max_tokens}, Temperature: {temperature}, Top-P: {top_p}")
-    print(f"Frequency Penalty: {frequency_penalty}, Seed: {seed}")
+
+    # Hardcoded system message and other parameters
+    system_message = "You are a cryptocurrency trading assistant and market analyst. Your role is to provide users with data-driven insights, technical analysis (RSI, MACD, Bollinger Bands, Moving Averages, Fibonacci retracements, volume analysis, and price action), and investment advice tailored to their risk tolerance. Focus on actionable information, such as market conditions, key indicators, and investment strategies. Avoid speculation and provide clear, concise, and unbiased recommendations based on current data."
+    max_tokens = 512
+    temperature = 0.7
+    top_p = 0.95
+    frequency_penalty = 0.0
+    seed = -1 # Random seed
 
     # Convert seed to None if -1 (meaning random)
     if seed == -1:
@@ -79,56 +78,9 @@ def respond(
 chatbot = gr.Chatbot(height=600, show_copy_button=True, placeholder="Start chatting!", likeable=True, layout="panel")
 print("Chatbot interface created.")
 
-system_message_box = gr.Textbox(value="You are a helpful assistant.", label="System Prompt", visible=False)
-
-max_tokens_slider = gr.Slider(
-    minimum=1,
-    maximum=4096,
-    value=512,
-    step=1,
-    label="Max new tokens"
-)
-temperature_slider = gr.Slider(
-    minimum=0.1,
-    maximum=4.0,
-    value=0.7,
-    step=0.1,
-    label="Temperature"
-)
-top_p_slider = gr.Slider(
-    minimum=0.1,
-    maximum=1.0,
-    value=0.95,
-    step=0.05,
-    label="Top-P"
-)
-frequency_penalty_slider = gr.Slider(
-    minimum=-2.0,
-    maximum=2.0,
-    value=0.0,
-    step=0.1,
-    label="Frequency Penalty"
-)
-seed_slider = gr.Slider(
-    minimum=-1,
-    maximum=65535,
-    value=-1,
-    step=1,
-    label="Seed (-1 for random)"
-)
-
-# Removed the custom_model_box as the model is pre-set
-
 demo = gr.ChatInterface(
     fn=respond,
-    additional_inputs=[
-        system_message_box,
-        max_tokens_slider,
-        temperature_slider,
-        top_p_slider,
-        frequency_penalty_slider,
-        seed_slider,
-    ],
+    additional_inputs=[],  # No additional inputs needed since everything is embedded
     fill_height=True,
     chatbot=chatbot,
     theme="Nymbo/Nymbo_Theme",
@@ -136,7 +88,7 @@ demo = gr.ChatInterface(
 print("ChatInterface object created.")
 
 with demo:
-    # No need for a model selection accordion since the model is fixed to "meta-llama"
+    # No need for system message input, model selection, or sliders
     pass
 
 print("Gradio interface initialized.")