aidevhund committed on
Commit 8f31e52 · verified · 1 Parent(s): c181c4d

Update app.py

Files changed (1)
  1. app.py +60 -64
app.py CHANGED
@@ -1,47 +1,88 @@
  import gradio as gr
+ import requests
  from openai import OpenAI
  import os

+ # Load your API keys from environment variables
  ACCESS_TOKEN = os.getenv("HF_TOKEN")
- print("Access token loaded.")
  TAVILY_API_KEY = os.getenv("TAVILY_API_KEY")
+ print("Access token loaded.")
  client = OpenAI(
      base_url="https://api-inference.huggingface.co/v1/",
      api_key=ACCESS_TOKEN,
  )
  print("OpenAI client initialized.")

- # Search Tool
- from langchain_community.tools.tavily_search import TavilySearchResults
- search_tool = TavilySearchResults(tavily_api_key=TAVILY_API_KEY)
  # Define a comprehensive system prompt
  SYSTEM_PROMPT = """
  You are a highly knowledgeable and reliable Crypto Trading Advisor and Analyzer.
  Your goal is to assist users in understanding, analyzing, and making informed decisions about cryptocurrency trading.
- You provide accurate, concise, and actionable advice based on real-time data, historical trends, and established best practices.
  """

+ # Binance API - Fetch Market Data
+ def get_binance_data(symbol: str):
+     # Base URL for Binance API
+     url = f'https://api.binance.com/api/v3/ticker/24hr?symbol={symbol.upper()}'
+
+     try:
+         # Send GET request to Binance API
+         response = requests.get(url)
+         data = response.json()
+
+         if response.status_code != 200:
+             return {"error": "Error fetching data from Binance"}
+
+         # Extract relevant information from the API response
+         price = float(data['lastPrice'])
+         volume = float(data['volume'])
+         market_cap = float(data['quoteVolume'])  # Binance doesn't provide market cap directly, so we use quote volume as a proxy
+         change_24h = float(data['priceChangePercent'])
+
+         return {
+             'price': price,
+             'volume': volume,
+             'market_cap': market_cap,
+             'change_24h': change_24h
+         }
+     except Exception as e:
+         return {"error": f"An error occurred: {str(e)}"}
+
  # Function to handle chatbot responses
  def respond(
      message,
-     history: list[tuple[str, str]],
-     max_tokens,
-     temperature,
-     top_p,
-     frequency_penalty,
-     seed
+     history: list[tuple[str, str]]
  ):
      print(f"Received message: {message}")
      print(f"History: {history}")

-     # Convert seed to None if -1 (meaning random)
-     if seed == -1:
-         seed = None
+     # Default values for the parameters
+     max_tokens = 1024
+     temperature = 0.3
+     top_p = 0.95
+     frequency_penalty = 0.0
+     seed = None

-     messages = [{"role": "system", "content": SYSTEM_PROMPT}]
-     print("System prompt added to messages.")
+     if "crypto" in message.lower():
+         # Extract the cryptocurrency symbol from the message
+         crypto_symbol = message.split()[0].upper() + "USDT"  # e.g. "BTC ..." -> "BTCUSDT"
+         market_data = get_binance_data(crypto_symbol)
+
+         if 'error' in market_data:
+             response = "Error fetching data for this cryptocurrency."
+             yield response
+             return

-     # Add conversation history to the context
+         # Include real-time data in the system prompt
+         SYSTEM_PROMPT += f"""
+         Current Data for {crypto_symbol}:
+         - Price: ${market_data['price']}
+         - 24h Change: {market_data['change_24h']}%
+         - Volume: {market_data['volume']}
+         - Market Cap (proxy via quote volume): ${market_data['market_cap']}
+         """
+
+     # Prepare messages for the assistant
+     messages = [{"role": "system", "content": SYSTEM_PROMPT}]
      for val in history:
          user_part = val[0]
          assistant_part = val[1]
@@ -50,13 +91,11 @@ def respond(
          if assistant_part:
              messages.append({"role": "assistant", "content": assistant_part})

-     # Append the latest user message
+     # Add the latest user message
      messages.append({"role": "user", "content": message})

      # Start response generation
      response = ""
-     print("Sending request to OpenAI API.")
-
      for message_chunk in client.chat.completions.create(
          model="meta-llama/Llama-3.3-70B-Instruct",
          max_tokens=max_tokens,
@@ -76,54 +115,11 @@ def respond(
  # Gradio UI
  chatbot = gr.Chatbot(height=600, show_copy_button=True, placeholder="Ask about crypto trading or analysis.", likeable=True)

- max_tokens_slider = gr.Slider(
-     minimum=1,
-     maximum=4096,
-     value=512,
-     step=1,
-     label="Max new tokens"
- )
- temperature_slider = gr.Slider(
-     minimum=0.1,
-     maximum=4.0,
-     value=0.7,
-     step=0.1,
-     label="Temperature"
- )
- top_p_slider = gr.Slider(
-     minimum=0.1,
-     maximum=1.0,
-     value=0.95,
-     step=0.05,
-     label="Top-P"
- )
- frequency_penalty_slider = gr.Slider(
-     minimum=-2.0,
-     maximum=2.0,
-     value=0.0,
-     step=0.1,
-     label="Frequency Penalty"
- )
- seed_slider = gr.Slider(
-     minimum=-1,
-     maximum=65535,
-     value=-1,
-     step=1,
-     label="Seed (-1 for random)"
- )
-
  demo = gr.ChatInterface(
      fn=respond,
-     additional_inputs=[
-         max_tokens_slider,
-         temperature_slider,
-         top_p_slider,
-         frequency_penalty_slider,
-         seed_slider,
-     ],
      fill_height=True,
      chatbot=chatbot,
  )

  if __name__ == "__main__":
-     demo.launch()
+     demo.launch()
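
For reference, the new respond() builds its system prompt from live Binance data before streaming a completion. The snippet below is a minimal standalone sketch, not part of the commit: get_binance_data mirrors the helper added above, while build_system_prompt and the shortened SYSTEM_PROMPT string are hypothetical stand-ins introduced here for illustration. It keeps the market-data augmentation in a local copy of the prompt rather than rebinding the module-level constant inside the handler.

import requests

# Abridged stand-in for the prompt defined in app.py (assumption, not the committed text)
SYSTEM_PROMPT = "You are a highly knowledgeable and reliable Crypto Trading Advisor and Analyzer."

def get_binance_data(symbol: str):
    # Same Binance 24hr ticker endpoint used in the commit
    url = f"https://api.binance.com/api/v3/ticker/24hr?symbol={symbol.upper()}"
    try:
        response = requests.get(url, timeout=10)
        if response.status_code != 200:
            return {"error": "Error fetching data from Binance"}
        data = response.json()
        return {
            "price": float(data["lastPrice"]),
            "volume": float(data["volume"]),
            "market_cap": float(data["quoteVolume"]),  # quote volume used as a market-cap proxy
            "change_24h": float(data["priceChangePercent"]),
        }
    except requests.RequestException as e:
        return {"error": f"An error occurred: {e}"}

def build_system_prompt(message: str) -> str:
    # Work on a local copy so the module-level SYSTEM_PROMPT is never rebound between requests
    prompt = SYSTEM_PROMPT
    if "crypto" in message.lower():
        symbol = message.split()[0].upper() + "USDT"  # naive: the first word is taken as the ticker
        market_data = get_binance_data(symbol)
        if "error" not in market_data:
            prompt += (
                f"\nCurrent Data for {symbol}:\n"
                f"- Price: ${market_data['price']}\n"
                f"- 24h Change: {market_data['change_24h']}%\n"
                f"- Volume: {market_data['volume']}\n"
                f"- Market Cap (proxy via quote volume): ${market_data['market_cap']}\n"
            )
    return prompt

if __name__ == "__main__":
    # Example: a message that starts with the ticker and contains the word "crypto"
    print(build_system_prompt("BTC crypto outlook?"))

Keeping the augmentation in a local variable means each request starts from the same base prompt; the resulting string is what would be sent as the system message ahead of the streaming chat.completions.create call.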