Update app.py
app.py CHANGED
@@ -1,7 +1,6 @@
 import gradio as gr
 from openai import OpenAI
 import os
-import requests
 
 ACCESS_TOKEN = os.getenv("HF_TOKEN")
 print("Access token loaded.")
@@ -48,35 +47,19 @@ You are a highly knowledgeable and reliable Crypto Trading Advisor and Analyzer.
 ### 4. Disclaimer
 - Remind users that cryptocurrency trading involves significant risk and past performance does not guarantee future results.
 - Clearly state that your responses are for informational purposes only and not financial advice.
+### Example Interactions
+#### Example 1: Market Analysis
+_User Query:_ "What’s the current trend of Bitcoin?"
+_Response:_ "Bitcoin is currently trading at $X, showing a [bullish/bearish] trend over the past 24 hours. Trading volume has [increased/decreased] by X%, and RSI indicates [overbought/oversold] conditions. Short-term support is at $Y, and resistance is at $Z."
+#### Example 2: Portfolio Review
+_User Query:_ "Is my portfolio balanced?"
+_Response:_ "Your portfolio comprises X% Bitcoin, Y% Ethereum, and Z% altcoins. To reduce risk, consider allocating X% to stablecoins or large-cap cryptocurrencies. Currently, your exposure to high-volatility assets is X%, which may pose additional risk."
+#### Example 3: Risk Management
+_User Query:_ "How do I protect my trades?"
+_Response:_ "You can use stop-loss orders at $X to limit potential losses or take-profit orders at $Y to secure gains. Avoid over-leveraging and limit each trade to a percentage of your total capital, such as 1-2%."
 """
 
-#
-def get_crypto_data(coin_id, vs_currency="usd"):
-    """
-    Fetches cryptocurrency data from the CoinGecko API.
-
-    :param coin_id: The cryptocurrency's CoinGecko ID (e.g. 'bitcoin')
-    :param vs_currency: The currency in which the price is quoted (e.g. 'usd')
-    :return: Cryptocurrency data (price, volume, etc.)
-    """
-    url = f"https://api.coingecko.com/api/v3/simple/price"
-    params = {
-        "ids": coin_id,
-        "vs_currencies": vs_currency,
-        "include_market_cap": "true",
-        "include_24hr_vol": "true",
-        "include_24hr_change": "true",
-        "include_last_updated_at": "true",
-    }
-    response = requests.get(url, params=params)
-
-    if response.status_code == 200:
-        return response.json()
-    else:
-        print(f"Error fetching data: {response.status_code}, {response.text}")
-        return None
-
-# Function that prepares the message for the OpenAI model and generates the response
+# Function to handle chatbot responses
 def respond(
     message,
     history: list[tuple[str, str]],
@@ -89,32 +72,14 @@ def respond(
     print(f"Received message: {message}")
     print(f"History: {history}")
 
-    #
-
-
-    for coin in supported_coins:
-        if coin in message.lower():
-            coin_name = coin
-            break
-
-    # Fetch data from the CoinGecko API
-    crypto_data = None
-    if coin_name:
-        crypto_data = get_crypto_data(coin_name)
-        print(f"Fetched data for {coin_name}: {crypto_data}")
+    # Convert seed to None if -1 (meaning random)
+    if seed == -1:
+        seed = None
 
-    # Build the system message
     messages = [{"role": "system", "content": SYSTEM_PROMPT}]
+    print("System prompt added to messages.")
 
-    #
-    if crypto_data and coin_name in crypto_data:
-        price = crypto_data[coin_name]["usd"]
-        volume = crypto_data[coin_name].get("usd_24h_vol", "N/A")
-        market_cap = crypto_data[coin_name].get("usd_market_cap", "N/A")
-        message += f"\n\nReal-time data for {coin_name.capitalize()}:\n"
-        message += f"Price: ${price}\n24h Volume: ${volume}\nMarket Cap: ${market_cap}\n"
-
-    # Add the chat history to the messages
+    # Add conversation history to the context
     for val in history:
         user_part = val[0]
         assistant_part = val[1]
@@ -123,11 +88,13 @@ def respond(
         if assistant_part:
             messages.append({"role": "assistant", "content": assistant_part})
 
-    #
+    # Append the latest user message
     messages.append({"role": "user", "content": message})
 
-    #
+    # Start response generation
     response = ""
+    print("Sending request to OpenAI API.")
+
     for message_chunk in client.chat.completions.create(
         model="meta-llama/Llama-3.3-70B-Instruct",
         max_tokens=max_tokens,
@@ -135,14 +102,16 @@
         temperature=temperature,
         top_p=top_p,
         frequency_penalty=frequency_penalty,
-        seed=
+        seed=seed,
         messages=messages,
     ):
         token_text = message_chunk.choices[0].delta.content
         response += token_text
         yield response
 
-
+    print("Completed response generation.")
+
+# Gradio UI
 chatbot = gr.Chatbot(height=600, show_copy_button=True, placeholder="Ask about crypto trading or analysis.", likeable=True)
 
 max_tokens_slider = gr.Slider(
@@ -195,4 +164,4 @@ demo = gr.ChatInterface(
 )
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
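
For reference, here is a minimal, self-contained sketch of the streaming chat-completion pattern that the updated `respond` function relies on, with the new `seed == -1` → `None` normalization applied before the request. The client construction is outside this diff, so the `base_url`, sampling values, and sample prompt below are assumptions and placeholders, not the Space's actual configuration.

```python
import os
from openai import OpenAI

# Assumption: the Space points the OpenAI client at a Hugging Face
# OpenAI-compatible inference endpoint; the real base_url is not shown in this diff.
client = OpenAI(
    base_url="https://api-inference.huggingface.co/v1/",
    api_key=os.getenv("HF_TOKEN"),
)

seed = -1  # -1 is treated as "random" in the UI
if seed == -1:
    seed = None  # mirrors the normalization added in this commit

messages = [
    {"role": "system", "content": "You are a crypto trading advisor."},  # placeholder prompt
    {"role": "user", "content": "What is the current trend of Bitcoin?"},
]

response = ""
for chunk in client.chat.completions.create(
    model="meta-llama/Llama-3.3-70B-Instruct",
    messages=messages,
    max_tokens=512,
    temperature=0.7,
    top_p=0.95,
    seed=seed,
    stream=True,  # respond() iterates over streamed chunks in the same way
):
    token_text = chunk.choices[0].delta.content
    if token_text:  # the final chunk may carry no content
        response += token_text

print(response)
```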