Spaces:
Running
Running
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,573 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
import yfinance as yf
|
3 |
+
import numpy as np
|
4 |
+
import pandas as pd
|
5 |
+
import plotly.graph_objects as go
|
6 |
+
from scipy.stats import skew, kurtosis
|
7 |
+
from collections import defaultdict
|
8 |
+
|
9 |
+
# Sidebar for inputs
st.sidebar.title("Trading Dashboard")
# Dollar amount allocated per trade (currently collected but not used below).
capital_per_trade = st.sidebar.number_input("Capital Per Trade", value=2000, min_value=100)
# Strategy label; referenced again in the AI-chat and success banner at the bottom.
selected_strategy = st.sidebar.selectbox("Select Strategy", ['Momentum', 'Reversal', 'Breakout'])

# Section 1: Stock and Volume Profile Inputs
st.title("Real-time Volume Profile with Market Shape Detection")

# Yahoo Finance ticker symbol to analyze.
ticker = st.text_input("Enter Stock Ticker", value="AAPL")
# Session date used both as the intraday download start and, later, as the
# filter key for the market-profile tables.
start_date = st.date_input("Start Date", value=pd.to_datetime("2024-10-17"))
|
19 |
+
|
20 |
+
# Fetch stock data in real-time
|
21 |
+
def fetch_stock_data(ticker, start, interval='1m'):
    """Download intraday OHLCV bars for *ticker* from Yahoo Finance.

    Parameters mirror ``yfinance.download``: history begins at *start*
    and is sampled at *interval* (1-minute bars by default).
    """
    return yf.download(ticker, start=start, interval=interval)
|
24 |
+
|
25 |
+
# Download 1-minute bars for the chosen ticker starting at the selected date.
data = fetch_stock_data(ticker, start_date)
|
26 |
+
|
27 |
+
# Calculate the volume profile with buy and sell volumes
|
28 |
+
def calculate_volume_profile(data, row_layout):
    """Build a price-binned volume profile from OHLCV bars.

    The price axis between the session low and high is split by
    ``row_layout`` edges (giving ``row_layout - 1`` bins); each bar's full
    volume is added to every bin that its Low..High range touches.

    Returns a DataFrame indexed by each bin's lower-edge price with a
    single 'Total Volume' column.
    """
    n_edges = row_layout
    edges = np.linspace(data['Low'].min(), data['High'].max(), n_edges)

    profile = pd.DataFrame(index=edges[:-1], columns=['Total Volume'])
    profile['Total Volume'] = 0
    vol_col = profile.columns.get_loc('Total Volume')

    for _, bar in data.iterrows():
        lo_idx, hi_idx = np.digitize([bar['Low'], bar['High']], edges) - 1
        # Clamp to valid bin positions so prices at/above the top edge land
        # in the last bin instead of indexing out of range.
        lo_idx = max(0, min(n_edges - 2, lo_idx))
        hi_idx = max(0, min(n_edges - 2, hi_idx))
        profile.iloc[lo_idx:hi_idx + 1, vol_col] += bar['Volume']

    return profile
|
45 |
+
|
46 |
+
# Function to calculate VAH, VAL, POC
|
47 |
+
def calculate_vah_val_poc(volume_profile):
    """Derive value-area bounds from a volume profile.

    Returns ``(vah, val, poc)``: POC is the price bin holding the most
    volume; VAH is the lowest price whose cumulative volume reaches 70%
    of the total; VAL is the highest price still at or below the 30%
    cumulative mark.
    """
    total = volume_profile['Total Volume'].sum()
    running = volume_profile['Total Volume'].cumsum()

    # Point of Control: index label of the single busiest bin.
    poc = volume_profile['Total Volume'].idxmax()

    vah = volume_profile.index[running >= 0.7 * total].min()
    val = volume_profile.index[running <= 0.3 * total].max()

    return vah, val, poc
|
60 |
+
|
61 |
+
# Initial quick identification of market profile shape based on POC, VAH, and VAL
|
62 |
+
def quick_identify_profile_shape(vah, val, poc):
    """Coarse market-profile shape from where POC sits relative to the value area."""
    if poc > vah:
        return "P-shape (Bullish Accumulation)"
    if poc < val:
        return "b-shape (Bearish Accumulation)"
    if vah > poc > val:
        return "D-shape (Balanced Market)"
    # POC coincides with VAH or VAL (boundary case).
    return "B-shape (Double Distribution)"
|
71 |
+
|
72 |
+
# Refine the initial guess with skewness and kurtosis
|
73 |
+
def refine_with_skew_kurtosis(volume_profile, shape_guess):
    """Second-pass shape classification using skewness and kurtosis of the
    volume distribution.

    ``shape_guess`` is the label produced by
    ``quick_identify_profile_shape`` (e.g. "P-shape (Bullish
    Accumulation)").  The statistics can flip a P/b guess whose skew
    points the other way, or promote a balanced D-shape to a double
    distribution when the profile is strongly skewed and heavy-tailed.

    Bug fix: the original compared ``shape_guess`` for equality against
    bare prefixes ("P-shape") while the guesses always carry a
    parenthetical suffix, so no refinement ever fired.  Matching is now
    done on the label prefix.
    """
    volumes = volume_profile['Total Volume'].values
    skewness = skew(volumes)
    kurt = kurtosis(volumes)

    # A "bullish" profile with negative skew is really bottom-heavy.
    if shape_guess.startswith("P-shape") and skewness < 0:
        return "b-shape (Bearish Accumulation)"
    # A "bearish" profile with positive skew is really top-heavy.
    # (startswith is case-sensitive, so "B-shape" labels do not match.)
    if shape_guess.startswith("b-shape") and skewness > 0:
        return "P-shape (Bullish Accumulation)"

    # Strong asymmetry plus positive excess kurtosis on a "balanced"
    # profile suggests two overlapping distributions.
    if shape_guess.startswith("D-shape") and abs(skewness) > 0.5 and kurt > 0:
        return "B-shape (Double Distribution)"

    return shape_guess
|
87 |
+
|
88 |
+
# Calculate the volume profile
# 24 edges -> 23 price bins across the session's range.
volume_profile = calculate_volume_profile(data, row_layout=24)
vah, val, poc = calculate_vah_val_poc(volume_profile)

# Initial shape identification
initial_shape = quick_identify_profile_shape(vah, val, poc)

# Refined shape identification
refined_shape = refine_with_skew_kurtosis(volume_profile, initial_shape)

# Display the initial and refined market shapes
st.write(f"Initial Market Profile Shape: {initial_shape}")
st.write(f"Refined Market Profile Shape: {refined_shape}")
|
101 |
+
|
102 |
+
# Plot the volume profile and VAH
|
103 |
+
def plot_volume_profile(volume_profile, vah, val, poc):
    """Render the volume profile as a horizontal bar chart in Streamlit,
    with dashed reference lines and right-edge labels at VAH, VAL and POC."""
    fig = go.Figure()

    fig.add_trace(go.Bar(
        y=volume_profile.index,
        x=volume_profile['Total Volume'],
        orientation='h',
        name='Total Volume',
        marker=dict(color='blue', opacity=0.6)
    ))

    # Highlight VAH, VAL, and POC with dashed horizontal lines.
    for level, color in ((vah, "green"), (val, "red"), (poc, "orange")):
        fig.add_shape(type="line", y0=level, y1=level, x0=0, x1=1,
                      line=dict(color=color, dash="dash"))

    # Label each reference level at the right edge of the plot area.
    for level, label in ((vah, "VAH"), (val, "VAL"), (poc, "POC")):
        fig.add_annotation(xref="paper", yref="y", x=1, y=level,
                           text=f"{label} at {level:.2f}", showarrow=False)

    fig.update_layout(title='Volume Profile with Initial and Refined Market Shape Detection', xaxis_title='Volume', yaxis_title='Price')
    st.plotly_chart(fig)
|
126 |
+
|
127 |
+
# Render the volume-profile chart for the selected ticker/date.
plot_volume_profile(volume_profile, vah, val, poc)
|
128 |
+
|
129 |
+
# # Section 2: 5-Minute Stock Prices for the selected date
|
130 |
+
# st.title("5-Minute Stock Price Data for Selected Date")
|
131 |
+
|
132 |
+
# def fetch_five_minute_data(ticker, selected_date):
|
133 |
+
# start_date_str = selected_date.strftime("%Y-%m-%d")
|
134 |
+
# data = yf.download(ticker, start=start_date_str, end=start_date_str, interval="5m")
|
135 |
+
# return data
|
136 |
+
|
137 |
+
# five_min_data = fetch_five_minute_data(ticker, start_date)
|
138 |
+
|
139 |
+
# if not five_min_data.empty:
|
140 |
+
# five_min_data = five_min_data.reset_index()
|
141 |
+
# st.write("5-Minute Interval Data", five_min_data)
|
142 |
+
# else:
|
143 |
+
# st.write("No 5-minute data available for the selected date.")
|
144 |
+
|
145 |
+
# # Section 3: 30-Minute Data Table for the selected date
|
146 |
+
# st.title("30-Minute Data Table for Selected Date")
|
147 |
+
|
148 |
+
# def fetch_thirty_minute_data(ticker, selected_date):
|
149 |
+
# start_date_str = selected_date.strftime("%Y-%m-%d")
|
150 |
+
# data = yf.download(ticker, start=start_date_str, end=start_date_str, interval="30m")
|
151 |
+
# return data
|
152 |
+
|
153 |
+
# thirty_min_data = fetch_thirty_minute_data(ticker, start_date)
|
154 |
+
|
155 |
+
# if not thirty_min_data.empty:
|
156 |
+
# thirty_min_data = thirty_min_data.reset_index()
|
157 |
+
# st.write("30-Minute Interval Data", thirty_min_data)
|
158 |
+
# else:
|
159 |
+
# st.write("No 30-minute data available for the selected date.")
|
160 |
+
|
161 |
+
# # Section 4: IB Range Signal and Last Day VAL Signal
|
162 |
+
# st.title("IB Range and Last Day's VAL Signal")
|
163 |
+
|
164 |
+
# # Generate a signal for IB Range for today based on mock conditions
|
165 |
+
# ib_range_signal = "IB Range Signal: Small" if thirty_min_data['High'].iloc[0] - thirty_min_data['Low'].iloc[0] < 2 else "IB Range Signal: Large"
|
166 |
+
# st.write(ib_range_signal)
|
167 |
+
|
168 |
+
# # Mock signal based on the previous day's VAL
|
169 |
+
# val_signal = "Last Day's VAL Signal: Bullish" if vah > val else "Last Day's VAL Signal: Bearish"
|
170 |
+
# st.write(val_signal)
|
171 |
+
|
172 |
+
# Section 1: Fetch stock data in real-time
|
173 |
+
# @st.cache_data
|
174 |
+
def fetch_stock_data(ticker, interval='30m'):
    """Fetch the last 60 days of bars for *ticker* at *interval*.

    Returns a DataFrame with the datetime index reset to a column.  On a
    download failure the error is shown in the Streamlit UI and an empty
    DataFrame is returned.  NOTE(review): this redefines the earlier
    1-minute ``fetch_stock_data`` at module level.
    """
    try:
        frame = yf.download(ticker, period="60d", interval=interval)
        if frame.empty:
            st.warning(f"No data found for {ticker} with {interval} interval. Please try a different date.")
        return frame.reset_index()
    except Exception as exc:
        st.error(f"Error fetching data for {ticker}: {exc}")
        return pd.DataFrame()
|
183 |
+
|
184 |
+
# Re-fetch as 30-minute bars (this uses the second fetch_stock_data
# definition, which overrides the 1-minute version above).
data = fetch_stock_data(ticker)
|
185 |
+
|
186 |
+
# st.write(data)
|
187 |
+
|
188 |
+
# Section 2: True Range and ATR Calculations
|
189 |
+
def calculate_atr(data, atr_period=14):
    """Append True Range and Wilder-style ATR columns to *data* in place.

    Adds 'H-L', 'H-PC', 'L-PC', 'TR' and 'ATR_14' columns and returns the
    same (mutated) DataFrame.  NOTE(review): the previous close is taken
    from 'Adj Close', which must exist in the input frame.
    """
    prev_close = data['Adj Close'].shift(1)
    data['H-L'] = data['High'] - data['Low']
    data['H-PC'] = np.abs(data['High'] - prev_close)
    data['L-PC'] = np.abs(data['Low'] - prev_close)
    # True range = widest of the three candidate ranges; NaNs from the
    # first row's missing previous close are skipped by max().
    data['TR'] = data[['H-L', 'H-PC', 'L-PC']].max(axis=1)
    # Wilder smoothing: EMA with alpha = 1/period, seeded from the first TR.
    data['ATR_14'] = data['TR'].ewm(alpha=1/atr_period, adjust=False).mean()
    return data
|
196 |
+
|
197 |
+
# Add intraday TR/ATR columns to the 30-minute frame (mutates in place).
data = calculate_atr(data)

# Fetch daily data to calculate ATR for daily intervals
# NOTE(review): this duplicates calculate_atr's logic inline and also
# relies on an 'Adj Close' column being present — confirm against the
# installed yfinance version's download defaults.
daily_data = yf.download(ticker, period="60d", interval="1d").reset_index()
daily_data['H-L'] = daily_data['High'] - daily_data['Low']
daily_data['H-PC'] = np.abs(daily_data['High'] - daily_data['Adj Close'].shift(1))
daily_data['L-PC'] = np.abs(daily_data['Low'] - daily_data['Adj Close'].shift(1))
daily_data['TR'] = daily_data[['H-L', 'H-PC', 'L-PC']].max(axis=1)
daily_data['ATR_14_1_day'] = daily_data['TR'].ewm(alpha=1/14, adjust=False).mean()
# Shift so each session carries the *previous* day's completed ATR.
daily_data['Prev_Day_ATR_14_1_Day'] = daily_data['ATR_14_1_day'].shift(1)
daily_data['Date'] = pd.to_datetime(daily_data['Date']).dt.date

# Merge ATR into 30-minute data
data['Date'] = pd.to_datetime(data['Datetime']).dt.date
final_data = pd.merge(data, daily_data[['Date', 'ATR_14_1_day', 'Prev_Day_ATR_14_1_Day']], on='Date', how='left')
|
212 |
+
|
213 |
+
# st.write(final_data)
|
214 |
+
|
215 |
+
# Section 3: TPO Profile Calculation
|
216 |
+
def calculate_tpo(data, tick_size=0.01, value_area_percent=70):
    """Build a TPO (Time Price Opportunity) profile for one session.

    Each bar gets the next letter of the alphabet; every price level
    (spaced *tick_size* apart) that the bar's Low-High range covers
    collects that letter.  Returns ``(tpo_counts, poc, vah, val)`` where
    POC is the most-visited price level and VAH/VAL bound the levels
    greedily accumulated up to *value_area_percent* percent of all TPOs.
    """
    levels = np.arange(data['Low'].min(), data['High'].max(), tick_size)
    tpo_counts = defaultdict(list)
    alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'

    for period, (_, bar) in enumerate(data.iterrows()):
        letter = alphabet[period % len(alphabet)]
        for level in levels:
            if bar['Low'] <= level <= bar['High']:
                tpo_counts[level].append(letter)

    total = sum(len(v) for v in tpo_counts.values())
    target = total * value_area_percent / 100

    # Take price levels from most- to least-populated until the next one
    # would overshoot the value-area target.
    ranked = sorted(tpo_counts.items(), key=lambda kv: len(kv[1]), reverse=True)
    accumulated = 0
    vah = 0
    val = float('inf')
    for level, letters_here in ranked:
        if accumulated + len(letters_here) > target:
            break
        accumulated += len(letters_here)
        vah = max(vah, level)
        val = min(val, level)

    poc = ranked[0][0]  # Price with highest TPO count
    return tpo_counts, poc, vah, val
|
247 |
+
|
248 |
+
# Section 4: IB Range, Market Classification, and Signals
|
249 |
+
def calculate_market_profile(data):
    """Compute one TPO/market-profile summary row per session.

    Groups the intraday frame by 'Date', runs calculate_tpo on each
    session, measures the initial balance (first two bars), and labels
    the day type and close location.  Returns a DataFrame with one row
    per date.
    """
    daily_tpo_profiles = []
    value_area_percent = 70
    tick_size = 0.01

    for date, group in data.groupby('Date'):
        tpo_counts, poc, vah, val = calculate_tpo(group, tick_size, value_area_percent)

        # Initial balance = range of the session's first two bars
        # (with 30-minute bars, the first hour of trading).
        initial_balance_high = group['High'].iloc[:2].max()
        initial_balance_low = group['Low'].iloc[:2].min()
        initial_balance_range = initial_balance_high - initial_balance_low

        day_range = group['High'].max() - group['Low'].min()
        range_extension = day_range - initial_balance_range

        last_row = group.iloc[-1]
        last_row_close = last_row['Close']
        last_row_open = last_row['Open']

        # st.write(last_row)

        # Classify the day by how far the full range extended beyond the IB.
        if day_range <= initial_balance_range * 1.15:
            day_type = 'Normal Day'
        elif initial_balance_range < day_range <= initial_balance_range * 2:
            if last_row_open >= last_row_close:
                day_type = 'Negative Normal Variation Day'
            elif last_row_open <= last_row_close:
                day_type = 'Positive Normal Variation Day'
            else:
                # NOTE(review): unreachable — the two branches above
                # already cover every open/close ordering.
                day_type = 'Normal Variation Day'
        elif day_range > initial_balance_range * 2:
            day_type = 'Trend Day'
        else:
            day_type = 'Neutral Day'

        # Where the last bar's close finished relative to the IB bounds.
        if last_row['Close'] >= initial_balance_high:
            close_type = 'Closed above Initial High'
        elif last_row['Close'] <= initial_balance_low:
            close_type = 'Closed below Initial Low'
        else:
            close_type = 'Closed between Initial High and Low'

        # if last_row['Close'] >= vah:
        #     close_type_va = 'Closed above VAH'
        # elif last_row['Close'] <= initial_balance_low:
        #     close_type_va = 'Closed below VAL'
        # else:
        #     close_type_va = 'Closed between VAH and VAL'

        tpo_profile = {
            'Date': date,
            'POC': round(poc, 2),
            'VAH': round(vah, 2),
            'VAL': round(val, 2),
            'Initial Balance High': round(initial_balance_high, 2),
            'Initial Balance Low': round(initial_balance_low, 2),
            'Initial Balance Range': round(initial_balance_range, 2),
            'Day Range': round(day_range, 2),
            'Range Extension': round(range_extension, 2),
            'Day Type': day_type,
            'Close Type' : close_type
            # ,
            # 'Close Type VA':close_type_va
        }
        daily_tpo_profiles.append(tpo_profile)

    return pd.DataFrame(daily_tpo_profiles)
|
316 |
+
|
317 |
+
# One summary row per session (POC/VAH/VAL, IB, day/close type).
market_profile_df = calculate_market_profile(final_data)

# Merge TPO profile data into final_data based on the 'Date'
# (each intraday row now carries its session's profile columns).
final_data = pd.merge(final_data, market_profile_df, on='Date', how='left')
|
321 |
+
|
322 |
+
# st.write(market_profile_df)
|
323 |
+
|
324 |
+
# Section 5: Generate Signals based on Market Profile
|
325 |
+
def generate_signals(market_profile_df):
    """Label each day (except the first) Bullish/Bearish/Neutral by
    comparing its initial balance with the prior day's value area.

    Bullish when the IB high exceeds yesterday's VAH; otherwise Bearish
    when the IB low undercuts yesterday's VAL; otherwise Neutral.
    Returns a DataFrame with one row per classified day carrying the
    trend plus the previous day's VAH/VAL/POC.
    """
    rows = []
    for idx in range(1, len(market_profile_df)):
        previous = market_profile_df.iloc[idx - 1]
        current = market_profile_df.iloc[idx]

        if current['Initial Balance High'] > previous['VAH']:
            label = 'Bullish'
        elif current['Initial Balance Low'] < previous['VAL']:
            label = 'Bearish'
        else:
            label = 'Neutral'

        rows.append({
            'Date': current['Date'],
            'Trend': label,
            'Previous Day VAH': previous['VAH'],
            'Previous Day VAL': previous['VAL'],
            'Previous Day POC': previous['POC'],
        })

    return pd.DataFrame(rows)
|
347 |
+
|
348 |
+
# Day-over-day trend labels derived from the per-session profile table.
signals_df = generate_signals(market_profile_df)

# Merge trend data into final_data
final_data = pd.merge(final_data, signals_df, on='Date', how='left')

# Define the conditions for Initial Balance Range classification
# relative to the previous day's daily ATR: under a third of the ATR is
# 'Small', up to the full ATR is 'Medium', beyond it is 'Large'.
conditions = [
    final_data['Initial Balance Range'] < final_data['Prev_Day_ATR_14_1_Day'] / 3,
    (final_data['Initial Balance Range'] >= final_data['Prev_Day_ATR_14_1_Day'] / 3) &
    (final_data['Initial Balance Range'] <= final_data['Prev_Day_ATR_14_1_Day']),
    final_data['Initial Balance Range'] > final_data['Prev_Day_ATR_14_1_Day']
]

# Define the corresponding values for each condition
choices = ['Small', 'Medium', 'Large']

# Create the IB Range column using np.select()
# Rows whose ATR is NaN (first session) match no condition -> 'Unknown'.
final_data['IB Range'] = np.select(conditions, choices, default='Unknown')

# Round all values in final_data to 2 decimals
final_data = final_data.round(2)
|
371 |
+
|
372 |
+
# Display the final merged DataFrame
|
373 |
+
# st.write(final_data)
|
374 |
+
|
375 |
+
# Get the unique dates and sort them
|
376 |
+
# Get the unique dates and sort them
sorted_dates = sorted(set(final_data['Date']))
final_data['2 Day VAH and VAL'] = ''

# Walk the sessions chronologically and, for each *previous* day, record
# close-vs-value-area actions plus an IB-size-adjusted day type.
# NOTE(review): the loop starts at index 2 but only reads i-1, so the
# first date never appears as previous_date — confirm this is intended.
for i in range(2, len(sorted_dates)):
    date = sorted_dates[i]
    previous_date = sorted_dates[i - 1]

    # Extract data for the previous date
    previous_data = final_data[final_data['Date'] == previous_date]

    day_high = previous_data['High'].max()
    # Bug fix: the previous day's low is the MINIMUM of 'Low'; the
    # original used .max(), which made the Medium-range neutral-day check
    # below fire far too easily.
    day_low = previous_data['Low'].min()

    # Initialize an empty list for actions
    actions = []
    # Bug fix: initialize here so the .loc write at the bottom cannot hit
    # an unbound name when previous_data is empty.
    final_day_type = ''

    # Ensure previous_data has rows before accessing
    if not previous_data.empty:
        # Get the last row of the previous day's data
        last_row = previous_data.iloc[-1]

        # Compare 'Close' with 'VAH' and 'VAL'
        if last_row['Close'] >= last_row['VAH']:
            actions.append('Previous Day Close Above VAH')
            actions.append('Previous Day Close Bullish')
        elif last_row['Close'] <= last_row['VAL']:
            actions.append('Previous Day Close Below VAL')
            actions.append('Previous Day Close Bearish')
        else:
            actions.append('Previous Day Close Neutral')

        # Inside day: yesterday's value area engulfs today's; outside day: the reverse.
        if last_row['Previous Day VAH'] >= last_row['VAH'] and last_row['Previous Day VAL'] <= last_row['VAL']:
            actions.append('Insider Neutral')
        elif last_row['Previous Day VAH'] <= last_row['VAH'] and last_row['Previous Day VAL'] >= last_row['VAL']:
            actions.append('Outsider Neutral')

        # Classify the session by IB size and where price finished
        # relative to the IB bounds.
        if last_row['IB Range'] == 'Large' and last_row['Close'] <= last_row['Initial Balance High']:
            final_day_type = 'Large Range Normal Day'
        elif last_row['IB Range'] == 'Medium' and day_high >= last_row['Initial Balance High'] and day_low <= last_row['Initial Balance Low']:
            final_day_type = 'Medium Range Neutral Day'
        elif last_row['IB Range'] == 'Medium' and last_row['Close'] >= last_row['Initial Balance High']:
            final_day_type = 'Medium Range +ve Normal Variation Day'
        elif last_row['IB Range'] == 'Medium' and last_row['Close'] <= last_row['Initial Balance Low']:
            final_day_type = 'Medium Range -ve Normal Variation Day'
        elif last_row['IB Range'] == 'Small' and last_row['Close'] >= last_row['Initial Balance High']:
            final_day_type = 'Small Range +ve Trend Variation Day'
        elif last_row['IB Range'] == 'Small' and last_row['Close'] <= last_row['Initial Balance Low']:
            final_day_type = 'Small Range -ve Trend Variation Day'
        elif last_row['IB Range'] == 'Small' and last_row['Close'] <= last_row['Initial Balance High'] and last_row['Close'] >= last_row['Initial Balance Low']:
            final_day_type = 'Small Range Non Trend Variation Day'
        else:
            final_day_type = ''

    # Persist the annotations on every row of the previous session.
    final_data.loc[final_data['Date'] == previous_date, '2 Day VAH and VAL'] = str(actions)
    final_data.loc[final_data['Date'] == previous_date, 'Adjusted Day Type'] = str(final_day_type)
|
441 |
+
|
442 |
+
|
443 |
+
# Create a 'casted_date' column to only capture the date part of the Datetime
|
444 |
+
# Create a 'casted_date' column to only capture the date part of the Datetime
# ('Date' already holds datetime.date values from the earlier .dt.date cast).
# Fix: the original assigned this identical column twice; the duplicate
# statement has been removed.
final_data['casted_date'] = final_data['Date']

# Sort by casted_date to ensure correct order
final_data = final_data.sort_values(by='casted_date')

# Get a sorted list of unique dates
sorted_dates = sorted(final_data['casted_date'].unique())

# Find the index of the selected date in the sorted list
current_date_index = sorted_dates.index(start_date) if start_date in sorted_dates else None

# Determine the previous date if it exists
# (truthiness of current_date_index also treats index 0 as "no previous").
previous_date = sorted_dates[current_date_index - 1] if current_date_index and current_date_index > 0 else None

# Filter based on the input date (start_date) from the sidebar
filtered_data = final_data[final_data['casted_date'] == start_date]

# Rows of the session immediately before the selected one (empty when None).
previous_filtered_data = final_data[final_data['casted_date'] == previous_date]
|
467 |
+
# st.write(filtered_data.columns)
|
468 |
+
|
469 |
+
# Section 7: Display the Data for Selected Date
|
470 |
+
# Section 7: display the profile, summaries and charts for the selected date.
if not filtered_data.empty:
    st.title(f"Market Profile for {start_date}")
    # NOTE(review): previous_filtered_data can be empty (first available
    # session), in which case .values[0] raises IndexError — verify.
    st.write(f"Previous Day Type: {previous_filtered_data['Day Type'].values[0]}")
    st.write(f"Previous Adjusted Day Type: {previous_filtered_data['Adjusted Day Type'].values[0]}")
    st.write(f"Previous Close Type: {previous_filtered_data['Close Type'].values[0]}")
    # st.write(f"Close Type VA: {filtered_data['Close Type VA'].values[0]}")
    st.write(f"Previous 2 Day VAH and VAL:{previous_filtered_data['2 Day VAH and VAL'].values[0]}")
    st.write(f"IB Range: {filtered_data['Initial Balance Range'].values[0]}")
    # NOTE(review): mixes the selected day's VAH with the *latest* row of
    # signals_df — confirm this pairing is intentional.
    st.write(f"2 Day VAH and VAL: VAH - {filtered_data['VAH'].values[0]}, VAL - {signals_df['Previous Day VAL'].values[-1]}")

    st.write(filtered_data)

    # Probability of repeatability based on the types of days
    day_type_summary = final_data['Day Type'].value_counts().reset_index()
    day_type_summary.columns = ['Day Type', 'Number of Days']
    # total_days counts 30-minute rows, not sessions, so percentages are
    # share-of-rows — NOTE(review): confirm this is the intended base.
    total_days = len(final_data)
    day_type_summary['Probability of Repeatability (%)'] = (day_type_summary['Number of Days'] / total_days) * 100

    # Display the probability summary
    st.title(f"Probability Summary for {ticker}")
    st.write(day_type_summary)

    # Generate the Comparison Matrix
    comparison_summary = pd.DataFrame({
        "Day Type": ["Normal Day", "Normal Variation Day", "Trend Day", "Neutral Day"],
        "Number of Days (Selected Stock)": [
            day_type_summary.loc[day_type_summary['Day Type'] == 'Normal Day', 'Number of Days'].values[0] if 'Normal Day' in day_type_summary['Day Type'].values else 0,
            day_type_summary.loc[day_type_summary['Day Type'] == 'Normal Variation Day', 'Number of Days'].values[0] if 'Normal Variation Day' in day_type_summary['Day Type'].values else 0,
            day_type_summary.loc[day_type_summary['Day Type'] == 'Trend Day', 'Number of Days'].values[0] if 'Trend Day' in day_type_summary['Day Type'].values else 0,
            day_type_summary.loc[day_type_summary['Day Type'] == 'Neutral Day', 'Number of Days'].values[0] if 'Neutral Day' in day_type_summary['Day Type'].values else 0
        ],
        "Probability of Repeatability (Selected Stock)": [
            day_type_summary.loc[day_type_summary['Day Type'] == 'Normal Day', 'Probability of Repeatability (%)'].values[0] if 'Normal Day' in day_type_summary['Day Type'].values else 0,
            day_type_summary.loc[day_type_summary['Day Type'] == 'Normal Variation Day', 'Probability of Repeatability (%)'].values[0] if 'Normal Variation Day' in day_type_summary['Day Type'].values else 0,
            day_type_summary.loc[day_type_summary['Day Type'] == 'Trend Day', 'Probability of Repeatability (%)'].values[0] if 'Trend Day' in day_type_summary['Day Type'].values else 0,
            day_type_summary.loc[day_type_summary['Day Type'] == 'Neutral Day', 'Probability of Repeatability (%)'].values[0] if 'Neutral Day' in day_type_summary['Day Type'].values else 0
        ]
    })

    st.title(f"Comparison Matrix for {ticker}")
    st.write(comparison_summary)

    # NOTE(review): mid-file import — conventionally this belongs at the
    # top of the module with the other imports.
    import plotly.express as px

    # Group by 'Day Type' and count occurrences
    day_type_summary = final_data.groupby('Day Type').size().reset_index(name='Counts')

    # Group by 'IB Range' and count occurrences
    ib_range_summary = final_data.groupby('IB Range').size().reset_index(name='Counts')

    # Group by 'Trend' and count occurrences
    trend_summary = final_data.groupby('Trend').size().reset_index(name='Counts')

    # Group by 'Initial Balance Range' and count occurrences
    prev_day_type_summary = final_data.groupby('Initial Balance Range').size().reset_index(name='Counts')

    # Visualizing the count of different 'Day Types'
    fig_day_type = px.bar(day_type_summary, x='Day Type', y='Counts', title='Distribution of Day Types')
    st.plotly_chart(fig_day_type)

    # Visualizing the count of different 'IB Ranges'
    fig_ib_range = px.bar(ib_range_summary, x='IB Range', y='Counts', title='Distribution of IB Ranges')
    st.plotly_chart(fig_ib_range)

    # Visualizing the count of different 'Trends'
    fig_trend = px.bar(trend_summary, x='Trend', y='Counts', title='Distribution of Market Trends')
    st.plotly_chart(fig_trend)

    # Visualizing the count of 'Initial Balance Ranges'
    fig_prev_day_type = px.bar(prev_day_type_summary, x='Initial Balance Range', y='Counts', title='Initial Balance Range')
    st.plotly_chart(fig_prev_day_type)

    # Visualizing the comparison between '2 Day VAH and VAL'
    fig_vah_val = px.scatter(final_data, x='VAH', y='VAL', color='IB Range', title='VAH vs VAL with IB Range')
    st.plotly_chart(fig_vah_val)

    # Visualizing the relationship between Initial Balance Range and Day Range
    fig_ib_day_range = px.scatter(final_data, x='Initial Balance Range', y='Day Range', color='Day Type', title='Initial Balance Range vs Day Range')
    st.plotly_chart(fig_ib_day_range)
else:
    st.warning(f"No data found for the selected date: {start_date}")
|
551 |
+
# Section 5: Trade Performance Monitoring
st.title("Trade Performance Monitoring")
uploaded_file = st.file_uploader("Upload Trade Data (CSV)", type="csv")

if uploaded_file is not None:
    trades_df = pd.read_csv(uploaded_file)
    st.write(trades_df)
    # NOTE(review): assumes the uploaded CSV has a 'Profit/Loss' column;
    # a file without it raises KeyError here.
    st.line_chart(trades_df[['Profit/Loss']])
|
559 |
+
|
560 |
+
# Section 6: LLM Chat for Strategy Insights
st.title("AI Chat for Strategy Insights")

if st.button("Ask AI about strategy performance"):
    # Bug fix: `ChatCompletion` (OpenAI SDK) is never imported in this
    # file, so the original handler crashed with NameError the moment the
    # button was pressed.  Surface a clear error in the UI instead.
    try:
        llm_response = ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[
                {"role": "system", "content": "You are an assistant for a day trader analyzing strategies."},
                {"role": "user", "content": f"What is your assessment of the {selected_strategy} strategy's performance?"}
            ]
        )
        st.write(llm_response.choices[0].message['content'])
    except NameError:
        st.error("OpenAI client is not configured: import ChatCompletion from the openai package and set an API key.")
    except Exception as e:
        st.error(f"AI request failed: {e}")

st.success(f"Monitoring strategy '{selected_strategy}' in real-time.")
|