prakashkota committed
Commit 8b31992 · 1 Parent(s): 42d4336

changed gradio interface to block

Files changed (1):
1. app.py +53 -57
app.py CHANGED
@@ -50,7 +50,6 @@ from pytz import timezone
  from pandas.tseries.offsets import BDay
  import hashlib
 
-
  import time
  import gc
 
@@ -61,8 +60,6 @@ lock = threading.Lock()
 
  # --- Load saved model and scalers --- #
  model_dir = "./model"
- # import os
- # model_dir = os.path.join(os.path.dirname(__file__), "model")
 
  NN_model = load_model(os.path.join(model_dir, "NN_CPU_model.keras"))
 
@@ -88,53 +85,53 @@ def safe_download(*args, retries=3, delay=1, **kwargs):
  time.sleep(delay)
  raise RuntimeError("yfinance download failed after retries.")
 
- lock = threading.Lock()
-
  # --- Inference Function --- #
  def predict_stock():
-
- # lightweight log print near the top of predict_stock() to verify it's hitting the cache config:
- print("YFINANCE_NO_CACHE =", os.getenv("YFINANCE_NO_CACHE"))
-
- # Check for time zone
- now_est = datetime.now(timezone("US/Eastern"))
- print("Current Eastern Time:", now_est)
- print("Trying to fetch data up to:", now_est.strftime('%Y-%m-%d'))
-
- # --- Clear yfinance cache to get latest volume and price data --- #
- cache_path = os.path.expanduser("~/.cache/py-yfinance")
- if os.path.exists(cache_path):
- print("Clearing yfinance cache...")
- shutil.rmtree(cache_path)
-
- Stock = "NVDA"
- start_date = "2020-01-01"
- train_end_date = "2024-12-31"
- #today = datetime.today().strftime('%Y-%m-%d')
- # Use EST for consistently for today
- today = now_est.strftime('%Y-%m-%d')
-
- # Download the full dataset (might contain stale final row)
- # solves any error with empty dataframes
- try:
- full_data = safe_download(
- tickers=Stock,
- start=start_date,
- end=today,
- interval="1d",
- auto_adjust=False,
- actions=False,
- progress=False,
- threads=True #<-- for parallel downloads, use True
- )
-
- if full_data.empty:
- print("yfinance returned empty data for:", today)
- return "Error: Stock data not available at this time. Please try again shortly.", pd.DataFrame()
-
- except Exception as e:
- print("yfinance error:", e)
- return "Error: Could not fetch stock data. Please try again later.", pd.DataFrame()
+
+ with lock:
+
+ # lightweight log print near the top of predict_stock() to verify it's hitting the cache config:
+ print("YFINANCE_NO_CACHE =", os.getenv("YFINANCE_NO_CACHE"))
+
+ # Check for time zone
+ now_est = datetime.now(timezone("US/Eastern"))
+ print("Current Eastern Time:", now_est)
+ print("Trying to fetch data up to:", now_est.strftime('%Y-%m-%d'))
+
+ # --- Clear yfinance cache to get latest volume and price data --- #
+ cache_path = os.path.expanduser("~/.cache/py-yfinance")
+ if os.path.exists(cache_path):
+ print("Clearing yfinance cache...")
+ shutil.rmtree(cache_path)
+
+ Stock = "NVDA"
+ start_date = "2020-01-01"
+ train_end_date = "2024-12-31"
+ #today = datetime.today().strftime('%Y-%m-%d')
+ # Use EST for consistently for today
+ today = now_est.strftime('%Y-%m-%d')
+
+ # Download the full dataset (might contain stale final row)
+ # solves any error with empty dataframes
+ try:
+ full_data = safe_download(
+ tickers=Stock,
+ start=start_date,
+ end=today,
+ interval="1d",
+ auto_adjust=False,
+ actions=False,
+ progress=False,
+ threads=True #<-- for parallel downloads, use True
+ )
+
+ if full_data.empty:
+ print("yfinance returned empty data for:", today)
+ return "Error: Stock data not available at this time. Please try again shortly.", pd.DataFrame()
+
+ except Exception as e:
+ print("yfinance error:", e)
+ return "Error: Could not fetch stock data. Please try again later.", pd.DataFrame()
 
  features = ["Open", "High", "Low", "Close", "Volume"]
 
@@ -198,9 +195,10 @@ def predict_stock():
 
  headers = ["Prediction For Date", "Actual Close", "Predicted Close", "% Error", "±MAPE Range"]
  table = tabulate(prediction_df.values, headers=headers, tablefmt="plain")
-
- """
+
  # Start Sanity Checks
+
+
  assert not np.any(np.isnan(X_scaled[-1].reshape(1, -1))), "NaNs detected in input!"
  assert X_scaled[-1].reshape(1, -1).shape == (1, X_scaled.shape[1]), f"Unexpected shape: {X_scaled[-1].reshape(1, -1).shape}"
  print("X_input shape:", X_scaled[-1].reshape(1, -1).shape)
@@ -221,21 +219,19 @@ def predict_stock():
  print("Debug prediction (scaled):", y_debug)
  print("Debug prediction (unscaled):", y_debug_unscaled)
 
+ gc.collect() #<-- garbage collection after predict
+
  import hashlib
 
  def md5(fname):
  with open(fname, "rb") as f:
  return hashlib.md5(f.read()).hexdigest()
 
- #print("Model MD5 checksum:", md5(os.path.join(model_dir, "NN_CPU_model.keras")))
+ print("Model MD5 checksum:", md5(os.path.join(model_dir, "NN_CPU_model.keras")))
 
  print(full_data.tail(3))
 
- # End Sanitiy Checks"
- """
-
- # Prints in log and helps to verify any issues with yfinance downloads
- print("Attempting to fetch data from", start_date, "to", today)
+ # End Sanitiy Checks
 
  return summary, prediction_df[["Date", "Actual Close", "Predicted Close", "% Error", "±MAPE Range"]]
 
@@ -255,7 +251,7 @@ demo = gr.Interface(
  live=True #<-- changed to True for live queuing
  )
 
- demo.launch(share=True)
+ demo.launch()
  """
 
  with gr.Blocks() as demo:
 
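Note on the retry helper: the hunk header above references safe_download(*args, retries=3, delay=1, **kwargs), but only its last two lines (time.sleep(delay) and the closing RuntimeError) appear as diff context. For orientation, here is a minimal sketch of what such a retry wrapper around yf.download could look like; the loop structure and log messages are assumptions, only the signature and those two visible lines come from the diff.

import time
import yfinance as yf

def safe_download(*args, retries=3, delay=1, **kwargs):
    # Hypothetical reconstruction: only the signature, time.sleep(delay), and the
    # final RuntimeError are visible in the diff; the retry loop itself is assumed.
    for attempt in range(retries):
        try:
            data = yf.download(*args, **kwargs)  # same kwargs predict_stock() passes in
            if not data.empty:
                return data
            print(f"Empty dataframe on attempt {attempt + 1}, retrying...")
        except Exception as e:
            print(f"yfinance error on attempt {attempt + 1}: {e}")
        time.sleep(delay)  # back off before the next attempt
    raise RuntimeError("yfinance download failed after retries.")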
137
 
 
195
 
196
  headers = ["Prediction For Date", "Actual Close", "Predicted Close", "% Error", "±MAPE Range"]
197
  table = tabulate(prediction_df.values, headers=headers, tablefmt="plain")
198
+
 
199
  # Start Sanity Checks
200
+
201
+
202
  assert not np.any(np.isnan(X_scaled[-1].reshape(1, -1))), "NaNs detected in input!"
203
  assert X_scaled[-1].reshape(1, -1).shape == (1, X_scaled.shape[1]), f"Unexpected shape: {X_scaled[-1].reshape(1, -1).shape}"
204
  print("X_input shape:", X_scaled[-1].reshape(1, -1).shape)
 
219
  print("Debug prediction (scaled):", y_debug)
220
  print("Debug prediction (unscaled):", y_debug_unscaled)
221
 
222
+ gc.collect() #<-- garbage collection after predict
223
+
224
  import hashlib
225
 
226
  def md5(fname):
227
  with open(fname, "rb") as f:
228
  return hashlib.md5(f.read()).hexdigest()
229
 
230
+ print("Model MD5 checksum:", md5(os.path.join(model_dir, "NN_CPU_model.keras")))
231
 
232
  print(full_data.tail(3))
233
 
234
+ # End Sanitiy Checks
 
 
 
 
235
 
236
  return summary, prediction_df[["Date", "Actual Close", "Predicted Close", "% Error", "±MAPE Range"]]
237
 
 
251
  live=True #<-- changed to True for live queuing
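Note on the serialization change: the added with lock: wraps the entire inference body behind the module-level threading.Lock, and gc.collect() is forced after each prediction. A minimal sketch of that pattern follows, with the real yfinance/Keras work replaced by a placeholder (run_inference is illustrative, not from the commit).

import gc
import threading

lock = threading.Lock()  # single module-level lock shared by all requests

def run_inference():
    # Placeholder for the real yfinance download + Keras prediction in app.py.
    return "prediction placeholder"

def predict_stock():
    with lock:                    # only one Gradio request runs inference at a time
        result = run_inference()
        gc.collect()              # reclaim memory after each prediction, as in the diff
        return result

Serializing requests this way trades throughput for bounded memory use, which is presumably why it is paired with gc.collect() on a CPU-only deployment.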
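Note on the gr.Blocks migration: the commit message says the gr.Interface wiring was replaced with gr.Blocks, but the diff is cut off right at with gr.Blocks() as demo:, so the new layout is not shown here. As a point of reference only, a minimal Blocks layout exposing a function that returns a summary string and a DataFrame could look like the sketch below; every component name and label is illustrative rather than taken from the commit.

import gradio as gr
import pandas as pd

def predict_stock():
    # Stand-in for the real inference function in app.py.
    return "summary placeholder", pd.DataFrame({"Date": [], "Predicted Close": []})

with gr.Blocks() as demo:
    gr.Markdown("NVDA next-day close prediction")  # illustrative title
    run_btn = gr.Button("Predict")
    summary_out = gr.Textbox(label="Summary")
    table_out = gr.Dataframe(label="Predictions")
    run_btn.click(fn=predict_stock, outputs=[summary_out, table_out])

demo.queue()   # queue requests so the lock-serialized callback is not hit concurrently
demo.launch()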