cyberosa committed on
Commit 211cb3f · 1 Parent(s): 90b36ae

updating scripts

scripts/closed_markets_divergence.py CHANGED
@@ -1,6 +1,4 @@
-from pathlib import Path
 import os
-import math
 import pandas as pd
 import numpy as np
 from typing import Any, Union
@@ -11,6 +9,7 @@ from concurrent.futures import ThreadPoolExecutor, as_completed
 from tqdm import tqdm
 import time
 from datetime import datetime
+from utils import DATA_DIR, TMP_DIR
 
 NUM_WORKERS = 10
 IPFS_POLL_INTERVAL = 0.2
@@ -18,9 +17,6 @@ INVALID_ANSWER_HEX = (
     "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
 )
 INVALID_ANSWER = -1
-SCRIPTS_DIR = Path(__file__).parent
-ROOT_DIR = SCRIPTS_DIR.parent
-DATA_DIR = ROOT_DIR / "data"
 SUBGRAPH_API_KEY = os.environ.get("SUBGRAPH_API_KEY", None)
 OMEN_SUBGRAPH_URL = Template(
     """https://gateway-arbitrum.network.thegraph.com/api/${subgraph_api_key}/subgraphs/id/9fUVQpFwzpdWS9bq5WkAnmKbNNcoBwatMR4yZq81pbbz"""
@@ -105,7 +101,7 @@ def convert_hex_to_int(x: Union[str, float]) -> Union[int, float]:
 def get_closed_markets():
     print("Reading parquet file with closed markets data from trades")
     try:
-        markets = pd.read_parquet(DATA_DIR / "fpmmTrades.parquet")
+        markets = pd.read_parquet(TMP_DIR / "fpmmTrades.parquet")
     except Exception:
         print("Error reading the parquet file")
 
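The inline path constants removed above now come from a shared utils module that is not shown in this commit. A minimal sketch of what scripts/utils.py presumably exposes, assuming it keeps the same Path-based layout as the removed lines (the "tmp" directory name is an assumption):

# hypothetical scripts/utils.py -- not part of this commit
from pathlib import Path

SCRIPTS_DIR = Path(__file__).parent   # mirrors the constants removed above
ROOT_DIR = SCRIPTS_DIR.parent
DATA_DIR = ROOT_DIR / "data"          # still imported alongside TMP_DIR
TMP_DIR = ROOT_DIR / "tmp"            # assumed name; fpmmTrades.parquet is now read from here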
scripts/metrics.py CHANGED
@@ -252,21 +252,30 @@ def compute_daily_metrics_by_market_creator(
 
 
 def compute_winning_metrics_by_trader(
-    traders_data: pd.DataFrame, trader_filter: str = None
+    traders_data: pd.DataFrame, unknown_info: pd.DataFrame, trader_filter: str = None
 ) -> pd.DataFrame:
     """Function to compute the winning metrics at the trader level per week and with different market creators"""
-    market_all = traders_data.copy(deep=True)
+    if len(unknown_info) > 0:
+        all_data = pd.concat([traders_data, unknown_info], axis=0)
+    else:
+        all_data = traders_data
+
+    market_all = all_data.copy(deep=True)
     market_all["market_creator"] = "all"
 
     # merging both dataframes
-    final_traders = pd.concat([market_all, traders_data], ignore_index=True)
+    final_traders = pd.concat([market_all, all_data], ignore_index=True)
     final_traders = final_traders.sort_values(by="creation_timestamp", ascending=True)
 
-    if trader_filter == "Olas":
-        final_traders = final_traders.loc[final_traders["staking"] != "non_Olas"]
-    else:  # non_Olas traders
+    if trader_filter == "non_Olas":  # non_Olas
         final_traders = final_traders.loc[final_traders["staking"] == "non_Olas"]
+    elif trader_filter == "Olas":
+        final_traders = final_traders.loc[final_traders["staking"] != "non_Olas"]
+    else:  # all traders
+        print("No filtering")
 
+    if len(final_traders) == 0:
+        return pd.DataFrame()
     winning_df = win_metrics_trader_level(final_traders)
     winning_df.head()
     return winning_df
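A hypothetical call of the updated signature, assuming traders_data and unknown_info are weekly trade DataFrames sharing the creation_timestamp and staking columns (the variable names below are illustrative, not part of this commit):

# illustrative only; DataFrame names are placeholders
weekly_olas = compute_winning_metrics_by_trader(
    traders_data=traders_df,
    unknown_info=unknown_traders_df,  # may be empty; concatenated only when it has rows
    trader_filter="Olas",             # "Olas", "non_Olas", or None to keep all traders
)
if weekly_olas.empty:
    print("No traders matched the filter")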
scripts/num_mech_calls.py CHANGED
@@ -69,7 +69,7 @@ def compute_total_mech_calls():
         print(f"Error updating the invalid trades parquet {e}")
 
     print("Reading trades weekly info file")
-    fpmmTrades = pd.read_parquet(DATA_DIR / "fpmmTrades.parquet")
+    fpmmTrades = pd.read_parquet(TMP_DIR / "fpmmTrades.parquet")
     try:
         fpmmTrades["creationTimestamp"] = fpmmTrades["creationTimestamp"].apply(
             lambda x: transform_to_datetime(x)
scripts/retention_metrics.py CHANGED
@@ -106,6 +106,8 @@ def calculate_cohort_retention(
     df_filtered = df.loc[
         (df["market_creator"] == market_creator) & (df["trader_type"] == trader_type)
     ]
+    if len(df_filtered) == 0:
+        return pd.DataFrame()
     df_filtered = df_filtered.sort_values(by="creation_timestamp", ascending=True)
     # Get first week of activity for each trader
     first_activity = (
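With the new guard, callers get an empty DataFrame instead of a downstream error when no rows match the filters. A hedged usage sketch, assuming the function takes the traders DataFrame plus the market_creator and trader_type filters referenced in the hunk (the full signature and the filter values below are not shown in this diff):

# illustrative only; filter values are placeholders
retention = calculate_cohort_retention(
    df, market_creator="all", trader_type="Olas"
)
if retention.empty:
    print("No rows for this market_creator / trader_type combination")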