|
import requests |
|
import pandas as pd |
|
import gradio as gr |
|
import plotly.graph_objects as go |
|
import plotly.express as px |
|
from datetime import datetime, timedelta |
|
import json |
|
from web3 import Web3 |
|
from app_trans_new import create_transcation_visualizations |
|
from app_value_locked import fetch_daily_value_locked |
|
# NOTE(review): this RPC URL embeds an Alchemy API key directly in source —
# consider moving it to an environment variable / secret store.
OPTIMISM_RPC_URL = 'https://opt-mainnet.g.alchemy.com/v2/U5gnXPYxeyH43MJ9tP8ONBQHEDRav7H0'

# Module-level Web3 connection to the Optimism mainnet RPC endpoint.
web3 = Web3(Web3.HTTPProvider(OPTIMISM_RPC_URL))

# Fail fast at import time if the RPC endpoint is unreachable.
if not web3.is_connected():
    raise Exception("Failed to connect to the Optimism network.")

# Address of the service registry contract on Optimism.
contract_address = '0x3d77596beb0f130a4415df3D2D8232B3d3D31e44'

# Load the registry ABI from disk (path is relative to the working directory).
with open('./contracts/service_registry_abi.json', 'r') as abi_file:
    contract_abi = json.load(abi_file)

# Contract handle used throughout this module to enumerate services/agents.
service_registry = web3.eth.contract(address=contract_address, abi=contract_abi)
|
|
|
def get_transfers(integrator: str, wallet: str) -> dict:
    """Fetch transfer analytics for a wallet from the LI.FI API.

    :param integrator: Integrator identifier (e.g. "valory").
    :param wallet: Wallet address whose transfers should be fetched.
    :return: Parsed JSON response body as a dict (callers read its
        "transfers" key).  The original annotation said ``str``, but
        ``response.json()`` returns the decoded object.
    :raises requests.RequestException: On connection failure or timeout.
    """
    url = f"https://li.quest/v1/analytics/transfers?integrator={integrator}&wallet={wallet}"
    headers = {"accept": "application/json"}
    # Timeout added so a stalled API call cannot hang the dashboard build
    # indefinitely (this function is called once per matching service).
    response = requests.get(url, headers=headers, timeout=30)
    return response.json()
|
|
|
def load_activity_checker_contract(w3, staking_token_address):
    """
    Loads the Staking Token and Activity Checker contracts.

    :param w3: Web3 instance
    :param staking_token_address: Address of the staking token contract
    :return: Tuple of (Staking Token contract instance, Activity Checker contract instance)
    """
    try:
        # Build the staking token handle from its on-disk artifact's ABI.
        with open('./contracts/StakingToken.json', "r", encoding="utf-8") as file:
            token_artifact = json.load(file)
        token_contract = w3.eth.contract(
            address=staking_token_address,
            abi=token_artifact.get("abi", []),
        )

        # The staking token contract exposes the activity checker's address.
        checker_address = token_contract.functions.activityChecker().call()

        # Build the activity checker handle the same way.
        with open('./contracts/StakingActivityChecker.json', "r", encoding="utf-8") as file:
            checker_artifact = json.load(file)
        checker_contract = w3.eth.contract(
            address=checker_address,
            abi=checker_artifact.get("abi", []),
        )

        return token_contract, checker_contract

    except Exception as e:
        print(f"An error occurred while loading the contracts: {e}")
        raise
|
|
|
|
|
def fetch_and_aggregate_transactions():
    """Enumerate registry services and aggregate agent transfer activity.

    Returns a tuple of:
      - aggregated_transactions: flat list of LI.FI transfer dicts across
        all matching agents,
      - daily_agent_counts: {date str: count of distinct agent addresses
        whose service was created that day},
      - daily_agents_with_transactions: {date str: count of those agents
        with at least one multisig nonce recorded}.
    """
    total_services = service_registry.functions.totalSupply().call()
    aggregated_transactions = []
    daily_agent_counts = {}
    daily_agents_with_transactions = {}

    # Activity checker is resolved via this hard-coded staking token address.
    _staking_token_contract, activity_checker_contract = load_activity_checker_contract(web3, '0x88996bbdE7f982D93214881756840cE2c77C4992')

    # Service ids are 1-based on chain.
    for service_id in range(1, total_services + 1):
        service = service_registry.functions.getService(service_id).call()

        # Last element of the service tuple is its list of agent ids.
        agent_ids = service[-1]

        # Only services that include agent id 25 are of interest here
        # (presumably the target agent type — TODO confirm).
        if 25 in agent_ids:
            agent_address = service_registry.functions.getAgentInstances(service_id).call()[1][0]
            response_transfers = get_transfers("valory", agent_address)
            transfers = response_transfers.get("transfers", [])
            if isinstance(transfers, list):
                aggregated_transactions.extend(transfers)

            # Find the service's CreateService event to derive its creation date;
            # service[2] is passed as the configHash filter value.
            creation_event = service_registry.events.CreateService.create_filter(
                from_block=0, argument_filters={'serviceId': service_id, 'configHash': service[2]}
            ).get_all_entries()

            if creation_event:
                block_number = creation_event[0]['blockNumber']
                block = web3.eth.get_block(block_number)
                creation_timestamp = datetime.fromtimestamp(block['timestamp'])
                date_str = creation_timestamp.strftime('%Y-%m-%d')
                print("date_str",date_str)
                # Lazily create the per-day address sets.
                if date_str not in daily_agent_counts:
                    daily_agent_counts[date_str] = set()
                if date_str not in daily_agents_with_transactions:
                    daily_agents_with_transactions[date_str] = set()

                # service[1] appears to be the service's multisig (Safe)
                # address — TODO confirm against the registry ABI.
                service_safe = service[1]
                print("agent_address",agent_address,"service_safe",service_safe)
                multisig_nonces = activity_checker_contract.functions.getMultisigNonces(service_safe).call()[0]
                if multisig_nonces > 0:
                    daily_agents_with_transactions[date_str].add(agent_address)
                daily_agent_counts[date_str].add(agent_address)

    # Collapse the per-day address sets into plain counts.
    daily_agent_counts = {date: len(agents) for date, agents in daily_agent_counts.items()}
    daily_agents_with_transactions = {date: len(agents) for date, agents in daily_agents_with_transactions.items()}
    return aggregated_transactions, daily_agent_counts, daily_agents_with_transactions
|
|
|
|
|
def process_transactions_and_agents(data):
    """Shape raw transfer dicts and daily agent tallies into DataFrames.

    :param data: Tuple (transactions, daily_agent_counts,
        daily_agents_with_transactions) as produced by
        fetch_and_aggregate_transactions().
    :return: Tuple of (per-transaction DataFrame, weekly registered-agent
        counts, weekly active-agent counts, daily active-agent DataFrame
        with a derived 'week' column).
    """
    transactions, daily_agent_counts, daily_agents_with_transactions = data

    records = []
    for tx in transactions:
        send_leg = tx["sending"]
        recv_leg = tx["receiving"]

        # Convert raw integer token amounts into human units via decimals.
        send_amount = float(send_leg["amount"]) / (10 ** send_leg["token"]["decimals"])
        recv_amount = float(recv_leg["amount"]) / (10 ** recv_leg["token"]["decimals"])

        send_ts = datetime.utcfromtimestamp(send_leg["timestamp"])
        recv_ts = datetime.utcfromtimestamp(recv_leg["timestamp"])

        records.append({
            "transactionId": tx["transactionId"],
            "from_address": tx["fromAddress"],
            "to_address": tx["toAddress"],
            "sending_chain": send_leg["chainId"],
            "receiving_chain": recv_leg["chainId"],
            "sending_token_symbol": send_leg["token"]["symbol"],
            "receiving_token_symbol": recv_leg["token"]["symbol"],
            "sending_amount": send_amount,
            "receiving_amount": recv_amount,
            "sending_amount_usd": float(send_leg["amountUSD"]),
            "receiving_amount_usd": float(recv_leg["amountUSD"]),
            "sending_gas_used": int(send_leg["gasUsed"]),
            "receiving_gas_used": int(recv_leg["gasUsed"]),
            "sending_timestamp": send_ts,
            "receiving_timestamp": recv_ts,
            "date": send_ts.date(),
            # NOTE: despite the name, this holds the sending *date* string.
            "week": send_ts.strftime('%Y-%m-%d'),
        })

    df_transactions = pd.DataFrame(records)
    df_agents = pd.DataFrame(list(daily_agent_counts.items()), columns=['date', 'agent_count'])
    df_agents_with_transactions = pd.DataFrame(
        list(daily_agents_with_transactions.items()),
        columns=['date', 'agent_count_with_transactions'],
    )

    # Normalise dates and derive the Monday-anchored week each date falls in.
    for frame in (df_agents, df_agents_with_transactions):
        frame['date'] = pd.to_datetime(frame['date'])
        frame['week'] = frame['date'].dt.to_period('W').apply(lambda r: r.start_time)

    # Weekly totals for both agent series.
    df_agents_weekly = df_agents[['week', 'agent_count']].groupby('week').sum().reset_index()
    df_agents_with_transactions_weekly = (
        df_agents_with_transactions[['week', 'agent_count_with_transactions']]
        .groupby('week')
        .sum()
        .reset_index()
    )

    return df_transactions, df_agents_weekly, df_agents_with_transactions_weekly, df_agents_with_transactions
|
|
|
|
|
def create_visualizations():
    """Build all Plotly figures for the dashboard.

    Returns a tuple of (fig_swaps_chain, fig_bridges_chain,
    fig_agents_registered, fig_agents_with_transactions_daily, fig_tvl).
    """
    # Pull on-chain + LI.FI data and shape it into DataFrames.
    transactions_data = fetch_and_aggregate_transactions()
    df_transactions, df_agents_weekly, df_agents_with_transactions_weekly, df_agents_with_transactions = process_transactions_and_agents(transactions_data)

    # --- Total value locked (TVL) per chain, daily ---
    df_tvl = fetch_daily_value_locked()

    # TVL is the sum of both pool token amounts in USD.
    df_tvl["total_value_locked_usd"] = df_tvl["amount0_usd"] + df_tvl["amount1_usd"]
    df_tvl_daily = df_tvl.groupby(["date", "chain_name"])["total_value_locked_usd"].sum().reset_index()
    df_tvl_daily['date'] = pd.to_datetime(df_tvl_daily['date'])

    # Drop zero-value days so they don't render as empty bars.
    df_tvl_daily = df_tvl_daily[df_tvl_daily["total_value_locked_usd"] > 0]

    fig_tvl = px.bar(
        df_tvl_daily,
        x="date",
        y="total_value_locked_usd",
        color="chain_name",
        title="Total Volume Invested in Pools in Different Chains Daily",
        labels={"date": "Date", "total_value_locked_usd": "Total Volume Invested (USD)"},
        barmode='stack',
        color_discrete_map={
            "optimism": "blue",
            "base": "purple",
            "ethereum": "darkgreen"
        }
    )
    fig_tvl.update_layout(
        xaxis_title=None,
        # NOTE(review): dtick=1 puts a tick at every whole USD unit — verify
        # this is intended for dollar-denominated values.
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            tickvals=df_tvl_daily['date'],
            ticktext=df_tvl_daily['date'].dt.strftime('%b %d'),
            tickangle=90,
        ),
        bargap=0.6,
        bargroupgap=0.1,
        height=700,
        width=1200,
        margin=dict(l=50, r=50, t=50, b=50),
        showlegend=True,
        legend=dict(
            yanchor="top",
            y=0.99,
            xanchor="right",
            x=0.99
        )
    )
    fig_tvl.update_xaxes(tickformat="%b %d")

    # Map numeric chain ids to display names.
    chain_name_map = {
        10: "Optimism",
        8453: "Base",
        1: "Ethereum"
    }
    df_transactions["sending_chain"] = df_transactions["sending_chain"].map(chain_name_map)
    df_transactions["receiving_chain"] = df_transactions["receiving_chain"].map(chain_name_map)

    # Cast to str so any unmapped ids become "nan" instead of breaking grouping.
    df_transactions["sending_chain"] = df_transactions["sending_chain"].astype(str)
    df_transactions["receiving_chain"] = df_transactions["receiving_chain"].astype(str)
    df_transactions['date'] = pd.to_datetime(df_transactions['date'])

    # A transfer whose two legs have different token symbols is counted as a swap.
    df_transactions["is_swap"] = df_transactions.apply(lambda x: x["sending_token_symbol"] != x["receiving_token_symbol"], axis=1)

    # --- Daily swap counts per sending chain ---
    swaps_per_chain = df_transactions[df_transactions["is_swap"]].groupby(["date", "sending_chain"]).size().reset_index(name="swap_count")
    fig_swaps_chain = px.bar(
        swaps_per_chain,
        x="date",
        y="swap_count",
        color="sending_chain",
        title="Chain Daily Activity: Swaps",
        labels={"sending_chain": "Transaction Chain", "swap_count": "Daily Swap Nr"},
        barmode="stack",
        color_discrete_map={
            "Optimism": "blue",
            "Ethereum": "darkgreen",
            "Base": "purple"
        }
    )
    fig_swaps_chain.update_layout(
        xaxis_title="Date",
        yaxis_title="Daily Swap Count",
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            # Label only Mondays (weekday() == 0) to keep the axis readable.
            tickvals=[d for d in swaps_per_chain['date'] if d.weekday() == 0],
            ticktext=[d.strftime('%m-%d') for d in swaps_per_chain['date'] if d.weekday() == 0],
            tickangle=45,
        ),
        bargap=0.6,
        bargroupgap=0.1,
        height=700,
        width=1200,
        margin=dict(l=50, r=50, t=50, b=50),
        showlegend=True,
        legend=dict(
            yanchor="top",
            y=0.99,
            xanchor="right",
            x=0.99
        )
    )
    fig_swaps_chain.update_xaxes(tickformat="%m-%d")

    # A transfer whose legs are on different chains is counted as a bridge.
    df_transactions["is_bridge"] = df_transactions.apply(lambda x: x["sending_chain"] != x["receiving_chain"], axis=1)

    # --- Daily bridge counts per sending chain ---
    bridges_per_chain = df_transactions[df_transactions["is_bridge"]].groupby(["date", "sending_chain"]).size().reset_index(name="bridge_count")
    fig_bridges_chain = px.bar(
        bridges_per_chain,
        x="date",
        y="bridge_count",
        color="sending_chain",
        title="Chain Daily Activity: Bridges",
        labels={"sending_chain": "Transaction Chain", "bridge_count": "Daily Bridge Nr"},
        barmode="stack",
        color_discrete_map={
            "Optimism": "blue",
            "Ethereum": "darkgreen",
            "Base": "purple"
        }
    )
    fig_bridges_chain.update_layout(
        xaxis_title="Date",
        yaxis_title="Daily Bridge Count",
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            # Label only Mondays, same as the swaps figure.
            tickvals=[d for d in bridges_per_chain['date'] if d.weekday() == 0],
            ticktext=[d.strftime('%m-%d') for d in bridges_per_chain['date'] if d.weekday() == 0],
            tickangle=45,
        ),
        bargap=0.6,
        bargroupgap=0.1,
        height=700,
        width=1200,
        margin=dict(l=50, r=50, t=50, b=50),
        showlegend=True,
        legend=dict(
            yanchor="top",
            y=0.99,
            xanchor="right",
            x=0.99
        )
    )
    fig_bridges_chain.update_xaxes(tickformat="%m-%d")

    # --- Registered agents (daily + cumulative-within-week) ---
    df_agents_with_transactions['date'] = pd.to_datetime(df_agents_with_transactions['date'])

    # One row per date -> number of rows that day.
    daily_agents_df = df_agents_with_transactions.groupby('date').size().reset_index(name='daily_agent_count')

    # NOTE(review): manual data override for 2024-10-02 — confirm why this
    # date is special before removing.
    daily_agents_df.loc[daily_agents_df['date'] == '2024-10-02', 'daily_agent_count'] = 2

    # Cumulative count within each week (running sum per week_start group).
    df_agents_with_transactions['week_start'] = df_agents_with_transactions['date'].dt.to_period("W").apply(lambda r: r.start_time)
    cumulative_agents_df = df_agents_with_transactions.groupby(['week_start', 'date']).size().groupby(level=0).cumsum().reset_index(name='weekly_agent_count')

    # Same manual override applied to the cumulative series.
    cumulative_agents_df.loc[cumulative_agents_df['date'] == '2024-10-02', 'weekly_agent_count'] = 2

    combined_df = pd.merge(daily_agents_df, cumulative_agents_df, on='date', how='left')

    fig_agents_registered = go.Figure(data=[
        go.Bar(
            name='Daily nr of Registered Agents',
            x=combined_df['date'],
            y=combined_df['daily_agent_count'],
            marker_color='blue'
        ),
        go.Bar(
            name='Total Weekly Nr of Registered Agents',
            x=combined_df['date'],
            y=combined_df['weekly_agent_count'],
            marker_color='purple'
        )
    ])

    fig_agents_registered.update_layout(
        xaxis_title='Date',
        yaxis_title='Number of Agents',
        title="Nr of Agents Registered",
        barmode='group',
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            tickvals=combined_df['date'],
            ticktext=[d.strftime("%b %d") for d in combined_df['date']],
            tickangle=-45
        ),
        bargap=0.6,
        height=700,
        width=1200,
        margin=dict(l=50, r=50, t=50, b=50),
        showlegend=True,
        legend=dict(
            yanchor="top",
            y=0.99,
            xanchor="right",
            x=0.99
        )
    )

    # --- Daily active agents: weekly averages ---
    # The weekly-average frame is computed here but only its 'week' values are
    # used below, for the x-axis tick positions/labels.
    df_agents_with_transactions['day_of_week'] = df_agents_with_transactions['date'].dt.dayofweek
    df_agents_with_transactions_weekly_avg = df_agents_with_transactions.groupby(['week', 'day_of_week'])['agent_count_with_transactions'].mean().reset_index()
    df_agents_with_transactions_weekly_avg = df_agents_with_transactions_weekly_avg.groupby('week')['agent_count_with_transactions'].mean().reset_index()

    fig_agents_with_transactions_daily = px.bar(
        df_agents_with_transactions_weekly,
        x="week",
        y="agent_count_with_transactions",
        title="Daily Active Agents: Weekly Average Nr of agents with at least 1 transaction daily",
        labels={"week": "Week of", "agent_count_with_transactions": "Number of Agents with Transactions"},
        color_discrete_sequence=["darkgreen"]
    )
    fig_agents_with_transactions_daily.update_layout(
        title=dict(
            x=0.5,y=0.95,xanchor='center',yanchor='top'),
        yaxis=dict(tickmode='linear', tick0=0, dtick=1),
        xaxis=dict(
            tickmode='array',
            tickvals=df_agents_with_transactions_weekly_avg['week'],
            ticktext=df_agents_with_transactions_weekly_avg['week'].dt.strftime('%b %d'),
            tickangle=0
        ),
        bargap=0.6,
        bargroupgap=0.1,
        height=700,
        width=1200,
        margin=dict(l=50, r=50, t=50, b=50),
        showlegend=True,
        legend=dict(
            yanchor="top",
            y=0.99,
            xanchor="right",
            x=0.99
        )
    )

    return fig_swaps_chain, fig_bridges_chain, fig_agents_registered, fig_agents_with_transactions_daily,fig_tvl
|
|
|
|
|
def dashboard():
    """Assemble the Gradio Blocks UI: one tab per dashboard figure."""
    with gr.Blocks() as demo:
        gr.Markdown("# Valory Transactions Dashboard")

        # First tab renders the standalone transaction visualisation.
        with gr.Tab("Chain Daily activity"):
            gr.Plot(create_transcation_visualizations())

        # The remaining figures are built together in a single pass.
        swaps_fig, bridges_fig, registered_fig, daa_fig, tvl_fig = create_visualizations()

        with gr.Tab("Swaps Daily"):
            gr.Plot(swaps_fig)

        with gr.Tab("Bridges Daily"):
            gr.Plot(bridges_fig)

        with gr.Tab("Nr of Agents Registered"):
            gr.Plot(registered_fig)

        with gr.Tab("DAA"):
            gr.Plot(daa_fig)

        with gr.Tab("Total Value Locked"):
            gr.Plot(tvl_fig)

    return demo
|
|
|
|
|
# Launch the Gradio app only when executed as a script.
if __name__ == "__main__":
    dashboard().launch()