Skip to content

Commit

Permalink
updating data code replaced
Browse files Browse the repository at this point in the history
  • Loading branch information
iamnovichek committed Nov 15, 2024
1 parent ed02839 commit 9b1586d
Show file tree
Hide file tree
Showing 16 changed files with 880 additions and 2 deletions.
4 changes: 3 additions & 1 deletion apps/dashboard_app/data_conector.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,14 @@
import sqlalchemy
from dotenv import load_dotenv

from shared.constants import ZKLEND

load_dotenv()


class DataConnector:
REQUIRED_VARS = ("DB_USER", "DB_PASSWORD", "DB_HOST", "DB_PORT", "DB_NAME")
SQL_QUERY = "SELECT * FROM %s WHERE protocol_id = 'zkLend'"
SQL_QUERY = f"SELECT * FROM %s WHERE protocol_id = '{ZKLEND}'"

def __init__(self):
"""
Expand Down
171 changes: 171 additions & 0 deletions apps/dashboard_app/load_data.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,171 @@
import asyncio
import itertools
import logging
import math
import time

import pandas



import src.loans_table
import src.main_chart
import src.persistent_state
import src.protocol_stats
import src.protocol_parameters
import src.settings
import src.swap_amm
import src.zklend

from data_conector import DataConnector
from shared.constants import ZKLEND
from shared.protocol_states.zklend import ZkLendState
from shared.protocol_initializers.zklend import ZkLendInitializer
from helpers.tools import get_prices, GS_BUCKET_NAME

def update_data(zklend_state: src.zklend.ZkLendState) -> src.zklend.ZkLendState:
    """
    Replay all zkLend events newer than ``zklend_state.last_block_number`` onto
    the given state, then recompute and persist the derived artifacts.

    Steps, in order:
      1. Fetch new zkLend events and process them one by one to update the state.
      2. Collect token parameters and fetch prices for the underlying tokens.
      3. Initialize the swap AMMs and regenerate main chart data for every pair.
      4. Compute loan, general, supply, collateral, debt and utilization stats
         (each saved as a side effect via ``save_data=True``).
      5. Upload the last-update marker, the loan entities and the pickled state,
         then reload the loan entities back from the GS bucket.

    :param zklend_state: current zkLend protocol state; mutated in place.
    :return: the same ``zklend_state`` instance, updated.
    """
    logging.info(f"Updating SQL data from {zklend_state.last_block_number}...")
    time_end = time.time()
    # TODO: parallelize per protocol
    # TODO: stream the data, don't wait until we get all events
    zklend_events = src.zklend.zklend_get_events(
        start_block_number=zklend_state.last_block_number + 1
    )

    logging.info(
        f"got = {len(zklend_events)} events in {time.time() - time_end}s"
    ) # TODO: this log will become obsolete

    # Iterate over ordered events to obtain the final state of each user.
    t1 = time.time()
    for _, zklend_event in zklend_events.iterrows():
        zklend_state.process_event(event=zklend_event)

    logging.info(f"updated state in {time.time() - t1}s")

    # TODO: move this to state inits above?
    # # Collect token parameters.
    t2 = time.time()
    asyncio.run(zklend_state.collect_token_parameters())
    logging.info(f"collected token parameters in {time.time() - t2}s")

    # Get prices of the underlying tokens.
    t_prices = time.time()
    states = [
        zklend_state,
    ]
    # Union of collateral, debt, and settings-defined token decimals, keyed by
    # underlying token address; later entries overwrite earlier duplicates.
    underlying_addresses_to_decimals = {}
    for state in states:
        underlying_addresses_to_decimals.update(
            {
                x.underlying_address: x.decimals
                for x in state.token_parameters.collateral.values()
            }
        )
        underlying_addresses_to_decimals.update(
            {
                x.underlying_address: x.decimals
                for x in state.token_parameters.debt.values()
            }
        )
    underlying_addresses_to_decimals.update(
        {
            x.address: int(math.log10(x.decimal_factor))
            for x in src.settings.TOKEN_SETTINGS.values()
        }
    )
    prices = get_prices(token_decimals=underlying_addresses_to_decimals)
    logging.info(f"prices in {time.time() - t_prices}s")

    t_swap = time.time()
    swap_amms = src.swap_amm.SwapAmm()
    asyncio.run(swap_amms.init())
    logging.info(f"swap in {time.time() - t_swap}s")

    t3 = time.time()
    # One chart per (pair, state) combination; data is persisted inside
    # get_main_chart_data via save_data=True, so the return value is discarded.
    for pair, state in itertools.product(src.settings.PAIRS, states):
        protocol = src.protocol_parameters.get_protocol(state=state)
        logging.info(
            f"Preparing main chart data for protocol = {protocol} and pair = {pair}."
        )
        # TODO: Decipher `pair` in a smarter way.
        collateral_token_underlying_symbol, debt_token_underlying_symbol = pair.split(
            "-"
        )
        _ = src.main_chart.get_main_chart_data(
            state=state,
            prices=prices,
            swap_amms=swap_amms,
            collateral_token_underlying_symbol=collateral_token_underlying_symbol,
            debt_token_underlying_symbol=debt_token_underlying_symbol,
            save_data=True,
        )
        logging.info(
            f"Main chart data for protocol = {protocol} and pair = {pair} prepared in {time.time() - t3}s"
        )
    logging.info(f"updated graphs in {time.time() - t3}s")

    loan_stats = {}
    for state in states:
        protocol = src.protocol_parameters.get_protocol(state=state)
        loan_stats[protocol] = src.loans_table.get_loans_table_data(
            state=state, prices=prices, save_data=True
        )

    general_stats = src.protocol_stats.get_general_stats(
        states=states, loan_stats=loan_stats, save_data=True
    )
    supply_stats = src.protocol_stats.get_supply_stats(
        states=states,
        prices=prices,
        save_data=True,
    )
    _ = src.protocol_stats.get_collateral_stats(states=states, save_data=True)
    debt_stats = src.protocol_stats.get_debt_stats(states=states, save_data=True)
    _ = src.protocol_stats.get_utilization_stats(
        general_stats=general_stats,
        supply_stats=supply_stats,
        debt_stats=debt_stats,
        save_data=True,
    )

    # Persist the last processed block/timestamp so the next run can resume
    # from `last_block_number + 1`.
    max_block_number = zklend_events["block_number"].max()
    max_timestamp = zklend_events["timestamp"].max()
    last_update = {
        "timestamp": str(max_timestamp),
        "block_number": str(max_block_number),
    }
    src.persistent_state.upload_object_as_pickle(
        last_update, path=src.persistent_state.LAST_UPDATE_FILENAME
    )
    # Loan entities are saved separately, cleared from the state before the
    # state itself is pickled (keeps the pickle small), then reloaded from GCS.
    zklend_state.save_loan_entities(
        path=src.persistent_state.PERSISTENT_STATE_LOAN_ENTITIES_FILENAME
    )
    zklend_state.clear_loan_entities()
    src.persistent_state.upload_object_as_pickle(
        zklend_state, path=src.persistent_state.PERSISTENT_STATE_FILENAME
    )
    loan_entities = pandas.read_parquet(
        f"gs://{GS_BUCKET_NAME}/{src.persistent_state.PERSISTENT_STATE_LOAN_ENTITIES_FILENAME}",
        engine="fastparquet",
    )
    zklend_state.set_loan_entities(loan_entities=loan_entities)
    logging.info(f"Updated CSV data in {time.time() - time_end}s")

    return zklend_state


# Script entry point. BUG FIX: the original guard compared against the literal
# string "__name__" (`if __name__ == "__name__":`), which is never equal to the
# module's __name__, so this block could never execute. It must compare against
# "__main__".
if __name__ == "__main__":
    # Fetching data from DB
    connector = DataConnector()
    loan_states_data_frame = connector.fetch_data("loan_state", ZKLEND)

    # Initializing ZkLend state
    zklend_state = ZkLendState()
    zklend_initializer = ZkLendInitializer(zklend_state)
    user_ids = zklend_initializer.get_user_ids_from_df(loan_states_data_frame)
    zklend_initializer.set_last_loan_states_per_users(user_ids)

    # Updating data
    zklend_initializer.zklend_state = update_data(zklend_initializer.zklend_state)
    print(zklend_initializer.zklend_state)
2 changes: 1 addition & 1 deletion apps/data_handler/handlers/loan_states/zklend/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from data_handler.handler_tools.constants import ProtocolAddresses
from data_handler.handlers.loan_states.abstractions import LoanStateComputationBase
from data_handler.handlers.loan_states.zklend.events import ZkLendState
from data_handler.handlers.loan_states.zklend.utils import ZkLendInitializer
from shared.protocol_initializers.zklend import ZkLendInitializer

from shared.constants import ProtocolIDs
from shared.state import State
Expand Down
64 changes: 64 additions & 0 deletions apps/shared/helpers.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,70 @@
import starknet_py

from .blockchain_call import func_call
from .state import State
from shared.protocol_states.zklend import ZkLendState

def add_leading_zeros(hash: str) -> str:
    """
    Left-pad a `0x`-prefixed hex hash to the canonical 64-digit width.

    Example: `0x436d8d078de345c11493bd91512eae60cd2713e05bcaa0bb9f0cba90358c6e`
    becomes `0x00436d8d078de345c11493bd91512eae60cd2713e05bcaa0bb9f0cba90358c6e`.
    """
    # Drop the "0x" prefix, pad the remaining digits to 64, then re-prefix.
    digits = hash[2:]
    return f"0x{digits.rjust(64, '0')}"


async def get_symbol(token_address: str) -> str:
    """
    Resolve the on-chain symbol for *token_address* via a `symbol` contract call.

    The DAI V2 token is special-cased by address because its on-chain symbol is
    also `DAI`, which would be indistinguishable from DAI V1.
    """
    # DAI V2's symbol is `DAI` but we don't want to mix it with DAI = DAI V1.
    dai_v2_address = (
        "0x05574eb6b8789a91466f902c380d978e472db68170ff82a5b650b95a58ddf4ad"
    )
    if token_address == dai_v2_address:
        return "DAI V2"

    response = await func_call(
        addr=token_address,
        selector="symbol",
        calldata=[],
    )
    # For some Nostra Mainnet tokens, a list of length 3 is returned; the
    # symbol felt then sits at index 1 instead of index 0.
    felt = response[1] if len(response) > 1 else response[0]
    return starknet_py.cairo.felt.decode_shortstring(felt)


def get_protocol(state: State) -> str:
    """
    Map a protocol *state* object to its human-readable protocol name.

    :param state: a protocol state instance (currently only zkLend is supported).
    :return: the protocol name, e.g. ``"zkLend"``.
    :raises ValueError: if the state's type is not recognized.
    """
    # TODO: Improve the inference.
    if isinstance(state, ZkLendState):
        return "zkLend"

    # We'll add it later

    # if isinstance(state, src.hashstack_v0.HashstackV0State):
    #     return "Hashstack V0"
    # if isinstance(state, src.hashstack_v1.HashstackV1State):
    #     return "Hashstack V1"
    # if isinstance(state, src.nostra_alpha.NostraAlphaState) and not isinstance(
    #     state, src.nostra_mainnet.NostraMainnetState
    # ):
    #     return "Nostra Alpha"
    # if isinstance(state, src.nostra_mainnet.NostraMainnetState):
    #     return "Nostra Mainnet"
    # Include the offending type in the message (was a bare `raise ValueError`,
    # which gave the caller nothing to diagnose with).
    raise ValueError(f"Unknown protocol state type: {type(state).__name__}")


def get_directory(state: State) -> str:
    """
    Map a protocol *state* object to the directory its data files are saved in.

    :param state: a protocol state instance (currently only zkLend is supported).
    :return: the directory name, e.g. ``"zklend_data"``.
    :raises ValueError: if the state's type is not recognized.
    """
    # TODO: Improve the inference.
    if isinstance(state, ZkLendState):
        return "zklend_data"

    # We'll add it later

    # if isinstance(state, src.hashstack_v0.HashstackV0State):
    #     return "hashstack_v0_data"
    # if isinstance(state, src.hashstack_v1.HashstackV1State):
    #     return "hashstack_v1_data"
    # if isinstance(state, src.nostra_alpha.NostraAlphaState) and not isinstance(
    #     state, src.nostra_mainnet.NostraMainnetState
    # ):
    #     return "nostra_alpha_data"
    # if isinstance(state, src.nostra_mainnet.NostraMainnetState):
    #     return "nostra_mainnet_data"
    # Include the offending type in the message (was a bare `raise ValueError`,
    # which gave the caller nothing to diagnose with).
    raise ValueError(f"Unknown protocol state type: {type(state).__name__}")
72 changes: 72 additions & 0 deletions apps/shared/loans_table.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
import pandas

from shared.types import Prices
from shared.state import State

from shared.helpers import get_protocol, get_directory


def get_loans_table_data(
    state: State,
    prices: Prices,
    save_data: bool = False,
) -> pandas.DataFrame:
    """
    Build a per-loan summary table for every loan entity in *state*.

    For each loan entity, computes collateral (raw and risk-adjusted) and debt
    in USD, plus raw and standardized health factors, and collects them into
    one DataFrame row alongside string renderings of the collateral and debt.

    :param state: protocol state holding ``loan_entities``, token parameters
        and interest-rate models.
    :param prices: token prices used for the USD conversions.
    :param save_data: when True, additionally persists the table to
        ``<protocol directory>/loans.parquet``.
    :return: one-row-per-loan ``pandas.DataFrame``.
    """
    data = []
    for loan_entity_id, loan_entity in state.loan_entities.items():
        collateral_usd = loan_entity.compute_collateral_usd(
            risk_adjusted=False,
            collateral_token_parameters=state.token_parameters.collateral,
            collateral_interest_rate_model=state.interest_rate_models.collateral,
            prices=prices,
        )
        # Same computation with risk adjustment on; feeds the health factors.
        risk_adjusted_collateral_usd = loan_entity.compute_collateral_usd(
            risk_adjusted=True,
            collateral_token_parameters=state.token_parameters.collateral,
            collateral_interest_rate_model=state.interest_rate_models.collateral,
            prices=prices,
        )
        debt_usd = loan_entity.compute_debt_usd(
            risk_adjusted=False,
            debt_token_parameters=state.token_parameters.debt,
            debt_interest_rate_model=state.interest_rate_models.debt,
            prices=prices,
        )

        health_factor = loan_entity.compute_health_factor(
            standardized=False,
            risk_adjusted_collateral_usd=risk_adjusted_collateral_usd,
            debt_usd=debt_usd,
        )
        standardized_health_factor = loan_entity.compute_health_factor(
            standardized=True,
            risk_adjusted_collateral_usd=risk_adjusted_collateral_usd,
            debt_usd=debt_usd,
        )

        data.append(
            {
                "User": (
                    loan_entity_id
                ),
                "Protocol": get_protocol(state=state),
                "Collateral (USD)": collateral_usd,
                "Risk-adjusted collateral (USD)": risk_adjusted_collateral_usd,
                "Debt (USD)": debt_usd,
                "Health factor": health_factor,
                "Standardized health factor": standardized_health_factor,
                "Collateral": loan_entity.get_collateral_str(
                    collateral_token_parameters=state.token_parameters.collateral,
                    collateral_interest_rate_model=state.interest_rate_models.collateral,
                ),
                "Debt": loan_entity.get_debt_str(
                    debt_token_parameters=state.token_parameters.debt,
                    debt_interest_rate_model=state.interest_rate_models.debt,
                ),
            }
        )
    data = pandas.DataFrame(data)
    if save_data:
        directory = get_directory(state=state)
        path = f"{directory}/loans.parquet"
        # NOTE(review): `src` is never imported in this module (imports here are
        # pandas + shared.*), so this line raises NameError whenever
        # save_data=True. Presumably the intent is the save helper from the
        # shared package (or data.to_parquet) — TODO confirm and fix the import.
        src.helpers.save_dataframe(data=data, path=path)
    return data
Empty file.
File renamed without changes.
Empty file.
Loading

0 comments on commit 9b1586d

Please sign in to comment.