Skip to content

Commit

Permalink
Merge pull request #5 from Cogwheel-Validator/develop
Browse files Browse the repository at this point in the history
Merge version 0.4.2 as the latest stable
  • Loading branch information
Kamikaza731 authored Dec 10, 2024
2 parents ec60eed + 68e526b commit 3b28eda
Show file tree
Hide file tree
Showing 9 changed files with 289 additions and 26 deletions.
35 changes: 35 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
# Change log

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]

## [0.4.2] - 2024-12-10

### Fixed

- Fixed the issue where the info alert was not sent when the API comes back online

## [0.4.1] - 2024-12-09

### Fixed

- Fixed the issue where, when no API was working, the check would return NoneType, which caused the monitoring tool to fail to send an alert

## [0.4.0] - 2024-12-05

### Added

- Alert if there is no working API to query from
- Added logic to check if the database schema is up to date and update it if needed

## [0.3.0] - 2024-12-01
First official public version of the monitoring tool

- Monitoring system for wallet balance and unsigned oracle events
- Pagerduty and Telegram alerts
- Dead man switch
- Manage and setup SQLite database on its own
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "tnom"
version = "0.3.0"
version = "0.4.2"
description = "The Nibiru Oracle Monitoring is a tool for monitoring signer wallet for Nibiru Oracle."
authors = ["Kamikaza731"]
readme = "README.md"
Expand Down
4 changes: 2 additions & 2 deletions tnom/alerts/telegram_alert.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,9 +47,9 @@ async def telegram_alert_trigger(
try:
bot = Bot(telegram_bot_token)
# Convert alert details to string
details_to_str = "```\n" + yaml.dump( # turn dict into yaml and dump it?
details_to_str = yaml.dump( # turn dict into yaml and dump it?
# Future note: look for some better solution later
alert_details, default_flow_style=False) + "\n```"
alert_details, default_flow_style=False)
return await bot.send_message(chat_id=chat_id, text=details_to_str)
except Exception as e:
if isinstance(e, (telegram.error.TelegramError, telegram.error.NetworkError)):
Expand Down
19 changes: 12 additions & 7 deletions tnom/check_apis.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,20 +24,25 @@ async def check_apis(load_config: dict[str, Any]) -> list[str]:
loaded_apis = load_config["APIs"]
tasks = [query.check_latest_block(api, session) for api in loaded_apis]
responses = await asyncio.gather(*tasks, return_exceptions=True)
# Fully functional APIS
# Fully functional APIs
online_apis_with_data = [(api, response) for api, response in zip(
loaded_apis, responses) if not isinstance(response, Exception)]
# Unhealthy APIS
loaded_apis, responses) if not isinstance(response, Exception)
and response is not None]
# Unhealthy APIs
unhealthy_apis = [api for api, response in zip(
loaded_apis, responses) if isinstance(response, Exception)]
loaded_apis, responses) if isinstance(response, Exception)
or response is None]

if not online_apis_with_data:
logging.warning("No healthy APIs found")
logging.info("Unhealthy APIs: %s", unhealthy_apis)
return []

max_block_height = max(api_data[0] for _, api_data in online_apis_with_data)

healthy_apis = [api for api, (block_height, _) in online_apis_with_data
if max_block_height - block_height <= MAX_BLOCK_HEIGHT_DIFF]
logging.info("""Healthy APIs: %s\n Unhealthy APIs: %s\n""",
healthy_apis, unhealthy_apis)
if max_block_height - block_height <= MAX_BLOCK_HEIGHT_DIFF]

logging.info("Healthy APIs: %s\nUnhealthy APIs: %s",
healthy_apis, unhealthy_apis)
return healthy_apis
4 changes: 4 additions & 0 deletions tnom/database_handler/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,23 +3,27 @@
The database_handler package provides functions for interacting with the database.
"""
from .db_manager import (
check_and_update_database_schema,
check_database_exists,
check_if_database_directory_exists,
check_if_epoch_is_recorded,
create_database,
create_database_directory,
overwrite_single_field,
read_current_epoch_data,
read_last_recorded_epoch,
write_epoch_data,
)

__all__ = [
"check_and_update_database_schema",
"check_database_exists",
"check_if_database_directory_exists",
"check_if_epoch_is_recorded",
"create_database",
"create_database_directory",
"overwrite_single_field",
"read_current_epoch_data",
"read_last_recorded_epoch",
"write_epoch_data",
]
95 changes: 92 additions & 3 deletions tnom/database_handler/db_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
"""
from __future__ import annotations

import logging
import sqlite3
from pathlib import Path

Expand All @@ -34,6 +35,87 @@ def check_database_exists(path: Path) -> bool:
"""
return Path(path).exists()

def check_and_update_database_schema(path: Path) -> None:
    """Ensure the ``tnom`` table contains every expected column.

    Compares the table's current columns against the expected schema and adds
    any that are missing via ``ALTER TABLE``, so databases created by older
    versions of the tool keep working with newer code.

    Args:
        path (Path): The path to the database file.

    Returns:
        None

    Raises:
        sqlite3.Error: If connecting to the database or inspecting the
            schema fails.
    """
    # Expected columns with their SQL type/default.
    # Adding a future column only requires a new entry here.
    expected_columns = {
        "slash_epoch": "INTEGER PRIMARY KEY",
        "miss_counter_events": "INTEGER",
        "miss_counter_p1_executed": "INTEGER DEFAULT 0",
        "miss_counter_p2_executed": "INTEGER DEFAULT 0",
        "miss_counter_p3_executed": "INTEGER DEFAULT 0",
        "unsigned_oracle_events": "INTEGER",
        "price_feed_addr_balance": "INTEGER",
        "small_balance_alert_executed": "INTEGER",
        "very_small_balance_alert_executed": "INTEGER",
        "consecutive_misses": "INTEGER DEFAULT 0",
        "api_cons_miss": "INTEGER DEFAULT 0",
    }

    try:
        with sqlite3.connect(path) as conn:
            cur = conn.cursor()
            cur.execute("PRAGMA table_info(tnom)")
            existing_columns = {column[1] for column in cur.fetchall()}

            if not existing_columns:
                # Table does not exist yet: PRAGMA returned no rows. Creating
                # the table from scratch is create_database()'s job, and
                # attempting ALTER TABLE here would fail for every column.
                logging.info(
                    "Table tnom does not exist yet; skipping schema update")
                return

            # Add any column the on-disk schema is missing.
            for column_name, column_type in expected_columns.items():
                if column_name not in existing_columns:
                    try:
                        # column_name/column_type come from the fixed dict
                        # above, so the f-string cannot inject arbitrary SQL.
                        cur.execute(
                            f"ALTER TABLE tnom ADD COLUMN {column_name} {column_type}")
                        logging.info("Added missing column: %s", column_name)
                    except sqlite3.Error:
                        # Keep migrating the remaining columns even if one
                        # ALTER fails; logging.exception records the traceback.
                        logging.exception("Error adding column %s", column_name)

            conn.commit()

    except sqlite3.Error:
        logging.exception("Database schema update failed")
        raise

def read_last_recorded_epoch(path: Path) -> int:
    """Return the highest epoch stored in the ``tnom`` table.

    Args:
        path (Path): The path to the database file.

    Returns:
        int: The most recent epoch recorded in the database.

    Raises:
        ValueError: If no epochs are found in the database.
        sqlite3.Error: If the database query fails.
    """
    try:
        with sqlite3.connect(path) as conn:
            row = conn.execute("SELECT MAX(slash_epoch) FROM tnom").fetchone()
            latest_epoch = row[0]

            # MAX() over an empty table yields a single NULL row.
            if latest_epoch is None:
                raise ValueError("No epochs found in the database")

            return latest_epoch
    except sqlite3.Error as e:
        # ValueError above is unaffected: this clause only catches sqlite3
        # errors, so the "no epochs" signal still propagates to the caller.
        logging.exception("Error reading last epoch: %s", e)
        raise


def create_database(path: Path) -> None:
"""Create the database file.
Expand All @@ -56,7 +138,8 @@ def create_database(path: Path) -> None:
price_feed_addr_balance INTEGER,
small_balance_alert_executed INTEGER,
very_small_balance_alert_executed INTEGER,
consecutive_misses INTEGER DEFAULT 0
consecutive_misses INTEGER DEFAULT 0,
api_cons_miss INTEGER DEFAULT 0
)""",
)

Expand Down Expand Up @@ -117,6 +200,7 @@ def read_current_epoch_data(path: Path, epoch: int) -> dict[str, int]:
"very_small_balance_alert_executed": data[
"very_small_balance_alert_executed"],
"consecutive_misses": data["consecutive_misses"],
"api_cons_miss": data["api_cons_miss"],
}

def write_epoch_data(path: Path, data: dict[str, int]) -> None:
Expand Down Expand Up @@ -154,6 +238,7 @@ def write_epoch_data(path: Path, data: dict[str, int]) -> None:
or data.get("small_balance_alert_executed") is None
or data.get("very_small_balance_alert_executed") is None
or data.get("consecutive_misses") is None
or data.get("api_cons_miss") is None
):
msg = "data must contain all required fields"
raise ValueError(msg)
Expand All @@ -162,7 +247,7 @@ def write_epoch_data(path: Path, data: dict[str, int]) -> None:
# Try to insert first
try:
cur.execute(
"INSERT INTO tnom VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"INSERT INTO tnom VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(
data["slash_epoch"],
data["miss_counter_events"],
Expand All @@ -174,6 +259,7 @@ def write_epoch_data(path: Path, data: dict[str, int]) -> None:
data["small_balance_alert_executed"],
data["very_small_balance_alert_executed"],
data["consecutive_misses"],
data["api_cons_miss"],
),
)

Expand All @@ -189,7 +275,8 @@ def write_epoch_data(path: Path, data: dict[str, int]) -> None:
price_feed_addr_balance = ?,
small_balance_alert_executed = ?,
very_small_balance_alert_executed = ?,
consecutive_misses = ?
consecutive_misses = ?,
api_cons_miss = ?
WHERE slash_epoch = ?
""", (
data["miss_counter_events"],
Expand All @@ -202,6 +289,7 @@ def write_epoch_data(path: Path, data: dict[str, int]) -> None:
data["very_small_balance_alert_executed"],
data["consecutive_misses"],
data["slash_epoch"],
data["api_cons_miss"],
))
conn.commit()

Expand Down Expand Up @@ -243,6 +331,7 @@ def overwrite_single_field(path: Path, epoch: int, field: str, value: int) -> No
"small_balance_alert_executed",
"very_small_balance_alert_executed",
"consecutive_misses",
"api_cons_miss",
]

if field not in allowed_columns:
Expand Down
Loading

0 comments on commit 3b28eda

Please sign in to comment.