Skip to content

Commit

Permalink
Initialize counters at zero (#54)
Browse files Browse the repository at this point in the history
* Keep track of all the functions we decorate and provide utilities to display them

* Clean up admin_panel a bit, esp function bookkeeping

* Prototype "cdn" example

* Update the HTML we serve

* Fix readme

* Remove http server for admin panel (old code)

* Prototype "initialize at zero" behavior for counters

* Fix tracker type and type issues with caller param on initialization

* Remove admin panel section from README

* Remove all code related to admin panel

* Add tests for initializing at zero

* Fix docstring typo

* Add tests for initializing at zero with objective name

* Change initialize_at_zero to initialize_counters

* PR Feedback: Prefer consistency in use of positional args

* Update changelog

---------

Co-authored-by: Brett Beutell <[email protected]>
  • Loading branch information
brettimus and Brett Beutell authored Jun 28, 2023
1 parent 398826a commit 5fda8ec
Show file tree
Hide file tree
Showing 9 changed files with 132 additions and 32 deletions.
6 changes: 2 additions & 4 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

### Added

-
- Initialize counter metrics at zero #54

### Changed

Expand All @@ -28,11 +28,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

### Fixed

- Fixed decorator async function handling (#55)

### Security

- Update requests, starlette, fastapi dependencies used by the examples
-

## [0.6](https://github.com/autometrics-dev/autometrics-py/releases/tag/0.6) - 2023-06-23

Expand Down
2 changes: 0 additions & 2 deletions src/autometrics/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1 @@
from .decorator import *

from .admin_panel import *
1 change: 0 additions & 1 deletion src/autometrics/admin_panel/__init__.py

This file was deleted.

22 changes: 0 additions & 22 deletions src/autometrics/admin_panel/function_registry.py

This file was deleted.

9 changes: 8 additions & 1 deletion src/autometrics/decorator.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
from typing_extensions import ParamSpec
from .objectives import Objective
from .tracker import get_tracker, Result
from .admin_panel import register_function_info
from .utils import get_module_name, get_caller_function, append_docs_to_docstring


Expand Down Expand Up @@ -42,6 +41,14 @@ def autometrics(
):
"""Decorator for tracking function calls and duration. Supports synchronous and async functions."""

def register_function_info(
    function: str,
    module: str,
):
    """Seed the counters for *function* at zero so the series exists before any call."""
    tracker = get_tracker()
    tracker.initialize_counters(
        function=function, module=module, objective=objective
    )

def track_start(function: str, module: str):
get_tracker().start(
function=function, module=module, track_concurrency=track_concurrency
Expand Down
88 changes: 88 additions & 0 deletions src/autometrics/test_decorator.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,16 @@ async def error_async_function():
raise RuntimeError("This is a test error")


def never_called_function():
    """Sync fixture that must never run; used to verify counters initialize at zero."""
    raise RuntimeError("This function should never be called")


async def never_called_async_function():
    """Async fixture that must never run; used to verify counters initialize at zero."""
    raise RuntimeError("This function should never be called")


tracker_types = [TrackerType.PROMETHEUS, TrackerType.OPENTELEMETRY]


Expand Down Expand Up @@ -257,3 +267,81 @@ async def test_async_exception(self):

duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
assert duration_sum in data

def test_initialize_counters_sync(self):
    """This is a test to see if the function calls metric initializes at 0 after invoking the decorator."""

    function_name = never_called_function.__name__
    # Decorating alone should be enough to create the counter series.
    # NOTE - Do not call the function! We want to see if we get counter data for it.
    # (The wrapped function is deliberately discarded — only the decorator's
    # side effect of initializing the counters matters here.)
    autometrics(never_called_function)

    blob = generate_latest()
    assert blob is not None
    data = blob.decode("utf-8")

    # Both the "ok" and "error" result series must exist with a 0.0 sample.
    total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0"""
    assert total_count_ok in data

    total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0"""
    assert total_count_error in data

def test_initialize_counters_sync_with_objective(self):
    """This is a test to see if the function calls metric initializes at 0 after invoking the decorator."""

    objective_name = "test_objective"
    success_rate = ObjectivePercentile.P90
    objective = Objective(name=objective_name, success_rate=success_rate)

    function_name = never_called_function.__name__
    # NOTE - Do not call the function! We want to see if we get counter data for it.
    # (The wrapped function is deliberately discarded — only the decorator's
    # side effect of initializing the counters matters here.)
    autometrics(objective=objective)(never_called_function)

    blob = generate_latest()
    assert blob is not None
    data = blob.decode("utf-8")

    # The objective's name and percentile must be baked into the 0.0 series.
    total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0"""
    assert total_count_ok in data

    total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0"""
    assert total_count_error in data

@pytest.mark.asyncio
async def test_initialize_counters_async(self):
    """This is a test to see if the function calls metric initializes at 0 after invoking the decorator for an async function"""

    function_name = never_called_async_function.__name__
    # NOTE - Do not call the function! We want to see if we get counter data for it even without ever calling it
    # (The wrapped function is deliberately discarded — only the decorator's
    # side effect of initializing the counters matters here.)
    autometrics(never_called_async_function)

    blob = generate_latest()
    assert blob is not None
    data = blob.decode("utf-8")

    # Both the "ok" and "error" result series must exist with a 0.0 sample.
    total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0"""
    assert total_count_ok in data

    total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0"""
    assert total_count_error in data

@pytest.mark.asyncio
async def test_initialize_counters_async_with_objective(self):
    """This is a test to see if the function calls metric initializes at 0 after invoking the decorator for an async function"""

    objective_name = "test_objective"
    success_rate = ObjectivePercentile.P90
    objective = Objective(name=objective_name, success_rate=success_rate)

    function_name = never_called_async_function.__name__
    # NOTE - Do not call the function! We want to see if we get counter data for it even without ever calling it
    # (The wrapped function is deliberately discarded — only the decorator's
    # side effect of initializing the counters matters here.)
    autometrics(objective=objective)(never_called_async_function)

    blob = generate_latest()
    assert blob is not None
    data = blob.decode("utf-8")

    # The objective's name and percentile must be baked into the 0.0 series.
    total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0"""
    assert total_count_ok in data

    total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0"""
    assert total_count_error in data
14 changes: 13 additions & 1 deletion src/autometrics/tracker/opentelemetry.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,7 @@ def __count(
objective: Optional[Objective],
exemplar: Optional[dict],
result: Result,
inc_by: int = 1,
):
objective_name = "" if objective is None else objective.name
percentile = (
Expand All @@ -91,7 +92,7 @@ def __count(
else objective.success_rate.value
)
self.__counter_instance.add(
1,
inc_by,
attributes={
"function": function,
"module": module,
Expand Down Expand Up @@ -184,3 +185,14 @@ def finish(
"module": module,
},
)

def initialize_counters(
    self,
    function: str,
    module: str,
    objective: Optional[Objective] = None,
):
    """Initialize tracking metrics for a function call at zero."""
    # No real invocation has happened yet, so the caller label is empty and
    # each result series is recorded with an increment of 0.
    for outcome in (Result.OK, Result.ERROR):
        self.__count(function, module, "", objective, None, outcome, 0)
14 changes: 13 additions & 1 deletion src/autometrics/tracker/prometheus.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ def _count(
objective: Optional[Objective] = None,
exemplar: Optional[dict] = None,
result: Result = Result.OK,
inc_by: int = 1,
):
"""Increment the counter for the function call."""
objective_name = "" if objective is None else objective.name
Expand All @@ -85,7 +86,7 @@ def _count(
caller,
objective_name,
percentile,
).inc(1, exemplar)
).inc(inc_by, exemplar)

def _histogram(
self,
Expand Down Expand Up @@ -146,3 +147,14 @@ def finish(

if track_concurrency:
self.prom_gauge_concurrency.labels(function, module).dec()

def initialize_counters(
    self,
    function: str,
    module: str,
    objective: Optional[Objective] = None,
):
    """Initialize tracking metrics for a function call at zero."""
    # No real invocation has happened yet, so the caller label is empty.
    empty_caller = ""
    # An increment of 0 creates each labeled series without changing its value.
    self._count(function, module, empty_caller, objective, None, Result.OK, 0)
    self._count(function, module, empty_caller, objective, None, Result.ERROR, 0)
8 changes: 8 additions & 0 deletions src/autometrics/tracker/tracker.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,14 @@ def finish(
):
"""Finish tracking metrics for a function call."""

def initialize_counters(
    self,
    function: str,
    module: str,
    objective: Optional[Objective] = None,
):
    """Initialize (counter) metrics for a function at zero.

    Implementations create the function-call counter series for both the
    ``ok`` and ``error`` results with a value of 0 — tagged with the
    `objective`'s metadata when one is given — so the metrics are visible
    before the function is ever called.
    """


class TrackerType(Enum):
"""Type of tracker."""
Expand Down

0 comments on commit 5fda8ec

Please sign in to comment.