From 5fda8ec440ad914cfa840a43d4c657c1b391597e Mon Sep 17 00:00:00 2001 From: Brett Beutell Date: Wed, 28 Jun 2023 15:41:54 +0200 Subject: [PATCH] Initialize counters at zero (#54) * Keep track of all the functions we decorate and provide utilities to display them * Clean up admin_panel a bit, esp function bookkeeping * Prototype "cdn" example * Update the HTML we serve * Fix readme * Remove http server for admin panel (old code) * Prototype "initialize at zero" behavior for counters * Fix tracker type and type issues with caller param on initialization * Remove admin panel section from README * Remove all code related to admin panel * Add tests for initializing at zero * Fix docstring typo * Add tests for initializing at zero with objective name * Change initialize_at_zero to initialize_counters * PR Feedback: Prefer consistency in use of positional args * Update changelog --------- Co-authored-by: Brett Beutell --- CHANGELOG.md | 6 +- src/autometrics/__init__.py | 2 - src/autometrics/admin_panel/__init__.py | 1 - .../admin_panel/function_registry.py | 22 ----- src/autometrics/decorator.py | 9 +- src/autometrics/test_decorator.py | 88 +++++++++++++++++++ src/autometrics/tracker/opentelemetry.py | 14 ++- src/autometrics/tracker/prometheus.py | 14 ++- src/autometrics/tracker/tracker.py | 8 ++ 9 files changed, 132 insertions(+), 32 deletions(-) delete mode 100644 src/autometrics/admin_panel/__init__.py delete mode 100644 src/autometrics/admin_panel/function_registry.py diff --git a/CHANGELOG.md b/CHANGELOG.md index d0a96a3..cb3c000 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,7 +12,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ### Added -- +- Initialize counter metrics at zero #54 ### Changed @@ -28,11 +28,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). 
### Fixed -- Fixed decorator async function handling (#55) - ### Security -- Update requests, starlette, fastapi dependencies used by the examples +- ## [0.6](https://github.com/autometrics-dev/autometrics-py/releases/tag/0.6) - 2023-06-23 diff --git a/src/autometrics/__init__.py b/src/autometrics/__init__.py index b6c7d9f..ef89133 100644 --- a/src/autometrics/__init__.py +++ b/src/autometrics/__init__.py @@ -1,3 +1 @@ from .decorator import * - -from .admin_panel import * diff --git a/src/autometrics/admin_panel/__init__.py b/src/autometrics/admin_panel/__init__.py deleted file mode 100644 index 76b920d..0000000 --- a/src/autometrics/admin_panel/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .function_registry import register_function_info, get_decorated_functions_list diff --git a/src/autometrics/admin_panel/function_registry.py b/src/autometrics/admin_panel/function_registry.py deleted file mode 100644 index 5563ea5..0000000 --- a/src/autometrics/admin_panel/function_registry.py +++ /dev/null @@ -1,22 +0,0 @@ -# This module will do some bookkeeping on information of any functions that have been wrapped by autometrics decorators - -from typing import List, TypedDict - - -class FunctionInfo(TypedDict): - name: str - module: str - - -FUNCTION_REGISTRY: List[FunctionInfo] = [] - - -def register_function_info(func_name: str, module_name: str): - global FUNCTION_REGISTRY - function_info: FunctionInfo = {"name": func_name, "module": module_name} - FUNCTION_REGISTRY.append(function_info) - - -def get_decorated_functions_list(): - global FUNCTION_REGISTRY - return FUNCTION_REGISTRY diff --git a/src/autometrics/decorator.py b/src/autometrics/decorator.py index ab4bd6f..9e8cfeb 100644 --- a/src/autometrics/decorator.py +++ b/src/autometrics/decorator.py @@ -7,7 +7,6 @@ from typing_extensions import ParamSpec from .objectives import Objective from .tracker import get_tracker, Result -from .admin_panel import register_function_info from .utils import get_module_name, 
get_caller_function, append_docs_to_docstring @@ -42,6 +41,14 @@ def autometrics( ): """Decorator for tracking function calls and duration. Supports synchronous and async functions.""" + def register_function_info( + function: str, + module: str, + ): + get_tracker().initialize_counters( + function=function, module=module, objective=objective + ) + def track_start(function: str, module: str): get_tracker().start( function=function, module=module, track_concurrency=track_concurrency diff --git a/src/autometrics/test_decorator.py b/src/autometrics/test_decorator.py index 02ce56c..35eb4b1 100644 --- a/src/autometrics/test_decorator.py +++ b/src/autometrics/test_decorator.py @@ -34,6 +34,16 @@ async def error_async_function(): raise RuntimeError("This is a test error") +def never_called_function(): + """This is a sync function that should never be called. Used for testing initialization at zero for counters""" + raise RuntimeError("This function should never be called") + + +async def never_called_async_function(): + """This is an async function that should never be called. Used for testing initialization at zero for counters""" + raise RuntimeError("This function should never be called") + + tracker_types = [TrackerType.PROMETHEUS, TrackerType.OPENTELEMETRY] @@ -257,3 +267,81 @@ async def test_async_exception(self): duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_sum in data + + def test_initialize_counters_sync(self): + """This is a test to see if the function calls metric initializes at 0 after invoking the decorator.""" + + function_name = never_called_function.__name__ + wrapped_function = autometrics(never_called_function) + # NOTE - Do not call the function! 
We want to see if we get counter data for it + + blob = generate_latest() + assert blob is not None + data = blob.decode("utf-8") + + total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0""" + assert total_count_ok in data + + total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0""" + assert total_count_error in data + + def test_initialize_counters_sync_with_objective(self): + """This is a test to see if the function calls metric initializes at 0 after invoking the decorator.""" + + objective_name = "test_objective" + success_rate = ObjectivePercentile.P90 + objective = Objective(name=objective_name, success_rate=success_rate) + + function_name = never_called_function.__name__ + wrapped_function = autometrics(objective=objective)(never_called_function) + # NOTE - Do not call the function! 
We want to see if we get counter data for it + + blob = generate_latest() + assert blob is not None + data = blob.decode("utf-8") + + total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0""" + assert total_count_ok in data + + total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0""" + assert total_count_error in data + + @pytest.mark.asyncio + async def test_initialize_counters_async(self): + """This is a test to see if the function calls metric initializes at 0 after invoking the decorator for an async function""" + + function_name = never_called_async_function.__name__ + wrapped_function = autometrics(never_called_async_function) + # NOTE - Do not call the function! We want to see if we get counter data for it even without ever calling it + + blob = generate_latest() + assert blob is not None + data = blob.decode("utf-8") + + total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0""" + assert total_count_ok in data + + total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0""" + assert total_count_error in data + + @pytest.mark.asyncio + async def test_initialize_counters_async_with_objective(self): + """This is a test to see if the function calls metric initializes at 0 after invoking the decorator for an async function""" + + objective_name = "test_objective" + success_rate = ObjectivePercentile.P90 + objective = Objective(name=objective_name, success_rate=success_rate) + + function_name = never_called_async_function.__name__ + 
wrapped_function = autometrics(objective=objective)(never_called_async_function) + # NOTE - Do not call the function! We want to see if we get counter data for it even without ever calling it + + blob = generate_latest() + assert blob is not None + data = blob.decode("utf-8") + + total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0""" + assert total_count_ok in data + + total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0""" + assert total_count_error in data diff --git a/src/autometrics/tracker/opentelemetry.py b/src/autometrics/tracker/opentelemetry.py index 22bbb3d..9053868 100644 --- a/src/autometrics/tracker/opentelemetry.py +++ b/src/autometrics/tracker/opentelemetry.py @@ -83,6 +83,7 @@ def __count( objective: Optional[Objective], exemplar: Optional[dict], result: Result, + inc_by: int = 1, ): objective_name = "" if objective is None else objective.name percentile = ( @@ -91,7 +92,7 @@ def __count( else objective.success_rate.value ) self.__counter_instance.add( - 1, + inc_by, attributes={ "function": function, "module": module, @@ -184,3 +185,14 @@ def finish( "module": module, }, ) + + def initialize_counters( + self, + function: str, + module: str, + objective: Optional[Objective] = None, + ): + """Initialize tracking metrics for a function call at zero.""" + caller = "" + self.__count(function, module, caller, objective, None, Result.OK, 0) + self.__count(function, module, caller, objective, None, Result.ERROR, 0) diff --git a/src/autometrics/tracker/prometheus.py b/src/autometrics/tracker/prometheus.py index c3baee2..e5750a6 100644 --- a/src/autometrics/tracker/prometheus.py +++ b/src/autometrics/tracker/prometheus.py @@ -69,6 +69,7 @@ def _count( 
objective: Optional[Objective] = None, exemplar: Optional[dict] = None, result: Result = Result.OK, + inc_by: int = 1, ): """Increment the counter for the function call.""" objective_name = "" if objective is None else objective.name @@ -85,7 +86,7 @@ def _count( caller, objective_name, percentile, - ).inc(1, exemplar) + ).inc(inc_by, exemplar) def _histogram( self, @@ -146,3 +147,14 @@ def finish( if track_concurrency: self.prom_gauge_concurrency.labels(function, module).dec() + + def initialize_counters( + self, + function: str, + module: str, + objective: Optional[Objective] = None, + ): + """Initialize tracking metrics for a function call at zero.""" + caller = "" + self._count(function, module, caller, objective, None, Result.OK, 0) + self._count(function, module, caller, objective, None, Result.ERROR, 0) diff --git a/src/autometrics/tracker/tracker.py b/src/autometrics/tracker/tracker.py index 040444a..2181cde 100644 --- a/src/autometrics/tracker/tracker.py +++ b/src/autometrics/tracker/tracker.py @@ -36,6 +36,14 @@ def finish( ): """Finish tracking metrics for a function call.""" + def initialize_counters( + self, + function: str, + module: str, + objective: Optional[Objective] = None, + ): + """Initialize (counter) metrics for a function at zero.""" + class TrackerType(Enum): """Type of tracker."""