diff --git a/.github/workflows/build.yml b/.github/workflows/main.yml similarity index 77% rename from .github/workflows/build.yml rename to .github/workflows/main.yml index 8ba8db4..739d136 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/main.yml @@ -1,5 +1,5 @@ --- -name: Lint +name: Lint and test on: pull_request: @@ -8,15 +8,18 @@ on: branches: ["main"] jobs: - build: + lint: runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.7", "3.11", "pypy3.10"] steps: - uses: actions/checkout@v3 - name: Install poetry run: pipx install poetry - uses: actions/setup-python@v4 with: - python-version: "3.11" + python-version: ${{ matrix.python-version }} cache: "poetry" - name: Install dependencies run: poetry install --no-interaction --no-root --with dev diff --git a/CHANGELOG.md b/CHANGELOG.md index cb3c000..89f2e12 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,7 +16,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ### Changed -- +- Caller tracking only tracks autometricised functions, as per spec #59 +- Function name labels now use the qualified name, and module labels use the module's `__name__` when available #59 ### Deprecated diff --git a/README.md b/README.md index 9dfa357..3388ab0 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,6 @@ ![GitHub_headerImage](https://user-images.githubusercontent.com/3262610/221191767-73b8a8d9-9f8b-440e-8ab6-75cb3c82f2bc.png) +[![Tests](https://github.com/autometrics-dev/autometrics-py/actions/workflows/main.yml/badge.svg)](https://github.com/autometrics-dev/autometrics-py/actions/workflows/main.yml) [![Discord Shield](https://discordapp.com/api/guilds/950489382626951178/widget.png?style=shield)](https://discord.gg/kHtwcH8As9) > A Python port of the Rust @@ -24,7 +25,7 @@ See [Why Autometrics?](https://github.com/autometrics-dev#why-autometrics) for m - [🔍 Identify commits](#identifying-commits-that-introduced-problems) that introduced errors or increased latency - [🚨 Define alerts](#alerts--slos) using SLO best practices directly in your source code - [📊 Grafana dashboards](#dashboards) work out of the box to visualize the performance of instrumented functions & SLOs -- [⚙️ Configurable](#metrics-libraries) metric collection library (`opentelemetry`, `prometheus`, or `metrics`) +- [⚙️ Configurable](#metrics-libraries) metric collection library (`opentelemetry` or `prometheus`) - [📍 Attach exemplars](#exemplars) to connect metrics with traces - ⚡ Minimal runtime overhead @@ -86,39 +87,7 @@ def api_handler(): # ... ``` -Autometrics by default will try to store information on which function calls a decorated function. As such you may want to place the autometrics in the top/first decorator, as otherwise you may get `inner` or `wrapper` as the caller function. - -So instead of writing: - -```py -from functools import wraps -from typing import Any, TypeVar, Callable - -R = TypeVar("R") - -def noop(func: Callable[..., R]) -> Callable[..., R]: - """A noop decorator that does nothing.""" - - @wraps(func) - def inner(*args: Any, **kwargs: Any) -> Any: - return func(*args, **kwargs) - - return inner - -@noop -@autometrics -def api_handler(): - # ... -``` - -You may want to switch the order of the decorator - -```py -@autometrics -@noop -def api_handler(): - # ... -``` +Autometrics keeps track of instrumented functions calling each other. If one instrumented function calls another, the metrics for the latter will include a `caller` label set to the name of the instrumented function that called it.
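For example, caller tracking in practice looks roughly like the sketch below (`get_user` is an illustrative name, not part of this change): metrics recorded while `get_user` runs will carry `caller="api_handler"`, because `api_handler` is the nearest instrumented function up the call chain.

```py
from autometrics import autometrics

@autometrics
def get_user():
    ...

@autometrics
def api_handler():
    # The function_calls_count_total series for `get_user` produced by this
    # call gets the label caller="api_handler".
    return get_user()
```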
#### Metrics Libraries diff --git a/src/autometrics/decorator.py b/src/autometrics/decorator.py index 9e8cfeb..ef6d977 100644 --- a/src/autometrics/decorator.py +++ b/src/autometrics/decorator.py @@ -1,19 +1,28 @@ """Autometrics module.""" +from contextvars import ContextVar import time import inspect from functools import wraps from typing import overload, TypeVar, Callable, Optional, Awaitable from typing_extensions import ParamSpec + from .objectives import Objective from .tracker import get_tracker, Result -from .utils import get_module_name, get_caller_function, append_docs_to_docstring +from .utils import ( + get_function_name, + get_module_name, + append_docs_to_docstring, +) P = ParamSpec("P") T = TypeVar("T") +caller_var: ContextVar[str] = ContextVar("caller", default="") + + # Bare decorator usage @overload def autometrics(func: Callable[P, T]) -> Callable[P, T]: @@ -85,15 +94,17 @@ def sync_decorator(func: Callable[P, T]) -> Callable[P, T]: """Helper for decorating synchronous functions, to track calls and duration.""" module_name = get_module_name(func) - func_name = func.__name__ + func_name = get_function_name(func) register_function_info(func_name, module_name) @wraps(func) def sync_wrapper(*args: P.args, **kwds: P.kwargs) -> T: start_time = time.time() - caller = get_caller_function() + caller = caller_var.get() + context_token = None try: + context_token = caller_var.set(func_name) if track_concurrency: track_start(module=module_name, function=func_name) result = func(*args, **kwds) @@ -111,6 +122,11 @@ def sync_wrapper(*args: P.args, **kwds: P.kwargs) -> T: ) # Reraise exception raise exception + + finally: + if context_token is not None: + caller_var.reset(context_token) + return result sync_wrapper.__doc__ = append_docs_to_docstring(func, func_name, module_name) @@ -120,15 +136,17 @@ def async_decorator(func: Callable[P, Awaitable[T]]) -> Callable[P, Awaitable[T] """Helper for decorating async functions, to track calls and duration.""" module_name = get_module_name(func) - func_name = func.__name__ + func_name = get_function_name(func) register_function_info(func_name, module_name) @wraps(func) async def async_wrapper(*args: P.args, **kwds: P.kwargs) -> T: start_time = time.time() - caller = get_caller_function() + caller = caller_var.get() + context_token = None try: + context_token = caller_var.set(func_name) if track_concurrency: track_start(module=module_name, function=func_name) result = await func(*args, **kwds) @@ -146,6 +164,11 @@ async def async_wrapper(*args: P.args, **kwds: P.kwargs) -> T: ) # Reraise exception raise exception + + finally: + if context_token is not None: + caller_var.reset(context_token) + return result async_wrapper.__doc__ = append_docs_to_docstring(func, func_name, module_name) diff --git a/src/autometrics/test_caller.py b/src/autometrics/test_caller.py new file mode 100644 index 0000000..b199e63 --- /dev/null +++ b/src/autometrics/test_caller.py @@ -0,0 +1,43 @@ +"""Tests for caller tracking.""" +from functools import wraps +from prometheus_client.exposition import generate_latest + +from .decorator import autometrics + + +def test_caller_detection(): + """This is a test to see if the caller is properly detected.""" + + def dummy_decorator(func): + @wraps(func) + def dummy_wrapper(*args, **kwargs): + return func(*args, **kwargs) + + return dummy_wrapper + + def another_decorator(func): + @wraps(func) + def another_wrapper(*args, **kwargs): + return func(*args, **kwargs) + + return another_wrapper + + @dummy_decorator + @autometrics + 
@another_decorator + def foo(): + pass + + @autometrics + def bar(): + foo() + + bar() + + blob = generate_latest() + assert blob is not None + data = blob.decode("utf-8") + + expected = """function_calls_count_total{caller="test_caller_detection.<locals>.bar",function="test_caller_detection.<locals>.foo",module="autometrics.test_caller",objective_name="",objective_percentile="",result="ok"} 1.0""" + assert "wrapper" not in data + assert expected in data diff --git a/src/autometrics/test_decorator.py b/src/autometrics/test_decorator.py index 35eb4b1..73a3419 100644 --- a/src/autometrics/test_decorator.py +++ b/src/autometrics/test_decorator.py @@ -6,9 +6,8 @@ from .decorator import autometrics from .objectives import ObjectiveLatency, Objective, ObjectivePercentile - from .tracker import set_tracker, TrackerType -from .utils import get_caller_function +from .utils import get_function_name, get_module_name def basic_function(sleep_duration: float = 0.0): @@ -60,11 +59,6 @@ class TestDecoratorClass: def test_basic(self): """This is a basic test.""" - # set up the function + basic variables - caller = get_caller_function(depth=1) - assert caller is not None - assert caller != "" - function_name = basic_function.__name__ wrapped_function = autometrics(basic_function) wrapped_function() @@ -72,28 +66,23 @@ def test_basic(self): assert blob is not None data = blob.decode("utf-8") - total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="ok"}} 1.0""" + total_count = f"""function_calls_count_total{{caller="",function="basic_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 1.0""" assert total_count in data for latency in ObjectiveLatency: - query = f"""function_calls_duration_bucket{{function="{function_name}",le="{latency.value}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + query = f"""function_calls_duration_bucket{{function="basic_function",le="{latency.value}",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert query in data - duration_count = f"""function_calls_duration_count{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_count = f"""function_calls_duration_count{{function="basic_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_count in data - duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_sum = f"""function_calls_duration_sum{{function="basic_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_sum in data @pytest.mark.asyncio async def test_basic_async(self): """This is a basic test.""" - # set up the function + basic variables - caller = get_caller_function(depth=1) - assert caller is not None - assert caller != "" - function_name = basic_async_function.__name__ wrapped_function = autometrics(basic_async_function) # Test that the function is *still* async after we wrap it @@ -105,33 +94,31 @@ async def test_basic_async(self): assert blob is not None data = blob.decode("utf-8") - total_count =
f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="ok"}} 1.0""" + total_count = f"""function_calls_count_total{{caller="",function="basic_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 1.0""" assert total_count in data for latency in ObjectiveLatency: - query = f"""function_calls_duration_bucket{{function="{function_name}",le="{latency.value}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + query = f"""function_calls_duration_bucket{{function="basic_async_function",le="{latency.value}",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert query in data - duration_count = f"""function_calls_duration_count{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_count = f"""function_calls_duration_count{{function="basic_async_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_count in data - duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_sum = f"""function_calls_duration_sum{{function="basic_async_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_sum in data def test_objectives(self): """This is a test that covers objectives.""" # set up the function + objective variables - caller = get_caller_function(depth=1) - assert caller is not None - assert caller != "" objective_name = "test_objective" success_rate = ObjectivePercentile.P90 latency = (ObjectiveLatency.Ms100, ObjectivePercentile.P99) objective = Objective( name=objective_name, success_rate=success_rate, latency=latency ) - function_name = basic_function.__name__ + function_name = get_function_name(basic_function) + module_name = get_module_name(basic_function) wrapped_function = autometrics(objective=objective)(basic_function) sleep_duration = 0.25 @@ -143,19 +130,19 @@ def test_objectives(self): assert blob is not None data = blob.decode("utf-8") - total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0""" + total_count = f"""function_calls_count_total{{caller="",function="{function_name}",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0""" assert total_count in data # Check the latency buckets for objective in ObjectiveLatency: count = 0 if float(objective.value) <= sleep_duration else 1 - query = f"""function_calls_duration_bucket{{function="{function_name}",le="{objective.value}",module="test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}} {count}""" + query = f"""function_calls_duration_bucket{{function="{function_name}",le="{objective.value}",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}} {count}""" assert query in 
data - duration_count = f"""function_calls_duration_count{{function="{function_name}",module="test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" + duration_count = f"""function_calls_duration_count{{function="{function_name}",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" assert duration_count in data - duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" + duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" assert duration_sum in data @pytest.mark.asyncio @@ -163,16 +150,12 @@ async def test_objectives_async(self): """This is a test that covers objectives for async functions.""" # set up the function + objective variables - caller = get_caller_function(depth=1) - assert caller is not None - assert caller != "" objective_name = "test_objective" success_rate = ObjectivePercentile.P90 latency = (ObjectiveLatency.Ms100, ObjectivePercentile.P99) objective = Objective( name=objective_name, success_rate=success_rate, latency=latency ) - function_name = basic_async_function.__name__ wrapped_function = autometrics(objective=objective)(basic_async_function) sleep_duration = 0.25 @@ -187,28 +170,24 @@ async def test_objectives_async(self): assert blob is not None data = blob.decode("utf-8") - total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0""" + total_count = f"""function_calls_count_total{{caller="",function="basic_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0""" assert total_count in data # Check the latency buckets for objective in ObjectiveLatency: count = 0 if float(objective.value) <= sleep_duration else 1 - query = f"""function_calls_duration_bucket{{function="{function_name}",le="{objective.value}",module="test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}} {count}""" + query = f"""function_calls_duration_bucket{{function="basic_async_function",le="{objective.value}",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}} {count}""" assert query in data - duration_count = f"""function_calls_duration_count{{function="{function_name}",module="test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" + duration_count = f"""function_calls_duration_count{{function="basic_async_function",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" assert duration_count in data - duration_sum = 
f"""function_calls_duration_sum{{function="{function_name}",module="test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" + duration_sum = f"""function_calls_duration_sum{{function="basic_async_function",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}""" assert duration_sum in data def test_exception(self): """This is a test that covers exceptions.""" - caller = get_caller_function(depth=1) - assert caller is not None - assert caller != "" - function_name = error_function.__name__ wrapped_function = autometrics(error_function) with pytest.raises(RuntimeError) as exception: @@ -220,27 +199,23 @@ def test_exception(self): assert blob is not None data = blob.decode("utf-8") - total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="error"}} 1.0""" + total_count = f"""function_calls_count_total{{caller="",function="error_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 1.0""" assert total_count in data for latency in ObjectiveLatency: - query = f"""function_calls_duration_bucket{{function="{function_name}",le="{latency.value}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + query = f"""function_calls_duration_bucket{{function="error_function",le="{latency.value}",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert query in data - duration_count = f"""function_calls_duration_count{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_count = f"""function_calls_duration_count{{function="error_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_count in data - duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_sum = f"""function_calls_duration_sum{{function="error_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_sum in data @pytest.mark.asyncio async def test_async_exception(self): """This is a test that covers exceptions.""" - caller = get_caller_function(depth=1) - assert caller is not None - assert caller != "" - function_name = error_async_function.__name__ wrapped_function = autometrics(error_async_function) # Test that the function is *still* async after we wrap it @@ -255,34 +230,33 @@ async def test_async_exception(self): assert blob is not None data = blob.decode("utf-8") - total_count = f"""function_calls_count_total{{caller="{caller}",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="error"}} 1.0""" + total_count = f"""function_calls_count_total{{caller="",function="error_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 1.0""" assert total_count in data for latency in ObjectiveLatency: - query = 
f"""function_calls_duration_bucket{{function="{function_name}",le="{latency.value}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + query = f"""function_calls_duration_bucket{{function="error_async_function",le="{latency.value}",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert query in data - duration_count = f"""function_calls_duration_count{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_count = f"""function_calls_duration_count{{function="error_async_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_count in data - duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" + duration_sum = f"""function_calls_duration_sum{{function="error_async_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}""" assert duration_sum in data def test_initialize_counters_sync(self): """This is a test to see if the function calls metric initializes at 0 after invoking the decorator.""" - function_name = never_called_function.__name__ - wrapped_function = autometrics(never_called_function) + autometrics(never_called_function) # NOTE - Do not call the function! We want to see if we get counter data for it blob = generate_latest() assert blob is not None data = blob.decode("utf-8") - total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0""" + total_count_ok = f"""function_calls_count_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0""" assert total_count_ok in data - total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0""" + total_count_error = f"""function_calls_count_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0""" assert total_count_error in data def test_initialize_counters_sync_with_objective(self): @@ -292,36 +266,34 @@ def test_initialize_counters_sync_with_objective(self): success_rate = ObjectivePercentile.P90 objective = Objective(name=objective_name, success_rate=success_rate) - function_name = never_called_function.__name__ - wrapped_function = autometrics(objective=objective)(never_called_function) + autometrics(objective=objective)(never_called_function) # NOTE - Do not call the function! 
We want to see if we get counter data for it blob = generate_latest() assert blob is not None data = blob.decode("utf-8") - total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0""" + total_count_ok = f"""function_calls_count_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0""" assert total_count_ok in data - total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0""" + total_count_error = f"""function_calls_count_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0""" assert total_count_error in data @pytest.mark.asyncio async def test_initialize_counters_async(self): """This is a test to see if the function calls metric initializes at 0 after invoking the decorator for an async function""" - function_name = never_called_async_function.__name__ - wrapped_function = autometrics(never_called_async_function) + autometrics(never_called_async_function) # NOTE - Do not call the function! We want to see if we get counter data for it even without ever calling it blob = generate_latest() assert blob is not None data = blob.decode("utf-8") - total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0""" + total_count_ok = f"""function_calls_count_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0""" assert total_count_ok in data - total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0""" + total_count_error = f"""function_calls_count_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0""" assert total_count_error in data @pytest.mark.asyncio @@ -332,16 +304,15 @@ async def test_initialize_counters_async_with_objective(self): success_rate = ObjectivePercentile.P90 objective = Objective(name=objective_name, success_rate=success_rate) - function_name = never_called_async_function.__name__ - wrapped_function = autometrics(objective=objective)(never_called_async_function) + autometrics(objective=objective)(never_called_async_function) # NOTE - Do not call the function! 
We want to see if we get counter data for it even without ever calling it blob = generate_latest() assert blob is not None data = blob.decode("utf-8") - total_count_ok = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0""" + total_count_ok = f"""function_calls_count_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0""" assert total_count_ok in data - total_count_error = f"""function_calls_count_total{{caller="",function="{function_name}",module="test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0""" + total_count_error = f"""function_calls_count_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0""" assert total_count_error in data diff --git a/src/autometrics/tracker/test_concurrency.py b/src/autometrics/tracker/test_concurrency.py index 652d217..1e6a1a5 100644 --- a/src/autometrics/tracker/test_concurrency.py +++ b/src/autometrics/tracker/test_concurrency.py @@ -6,6 +6,7 @@ from .tracker import set_tracker, TrackerType from ..decorator import autometrics +from ..utils import get_function_name, get_module_name @autometrics(track_concurrency=True) @@ -19,6 +20,9 @@ async def test_concurrency_tracking_prometheus(monkeypatch): # because the library was already initialized with the OpenTelemetry tracker set_tracker(TrackerType.PROMETHEUS) + func_name = get_function_name(sleep) + module_name = get_module_name(sleep) + # Create a 200ms async task loop = asyncio.get_event_loop() task = loop.create_task(sleep(0.2)) @@ -31,9 +35,9 @@ async def test_concurrency_tracking_prometheus(monkeypatch): await task assert blob is not None data = blob.decode("utf-8") - print(data) + assert ( - f"""# TYPE function_calls_concurrent gauge\nfunction_calls_concurrent{{function="sleep",module="test_concurrency"}} 1.0""" + f"""# TYPE function_calls_concurrent gauge\nfunction_calls_concurrent{{function="sleep",module="autometrics.tracker.test_concurrency"}} 1.0""" in data ) diff --git a/src/autometrics/utils.py b/src/autometrics/utils.py index ef65ab7..8bf36c5 100644 --- a/src/autometrics/utils.py +++ b/src/autometrics/utils.py @@ -1,19 +1,16 @@ import inspect import os from collections.abc import Callable + from .prometheus_url import Generator def get_module_name(func: Callable) -> str: """Get the name of the module that contains the function.""" - func_name = func.__name__ - fullname = func.__qualname__ - filename = get_filename_as_module(func) - if fullname == func_name: - return filename - - classname = func.__qualname__.rsplit(".", 1)[0] - return f"{filename}.{classname}" + module = inspect.getmodule(func) + if module is None: + return get_filename_as_module(func) + return module.__name__ def get_filename_as_module(func: Callable) -> str: @@ -27,6 +24,11 @@ def get_filename_as_module(func: Callable) -> str: return module_part +def get_function_name(func: Callable) -> str: + """Get the name of the function.""" + return func.__qualname__ or func.__name__ + + def write_docs(func_name: str, module_name: str): """Write the prometheus query urls to the function docstring.""" generator = Generator(func_name, module_name) @@ -46,10 +48,3 @@ def 
append_docs_to_docstring(func, func_name, module_name): return write_docs(func_name, module_name) else: return f"{func.__doc__}\n{write_docs(func_name, module_name)}" - - -def get_caller_function(depth: int = 2): - """Get the name of the function. Default depth is 2 to get the caller of the caller of the function being decorated.""" - caller_frame = inspect.stack()[depth] - caller_function_name = caller_frame[3] - return caller_function_name
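For reference, the stack-inspection helper removed here is superseded by the `ContextVar` bookkeeping added in `decorator.py`: each wrapper reads the current caller from the context variable, installs its own qualified name for the duration of the call, and restores the previous value in a `finally` block. A simplified, self-contained sketch of that pattern (not the library code itself; metric recording is omitted and the function names are illustrative):

```py
from contextvars import ContextVar
from functools import wraps

caller_var: ContextVar[str] = ContextVar("caller", default="")


def track_caller(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        caller = caller_var.get()                  # "" when called from uninstrumented code
        token = caller_var.set(func.__qualname__)  # nested calls will see us as their caller
        try:
            print(f"{func.__qualname__} called by {caller or '<top level>'}")
            return func(*args, **kwargs)
        finally:
            caller_var.reset(token)                # restore the previous caller

    return wrapper


@track_caller
def inner():
    ...


@track_caller
def outer():
    inner()


outer()  # prints "outer called by <top level>", then "inner called by outer"
```

Compared with `inspect.stack()`, this reports the nearest instrumented caller rather than whatever frame happens to sit on the stack (so decorator internals such as `inner` or `wrapper` no longer leak into the `caller` label), it costs a context-variable lookup instead of a stack walk, and it behaves correctly with async code because contextvars are copied into asyncio tasks.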