From 71722f9290edef3858bc2df67aa52e4705ffe8e5 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Sat, 8 Oct 2022 12:09:08 +0530 Subject: [PATCH] [change] Changed code/docs from "iperf" to "iperf3" #443 Closes #443 --- README.rst | 180 +++++------ openwisp_monitoring/check/apps.py | 8 +- openwisp_monitoring/check/base/models.py | 8 +- openwisp_monitoring/check/classes/__init__.py | 2 +- .../check/classes/{iperf.py => iperf3.py} | 122 +++---- openwisp_monitoring/check/settings.py | 12 +- openwisp_monitoring/check/tasks.py | 12 +- ...erf_test_utils.py => iperf3_test_utils.py} | 0 .../tests/{test_iperf.py => test_iperf3.py} | 300 +++++++++--------- .../check/tests/test_models.py | 30 +- .../device/tests/test_admin.py | 12 +- .../monitoring/configuration.py | 10 +- tests/openwisp2/settings.py | 12 +- 13 files changed, 357 insertions(+), 351 deletions(-) rename openwisp_monitoring/check/classes/{iperf.py => iperf3.py} (83%) rename openwisp_monitoring/check/tests/{iperf_test_utils.py => iperf3_test_utils.py} (100%) rename openwisp_monitoring/check/tests/{test_iperf.py => test_iperf3.py} (74%) diff --git a/README.rst b/README.rst index 95215c3e8..59b35c68b 100644 --- a/README.rst +++ b/README.rst @@ -87,9 +87,9 @@ Available Features `RAM usage <#memory-usage>`_, `CPU load <#cpu-load>`_, `flash/disk usage <#disk-usage>`_, mobile signal (LTE/UMTS/GSM `signal strength <#mobile-signal-strength>`_, `signal quality <#mobile-signal-quality>`_, - `access technology in use <#mobile-access-technology-in-use>`_), `bandwidth <#iperf>`_, - `transferred data <#iperf>`_, `restransmits <#iperf>`_, `jitter <#iperf>`_, - `datagram <#iperf>`_, `datagram loss <#iperf>`_ + `access technology in use <#mobile-access-technology-in-use>`_), `bandwidth <#iperf3>`_, + `transferred data <#iperf3>`_, `restransmits <#iperf3>`_, `jitter <#iperf3>`_, + `datagram <#iperf3>`_, `datagram loss <#iperf3>`_ * Maintains a record of `WiFi sessions <#monitoring-wifi-sessions>`_ with clients' MAC address and vendor, 
session start and stop time and connected device along with other information @@ -107,7 +107,7 @@ Available Features * Extensible metrics and charts: it's possible to define new metrics and new charts * API to retrieve the chart metrics and status information of each device based on `NetJSON DeviceMonitoring `_ -* `Iperf check <#iperf-1>`_ that provides network performance measurements such as maximum +* `Iperf3 check <#iperf3-1>`_ that provides network performance measurements such as maximum achievable bandwidth, jitter, datagram loss etc of the openwrt device using `iperf3 utility `_ ------------ @@ -815,19 +815,19 @@ Mobile Access Technology in use .. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/access-technology.png :align: center -Iperf -~~~~~ +Iperf3 +~~~~~~ +--------------------+---------------------------------------------------------------------------------------------------------------------------+ -| **measurement**: | ``iperf`` | +| **measurement**: | ``iperf3`` | +--------------------+---------------------------------------------------------------------------------------------------------------------------+ -| **types**: | | ``int`` (iperf_result, sent_bytes_tcp, received_bytes_tcp, retransmits, sent_bytes_udp, total_packets, lost_packets), | +| **types**: | | ``int`` (iperf3_result, sent_bytes_tcp, received_bytes_tcp, retransmits, sent_bytes_udp, total_packets, lost_packets), | | | | ``float`` (sent_bps_tcp, received_bps_tcp, sent_bps_udp, jitter, lost_percent) | +--------------------+---------------------------------------------------------------------------------------------------------------------------+ -| **fields**: | | ``iperf_result``, ``sent_bps_tcp``, ``received_bps_tcp``, ``sent_bytes_tcp``, ``received_bytes_tcp``, ``retransmits``, | +| **fields**: | | ``iperf3_result``, ``sent_bps_tcp``, ``received_bps_tcp``, ``sent_bytes_tcp``, ``received_bytes_tcp``, ``retransmits``, | | | | ``sent_bps_udp``, 
``sent_bytes_udp``, ``jitter``, ``total_packets``, ``lost_packets``, ``lost_percent`` | +--------------------+---------------------------------------------------------------------------------------------------------------------------+ -| **configuration**: | ``iperf`` | +| **configuration**: | ``iperf3`` | +--------------------+---------------------------------------------------------------------------------------------------------------------------+ | **charts**: | ``bandwidth``, ``transfer``, ``retransmits``, ``jitter``, ``datagram``, ``datagram_loss`` | +--------------------+---------------------------------------------------------------------------------------------------------------------------+ @@ -862,10 +862,10 @@ Iperf .. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/datagram-loss.png :align: center -For more info on how to configure and use Iperf, please refer to -`iperf check usage instructions <#iperf-check-usage-instructions>`_. +For more info on how to configure and use Iperf3, please refer to +`iperf3 check usage instructions <#iperf3-check-usage-instructions>`_. -**Note:** Iperf charts uses ``connect_points=True`` in +**Note:** Iperf3 charts uses ``connect_points=True`` in `default chart configuration <#openwisp_monitoring_charts>`_ that joins it's individual chart data points. Dashboard Monitoring Charts @@ -1014,33 +1014,33 @@ configuration status of a device changes, this ensures the check reacts quickly to events happening in the network and informs the user promptly if there's anything that is not working as intended. -Iperf -~~~~~ +Iperf3 +~~~~~~ This check provides network performance measurements such as maximum achievable bandwidth, jitter, datagram loss etc of the device using `iperf3 utility `_. This check is **disabled by default**. You can enable auto creation of this check by setting the -`OPENWISP_MONITORING_AUTO_IPERF <#OPENWISP_MONITORING_AUTO_IPERF>`_ to ``True``. 
+`OPENWISP_MONITORING_AUTO_IPERF3 <#OPENWISP_MONITORING_AUTO_IPERF3>`_ to ``True``. -You can also `add the iperf check +You can also `add the iperf3 check <#add-checks-and-alert-settings-from-the-device-page>`_ directly from the device page. It also supports tuning of various parameters. -You can also change the parameters used for iperf checks (e.g. timing, port, username, -password, rsa_publc_key etc) using the `OPENWISP_MONITORING_IPERF_CHECK_CONFIG -<#OPENWISP_MONITORING_IPERF_CHECK_CONFIG>`_ setting. +You can also change the parameters used for iperf3 checks (e.g. timing, port, username, +password, rsa_public_key etc) using the `OPENWISP_MONITORING_IPERF3_CHECK_CONFIG +<#OPENWISP_MONITORING_IPERF3_CHECK_CONFIG>`_ setting. -**Note:** When setting `OPENWISP_MONITORING_AUTO_IPERF <#OPENWISP_MONITORING_AUTO_IPERF>`_ to ``True``, +**Note:** When setting `OPENWISP_MONITORING_AUTO_IPERF3 <#OPENWISP_MONITORING_AUTO_IPERF3>`_ to ``True``, you may need to update the `metric configuration <#add-checks-and-alert-settings-from-the-device-page>`_ -to enable alerts for the iperf check. +to enable alerts for the iperf3 check. -Iperf Check Usage Instructions ------------------------------- +Iperf3 Check Usage Instructions +------------------------------- -1. Make sure iperf is installed on the device -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +1. Make sure iperf3 is installed on the device +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Register your device to OpenWISP and make sure the `iperf3 openwrt package `_ is installed on the device, @@ -1066,11 +1066,11 @@ to allow SSH access to you device from OpenWISP. :alt: Enable ssh access from openwisp to device :align: center -3. Set up and configure Iperf server settings -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +3. 
Set up and configure Iperf3 server settings +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -After having deployed your Iperf servers, you need to -configure the iperf settings on the django side of OpenWISP, +After having deployed your Iperf3 servers, you need to +configure the iperf3 settings on the django side of OpenWISP, see the `test project settings for reference `_. @@ -1079,12 +1079,12 @@ Example: .. code-block:: python - OPENWISP_MONITORING_IPERF_CHECK_CONFIG = { + OPENWISP_MONITORING_IPERF3_CHECK_CONFIG = { # 'org_pk' : {'host' : [], 'client_options' : {}} 'a9734710-db30-46b0-a2fc-01f01046fe4f': { - # Some public iperf servers + # Some public iperf3 servers # https://iperf.fr/iperf-servers.php#public-servers - 'host': ['iperf.openwisp.io', '2001:db8::1', '192.168.5.2'], + 'host': ['iperf3.openwisp.io', '2001:db8::1', '192.168.5.2'], 'client_options': { 'port': 5209, 'udp': {'bitrate': '30M'}, @@ -1093,8 +1093,8 @@ Example: }, # another org 'b9734710-db30-46b0-a2fc-01f01046fe4f': { - # available iperf servers - 'host': ['iperf.openwisp2.io', '192.168.5.3'], + # available iperf3 servers + 'host': ['iperf3.openwisp2.io', '192.168.5.3'], 'client_options': { 'port': 5207, 'udp': {'bitrate': '50M'}, @@ -1103,29 +1103,29 @@ Example: }, } -**Note:** If an organization has more than one iperf server configured, then it enables -the iperf checks to run concurrently on different devices. If all of the available servers +**Note:** If an organization has more than one iperf3 server configured, then it enables +the iperf3 checks to run concurrently on different devices. If all of the available servers are busy, then it will add the check back in the queue. -The celery-beat configuration for the iperf check needs to be added too: +The celery-beat configuration for the iperf3 check needs to be added too: .. 
code-block:: python from celery.schedules import crontab # Celery TIME_ZONE should be equal to django TIME_ZONE - # In order to schedule run_iperf_checks on the correct time intervals + # In order to schedule run_iperf3_checks on the correct time intervals CELERY_TIMEZONE = TIME_ZONE CELERY_BEAT_SCHEDULE = { # Other celery beat configurations - # Celery beat configuration for iperf check - 'run_iperf_checks': { + # Celery beat configuration for iperf3 check + 'run_iperf3_checks': { 'task': 'openwisp_monitoring.check.tasks.run_checks', # https://docs.celeryq.dev/en/latest/userguide/periodic-tasks.html#crontab-schedules # Executes check every 5 mins from 00:00 AM to 6:00 AM (night) 'schedule': crontab(minute='*/5', hour='0-6'), - # Iperf check path - 'args': (['openwisp_monitoring.check.classes.Iperf'],), + # Iperf3 check path + 'args': (['openwisp_monitoring.check.classes.Iperf3'],), 'relative': True, } } @@ -1143,15 +1143,15 @@ configured and running in the background. For testing purposes, you can run this check manually using the `run_checks <#run_checks>`_ command. -After that, you should see the iperf network measurements charts. +After that, you should see the iperf3 network measurements charts. -.. image:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/iperf-charts.png - :alt: Iperf network measurement charts +.. 
image:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/iperf3-charts.png + :alt: Iperf3 network measurement charts -Iperf check parameters -~~~~~~~~~~~~~~~~~~~~~~ +Iperf3 check parameters +~~~~~~~~~~~~~~~~~~~~~~~ -Currently, iperf check supports the following parameters: +Currently, iperf3 check supports the following parameters: +-----------------------+----------+--------------------------------------------------------------------+ | **Parameter** | **Type** | **Default Value** | @@ -1204,15 +1204,15 @@ Currently, iperf check supports the following parameters: +-----------------------+-------------------------------------------------------------------------------+ To learn how to use these parameters, please see the -`iperf check configuration example <#OPENWISP_MONITORING_IPERF_CHECK_CONFIG>`_. +`iperf3 check configuration example <#OPENWISP_MONITORING_IPERF3_CHECK_CONFIG>`_. Visit the `official documentation `_ to learn more about the iperf3 parameters. -Iperf authentication -~~~~~~~~~~~~~~~~~~~~ +Iperf3 authentication +~~~~~~~~~~~~~~~~~~~~~ -By default iperf check runs without any kind of **authentication**, +By default iperf3 check runs without any kind of **authentication**, in this section we will explain how to configure **RSA authentication** between the **client** and the **server** to restrict connections to authenticated clients. @@ -1231,11 +1231,11 @@ Server side After running the commands mentioned above, the public key will be stored in ``public_key.pem`` which will be used in **rsa_public_key** parameter -in `OPENWISP_MONITORING_IPERF_CHECK_CONFIG -<#OPENWISP_MONITORING_IPERF_CHECK_CONFIG>`_ +in `OPENWISP_MONITORING_IPERF3_CHECK_CONFIG +<#OPENWISP_MONITORING_IPERF3_CHECK_CONFIG>`_ and the private key will be contained in the file ``private_key.pem`` which will be used with **--rsa-private-key-path** command option when -starting the iperf server. +starting the iperf3 server. 2. 
Create user credentials ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1254,8 +1254,8 @@ Add the above hash with username in ``credentials.csv`` # file format: username,sha256 iperfuser,ee17a7f98cc87a6424fb52682396b2b6c058e9ab70e946188faa0714905771d7 -3. Now start the iperf server with auth options -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +3. Now start the iperf3 server with auth options +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. code-block:: shell @@ -1283,17 +1283,17 @@ You may also check your installed **iperf3 openwrt package** features: Optional features available: CPU affinity setting, IPv6 flow label, TCP congestion algorithm setting, sendfile / zerocopy, socket pacing, authentication # contains 'authentication' -2. Configure iperf check auth parameters -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +2. Configure iperf3 check auth parameters +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Now, add the following iperf authentication parameters -to `OPENWISP_MONITORING_IPERF_CHECK_CONFIG -<#OPENWISP_MONITORING_IPERF_CHECK_CONFIG>`_ +Now, add the following iperf3 authentication parameters +to `OPENWISP_MONITORING_IPERF3_CHECK_CONFIG +<#OPENWISP_MONITORING_IPERF3_CHECK_CONFIG>`_ in the settings: .. code-block:: python - OPENWISP_MONITORING_IPERF_CHECK_CONFIG = { + OPENWISP_MONITORING_IPERF3_CHECK_CONFIG = { 'a9734710-db30-46b0-a2fc-01f01046fe4f': { 'host': ['iperf1.openwisp.io', 'iperf2.openwisp.io', '192.168.5.2'], # All three parameters (username, password, rsa_publc_key) @@ -1333,10 +1333,10 @@ To add a check, you just need to select an available **check type** as shown bel The following example shows how to use the `OPENWISP_MONITORING_METRICS setting <#openwisp_monitoring_metrics>`_ -to reconfigure the system for `iperf check <#iperf-1>`_ to send an alert if +to reconfigure the system for `iperf3 check <#iperf3-1>`_ to send an alert if the measured **TCP bandwidth** has been less than **10 Mbit/s** for more than **2 days**. -1. 
By default, `Iperf checks <#iperf-1>`_ come with default alert settings, +1. By default, `Iperf3 checks <#iperf3-1>`_ come with default alert settings, but it is easy to customize alert settings through the device page as shown below: .. figure:: https://github.com/openwisp/openwisp-monitoring/raw/docs/docs/1.1/device-inline-alertsettings.png @@ -1350,11 +1350,11 @@ but it is easy to customize alert settings through the device page as shown belo from django.utils.translation import gettext_lazy as _ OPENWISP_MONITORING_METRICS = { - 'iperf': { + 'iperf3': { 'notification': { 'problem': { - 'verbose_name': 'Iperf PROBLEM', - 'verb': _('Iperf bandwidth is less than normal value'), + 'verbose_name': 'Iperf3 PROBLEM', + 'verb': _('Iperf3 bandwidth is less than normal value'), 'level': 'warning', 'email_subject': _( '[{site.name}] PROBLEM: {notification.target} {notification.verb}' @@ -1365,8 +1365,8 @@ but it is easy to customize alert settings through the device page as shown belo ), }, 'recovery': { - 'verbose_name': 'Iperf RECOVERY', - 'verb': _('Iperf bandwidth now back to normal'), + 'verbose_name': 'Iperf3 RECOVERY', + 'verb': _('Iperf3 bandwidth now back to normal'), 'level': 'info', 'email_subject': _( '[{site.name}] RECOVERY: {notification.target} {notification.verb}' @@ -1486,8 +1486,8 @@ validating custom parameters of a ``Check`` object. This setting allows you to choose whether `config_applied <#configuration-applied>`_ checks should be created automatically for newly registered devices. It's enabled by default. -``OPENWISP_MONITORING_AUTO_IPERF`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +``OPENWISP_MONITORING_AUTO_IPERF3`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +--------------+-------------+ | **type**: | ``bool`` | @@ -1495,11 +1495,11 @@ created automatically for newly registered devices. It's enabled by default. 
| **default**: | ``False`` | +--------------+-------------+ -This setting allows you to choose whether `iperf <#iperf-1>`_ checks should be +This setting allows you to choose whether `iperf3 <#iperf3-1>`_ checks should be created automatically for newly registered devices. It's disabled by default. -``OPENWISP_MONITORING_IPERF_CHECK_CONFIG`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +``OPENWISP_MONITORING_IPERF3_CHECK_CONFIG`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +--------------+-------------+ | **type**: | ``dict`` | @@ -1507,19 +1507,19 @@ created automatically for newly registered devices. It's disabled by default. | **default**: | ``{}`` | +--------------+-------------+ -This setting allows to override the default iperf check configuration defined in -``openwisp_monitoring.check.classes.iperf.DEFAULT_IPERF_CHECK_CONFIG``. +This setting allows to override the default iperf3 check configuration defined in +``openwisp_monitoring.check.classes.iperf3.DEFAULT_IPERF3_CHECK_CONFIG``. -For example, you can change the values of `supported iperf check parameters <#iperf-check-parameters>`_. +For example, you can change the values of `supported iperf3 check parameters <#iperf3-check-parameters>`_. .. 
code-block:: python - OPENWISP_MONITORING_IPERF_CHECK_CONFIG = { + OPENWISP_MONITORING_IPERF3_CHECK_CONFIG = { # 'org_pk' : {'host' : [], 'client_options' : {}} 'a9734710-db30-46b0-a2fc-01f01046fe4f': { - # Some public iperf servers + # Some public iperf3 servers # https://iperf.fr/iperf-servers.php#public-servers - 'host': ['iperf.openwisp.io', '2001:db8::1', '192.168.5.2'], + 'host': ['iperf3.openwisp.io', '2001:db8::1', '192.168.5.2'], 'client_options': { 'port': 6209, # Number of parallel client streams to run @@ -1529,7 +1529,7 @@ For example, you can change the values of `supported iperf check parameters <#ip 'parallel': 5, # Set the connect_timeout (in milliseconds) for establishing # the initial control connection to the server, the lower the value - # the faster the down iperf server will be detected + # the faster the down iperf3 server will be detected 'connect_timeout': 1, # Window size / socket buffer size 'window': '300K', @@ -1545,8 +1545,8 @@ For example, you can change the values of `supported iperf check parameters <#ip } } -``OPENWISP_MONITORING_IPERF_CHECK_DELETE_RSA_KEY`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +``OPENWISP_MONITORING_IPERF3_CHECK_DELETE_RSA_KEY`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +--------------+-------------------------------+ | **type**: | ``bool`` | @@ -1555,11 +1555,11 @@ For example, you can change the values of `supported iperf check parameters <#ip +--------------+-------------------------------+ This setting allows you to set whether -`iperf check RSA public key <#configure-iperf-check-for-authentication>`_ +`iperf3 check RSA public key <#configure-iperf3-check-for-authentication>`_ will be deleted after successful completion of the check or not. 
-``OPENWISP_MONITORING_IPERF_CHECK_LOCK_EXPIRE`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +``OPENWISP_MONITORING_IPERF3_CHECK_LOCK_EXPIRE`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +--------------+-------------------------------+ | **type**: | ``int`` | @@ -1567,8 +1567,8 @@ will be deleted after successful completion of the check or not. | **default**: | ``600`` | +--------------+-------------------------------+ -This setting allows you to set a cache lock expiration time for the iperf check when -running on multiple servers. Make sure it is always greater than the total iperf check +This setting allows you to set a cache lock expiration time for the iperf3 check when +running on multiple servers. Make sure it is always greater than the total iperf3 check time, i.e. greater than the TCP + UDP test time. By default, it is set to **600 seconds (10 mins)**. ``OPENWISP_MONITORING_AUTO_CHARTS`` diff --git a/openwisp_monitoring/check/apps.py b/openwisp_monitoring/check/apps.py index 8f030cf4f..e0a45998a 100644 --- a/openwisp_monitoring/check/apps.py +++ b/openwisp_monitoring/check/apps.py @@ -32,11 +32,11 @@ def _connect_signals(self): sender=load_model('config', 'Device'), dispatch_uid='auto_config_check', ) - if app_settings.AUTO_IPERF: - from .base.models import auto_iperf_check_receiver + if app_settings.AUTO_IPERF3: + from .base.models import auto_iperf3_check_receiver post_save.connect( - auto_iperf_check_receiver, + auto_iperf3_check_receiver, sender=load_model('config', 'Device'), - dispatch_uid='auto_iperf_check', + dispatch_uid='auto_iperf3_check', ) diff --git a/openwisp_monitoring/check/base/models.py b/openwisp_monitoring/check/base/models.py index a31f482b7..0d040a7b1 100644 --- a/openwisp_monitoring/check/base/models.py +++ b/openwisp_monitoring/check/base/models.py @@ -11,7 +11,7 @@ from openwisp_monitoring.check import settings as app_settings from openwisp_monitoring.check.tasks import ( auto_create_config_check, - 
auto_create_iperf_check, + auto_create_iperf3_check, auto_create_ping, ) from openwisp_utils.base import TimeStampedEditableModel @@ -142,9 +142,9 @@ def auto_config_check_receiver(sender, instance, created, **kwargs): ) -def auto_iperf_check_receiver(sender, instance, created, **kwargs): +def auto_iperf3_check_receiver(sender, instance, created, **kwargs): """ - Implements OPENWISP_MONITORING_AUTO_IPERF + Implements OPENWISP_MONITORING_AUTO_IPERF3 The creation step is executed in the background """ # we need to skip this otherwise this task will be executed @@ -152,7 +152,7 @@ def auto_iperf_check_receiver(sender, instance, created, **kwargs): if not created: return transaction_on_commit( - lambda: auto_create_iperf_check.delay( + lambda: auto_create_iperf3_check.delay( model=sender.__name__.lower(), app_label=sender._meta.app_label, object_id=str(instance.pk), diff --git a/openwisp_monitoring/check/classes/__init__.py b/openwisp_monitoring/check/classes/__init__.py index 4a85b5243..a7d9fde29 100644 --- a/openwisp_monitoring/check/classes/__init__.py +++ b/openwisp_monitoring/check/classes/__init__.py @@ -1,3 +1,3 @@ from .config_applied import ConfigApplied # noqa -from .iperf import Iperf # noqa +from .iperf3 import Iperf3 # noqa from .ping import Ping # noqa diff --git a/openwisp_monitoring/check/classes/iperf.py b/openwisp_monitoring/check/classes/iperf3.py similarity index 83% rename from openwisp_monitoring/check/classes/iperf.py rename to openwisp_monitoring/check/classes/iperf3.py index 80cf1c66c..e3f56748a 100644 --- a/openwisp_monitoring/check/classes/iperf.py +++ b/openwisp_monitoring/check/classes/iperf3.py @@ -21,7 +21,7 @@ AlertSettings = load_model('monitoring', 'AlertSettings') DeviceConnection = load_model('connection', 'DeviceConnection') -DEFAULT_IPERF_CHECK_CONFIG = { +DEFAULT_IPERF3_CHECK_CONFIG = { 'host': { 'type': 'array', 'items': { @@ -29,7 +29,7 @@ }, 'default': [], }, - # username, password max_length chosen from iperf3 docs to avoid 
iperf param errors + # username, password max_length chosen from iperf3 docs to avoid iperf3 param errors 'username': {'type': 'string', 'default': '', 'minLength': 1, 'maxLength': 20}, 'password': {'type': 'string', 'default': '', 'minLength': 1, 'maxLength': 20}, 'rsa_public_key': { @@ -149,7 +149,7 @@ } -def get_iperf_schema(): +def get_iperf3_schema(): schema = { '$schema': 'http://json-schema.org/draft-07/schema#', 'type': 'object', @@ -160,13 +160,13 @@ def get_iperf_schema(): 'rsa_public_key': ['username', 'password'], }, } - schema['properties'] = DEFAULT_IPERF_CHECK_CONFIG + schema['properties'] = DEFAULT_IPERF3_CHECK_CONFIG return schema -class Iperf(BaseCheck): +class Iperf3(BaseCheck): - schema = get_iperf_schema() + schema = get_iperf3_schema() def validate_params(self, params=None): try: @@ -181,23 +181,23 @@ def validate_params(self, params=None): message = '{0}: {1}'.format(message, e.message) raise ValidationError({'params': message}) from e - def _validate_iperf_config(self, org): - # if iperf config is present and validate it's params - if app_settings.IPERF_CHECK_CONFIG: + def _validate_iperf3_config(self, org): + # if iperf3 config is present and validate it's params + if app_settings.IPERF3_CHECK_CONFIG: self.validate_params( - params=app_settings.IPERF_CHECK_CONFIG.get(str(org.id)) + params=app_settings.IPERF3_CHECK_CONFIG.get(str(org.id)) ) def check(self, store=True): lock_acquired = False org = self.related_object.organization - self._validate_iperf_config(org) - available_iperf_servers = self._get_param('host', 'host.default') - if not available_iperf_servers: + self._validate_iperf3_config(org) + available_iperf3_servers = self._get_param('host', 'host.default') + if not available_iperf3_servers: logger.warning( ( - f'Iperf servers for organization "{org}" ' - f'is not configured properly, iperf check skipped!' + f'Iperf3 servers for organization "{org}" ' + f'is not configured properly, iperf3 check skipped!' 
) ) return @@ -205,46 +205,46 @@ def check(self, store=True): 'client_options.time', 'client_options.properties.time.default' ) # Try to acquire a lock, or put task back on queue - for server in available_iperf_servers: - server_lock_key = f'ow_monitoring_{org}_iperf_check_{server}' - # Set available_iperf_server to the org device + for server in available_iperf3_servers: + server_lock_key = f'ow_monitoring_{org}_iperf3_check_{server}' + # Set available_iperf3_server to the org device lock_acquired = cache.add( server_lock_key, str(self.related_object), - timeout=app_settings.IPERF_CHECK_LOCK_EXPIRE, + timeout=app_settings.IPERF3_CHECK_LOCK_EXPIRE, ) if lock_acquired: break else: logger.info( ( - f'At the moment, all available iperf servers of organization "{org}" ' + f'At the moment, all available iperf3 servers of organization "{org}" ' f'are busy running checks, putting "{self.check_instance}" back in the queue..' ) ) - # Return the iperf_check task to the queue, - # it will executed after 2 * iperf_check_time (TCP+UDP) + # Return the iperf3_check task to the queue, + # it will executed after 2 * iperf3_check_time (TCP+UDP) self.check_instance.perform_check_delayed(duration=2 * time) return try: - # Execute the iperf check with current available server - result = self._run_iperf_check(store, server, time) + # Execute the iperf3 check with current available server + result = self._run_iperf3_check(store, server, time) finally: # Release the lock after completion of the check cache.delete(server_lock_key) return result - def _run_iperf_check(self, store, server, time): + def _run_iperf3_check(self, store, server, time): device_connection = self._get_device_connection() if not device_connection: logger.warning( - f'Failed to get a working DeviceConnection for "{self.related_object}", iperf check skipped!' + f'Failed to get a working DeviceConnection for "{self.related_object}", iperf3 check skipped!' 
) return # The DeviceConnection could fail if the management tunnel is down. if not device_connection.connect(): logger.warning( - f'DeviceConnection for "{self.related_object}" is not working, iperf check skipped!' + f'DeviceConnection for "{self.related_object}" is not working, iperf3 check skipped!' ) return command_tcp, command_udp = self._get_check_commands(server) @@ -260,24 +260,24 @@ def _run_iperf_check(self, store, server, time): ) return - result_tcp = self._get_iperf_result(result, exit_code, mode='TCP') + result_tcp = self._get_iperf3_result(result, exit_code, mode='TCP') # UDP mode result, exit_code = device_connection.connector_instance.exec_command( command_udp, raise_unexpected_exit=False ) - result_udp = self._get_iperf_result(result, exit_code, mode='UDP') + result_udp = self._get_iperf3_result(result, exit_code, mode='UDP') result = {} if store and result_tcp and result_udp: - # Store iperf_result field 1 if any mode passes, store 0 when both fails - iperf_result = result_tcp['iperf_result'] | result_udp['iperf_result'] - result.update({**result_tcp, **result_udp, 'iperf_result': iperf_result}) + # Store iperf3_result field 1 if any mode passes, store 0 when both fails + iperf3_result = result_tcp['iperf3_result'] | result_udp['iperf3_result'] + result.update({**result_tcp, **result_udp, 'iperf3_result': iperf3_result}) self.store_result(result) device_connection.disconnect() return result def _get_check_commands(self, server): """ - Returns tcp & udp commands for iperf check + Returns tcp & udp commands for iperf3 check """ username = self._get_param('username', 'username.default') port = self._get_param( @@ -310,8 +310,8 @@ def _get_check_commands(self, server): 'client_options.properties.udp.properties.length.default', ) - rev_or_bidir, test_end_condition = self._get_iperf_test_conditions() - logger.info(f'«« Iperf server : {server}, Device : {self.related_object} »»') + rev_or_bidir, test_end_condition = self._get_iperf3_test_conditions() + 
logger.info(f'«« Iperf3 server : {server}, Device : {self.related_object} »»') command_tcp = ( f'iperf3 -c {server} -p {port} {test_end_condition} --connect-timeout {ct} ' f'-b {tcp_bitrate} -l {tcp_length} -w {window} -P {parallel} {rev_or_bidir} -J' @@ -327,7 +327,7 @@ def _get_check_commands(self, server): password = self._get_param('password', 'password.default') key = self._get_param('rsa_public_key', 'rsa_public_key.default') rsa_public_key = self._get_compelete_rsa_key(key) - rsa_public_key_path = '/tmp/iperf-public-key.pem' + rsa_public_key_path = '/tmp/iperf3-public-key.pem' command_tcp = ( f'echo "{rsa_public_key}" > {rsa_public_key_path} && ' @@ -342,14 +342,14 @@ def _get_check_commands(self, server): f'-b {udp_bitrate} -l {udp_length} -w {window} -P {parallel} {rev_or_bidir} -u -J' ) - # If IPERF_CHECK_DELETE_RSA_KEY, remove rsa_public_key from the device - if app_settings.IPERF_CHECK_DELETE_RSA_KEY: + # If IPERF3_CHECK_DELETE_RSA_KEY, remove rsa_public_key from the device + if app_settings.IPERF3_CHECK_DELETE_RSA_KEY: command_udp = f'{command_udp} && rm -f {rsa_public_key_path}' return command_tcp, command_udp - def _get_iperf_test_conditions(self): + def _get_iperf3_test_conditions(self): """ - Returns iperf check test conditions (rev_or_bidir, end_condition) + Returns iperf3 check test conditions (rev_or_bidir, end_condition) """ time = self._get_param( 'client_options.time', 'client_options.properties.time.default' @@ -368,7 +368,7 @@ def _get_iperf_test_conditions(self): 'client_options.properties.bidirectional.default', ) # by default we use 'time' param - # for the iperf test end condition + # for the iperf3 test end condition test_end_condition = f'-t {time}' # if 'bytes' present in config # use it instead of 'time' @@ -425,41 +425,41 @@ def _get_param(self, conf_key, default_conf_key): Returns specified param or its default value according to the schema """ org_id = str(self.related_object.organization.id) - iperf_config = 
app_settings.IPERF_CHECK_CONFIG + iperf3_config = app_settings.IPERF3_CHECK_CONFIG if self.params: check_params = self._deep_get(self.params, conf_key) if check_params: return check_params - if iperf_config: - iperf_config = iperf_config.get(org_id) - iperf_config_param = self._deep_get(iperf_config, conf_key) - if iperf_config_param: - return iperf_config_param + if iperf3_config: + iperf3_config = iperf3_config.get(org_id) + iperf3_config_param = self._deep_get(iperf3_config, conf_key) + if iperf3_config_param: + return iperf3_config_param - return self._deep_get(DEFAULT_IPERF_CHECK_CONFIG, default_conf_key) + return self._deep_get(DEFAULT_IPERF3_CHECK_CONFIG, default_conf_key) - def _get_iperf_result(self, result, exit_code, mode): + def _get_iperf3_result(self, result, exit_code, mode): """ - Returns iperf test result + Returns iperf3 test result """ try: result = loads(result) except JSONDecodeError: # Errors other than iperf3 test errors logger.warning( - f'Iperf check failed for "{self.related_object}", error - {result.strip()}' + f'Iperf3 check failed for "{self.related_object}", error - {result.strip()}' ) return if mode == 'TCP': if exit_code != 0: logger.warning( - f'Iperf check failed for "{self.related_object}", {result["error"]}' + f'Iperf3 check failed for "{self.related_object}", {result["error"]}' ) return { - 'iperf_result': 0, + 'iperf3_result': 0, 'sent_bps_tcp': 0.0, 'received_bps_tcp': 0.0, 'sent_bytes_tcp': 0, @@ -470,7 +470,7 @@ def _get_iperf_result(self, result, exit_code, mode): sent = result['end']['sum_sent'] received = result['end']['sum_received'] return { - 'iperf_result': 1, + 'iperf3_result': 1, 'sent_bps_tcp': float(sent['bits_per_second']), 'received_bps_tcp': float(received['bits_per_second']), 'sent_bytes_tcp': sent['bytes'], @@ -481,10 +481,10 @@ def _get_iperf_result(self, result, exit_code, mode): elif mode == 'UDP': if exit_code != 0: logger.warning( - f'Iperf check failed for "{self.related_object}", {result["error"]}' + 
f'Iperf3 check failed for "{self.related_object}", {result["error"]}' ) return { - 'iperf_result': 0, + 'iperf3_result': 0, 'sent_bps_udp': 0.0, 'sent_bytes_udp': 0, 'jitter': 0.0, @@ -494,7 +494,7 @@ def _get_iperf_result(self, result, exit_code, mode): } else: return { - 'iperf_result': 1, + 'iperf3_result': 1, 'sent_bps_udp': float(result['end']['sum']['bits_per_second']), 'sent_bytes_udp': result['end']['sum']['bytes'], 'jitter': float(result['end']['sum']['jitter_ms']), @@ -509,8 +509,8 @@ def store_result(self, result): """ metric = self._get_metric() copied = result.copy() - iperf_result = copied.pop('iperf_result') - metric.write(iperf_result, extra_values=copied) + iperf3_result = copied.pop('iperf3_result') + metric.write(iperf3_result, extra_values=copied) def _get_metric(self): """ @@ -524,7 +524,7 @@ def _get_metric(self): def _create_alert_settings(self, metric): """ - Creates default iperf alert settings with is_active=False + Creates default iperf3 alert settings with is_active=False """ alert_settings = AlertSettings(metric=metric, is_active=False) alert_settings.full_clean() @@ -532,7 +532,7 @@ def _create_alert_settings(self, metric): def _create_charts(self, metric): """ - Creates iperf related charts + Creates iperf3 related charts """ charts = [ 'bandwidth', diff --git a/openwisp_monitoring/check/settings.py b/openwisp_monitoring/check/settings.py index 24d338a81..54b439bab 100644 --- a/openwisp_monitoring/check/settings.py +++ b/openwisp_monitoring/check/settings.py @@ -5,17 +5,17 @@ ( ('openwisp_monitoring.check.classes.Ping', 'Ping'), ('openwisp_monitoring.check.classes.ConfigApplied', 'Configuration Applied'), - ('openwisp_monitoring.check.classes.Iperf', 'Iperf'), + ('openwisp_monitoring.check.classes.Iperf3', 'Iperf3'), ), ) AUTO_PING = get_settings_value('AUTO_PING', True) AUTO_CONFIG_CHECK = get_settings_value('AUTO_DEVICE_CONFIG_CHECK', True) MANAGEMENT_IP_ONLY = get_settings_value('MANAGEMENT_IP_ONLY', True) PING_CHECK_CONFIG = 
get_settings_value('PING_CHECK_CONFIG', {}) -AUTO_IPERF = get_settings_value('AUTO_IPERF', False) -IPERF_CHECK_CONFIG = get_settings_value('IPERF_CHECK_CONFIG', {}) -IPERF_CHECK_LOCK_EXPIRE = get_settings_value( - 'IPERF_CHECK_LOCK_EXPIRE', 10 * 60 +AUTO_IPERF3 = get_settings_value('AUTO_IPERF3', False) +IPERF3_CHECK_CONFIG = get_settings_value('IPERF3_CHECK_CONFIG', {}) +IPERF3_CHECK_LOCK_EXPIRE = get_settings_value( + 'IPERF3_CHECK_LOCK_EXPIRE', 10 * 60 ) # 10 minutes arbitrarily chosen (must be longer than TCP + UDP test time) -IPERF_CHECK_DELETE_RSA_KEY = get_settings_value('IPERF_CHECK_DELETE_RSA_KEY', True) +IPERF3_CHECK_DELETE_RSA_KEY = get_settings_value('IPERF3_CHECK_DELETE_RSA_KEY', True) CHECKS_LIST = get_settings_value('CHECK_LIST', list(dict(CHECK_CLASSES).keys())) diff --git a/openwisp_monitoring/check/tasks.py b/openwisp_monitoring/check/tasks.py index 76ce82bb1..6a643db48 100644 --- a/openwisp_monitoring/check/tasks.py +++ b/openwisp_monitoring/check/tasks.py @@ -118,16 +118,16 @@ def auto_create_config_check( @shared_task -def auto_create_iperf_check( +def auto_create_iperf3_check( model, app_label, object_id, check_model=None, content_type_model=None ): """ - Called by openwisp_monitoring.check.models.auto_iperf_check_receiver + Called by openwisp_monitoring.check.models.auto_iperf3_check_receiver """ Check = check_model or get_check_model() - iperf_check_path = 'openwisp_monitoring.check.classes.Iperf' + iperf3_check_path = 'openwisp_monitoring.check.classes.Iperf3' has_check = Check.objects.filter( - object_id=object_id, content_type__model='device', check_type=iperf_check_path + object_id=object_id, content_type__model='device', check_type=iperf3_check_path ).exists() # create new check only if necessary if has_check: @@ -135,8 +135,8 @@ def auto_create_iperf_check( content_type_model = content_type_model or ContentType ct = content_type_model.objects.get(app_label=app_label, model=model) check = Check( - name='Iperf', - 
check_type=iperf_check_path, + name='Iperf3', + check_type=iperf3_check_path, content_type=ct, object_id=object_id, ) diff --git a/openwisp_monitoring/check/tests/iperf_test_utils.py b/openwisp_monitoring/check/tests/iperf3_test_utils.py similarity index 100% rename from openwisp_monitoring/check/tests/iperf_test_utils.py rename to openwisp_monitoring/check/tests/iperf3_test_utils.py diff --git a/openwisp_monitoring/check/tests/test_iperf.py b/openwisp_monitoring/check/tests/test_iperf3.py similarity index 74% rename from openwisp_monitoring/check/tests/test_iperf.py rename to openwisp_monitoring/check/tests/test_iperf3.py index 7060aae8b..f8f639354 100644 --- a/openwisp_monitoring/check/tests/test_iperf.py +++ b/openwisp_monitoring/check/tests/test_iperf3.py @@ -10,13 +10,13 @@ from openwisp_controller.connection.models import DeviceConnection as device_connection from openwisp_controller.connection.settings import UPDATE_STRATEGIES from openwisp_controller.connection.tests.utils import CreateConnectionsMixin, SshServer -from openwisp_monitoring.check.classes.iperf import get_iperf_schema -from openwisp_monitoring.check.classes.iperf import logger as iperf_logger +from openwisp_monitoring.check.classes.iperf3 import get_iperf3_schema +from openwisp_monitoring.check.classes.iperf3 import logger as iperf3_logger from ...device.tests import TestDeviceMonitoringMixin from .. 
import settings as app_settings -from ..classes import Iperf -from .iperf_test_utils import ( +from ..classes import Iperf3 +from .iperf3_test_utils import ( INVALID_PARAMS, PARAM_ERROR, RESULT_AUTH_FAIL, @@ -32,11 +32,13 @@ Check = load_model('check', 'Check') -class TestIperf(CreateConnectionsMixin, TestDeviceMonitoringMixin, TransactionTestCase): +class TestIperf3( + CreateConnectionsMixin, TestDeviceMonitoringMixin, TransactionTestCase +): - _IPERF = app_settings.CHECK_CLASSES[2][0] + _IPERF3 = app_settings.CHECK_CLASSES[2][0] _RESULT_KEYS = [ - 'iperf_result', + 'iperf3_result', 'sent_bps_tcp', 'received_bps_tcp', 'sent_bytes_tcp', @@ -49,10 +51,10 @@ class TestIperf(CreateConnectionsMixin, TestDeviceMonitoringMixin, TransactionTe 'lost_packets', 'lost_percent', ] - _IPERF_TEST_SERVER = ['iperf.openwisptestserver.com'] - _IPERF_TEST_MULTIPLE_SERVERS = [ - 'iperf.openwisptestserver1.com', - 'iperf.openwisptestserver2.com', + _IPERF3_TEST_SERVER = ['iperf3.openwisptestserver.com'] + _IPERF3_TEST_MULTIPLE_SERVERS = [ + 'iperf3.openwisptestserver1.com', + 'iperf3.openwisptestserver2.com', ] @classmethod @@ -67,7 +69,7 @@ def setUpClass(cls): def tearDownClass(cls): super().tearDownClass() cls.mock_ssh_server.__exit__() - app_settings.IPERF_CHECK_CONFIG = {} + app_settings.IPERF3_CHECK_CONFIG = {} def setUp(self): ckey = self._create_credentials_with_key(port=self.ssh_server.port) @@ -75,20 +77,20 @@ def setUp(self): self.device = self.dc.device self.org_id = str(self.device.organization.id) self.dc.connect() - app_settings.IPERF_CHECK_CONFIG = { - self.org_id: {'host': self._IPERF_TEST_SERVER} + app_settings.IPERF3_CHECK_CONFIG = { + self.org_id: {'host': self._IPERF3_TEST_SERVER} } self._EXPECTED_COMMAND_CALLS = [ call( ( - 'iperf3 -c iperf.openwisptestserver.com -p 5201 -t 10 --connect-timeout 1 ' + 'iperf3 -c iperf3.openwisptestserver.com -p 5201 -t 10 --connect-timeout 1 ' '-b 0 -l 128K -w 0 -P 1 -J' ), raise_unexpected_exit=False, ), call( ( - 'iperf3 -c 
iperf.openwisptestserver.com -p 5201 -t 10 --connect-timeout 1 ' + 'iperf3 -c iperf3.openwisptestserver.com -p 5201 -t 10 --connect-timeout 1 ' '-b 30M -l 0 -w 0 -P 1 -u -J' ), raise_unexpected_exit=False, @@ -97,30 +99,30 @@ def setUp(self): self._EXPECTED_WARN_CALLS = [ call( ( - f'Iperf check failed for "{self.device}", ' + f'Iperf3 check failed for "{self.device}", ' 'error - unable to connect to server: Connection refused' ) ), call( ( - f'Iperf check failed for "{self.device}", ' + f'Iperf3 check failed for "{self.device}", ' 'error - unable to connect to server: Connection refused' ) ), ] - def _perform_iperf_check(self): - check = Check.objects.get(check_type=self._IPERF) + def _perform_iperf3_check(self): + check = Check.objects.get(check_type=self._IPERF3) return check.perform_check(store=False) def _set_auth_expected_calls(self, config): password = config[self.org_id]['password'] username = config[self.org_id]['username'] - server = 'iperf.openwisptestserver.com' + server = 'iperf3.openwisptestserver.com' test_prefix = '-----BEGIN PUBLIC KEY-----\n' test_suffix = '\n-----END PUBLIC KEY-----' key = config[self.org_id]['rsa_public_key'] - rsa_key_path = '/tmp/iperf-public-key.pem' + rsa_key_path = '/tmp/iperf3-public-key.pem' self._EXPECTED_COMMAND_CALLS = [ call( @@ -143,10 +145,10 @@ def _set_auth_expected_calls(self, config): ), ] - def _assert_iperf_fail_result(self, result): + def _assert_iperf3_fail_result(self, result): for key in self._RESULT_KEYS: self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 0) + self.assertEqual(result['iperf3_result'], 0) self.assertEqual(result['sent_bps_tcp'], 0.0) self.assertEqual(result['received_bps_tcp'], 0.0) self.assertEqual(result['sent_bytes_tcp'], 0) @@ -159,16 +161,16 @@ def _assert_iperf_fail_result(self, result): self.assertEqual(result['lost_percent'], 0.0) @patch.object(Ssh, 'exec_command') - @patch.object(iperf_logger, 'warning') - def test_iperf_check_no_params(self, mock_warn, 
mock_exec_command): + @patch.object(iperf3_logger, 'warning') + def test_iperf3_check_no_params(self, mock_warn, mock_exec_command): mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] # By default check params {} tcp_result = loads(RESULT_TCP)['end'] udp_result = loads(RESULT_UDP)['end']['sum'] - result = self._perform_iperf_check() + result = self._perform_iperf3_check() for key in self._RESULT_KEYS: self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 1) + self.assertEqual(result['iperf3_result'], 1) self.assertEqual( result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] ) @@ -182,13 +184,13 @@ def test_iperf_check_no_params(self, mock_warn, mock_exec_command): mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) @patch.object(Ssh, 'exec_command') - @patch.object(iperf_logger, 'warning') - def test_iperf_check_params(self, mock_warn, mock_exec_command): + @patch.object(iperf3_logger, 'warning') + def test_iperf3_check_params(self, mock_warn, mock_exec_command): mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] - check = Check.objects.get(check_type=self._IPERF) + check = Check.objects.get(check_type=self._IPERF3) tcp_result = loads(RESULT_TCP)['end'] udp_result = loads(RESULT_UDP)['end']['sum'] - server = self._IPERF_TEST_SERVER[0] + server = self._IPERF3_TEST_SERVER[0] test_prefix = '-----BEGIN PUBLIC KEY-----\n' test_suffix = '\n-----END PUBLIC KEY-----' rsa_key_path = '/tmp/test-rsa.pem' @@ -218,7 +220,7 @@ def test_iperf_check_params(self, mock_warn, mock_exec_command): username = test_params['username'] password = test_params['password'] key = test_params['rsa_public_key'] - rsa_key_path = '/tmp/iperf-public-key.pem' + rsa_key_path = '/tmp/iperf3-public-key.pem' check.params = test_params check.save() self._EXPECTED_COMMAND_CALLS = [ @@ -241,10 +243,10 @@ def test_iperf_check_params(self, mock_warn, mock_exec_command): raise_unexpected_exit=False, ), ] - result = 
self._perform_iperf_check() + result = self._perform_iperf3_check() for key in self._RESULT_KEYS: self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 1) + self.assertEqual(result['iperf3_result'], 1) self.assertEqual( result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] ) @@ -258,30 +260,30 @@ def test_iperf_check_params(self, mock_warn, mock_exec_command): mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) @patch.object(Ssh, 'exec_command') - @patch.object(iperf_logger, 'warning') - def test_iperf_check_config(self, mock_warn, mock_exec_command): + @patch.object(iperf3_logger, 'warning') + def test_iperf3_check_config(self, mock_warn, mock_exec_command): mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] tcp_result = loads(RESULT_TCP)['end'] udp_result = loads(RESULT_UDP)['end']['sum'] self._EXPECTED_COMMAND_CALLS = [ call( ( - 'iperf3 -c iperf.openwisptestserver.com -p 9201 -k 1M --connect-timeout 2000 ' + 'iperf3 -c iperf3.openwisptestserver.com -p 9201 -k 1M --connect-timeout 2000 ' '-b 10M -l 512K -w 0 -P 1 --bidir -J' ), raise_unexpected_exit=False, ), call( ( - 'iperf3 -c iperf.openwisptestserver.com -p 9201 -k 1M --connect-timeout 2000 ' + 'iperf3 -c iperf3.openwisptestserver.com -p 9201 -k 1M --connect-timeout 2000 ' '-b 50M -l 256K -w 0 -P 1 --bidir -u -J' ), raise_unexpected_exit=False, ), ] - iperf_config = { + iperf3_config = { self.org_id: { - 'host': ['iperf.openwisptestserver.com'], + 'host': ['iperf3.openwisptestserver.com'], 'client_options': { 'port': 9201, 'time': 120, @@ -294,12 +296,12 @@ def test_iperf_check_config(self, mock_warn, mock_exec_command): }, } } - with patch.object(app_settings, 'IPERF_CHECK_CONFIG', iperf_config): - with patch.object(Iperf, 'schema', get_iperf_schema()): - result = self._perform_iperf_check() + with patch.object(app_settings, 'IPERF3_CHECK_CONFIG', iperf3_config): + with patch.object(Iperf3, 'schema', get_iperf3_schema()): + result = 
self._perform_iperf3_check() for key in self._RESULT_KEYS: self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 1) + self.assertEqual(result['iperf3_result'], 1) self.assertEqual( result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] ) @@ -312,25 +314,25 @@ def test_iperf_check_config(self, mock_warn, mock_exec_command): self.assertEqual(mock_exec_command.call_count, 2) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) - @patch.object(iperf_logger, 'warning') - def test_iperf_device_connection(self, mock_warn): + @patch.object(iperf3_logger, 'warning') + def test_iperf3_device_connection(self, mock_warn): dc = self.dc with self.subTest('Test active device connection when management tunnel down'): with patch.object( device_connection, 'connect', return_value=False ) as mocked_connect: - self._perform_iperf_check() + self._perform_iperf3_check() mock_warn.assert_called_with( - f'DeviceConnection for "{self.device}" is not working, iperf check skipped!' + f'DeviceConnection for "{self.device}" is not working, iperf3 check skipped!' ) self.assertEqual(mocked_connect.call_count, 1) with self.subTest('Test device connection is not enabled'): dc.enabled = False dc.save() - self._perform_iperf_check() + self._perform_iperf3_check() mock_warn.assert_called_with( - f'Failed to get a working DeviceConnection for "{self.device}", iperf check skipped!' + f'Failed to get a working DeviceConnection for "{self.device}", iperf3 check skipped!' ) with self.subTest('Test device connection is not with right update strategy'): @@ -338,13 +340,13 @@ def test_iperf_device_connection(self, mock_warn): dc.is_working = True dc.enabled = True dc.save() - self._perform_iperf_check() + self._perform_iperf3_check() mock_warn.assert_called_with( - f'Failed to get a working DeviceConnection for "{self.device}", iperf check skipped!' + f'Failed to get a working DeviceConnection for "{self.device}", iperf3 check skipped!' 
) def test_iperf_check_content_object_none(self): - check = Check(name='Iperf check', check_type=self._IPERF, params={}) + check = Check(name='Iperf3 check', check_type=self._IPERF3, params={}) try: check.check_instance.validate() except ValidationError as e: @@ -352,10 +354,10 @@ def test_iperf_check_content_object_none(self): else: self.fail('ValidationError not raised') - def test_iperf_check_content_object_not_device(self): + def test_iperf3_check_content_object_not_device(self): check = Check( - name='Iperf check', - check_type=self._IPERF, + name='Iperf3 check', + check_type=self._IPERF3, content_object=self._create_user(), params={}, ) @@ -366,11 +368,11 @@ def test_iperf_check_content_object_not_device(self): else: self.fail('ValidationError not raised') - def test_iperf_check_schema_violation(self): + def test_iperf3_check_schema_violation(self): for invalid_param in INVALID_PARAMS: check = Check( - name='Iperf check', - check_type=self._IPERF, + name='Iperf3 check', + check_type=self._IPERF3, content_object=self.device, params=invalid_param, ) @@ -382,31 +384,33 @@ def test_iperf_check_schema_violation(self): self.fail('ValidationError not raised') @patch.object(Ssh, 'exec_command') - @patch.object(iperf_logger, 'warning') - def test_iperf_check(self, mock_warn, mock_exec_command): + @patch.object(iperf3_logger, 'warning') + def test_iperf3_check(self, mock_warn, mock_exec_command): error = "ash: iperf3: not found" tcp_result = loads(RESULT_TCP)['end'] udp_result = loads(RESULT_UDP)['end']['sum'] - iperf_json_error_config = { + iperf3_json_error_config = { self.org_id: { - 'host': ['iperf.openwisptestserver.com'], + 'host': ['iperf3.openwisptestserver.com'], 'username': 'test', 'password': 'testpass', 'rsa_public_key': 'INVALID_RSA_KEY', } } - with patch.object(app_settings, 'IPERF_CHECK_CONFIG', iperf_json_error_config): + with patch.object( + app_settings, 'IPERF3_CHECK_CONFIG', iperf3_json_error_config + ): with self.subTest('Test iperf3 errors not in 
json format'): mock_exec_command.side_effect = [(PARAM_ERROR, 1), (PARAM_ERROR, 1)] EXPECTED_WARN_CALLS = [ call( - f'Iperf check failed for "{self.device}", error - {PARAM_ERROR}' + f'Iperf3 check failed for "{self.device}", error - {PARAM_ERROR}' ), call( - f'Iperf check failed for "{self.device}", error - {PARAM_ERROR}' + f'Iperf3 check failed for "{self.device}", error - {PARAM_ERROR}' ), ] - self._perform_iperf_check() + self._perform_iperf3_check() self.assertEqual(mock_warn.call_count, 2) self.assertEqual(mock_exec_command.call_count, 2) mock_warn.assert_has_calls(EXPECTED_WARN_CALLS) @@ -415,7 +419,7 @@ def test_iperf_check(self, mock_warn, mock_exec_command): with self.subTest('Test iperf3 is not installed on the device'): mock_exec_command.side_effect = [(error, 127)] - self._perform_iperf_check() + self._perform_iperf3_check() mock_warn.assert_called_with( f'Iperf3 is not installed on the "{self.device}", error - {error}' ) @@ -424,14 +428,14 @@ def test_iperf_check(self, mock_warn, mock_exec_command): mock_warn.reset_mock() mock_exec_command.reset_mock() - with self.subTest('Test iperf check passes in both TCP & UDP'): + with self.subTest('Test iperf3 check passes in both TCP & UDP'): mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] self.assertEqual(Chart.objects.count(), 2) self.assertEqual(Metric.objects.count(), 2) - result = self._perform_iperf_check() + result = self._perform_iperf3_check() for key in self._RESULT_KEYS: self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 1) + self.assertEqual(result['iperf3_result'], 1) self.assertEqual( result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] ) @@ -453,12 +457,12 @@ def test_iperf_check(self, mock_warn, mock_exec_command): self.assertEqual(result['lost_percent'], udp_result['lost_percent']) self.assertEqual(Chart.objects.count(), 8) self.assertEqual(Check.objects.count(), 3) - iperf_metric = Metric.objects.get(key='iperf') + iperf3_metric = 
Metric.objects.get(key='iperf3') self.assertEqual(Metric.objects.count(), 3) - self.assertEqual(iperf_metric.content_object, self.device) - points = iperf_metric.read(limit=None, extra_fields=list(result.keys())) + self.assertEqual(iperf3_metric.content_object, self.device) + points = iperf3_metric.read(limit=None, extra_fields=list(result.keys())) self.assertEqual(len(points), 1) - self.assertEqual(points[0]['iperf_result'], result['iperf_result']) + self.assertEqual(points[0]['iperf3_result'], result['iperf3_result']) self.assertEqual(points[0]['sent_bps_tcp'], result['sent_bps_tcp']) self.assertEqual( points[0]['received_bytes_tcp'], result['received_bytes_tcp'] @@ -476,10 +480,10 @@ def test_iperf_check(self, mock_warn, mock_exec_command): mock_warn.reset_mock() mock_exec_command.reset_mock() - with self.subTest('Test iperf check fails in both TCP & UDP'): + with self.subTest('Test iperf3 check fails in both TCP & UDP'): mock_exec_command.side_effect = [(RESULT_FAIL, 1), (RESULT_FAIL, 1)] - result = self._perform_iperf_check() - self._assert_iperf_fail_result(result) + result = self._perform_iperf3_check() + self._assert_iperf3_fail_result(result) self.assertEqual(Chart.objects.count(), 8) self.assertEqual(Metric.objects.count(), 3) self.assertEqual(mock_warn.call_count, 2) @@ -489,12 +493,12 @@ def test_iperf_check(self, mock_warn, mock_exec_command): mock_warn.reset_mock() mock_exec_command.reset_mock() - with self.subTest('Test iperf check TCP pass UDP fail'): + with self.subTest('Test iperf3 check TCP pass UDP fail'): mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_FAIL, 1)] - result = self._perform_iperf_check() + result = self._perform_iperf3_check() for key in self._RESULT_KEYS: self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 1) + self.assertEqual(result['iperf3_result'], 1) self.assertEqual( result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] ) @@ -523,12 +527,12 @@ def test_iperf_check(self, mock_warn, 
mock_exec_command): mock_warn.reset_mock() mock_exec_command.reset_mock() - with self.subTest('Test iperf check TCP fail UDP pass'): + with self.subTest('Test iperf3 check TCP fail UDP pass'): mock_exec_command.side_effect = [(RESULT_FAIL, 1), (RESULT_UDP, 0)] - result = self._perform_iperf_check() + result = self._perform_iperf3_check() for key in self._RESULT_KEYS: self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 1) + self.assertEqual(result['iperf3_result'], 1) self.assertEqual(result['sent_bps_tcp'], 0.0) self.assertEqual(result['received_bps_tcp'], 0.0) self.assertEqual(result['sent_bytes_tcp'], 0) @@ -547,27 +551,27 @@ def test_iperf_check(self, mock_warn, mock_exec_command): mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) @patch.object(Ssh, 'exec_command') - @patch.object(iperf_logger, 'warning') - def test_iperf_check_auth_config(self, mock_warn, mock_exec_command): - iperf_config = { + @patch.object(iperf3_logger, 'warning') + def test_iperf3_check_auth_config(self, mock_warn, mock_exec_command): + iperf3_config = { self.org_id: { - 'host': self._IPERF_TEST_SERVER, + 'host': self._IPERF3_TEST_SERVER, 'username': 'test', 'password': 'testpass', 'rsa_public_key': TEST_RSA_KEY, } } - iperf_conf_wrong_pass = { + iperf3_conf_wrong_pass = { self.org_id: { - 'host': self._IPERF_TEST_SERVER, + 'host': self._IPERF3_TEST_SERVER, 'username': 'test', 'password': 'wrongpass', 'rsa_public_key': TEST_RSA_KEY, } } - iperf_conf_wrong_user = { + iperf3_conf_wrong_user = { self.org_id: { - 'host': self._IPERF_TEST_SERVER, + 'host': self._IPERF3_TEST_SERVER, 'username': 'wronguser', 'password': 'testpass', 'rsa_public_key': TEST_RSA_KEY, @@ -578,23 +582,23 @@ def test_iperf_check_auth_config(self, mock_warn, mock_exec_command): udp_result = loads(RESULT_UDP)['end']['sum'] self._EXPECTED_WARN_CALLS = [ - call(f'Iperf check failed for "{self.device}", error - {auth_error}'), - call(f'Iperf check failed for "{self.device}", error - 
{auth_error}'), + call(f'Iperf3 check failed for "{self.device}", error - {auth_error}'), + call(f'Iperf3 check failed for "{self.device}", error - {auth_error}'), ] - with self.subTest('Test iperf check with right config'): + with self.subTest('Test iperf3 check with right config'): with patch.object( app_settings, - 'IPERF_CHECK_CONFIG', - iperf_config - # It is required to mock "Iperf.schema" here so that it - # uses the updated configuration from "IPERF_CHECK_CONFIG" setting. - ), patch.object(Iperf, 'schema', get_iperf_schema()): - self._set_auth_expected_calls(iperf_config) + 'IPERF3_CHECK_CONFIG', + iperf3_config + # It is required to mock "Iperf3.schema" here so that it + # uses the updated configuration from "IPERF3_CHECK_CONFIG" setting. + ), patch.object(Iperf3, 'schema', get_iperf3_schema()): + self._set_auth_expected_calls(iperf3_config) mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] - result = self._perform_iperf_check() + result = self._perform_iperf3_check() for key in self._RESULT_KEYS: self.assertIn(key, result) - self.assertEqual(result['iperf_result'], 1) + self.assertEqual(result['iperf3_result'], 1) self.assertEqual( result['sent_bps_tcp'], tcp_result['sum_sent']['bits_per_second'] ) @@ -609,18 +613,18 @@ def test_iperf_check_auth_config(self, mock_warn, mock_exec_command): mock_warn.reset_mock() mock_exec_command.reset_mock() - with self.subTest('Test iperf check with wrong password'): + with self.subTest('Test iperf3 check with wrong password'): with patch.object( - app_settings, 'IPERF_CHECK_CONFIG', iperf_conf_wrong_pass - ), patch.object(Iperf, 'schema', get_iperf_schema()): - self._set_auth_expected_calls(iperf_conf_wrong_pass) + app_settings, 'IPERF3_CHECK_CONFIG', iperf3_conf_wrong_pass + ), patch.object(Iperf3, 'schema', get_iperf3_schema()): + self._set_auth_expected_calls(iperf3_conf_wrong_pass) mock_exec_command.side_effect = [ (RESULT_AUTH_FAIL, 1), (RESULT_AUTH_FAIL, 1), ] - result = 
self._perform_iperf_check() - self._assert_iperf_fail_result(result) + result = self._perform_iperf3_check() + self._assert_iperf3_fail_result(result) self.assertEqual(mock_warn.call_count, 2) self.assertEqual(mock_exec_command.call_count, 2) mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) @@ -628,49 +632,49 @@ def test_iperf_check_auth_config(self, mock_warn, mock_exec_command): mock_warn.reset_mock() mock_exec_command.reset_mock() - with self.subTest('Test iperf check with wrong username'): + with self.subTest('Test iperf3 check with wrong username'): with patch.object( - app_settings, 'IPERF_CHECK_CONFIG', iperf_conf_wrong_user - ), patch.object(Iperf, 'schema', get_iperf_schema()): - self._set_auth_expected_calls(iperf_conf_wrong_user) + app_settings, 'IPERF3_CHECK_CONFIG', iperf3_conf_wrong_user + ), patch.object(Iperf3, 'schema', get_iperf3_schema()): + self._set_auth_expected_calls(iperf3_conf_wrong_user) mock_exec_command.side_effect = [ (RESULT_AUTH_FAIL, 1), (RESULT_AUTH_FAIL, 1), ] - result = self._perform_iperf_check() - self._assert_iperf_fail_result(result) + result = self._perform_iperf3_check() + self._assert_iperf3_fail_result(result) self.assertEqual(mock_warn.call_count, 2) self.assertEqual(mock_exec_command.call_count, 2) mock_warn.assert_has_calls(self._EXPECTED_WARN_CALLS) mock_exec_command.assert_has_calls(self._EXPECTED_COMMAND_CALLS) @patch.object(Ssh, 'exec_command') - @patch.object(iperf_logger, 'warning') - @patch.object(iperf_logger, 'info') + @patch.object(iperf3_logger, 'warning') + @patch.object(iperf3_logger, 'info') @patch.object(cache, 'add') - def test_iperf_check_task_with_multiple_server_config(self, *args): + def test_iperf3_check_task_with_multiple_server_config(self, *args): mock_add = args[0] mock_info = args[1] mock_warn = args[2] mock_exec_command = args[3] org = self.device.organization - iperf_multiple_server_config = { - self.org_id: {'host': self._IPERF_TEST_MULTIPLE_SERVERS} + iperf3_multiple_server_config = { 
+ self.org_id: {'host': self._IPERF3_TEST_MULTIPLE_SERVERS} } - check = Check.objects.get(check_type=self._IPERF) + check = Check.objects.get(check_type=self._IPERF3) self._EXPECTED_COMMAND_CALLS_SERVER_1 = [ call( ( - f'iperf3 -c {self._IPERF_TEST_MULTIPLE_SERVERS[0]} -p 5201 -t 10 --connect-timeout 1 ' + f'iperf3 -c {self._IPERF3_TEST_MULTIPLE_SERVERS[0]} -p 5201 -t 10 --connect-timeout 1 ' '-b 0 -l 128K -w 0 -P 1 -J' ), raise_unexpected_exit=False, ), call( ( - f'iperf3 -c {self._IPERF_TEST_MULTIPLE_SERVERS[0]} -p 5201 -t 10 --connect-timeout 1 ' + f'iperf3 -c {self._IPERF3_TEST_MULTIPLE_SERVERS[0]} -p 5201 -t 10 --connect-timeout 1 ' '-b 30M -l 0 -w 0 -P 1 -u -J' ), raise_unexpected_exit=False, @@ -679,27 +683,27 @@ def test_iperf_check_task_with_multiple_server_config(self, *args): self._EXPECTED_COMMAND_CALLS_SERVER_2 = [ call( ( - f'iperf3 -c {self._IPERF_TEST_MULTIPLE_SERVERS[1]} -p 5201 -t 10 --connect-timeout 1 ' + f'iperf3 -c {self._IPERF3_TEST_MULTIPLE_SERVERS[1]} -p 5201 -t 10 --connect-timeout 1 ' '-b 0 -l 128K -w 0 -P 1 -J' ), raise_unexpected_exit=False, ), call( ( - f'iperf3 -c {self._IPERF_TEST_MULTIPLE_SERVERS[1]} -p 5201 -t 10 --connect-timeout 1 ' + f'iperf3 -c {self._IPERF3_TEST_MULTIPLE_SERVERS[1]} -p 5201 -t 10 --connect-timeout 1 ' '-b 30M -l 0 -w 0 -P 1 -u -J' ), raise_unexpected_exit=False, ), ] - with patch.object(app_settings, 'IPERF_CHECK_CONFIG', {}): - with self.subTest('Test iperf check without config'): - self._perform_iperf_check() + with patch.object(app_settings, 'IPERF3_CHECK_CONFIG', {}): + with self.subTest('Test iperf3 check without config'): + self._perform_iperf3_check() mock_warn.assert_called_with( ( - f'Iperf servers for organization "{org}" ' - f'is not configured properly, iperf check skipped!' + f'Iperf3 servers for organization "{org}" ' + f'is not configured properly, iperf3 check skipped!' 
) ) self.assertEqual(mock_warn.call_count, 1) @@ -707,27 +711,29 @@ def test_iperf_check_task_with_multiple_server_config(self, *args): with patch.object( app_settings, - 'IPERF_CHECK_CONFIG', - {'invalid_org_uuid': {'host': self._IPERF_TEST_SERVER, 'time': 10}}, + 'IPERF3_CHECK_CONFIG', + {'invalid_org_uuid': {'host': self._IPERF3_TEST_SERVER, 'time': 10}}, ): - with self.subTest('Test iperf check with invalid config'): - self._perform_iperf_check() + with self.subTest('Test iperf3 check with invalid config'): + self._perform_iperf3_check() mock_warn.assert_called_with( ( - f'Iperf servers for organization "{org}" ' - f'is not configured properly, iperf check skipped!' + f'Iperf3 servers for organization "{org}" ' + f'is not configured properly, iperf3 check skipped!' ) ) self.assertEqual(mock_warn.call_count, 1) mock_warn.reset_mock() with patch.object( - app_settings, 'IPERF_CHECK_CONFIG', iperf_multiple_server_config + app_settings, 'IPERF3_CHECK_CONFIG', iperf3_multiple_server_config ): - with self.subTest('Test iperf check when all iperf servers are available'): + with self.subTest( + 'Test iperf3 check when all iperf3 servers are available' + ): mock_add.return_value = True mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] - self._perform_iperf_check() + self._perform_iperf3_check() self.assertEqual(mock_warn.call_count, 0) self.assertEqual(mock_add.call_count, 1) self.assertEqual(mock_exec_command.call_count, 2) @@ -739,11 +745,11 @@ def test_iperf_check_task_with_multiple_server_config(self, *args): mock_exec_command.reset_mock() with self.subTest( - 'Test iperf check when single iperf server are available' + 'Test iperf3 check when single iperf3 server are available' ): mock_add.side_effect = [False, True] mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] - self._perform_iperf_check() + self._perform_iperf3_check() self.assertEqual(mock_warn.call_count, 0) self.assertEqual(mock_add.call_count, 2) 
self.assertEqual(mock_exec_command.call_count, 2) @@ -755,17 +761,17 @@ def test_iperf_check_task_with_multiple_server_config(self, *args): mock_exec_command.reset_mock() with self.subTest( - 'Test iperf check when all iperf servers are occupied initially' + 'Test iperf3 check when all iperf3 servers are occupied initially' ): - # If all available iperf servers are occupied initially, - # then push the task back in the queue and acquire the iperf + # If all available iperf3 servers are occupied initially, + # then push the task back in the queue and acquire the iperf3 # server only after completion of previous running checks mock_add.side_effect = [False, False, True] mock_exec_command.side_effect = [(RESULT_TCP, 0), (RESULT_UDP, 0)] - self._perform_iperf_check() + self._perform_iperf3_check() mock_info.has_called_with( ( - f'At the moment, all available iperf servers of organization "{org}" ' + f'At the moment, all available iperf3 servers of organization "{org}" ' f'are busy running checks, putting "{check}" back in the queue..' ) ) diff --git a/openwisp_monitoring/check/tests/test_models.py b/openwisp_monitoring/check/tests/test_models.py index 3bb8e13f6..93ae7a98a 100644 --- a/openwisp_monitoring/check/tests/test_models.py +++ b/openwisp_monitoring/check/tests/test_models.py @@ -9,8 +9,8 @@ from ...device.tests import TestDeviceMonitoringMixin from .. 
import settings as app_settings -from ..classes import ConfigApplied, Iperf, Ping -from ..tasks import auto_create_config_check, auto_create_iperf_check, auto_create_ping +from ..classes import ConfigApplied, Iperf3, Ping +from ..tasks import auto_create_config_check, auto_create_iperf3_check, auto_create_ping Check = load_model('check', 'Check') Metric = load_model('monitoring', 'Metric') @@ -22,7 +22,7 @@ class TestModels(TestDeviceMonitoringMixin, TransactionTestCase): _PING = app_settings.CHECK_CLASSES[0][0] _CONFIG_APPLIED = app_settings.CHECK_CLASSES[1][0] - _IPERF = app_settings.CHECK_CLASSES[2][0] + _IPERF3 = app_settings.CHECK_CLASSES[2][0] def test_check_str(self): c = Check(name='Test check') @@ -49,12 +49,12 @@ def test_check_class(self): check_type=self._CONFIG_APPLIED, ) self.assertEqual(c.check_class, ConfigApplied) - with self.subTest('Test Iperf check Class'): + with self.subTest('Test Iperf3 check Class'): c = Check( - name='Iperf class check', - check_type=self._IPERF, + name='Iperf3 class check', + check_type=self._IPERF3, ) - self.assertEqual(c.check_class, Iperf) + self.assertEqual(c.check_class, Iperf3) def test_base_check_class(self): path = 'openwisp_monitoring.check.classes.base.BaseCheck' @@ -89,15 +89,15 @@ def test_check_instance(self): self.assertEqual(i.related_object, obj) self.assertEqual(i.params, c.params) - with self.subTest('Test Iperf check instance'): + with self.subTest('Test Iperf3 check instance'): c = Check( - name='Iperf class check', - check_type=self._IPERF, + name='Iperf3 class check', + check_type=self._IPERF3, content_object=obj, params={}, ) i = c.check_instance - self.assertIsInstance(i, Iperf) + self.assertIsInstance(i, Iperf3) self.assertEqual(i.related_object, obj) self.assertEqual(i.params, c.params) @@ -133,10 +133,10 @@ def test_auto_check_creation(self): c2 = Check.objects.filter(check_type=self._CONFIG_APPLIED).first() self.assertEqual(c2.content_object, d) self.assertEqual(self._CONFIG_APPLIED, 
c2.check_type) - with self.subTest('Test AUTO_IPERF'): - c3 = Check.objects.filter(check_type=self._IPERF).first() + with self.subTest('Test AUTO_IPERF3'): + c3 = Check.objects.filter(check_type=self._IPERF3).first() self.assertEqual(c3.content_object, d) - self.assertEqual(self._IPERF, c3.check_type) + self.assertEqual(self._IPERF3, c3.check_type) def test_device_deleted(self): self.assertEqual(Check.objects.count(), 0) @@ -246,7 +246,7 @@ def test_no_duplicate_check_created(self): app_label=Device._meta.app_label, object_id=str(d.pk), ) - auto_create_iperf_check.delay( + auto_create_iperf3_check.delay( model=Device.__name__.lower(), app_label=Device._meta.app_label, object_id=str(d.pk), diff --git a/openwisp_monitoring/device/tests/test_admin.py b/openwisp_monitoring/device/tests/test_admin.py index cbec4dc19..586df4610 100644 --- a/openwisp_monitoring/device/tests/test_admin.py +++ b/openwisp_monitoring/device/tests/test_admin.py @@ -512,7 +512,7 @@ def _assert_alertsettings_inline_in_response(response): def test_alert_settings_inline_post(self): device = self._create_device() metric = self._create_general_metric( - name='', content_object=device, configuration='iperf' + name='', content_object=device, configuration='iperf3' ) url = reverse('admin:config_device_change', args=[device.pk]) alertsettings = self._create_alert_settings(metric=metric) @@ -526,7 +526,7 @@ def test_alert_settings_inline_post(self): f'{metric_model_name}-content_type-object_id-INITIAL_FORMS': '1', f'{metric_model_name}-content_type-object_id-MIN_NUM_FORMS': '0', f'{metric_model_name}-content_type-object_id-MAX_NUM_FORMS': '1000', - f'{metric_model_name}-content_type-object_id-0-field_name': 'iperf_result', + f'{metric_model_name}-content_type-object_id-0-field_name': 'iperf3_result', f'{metric_model_name}-content_type-object_id-0-id': str(metric.id), f'{metric_model_name}-content_type-object_id-0-alertsettings-TOTAL_FORMS': '1', 
f'{metric_model_name}-content_type-object_id-0-alertsettings-INITIAL_FORMS': '0', @@ -539,7 +539,7 @@ def test_alert_settings_inline_post(self): f'{metric_model_name}-content_type-object_id-0-alertsettings-0-id': '', f'{metric_model_name}-content_type-object_id-0-alertsettings-0-metric': '', } - # General metrics (clients & traffic) & Iperf are present + # General metrics (clients & traffic) & Iperf3 are present self.assertEqual(Metric.objects.count(), 3) self.assertEqual(AlertSettings.objects.count(), 1) @@ -548,8 +548,8 @@ def _reset_alertsettings_inline(): # Delete AlertSettings objects before any subTests _reset_alertsettings_inline() - # Delete all Metrics other than 'iperf' before any subTests - Metric.objects.exclude(configuration='iperf').delete() + # Delete all Metrics other than 'iperf3' before any subTests + Metric.objects.exclude(configuration='iperf3').delete() def _assert_alertsettings_inline(response, operator, threshold, tolerance): self.assertEqual(response.status_code, 302) @@ -607,7 +607,7 @@ def _assert_alertsettings_inline(response, operator, threshold, tolerance): test_inline_params_absent = { f'{metric_model_name}-content_type-object_id-INITIAL_FORMS': '1', f'{metric_model_name}-content_type-object_id-0-id': str(metric.id), - f'{metric_model_name}-content_type-object_id-0-field_name': 'iperf_result', + f'{metric_model_name}-content_type-object_id-0-field_name': 'iperf3_result', f'{metric_model_name}-content_type-object_id-0-alertsettings-INITIAL_FORMS': '1', f'{metric_model_name}-content_type-object_id-0-alertsettings-0-id': str( alertsettings.id diff --git a/openwisp_monitoring/monitoring/configuration.py b/openwisp_monitoring/monitoring/configuration.py index 2688f96d5..448771222 100644 --- a/openwisp_monitoring/monitoring/configuration.py +++ b/openwisp_monitoring/monitoring/configuration.py @@ -545,11 +545,11 @@ def _get_access_tech(): } }, }, - 'iperf': { - 'label': _('Iperf'), - 'name': 'Iperf', - 'key': 'iperf', - 'field_name': 
'iperf_result', + 'iperf3': { + 'label': _('Iperf3'), + 'name': 'Iperf3', + 'key': 'iperf3', + 'field_name': 'iperf3_result', 'related_fields': [ 'sent_bps_tcp', 'received_bps_tcp', diff --git a/tests/openwisp2/settings.py b/tests/openwisp2/settings.py index 87f4e706c..7c9ba5665 100644 --- a/tests/openwisp2/settings.py +++ b/tests/openwisp2/settings.py @@ -171,7 +171,7 @@ CELERY_BROKER_URL = 'memory://' # Celery TIME_ZONE should be equal to django TIME_ZONE -# In order to schedule run_iperf_checks on the correct time intervals +# In order to schedule run_iperf3_checks on the correct time intervals CELERY_TIMEZONE = TIME_ZONE CELERY_BEAT_SCHEDULE = { @@ -187,12 +187,12 @@ ), 'relative': True, }, - 'run_iperf_checks': { + 'run_iperf3_checks': { 'task': 'openwisp_monitoring.check.tasks.run_checks', # https://docs.celeryq.dev/en/latest/userguide/periodic-tasks.html#crontab-schedules - # Executes only iperf check every 5 mins from 00:00 AM to 6:00 AM (night) + # Executes only iperf3 check every 5 mins from 00:00 AM to 6:00 AM (night) 'schedule': crontab(minute='*/5', hour='0-6'), - 'args': (['openwisp_monitoring.check.classes.Iperf'],), + 'args': (['openwisp_monitoring.check.classes.Iperf3'],), 'relative': True, }, } @@ -216,8 +216,8 @@ OPENWISP_MONITORING_MAC_VENDOR_DETECTION = False OPENWISP_MONITORING_API_URLCONF = 'openwisp_monitoring.urls' OPENWISP_MONITORING_API_BASEURL = 'http://testserver' - # for testing AUTO_IPERF - OPENWISP_MONITORING_AUTO_IPERF = True + # for testing AUTO_IPERF3 + OPENWISP_MONITORING_AUTO_IPERF3 = True # Temporarily added to identify slow tests TEST_RUNNER = 'openwisp_utils.tests.TimeLoggingTestRunner'