test(perf): Overlay time series instead of concatenating
Previously, we concatenated the time series of tests that produce data
from multiple vcpus. Now we overlay them instead, to accurately record
the net throughput for each one-second interval across all workers.
This makes no difference for permutation tests, but it does allow us to
exploit some properties of averages of time series across different
parameterizations.

Signed-off-by: Patrick Roy <[email protected]>
roypat committed Oct 4, 2023
1 parent f0066b5 commit 9edffeb
Showing 2 changed files with 39 additions and 19 deletions.
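To make the distinction concrete, here is a small sketch (not part of the commit) using made-up per-second throughput samples from two workers: concatenation strings the per-worker samples together, while overlaying sums the samples that belong to the same one-second interval, so each data point is the net throughput across all workers.

    # Toy example with made-up per-second throughput samples (bits/second)
    # from two workers; not taken from the commit or from real test output.
    worker_a = [100, 110, 105]
    worker_b = [90, 95, 100]

    # Concatenating (the old behaviour) treats every per-worker sample as
    # its own data point.
    concatenated = worker_a + worker_b
    print(concatenated)  # [100, 110, 105, 90, 95, 100]

    # Overlaying (the new behaviour) sums the samples that fall into the
    # same one-second interval, giving the net throughput across workers.
    overlaid = [sum(samples) for samples in zip(worker_a, worker_b)]
    print(overlaid)  # [190, 205, 205]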
34 changes: 20 additions & 14 deletions tests/framework/utils_iperf.py
@@ -180,18 +180,24 @@ def emit_iperf3_metrics(metrics, iperf_result, omit):
     )[0]:
         metrics.put_metric("cpu_utilization_vmm", cpu_util_data_point, "Percent")
 
-    for time_series in iperf_result["g2h"]:
-        for interval in time_series["intervals"][omit:]:
-            metrics.put_metric(
-                "throughput_guest_to_host",
-                interval["sum"]["bits_per_second"],
-                "Bits/Second",
-            )
+    data_points = zip(
+        *[time_series["intervals"][omit:] for time_series in iperf_result["g2h"]]
+    )
+
+    for point_in_time in data_points:
+        metrics.put_metric(
+            "throughput_guest_to_host",
+            sum(interval["sum"]["bits_per_second"] for interval in point_in_time),
+            "Bits/Second",
+        )
 
-    for time_series in iperf_result["h2g"]:
-        for interval in time_series["intervals"][omit:]:
-            metrics.put_metric(
-                "throughput_host_to_guest",
-                interval["sum"]["bits_per_second"],
-                "Bits/Second",
-            )
+    data_points = zip(
+        *[time_series["intervals"][omit:] for time_series in iperf_result["h2g"]]
+    )
+
+    for point_in_time in data_points:
+        metrics.put_metric(
+            "throughput_host_to_guest",
+            sum(interval["sum"]["bits_per_second"] for interval in point_in_time),
+            "Bits/Second",
+        )
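For illustration only, the new zip/sum pattern can be exercised on a hypothetical, heavily trimmed stand-in for iperf_result["g2h"] (only the keys the code above touches; real iperf3 JSON output carries many more fields):

    # Hypothetical, trimmed-down stand-in for iperf_result["g2h"]: one time
    # series per worker, each with per-second "intervals" as in iperf3's
    # JSON output. Only the keys used by emit_iperf3_metrics appear here.
    g2h = [
        {"intervals": [{"sum": {"bits_per_second": 100}}, {"sum": {"bits_per_second": 110}}]},
        {"intervals": [{"sum": {"bits_per_second": 90}}, {"sum": {"bits_per_second": 95}}]},
    ]
    omit = 0  # number of warm-up intervals dropped from the start of each series

    # zip(*...) pairs up the intervals that describe the same second across
    # all workers; summing within each pair yields one net-throughput data
    # point per second instead of one data point per worker per second.
    data_points = zip(*[time_series["intervals"][omit:] for time_series in g2h])
    for point_in_time in data_points:
        print(sum(interval["sum"]["bits_per_second"] for interval in point_in_time))
    # Prints 190, then 205.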
24 changes: 19 additions & 5 deletions tests/integration_tests/performance/test_block_ab.py
@@ -106,22 +106,36 @@ def run_fio(microvm, mode, block_size):
 
 def process_fio_logs(vm, fio_mode, logs_dir, metrics):
     """Parses the fio logs in `{logs_dir}/{fio_mode}_bw.*.log and emits their contents as CloudWatch metrics"""
-    for job_id in range(vm.vcpus_count):
-        data = Path(f"{logs_dir}/{fio_mode}_bw.{job_id + 1}.log").read_text("UTF-8")
 
-        for line in data.splitlines():
+    data = [
+        Path(f"{logs_dir}/{fio_mode}_bw.{job_id + 1}.log")
+        .read_text("UTF-8")
+        .splitlines()
+        for job_id in range(vm.vcpus_count)
+    ]
+
+    for tup in zip(*data):
+        bw_read = 0
+        bw_write = 0
+
+        for line in tup:
             _, value, direction, _ = line.split(",", maxsplit=3)
             value = int(value.strip())
 
             # See https://fio.readthedocs.io/en/latest/fio_doc.html#log-file-formats
             match direction.strip():
                 case "0":
-                    metrics.put_metric("bw_read", value, "Kilobytes/Second")
+                    bw_read += value
                 case "1":
-                    metrics.put_metric("bw_write", value, "Kilobytes/Second")
+                    bw_write += value
                 case _:
                     assert False
 
+        if bw_read:
+            metrics.put_metric("bw_read", bw_read, "Kilobytes/Second")
+        if bw_write:
+            metrics.put_metric("bw_write", bw_write, "Kilobytes/Second")
+
 
 @pytest.mark.nonci
 @pytest.mark.timeout(RUNTIME_SEC * 1000)  # 1.40 hours
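The fio change follows the same pattern. Each `{fio_mode}_bw.N.log` file contains one sample per line in fio's log format (time in msec, bandwidth value, data direction with 0 for reads and 1 for writes, then further fields; see the fio documentation linked in the code), and the rewritten loop walks the per-vcpu logs in lockstep, summing the values that share a timestamp. A minimal sketch on made-up log lines:

    # Made-up fio bandwidth-log lines for two jobs (one per vcpu); the
    # parsing below mirrors the loop body in process_fio_logs.
    job_logs = [
        ["1000, 5000, 0, 4096, 0", "2000, 5100, 0, 4096, 0"],  # vcpu 0
        ["1000, 4900, 0, 4096, 0", "2000, 5050, 0, 4096, 0"],  # vcpu 1
    ]

    # Each tuple holds the samples all jobs wrote for the same point in
    # time, so summing them gives the net bandwidth at that instant.
    for tup in zip(*job_logs):
        bw_read = 0
        bw_write = 0
        for line in tup:
            _, value, direction, _ = line.split(",", maxsplit=3)
            if direction.strip() == "0":
                bw_read += int(value.strip())
            else:
                bw_write += int(value.strip())
        print(bw_read, bw_write)
    # Prints "9900 0", then "10150 0".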