Skip to content

Commit

Permalink
Add computational resource management. (#15)
Browse files Browse the repository at this point in the history
* Add computational resource management.

* Black formatting

* Use integer type for cpu.

* Add other units of memory as well.

* Fix formatting

* Update CHANGELOG

* Update date in CHANGELOG

* Also test pb and tb

* Update CHANGELOG

* ADd pb and tb to makepyfile_and_add_meta

* Fix formatting
  • Loading branch information
DriesSchaumont authored Oct 3, 2023
1 parent b876d19 commit 6131f4d
Show file tree
Hide file tree
Showing 6 changed files with 311 additions and 58 deletions.
17 changes: 17 additions & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,23 @@
Changelog
*********

0.4.0 (3/10/2023)
=================

New functionality
-----------------

* `run_component` now passes the `cpus`, `memory_b`, `memory_kb`, `memory_mb`, `memory_gb`, `memory_tb` and `memory_pb`
keys, defined in the `meta` dictionary of the test module,
as memory and cpu constraints to the executed component. The `cpus` and all memory keys
can be set by using `viash (ns) test` with `--cpus` or `--memory` respectively.
The memory and cpu fields can also be set to a hardcoded value in the test script. In this case,
care should be taken to only specify one of `memory_b`, `memory_kb`, `memory_mb`, `memory_gb`, `memory_tb`, `memory_pb`.
If more than one value for memory resources is defined and a conflict exists between the values,
the value from the largest unit of measure is used.

* Added `memory_bytes` and `cpus` fixtures.

0.3.2 (07/06/2023)
==================

Expand Down
54 changes: 44 additions & 10 deletions tests/unittests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,17 +7,51 @@

@pytest.fixture
def makepyfile_and_add_meta(pytester, write_config):
def wrapper(test_module_contents, viash_config, viash_executable):
def wrapper(
test_module_contents,
viash_config,
viash_executable,
cpu=None,
memory_pb=None,
memory_tb=None,
memory_gb=None,
memory_mb=None,
memory_kb=None,
memory_b=None,
):
config_file = write_config(viash_config)
to_insert = f"""\
try:
meta["config"] = "{str(config_file)}"
except NameError:
meta = {{"config": "{str(config_file)}"}}
meta["executable"] = "{viash_executable}"
"""

parsed_to_insert = ast.parse(dedent(to_insert))
to_insert = dedent(
f"""\
try:
meta["config"] = "{str(config_file)}"
except NameError:
meta = {{"config": "{str(config_file)}"}}
meta["executable"] = "{viash_executable}"
"""
)
if cpu:
to_insert += dedent(
f"""\
meta["cpus"] = {cpu}
"""
)
memory_specifiers = {
"memory_pb": memory_pb,
"memory_tb": memory_tb,
"memory_gb": memory_gb,
"memory_mb": memory_mb,
"memory_kb": memory_kb,
"memory_b": memory_b,
}
for memory_specifier, memory_value in memory_specifiers.items():
if memory_value:
to_insert += dedent(
f"""\
meta["{memory_specifier}"] = {memory_value}
"""
)

parsed_to_insert = ast.parse(to_insert)
parsed_module_contents = ast.parse(dedent(test_module_contents))
i = 0
for i, elem in enumerate(parsed_module_contents.body):
Expand Down
152 changes: 121 additions & 31 deletions tests/unittests/fixtures/test_run_component.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,7 @@ def test_loading_run_component(run_component):

# run pytest with the following cmd args
result = pytester.runpytest("-v")

# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines(
[
"*::test_loading_run_component PASSED*",
]
)
assert result.ret == 0
result.assert_outcomes(passed=1)


def test_run_component_no_meta_variable_raises(pytester):
Expand Down Expand Up @@ -54,15 +47,120 @@ def test_loading_run_component(run_component):


@pytest.mark.parametrize(
"config_fixture, expected",
"memory_pb, memory_tb, memory_gb, memory_mb, memory_kb, memory_b, expected_bytes, expected_warning",
[
(None, None, None, None, None, None, None, False), # Not specified
(
6,
6144,
6291456,
6442450944,
6597069766656,
None,
6755399441055744,
False,
), # Memory specified and the same
(
None,
None,
3,
6144,
6291456,
None,
3221225472,
True,
), # Memory specified and different, pick the largest
(None, None, 6, None, None, None, 6442450944, False), # Only one specified
(None, None, 6.5, None, None, None, 6979321856, False),
(None, None, 3.5, 6144, 6291456, None, 3758096384, True),
(None, None, 6, 6144.5, 6291456, None, 6442450944, True),
],
)
def test_run_component_different_memory_specification_warnings(
dummy_config,
pytester,
makepyfile_and_add_meta,
memory_pb,
memory_tb,
memory_gb,
memory_mb,
memory_kb,
memory_b,
expected_bytes,
expected_warning,
):
expected_memory_args = ""
if any([memory_pb, memory_tb, memory_gb, memory_mb, memory_kb, memory_b]):
expected_memory_args = f', "--memory", "{expected_bytes}B"'
expected = (
'["viash", "run", Path(meta["config"]), "--", "bar"%s]' % expected_memory_args
)
makepyfile_and_add_meta(
f"""
import subprocess
from pathlib import Path
def test_loading_run_component(mocker, run_component):
mocked_check_output = mocker.patch('viashpy._run.check_output',
return_value=b"Some dummy output")
mocked_path = mocker.patch('viashpy.testing.Path.is_file', return_value=True)
stdout = run_component(["bar"])
mocked_check_output.assert_called_once_with({expected},
stderr=subprocess.STDOUT)
assert stdout == b"Some dummy output"
""",
dummy_config,
"foo",
memory_pb=memory_pb,
memory_tb=memory_tb,
memory_gb=memory_gb,
memory_mb=memory_mb,
memory_kb=memory_kb,
memory_b=memory_b,
)
result = pytester.runpytest()
expected_outcome_dict = (
{"passed": 1, "warnings": 1} if expected_warning else {"passed": 1}
)
result.assert_outcomes(**expected_outcome_dict)
if expected_warning:
result.stdout.fnmatch_lines(
[
"*Different values were defined in the 'meta' dictionairy that limit memory, choosing the one with the largest unit.*"
]
)
assert result.ret == 0


@pytest.mark.parametrize("memory, expected_bytes", [(None, None), (6, 6442450944)])
@pytest.mark.parametrize("cpu", [None, 2])
@pytest.mark.parametrize(
"config_fixture, expected, arg_prefix",
[
("dummy_config", '["viash", "run", Path(meta["config"]), "--", "bar"]'),
("dummy_config_with_info", '[Path("foo"), "bar"]'),
(
"dummy_config",
'["viash", "run", Path(meta["config"]), "--", "bar"%s%s]',
"--",
),
("dummy_config_with_info", '[Path("foo"), "bar"%s%s]', "---"),
],
)
def test_run_component_executes_subprocess(
request, pytester, makepyfile_and_add_meta, config_fixture, expected
request,
pytester,
makepyfile_and_add_meta,
memory,
expected_bytes,
cpu,
config_fixture,
expected,
arg_prefix,
):
format_string = (
f', "{arg_prefix}cpus", "{cpu}"' if cpu else "",
f', "{arg_prefix}memory", "{expected_bytes}B"' if memory else "",
)
expected = expected % format_string
makepyfile_and_add_meta(
f"""
import subprocess
Expand All @@ -79,14 +177,11 @@ def test_loading_run_component(mocker, run_component):
""",
request.getfixturevalue(config_fixture),
"foo",
cpu=cpu,
memory_gb=memory,
)
result = pytester.runpytest("-v")
result.stdout.fnmatch_lines(
[
"*::test_loading_run_component PASSED*",
]
)
assert result.ret == 0
result.assert_outcomes(passed=1)


def test_run_component_executable_does_not_exist_raises(
Expand Down Expand Up @@ -137,12 +232,8 @@ def test_loading_run_component(mocker, run_component):
executable,
)
result = pytester.runpytest()
result.assert_outcomes(failed=1)
# Check if output from component is shown on error
result.stdout.fnmatch_lines(
[
"*FAILED test_run_component_fails_logging.py::test_loading_run_component*",
]
)
result.stdout.fnmatch_lines(
[
"*This script should fail*",
Expand All @@ -151,21 +242,20 @@ def test_loading_run_component(mocker, run_component):
# Check if stack traces are hidden
result.stdout.no_fnmatch_line("*def wrapper*")
result.stdout.no_fnmatch_line("*def run_component*")
assert result.ret == 1


@pytest.mark.parametrize(
"message_to_check, expected_outcome, expected_exitcode",
"message_to_check, expected_outcome, should_fail",
[
(
"RuntimeError: This script should fail",
"*test_run_component_fails_capturing.py::test_loading_run_component PASSED*",
0,
False,
),
(
"something_something_this_will_not_work",
"*test_run_component_fails_capturing.py::test_loading_run_component FAILED*",
1,
True,
),
],
)
Expand All @@ -175,7 +265,7 @@ def test_run_component_fails_capturing(
dummy_config_with_info,
message_to_check,
expected_outcome,
expected_exitcode,
should_fail,
):
executable = pytester.makefile(
"",
Expand All @@ -200,11 +290,11 @@ def test_loading_run_component(mocker, run_component):
executable,
)
result = pytester.runpytest("-v")
expected_outcome_dict = {"failed": 1} if should_fail else {"passed": 1}
result.assert_outcomes(**expected_outcome_dict)

# Check if output from component is shown on error
result.stdout.fnmatch_lines([expected_outcome])
if expected_exitcode == 0:
result.stdout.no_fnmatch_line("*This script should fail*")
# Check if stack traces are hidden
result.stdout.no_fnmatch_line("*def wrapper*")
result.stdout.no_fnmatch_line("*def run_component*")
assert result.ret == expected_exitcode
2 changes: 1 addition & 1 deletion tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ commands =
[testenv:flake8]
skip_install = true
deps = flake8
commands = flake8 viashpy tests
commands = flake8 viashpy tests --exclude viashpy/__version__.py

[coverage:run]
branch = True
Expand Down
Loading

0 comments on commit 6131f4d

Please sign in to comment.