Feat: tox implementation #1989

Merged: 8 commits, Jan 8, 2025
Changes from all commits
3 changes: 3 additions & 0 deletions .ci/build_wheel.py
@@ -15,6 +15,9 @@
"win": "win_amd64",
"manylinux1": "manylinux1_x86_64",
"manylinux_2_17": "manylinux_2_17_x86_64",
"linux": "manylinux_2_17_x86_64", # Accommodate tox.ini platform substitutions
"win32": "win_amd64",
"darwin": "any",
}

argParser = argparse.ArgumentParser()
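The mapping above is only the tail of a dictionary in `.ci/build_wheel.py`; the rest of the script is not shown in this diff. Below is a minimal sketch of how such a mapping could be consumed. The dictionary name, the long option names, and the default value are assumptions; only the `-p` and `-w` flags are confirmed by the tox command further down.

import argparse
import sys

# Assumed dictionary name; its last entries are the ones shown in the diff above.
# It maps the value of the -p argument to a wheel platform tag.
argument_to_platform = {
    "win": "win_amd64",
    "manylinux1": "manylinux1_x86_64",
    "manylinux_2_17": "manylinux_2_17_x86_64",
    "linux": "manylinux_2_17_x86_64",  # tox {on_platform} value on Linux
    "win32": "win_amd64",              # tox {on_platform} value on Windows
    "darwin": "any",                   # tox {on_platform} value on macOS
}

argParser = argparse.ArgumentParser()
argParser.add_argument("-p", "--platform", default=sys.platform, help="platform key or tox {on_platform} value")
argParser.add_argument("-w", "--wheel", action="store_true", help="build a wheel")
args = argParser.parse_args()

# Resolve the requested platform key to the wheel platform tag used for the build.
wheel_platform = argument_to_platform[args.platform]
print(f"Target wheel platform tag: {wheel_platform}")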
118 changes: 118 additions & 0 deletions tox.ini
@@ -0,0 +1,118 @@
# This is a work in progress; the local and CI testing workflow is gradually being transferred to tox.

# Usage instructions:
# `tox` runs all tests sequentially; `tox --parallel` runs them all in parallel (much faster).
# Run a specific selection of tests with `tox -e pretest,<list-of-tests>,posttest`, e.g. `tox -e pretest,test-api,test-launcher,posttest`.
# The `--parallel` flag can also be passed when running specific selections.

[tox]
description = Default tox environment list and core configurations

# List all tests to run here, so that a plain `tox` invocation runs them all sequentially
# and `tox --parallel` runs them all in parallel.
envlist = pretest,test-{api,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,operators},posttest

isolated_build_env = build

[testenv]
description = Default configuration for test environments, unless overridden

pass_env =
PACKAGE_NAME
MODULE
ANSYS_DPF_ACCEPT_LA
ANSYSLMD_LICENSE_FILE
AWP_ROOT242

package = external # To allow custom wheel builds

[testenv:build_external]
description = Environment for building custom package wheels, which addresses the PyDPF custom wheel-building requirement

package_glob = {toxinidir}{/}dist{/}ansys_dpf_core*

# The {on_platform} substitution automatically detects the OS type.
commands =
python .ci/build_wheel.py -p {on_platform} -w
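# For example, on Linux this typically expands to `python .ci/build_wheel.py -p linux -w`
# (and to `-p win32` / `-p darwin` on Windows / macOS); build_wheel.py then resolves
# these keys to wheel platform tags via its platform mapping.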

[testenv:pretest]
description = Environment to kill servers and organize test files prior to testing

deps =
psutil

skip_install = True

commands =
# Clear any running servers that may be locking resources
python -c "import psutil; proc_name = 'Ans.Dpf.Grpc'; nb_procs = len([proc.kill() for proc in psutil.process_iter() if proc_name in proc.name()]); \
print(f'Killed \{nb_procs} \{proc_name} processes.')"

# Organize test files
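# Each module listed in test_data below is copied into its own top-level directory
# together with a copy of tests/conftest.py, and the original file is removed from tests/
# (e.g. tests/test_launcher.py becomes test_launcher/test_launcher.py), so that each
# test-* environment can point pytest at an isolated directory.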
python -c "\
import os, shutil; \
test_data=['test_launcher','test_server','test_local_server','test_multi_server','test_workflow','test_remote_workflow','test_remote_operator','test_service','test_custom_type_field']; \
[(os.makedirs(d, exist_ok=True), shutil.copy('tests/conftest.py', d), shutil.copy(f'tests/\{d}.py', d) if os.path.exists(f'tests/\{d}.py') else None) for d in test_data]; \
[os.remove(f'tests/\{d}.py') for d in test_data if os.path.exists(f'tests/\{d}.py')]"

[testenv:posttest]
description = Environment to kill servers and revert test files to original state after testing

depends = pretest, test-{api,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,operators}

deps =
psutil

skip_install = True

commands =
# Revert the project layout to its previous state
python -c "\
import os, shutil; \
test_data=['test_launcher','test_server','test_local_server','test_multi_server','test_workflow','test_remote_workflow','test_remote_operator','test_service', 'test_custom_type_field']; \
[shutil.move(f'\{d}/\{d}.py', f'tests/\{d}.py') for d in test_data if os.path.exists(f'\{d}/\{d}.py')]; \
[shutil.rmtree(d) for d in test_data if os.path.exists(d)]"

# Clear any running servers that may be locking resources
python -c "import psutil; proc_name = 'Ans.Dpf.Grpc'; nb_procs = len([proc.kill() for proc in psutil.process_iter() if proc_name in proc.name()]); \
print(f'Killed \{nb_procs} \{proc_name} processes.')"

[testenv:test-{api,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,operators}]
description = Environment where project testing configuration is defined

depends = pretest

setenv =
# Pytest extra arguments
COVERAGE = --cov=ansys.dpf.core --cov-report=xml --cov-report=html --log-level=ERROR --cov-append
RERUNS = --reruns=2 --reruns-delay=1
DEBUG = -v -s --durations=10 --durations-min=1.0

api: JUNITXML = --junitxml=tests/junit/test-results.xml
launcher: JUNITXML = --junitxml=tests/junit/test-results2.xml
server: JUNITXML = --junitxml=tests/junit/test-results3.xml
local_server: JUNITXML = --junitxml=tests/junit/test-results4.xml
multi_server: JUNITXML = --junitxml=tests/junit/test-results5.xml
remote_workflow: JUNITXML = --junitxml=tests/junit/test-results6.xml
remote_operator: JUNITXML = --junitxml=tests/junit/test-results7.xml
workflow: JUNITXML = --junitxml=tests/junit/test-results8.xml
service: JUNITXML = --junitxml=tests/junit/test-results9.xml
operators: JUNITXML = --junitxml=../tests/junit/test-results12.xml

# Test sets
api: PYTEST_PYTHON_FILES = tests
launcher: PYTEST_PYTHON_FILES = test_launcher
server: PYTEST_PYTHON_FILES = test_server
local_server: PYTEST_PYTHON_FILES = test_local_server
multi_server: PYTEST_PYTHON_FILES = test_multi_server
remote_workflow: PYTEST_PYTHON_FILES = test_remote_workflow
remote_operator: PYTEST_PYTHON_FILES = test_remote_operator
workflow: PYTEST_PYTHON_FILES = test_workflow
service: PYTEST_PYTHON_FILES = test_service
operators: PYTEST_PYTHON_FILES = tests/operators

deps =
-r requirements/requirements_test.txt

commands =
pytest {env:PYTEST_PYTHON_FILES} {env:DEBUG} {env:COVERAGE} {env:RERUNS} {env:JUNITXML}
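As an illustration, with the setenv values above, the test-launcher environment ends up invoking pytest roughly as follows (assembled by hand from the substitutions defined in this file, not captured from an actual run):

pytest test_launcher -v -s --durations=10 --durations-min=1.0 --cov=ansys.dpf.core --cov-report=xml --cov-report=html --log-level=ERROR --cov-append --reruns=2 --reruns-delay=1 --junitxml=tests/junit/test-results2.xml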