diff --git a/build/test-requirements.txt b/build/test-requirements.txt index 0650e86fb3d35..3f3101cf949ac 100644 --- a/build/test-requirements.txt +++ b/build/test-requirements.txt @@ -19,3 +19,6 @@ torch-tb-profiler # extension build tests freezegun + +# testing the custom pytest plugin requires the use of named pipes +namedpipe; platform_system == "Windows" diff --git a/python_files/testing_tools/process_json_util.py b/python_files/testing_tools/process_json_util.py index f116b0d9a8f3f..36067521ea27a 100644 --- a/python_files/testing_tools/process_json_util.py +++ b/python_files/testing_tools/process_json_util.py @@ -2,12 +2,12 @@ # Licensed under the MIT License. import io import json -from typing import List +from typing import List, Dict CONTENT_LENGTH: str = "Content-Length:" -def process_rpc_json(data: str) -> List[str]: +def process_rpc_json(data: str) -> Dict[str, List[str]]: """Process the JSON data which comes from the server.""" str_stream: io.StringIO = io.StringIO(data) @@ -22,7 +22,7 @@ def process_rpc_json(data: str) -> List[str]: if not line or line.isspace(): raise ValueError("Header does not contain Content-Length") - while True: + while True: # keep reading until the number of bytes is the CONTENT_LENGTH line: str = str_stream.readline() if not line or line.isspace(): break diff --git a/python_files/testing_tools/socket_manager.py b/python_files/testing_tools/socket_manager.py index 3392a4d54e073..31b78b254bba9 100644 --- a/python_files/testing_tools/socket_manager.py +++ b/python_files/testing_tools/socket_manager.py @@ -4,6 +4,76 @@ import socket import sys +# save a reference to the socket class before it gets blocked or overwritten by a user's tests +_SOCKET = socket.socket + + +class PipeManager: + def __init__(self, name): + self.name = name + + def __enter__(self): + return self.connect() + + def __exit__(self, *_): + self.close() + + def connect(self): + if sys.platform == "win32": + self._writer = open(self.name, "wt", encoding="utf-8") + # reader created in read method + else: + self._socket = _SOCKET(socket.AF_UNIX, socket.SOCK_STREAM) + self._socket.connect(self.name) + return self + + def close(self): + if sys.platform == "win32": + self._writer.close() + else: + # add exception catch + self._socket.close() + + def write(self, data: str): + if sys.platform == "win32": + try: + # for windows, it should only use \n\n + request = ( + f"""content-length: {len(data)}\ncontent-type: application/json\n\n{data}""" + ) + self._writer.write(request) + self._writer.flush() + except Exception as e: + print("error attempting to write to pipe", e) + raise (e) + else: + # must include the carriage-return defined (as \r\n) for unix systems + request = ( + f"""content-length: {len(data)}\r\ncontent-type: application/json\r\n\r\n{data}""" + ) + self._socket.send(request.encode("utf-8")) + + def read(self, bufsize=1024) -> str: + """Read data from the socket. + + Args: + bufsize (int): Number of bytes to read from the socket. + + Returns: + data (str): Data received from the socket. + """ + if sys.platform == "win32": + # returns a string automatically from read + if not hasattr(self, "_reader"): + self._reader = open(self.name, "rt", encoding="utf-8") + return self._reader.read(bufsize) + else: + # receive bytes and convert to string + while True: + part: bytes = self._socket.recv(bufsize) + data: str = part.decode("utf-8") + return data + class SocketManager(object): """Create a socket and connect to the given address.
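For reference, a minimal sketch (not part of the change itself) of how a client is expected to use the new PipeManager: the pipe path normally comes from the TEST_RUN_PIPE environment variable set by the extension, and the payload below is only a placeholder.

import json
import os

from testing_tools import socket_manager

pipe_name = os.environ["TEST_RUN_PIPE"]  # set by the extension before launching the adapter
payload = json.dumps({"jsonrpc": "2.0", "params": {"status": "success"}})  # placeholder payload

with socket_manager.PipeManager(pipe_name) as pipe:
    # write() prepends the content-length / content-type headers shown above to the JSON body
    pipe.write(payload)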
diff --git a/python_files/tests/pytestadapter/.data/pytest.ini b/python_files/tests/pytestadapter/.data/pytest.ini new file mode 100644 index 0000000000000..ddbcd6544e5dd --- /dev/null +++ b/python_files/tests/pytestadapter/.data/pytest.ini @@ -0,0 +1,5 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +# pytest.ini is specified here so the root directory of the tests is kept at .data instead of referencing +# the parent python_files/pyproject.toml for test_discovery.py and test_execution.py for pytest-adapter tests. diff --git a/python_files/tests/pytestadapter/helpers.py b/python_files/tests/pytestadapter/helpers.py index dd69379a45b9f..978fd7f9ce083 100644 --- a/python_files/tests/pytestadapter/helpers.py +++ b/python_files/tests/pytestadapter/helpers.py @@ -9,27 +9,48 @@ import socket import subprocess import sys +import tempfile import threading +from typing import Any, Dict, List, Optional, Tuple import uuid -from typing import Any, Dict, List, Optional, Tuple, TypedDict +if sys.platform == "win32": + from namedpipe import NPopen + + +script_dir = pathlib.Path(__file__).parent.parent.parent +script_dir_child = pathlib.Path(__file__).parent.parent +sys.path.append(os.fspath(script_dir)) +sys.path.append(os.fspath(script_dir_child)) +sys.path.append(os.fspath(script_dir / "lib" / "python")) +print("sys add path", script_dir) TEST_DATA_PATH = pathlib.Path(__file__).parent / ".data" +CONTENT_LENGTH: str = "Content-Length:" +CONTENT_TYPE: str = "Content-Type:" -def get_absolute_test_id(test_id: str, testPath: pathlib.Path) -> str: - split_id = test_id.split("::")[1:] - absolute_test_id = "::".join([str(testPath), *split_id]) - print("absolute path", absolute_test_id) - return absolute_test_id +@contextlib.contextmanager +def text_to_python_file(text_file_path: pathlib.Path): + """Convert a text file to a python file and yield the python file path.""" + python_file = None + try: + contents = text_file_path.read_text(encoding="utf-8") + python_file = text_file_path.with_suffix(".py") + python_file.write_text(contents, encoding="utf-8") + yield python_file + finally: + if python_file: + os.unlink(os.fspath(python_file)) @contextlib.contextmanager def create_symlink(root: pathlib.Path, target_ext: str, destination_ext: str): + destination = None try: destination = root / destination_ext target = root / target_ext - if destination.exists(): + if destination and destination.exists(): print("destination already exists", destination) try: destination.symlink_to(target) @@ -37,95 +58,140 @@ def create_symlink(root: pathlib.Path, target_ext: str, destination_ext: str): print("error occurred when attempting to create a symlink", e) yield target, destination finally: - destination.unlink() + if destination and destination.exists(): + destination.unlink() print("destination unlinked", destination) -def create_server( - host: str = "127.0.0.1", - port: int = 0, - backlog: int = socket.SOMAXCONN, - timeout: int = 1000, -) -> socket.socket: - """Return a local server socket listening on the given port.""" - server: socket.socket = _new_sock() - if port: - # If binding to a specific port, make sure that the user doesn't have - # to wait until the OS times out waiting for socket in order to use - # that port again if the server or the adapter crash or are force-killed. 
- if sys.platform == "win32": - server.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) +def process_data_received(data: str) -> List[Dict[str, Any]]: + """Process all the JSON data which comes from the server. After listen is finished, this function will be called. + Here the data must be split into individual JSON messages and then parsed. + + This function also: + - Checks that the jsonrpc value is 2.0 + - Checks that the last JSON message contains the `eot` token. + + """ + json_messages = [] + remaining = data + while remaining: + json_data, remaining = parse_rpc_message(remaining) + # here json_data is a single rpc payload; check that it is jsonrpc 2.0 and save the params data + if "params" not in json_data or "jsonrpc" not in json_data: + raise ValueError("Invalid JSON-RPC message received, missing params or jsonrpc key") + elif json_data["jsonrpc"] != "2.0": + raise ValueError("Invalid JSON-RPC version received, not version 2.0") else: - try: - server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - except (AttributeError, OSError): - pass # Not available everywhere - server.bind((host, port)) - if timeout: - server.settimeout(timeout) - server.listen(backlog) - return server - - -def _new_sock() -> socket.socket: - sock: socket.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP) - options = [ - ("SOL_SOCKET", "SO_KEEPALIVE", 1), - ("IPPROTO_TCP", "TCP_KEEPIDLE", 1), - ("IPPROTO_TCP", "TCP_KEEPINTVL", 3), - ("IPPROTO_TCP", "TCP_KEEPCNT", 5), - ] - - for level, name, value in options: - try: - sock.setsockopt(getattr(socket, level), getattr(socket, name), value) - except (AttributeError, OSError): - pass # May not be available everywhere. + json_messages.append(json_data["params"]) - return sock + last_json = json_messages.pop(-1) + if "eot" not in last_json: + raise ValueError("Last JSON message does not contain 'eot' as its last payload.") + return json_messages # return the list of json messages, only the params part without the EOT token -CONTENT_LENGTH: str = "Content-Length:" -Env_Dict = TypedDict("Env_Dict", {"TEST_UUID": str, "TEST_PORT": str, "PYTHONPATH": str}) +def parse_rpc_message(data: str) -> Tuple[Dict[str, str], str]: + """Process the JSON data which comes from the server. + A single rpc payload is in the format: + content-length: #LEN# \r\ncontent-type: application/json\r\n\r\n{"jsonrpc": "2.0", "params": ENTIRE_DATA} + with EOT params: "params": {"command_type": "discovery", "eot": true} -def process_rpc_message(data: str) -> Tuple[Dict[str, Any], str]: - """Process the JSON data which comes from the server which runs the pytest discovery.""" + returns: + json_data: A single rpc payload of JSON data from the server.
+ remaining: The remaining data after the JSON data.""" str_stream: io.StringIO = io.StringIO(data) length: int = 0 - while True: line: str = str_stream.readline() if CONTENT_LENGTH.lower() in line.lower(): length = int(line[len(CONTENT_LENGTH) :]) + + line: str = str_stream.readline() + if CONTENT_TYPE.lower() not in line.lower(): + raise ValueError("Header does not contain Content-Type") + + line = str_stream.readline() + if line not in ["\r\n", "\n"]: + raise ValueError("Header does not contain space to separate header and body") + # if it passes all these checks then it has the right headers break if not line or line.isspace(): raise ValueError("Header does not contain Content-Length") - while True: - line: str = str_stream.readline() - if not line or line.isspace(): - break + while True: # keep reading until the number of bytes is the CONTENT_LENGTH + line: str = str_stream.readline(length) + try: + # try to parse the json, if successful it is single payload so return with remaining data + json_data: dict[str, str] = json.loads(line) + return json_data, str_stream.read() + except json.JSONDecodeError: + print("json decode error") - raw_json: str = str_stream.read(length) - return json.loads(raw_json), str_stream.read() +def _listen_on_pipe_new(listener, result: List[str], completed: threading.Event): + """Listen on the named pipe or Unix domain socket for JSON data from the server. + Created as a separate function for clarity in threading context. + """ + # Windows design + if sys.platform == "win32": + all_data: list = [] + stream = listener.wait() + while True: + # Read data from collection + close = stream.closed + if close: + break + data = stream.readlines() + if not data: + if completed.is_set(): + break # Exit loop if completed event is set + else: + try: + # Attempt to accept another connection if the current one closes unexpectedly + print("attempt another connection") + except socket.timeout: + # On timeout, append all collected data to result and return + # result.append("".join(all_data)) + return + data_decoded = "".join(data) + all_data.append(data_decoded) + # Append all collected data to result array + result.append("".join(all_data)) + else: # Unix design + connection, _ = listener.socket.accept() + listener.socket.settimeout(1) + all_data: list = [] + while True: + # Reading from connection + data: bytes = connection.recv(1024 * 1024) + if not data: + if completed.is_set(): + break # Exit loop if completed event is set + else: + try: + # Attempt to accept another connection if the current one closes unexpectedly + connection, _ = listener.socket.accept() + except socket.timeout: + # On timeout, append all collected data to result and return + result.append("".join(all_data)) + return + all_data.append(data.decode("utf-8")) + # Append all collected data to result array + result.append("".join(all_data)) -def process_rpc_json(data: str) -> List[Dict[str, Any]]: - """Process the JSON data which comes from the server which runs the pytest discovery.""" - json_messages = [] - remaining = data - while remaining: - json_data, remaining = process_rpc_message(remaining) - json_messages.append(json_data) - return json_messages +def _run_test_code(proc_args: List[str], proc_env, proc_cwd: str, completed: threading.Event): + result = subprocess.run(proc_args, env=proc_env, cwd=proc_cwd) + completed.set() + return result def runner(args: List[str]) -> Optional[List[Dict[str, Any]]]: """Run the pytest discovery and return the JSON data from the server.""" + print("\n Running 
python test subprocess with cwd set to: ", TEST_DATA_PATH) return runner_with_cwd(args, TEST_DATA_PATH) @@ -139,64 +205,70 @@ def runner_with_cwd(args: List[str], path: pathlib.Path) -> Optional[List[Dict[s "vscode_pytest", "-s", ] + args - listener: socket.socket = create_server() - _, port = listener.getsockname() - listener.listen() - - env = os.environ.copy() - env.update( - { - "TEST_UUID": str(uuid.uuid4()), - "TEST_PORT": str(port), - "PYTHONPATH": os.fspath(pathlib.Path(__file__).parent.parent.parent), - } - ) - completed = threading.Event() - - result = [] - t1: threading.Thread = threading.Thread( - target=_listen_on_socket, args=(listener, result, completed) - ) - t1.start() - - t2 = threading.Thread( - target=_run_test_code, - args=(process_args, env, path, completed), - ) - t2.start() - - t1.join() - t2.join() - - return process_rpc_json(result[0]) if result else None - - -def _listen_on_socket(listener: socket.socket, result: List[str], completed: threading.Event): - """Listen on the socket for the JSON data from the server. - Created as a separate function for clarity in threading. - """ - sock, (other_host, other_port) = listener.accept() - listener.settimeout(1) - all_data: list = [] - while True: - data: bytes = sock.recv(1024 * 1024) - if not data: - if completed.is_set(): - break - else: - try: - sock, (other_host, other_port) = listener.accept() - except socket.timeout: - result.append("".join(all_data)) - return - all_data.append(data.decode("utf-8")) - result.append("".join(all_data)) - -def _run_test_code(proc_args: List[str], proc_env, proc_cwd: str, completed: threading.Event): - result = subprocess.run(proc_args, env=proc_env, cwd=proc_cwd) - completed.set() - return result + # Generate pipe name, pipe name specific per OS type. + pipe_name = generate_random_pipe_name("pytest-discovery-test") + + # Windows design + if sys.platform == "win32": + with NPopen("r+t", name=pipe_name, bufsize=0) as pipe: + # Update the environment with the pipe name and PYTHONPATH. + env = os.environ.copy() + env.update( + { + "TEST_RUN_PIPE": pipe.path, + "PYTHONPATH": os.fspath(pathlib.Path(__file__).parent.parent.parent), + } + ) + + completed = threading.Event() + + result = [] # result is a string array to store the data during threading + t1: threading.Thread = threading.Thread( + target=_listen_on_pipe_new, args=(pipe, result, completed) + ) + t1.start() + + t2 = threading.Thread( + target=_run_test_code, + args=(process_args, env, path, completed), + ) + t2.start() + + t1.join() + t2.join() + + return process_data_received(result[0]) if result else None + else: # Unix design + # Update the environment with the pipe name and PYTHONPATH. 
+ env = os.environ.copy() + env.update( + { + "TEST_RUN_PIPE": pipe_name, + "PYTHONPATH": os.fspath(pathlib.Path(__file__).parent.parent.parent), + } + ) + server = UnixPipeServer(pipe_name) + server.start() + + completed = threading.Event() + + result = [] # result is a string array to store the data during threading + t1: threading.Thread = threading.Thread( + target=_listen_on_pipe_new, args=(server, result, completed) + ) + t1.start() + + t2 = threading.Thread( + target=_run_test_code, + args=(process_args, env, path, completed), + ) + t2.start() + + t1.join() + t2.join() + + return process_data_received(result[0]) if result else None def find_test_line_number(test_name: str, test_file_path) -> str: @@ -215,3 +287,64 @@ def find_test_line_number(test_name: str, test_file_path) -> str: return str(i + 1) error_str: str = f"Test {test_name!r} not found on any line in {test_file_path}" raise ValueError(error_str) + + +def get_absolute_test_id(test_id: str, testPath: pathlib.Path) -> str: + """Get the absolute test id by joining the testPath with the test_id.""" + split_id = test_id.split("::")[1:] + absolute_test_id = "::".join([str(testPath), *split_id]) + return absolute_test_id + + +def generate_random_pipe_name(prefix=""): + # Generate a random suffix using UUID4, ensuring uniqueness. + random_suffix = uuid.uuid4().hex[:10] + # Default prefix if not provided. + if not prefix: + prefix = "python-ext-rpc" + + # For Windows, named pipes have a specific naming convention. + if sys.platform == "win32": + return f"\\\\.\\pipe\\{prefix}-{random_suffix}-sock" + + # For Unix-like systems, use either the XDG_RUNTIME_DIR or a temporary directory. + xdg_runtime_dir = os.getenv("XDG_RUNTIME_DIR") + if xdg_runtime_dir: + return os.path.join(xdg_runtime_dir, f"{prefix}-{random_suffix}.sock") + else: + return os.path.join(tempfile.gettempdir(), f"{prefix}-{random_suffix}.sock") + + +class UnixPipeServer: + def __init__(self, name): + self.name = name + self.is_windows = sys.platform == "win32" + if self.is_windows: + raise NotImplementedError( + "This class is only intended for Unix-like systems, not Windows." + ) + else: + # For Unix-like systems, use a Unix domain socket. + self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + # Ensure the socket does not already exist + try: + os.unlink(self.name) + except OSError: + if os.path.exists(self.name): + raise + + def start(self): + if self.is_windows: + raise NotImplementedError( + "This class is only intended for Unix-like systems, not Windows." + ) + else: + # Bind the socket to the address and listen for incoming connections. + self.socket.bind(self.name) + self.socket.listen(1) + print(f"Server listening on {self.name}") + + def stop(self): + # Clean up the server socket. + self.socket.close() + print("Server stopped.") diff --git a/python_files/tests/pytestadapter/test_discovery.py b/python_files/tests/pytestadapter/test_discovery.py index 942f741a47679..e8274bcd97171 100644 --- a/python_files/tests/pytestadapter/test_discovery.py +++ b/python_files/tests/pytestadapter/test_discovery.py @@ -2,7 +2,6 @@ # Licensed under the MIT License. import json import os -import shutil import sys from typing import Any, Dict, List, Optional @@ -13,11 +12,7 @@ from . 
import expected_discovery_test_output, helpers # noqa: E402 -@pytest.mark.skipif( - sys.platform == "win32", - reason="See https://github.com/microsoft/vscode-python/issues/22965", -) -def test_import_error(tmp_path): +def test_import_error(): """Test pytest discovery on a file that has a pytest marker but does not import pytest. Copies the contents of a .txt file to a .py file in the temporary directory @@ -28,19 +23,13 @@ def test_import_error(tmp_path): Keyword arguments: tmp_path -- pytest fixture that creates a temporary directory. """ - # Saving some files as .txt to avoid that file displaying a syntax error for - # the extension as a whole. Instead, rename it before running this test - # in order to test the error handling. file_path = helpers.TEST_DATA_PATH / "error_pytest_import.txt" - temp_dir = tmp_path / "temp_data" - temp_dir.mkdir() - p = temp_dir / "error_pytest_import.py" - shutil.copyfile(file_path, p) - actual: Optional[List[Dict[str, Any]]] = helpers.runner(["--collect-only", os.fspath(p)]) + with helpers.text_to_python_file(file_path) as p: + actual: Optional[List[Dict[str, Any]]] = helpers.runner(["--collect-only", os.fspath(p)]) + assert actual actual_list: List[Dict[str, Any]] = actual if actual_list is not None: - assert actual_list.pop(-1).get("eot") for actual_item in actual_list: assert all(item in actual_item.keys() for item in ("status", "cwd", "error")) assert actual_item.get("status") == "error" @@ -56,10 +45,6 @@ def test_import_error(tmp_path): assert False -@pytest.mark.skipif( - sys.platform == "win32", - reason="See https://github.com/microsoft/vscode-python/issues/22965", -) def test_syntax_error(tmp_path): """Test pytest discovery on a file that has a syntax error. @@ -75,15 +60,12 @@ def test_syntax_error(tmp_path): # the extension as a whole. Instead, rename it before running this test # in order to test the error handling. 
file_path = helpers.TEST_DATA_PATH / "error_syntax_discovery.txt" - temp_dir = tmp_path / "temp_data" - temp_dir.mkdir() - p = temp_dir / "error_syntax_discovery.py" - shutil.copyfile(file_path, p) - actual = helpers.runner(["--collect-only", os.fspath(p)]) + with helpers.text_to_python_file(file_path) as p: + actual = helpers.runner(["--collect-only", os.fspath(p)]) + assert actual actual_list: List[Dict[str, Any]] = actual if actual_list is not None: - assert actual_list.pop(-1).get("eot") for actual_item in actual_list: assert all(item in actual_item.keys() for item in ("status", "cwd", "error")) assert actual_item.get("status") == "error" @@ -109,7 +91,6 @@ def test_parameterized_error_collect(): assert actual actual_list: List[Dict[str, Any]] = actual if actual_list is not None: - assert actual_list.pop(-1).get("eot") for actual_item in actual_list: assert all(item in actual_item.keys() for item in ("status", "cwd", "error")) assert actual_item.get("status") == "error" @@ -187,24 +168,27 @@ def test_pytest_collect(file, expected_const): """ actual = helpers.runner( [ - "--collect-only", os.fspath(helpers.TEST_DATA_PATH / file), + "--collect-only", ] ) assert actual actual_list: List[Dict[str, Any]] = actual if actual_list is not None: - assert actual_list.pop(-1).get("eot") actual_item = actual_list.pop(0) assert all(item in actual_item.keys() for item in ("status", "cwd", "error")) assert actual_item.get("status") == "success" assert actual_item.get("cwd") == os.fspath(helpers.TEST_DATA_PATH) - assert not is_same_tree( + assert is_same_tree( actual_item.get("tests"), expected_const ), f"Tests tree does not match expected value. \n Expected: {json.dumps(expected_const, indent=4)}. \n Actual: {json.dumps(actual_item.get('tests'), indent=4)}" +@pytest.mark.skipif( + sys.platform == "win32", + reason="See https://stackoverflow.com/questions/32877260/privlege-error-trying-to-create-symlink-using-python-on-windows-10", +) def test_symlink_root_dir(): """ Test to test pytest discovery with the command line arg --rootdir specified as a symlink path. @@ -224,7 +208,6 @@ def test_symlink_root_dir(): assert actual actual_list: List[Dict[str, Any]] = actual if actual_list is not None: - assert actual_list.pop(-1).get("eot") actual_item = actual_list.pop(0) try: # Check if all requirements @@ -258,8 +241,8 @@ def test_pytest_root_dir(): assert actual actual_list: List[Dict[str, Any]] = actual if actual_list is not None: - assert actual_list.pop(-1).get("eot") actual_item = actual_list.pop(0) + assert all(item in actual_item.keys() for item in ("status", "cwd", "error")) assert actual_item.get("status") == "success" assert actual_item.get("cwd") == os.fspath(helpers.TEST_DATA_PATH / "root") @@ -284,8 +267,8 @@ def test_pytest_config_file(): assert actual actual_list: List[Dict[str, Any]] = actual if actual_list is not None: - assert actual_list.pop(-1).get("eot") actual_item = actual_list.pop(0) + assert all(item in actual_item.keys() for item in ("status", "cwd", "error")) assert actual_item.get("status") == "success" assert actual_item.get("cwd") == os.fspath(helpers.TEST_DATA_PATH / "root") diff --git a/python_files/tests/pytestadapter/test_execution.py b/python_files/tests/pytestadapter/test_execution.py index b4fffd6a640b0..279cd2c7c04da 100644 --- a/python_files/tests/pytestadapter/test_execution.py +++ b/python_files/tests/pytestadapter/test_execution.py @@ -2,15 +2,19 @@ # Licensed under the MIT License. 
import json import os +import pathlib import shutil +import sys from typing import Any, Dict, List import pytest -import sys -from tests.pytestadapter import expected_execution_test_output +script_dir = pathlib.Path(__file__).parent.parent +sys.path.append(os.fspath(script_dir)) -from .helpers import ( +from tests.pytestadapter import expected_execution_test_output # noqa: E402 + +from .helpers import ( # noqa: E402 TEST_DATA_PATH, create_symlink, get_absolute_test_id, @@ -31,7 +35,6 @@ def test_config_file(): expected_const = expected_execution_test_output.config_file_pytest_expected_execution_output assert actual actual_list: List[Dict[str, Any]] = actual - assert actual_list.pop(-1).get("eot") assert len(actual_list) == len(expected_const) actual_result_dict = dict() if actual_list is not None: @@ -51,8 +54,7 @@ def test_rootdir_specified(): actual = runner_with_cwd(args, new_cwd) expected_const = expected_execution_test_output.config_file_pytest_expected_execution_output assert actual - actual_list: List[Dict[str, Any]] = actual - assert actual_list.pop(-1).get("eot") + actual_list: List[Dict[str, Dict[str, Any]]] = actual assert len(actual_list) == len(expected_const) actual_result_dict = dict() if actual_list is not None: @@ -89,8 +91,8 @@ def test_syntax_error_execution(tmp_path): shutil.copyfile(file_path, p) actual = runner(["error_syntax_discover.py::test_function"]) assert actual - actual_list: List[Dict[str, Any]] = actual - assert actual_list.pop(-1).get("eot") + actual_list: List[Dict[str, Dict[str, Any]]] = actual + if actual_list is not None: for actual_item in actual_list: assert all(item in actual_item.keys() for item in ("status", "cwd", "error")) @@ -112,8 +114,7 @@ def test_bad_id_error_execution(): """ actual = runner(["not/a/real::test_id"]) assert actual - actual_list: List[Dict[str, Any]] = actual - assert actual_list.pop(-1).get("eot") + actual_list: List[Dict[str, Dict[str, Any]]] = actual if actual_list is not None: for actual_item in actual_list: assert all(item in actual_item.keys() for item in ("status", "cwd", "error")) @@ -255,8 +256,7 @@ def test_pytest_execution(test_ids, expected_const): args = test_ids actual = runner(args) assert actual - actual_list: List[Dict[str, Any]] = actual - assert actual_list.pop(-1).get("eot") + actual_list: List[Dict[str, Dict[str, Any]]] = actual assert len(actual_list) == len(expected_const) actual_result_dict = dict() if actual_list is not None: @@ -299,7 +299,6 @@ def test_symlink_run(): assert actual actual_list: List[Dict[str, Any]] = actual if actual_list is not None: - assert actual_list.pop(-1).get("eot") actual_item = actual_list.pop(0) try: # Check if all requirements diff --git a/python_files/tests/unittestadapter/test_discovery.py b/python_files/tests/unittestadapter/test_discovery.py index 462b9cf9b0fef..74eb5a5fb4f39 100644 --- a/python_files/tests/unittestadapter/test_discovery.py +++ b/python_files/tests/unittestadapter/test_discovery.py @@ -80,7 +80,7 @@ def test_parse_unittest_args(args: List[str], expected: List[str]) -> None: def test_simple_discovery() -> None: - """The discover_tests function should return a dictionary with a "success" status, a uuid, no errors, and a test tree + """The discover_tests function should return a dictionary with a "success" status, no errors, and a test tree if unittest discovery was performed successfully. 
""" start_dir = os.fsdecode(TEST_DATA_PATH) @@ -126,8 +126,7 @@ def test_simple_discovery() -> None: "id_": start_dir, } - uuid = "some-uuid" - actual = discover_tests(start_dir, pattern, None, uuid) + actual = discover_tests(start_dir, pattern, None) assert actual["status"] == "success" assert is_same_tree(actual.get("tests"), expected) @@ -135,7 +134,7 @@ def test_simple_discovery() -> None: def test_simple_discovery_with_top_dir_calculated() -> None: - """The discover_tests function should return a dictionary with a "success" status, a uuid, no errors, and a test tree + """The discover_tests function should return a dictionary with a "success" status, no errors, and a test tree if unittest discovery was performed successfully. """ start_dir = "." @@ -181,10 +180,9 @@ def test_simple_discovery_with_top_dir_calculated() -> None: "id_": os.fsdecode(pathlib.PurePath(TEST_DATA_PATH)), } - uuid = "some-uuid" # Define the CWD to be the root of the test data folder. os.chdir(os.fsdecode(pathlib.PurePath(TEST_DATA_PATH))) - actual = discover_tests(start_dir, pattern, None, uuid) + actual = discover_tests(start_dir, pattern, None) assert actual["status"] == "success" assert is_same_tree(actual.get("tests"), expected) @@ -192,14 +190,13 @@ def test_simple_discovery_with_top_dir_calculated() -> None: def test_empty_discovery() -> None: - """The discover_tests function should return a dictionary with a "success" status, a uuid, no errors, and no test tree + """The discover_tests function should return a dictionary with a "success" status, no errors, and no test tree if unittest discovery was performed successfully but no tests were found. """ start_dir = os.fsdecode(TEST_DATA_PATH) pattern = "discovery_empty*" - uuid = "some-uuid" - actual = discover_tests(start_dir, pattern, None, uuid) + actual = discover_tests(start_dir, pattern, None) assert actual["status"] == "success" assert "tests" in actual @@ -207,7 +204,7 @@ def test_empty_discovery() -> None: def test_error_discovery() -> None: - """The discover_tests function should return a dictionary with an "error" status, a uuid, the discovered tests, and a list of errors + """The discover_tests function should return a dictionary with an "error" status, the discovered tests, and a list of errors if unittest discovery failed at some point. """ # Discover tests in .data/discovery_error/. @@ -256,8 +253,7 @@ def test_error_discovery() -> None: "id_": start_dir, } - uuid = "some-uuid" - actual = discover_tests(start_dir, pattern, None, uuid) + actual = discover_tests(start_dir, pattern, None) assert actual["status"] == "error" assert is_same_tree(expected, actual.get("tests")) @@ -265,14 +261,13 @@ def test_error_discovery() -> None: def test_unit_skip() -> None: - """The discover_tests function should return a dictionary with a "success" status, a uuid, no errors, and test tree. + """The discover_tests function should return a dictionary with a "success" status, no errors, and test tree. if unittest discovery was performed and found a test in one file marked as skipped and another file marked as skipped. 
""" start_dir = os.fsdecode(TEST_DATA_PATH / "unittest_skip") pattern = "unittest_*" - uuid = "some-uuid" - actual = discover_tests(start_dir, pattern, None, uuid) + actual = discover_tests(start_dir, pattern, None) assert actual["status"] == "success" assert "tests" in actual @@ -295,8 +290,7 @@ def test_complex_tree() -> None: ) pattern = "test_*.py" top_level_dir = os.fsdecode(pathlib.PurePath(TEST_DATA_PATH, "utils_complex_tree")) - uuid = "some-uuid" - actual = discover_tests(start_dir, pattern, top_level_dir, uuid) + actual = discover_tests(start_dir, pattern, top_level_dir) assert actual["status"] == "success" assert "error" not in actual assert is_same_tree( diff --git a/python_files/tests/unittestadapter/test_execution.py b/python_files/tests/unittestadapter/test_execution.py index 519c13bc2e5d7..89f263d44d1a7 100644 --- a/python_files/tests/unittestadapter/test_execution.py +++ b/python_files/tests/unittestadapter/test_execution.py @@ -4,12 +4,15 @@ import os import pathlib import sys +from unittest.mock import patch +from typing import Dict import pytest -script_dir = pathlib.Path(__file__).parent.parent +script_dir = pathlib.Path(__file__).parent.parent.parent sys.path.insert(0, os.fspath(script_dir / "lib" / "python")) +from unittestadapter.pvsc_utils import ExecutionPayloadDict, TestResultTypeAlias # noqa: E402 from unittestadapter.execution import run_tests # noqa: E402 TEST_DATA_PATH = pathlib.Path(__file__).parent / ".data" @@ -22,7 +25,7 @@ def test_no_ids_run() -> None: start_dir: str = os.fspath(TEST_DATA_PATH) testids = [] pattern = "discovery_simple*" - actual = run_tests(start_dir, testids, pattern, None, "fake-uuid", 1, None) + actual = run_tests(start_dir, testids, pattern, None, 1, None) assert actual assert all(item in actual for item in ("cwd", "status")) assert actual["status"] == "success" @@ -33,49 +36,61 @@ def test_no_ids_run() -> None: raise AssertionError("actual['result'] is None") -def test_single_ids_run() -> None: +@pytest.fixture +def mock_send_run_data(): + with patch("unittestadapter.execution.send_run_data") as mock: + yield mock + + +def test_single_ids_run(mock_send_run_data): """This test runs on a single test_id, therefore it should return a dict with a single key-value pair for the result. This single test passes so the outcome should be 'success'. 
""" id = "discovery_simple.DiscoverySimple.test_one" - actual = run_tests( + os.environ["TEST_RUN_PIPE"] = "fake" + actual: ExecutionPayloadDict = run_tests( os.fspath(TEST_DATA_PATH), [id], "discovery_simple*", None, - "fake-uuid", 1, None, ) - assert actual - assert all(item in actual for item in ("cwd", "status")) - assert actual["status"] == "success" - assert actual["cwd"] == os.fspath(TEST_DATA_PATH) - assert actual["result"] is not None - result = actual["result"] - assert len(result) == 1 - assert id in result - id_result = result[id] - assert id_result is not None - assert "outcome" in id_result - assert id_result["outcome"] == "success" + # Access the arguments + args, _ = mock_send_run_data.call_args + test_actual = args[0] # first argument is the result -def test_subtest_run() -> None: + assert test_actual + actual_result: TestResultTypeAlias | None = actual["result"] + if actual_result is None: + raise AssertionError("actual_result is None") + else: + if not isinstance(actual_result, Dict): + raise AssertionError("actual_result is not a Dict") + assert len(actual_result) == 1 + assert id in actual_result + id_result = actual_result[id] + assert id_result is not None + assert "outcome" in id_result + assert id_result["outcome"] == "success" + + +def test_subtest_run(mock_send_run_data) -> None: """This test runs on a the test_subtest which has a single method, test_even, that uses unittest subtest. The actual result of run should return a dict payload with 6 entry for the 6 subtests. """ id = "test_subtest.NumbersTest.test_even" + os.environ["TEST_RUN_PIPE"] = "fake" actual = run_tests( os.fspath(TEST_DATA_PATH), [id], "test_subtest.py", None, - "fake-uuid", 1, None, ) @@ -161,7 +176,7 @@ def test_subtest_run() -> None: ), ], ) -def test_multiple_ids_run(test_ids, pattern, cwd, expected_outcome) -> None: +def test_multiple_ids_run(mock_send_run_data, test_ids, pattern, cwd, expected_outcome) -> None: """ The following are all successful tests of different formats. @@ -174,7 +189,8 @@ def test_multiple_ids_run(test_ids, pattern, cwd, expected_outcome) -> None: All tests should have the outcome of `success`. """ - actual = run_tests(cwd, test_ids, pattern, None, "fake-uuid", 1, None) + os.environ["TEST_RUN_PIPE"] = "fake" + actual = run_tests(cwd, test_ids, pattern, None, 1, None) assert actual assert all(item in actual for item in ("cwd", "status")) assert actual["status"] == "success" @@ -191,8 +207,10 @@ def test_multiple_ids_run(test_ids, pattern, cwd, expected_outcome) -> None: assert True -def test_failed_tests(): +def test_failed_tests(mock_send_run_data): """This test runs on a single file `test_fail` with two tests that fail.""" + + os.environ["TEST_RUN_PIPE"] = "fake" test_ids = [ "test_fail_simple.RunFailSimple.test_one_fail", "test_fail_simple.RunFailSimple.test_two_fail", @@ -202,7 +220,6 @@ def test_failed_tests(): test_ids, "test_fail_simple*", None, - "fake-uuid", 1, None, ) @@ -226,17 +243,17 @@ def test_failed_tests(): assert True -def test_unknown_id(): +def test_unknown_id(mock_send_run_data): """This test runs on a unknown test_id, therefore it should return an error as the outcome as it attempts to find the given test. """ + os.environ["TEST_RUN_PIPE"] = "fake" test_ids = ["unknown_id"] actual = run_tests( os.fspath(TEST_DATA_PATH), test_ids, "test_fail_simple*", None, - "fake-uuid", 1, None, ) @@ -260,12 +277,13 @@ def test_incorrect_path(): an error as the outcome as it attempts to find the given folder. 
""" test_ids = ["unknown_id"] + os.environ["TEST_RUN_PIPE"] = "fake" + actual = run_tests( os.fspath(TEST_DATA_PATH / "unknown_folder"), test_ids, "test_fail_simple*", None, - "fake-uuid", 1, None, ) diff --git a/python_files/unittestadapter/discovery.py b/python_files/unittestadapter/discovery.py index 298fe027d1d9a..53f803a6a114a 100644 --- a/python_files/unittestadapter/discovery.py +++ b/python_files/unittestadapter/discovery.py @@ -1,58 +1,37 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. -import json import os import pathlib import sys import traceback import unittest -from typing import List, Optional, Union +from typing import List, Optional script_dir = pathlib.Path(__file__).parent.parent sys.path.append(os.fspath(script_dir)) -sys.path.insert(0, os.fspath(script_dir / "lib" / "python")) - -from typing_extensions import Literal, NotRequired, TypedDict # noqa: E402 - -from testing_tools import socket_manager # noqa: E402 # If I use from utils then there will be an import error in test_discovery.py. from unittestadapter.pvsc_utils import ( # noqa: E402 - TestNode, + VSCodeUnittestError, build_test_tree, parse_unittest_args, + send_post_request, + DiscoveryPayloadDict, + EOTPayloadDict, ) -DEFAULT_PORT = 45454 - - -class PayloadDict(TypedDict): - cwd: str - status: Literal["success", "error"] - tests: Optional[TestNode] - error: NotRequired[List[str]] - - -class EOTPayloadDict(TypedDict): - """A dictionary that is used to send a end of transmission post request to the server.""" - - command_type: Union[Literal["discovery"], Literal["execution"]] - eot: bool - def discover_tests( start_dir: str, pattern: str, top_level_dir: Optional[str], - uuid: Optional[str], -) -> PayloadDict: +) -> DiscoveryPayloadDict: """Returns a dictionary containing details of the discovered tests. The returned dict has the following keys: - cwd: Absolute path to the test start directory; - - uuid: UUID sent by the caller of the Python script, that needs to be sent back as an integrity check; - status: Test discovery status, can be "success" or "error"; - tests: Discoverered tests if any, not present otherwise. Note that the status can be "error" but the payload can still contain tests; - error: Discovery error if any, not present otherwise. @@ -78,12 +57,7 @@ def discover_tests( } """ cwd = os.path.abspath(start_dir) - if "/" in start_dir: # is a subdir - parent_dir = os.path.dirname(start_dir) - sys.path.insert(0, parent_dir) - else: - sys.path.insert(0, cwd) - payload: PayloadDict = {"cwd": cwd, "status": "success", "tests": None} + payload: DiscoveryPayloadDict = {"cwd": cwd, "status": "success", "tests": None} tests = None error: List[str] = [] @@ -114,24 +88,6 @@ def discover_tests( return payload -def post_response(payload: Union[PayloadDict, EOTPayloadDict], port: int, uuid: str) -> None: - # Build the request data (it has to be a POST request or the Node side will not process it), and send it. - addr = ("localhost", port) - data = json.dumps(payload) - request = f"""Content-Length: {len(data)} -Content-Type: application/json -Request-uuid: {uuid} - -{data}""" - try: - with socket_manager.SocketManager(addr) as s: - if s.socket is not None: - s.socket.sendall(request.encode("utf-8")) - except Exception as e: - print(f"Error sending response: {e}") - print(f"Request data: {request}") - - if __name__ == "__main__": # Get unittest discovery arguments. 
argv = sys.argv[1:] @@ -146,23 +102,21 @@ def post_response(payload: Union[PayloadDict, EOTPayloadDict], port: int, uuid: _locals, ) = parse_unittest_args(argv[index + 1 :]) - testPort = int(os.environ.get("TEST_PORT", DEFAULT_PORT)) - testUuid = os.environ.get("TEST_UUID") - if testPort is DEFAULT_PORT: - print( - "Error[vscode-unittest]: TEST_PORT is not set.", - " TEST_UUID = ", - testUuid, + test_run_pipe = os.getenv("TEST_RUN_PIPE") + if not test_run_pipe: + error_msg = ( + "UNITTEST ERROR: TEST_RUN_PIPE is not set at the time of unittest trying to send data. " + "Please confirm this environment variable is not being changed or removed " + "as it is required for successful test discovery and execution." + f"TEST_RUN_PIPE = {test_run_pipe}\n" ) - if testUuid is not None: - # Perform test discovery. - payload = discover_tests(start_dir, pattern, top_level_dir, testUuid) - # Post this discovery payload. - post_response(payload, testPort, testUuid) - # Post EOT token. - eot_payload: EOTPayloadDict = {"command_type": "discovery", "eot": True} - post_response(eot_payload, testPort, testUuid) - else: - print("Error: no uuid provided or parsed.") - eot_payload: EOTPayloadDict = {"command_type": "discovery", "eot": True} - post_response(eot_payload, testPort, "") + print(error_msg, file=sys.stderr) + raise VSCodeUnittestError(error_msg) + + # Perform test discovery. + payload = discover_tests(start_dir, pattern, top_level_dir) + # Post this discovery payload. + send_post_request(payload, test_run_pipe) + # Post EOT token. + eot_payload: EOTPayloadDict = {"command_type": "discovery", "eot": True} + send_post_request(eot_payload, test_run_pipe) diff --git a/python_files/unittestadapter/execution.py b/python_files/unittestadapter/execution.py index 6b92529064773..1b0cb608a81ab 100644 --- a/python_files/unittestadapter/execution.py +++ b/python_files/unittestadapter/execution.py @@ -17,16 +17,19 @@ sys.path.append(os.fspath(script_dir)) sys.path.insert(0, os.fspath(script_dir / "lib" / "python")) -from typing_extensions import Literal, NotRequired, TypeAlias, TypedDict # noqa: E402 - from testing_tools import process_json_util, socket_manager # noqa: E402 -from unittestadapter.pvsc_utils import parse_unittest_args # noqa: E402 +from unittestadapter.pvsc_utils import ( # noqa: E402 + VSCodeUnittestError, + parse_unittest_args, + send_post_request, + ExecutionPayloadDict, + EOTPayloadDict, + TestExecutionStatus, +) ErrorType = Union[Tuple[Type[BaseException], BaseException, TracebackType], Tuple[None, None, None]] -testPort = 0 -testUuid = 0 +test_run_pipe = "" START_DIR = "" -DEFAULT_PORT = 45454 class TestOutcomeEnum(str, enum.Enum): @@ -127,32 +130,17 @@ def formatResult( "subtest": subtest.id() if subtest else None, } self.formatted[test_id] = result - if testPort == 0 or testUuid == 0: - print("Error sending response, port or uuid unknown to python server.") - send_run_data(result, testPort, testUuid) - - -class TestExecutionStatus(str, enum.Enum): - error = "error" - success = "success" - - -TestResultTypeAlias: TypeAlias = Dict[str, Dict[str, Union[str, None]]] - - -class PayloadDict(TypedDict): - cwd: str - status: TestExecutionStatus - result: Optional[TestResultTypeAlias] - not_found: NotRequired[List[str]] - error: NotRequired[str] - - -class EOTPayloadDict(TypedDict): - """A dictionary that is used to send a end of transmission post request to the server.""" - - command_type: Union[Literal["discovery"], Literal["execution"]] - eot: bool + test_run_pipe = os.getenv("TEST_RUN_PIPE") + if 
not test_run_pipe: + print( + "UNITTEST ERROR: TEST_RUN_PIPE is not set at the time of unittest trying to send data. " + f"TEST_RUN_PIPE = {test_run_pipe}\n", + file=sys.stderr, + ) + raise VSCodeUnittestError( + "UNITTEST ERROR: TEST_RUN_PIPE is not set at the time of unittest trying to send data. " + ) + send_run_data(result, test_run_pipe) # Args: start_path path to a directory or a file, list of ids that may be empty. @@ -165,20 +153,14 @@ def run_tests( test_ids: List[str], pattern: str, top_level_dir: Optional[str], - uuid: Optional[str], verbosity: int, failfast: Optional[bool], locals: Optional[bool] = None, -) -> PayloadDict: +) -> ExecutionPayloadDict: cwd = os.path.abspath(start_dir) - if "/" in start_dir: # is a subdir - parent_dir = os.path.dirname(start_dir) - sys.path.insert(0, parent_dir) - else: - sys.path.insert(0, cwd) status = TestExecutionStatus.error error = None - payload: PayloadDict = {"cwd": cwd, "status": status, "result": None} + payload: ExecutionPayloadDict = {"cwd": cwd, "status": status, "result": None} try: # If it's a file, split path and file name. @@ -234,7 +216,7 @@ def run_tests( atexit.register(lambda: __socket.close() if __socket else None) -def send_run_data(raw_data, port, uuid): +def send_run_data(raw_data, test_run_pipe): status = raw_data["outcome"] cwd = os.path.abspath(START_DIR) if raw_data["subtest"]: @@ -243,33 +225,8 @@ def send_run_data(raw_data, port, uuid): test_id = raw_data["test"] test_dict = {} test_dict[test_id] = raw_data - payload: PayloadDict = {"cwd": cwd, "status": status, "result": test_dict} - post_response(payload, port, uuid) - - -def post_response(payload: Union[PayloadDict, EOTPayloadDict], port: int, uuid: str) -> None: - # Build the request data (it has to be a POST request or the Node side will not process it), and send it. 
- addr = ("localhost", port) - global __socket - if __socket is None: - try: - __socket = socket_manager.SocketManager(addr) - __socket.connect() - except Exception as error: - print(f"Plugin error connection error[vscode-pytest]: {error}") - __socket = None - data = json.dumps(payload) - request = f"""Content-Length: {len(data)} -Content-Type: application/json -Request-uuid: {uuid} - -{data}""" - try: - if __socket is not None and __socket.socket is not None: - __socket.socket.sendall(request.encode("utf-8")) - except Exception as ex: - print(f"Error sending response: {ex}") - print(f"Request data: {request}") + payload: ExecutionPayloadDict = {"cwd": cwd, "status": status, "result": test_dict} + send_post_request(payload, test_run_pipe) if __name__ == "__main__": @@ -286,78 +243,72 @@ def post_response(payload: Union[PayloadDict, EOTPayloadDict], port: int, uuid: locals, ) = parse_unittest_args(argv[index + 1 :]) - run_test_ids_port = os.environ.get("RUN_TEST_IDS_PORT") - run_test_ids_port_int = int(run_test_ids_port) if run_test_ids_port is not None else 0 - if run_test_ids_port_int == 0: - print("Error[vscode-unittest]: RUN_TEST_IDS_PORT env var is not set.") - # get data from socket + run_test_ids_pipe = os.environ.get("RUN_TEST_IDS_PIPE") + test_run_pipe = os.getenv("TEST_RUN_PIPE") + + if not run_test_ids_pipe: + print("Error[vscode-unittest]: RUN_TEST_IDS_PIPE env var is not set.") + raise VSCodeUnittestError("Error[vscode-unittest]: RUN_TEST_IDS_PIPE env var is not set.") + if not test_run_pipe: + print("Error[vscode-unittest]: TEST_RUN_PIPE env var is not set.") + raise VSCodeUnittestError("Error[vscode-unittest]: TEST_RUN_PIPE env var is not set.") test_ids_from_buffer = [] + raw_json = None try: - client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - client_socket.connect(("localhost", run_test_ids_port_int)) - buffer = b"" - - while True: - # Receive the data from the client - data = client_socket.recv(1024 * 1024) - if not data: - break - - # Append the received data to the buffer - buffer += data - - try: - # Try to parse the buffer as JSON - test_ids_from_buffer = process_json_util.process_rpc_json(buffer.decode("utf-8")) - # Clear the buffer as complete JSON object is received - buffer = b"" - break - except json.JSONDecodeError: - # JSON decoding error, the complete JSON object is not yet received - continue + with socket_manager.PipeManager(run_test_ids_pipe) as sock: + buffer: str = "" + while True: + # Receive the data from the client + data: str = sock.read() + if not data: + break + + # Append the received data to the buffer + buffer += data + + try: + # Try to parse the buffer as JSON + raw_json = process_json_util.process_rpc_json(buffer) + # Clear the buffer as complete JSON object is received + buffer = "" + print("Received JSON data in run") + break + except json.JSONDecodeError: + # JSON decoding error, the complete JSON object is not yet received + continue except socket.error as e: - print(f"Error: Could not connect to runTestIdsPort: {e}") - print("Error: Could not connect to runTestIdsPort") - - testPort = int(os.environ.get("TEST_PORT", DEFAULT_PORT)) - testUuid = os.environ.get("TEST_UUID") - if testPort is DEFAULT_PORT: - print( - "Error[vscode-unittest]: TEST_PORT is not set.", - " TEST_UUID = ", - testUuid, - ) - if testUuid is None: - print( - "Error[vscode-unittest]: TEST_UUID is not set.", - " TEST_PORT = ", - testPort, - ) - testUuid = "unknown" - if test_ids_from_buffer: - # Perform test execution. 
- payload = run_tests( - start_dir, - test_ids_from_buffer, - pattern, - top_level_dir, - testUuid, - verbosity, - failfast, - locals, - ) - else: - cwd = os.path.abspath(start_dir) - status = TestExecutionStatus.error - payload: PayloadDict = { - "cwd": cwd, - "status": status, - "error": "No test ids received from buffer", - "result": None, - } + msg = f"Error: Could not connect to RUN_TEST_IDS_PIPE: {e}" + print(msg) + raise VSCodeUnittestError(msg) + + try: + if raw_json and "params" in raw_json: + test_ids_from_buffer = raw_json["params"] + if test_ids_from_buffer: + # Perform test execution. + payload = run_tests( + start_dir, + test_ids_from_buffer, + pattern, + top_level_dir, + verbosity, + failfast, + locals, + ) + else: + # No test ids received from buffer + cwd = os.path.abspath(start_dir) + status = TestExecutionStatus.error + payload: ExecutionPayloadDict = { + "cwd": cwd, + "status": status, + "error": "No test ids received from buffer", + "result": None, + } + send_post_request(payload, test_run_pipe) + except json.JSONDecodeError: + msg = "Error: Could not parse test ids from stdin" + print(msg) + raise VSCodeUnittestError(msg) eot_payload: EOTPayloadDict = {"command_type": "execution", "eot": True} - if testUuid is None: - print("Error sending response, uuid unknown to python server.") - post_response(eot_payload, testPort, "unknown") - else: - post_response(eot_payload, testPort, testUuid) + send_post_request(eot_payload, test_run_pipe) diff --git a/python_files/unittestadapter/pvsc_utils.py b/python_files/unittestadapter/pvsc_utils.py index de4f23957b8ba..a039af43c47b6 100644 --- a/python_files/unittestadapter/pvsc_utils.py +++ b/python_files/unittestadapter/pvsc_utils.py @@ -2,19 +2,23 @@ # Licensed under the MIT License. import argparse +import atexit import enum import inspect +import json import os import pathlib import sys import unittest -from typing import List, Tuple, Union +from typing import List, Optional, Tuple, Union, Dict + script_dir = pathlib.Path(__file__).parent.parent sys.path.append(os.fspath(script_dir)) sys.path.append(os.fspath(script_dir / "lib" / "python")) -from typing_extensions import TypedDict # noqa: E402 +from testing_tools import socket_manager # noqa: E402 +from typing_extensions import Literal, NotRequired, TypeAlias, TypedDict # noqa: E402 # Types @@ -43,6 +47,43 @@ class TestNode(TestData): children: "List[TestNode | TestItem]" +class TestExecutionStatus(str, enum.Enum): + error = "error" + success = "success" + + +TestResultTypeAlias: TypeAlias = Dict[str, Dict[str, Union[str, None]]] + + +class VSCodeUnittestError(Exception): + """A custom exception class for unittest errors.""" + + def __init__(self, message): + super().__init__(message) + + +class DiscoveryPayloadDict(TypedDict): + cwd: str + status: Literal["success", "error"] + tests: Optional[TestNode] + error: NotRequired[List[str]] + + +class ExecutionPayloadDict(TypedDict): + cwd: str + status: TestExecutionStatus + result: Optional[TestResultTypeAlias] + not_found: NotRequired[List[str]] + error: NotRequired[str] + + +class EOTPayloadDict(TypedDict): + """A dictionary that is used to send a end of transmission post request to the server.""" + + command_type: Union[Literal["discovery"], Literal["execution"]] + eot: bool + + # Helper functions for data retrieval. 
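To make the wire format concrete, a rough sketch (not part of the diff) of what send_post_request, added in the next hunk, does with one of these payloads: the payload is wrapped in a JSON-RPC 2.0 envelope under "params", serialized, and handed to PipeManager.write(), which prepends the content-length / content-type headers; the pytest-adapter test helpers unwrap the same "params" field and expect the final message to carry "eot".

import json

from unittestadapter.pvsc_utils import EOTPayloadDict

# end-of-transmission marker sent after discovery or execution finishes
eot_payload: EOTPayloadDict = {"command_type": "discovery", "eot": True}

# send_post_request wraps each payload in a JSON-RPC 2.0 envelope before writing it to the pipe
rpc_envelope = {"jsonrpc": "2.0", "params": eot_payload}
wire_data = json.dumps(rpc_envelope)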
@@ -254,3 +295,60 @@ def parse_unittest_args( parsed_args.failfast, parsed_args.locals, ) + + +__writer = None +atexit.register(lambda: __writer.close() if __writer else None) + + +def send_post_request( + payload: Union[ExecutionPayloadDict, DiscoveryPayloadDict, EOTPayloadDict], + test_run_pipe: str, +): + """ + Sends a post request to the server. + + Keyword arguments: + payload -- the payload data to be sent. + test_run_pipe -- the name of the pipe to send the data to. + """ + if not test_run_pipe: + error_msg = ( + "UNITTEST ERROR: TEST_RUN_PIPE is not set at the time of unittest trying to send data. " + "Please confirm this environment variable is not being changed or removed " + "as it is required for successful test discovery and execution." + f"TEST_RUN_PIPE = {test_run_pipe}\n" + ) + print(error_msg, file=sys.stderr) + raise VSCodeUnittestError(error_msg) + + global __writer + + if __writer is None: + try: + __writer = socket_manager.PipeManager(test_run_pipe) + __writer.connect() + except Exception as error: + error_msg = f"Error attempting to connect to extension named pipe {test_run_pipe}[vscode-unittest]: {error}" + __writer = None + raise VSCodeUnittestError(error_msg) + + rpc = { + "jsonrpc": "2.0", + "params": payload, + } + data = json.dumps(rpc) + + try: + if __writer: + __writer.write(data) + else: + print( + f"Connection error[vscode-unittest], writer is None \n[vscode-unittest] data: \n{data} \n", + file=sys.stderr, + ) + except Exception as error: + print( + f"Exception thrown while attempting to send data[vscode-unittest]: {error} \n[vscode-unittest] data: \n{data}\n", + file=sys.stderr, + ) diff --git a/python_files/vscode_pytest/__init__.py b/python_files/vscode_pytest/__init__.py index 54bed66deae02..30b2e158bbee8 100644 --- a/python_files/vscode_pytest/__init__.py +++ b/python_files/vscode_pytest/__init__.py @@ -8,14 +8,15 @@ import sys import traceback -import pytest -from typing import Any, Dict, List, Optional, Union, Literal, TypedDict # noqa: E402 +import pytest +script_dir = pathlib.Path(__file__).parent.parent +sys.path.append(os.fspath(script_dir)) +sys.path.append(os.fspath(script_dir / "lib" / "python")) from testing_tools import socket_manager # noqa: E402 - -DEFAULT_PORT = 45454 +from typing import Any, Dict, List, Optional, Union, TypedDict, Literal # noqa: E402 class TestData(TypedDict): @@ -51,22 +52,20 @@ def __init__(self, message): IS_DISCOVERY = False map_id_to_path = dict() collected_tests_so_far = list() -TEST_PORT = os.getenv("TEST_PORT") -TEST_UUID = os.getenv("TEST_UUID") +TEST_RUN_PIPE = os.getenv("TEST_RUN_PIPE") SYMLINK_PATH = None def pytest_load_initial_conftests(early_config, parser, args): - global TEST_PORT - global TEST_UUID - TEST_PORT = os.getenv("TEST_PORT") - TEST_UUID = os.getenv("TEST_UUID") - if TEST_UUID is None or TEST_PORT is None: - error_string = ( - "PYTEST ERROR: TEST_UUID and/or TEST_PORT are not set at the time of pytest starting. Please confirm these environment variables are not being" - " changed or removed as they are required for successful test discovery and execution." - f" \nTEST_UUID = {TEST_UUID}\nTEST_PORT = {TEST_PORT}\n" - ) + global TEST_RUN_PIPE + TEST_RUN_PIPE = os.getenv("TEST_RUN_PIPE") + error_string = ( + "PYTEST ERROR: TEST_RUN_PIPE is not set at the time of pytest starting. " + "Please confirm this environment variable is not being changed or removed " + "as it is required for successful test discovery and execution." 
+ f"TEST_RUN_PIPE = {TEST_RUN_PIPE}\n" + ) + if not TEST_RUN_PIPE: print(error_string, file=sys.stderr) if "--collect-only" in args: global IS_DISCOVERY @@ -694,8 +693,8 @@ def get_node_path(node: Any) -> pathlib.Path: return node_path -__socket = None -atexit.register(lambda: __socket.close() if __socket else None) +__writer = None +atexit.register(lambda: __writer.close() if __writer else None) def execution_post( @@ -757,27 +756,24 @@ def send_post_request( payload -- the payload data to be sent. cls_encoder -- a custom encoder if needed. """ - global TEST_PORT - global TEST_UUID - if TEST_UUID is None or TEST_PORT is None: - # if TEST_UUID or TEST_PORT is None, print an error and fail as these are both critical errors + if not TEST_RUN_PIPE: error_msg = ( - "PYTEST ERROR: TEST_UUID and/or TEST_PORT are not set at the time of pytest starting. Please confirm these environment variables are not being" - " changed or removed as they are required for successful pytest discovery and execution." - f" \nTEST_UUID = {TEST_UUID}\nTEST_PORT = {TEST_PORT}\n" + "PYTEST ERROR: TEST_RUN_PIPE is not set at the time of pytest starting. " + "Please confirm this environment variable is not being changed or removed " + "as it is required for successful test discovery and execution." + f"TEST_RUN_PIPE = {TEST_RUN_PIPE}\n" ) print(error_msg, file=sys.stderr) raise VSCodePytestError(error_msg) - addr = ("localhost", int(TEST_PORT)) - global __socket + global __writer - if __socket is None: + if __writer is None: try: - __socket = socket_manager.SocketManager(addr) - __socket.connect() + __writer = socket_manager.PipeManager(TEST_RUN_PIPE) + __writer.connect() except Exception as error: - error_msg = f"Error attempting to connect to extension communication socket[vscode-pytest]: {error}" + error_msg = f"Error attempting to connect to extension named pipe {TEST_RUN_PIPE}[vscode-pytest]: {error}" print(error_msg, file=sys.stderr) print( "If you are on a Windows machine, this error may be occurring if any of your tests clear environment variables" @@ -785,26 +781,25 @@ def send_post_request( "for the correct way to clear environment variables during testing.\n", file=sys.stderr, ) - __socket = None + __writer = None raise VSCodePytestError(error_msg) - data = json.dumps(payload, cls=cls_encoder) - request = f"""Content-Length: {len(data)} -Content-Type: application/json -Request-uuid: {TEST_UUID} - -{data}""" + rpc = { + "jsonrpc": "2.0", + "params": payload, + } + data = json.dumps(rpc, cls=cls_encoder) try: - if __socket is not None and __socket.socket is not None: - __socket.socket.sendall(request.encode("utf-8")) + if __writer: + __writer.write(data) else: print( - f"Plugin error connection error[vscode-pytest], socket is None \n[vscode-pytest] data: \n{request} \n", + f"Plugin error connection error[vscode-pytest], writer is None \n[vscode-pytest] data: \n{data} \n", file=sys.stderr, ) except Exception as error: print( - f"Plugin error, exception thrown while attempting to send data[vscode-pytest]: {error} \n[vscode-pytest] data: \n{request}\n", + f"Plugin error, exception thrown while attempting to send data[vscode-pytest]: {error} \n[vscode-pytest] data: \n{data}\n", file=sys.stderr, ) diff --git a/python_files/vscode_pytest/run_pytest_script.py b/python_files/vscode_pytest/run_pytest_script.py index db5dcb5da2e30..46f6f0b9d9f59 100644 --- a/python_files/vscode_pytest/run_pytest_script.py +++ b/python_files/vscode_pytest/run_pytest_script.py @@ -12,6 +12,8 @@ sys.path.append(os.fspath(script_dir)) 
sys.path.append(os.fspath(script_dir / "lib" / "python")) from testing_tools import process_json_util # noqa: E402 +from testing_tools import socket_manager # noqa: E402 + # This script handles running pytest via pytest.main(). It is called via run in the # pytest execution adapter and gets the test_ids to run via stdin and the rest of the @@ -24,42 +26,40 @@ sys.path.insert(0, os.getcwd()) # Get the rest of the args to run with pytest. args = sys.argv[1:] - run_test_ids_port = os.environ.get("RUN_TEST_IDS_PORT") - run_test_ids_port_int = int(run_test_ids_port) if run_test_ids_port is not None else 0 - if run_test_ids_port_int == 0: - print("Error[vscode-pytest]: RUN_TEST_IDS_PORT env var is not set.") - test_ids_from_buffer = [] + run_test_ids_pipe = os.environ.get("RUN_TEST_IDS_PIPE") + if not run_test_ids_pipe: + print("Error[vscode-pytest]: RUN_TEST_IDS_PIPE env var is not set.") + raw_json = {} try: - client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - client_socket.connect(("localhost", run_test_ids_port_int)) - print(f"CLIENT: Server listening on port {run_test_ids_port_int}...") - buffer = b"" - - while True: - # Receive the data from the client - data = client_socket.recv(1024 * 1024) - if not data: - break + socket_name = os.environ.get("RUN_TEST_IDS_PIPE") + with socket_manager.PipeManager(socket_name) as sock: + buffer = "" + while True: + # Receive the data from the client as a string + data = sock.read(3000) + if not data: + break - # Append the received data to the buffer - buffer += data + # Append the received data to the buffer + buffer += data - try: - # Try to parse the buffer as JSON - test_ids_from_buffer = process_json_util.process_rpc_json(buffer.decode("utf-8")) - # Clear the buffer as complete JSON object is received - buffer = b"" - print("Received JSON data in run script") - break - except json.JSONDecodeError: - # JSON decoding error, the complete JSON object is not yet received - continue - except UnicodeDecodeError: - continue + try: + # Try to parse the buffer as JSON + raw_json = process_json_util.process_rpc_json(buffer) + # Clear the buffer as complete JSON object is received + buffer = "" + print("Received JSON data in run script") + break + except json.JSONDecodeError: + # JSON decoding error, the complete JSON object is not yet received + continue + except UnicodeDecodeError: + continue except socket.error as e: print(f"Error: Could not connect to runTestIdsPort: {e}") print("Error: Could not connect to runTestIdsPort") try: + test_ids_from_buffer = raw_json["params"] if test_ids_from_buffer: arg_array = ["-p", "vscode_pytest"] + args + test_ids_from_buffer print("Running pytest with args: " + str(arg_array)) @@ -72,4 +72,7 @@ arg_array = ["-p", "vscode_pytest"] + args pytest.main(arg_array) except json.JSONDecodeError: - print("Error: Could not parse test ids from stdin") + print( + "Error: Could not parse test ids from stdin. Raw json received from socket: \n", + raw_json, + ) diff --git a/src/client/common/pipes/namedPipes.ts b/src/client/common/pipes/namedPipes.ts new file mode 100644 index 0000000000000..c6010d491822b --- /dev/null +++ b/src/client/common/pipes/namedPipes.ts @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
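+
+// Helpers for exchanging vscode-jsonrpc messages with the Python test scripts over a
+// named pipe (Windows) or a Unix domain socket (macOS/Linux). The SocketMessageReader/
+// SocketMessageWriter pair handles the Content-Length framing that the Python side
+// (testing_tools.socket_manager / process_json_util) writes and parses.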
+
+import * as crypto from 'crypto';
+import * as net from 'net';
+import * as os from 'os';
+import * as path from 'path';
+import * as rpc from 'vscode-jsonrpc/node';
+import { traceVerbose } from '../../logging';
+
+export interface ConnectedServerObj {
+    serverOnClosePromise(): Promise<void>;
+}
+
+export function createNamedPipeServer(
+    pipeName: string,
+    onConnectionCallback: (value: [rpc.MessageReader, rpc.MessageWriter]) => void,
+): Promise<ConnectedServerObj> {
+    traceVerbose(`Creating named pipe server on ${pipeName}`);
+
+    let connectionCount = 0;
+    return new Promise<ConnectedServerObj>((resolve, reject) => {
+        // create a server, resolves and returns server on listen
+        const server = net.createServer((socket) => {
+            // this lambda function is called whenever a client connects to the server
+            connectionCount += 1;
+            traceVerbose('new client is connected to the socket, connectionCount: ', connectionCount, pipeName);
+            socket.on('close', () => {
+                // close event is emitted by client to the server
+                connectionCount -= 1;
+                traceVerbose('client emitted close event, connectionCount: ', connectionCount);
+                if (connectionCount <= 0) {
+                    // if all clients are closed, close the server
+                    traceVerbose('connection count is <= 0, closing the server: ', pipeName);
+                    server.close();
+                }
+            });
+
+            // upon connection create a reader and writer and pass it to the callback
+            onConnectionCallback([
+                new rpc.SocketMessageReader(socket, 'utf-8'),
+                new rpc.SocketMessageWriter(socket, 'utf-8'),
+            ]);
+        });
+        const closedServerPromise = new Promise<void>((resolveOnServerClose) => {
+            // resolves once the server emits its 'close' event
+            server.on('close', resolveOnServerClose);
+        });
+        server.on('error', reject);
+
+        server.listen(pipeName, () => {
+            // this function is called when the server is listening
+            server.removeListener('error', reject);
+            const connectedServer = {
+                // expose a promise that resolves when the server's 'close' event fires,
+                // so callers can await full server shutdown via serverOnClosePromise()
+                serverOnClosePromise: () => closedServerPromise,
+            };
+            resolve(connectedServer);
+        });
+    });
+}
+
+const { XDG_RUNTIME_DIR } = process.env;
+export function generateRandomPipeName(prefix: string): string {
+    // 10 random bytes (20 hex characters) keeps the path within the socket name length restriction
+    const randomSuffix = crypto.randomBytes(10).toString('hex');
+    if (prefix.length === 0) {
+        prefix = 'python-ext-rpc';
+    }
+
+    if (process.platform === 'win32') {
+        return `\\\\.\\pipe\\${prefix}-${randomSuffix}-sock`;
+    }
+
+    let result;
+    if (XDG_RUNTIME_DIR) {
+        result = path.join(XDG_RUNTIME_DIR, `${prefix}-${randomSuffix}.sock`);
+    } else {
+        result = path.join(os.tmpdir(), `${prefix}-${randomSuffix}.sock`);
+    }
+
+    return result;
+}
+
+export function namedPipeClient(name: string): [rpc.MessageReader, rpc.MessageWriter] {
+    const socket = net.connect(name);
+    return [new rpc.SocketMessageReader(socket, 'utf-8'), new rpc.SocketMessageWriter(socket, 'utf-8')];
+}
diff --git a/src/client/testing/common/debugLauncher.ts b/src/client/testing/common/debugLauncher.ts
index f05fa6bc93730..f9c7747d883ab 100644
--- a/src/client/testing/common/debugLauncher.ts
+++ b/src/client/testing/common/debugLauncher.ts
@@ -33,8 +33,11 @@ export class DebugLauncher implements ITestDebugLauncher {
     }
 
     public async launchDebugger(options: LaunchOptions, callback?: () => void): Promise<void> {
+        const deferred = createDeferred<void>();
         if (options.token && options.token.isCancellationRequested) {
+            deferred.resolve();
+            callback?.();
             return undefined;
         }
 
         const workspaceFolder = DebugLauncher.resolveWorkspaceFolder(options.cwd);
@@ -45,7 +48,6 @@ export class DebugLauncher implements ITestDebugLauncher {
         );
         const debugManager = this.serviceContainer.get<IDebugService>(IDebugService);
-        const deferred = createDeferred<void>();
         debugManager.onDidTerminateDebugSession(() => {
             deferred.resolve();
             callback?.();
         });
@@ -206,12 +208,11 @@ export class DebugLauncher implements ITestDebugLauncher {
         launchArgs.request = 'launch';
 
         if (pythonTestAdapterRewriteExperiment) {
-            if (options.pytestPort && options.pytestUUID && options.runTestIdsPort) {
+            if (options.pytestPort && options.runTestIdsPort) {
                 launchArgs.env = {
                     ...launchArgs.env,
-                    TEST_PORT: options.pytestPort,
-                    TEST_UUID: options.pytestUUID,
-                    RUN_TEST_IDS_PORT: options.runTestIdsPort,
+                    TEST_RUN_PIPE: options.pytestPort,
+                    RUN_TEST_IDS_PIPE: options.runTestIdsPort,
                 };
             } else {
                 throw Error(
diff --git a/src/client/testing/testController/common/resultResolver.ts b/src/client/testing/testController/common/resultResolver.ts
index 15efc7aa4bb8e..16ee79371b37e 100644
--- a/src/client/testing/testController/common/resultResolver.ts
+++ b/src/client/testing/testController/common/resultResolver.ts
@@ -14,7 +14,7 @@ import {
 import * as util from 'util';
 import { DiscoveredTestPayload, EOTTestPayload, ExecutionTestPayload, ITestResultResolver } from './types';
 import { TestProvider } from '../../types';
-import { traceError, traceLog } from '../../../logging';
+import { traceError, traceVerbose } from '../../../logging';
 import { Testing } from '../../../common/utils/localize';
 import { clearAllChildren, createErrorTestItem, getTestCaseNodes } from './testItemUtilities';
 import { sendTelemetryEvent } from '../../../telemetry';
@@ -49,24 +49,17 @@ export class PythonResultResolver implements ITestResultResolver {
         payload: DiscoveredTestPayload | EOTTestPayload,
         deferredTillEOT: Deferred<void>,
         token?: CancellationToken,
-    ): Promise<void> {
-        if (!payload) {
+    ): void {
+        if ('eot' in payload && payload.eot === true) {
+            deferredTillEOT.resolve();
+        } else if (!payload) {
             // No test data is available
-            return Promise.resolve();
-        }
-        if ('eot' in payload) {
-            // the payload is an EOT payload, so resolve the deferred promise.
- traceLog('ResultResolver EOT received for execution.'); - const eotPayload = payload as EOTTestPayload; - if (eotPayload.eot === true) { - deferredTillEOT.resolve(); - return Promise.resolve(); - } + ): void { + if ('eot' in payload && payload.eot === true) { + // eot sent once per connection + traceVerbose('EOT received, resolving deferredTillServerClose'); + deferredTillEOT.resolve(); + } else { + this._resolveExecution(payload as ExecutionTestPayload, runInstance); } - return this._resolveExecution(payload as ExecutionTestPayload, runInstance); } - public _resolveExecution(payload: ExecutionTestPayload, runInstance: TestRun): Promise { + public _resolveExecution(payload: ExecutionTestPayload, runInstance: TestRun): void { const rawTestExecData = payload as ExecutionTestPayload; if (rawTestExecData !== undefined && rawTestExecData.result !== undefined) { // Map which holds the subtest information for each test item. @@ -279,6 +268,5 @@ export class PythonResultResolver implements ITestResultResolver { } } } - return Promise.resolve(); } } diff --git a/src/client/testing/testController/common/server.ts b/src/client/testing/testController/common/server.ts deleted file mode 100644 index 5969a5f757085..0000000000000 --- a/src/client/testing/testController/common/server.ts +++ /dev/null @@ -1,333 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -import * as net from 'net'; -import * as crypto from 'crypto'; -import { Disposable, Event, EventEmitter, TestRun } from 'vscode'; -import * as path from 'path'; -import { ChildProcess } from 'child_process'; -import { - ExecutionFactoryCreateWithEnvironmentOptions, - ExecutionResult, - IPythonExecutionFactory, - SpawnOptions, -} from '../../../common/process/types'; -import { traceError, traceInfo, traceLog, traceVerbose } from '../../../logging'; -import { DataReceivedEvent, ITestServer, TestCommandOptions } from './types'; -import { ITestDebugLauncher, LaunchOptions } from '../../common/types'; -import { UNITTEST_PROVIDER } from '../../common/constants'; -import { - MESSAGE_ON_TESTING_OUTPUT_MOVE, - createDiscoveryErrorPayload, - createEOTPayload, - createExecutionErrorPayload, - extractJsonPayload, - fixLogLinesNoTrailing, -} from './utils'; -import { createDeferred } from '../../../common/utils/async'; -import { EnvironmentVariables } from '../../../api/types'; - -export class PythonTestServer implements ITestServer, Disposable { - private _onDataReceived: EventEmitter = new EventEmitter(); - - private uuids: Array = []; - - private server: net.Server; - - private ready: Promise; - - private _onRunDataReceived: EventEmitter = new EventEmitter(); - - private _onDiscoveryDataReceived: EventEmitter = new EventEmitter(); - - constructor(private executionFactory: IPythonExecutionFactory, private debugLauncher: ITestDebugLauncher) { - this.server = net.createServer((socket: net.Socket) => { - let buffer: Buffer = Buffer.alloc(0); // Buffer to accumulate received data - socket.on('data', (data: Buffer) => { - traceVerbose('data received from python server: ', data.toString()); - buffer = Buffer.concat([buffer, data]); // get the new data and add it to the buffer - while (buffer.length > 0) { - try { - // try to resolve data, returned unresolved data - const remainingBuffer = this._resolveData(buffer); - if (remainingBuffer.length === buffer.length) { - // if the remaining buffer is exactly the same as the buffer before processing, - // then there is no more data to process so loop should be 
exited. - break; - } - buffer = remainingBuffer; - } catch (ex) { - traceError(`Error reading data from buffer: ${ex} observed.`); - buffer = Buffer.alloc(0); - this._onDataReceived.fire({ uuid: '', data: '' }); - } - } - }); - }); - this.ready = new Promise((resolve, _reject) => { - this.server.listen(undefined, 'localhost', () => { - resolve(); - }); - }); - this.server.on('error', (ex) => { - traceLog(`Error starting test server: ${ex}`); - }); - this.server.on('close', () => { - traceLog('Test server closed.'); - }); - this.server.on('listening', () => { - traceLog('Test server listening.'); - }); - this.server.on('connection', () => { - traceLog('Test server connected to a client.'); - }); - } - - savedBuffer = ''; - - public _resolveData(buffer: Buffer): Buffer { - try { - const extractedJsonPayload = extractJsonPayload(buffer.toString(), this.uuids); - // what payload is so small it doesn't include the whole UUID think got this - if (extractedJsonPayload.uuid !== undefined && extractedJsonPayload.cleanedJsonData !== undefined) { - // if a full json was found in the buffer, fire the data received event then keep cycling with the remaining raw data. - traceVerbose(`Firing data received event, ${extractedJsonPayload.cleanedJsonData}`); - this._fireDataReceived(extractedJsonPayload.uuid, extractedJsonPayload.cleanedJsonData); - } else { - traceVerbose( - `extract json payload incomplete, uuid= ${extractedJsonPayload.uuid} and cleanedJsonData= ${extractedJsonPayload.cleanedJsonData}`, - ); - } - buffer = Buffer.from(extractedJsonPayload.remainingRawData); - if (buffer.length === 0) { - // if the buffer is empty, then there is no more data to process so buffer should be cleared. - buffer = Buffer.alloc(0); - } - } catch (ex) { - traceError(`Error attempting to resolve data: ${ex}`); - this._onDataReceived.fire({ uuid: '', data: '' }); - } - return buffer; - } - - private _fireDataReceived(uuid: string, extractedJSON: string): void { - if (extractedJSON.includes(`"tests":`) || extractedJSON.includes(`"command_type": "discovery"`)) { - this._onDiscoveryDataReceived.fire({ - uuid, - data: extractedJSON, - }); - // if the rawData includes result then this is a run request - } else if (extractedJSON.includes(`"result":`) || extractedJSON.includes(`"command_type": "execution"`)) { - this._onRunDataReceived.fire({ - uuid, - data: extractedJSON, - }); - } else { - traceError(`Error processing test server request: request is not recognized as discovery or run.`); - this._onDataReceived.fire({ uuid: '', data: '' }); - } - } - - public serverReady(): Promise { - return this.ready; - } - - public getPort(): number { - return (this.server.address() as net.AddressInfo).port; - } - - public createUUID(): string { - const uuid = crypto.randomUUID(); - this.uuids.push(uuid); - return uuid; - } - - public deleteUUID(uuid: string): void { - this.uuids = this.uuids.filter((u) => u !== uuid); - } - - public get onRunDataReceived(): Event { - return this._onRunDataReceived.event; - } - - public get onDiscoveryDataReceived(): Event { - return this._onDiscoveryDataReceived.event; - } - - public triggerRunDataReceivedEvent(payload: DataReceivedEvent): void { - this._onRunDataReceived.fire(payload); - } - - public triggerDiscoveryDataReceivedEvent(payload: DataReceivedEvent): void { - this._onDiscoveryDataReceived.fire(payload); - } - - public dispose(): void { - this.server.close(); - this._onDataReceived.dispose(); - } - - public get onDataReceived(): Event { - return this._onDataReceived.event; - } - - async 
sendCommand( - options: TestCommandOptions, - env: EnvironmentVariables, - runTestIdPort?: string, - runInstance?: TestRun, - testIds?: string[], - callback?: () => void, - ): Promise { - const { uuid } = options; - const isDiscovery = (testIds === undefined || testIds.length === 0) && runTestIdPort === undefined; - const mutableEnv = { ...env }; - // get python path from mutable env, it contains process.env as well - const pythonPathParts: string[] = mutableEnv.PYTHONPATH?.split(path.delimiter) ?? []; - const pythonPathCommand = [options.cwd, ...pythonPathParts].join(path.delimiter); - mutableEnv.PYTHONPATH = pythonPathCommand; - mutableEnv.TEST_UUID = uuid.toString(); - mutableEnv.TEST_PORT = this.getPort().toString(); - mutableEnv.RUN_TEST_IDS_PORT = runTestIdPort; - - const spawnOptions: SpawnOptions = { - token: options.token, - cwd: options.cwd, - throwOnStdErr: true, - outputChannel: options.outChannel, - env: mutableEnv, - }; - const isRun = runTestIdPort !== undefined; - // Create the Python environment in which to execute the command. - const creationOptions: ExecutionFactoryCreateWithEnvironmentOptions = { - allowEnvironmentFetchExceptions: false, - resource: options.workspaceFolder, - }; - const execService = await this.executionFactory.createActivatedEnvironment(creationOptions); - const args = [options.command.script].concat(options.command.args); - - if (options.outChannel) { - options.outChannel.appendLine(`python ${args.join(' ')}`); - } - - try { - if (options.debugBool) { - const launchOptions: LaunchOptions = { - cwd: options.cwd, - args, - token: options.token, - testProvider: UNITTEST_PROVIDER, - runTestIdsPort: runTestIdPort, - pytestUUID: uuid.toString(), - pytestPort: this.getPort().toString(), - }; - traceInfo(`Running DEBUG unittest for workspace ${options.cwd} with arguments: ${args}\r\n`); - - await this.debugLauncher!.launchDebugger(launchOptions, () => { - callback?.(); - }); - } else { - if (isRun) { - // This means it is running the test - traceInfo(`Running unittests for workspace ${options.cwd} with arguments: ${args}\r\n`); - } else { - // This means it is running discovery - traceLog(`Discovering unittest tests for workspace ${options.cwd} with arguments: ${args}\r\n`); - } - const deferredTillExecClose = createDeferred>(); - - let resultProc: ChildProcess | undefined; - - runInstance?.token.onCancellationRequested(() => { - traceInfo(`Test run cancelled, killing unittest subprocess for workspace ${options.cwd}.`); - // if the resultProc exists just call kill on it which will handle resolving the ExecClose deferred, otherwise resolve the deferred here. - if (resultProc) { - resultProc?.kill(); - } else { - deferredTillExecClose?.resolve(); - } - }); - - const result = execService?.execObservable(args, spawnOptions); - resultProc = result?.proc; - - // Displays output to user and ensure the subprocess doesn't run into buffer overflow. - // TODO: after a release, remove discovery output from the "Python Test Log" channel and send it to the "Python" channel instead. - // TODO: after a release, remove run output from the "Python Test Log" channel and send it to the "Test Result" channel instead. 
- if (isDiscovery) { - result?.proc?.stdout?.on('data', (data) => { - const out = fixLogLinesNoTrailing(data.toString()); - spawnOptions?.outputChannel?.append(`${out}`); - traceInfo(out); - }); - result?.proc?.stderr?.on('data', (data) => { - const out = fixLogLinesNoTrailing(data.toString()); - spawnOptions?.outputChannel?.append(`${out}`); - traceError(out); - }); - } else { - result?.proc?.stdout?.on('data', (data) => { - const out = fixLogLinesNoTrailing(data.toString()); - runInstance?.appendOutput(`${out}`); - spawnOptions?.outputChannel?.append(out); - }); - result?.proc?.stderr?.on('data', (data) => { - const out = fixLogLinesNoTrailing(data.toString()); - runInstance?.appendOutput(`${out}`); - spawnOptions?.outputChannel?.append(out); - }); - } - - result?.proc?.on('exit', (code, signal) => { - // if the child has testIds then this is a run request - spawnOptions?.outputChannel?.append(MESSAGE_ON_TESTING_OUTPUT_MOVE); - if (isDiscovery) { - if (code !== 0) { - // This occurs when we are running discovery - traceError( - `Subprocess exited unsuccessfully with exit code ${code} and signal ${signal} on workspace ${options.cwd}. Creating and sending error discovery payload \n`, - ); - this._onDiscoveryDataReceived.fire({ - uuid, - data: JSON.stringify(createDiscoveryErrorPayload(code, signal, options.cwd)), - }); - // then send a EOT payload - this._onDiscoveryDataReceived.fire({ - uuid, - data: JSON.stringify(createEOTPayload(true)), - }); - } - } else if (code !== 0 && testIds) { - // This occurs when we are running the test and there is an error which occurs. - - traceError( - `Subprocess exited unsuccessfully with exit code ${code} and signal ${signal} for workspace ${options.cwd}. Creating and sending error execution payload \n`, - ); - // if the child process exited with a non-zero exit code, then we need to send the error payload. 
- this._onRunDataReceived.fire({ - uuid, - data: JSON.stringify(createExecutionErrorPayload(code, signal, testIds, options.cwd)), - }); - // then send a EOT payload - this._onRunDataReceived.fire({ - uuid, - data: JSON.stringify(createEOTPayload(true)), - }); - } - deferredTillExecClose.resolve(); - }); - await deferredTillExecClose.promise; - } - } catch (ex) { - traceError(`Error while server attempting to run unittest command for workspace ${options.cwd}: ${ex}`); - this.uuids = this.uuids.filter((u) => u !== uuid); - this._onDataReceived.fire({ - uuid, - data: JSON.stringify({ - status: 'error', - errors: [(ex as Error).message], - }), - }); - } - } -} diff --git a/src/client/testing/testController/common/types.ts b/src/client/testing/testController/common/types.ts index 685f36af007af..319898f3189aa 100644 --- a/src/client/testing/testController/common/types.ts +++ b/src/client/testing/testController/common/types.ts @@ -14,8 +14,8 @@ import { } from 'vscode'; import { ITestDebugLauncher, TestDiscoveryOptions } from '../../common/types'; import { IPythonExecutionFactory } from '../../../common/process/types'; -import { Deferred } from '../../../common/utils/async'; import { EnvironmentVariables } from '../../../common/variables/types'; +import { Deferred } from '../../../common/utils/async'; export type TestRunInstanceOptions = TestRunOptions & { exclude?: readonly TestItem[]; @@ -148,7 +148,6 @@ export type TestCommandOptions = { workspaceFolder: Uri; cwd: string; command: TestDiscoveryCommand | TestExecutionCommand; - uuid: string; token?: CancellationToken; outChannel?: OutputChannel; debugBool?: boolean; @@ -183,6 +182,7 @@ export interface ITestServer { runInstance?: TestRun, testIds?: string[], callback?: () => void, + executionFactory?: IPythonExecutionFactory, ): Promise; serverReady(): Promise; getPort(): number; @@ -199,14 +199,14 @@ export interface ITestResultResolver { payload: DiscoveredTestPayload | EOTTestPayload, deferredTillEOT: Deferred, token?: CancellationToken, - ): Promise; + ): void; resolveExecution( payload: ExecutionTestPayload | EOTTestPayload, runInstance: TestRun, deferredTillEOT: Deferred, - ): Promise; - _resolveDiscovery(payload: DiscoveredTestPayload, token?: CancellationToken): Promise; - _resolveExecution(payload: ExecutionTestPayload, runInstance: TestRun): Promise; + ): void; + _resolveDiscovery(payload: DiscoveredTestPayload, token?: CancellationToken): void; + _resolveExecution(payload: ExecutionTestPayload, runInstance: TestRun): void; } export interface ITestDiscoveryAdapter { // ** first line old method signature, second line new method signature diff --git a/src/client/testing/testController/common/utils.ts b/src/client/testing/testController/common/utils.ts index be1cf8b2ca35c..aa7a5d1152465 100644 --- a/src/client/testing/testController/common/utils.ts +++ b/src/client/testing/testController/common/utils.ts @@ -2,9 +2,9 @@ // Licensed under the MIT License. 
import * as net from 'net'; import * as path from 'path'; -import { CancellationToken, Position, TestController, TestItem, Uri, Range } from 'vscode'; +import { CancellationToken, Position, TestController, TestItem, Uri, Range, Disposable } from 'vscode'; +import { Message } from 'vscode-jsonrpc'; import { traceError, traceInfo, traceLog, traceVerbose } from '../../../logging'; - import { EnableTestAdapterRewrite } from '../../../common/experiments/groups'; import { IExperimentService } from '../../../common/types'; import { IServiceContainer } from '../../../ioc/types'; @@ -18,6 +18,7 @@ import { ITestResultResolver, } from './types'; import { Deferred, createDeferred } from '../../../common/utils/async'; +import { createNamedPipeServer, generateRandomPipeName } from '../../../common/pipes/namedPipes'; export function fixLogLines(content: string): string { const lines = content.split(/\r?\n/g); @@ -165,6 +166,120 @@ export function pythonTestAdapterRewriteEnabled(serviceContainer: IServiceContai return experiment.inExperimentSync(EnableTestAdapterRewrite.experiment); } +export async function startTestIdsNamedPipe(testIds: string[]): Promise { + const pipeName: string = generateRandomPipeName('python-test-ids'); + // uses callback so the on connect action occurs after the pipe is created + await createNamedPipeServer(pipeName, ([_reader, writer]) => { + traceVerbose('Test Ids named pipe connected'); + // const num = await + const msg = { + jsonrpc: '2.0', + params: testIds, + } as Message; + writer + .write(msg) + .then(() => { + writer.end(); + }) + .catch((ex) => { + traceError('Failed to write test ids to named pipe', ex); + }); + }); + return pipeName; +} + +interface ExecutionResultMessage extends Message { + params: ExecutionTestPayload | EOTTestPayload; +} + +export async function startRunResultNamedPipe( + dataReceivedCallback: (payload: ExecutionTestPayload | EOTTestPayload) => void, + deferredTillServerClose: Deferred, + cancellationToken?: CancellationToken, +): Promise<{ name: string } & Disposable> { + traceVerbose('Starting Test Result named pipe'); + const pipeName: string = generateRandomPipeName('python-test-results'); + let disposeOfServer: () => void = () => { + deferredTillServerClose.resolve(); + /* noop */ + }; + const server = await createNamedPipeServer(pipeName, ([reader, _writer]) => { + // this lambda function is: onConnectionCallback + // this is called once per client connecting to the server + traceVerbose(`Test Result named pipe ${pipeName} connected`); + let perConnectionDisposables: (Disposable | undefined)[] = [reader]; + + // create a function to dispose of the server + disposeOfServer = () => { + // dispose of all data listeners and cancelation listeners + perConnectionDisposables.forEach((d) => d?.dispose()); + perConnectionDisposables = []; + deferredTillServerClose.resolve(); + }; + perConnectionDisposables.push( + // per connection, add a listener for the cancellation token and the data + cancellationToken?.onCancellationRequested(() => { + console.log(`Test Result named pipe ${pipeName} cancelled`); + // if cancel is called on one connection, dispose of all connections + disposeOfServer(); + }), + reader.listen((data: Message) => { + traceVerbose(`Test Result named pipe ${pipeName} received data`); + // if EOT, call decrement connection count (callback) + dataReceivedCallback((data as ExecutionResultMessage).params as ExecutionTestPayload | EOTTestPayload); + }), + ); + server.serverOnClosePromise().then(() => { + // this is called once the server 
close, once per run instance + traceVerbose(`Test Result named pipe ${pipeName} closed. Disposing of listener/s.`); + // dispose of all data listeners and cancelation listeners + disposeOfServer(); + }); + }); + + return { name: pipeName, dispose: disposeOfServer }; +} + +interface DiscoveryResultMessage extends Message { + params: DiscoveredTestPayload | EOTTestPayload; +} + +export async function startDiscoveryNamedPipe( + callback: (payload: DiscoveredTestPayload | EOTTestPayload) => void, + cancellationToken?: CancellationToken, +): Promise<{ name: string } & Disposable> { + traceVerbose('Starting Test Discovery named pipe'); + const pipeName: string = generateRandomPipeName('python-test-discovery'); + let dispose: () => void = () => { + /* noop */ + }; + await createNamedPipeServer(pipeName, ([reader, _writer]) => { + traceVerbose(`Test Discovery named pipe ${pipeName} connected`); + let disposables: (Disposable | undefined)[] = [reader]; + dispose = () => { + traceVerbose(`Test Discovery named pipe ${pipeName} disposed`); + disposables.forEach((d) => d?.dispose()); + disposables = []; + }; + disposables.push( + cancellationToken?.onCancellationRequested(() => { + traceVerbose(`Test Discovery named pipe ${pipeName} cancelled`); + dispose(); + }), + reader.listen((data: Message) => { + traceVerbose(`Test Discovery named pipe ${pipeName} received data`); + callback((data as DiscoveryResultMessage).params as DiscoveredTestPayload | EOTTestPayload); + }), + reader.onClose(() => { + callback(createEOTPayload(true)); + traceVerbose(`Test Discovery named pipe ${pipeName} closed`); + dispose(); + }), + ); + }); + return { name: pipeName, dispose }; +} + export async function startTestIdServer(testIds: string[]): Promise { const startServer = (): Promise => new Promise((resolve, reject) => { diff --git a/src/client/testing/testController/controller.ts b/src/client/testing/testController/controller.ts index bc9d2ca8299f7..710a6cdce4253 100644 --- a/src/client/testing/testController/controller.ts +++ b/src/client/testing/testController/controller.ts @@ -30,7 +30,6 @@ import { IEventNamePropertyMapping, sendTelemetryEvent } from '../../telemetry'; import { EventName } from '../../telemetry/constants'; import { PYTEST_PROVIDER, UNITTEST_PROVIDER } from '../common/constants'; import { TestProvider } from '../types'; -import { PythonTestServer } from './common/server'; import { DebugTestTag, getNodeByUri, RunTestTag } from './common/testItemUtilities'; import { pythonTestAdapterRewriteEnabled } from './common/utils'; import { @@ -79,8 +78,6 @@ export class PythonTestController implements ITestController, IExtensionSingleAc WorkspaceFolder[] >(); - private pythonTestServer: PythonTestServer; - public readonly onRefreshingCompleted = this.refreshingCompletedEvent.event; public readonly onRefreshingStarted = this.refreshingStartedEvent.event; @@ -153,13 +150,9 @@ export class PythonTestController implements ITestController, IExtensionSingleAc }); return this.refreshTestData(undefined, { forceRefresh: true }); }; - this.pythonTestServer = new PythonTestServer(this.pythonExecFactory, this.debugLauncher); } public async activate(): Promise { - traceVerbose('Waiting for test server to start...'); - await this.pythonTestServer.serverReady(); - traceVerbose('Test server started.'); const workspaces: readonly WorkspaceFolder[] = this.workspaceService.workspaceFolders || []; workspaces.forEach((workspace) => { const settings = this.configSettings.getSettings(workspace.uri); @@ -172,14 +165,12 @@ export class 
PythonTestController implements ITestController, IExtensionSingleAc testProvider = UNITTEST_PROVIDER; resultResolver = new PythonResultResolver(this.testController, testProvider, workspace.uri); discoveryAdapter = new UnittestTestDiscoveryAdapter( - this.pythonTestServer, this.configSettings, this.testOutputChannel, resultResolver, this.envVarsService, ); executionAdapter = new UnittestTestExecutionAdapter( - this.pythonTestServer, this.configSettings, this.testOutputChannel, resultResolver, @@ -189,14 +180,12 @@ export class PythonTestController implements ITestController, IExtensionSingleAc testProvider = PYTEST_PROVIDER; resultResolver = new PythonResultResolver(this.testController, testProvider, workspace.uri); discoveryAdapter = new PytestTestDiscoveryAdapter( - this.pythonTestServer, this.configSettings, this.testOutputChannel, resultResolver, this.envVarsService, ); executionAdapter = new PytestTestExecutionAdapter( - this.pythonTestServer, this.configSettings, this.testOutputChannel, resultResolver, @@ -391,7 +380,7 @@ export class PythonTestController implements ITestController, IExtensionSingleAc ); const dispose = token.onCancellationRequested(() => { - runInstance.appendOutput(`Run instance cancelled.\r\n`); + runInstance.appendOutput(`\nRun instance cancelled.\r\n`); runInstance.end(); }); @@ -466,6 +455,7 @@ export class PythonTestController implements ITestController, IExtensionSingleAc token, request.profile?.kind === TestRunProfileKind.Debug, this.pythonExecFactory, + this.debugLauncher, ); } // below is old way of running unittest execution @@ -489,6 +479,7 @@ export class PythonTestController implements ITestController, IExtensionSingleAc }), ); } finally { + traceVerbose('Finished running tests, ending runInstance.'); runInstance.appendOutput(`Finished running tests!\r\n`); runInstance.end(); dispose.dispose(); diff --git a/src/client/testing/testController/pytest/pytestDiscoveryAdapter.ts b/src/client/testing/testController/pytest/pytestDiscoveryAdapter.ts index f29a428f3d39c..fbea26593c3c9 100644 --- a/src/client/testing/testController/pytest/pytestDiscoveryAdapter.ts +++ b/src/client/testing/testController/pytest/pytestDiscoveryAdapter.ts @@ -12,19 +12,14 @@ import { IConfigurationService, ITestOutputChannel } from '../../../common/types import { Deferred, createDeferred } from '../../../common/utils/async'; import { EXTENSION_ROOT_DIR } from '../../../constants'; import { traceError, traceInfo, traceVerbose, traceWarn } from '../../../logging'; -import { - DataReceivedEvent, - DiscoveredTestPayload, - ITestDiscoveryAdapter, - ITestResultResolver, - ITestServer, -} from '../common/types'; +import { DiscoveredTestPayload, EOTTestPayload, ITestDiscoveryAdapter, ITestResultResolver } from '../common/types'; import { MESSAGE_ON_TESTING_OUTPUT_MOVE, createDiscoveryErrorPayload, createEOTPayload, createTestingDeferred, fixLogLinesNoTrailing, + startDiscoveryNamedPipe, addValueIfKeyNotExist, } from '../common/utils'; import { IEnvironmentVariablesProvider } from '../../../common/variables/types'; @@ -34,7 +29,6 @@ import { IEnvironmentVariablesProvider } from '../../../common/variables/types'; */ export class PytestTestDiscoveryAdapter implements ITestDiscoveryAdapter { constructor( - public testServer: ITestServer, public configSettings: IConfigurationService, private readonly outputChannel: ITestOutputChannel, private readonly resultResolver?: ITestResultResolver, @@ -42,29 +36,30 @@ export class PytestTestDiscoveryAdapter implements ITestDiscoveryAdapter { ) {} async 
discoverTests(uri: Uri, executionFactory?: IPythonExecutionFactory): Promise { - const uuid = this.testServer.createUUID(uri.fsPath); const deferredTillEOT: Deferred = createDeferred(); - const dataReceivedDisposable = this.testServer.onDiscoveryDataReceived(async (e: DataReceivedEvent) => { - this.resultResolver?.resolveDiscovery(JSON.parse(e.data), deferredTillEOT); + + const { name, dispose } = await startDiscoveryNamedPipe((data: DiscoveredTestPayload | EOTTestPayload) => { + this.resultResolver?.resolveDiscovery(data, deferredTillEOT); }); - const disposeDataReceiver = function (testServer: ITestServer) { - traceInfo(`Disposing data receiver for ${uri.fsPath} and deleting UUID; pytest discovery.`); - testServer.deleteUUID(uuid); - dataReceivedDisposable.dispose(); - }; + try { - await this.runPytestDiscovery(uri, uuid, executionFactory); + await this.runPytestDiscovery(uri, name, deferredTillEOT, executionFactory); } finally { await deferredTillEOT.promise; - traceVerbose(`deferredTill EOT resolved for ${uri.fsPath}`); - disposeDataReceiver(this.testServer); + traceVerbose('deferredTill EOT resolved'); + dispose(); } // this is only a placeholder to handle function overloading until rewrite is finished const discoveryPayload: DiscoveredTestPayload = { cwd: uri.fsPath, status: 'success' }; return discoveryPayload; } - async runPytestDiscovery(uri: Uri, uuid: string, executionFactory?: IPythonExecutionFactory): Promise { + async runPytestDiscovery( + uri: Uri, + discoveryPipeName: string, + deferredTillEOT: Deferred, + executionFactory?: IPythonExecutionFactory, + ): Promise { const relativePathToPytest = 'python_files'; const fullPluginPath = path.join(EXTENSION_ROOT_DIR, relativePathToPytest); const settings = this.configSettings.getSettings(uri); @@ -86,13 +81,8 @@ export class PytestTestDiscoveryAdapter implements ITestDiscoveryAdapter { const pythonPathParts: string[] = mutableEnv.PYTHONPATH?.split(path.delimiter) ?? []; const pythonPathCommand = [fullPluginPath, ...pythonPathParts].join(path.delimiter); mutableEnv.PYTHONPATH = pythonPathCommand; - mutableEnv.TEST_UUID = uuid.toString(); - mutableEnv.TEST_PORT = this.testServer.getPort().toString(); - traceInfo( - `All environment variables set for pytest discovery for workspace ${uri.fsPath}: ${JSON.stringify( - mutableEnv, - )} \n`, - ); + mutableEnv.TEST_RUN_PIPE = discoveryPipeName; + traceInfo(`All environment variables set for pytest discovery: ${JSON.stringify(mutableEnv)}`); const spawnOptions: SpawnOptions = { cwd, throwOnStdErr: true, @@ -141,16 +131,8 @@ export class PytestTestDiscoveryAdapter implements ITestDiscoveryAdapter { traceError( `Subprocess exited unsuccessfully with exit code ${code} and signal ${signal} on workspace ${uri.fsPath}. Creating and sending error discovery payload`, ); - // if the child process exited with a non-zero exit code, then we need to send the error payload. 
- this.testServer.triggerDiscoveryDataReceivedEvent({ - uuid, - data: JSON.stringify(createDiscoveryErrorPayload(code, signal, cwd)), - }); - // then send a EOT payload - this.testServer.triggerDiscoveryDataReceivedEvent({ - uuid, - data: JSON.stringify(createEOTPayload(true)), - }); + this.resultResolver?.resolveDiscovery(createDiscoveryErrorPayload(code, signal, cwd), deferredTillEOT); + this.resultResolver?.resolveDiscovery(createEOTPayload(false), deferredTillEOT); } // deferredTillEOT is resolved when all data sent on stdout and stderr is received, close event is only called when this occurs // due to the sync reading of the output. diff --git a/src/client/testing/testController/pytest/pytestExecutionAdapter.ts b/src/client/testing/testController/pytest/pytestExecutionAdapter.ts index 2f41d486ba3cd..5099efde179c9 100644 --- a/src/client/testing/testController/pytest/pytestExecutionAdapter.ts +++ b/src/client/testing/testController/pytest/pytestExecutionAdapter.ts @@ -7,13 +7,7 @@ import { ChildProcess } from 'child_process'; import { IConfigurationService, ITestOutputChannel } from '../../../common/types'; import { Deferred } from '../../../common/utils/async'; import { traceError, traceInfo, traceVerbose } from '../../../logging'; -import { - DataReceivedEvent, - ExecutionTestPayload, - ITestExecutionAdapter, - ITestResultResolver, - ITestServer, -} from '../common/types'; +import { EOTTestPayload, ExecutionTestPayload, ITestExecutionAdapter, ITestResultResolver } from '../common/types'; import { ExecutionFactoryCreateWithEnvironmentOptions, IPythonExecutionFactory, @@ -28,7 +22,6 @@ import { IEnvironmentVariablesProvider } from '../../../common/variables/types'; export class PytestTestExecutionAdapter implements ITestExecutionAdapter { constructor( - public testServer: ITestServer, public configSettings: IConfigurationService, private readonly outputChannel: ITestOutputChannel, private readonly resultResolver?: ITestResultResolver, @@ -43,44 +36,53 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { executionFactory?: IPythonExecutionFactory, debugLauncher?: ITestDebugLauncher, ): Promise { - const uuid = this.testServer.createUUID(uri.fsPath); - // deferredTillEOT is resolved when all data sent over payload is received + // deferredTillEOT awaits EOT message and deferredTillServerClose awaits named pipe server close const deferredTillEOT: Deferred = utils.createTestingDeferred(); + const deferredTillServerClose: Deferred = utils.createTestingDeferred(); - const dataReceivedDisposable = this.testServer.onRunDataReceived((e: DataReceivedEvent) => { - runInstance?.token.isCancellationRequested; - if (runInstance) { - const eParsed = JSON.parse(e.data); - this.resultResolver?.resolveExecution(eParsed, runInstance, deferredTillEOT); + // create callback to handle data received on the named pipe + const dataReceivedCallback = (data: ExecutionTestPayload | EOTTestPayload) => { + if (runInstance && !runInstance.token.isCancellationRequested) { + this.resultResolver?.resolveExecution(data, runInstance, deferredTillEOT); } else { traceError(`No run instance found, cannot resolve execution, for workspace ${uri.fsPath}.`); } - }); - const disposeDataReceiver = function (testServer: ITestServer) { - traceInfo(`Disposing data receiver for ${uri.fsPath} and deleting UUID; pytest execution.`); - testServer.deleteUUID(uuid); - dataReceivedDisposable.dispose(); }; + const { name, dispose: serverDispose } = await utils.startRunResultNamedPipe( + dataReceivedCallback, // 
callback to handle data received + deferredTillServerClose, // deferred to resolve when server closes + runInstance?.token, // token to cancel + ); runInstance?.token.onCancellationRequested(() => { traceInfo(`Test run cancelled, resolving 'till EOT' deferred for ${uri.fsPath}.`); + // if canceled, stop listening for results deferredTillEOT.resolve(); + serverDispose(); // this will resolve deferredTillServerClose + + const executionPayload: ExecutionTestPayload = { + cwd: uri.fsPath, + status: 'success', + error: '', + }; + return executionPayload; }); try { await this.runTestsNew( uri, testIds, - uuid, + name, + deferredTillEOT, + serverDispose, runInstance, debugBool, executionFactory, debugLauncher, - deferredTillEOT, ); } finally { + // wait for to send EOT await deferredTillEOT.promise; - traceVerbose('deferredTill EOT resolved'); - disposeDataReceiver(this.testServer); + await deferredTillServerClose.promise; } // placeholder until after the rewrite is adopted @@ -96,12 +98,13 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { private async runTestsNew( uri: Uri, testIds: string[], - uuid: string, + resultNamedPipeName: string, + deferredTillEOT: Deferred, + serverDispose: () => void, runInstance?: TestRun, debugBool?: boolean, executionFactory?: IPythonExecutionFactory, debugLauncher?: ITestDebugLauncher, - deferredTillEOT?: Deferred, ): Promise { const relativePathToPytest = 'python_files'; const fullPluginPath = path.join(EXTENSION_ROOT_DIR, relativePathToPytest); @@ -116,8 +119,7 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { const pythonPathParts: string[] = mutableEnv.PYTHONPATH?.split(path.delimiter) ?? []; const pythonPathCommand = [fullPluginPath, ...pythonPathParts].join(path.delimiter); mutableEnv.PYTHONPATH = pythonPathCommand; - mutableEnv.TEST_UUID = uuid.toString(); - mutableEnv.TEST_PORT = this.testServer.getPort().toString(); + mutableEnv.TEST_RUN_PIPE = resultNamedPipeName; // Create the Python environment in which to execute the command. 
const creationOptions: ExecutionFactoryCreateWithEnvironmentOptions = { @@ -141,13 +143,9 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { } // add port with run test ids to env vars - const pytestRunTestIdsPort = await utils.startTestIdServer(testIds); - mutableEnv.RUN_TEST_IDS_PORT = pytestRunTestIdsPort.toString(); - traceInfo( - `All environment variables set for pytest execution in ${uri.fsPath} workspace: \n ${JSON.stringify( - mutableEnv, - )}`, - ); + const testIdsPipeName = await utils.startTestIdsNamedPipe(testIds); + mutableEnv.RUN_TEST_IDS_PIPE = testIdsPipeName; + traceInfo(`All environment variables set for pytest execution: ${JSON.stringify(mutableEnv)}`); const spawnOptions: SpawnOptions = { cwd, @@ -155,22 +153,21 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { outputChannel: this.outputChannel, stdinStr: testIds.toString(), env: mutableEnv, + token: runInstance?.token, }; if (debugBool) { - const pytestPort = this.testServer.getPort().toString(); - const pytestUUID = uuid.toString(); const launchOptions: LaunchOptions = { cwd, args: testArgs, - token: spawnOptions.token, + token: runInstance?.token, testProvider: PYTEST_PROVIDER, - pytestPort, - pytestUUID, - runTestIdsPort: pytestRunTestIdsPort.toString(), + runTestIdsPort: testIdsPipeName, + pytestPort: resultNamedPipeName, }; traceInfo(`Running DEBUG pytest with arguments: ${testArgs} for workspace ${uri.fsPath} \r\n`); await debugLauncher!.launchDebugger(launchOptions, () => { + serverDispose(); // this will resolve deferredTillServerClose deferredTillEOT?.resolve(); }); } else { @@ -189,7 +186,7 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { if (resultProc) { resultProc?.kill(); } else { - deferredTillExecClose?.resolve(); + deferredTillExecClose.resolve(); } }); @@ -226,21 +223,28 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { traceError( `Subprocess closed unsuccessfully with exit code ${code} and signal ${signal} for workspace ${uri.fsPath}. Creating and sending error execution payload \n`, ); - this.testServer.triggerRunDataReceivedEvent({ - uuid, - data: JSON.stringify(utils.createExecutionErrorPayload(code, signal, testIds, cwd)), - }); - // then send a EOT payload - this.testServer.triggerRunDataReceivedEvent({ - uuid, - data: JSON.stringify(utils.createEOTPayload(true)), - }); + + if (runInstance) { + this.resultResolver?.resolveExecution( + utils.createExecutionErrorPayload(code, signal, testIds, cwd), + runInstance, + deferredTillEOT, + ); + this.resultResolver?.resolveExecution( + utils.createEOTPayload(true), + runInstance, + deferredTillEOT, + ); + } + // this doesn't work, it instead directs us to the noop one which is defined first + // potentially this is due to the server already being close, if this is the case? + serverDispose(); // this will resolve deferredTillServerClose } // deferredTillEOT is resolved when all data sent on stdout and stderr is received, close event is only called when this occurs // due to the sync reading of the output. 
- deferredTillExecClose?.resolve(); + deferredTillExecClose.resolve(); }); - await deferredTillExecClose?.promise; + await deferredTillExecClose.promise; } } catch (ex) { traceError(`Error while running tests for workspace ${uri}: ${testIds}\r\n${ex}\r\n\r\n`); diff --git a/src/client/testing/testController/unittest/testDiscoveryAdapter.ts b/src/client/testing/testController/unittest/testDiscoveryAdapter.ts index 8cc44b3783c50..8e6edcc16b567 100644 --- a/src/client/testing/testController/unittest/testDiscoveryAdapter.ts +++ b/src/client/testing/testController/unittest/testDiscoveryAdapter.ts @@ -6,82 +6,160 @@ import { Uri } from 'vscode'; import { IConfigurationService, ITestOutputChannel } from '../../../common/types'; import { EXTENSION_ROOT_DIR } from '../../../constants'; import { - DataReceivedEvent, DiscoveredTestPayload, + EOTTestPayload, ITestDiscoveryAdapter, ITestResultResolver, - ITestServer, TestCommandOptions, TestDiscoveryCommand, } from '../common/types'; import { Deferred, createDeferred } from '../../../common/utils/async'; import { EnvironmentVariables, IEnvironmentVariablesProvider } from '../../../common/variables/types'; +import { + ExecutionFactoryCreateWithEnvironmentOptions, + ExecutionResult, + IPythonExecutionFactory, + SpawnOptions, +} from '../../../common/process/types'; +import { + MESSAGE_ON_TESTING_OUTPUT_MOVE, + createDiscoveryErrorPayload, + createEOTPayload, + fixLogLinesNoTrailing, + startDiscoveryNamedPipe, +} from '../common/utils'; +import { traceError, traceInfo, traceLog, traceVerbose } from '../../../logging'; /** * Wrapper class for unittest test discovery. This is where we call `runTestCommand`. */ export class UnittestTestDiscoveryAdapter implements ITestDiscoveryAdapter { constructor( - public testServer: ITestServer, public configSettings: IConfigurationService, private readonly outputChannel: ITestOutputChannel, private readonly resultResolver?: ITestResultResolver, private readonly envVarsService?: IEnvironmentVariablesProvider, ) {} - public async discoverTests(uri: Uri): Promise { + public async discoverTests(uri: Uri, executionFactory?: IPythonExecutionFactory): Promise { const settings = this.configSettings.getSettings(uri); const { unittestArgs } = settings.testing; const cwd = settings.testing.cwd && settings.testing.cwd.length > 0 ? 
settings.testing.cwd : uri.fsPath; + + const deferredTillEOT: Deferred = createDeferred(); + + const { name, dispose } = await startDiscoveryNamedPipe((data: DiscoveredTestPayload | EOTTestPayload) => { + this.resultResolver?.resolveDiscovery(data, deferredTillEOT); + }); + + // set up env with the pipe name let env: EnvironmentVariables | undefined = await this.envVarsService?.getEnvironmentVariables(uri); if (env === undefined) { env = {} as EnvironmentVariables; } - const command = buildDiscoveryCommand(unittestArgs); + env.TEST_RUN_PIPE = name; - const uuid = this.testServer.createUUID(uri.fsPath); - const deferredTillEOT: Deferred = createDeferred(); + const command = buildDiscoveryCommand(unittestArgs); const options: TestCommandOptions = { workspaceFolder: uri, command, cwd, - uuid, outChannel: this.outputChannel, }; - const dataReceivedDisposable = this.testServer.onDiscoveryDataReceived((e: DataReceivedEvent) => { - this.resultResolver?.resolveDiscovery(JSON.parse(e.data), deferredTillEOT); - }); - const disposeDataReceiver = function (testServer: ITestServer) { - testServer.deleteUUID(uuid); - dataReceivedDisposable.dispose(); - }; - - await this.callSendCommand(options, env, () => { - disposeDataReceiver?.(this.testServer); - }); - await deferredTillEOT.promise; - disposeDataReceiver(this.testServer); + try { + await this.runDiscovery(uri, options, name, cwd, deferredTillEOT, executionFactory); + } finally { + await deferredTillEOT.promise; + traceVerbose('deferredTill EOT resolved'); + dispose(); + } // placeholder until after the rewrite is adopted // TODO: remove after adoption. - const discoveryPayload: DiscoveredTestPayload = { - cwd, - status: 'success', - }; + const discoveryPayload: DiscoveredTestPayload = { cwd, status: 'success' }; return discoveryPayload; } - private async callSendCommand( + async runDiscovery( + uri: Uri, options: TestCommandOptions, - env: EnvironmentVariables, - callback: () => void, - ): Promise { - await this.testServer.sendCommand(options, env, undefined, undefined, [], callback); - const discoveryPayload: DiscoveredTestPayload = { cwd: '', status: 'success' }; - return discoveryPayload; + testRunPipeName: string, + cwd: string, + deferredTillEOT: Deferred, + executionFactory?: IPythonExecutionFactory, + ): Promise { + // get and edit env vars + const mutableEnv = { + ...(await this.envVarsService?.getEnvironmentVariables(uri)), + }; + mutableEnv.TEST_RUN_PIPE = testRunPipeName; + + const spawnOptions: SpawnOptions = { + token: options.token, + cwd: options.cwd, + throwOnStdErr: true, + outputChannel: options.outChannel, + env: mutableEnv, + }; + // Create the Python environment in which to execute the command. + const creationOptions: ExecutionFactoryCreateWithEnvironmentOptions = { + allowEnvironmentFetchExceptions: false, + resource: options.workspaceFolder, + }; + const execService = await executionFactory?.createActivatedEnvironment(creationOptions); + + const args = [options.command.script].concat(options.command.args); + + if (options.outChannel) { + options.outChannel.appendLine(`python ${args.join(' ')}`); + } + + try { + traceLog(`Discovering unittest tests for workspace ${options.cwd} with arguments: ${args}\r\n`); + const deferredTillExecClose = createDeferred>(); + + const result = execService?.execObservable(args, spawnOptions); + + // Displays output to user and ensure the subprocess doesn't run into buffer overflow. 
+ // TODO: after a release, remove discovery output from the "Python Test Log" channel and send it to the "Python" channel instead. + // TODO: after a release, remove run output from the "Python Test Log" channel and send it to the "Test Result" channel instead. + result?.proc?.stdout?.on('data', (data) => { + const out = fixLogLinesNoTrailing(data.toString()); + spawnOptions?.outputChannel?.append(`${out}`); + traceInfo(out); + }); + result?.proc?.stderr?.on('data', (data) => { + const out = fixLogLinesNoTrailing(data.toString()); + spawnOptions?.outputChannel?.append(`${out}`); + traceError(out); + }); + + result?.proc?.on('exit', (code, signal) => { + // if the child has testIds then this is a run request + spawnOptions?.outputChannel?.append(MESSAGE_ON_TESTING_OUTPUT_MOVE); + + if (code !== 0) { + // This occurs when we are running discovery + traceError( + `Subprocess exited unsuccessfully with exit code ${code} and signal ${signal} on workspace ${options.cwd}. Creating and sending error discovery payload \n`, + ); + traceError( + `Subprocess exited unsuccessfully with exit code ${code} and signal ${signal} on workspace ${uri.fsPath}. Creating and sending error discovery payload`, + ); + this.resultResolver?.resolveDiscovery( + createDiscoveryErrorPayload(code, signal, cwd), + deferredTillEOT, + ); + this.resultResolver?.resolveDiscovery(createEOTPayload(false), deferredTillEOT); + } + deferredTillExecClose.resolve(); + }); + await deferredTillExecClose.promise; + } catch (ex) { + traceError(`Error while server attempting to run unittest command for workspace ${uri.fsPath}: ${ex}`); + } } } - function buildDiscoveryCommand(args: string[]): TestDiscoveryCommand { const discoveryScript = path.join(EXTENSION_ROOT_DIR, 'python_files', 'unittestadapter', 'discovery.py'); diff --git a/src/client/testing/testController/unittest/testExecutionAdapter.ts b/src/client/testing/testController/unittest/testExecutionAdapter.ts index 85fd01f093a66..edcfbfef9b632 100644 --- a/src/client/testing/testController/unittest/testExecutionAdapter.ts +++ b/src/client/testing/testController/unittest/testExecutionAdapter.ts @@ -3,21 +3,30 @@ import * as path from 'path'; import { TestRun, Uri } from 'vscode'; +import { ChildProcess } from 'child_process'; import { IConfigurationService, ITestOutputChannel } from '../../../common/types'; import { Deferred, createDeferred } from '../../../common/utils/async'; import { EXTENSION_ROOT_DIR } from '../../../constants'; import { - DataReceivedEvent, + EOTTestPayload, ExecutionTestPayload, ITestExecutionAdapter, ITestResultResolver, - ITestServer, TestCommandOptions, TestExecutionCommand, } from '../common/types'; import { traceError, traceInfo, traceLog } from '../../../logging'; -import { startTestIdServer } from '../common/utils'; +import { MESSAGE_ON_TESTING_OUTPUT_MOVE, fixLogLinesNoTrailing } from '../common/utils'; import { EnvironmentVariables, IEnvironmentVariablesProvider } from '../../../common/variables/types'; +import { + ExecutionFactoryCreateWithEnvironmentOptions, + ExecutionResult, + IPythonExecutionFactory, + SpawnOptions, +} from '../../../common/process/types'; +import { ITestDebugLauncher, LaunchOptions } from '../../common/types'; +import { UNITTEST_PROVIDER } from '../../common/constants'; +import * as utils from '../common/utils'; /** * Wrapper Class for unittest test execution. This is where we call `runTestCommand`? 
@@ -25,7 +34,6 @@ import { EnvironmentVariables, IEnvironmentVariablesProvider } from '../../../co export class UnittestTestExecutionAdapter implements ITestExecutionAdapter { constructor( - public testServer: ITestServer, public configSettings: IConfigurationService, private readonly outputChannel: ITestOutputChannel, private readonly resultResolver?: ITestResultResolver, @@ -37,73 +45,217 @@ export class UnittestTestExecutionAdapter implements ITestExecutionAdapter { testIds: string[], debugBool?: boolean, runInstance?: TestRun, + executionFactory?: IPythonExecutionFactory, + debugLauncher?: ITestDebugLauncher, ): Promise { - const uuid = this.testServer.createUUID(uri.fsPath); - const deferredTillEOT: Deferred = createDeferred(); - const disposedDataReceived = this.testServer.onRunDataReceived((e: DataReceivedEvent) => { - if (runInstance) { - this.resultResolver?.resolveExecution(JSON.parse(e.data), runInstance, deferredTillEOT); + // deferredTillEOT awaits EOT message and deferredTillServerClose awaits named pipe server close + const deferredTillEOT: Deferred = utils.createTestingDeferred(); + const deferredTillServerClose: Deferred = utils.createTestingDeferred(); + + // create callback to handle data received on the named pipe + const dataReceivedCallback = (data: ExecutionTestPayload | EOTTestPayload) => { + if (runInstance && !runInstance.token.isCancellationRequested) { + this.resultResolver?.resolveExecution(data, runInstance, deferredTillEOT); } else { - traceError('No run instance found, cannot resolve execution.'); + traceError(`No run instance found, cannot resolve execution, for workspace ${uri.fsPath}.`); } - }); - const disposeDataReceiver = function (testServer: ITestServer) { - traceInfo(`Disposing data receiver for ${uri.fsPath} and deleting UUID; unittest execution.`); - testServer.deleteUUID(uuid); - disposedDataReceived.dispose(); }; + const { name: resultNamedPipeName, dispose: serverDispose } = await utils.startRunResultNamedPipe( + dataReceivedCallback, // callback to handle data received + deferredTillServerClose, // deferred to resolve when server closes + runInstance?.token, // token to cancel + ); runInstance?.token.onCancellationRequested(() => { - traceInfo("Test run cancelled, resolving 'till EOT' deferred."); + console.log(`Test run cancelled, resolving 'till EOT' deferred for ${uri.fsPath}.`); + // if canceled, stop listening for results deferredTillEOT.resolve(); + // if canceled, close the server, resolves the deferredTillAllServerClose + deferredTillServerClose.resolve(); + serverDispose(); }); try { - await this.runTestsNew(uri, testIds, uuid, runInstance, debugBool, deferredTillEOT); - await deferredTillEOT.promise; - disposeDataReceiver(this.testServer); + await this.runTestsNew( + uri, + testIds, + resultNamedPipeName, + deferredTillEOT, + serverDispose, + runInstance, + debugBool, + executionFactory, + debugLauncher, + ); } catch (error) { traceError(`Error in running unittest tests: ${error}`); + } finally { + // wait for EOT + await deferredTillEOT.promise; + console.log('deferredTill EOT resolved'); + await deferredTillServerClose.promise; + console.log('Server closed await now resolved'); } - const executionPayload: ExecutionTestPayload = { cwd: uri.fsPath, status: 'success', error: '' }; + const executionPayload: ExecutionTestPayload = { + cwd: uri.fsPath, + status: 'success', + error: '', + }; return executionPayload; } private async runTestsNew( uri: Uri, testIds: string[], - uuid: string, + resultNamedPipeName: string, + deferredTillEOT: 
Deferred, + serverDispose: () => void, runInstance?: TestRun, debugBool?: boolean, - deferredTillEOT?: Deferred, + executionFactory?: IPythonExecutionFactory, + debugLauncher?: ITestDebugLauncher, ): Promise { const settings = this.configSettings.getSettings(uri); const { unittestArgs } = settings.testing; const cwd = settings.testing.cwd && settings.testing.cwd.length > 0 ? settings.testing.cwd : uri.fsPath; const command = buildExecutionCommand(unittestArgs); - let env: EnvironmentVariables | undefined = await this.envVarsService?.getEnvironmentVariables(uri); - if (env === undefined) { - env = {} as EnvironmentVariables; + let mutableEnv: EnvironmentVariables | undefined = await this.envVarsService?.getEnvironmentVariables(uri); + if (mutableEnv === undefined) { + mutableEnv = {} as EnvironmentVariables; } + const pythonPathParts: string[] = mutableEnv.PYTHONPATH?.split(path.delimiter) ?? []; + const pythonPathCommand = [cwd, ...pythonPathParts].join(path.delimiter); + mutableEnv.PYTHONPATH = pythonPathCommand; + mutableEnv.TEST_RUN_PIPE = resultNamedPipeName; const options: TestCommandOptions = { workspaceFolder: uri, command, cwd, - uuid, debugBool, testIds, outChannel: this.outputChannel, + token: runInstance?.token, }; traceLog(`Running UNITTEST execution for the following test ids: ${testIds}`); - const runTestIdsPort = await startTestIdServer(testIds); + // create named pipe server to send test ids + const testIdsPipeName = await utils.startTestIdsNamedPipe(testIds); + mutableEnv.RUN_TEST_IDS_PIPE = testIdsPipeName; + traceInfo(`All environment variables set for pytest execution: ${JSON.stringify(mutableEnv)}`); - await this.testServer.sendCommand(options, env, runTestIdsPort.toString(), runInstance, testIds, () => { - deferredTillEOT?.resolve(); - }); + const spawnOptions: SpawnOptions = { + token: options.token, + cwd: options.cwd, + throwOnStdErr: true, + outputChannel: options.outChannel, + env: mutableEnv, + }; + // Create the Python environment in which to execute the command. 
+ const creationOptions: ExecutionFactoryCreateWithEnvironmentOptions = { + allowEnvironmentFetchExceptions: false, + resource: options.workspaceFolder, + }; + const execService = await executionFactory?.createActivatedEnvironment(creationOptions); + const args = [options.command.script].concat(options.command.args); + + if (options.outChannel) { + options.outChannel.appendLine(`python ${args.join(' ')}`); + } + + try { + if (options.debugBool) { + const launchOptions: LaunchOptions = { + cwd: options.cwd, + args, + token: options.token, + testProvider: UNITTEST_PROVIDER, + runTestIdsPort: testIdsPipeName, + pytestPort: resultNamedPipeName, // change this from pytest + }; + traceInfo(`Running DEBUG unittest for workspace ${options.cwd} with arguments: ${args}\r\n`); + + if (debugLauncher === undefined) { + traceError('Debug launcher is not defined'); + throw new Error('Debug launcher is not defined'); + } + await debugLauncher.launchDebugger(launchOptions, () => { + serverDispose(); // this will resolve the deferredTillAllServerClose + deferredTillEOT?.resolve(); + }); + } else { + // This means it is running the test + traceInfo(`Running unittests for workspace ${cwd} with arguments: ${args}\r\n`); + + const deferredTillExecClose = createDeferred>(); + + let resultProc: ChildProcess | undefined; + + runInstance?.token.onCancellationRequested(() => { + traceInfo(`Test run cancelled, killing unittest subprocess for workspace ${cwd}.`); + // if the resultProc exists just call kill on it which will handle resolving the ExecClose deferred, otherwise resolve the deferred here. + if (resultProc) { + resultProc?.kill(); + } else { + deferredTillExecClose?.resolve(); + } + }); + + const result = execService?.execObservable(args, spawnOptions); + resultProc = result?.proc; + + // Displays output to user and ensure the subprocess doesn't run into buffer overflow. + // TODO: after a release, remove discovery output from the "Python Test Log" channel and send it to the "Python" channel instead. + // TODO: after a release, remove run output from the "Python Test Log" channel and send it to the "Test Result" channel instead. + + result?.proc?.stdout?.on('data', (data) => { + const out = fixLogLinesNoTrailing(data.toString()); + runInstance?.appendOutput(`${out}`); + spawnOptions?.outputChannel?.append(out); + }); + result?.proc?.stderr?.on('data', (data) => { + const out = fixLogLinesNoTrailing(data.toString()); + runInstance?.appendOutput(`${out}`); + spawnOptions?.outputChannel?.append(out); + }); + + result?.proc?.on('exit', (code, signal) => { + // if the child has testIds then this is a run request + spawnOptions?.outputChannel?.append(MESSAGE_ON_TESTING_OUTPUT_MOVE); + if (code !== 0 && testIds) { + // This occurs when we are running the test and there is an error which occurs. + + traceError( + `Subprocess exited unsuccessfully with exit code ${code} and signal ${signal} for workspace ${options.cwd}. 
Creating and sending error execution payload \n`, + ); + if (runInstance) { + this.resultResolver?.resolveExecution( + utils.createExecutionErrorPayload(code, signal, testIds, cwd), + runInstance, + deferredTillEOT, + ); + this.resultResolver?.resolveExecution( + utils.createEOTPayload(true), + runInstance, + deferredTillEOT, + ); + } + serverDispose(); + } + deferredTillExecClose.resolve(); + }); + await deferredTillExecClose.promise; + } + } catch (ex) { + traceError(`Error while running tests for workspace ${uri}: ${testIds}\r\n${ex}\r\n\r\n`); + return Promise.reject(ex); + } // placeholder until after the rewrite is adopted // TODO: remove after adoption. - const executionPayload: ExecutionTestPayload = { cwd, status: 'success', error: '' }; + const executionPayload: ExecutionTestPayload = { + cwd, + status: 'success', + error: '', + }; return executionPayload; } } diff --git a/src/test/testing/common/testingAdapter.test.ts b/src/test/testing/common/testingAdapter.test.ts index c52c4c8ba6aa1..2b6d4887cda10 100644 --- a/src/test/testing/common/testingAdapter.test.ts +++ b/src/test/testing/common/testingAdapter.test.ts @@ -8,13 +8,11 @@ import * as assert from 'assert'; import * as fs from 'fs'; import { PytestTestDiscoveryAdapter } from '../../../client/testing/testController/pytest/pytestDiscoveryAdapter'; import { ITestController, ITestResultResolver } from '../../../client/testing/testController/common/types'; -import { PythonTestServer } from '../../../client/testing/testController/common/server'; import { IPythonExecutionFactory } from '../../../client/common/process/types'; -import { ITestDebugLauncher } from '../../../client/testing/common/types'; import { IConfigurationService, ITestOutputChannel } from '../../../client/common/types'; import { IServiceContainer } from '../../../client/ioc/types'; import { EXTENSION_ROOT_DIR_FOR_TESTS, initialize } from '../../initialize'; -import { traceLog } from '../../../client/logging'; +import { traceError, traceLog } from '../../../client/logging'; import { PytestTestExecutionAdapter } from '../../../client/testing/testController/pytest/pytestExecutionAdapter'; import { UnittestTestDiscoveryAdapter } from '../../../client/testing/testController/unittest/testDiscoveryAdapter'; import { UnittestTestExecutionAdapter } from '../../../client/testing/testController/unittest/testExecutionAdapter'; @@ -25,9 +23,7 @@ import { IEnvironmentVariablesProvider } from '../../../client/common/variables/ suite('End to End Tests: test adapters', () => { let resultResolver: ITestResultResolver; - let pythonTestServer: PythonTestServer; let pythonExecFactory: IPythonExecutionFactory; - let debugLauncher: ITestDebugLauncher; let configService: IConfigurationService; let serviceContainer: IServiceContainer; let envVarsService: IEnvironmentVariablesProvider; @@ -75,13 +71,13 @@ suite('End to End Tests: test adapters', () => { try { fs.symlink(target, dest, 'dir', (err) => { if (err) { - console.error(err); + traceError(err); } else { - console.log('Symlink created successfully for end to end tests.'); + traceLog('Symlink created successfully for end to end tests.'); } }); } catch (err) { - console.error(err); + traceError(err); } }); @@ -89,13 +85,10 @@ suite('End to End Tests: test adapters', () => { // create objects that were injected configService = serviceContainer.get(IConfigurationService); pythonExecFactory = serviceContainer.get(IPythonExecutionFactory); - debugLauncher = serviceContainer.get(ITestDebugLauncher); testController = 
serviceContainer.get(ITestController); envVarsService = serviceContainer.get(IEnvironmentVariablesProvider); // create objects that were not injected - pythonTestServer = new PythonTestServer(pythonExecFactory, debugLauncher); - await pythonTestServer.serverReady(); testOutputChannel = typeMoq.Mock.ofType(); testOutputChannel @@ -115,22 +108,19 @@ suite('End to End Tests: test adapters', () => { // Whatever you need to return }); }); - teardown(async () => { - pythonTestServer.dispose(); - }); suiteTeardown(async () => { // remove symlink const dest = rootPathDiscoverySymlink; if (fs.existsSync(dest)) { fs.unlink(dest, (err) => { if (err) { - console.error(err); + traceError(err); } else { - console.log('Symlink removed successfully after tests.'); + traceLog('Symlink removed successfully after tests.'); } }); } else { - console.log('Symlink was not found to remove after tests, exiting successfully'); + traceLog('Symlink was not found to remove after tests, exiting successfully'); } }); test('unittest discovery adapter small workspace', async () => { @@ -144,6 +134,7 @@ suite('End to End Tests: test adapters', () => { workspaceUri = Uri.parse(rootPathSmallWorkspace); resultResolver = new PythonResultResolver(testController, unittestProvider, workspaceUri); let callCount = 0; + // const deferredTillEOT = createTestingDeferred(); resultResolver._resolveDiscovery = async (payload, _token?) => { traceLog(`resolveDiscovery ${payload}`); callCount = callCount + 1; @@ -157,14 +148,13 @@ suite('End to End Tests: test adapters', () => { // run unittest discovery const discoveryAdapter = new UnittestTestDiscoveryAdapter( - pythonTestServer, configService, testOutputChannel.object, resultResolver, envVarsService, ); - await discoveryAdapter.discoverTests(workspaceUri).finally(() => { + await discoveryAdapter.discoverTests(workspaceUri, pythonExecFactory).finally(() => { // verification after discovery is complete // 1. Check the status is "success" @@ -204,14 +194,13 @@ suite('End to End Tests: test adapters', () => { configService.getSettings(workspaceUri).testing.unittestArgs = ['-s', '.', '-p', '*test*.py']; // run discovery const discoveryAdapter = new UnittestTestDiscoveryAdapter( - pythonTestServer, configService, testOutputChannel.object, resultResolver, envVarsService, ); - await discoveryAdapter.discoverTests(workspaceUri).finally(() => { + await discoveryAdapter.discoverTests(workspaceUri, pythonExecFactory).finally(() => { // 1. Check the status is "success" assert.strictEqual( actualData.status, @@ -234,25 +223,23 @@ suite('End to End Tests: test adapters', () => { status: 'success' | 'error'; error?: string[]; }; + // set workspace to test workspace folder + workspaceUri = Uri.parse(rootPathSmallWorkspace); resultResolver = new PythonResultResolver(testController, pytestProvider, workspaceUri); let callCount = 0; resultResolver._resolveDiscovery = async (payload, _token?) 
=> { - traceLog(`resolveDiscovery ${payload}`); callCount = callCount + 1; actualData = payload; return Promise.resolve(); }; // run pytest discovery const discoveryAdapter = new PytestTestDiscoveryAdapter( - pythonTestServer, configService, testOutputChannel.object, resultResolver, envVarsService, ); - // set workspace to test workspace folder - workspaceUri = Uri.parse(rootPathSmallWorkspace); await discoveryAdapter.discoverTests(workspaceUri, pythonExecFactory).finally(() => { // verification after discovery is complete @@ -295,7 +282,6 @@ suite('End to End Tests: test adapters', () => { }; // run pytest discovery const discoveryAdapter = new PytestTestDiscoveryAdapter( - pythonTestServer, configService, testOutputChannel.object, resultResolver, @@ -372,7 +358,6 @@ suite('End to End Tests: test adapters', () => { }; // run pytest discovery const discoveryAdapter = new PytestTestDiscoveryAdapter( - pythonTestServer, configService, testOutputChannel.object, resultResolver, @@ -427,7 +412,6 @@ suite('End to End Tests: test adapters', () => { configService.getSettings(workspaceUri).testing.unittestArgs = ['-s', '.', '-p', '*test*.py']; // run execution const executionAdapter = new UnittestTestExecutionAdapter( - pythonTestServer, configService, testOutputChannel.object, resultResolver, @@ -451,7 +435,13 @@ suite('End to End Tests: test adapters', () => { }) .returns(() => false); await executionAdapter - .runTests(workspaceUri, ['test_simple.SimpleClass.test_simple_unit'], false, testRun.object) + .runTests( + workspaceUri, + ['test_simple.SimpleClass.test_simple_unit'], + false, + testRun.object, + pythonExecFactory, + ) .finally(() => { // verify that the _resolveExecution was called once per test assert.strictEqual(callCount, 1, 'Expected _resolveExecution to be called once'); @@ -502,7 +492,6 @@ suite('End to End Tests: test adapters', () => { // run unittest execution const executionAdapter = new UnittestTestExecutionAdapter( - pythonTestServer, configService, testOutputChannel.object, resultResolver, @@ -526,7 +515,13 @@ suite('End to End Tests: test adapters', () => { }) .returns(() => false); await executionAdapter - .runTests(workspaceUri, ['test_parameterized_subtest.NumbersTest.test_even'], false, testRun.object) + .runTests( + workspaceUri, + ['test_parameterized_subtest.NumbersTest.test_even'], + false, + testRun.object, + pythonExecFactory, + ) .then(() => { // verify that the _resolveExecution was called once per test assert.strictEqual(callCount, 2000, 'Expected _resolveExecution to be called once'); @@ -572,7 +567,6 @@ suite('End to End Tests: test adapters', () => { // run pytest execution const executionAdapter = new PytestTestExecutionAdapter( - pythonTestServer, configService, testOutputChannel.object, resultResolver, @@ -670,7 +664,6 @@ suite('End to End Tests: test adapters', () => { // run pytest execution const executionAdapter = new PytestTestExecutionAdapter( - pythonTestServer, configService, testOutputChannel.object, resultResolver, @@ -743,7 +736,6 @@ suite('End to End Tests: test adapters', () => { configService.getSettings(workspaceUri).testing.unittestArgs = ['-s', '.', '-p', '*test*.py']; const discoveryAdapter = new UnittestTestDiscoveryAdapter( - pythonTestServer, configService, testOutputChannel.object, resultResolver, @@ -758,7 +750,7 @@ suite('End to End Tests: test adapters', () => { onCancellationRequested: () => undefined, } as any), ); - await discoveryAdapter.discoverTests(workspaceUri).finally(() => { + await 
discoveryAdapter.discoverTests(workspaceUri, pythonExecFactory).finally(() => { assert.strictEqual(callCount, 1, 'Expected _resolveDiscovery to be called once'); assert.strictEqual(failureOccurred, false, failureMsg); }); @@ -803,7 +795,6 @@ suite('End to End Tests: test adapters', () => { }; // run pytest discovery const discoveryAdapter = new PytestTestDiscoveryAdapter( - pythonTestServer, configService, testOutputChannel.object, resultResolver, @@ -879,7 +870,6 @@ suite('End to End Tests: test adapters', () => { // run pytest execution const executionAdapter = new UnittestTestExecutionAdapter( - pythonTestServer, configService, testOutputChannel.object, resultResolver, @@ -894,7 +884,7 @@ suite('End to End Tests: test adapters', () => { onCancellationRequested: () => undefined, } as any), ); - await executionAdapter.runTests(workspaceUri, testIds, false, testRun.object).finally(() => { + await executionAdapter.runTests(workspaceUri, testIds, false, testRun.object, pythonExecFactory).finally(() => { assert.strictEqual(callCount, 1, 'Expected _resolveExecution to be called once'); assert.strictEqual(failureOccurred, false, failureMsg); }); @@ -929,7 +919,7 @@ suite('End to End Tests: test adapters', () => { failureMsg = err ? (err as Error).toString() : ''; failureOccurred = true; } - return Promise.resolve(); + // return Promise.resolve(); }; const testId = `${rootPathErrorWorkspace}/test_seg_fault.py::TestSegmentationFault::test_segfault`; @@ -941,7 +931,6 @@ suite('End to End Tests: test adapters', () => { // run pytest execution const executionAdapter = new PytestTestExecutionAdapter( - pythonTestServer, configService, testOutputChannel.object, resultResolver, diff --git a/src/test/testing/common/testingPayloadsEot.test.ts b/src/test/testing/common/testingPayloadsEot.test.ts deleted file mode 100644 index 2b8b9c0667dfe..0000000000000 --- a/src/test/testing/common/testingPayloadsEot.test.ts +++ /dev/null @@ -1,222 +0,0 @@ -/* eslint-disable @typescript-eslint/no-explicit-any */ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-import { TestController, TestRun, Uri } from 'vscode'; -import * as typeMoq from 'typemoq'; -import * as path from 'path'; -import * as assert from 'assert'; -import * as net from 'net'; -import { Observable } from 'rxjs'; -import * as crypto from 'crypto'; -// import { PytestTestDiscoveryAdapter } from '../../../client/testing/testController/pytest/pytestDiscoveryAdapter'; -import * as sinon from 'sinon'; -import { ITestController, ITestResultResolver } from '../../../client/testing/testController/common/types'; -import { PythonTestServer } from '../../../client/testing/testController/common/server'; -import { IPythonExecutionFactory, IPythonExecutionService, Output } from '../../../client/common/process/types'; -import { ITestDebugLauncher } from '../../../client/testing/common/types'; -import { IConfigurationService, ITestOutputChannel } from '../../../client/common/types'; -import { IServiceContainer } from '../../../client/ioc/types'; -import { initialize } from '../../initialize'; -import { PytestTestExecutionAdapter } from '../../../client/testing/testController/pytest/pytestExecutionAdapter'; -import { PythonResultResolver } from '../../../client/testing/testController/common/resultResolver'; -import { PYTEST_PROVIDER } from '../../../client/testing/common/constants'; -import { MockChildProcess } from '../../mocks/mockChildProcess'; -import { - PAYLOAD_SINGLE_CHUNK, - PAYLOAD_MULTI_CHUNK, - PAYLOAD_SPLIT_ACROSS_CHUNKS_ARRAY, - DataWithPayloadChunks, - PAYLOAD_SPLIT_MULTI_CHUNK_ARRAY, - PAYLOAD_ONLY_HEADER_MULTI_CHUNK, -} from '../testController/payloadTestCases'; -import { traceLog } from '../../../client/logging'; - -const FAKE_UUID = 'fake-u-u-i-d'; -export interface TestCase { - name: string; - value: DataWithPayloadChunks; -} - -const testCases: Array = [ - { - name: 'header in single chunk edge case', - value: PAYLOAD_ONLY_HEADER_MULTI_CHUNK(FAKE_UUID), - }, - { - name: 'single payload single chunk', - value: PAYLOAD_SINGLE_CHUNK(FAKE_UUID), - }, - { - name: 'multiple payloads per buffer chunk', - value: PAYLOAD_MULTI_CHUNK(FAKE_UUID), - }, - { - name: 'single payload across multiple buffer chunks', - value: PAYLOAD_SPLIT_ACROSS_CHUNKS_ARRAY(FAKE_UUID), - }, - { - name: 'two chunks, payload split and two payloads in a chunk', - value: PAYLOAD_SPLIT_MULTI_CHUNK_ARRAY(FAKE_UUID), - }, -]; - -suite('EOT tests', () => { - let resultResolver: ITestResultResolver; - let pythonTestServer: PythonTestServer; - let debugLauncher: ITestDebugLauncher; - let configService: IConfigurationService; - let serviceContainer: IServiceContainer; - let workspaceUri: Uri; - let testOutputChannel: typeMoq.IMock; - let testController: TestController; - let stubExecutionFactory: typeMoq.IMock; - let client: net.Socket; - let mockProc: MockChildProcess; - const sandbox = sinon.createSandbox(); - // const unittestProvider: TestProvider = UNITTEST_PROVIDER; - // const pytestProvider: TestProvider = PYTEST_PROVIDER; - const rootPathSmallWorkspace = path.join('src'); - suiteSetup(async () => { - serviceContainer = (await initialize()).serviceContainer; - }); - - setup(async () => { - // create objects that were injected - configService = serviceContainer.get(IConfigurationService); - debugLauncher = serviceContainer.get(ITestDebugLauncher); - testController = serviceContainer.get(ITestController); - - // create client to act as python server which sends testing result response - client = new net.Socket(); - client.on('error', (error) => { - traceLog('Socket connection error:', error); - }); - - mockProc = 
new MockChildProcess('', ['']); - const output2 = new Observable>(() => { - /* no op */ - }); - - // stub out execution service and factory so mock data is returned from client. - const stubExecutionService = ({ - execObservable: () => { - client.connect(pythonTestServer.getPort()); - return { - proc: mockProc, - out: output2, - dispose: () => { - /* no-body */ - }, - }; - }, - } as unknown) as IPythonExecutionService; - - stubExecutionFactory = typeMoq.Mock.ofType(); - stubExecutionFactory - .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) - .returns(() => Promise.resolve(stubExecutionService)); - - // stub create UUID - - const v4Stub = sandbox.stub(crypto, 'randomUUID'); - v4Stub.returns(FAKE_UUID); - - // create python test server - pythonTestServer = new PythonTestServer(stubExecutionFactory.object, debugLauncher); - await pythonTestServer.serverReady(); - // handles output from client - testOutputChannel = typeMoq.Mock.ofType(); - testOutputChannel - .setup((x) => x.append(typeMoq.It.isAny())) - .callback((appendVal: any) => { - traceLog('out - ', appendVal.toString()); - }) - .returns(() => { - // Whatever you need to return - }); - testOutputChannel - .setup((x) => x.appendLine(typeMoq.It.isAny())) - .callback((appendVal: any) => { - traceLog('outL - ', appendVal.toString()); - }) - .returns(() => { - // Whatever you need to return - }); - }); - teardown(async () => { - pythonTestServer.dispose(); - sandbox.restore(); - }); - testCases.forEach((testCase) => { - test(`Testing Payloads: ${testCase.name}`, async () => { - let actualCollectedResult = ''; - client.on('connect', async () => { - traceLog('socket connected, sending stubbed data'); - // payload is a string array, each string represents one line written to the buffer - const { payloadArray } = testCase.value; - for (let i = 0; i < payloadArray.length; i = i + 1) { - await (async (clientSub, payloadSub) => { - if (!clientSub.write(payloadSub)) { - // If write returns false, wait for the 'drain' event before proceeding - await new Promise((resolve) => clientSub.once('drain', resolve)); - } - })(client, payloadArray[i]); - } - mockProc.emit('close', 0, null); - client.end(); - }); - let errorBool = false; - let errorMessage = ''; - resultResolver = new PythonResultResolver(testController, PYTEST_PROVIDER, workspaceUri); - resultResolver._resolveExecution = async (payload, _token?) => { - // the payloads that get to the _resolveExecution are all data and should be successful. 
- actualCollectedResult = actualCollectedResult + JSON.stringify(payload.result); - if (payload.status !== 'success') { - errorBool = true; - errorMessage = "Expected status to be 'success'"; - } - if (!payload.result) { - errorBool = true; - errorMessage = 'Expected results to be present'; - } - - return Promise.resolve(); - }; - // set workspace to test workspace folder - workspaceUri = Uri.parse(rootPathSmallWorkspace); - - // run pytest execution - const executionAdapter = new PytestTestExecutionAdapter( - pythonTestServer, - configService, - testOutputChannel.object, - resultResolver, - ); - const testRun = typeMoq.Mock.ofType(); - testRun - .setup((t) => t.token) - .returns( - () => - ({ - onCancellationRequested: () => undefined, - } as any), - ); - await executionAdapter - .runTests( - workspaceUri, - [`${rootPathSmallWorkspace}/test_simple.py::test_a`], - false, - testRun.object, - stubExecutionFactory.object, - ) - .then(() => { - assert.strictEqual( - testCase.value.data, - actualCollectedResult, - "Expected collected result to match 'data'", - ); - assert.strictEqual(errorBool, false, errorMessage); - }); - }); - }); -}); diff --git a/src/test/testing/testController/payloadTestCases.ts b/src/test/testing/testController/payloadTestCases.ts index f7f94a926f5ff..af33b46c5a36f 100644 --- a/src/test/testing/testController/payloadTestCases.ts +++ b/src/test/testing/testController/payloadTestCases.ts @@ -23,7 +23,7 @@ const SINGLE_UNITTEST_SUBTEST = { }, }; -const SINGLE_PYTEST_PAYLOAD = { +export const SINGLE_PYTEST_PAYLOAD = { cwd: 'path/to', status: 'success', result: { @@ -73,6 +73,13 @@ Request-uuid: ${uuid} ${JSON.stringify(data)}`; } +export function createPayload2(data: unknown): string { + return `Content-Length: ${JSON.stringify(data).length} +Content-Type: application/json + +${JSON.stringify(data)}`; +} + export function PAYLOAD_SINGLE_CHUNK(uuid: string): DataWithPayloadChunks { const payload = createPayload(uuid, SINGLE_UNITTEST_SUBTEST); diff --git a/src/test/testing/testController/pytest/pytestDiscoveryAdapter.unit.test.ts b/src/test/testing/testController/pytest/pytestDiscoveryAdapter.unit.test.ts index 3b1e8fec6d6d4..ab41ef12b7264 100644 --- a/src/test/testing/testController/pytest/pytestDiscoveryAdapter.unit.test.ts +++ b/src/test/testing/testController/pytest/pytestDiscoveryAdapter.unit.test.ts @@ -10,7 +10,6 @@ import * as fs from 'fs'; import * as sinon from 'sinon'; import { IConfigurationService, ITestOutputChannel } from '../../../../client/common/types'; import { PytestTestDiscoveryAdapter } from '../../../../client/testing/testController/pytest/pytestDiscoveryAdapter'; -import { ITestServer } from '../../../../client/testing/testController/common/types'; import { IPythonExecutionFactory, IPythonExecutionService, @@ -20,52 +19,46 @@ import { import { EXTENSION_ROOT_DIR } from '../../../../client/constants'; import { MockChildProcess } from '../../../mocks/mockChildProcess'; import { Deferred, createDeferred } from '../../../../client/common/utils/async'; +import * as util from '../../../../client/testing/testController/common/utils'; suite('pytest test discovery adapter', () => { - let testServer: typeMoq.IMock; let configService: IConfigurationService; let execFactory = typeMoq.Mock.ofType(); let adapter: PytestTestDiscoveryAdapter; let execService: typeMoq.IMock; let deferred: Deferred; let outputChannel: typeMoq.IMock; - let portNum: number; - let uuid: string; let expectedPath: string; let uri: Uri; let expectedExtraVariables: Record; let mockProc: 
MockChildProcess; let deferred2: Deferred; + let utilsStartDiscoveryNamedPipeStub: sinon.SinonStub; setup(() => { const mockExtensionRootDir = typeMoq.Mock.ofType(); mockExtensionRootDir.setup((m) => m.toString()).returns(() => '/mocked/extension/root/dir'); + utilsStartDiscoveryNamedPipeStub = sinon.stub(util, 'startDiscoveryNamedPipe'); + utilsStartDiscoveryNamedPipeStub.callsFake(() => + Promise.resolve({ + name: 'discoveryResultPipe-mockName', + dispose: () => { + /* no-op */ + }, + }), + ); + // constants - portNum = 12345; - uuid = 'uuid123'; expectedPath = path.join('/', 'my', 'test', 'path'); uri = Uri.file(expectedPath); const relativePathToPytest = 'python_files'; const fullPluginPath = path.join(EXTENSION_ROOT_DIR, relativePathToPytest); expectedExtraVariables = { PYTHONPATH: fullPluginPath, - TEST_UUID: uuid, - TEST_PORT: portNum.toString(), + TEST_RUN_PIPE: 'discoveryResultPipe-mockName', }; - // set up test server - testServer = typeMoq.Mock.ofType(); - testServer.setup((t) => t.getPort()).returns(() => portNum); - testServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => uuid); - testServer - .setup((t) => t.onDiscoveryDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => ({ - dispose: () => { - /* no-body */ - }, - })); - // set up config service configService = ({ getSettings: () => ({ @@ -109,8 +102,9 @@ suite('pytest test discovery adapter', () => { deferred.resolve(); return Promise.resolve(execService.object); }); + sinon.stub(fs, 'lstatSync').returns({ isFile: () => true, isSymbolicLink: () => false } as fs.Stats); - adapter = new PytestTestDiscoveryAdapter(testServer.object, configService, outputChannel.object); + adapter = new PytestTestDiscoveryAdapter(configService, outputChannel.object); adapter.discoverTests(uri, execFactory.object); // add in await and trigger await deferred.promise; @@ -161,7 +155,7 @@ suite('pytest test discovery adapter', () => { return Promise.resolve(execService.object); }); - adapter = new PytestTestDiscoveryAdapter(testServer.object, configServiceNew, outputChannel.object); + adapter = new PytestTestDiscoveryAdapter(configServiceNew, outputChannel.object); adapter.discoverTests(uri, execFactory.object); // add in await and trigger await deferred.promise; @@ -211,7 +205,7 @@ suite('pytest test discovery adapter', () => { return Promise.resolve(execService.object); }); - adapter = new PytestTestDiscoveryAdapter(testServer.object, configServiceNew, outputChannel.object); + adapter = new PytestTestDiscoveryAdapter(configServiceNew, outputChannel.object); adapter.discoverTests(uri, execFactory.object); // add in await and trigger await deferred.promise; diff --git a/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts b/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts index 26a19ed0fd095..b82a663cf86cd 100644 --- a/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts +++ b/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts @@ -8,7 +8,6 @@ import * as sinon from 'sinon'; import * as path from 'path'; import { Observable } from 'rxjs/Observable'; import { IConfigurationService, ITestOutputChannel } from '../../../../client/common/types'; -import { ITestServer } from '../../../../client/testing/testController/common/types'; import { IPythonExecutionFactory, IPythonExecutionService, @@ -24,7 +23,6 @@ import { MockChildProcess } from '../../../mocks/mockChildProcess'; import { traceInfo } from '../../../../client/logging'; 
suite('pytest test execution adapter', () => { - let testServer: typeMoq.IMock; let configService: IConfigurationService; let execFactory = typeMoq.Mock.ofType(); let adapter: PytestTestExecutionAdapter; @@ -35,17 +33,9 @@ suite('pytest test execution adapter', () => { (global as any).EXTENSION_ROOT_DIR = EXTENSION_ROOT_DIR; let myTestPath: string; let mockProc: MockChildProcess; - let utilsStartServerStub: sinon.SinonStub; + let utilsStartTestIdsNamedPipeStub: sinon.SinonStub; + let utilsStartRunResultNamedPipeStub: sinon.SinonStub; setup(() => { - testServer = typeMoq.Mock.ofType(); - testServer.setup((t) => t.getPort()).returns(() => 12345); - testServer - .setup((t) => t.onRunDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => ({ - dispose: () => { - /* no-body */ - }, - })); configService = ({ getSettings: () => ({ testing: { pytestArgs: ['.'] }, @@ -53,8 +43,6 @@ suite('pytest test execution adapter', () => { isTestExecution: () => false, } as unknown) as IConfigurationService; - // mock out the result resolver - // set up exec service with child process mockProc = new MockChildProcess('', ['']); const output = new Observable>(() => { @@ -75,7 +63,9 @@ suite('pytest test execution adapter', () => { }; }); execFactory = typeMoq.Mock.ofType(); - utilsStartServerStub = sinon.stub(util, 'startTestIdServer'); + + // added + utilsStartTestIdsNamedPipeStub = sinon.stub(util, 'startTestIdsNamedPipe'); debugLauncher = typeMoq.Mock.ofType(); execFactory .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) @@ -91,6 +81,16 @@ suite('pytest test execution adapter', () => { execService.setup((p) => ((p as unknown) as any).then).returns(() => undefined); debugLauncher.setup((p) => ((p as unknown) as any).then).returns(() => undefined); myTestPath = path.join('/', 'my', 'test', 'path', '/'); + + utilsStartRunResultNamedPipeStub = sinon.stub(util, 'startRunResultNamedPipe'); + utilsStartRunResultNamedPipeStub.callsFake(() => + Promise.resolve({ + name: 'runResultPipe-mockName', + dispose: () => { + /* no-op */ + }, + }), + ); }); teardown(() => { sinon.restore(); @@ -105,25 +105,22 @@ suite('pytest test execution adapter', () => { deferred2.resolve(); return Promise.resolve(execService.object); }); - utilsStartServerStub.callsFake(() => { + utilsStartTestIdsNamedPipeStub.callsFake(() => { deferred3.resolve(); - return Promise.resolve(54321); + return Promise.resolve({ + name: 'mockName', + dispose: () => { + /* no-op */ + }, + }); }); const testRun = typeMoq.Mock.ofType(); testRun.setup((t) => t.token).returns(() => ({ onCancellationRequested: () => undefined } as any)); const uri = Uri.file(myTestPath); - const uuid = 'uuid123'; - testServer - .setup((t) => t.onRunDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => ({ - dispose: () => { - /* no-body */ - }, - })); - testServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => uuid); const outputChannel = typeMoq.Mock.ofType(); - adapter = new PytestTestExecutionAdapter(testServer.object, configService, outputChannel.object); + adapter = new PytestTestExecutionAdapter(configService, outputChannel.object); const testIds = ['test1id', 'test2id']; + adapter.runTests(uri, testIds, false, testRun.object, execFactory.object); // add in await and trigger @@ -132,7 +129,7 @@ suite('pytest test execution adapter', () => { mockProc.trigger('close'); // assert - sinon.assert.calledWithExactly(utilsStartServerStub, testIds); + sinon.assert.calledWithExactly(utilsStartTestIdsNamedPipeStub, testIds); }); 
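
The unit tests in this file no longer spin up a real PythonTestServer; they stub the named-pipe helpers with sinon and then assert on the TEST_RUN_PIPE / RUN_TEST_IDS_PIPE values that reach execObservable. A minimal, self-contained version of that stubbing pattern, with a hypothetical pipeUtils object standing in for the common utils module, looks roughly like this:

import * as assert from 'assert';
import * as sinon from 'sinon';

// hypothetical stand-in for the testController common utils module
const pipeUtils = {
    startTestIdsNamedPipe: async (_testIds: string[]): Promise<string> => 'real-test-ids-pipe',
    startRunResultNamedPipe: async (): Promise<{ name: string; dispose(): void }> => ({
        name: 'real-run-result-pipe',
        dispose: () => {
            /* no-op */
        },
    }),
};

async function demo(): Promise<void> {
    // replace the helpers so the code under test never opens a real pipe
    const idsStub = sinon.stub(pipeUtils, 'startTestIdsNamedPipe').resolves('testIdPipe-mockName');
    sinon.stub(pipeUtils, 'startRunResultNamedPipe').resolves({
        name: 'runResultPipe-mockName',
        dispose: () => {
            /* no-op */
        },
    });

    // an adapter under test would call the helpers internally; call them directly here
    const testIds = ['test1id', 'test2id'];
    const idsPipe = await pipeUtils.startTestIdsNamedPipe(testIds);
    const resultPipe = await pipeUtils.startRunResultNamedPipe();

    assert.strictEqual(idsPipe, 'testIdPipe-mockName');
    assert.strictEqual(resultPipe.name, 'runResultPipe-mockName');
    sinon.assert.calledWithExactly(idsStub, testIds);

    sinon.restore();
}

demo().catch((err) => {
    console.error(err);
    process.exit(1);
});
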
test('pytest execution called with correct args', async () => { const deferred2 = createDeferred(); @@ -144,24 +141,15 @@ suite('pytest test execution adapter', () => { deferred2.resolve(); return Promise.resolve(execService.object); }); - utilsStartServerStub.callsFake(() => { + utilsStartTestIdsNamedPipeStub.callsFake(() => { deferred3.resolve(); - return Promise.resolve(54321); + return Promise.resolve('testIdPipe-mockName'); }); const testRun = typeMoq.Mock.ofType(); testRun.setup((t) => t.token).returns(() => ({ onCancellationRequested: () => undefined } as any)); const uri = Uri.file(myTestPath); - const uuid = 'uuid123'; - testServer - .setup((t) => t.onRunDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => ({ - dispose: () => { - /* no-body */ - }, - })); - testServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => uuid); const outputChannel = typeMoq.Mock.ofType(); - adapter = new PytestTestExecutionAdapter(testServer.object, configService, outputChannel.object); + adapter = new PytestTestExecutionAdapter(configService, outputChannel.object); adapter.runTests(uri, [], false, testRun.object, execFactory.object); await deferred2.promise; @@ -175,8 +163,8 @@ suite('pytest test execution adapter', () => { const expectedArgs = [pathToPythonScript, rootDirArg]; const expectedExtraVariables = { PYTHONPATH: pathToPythonFiles, - TEST_UUID: 'uuid123', - TEST_PORT: '12345', + TEST_RUN_PIPE: 'runResultPipe-mockName', + RUN_TEST_IDS_PIPE: 'testIdPipe-mockName', }; execService.verify( (x) => @@ -184,9 +172,8 @@ suite('pytest test execution adapter', () => { expectedArgs, typeMoq.It.is((options) => { assert.equal(options.env?.PYTHONPATH, expectedExtraVariables.PYTHONPATH); - assert.equal(options.env?.TEST_UUID, expectedExtraVariables.TEST_UUID); - assert.equal(options.env?.TEST_PORT, expectedExtraVariables.TEST_PORT); - assert.equal(options.env?.RUN_TEST_IDS_PORT, '54321'); + assert.equal(options.env?.TEST_RUN_PIPE, expectedExtraVariables.TEST_RUN_PIPE); + assert.equal(options.env?.RUN_TEST_IDS_PIPE, expectedExtraVariables.RUN_TEST_IDS_PIPE); assert.equal(options.cwd, uri.fsPath); assert.equal(options.throwOnStdErr, true); return true; @@ -205,9 +192,9 @@ suite('pytest test execution adapter', () => { deferred2.resolve(); return Promise.resolve(execService.object); }); - utilsStartServerStub.callsFake(() => { + utilsStartTestIdsNamedPipeStub.callsFake(() => { deferred3.resolve(); - return Promise.resolve(54321); + return Promise.resolve('testIdPipe-mockName'); }); const testRun = typeMoq.Mock.ofType(); testRun.setup((t) => t.token).returns(() => ({ onCancellationRequested: () => undefined } as any)); @@ -219,17 +206,8 @@ suite('pytest test execution adapter', () => { isTestExecution: () => false, } as unknown) as IConfigurationService; const uri = Uri.file(myTestPath); - const uuid = 'uuid123'; - testServer - .setup((t) => t.onRunDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => ({ - dispose: () => { - /* no-body */ - }, - })); - testServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => uuid); const outputChannel = typeMoq.Mock.ofType(); - adapter = new PytestTestExecutionAdapter(testServer.object, configService, outputChannel.object); + adapter = new PytestTestExecutionAdapter(configService, outputChannel.object); adapter.runTests(uri, [], false, testRun.object, execFactory.object); await deferred2.promise; @@ -242,8 +220,8 @@ suite('pytest test execution adapter', () => { const expectedArgs = [pathToPythonScript, 
`--rootdir=${newCwd}`]; const expectedExtraVariables = { PYTHONPATH: pathToPythonFiles, - TEST_UUID: 'uuid123', - TEST_PORT: '12345', + TEST_RUN_PIPE: 'runResultPipe-mockName', + RUN_TEST_IDS_PIPE: 'testIdPipe-mockName', }; execService.verify( @@ -252,9 +230,8 @@ suite('pytest test execution adapter', () => { expectedArgs, typeMoq.It.is((options) => { assert.equal(options.env?.PYTHONPATH, expectedExtraVariables.PYTHONPATH); - assert.equal(options.env?.TEST_UUID, expectedExtraVariables.TEST_UUID); - assert.equal(options.env?.TEST_PORT, expectedExtraVariables.TEST_PORT); - assert.equal(options.env?.RUN_TEST_IDS_PORT, '54321'); + assert.equal(options.env?.TEST_RUN_PIPE, expectedExtraVariables.TEST_RUN_PIPE); + assert.equal(options.env?.RUN_TEST_IDS_PIPE, expectedExtraVariables.RUN_TEST_IDS_PIPE); assert.equal(options.cwd, newCwd); assert.equal(options.throwOnStdErr, true); return true; @@ -266,9 +243,9 @@ suite('pytest test execution adapter', () => { test('Debug launched correctly for pytest', async () => { const deferred3 = createDeferred(); const deferredEOT = createDeferred(); - utilsStartServerStub.callsFake(() => { + utilsStartTestIdsNamedPipeStub.callsFake(() => { deferred3.resolve(); - return Promise.resolve(54321); + return Promise.resolve('testIdPipe-mockName'); }); debugLauncher .setup((dl) => dl.launchDebugger(typeMoq.It.isAny(), typeMoq.It.isAny())) @@ -288,17 +265,8 @@ suite('pytest test execution adapter', () => { } as any), ); const uri = Uri.file(myTestPath); - const uuid = 'uuid123'; - testServer - .setup((t) => t.onRunDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => ({ - dispose: () => { - /* no-body */ - }, - })); - testServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => uuid); const outputChannel = typeMoq.Mock.ofType(); - adapter = new PytestTestExecutionAdapter(testServer.object, configService, outputChannel.object); + adapter = new PytestTestExecutionAdapter(configService, outputChannel.object); await adapter.runTests(uri, [], true, testRun.object, execFactory.object, debugLauncher.object); await deferred3.promise; debugLauncher.verify( @@ -308,15 +276,14 @@ suite('pytest test execution adapter', () => { assert.equal(launchOptions.cwd, uri.fsPath); assert.deepEqual(launchOptions.args, [`--rootdir=${myTestPath}`, '--capture=no']); assert.equal(launchOptions.testProvider, 'pytest'); - assert.equal(launchOptions.pytestPort, '12345'); - assert.equal(launchOptions.pytestUUID, 'uuid123'); - assert.strictEqual(launchOptions.runTestIdsPort, '54321'); + assert.equal(launchOptions.pytestPort, 'runResultPipe-mockName'); + assert.strictEqual(launchOptions.runTestIdsPort, 'testIdPipe-mockName'); + assert.notEqual(launchOptions.token, undefined); return true; }), typeMoq.It.isAny(), ), typeMoq.Times.once(), ); - testServer.verify((x) => x.deleteUUID(typeMoq.It.isAny()), typeMoq.Times.once()); }); }); diff --git a/src/test/testing/testController/server.unit.test.ts b/src/test/testing/testController/server.unit.test.ts deleted file mode 100644 index 62f5b8327219d..0000000000000 --- a/src/test/testing/testController/server.unit.test.ts +++ /dev/null @@ -1,384 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-/* eslint-disable @typescript-eslint/no-explicit-any */ - -import * as assert from 'assert'; -import * as net from 'net'; -import * as sinon from 'sinon'; -import * as crypto from 'crypto'; -import { Observable } from 'rxjs'; -import * as typeMoq from 'typemoq'; -import { OutputChannel, Uri } from 'vscode'; -import { - IPythonExecutionFactory, - IPythonExecutionService, - ObservableExecutionResult, - Output, -} from '../../../client/common/process/types'; -import { PythonTestServer } from '../../../client/testing/testController/common/server'; -import { ITestDebugLauncher, LaunchOptions } from '../../../client/testing/common/types'; -import { Deferred, createDeferred } from '../../../client/common/utils/async'; -import { MockChildProcess } from '../../mocks/mockChildProcess'; -import { - PAYLOAD_MULTI_CHUNK, - PAYLOAD_SINGLE_CHUNK, - PAYLOAD_SPLIT_ACROSS_CHUNKS_ARRAY, - DataWithPayloadChunks, -} from './payloadTestCases'; -import { traceLog } from '../../../client/logging'; - -const testCases = [ - { - val: () => PAYLOAD_SINGLE_CHUNK('fake-uuid'), - }, - { - val: () => PAYLOAD_MULTI_CHUNK('fake-uuid'), - }, - { - val: () => PAYLOAD_SPLIT_ACROSS_CHUNKS_ARRAY('fake-uuid'), - }, -]; - -suite('Python Test Server, DataWithPayloadChunks', () => { - const FAKE_UUID = 'fake-uuid'; - let server: PythonTestServer; - let v4Stub: sinon.SinonStub; - let debugLauncher: ITestDebugLauncher; - let mockProc: MockChildProcess; - let execService: typeMoq.IMock; - let deferred: Deferred; - const sandbox = sinon.createSandbox(); - - setup(async () => { - // set up test command options - - v4Stub = sandbox.stub(crypto, 'randomUUID'); - v4Stub.returns(FAKE_UUID); - - // set up exec service with child process - mockProc = new MockChildProcess('', ['']); - execService = typeMoq.Mock.ofType(); - const outputObservable = new Observable>(() => { - /* no op */ - }); - execService - .setup((x) => x.execObservable(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => ({ - proc: mockProc, - out: outputObservable, - dispose: () => { - /* no-body */ - }, - })); - execService.setup((p) => ((p as unknown) as any).then).returns(() => undefined); - }); - - teardown(() => { - sandbox.restore(); - server.dispose(); - }); - - testCases.forEach((testCase) => { - test(`run correctly`, async () => { - const testCaseDataObj: DataWithPayloadChunks = testCase.val(); - let eventData = ''; - const client = new net.Socket(); - - deferred = createDeferred(); - mockProc = new MockChildProcess('', ['']); - const output2 = new Observable>(() => { - /* no op */ - }); - const stubExecutionService2 = ({ - execObservable: () => { - client.connect(server.getPort()); - return { - proc: mockProc, - out: output2, - dispose: () => { - /* no-body */ - }, - }; - }, - } as unknown) as IPythonExecutionService; - - const stubExecutionFactory2 = ({ - createActivatedEnvironment: () => Promise.resolve(stubExecutionService2), - } as unknown) as IPythonExecutionFactory; - server = new PythonTestServer(stubExecutionFactory2, debugLauncher); - const uuid = server.createUUID(); - const options = { - command: { script: 'myscript', args: ['-foo', 'foo'] }, - workspaceFolder: Uri.file('/foo/bar'), - cwd: '/foo/bar', - uuid, - }; - - const dataWithPayloadChunks = testCaseDataObj; - - await server.serverReady(); - let errorOccur = false; - let errorMessage = ''; - server.onRunDataReceived(({ data }) => { - try { - const resultData = JSON.parse(data).result; - eventData = eventData + JSON.stringify(resultData); - } catch (e) { - errorOccur = true; - errorMessage = 
'Error parsing data'; - } - deferred.resolve(); - }); - client.on('connect', () => { - traceLog('Socket connected, local port:', client.localPort); - // since this test is a single payload as a single chunk there should be a single line in the payload. - for (const line of dataWithPayloadChunks.payloadArray) { - client.write(line); - } - client.end(); - }); - client.on('error', (error) => { - traceLog('Socket connection error:', error); - }); - - server.sendCommand(options, {}); - await deferred.promise; - const expectedResult = dataWithPayloadChunks.data; - assert.deepStrictEqual(eventData, expectedResult); - assert.deepStrictEqual(errorOccur, false, errorMessage); - }); - }); -}); - -suite('Python Test Server, Send command etc', () => { - const FAKE_UUID = 'fake-uuid'; - let server: PythonTestServer; - let v4Stub: sinon.SinonStub; - let debugLauncher: ITestDebugLauncher; - let mockProc: MockChildProcess; - let execService: typeMoq.IMock; - let deferred: Deferred; - const sandbox = sinon.createSandbox(); - - setup(async () => { - // set up test command options - - v4Stub = sandbox.stub(crypto, 'randomUUID'); - v4Stub.returns(FAKE_UUID); - - // set up exec service with child process - mockProc = new MockChildProcess('', ['']); - execService = typeMoq.Mock.ofType(); - execService.setup((p) => ((p as unknown) as any).then).returns(() => undefined); - }); - - teardown(() => { - sandbox.restore(); - server.dispose(); - }); - test('sendCommand should add the port to the command being sent and add the correct extra spawn variables', async () => { - const deferred2 = createDeferred(); - const RUN_TEST_IDS_PORT_CONST = '5678'; - let error = false; - let errorMessage = ''; - execService - .setup((x) => x.execObservable(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns((_args, options2) => { - try { - assert.strictEqual( - options2.env.PYTHONPATH, - '/foo/bar', - 'Expect python path to exist as extra variable and be set correctly', - ); - assert.strictEqual( - options2.env.RUN_TEST_IDS_PORT, - RUN_TEST_IDS_PORT_CONST, - 'Expect test id port to be in extra variables and set correctly', - ); - assert.strictEqual( - options2.env.TEST_UUID, - FAKE_UUID, - 'Expect test uuid to be in extra variables and set correctly', - ); - assert.strictEqual( - options2.env.TEST_PORT, - '12345', - 'Expect server port to be set correctly as a env var', - ); - } catch (e) { - error = true; - errorMessage = `error occurred, assertion was incorrect, ${e}`; - } - return typeMoq.Mock.ofType>().object; - }); - const execFactory = typeMoq.Mock.ofType(); - execFactory - .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) - .returns(() => { - deferred2.resolve(); - return Promise.resolve(execService.object); - }); - server = new PythonTestServer(execFactory.object, debugLauncher); - sinon.stub(server, 'getPort').returns(12345); - // const portServer = server.getPort(); - await server.serverReady(); - const options = { - command: { script: 'myscript', args: ['-foo', 'foo'] }, - workspaceFolder: Uri.file('/foo/bar'), - cwd: '/foo/bar', - uuid: FAKE_UUID, - }; - try { - server.sendCommand(options, {}, RUN_TEST_IDS_PORT_CONST); - } catch (e) { - assert(false, `Error sending command, ${e}`); - } - // add in await and trigger - await deferred2.promise; - mockProc.trigger('close'); - - const expectedArgs = ['myscript', '-foo', 'foo']; - execService.verify((x) => x.execObservable(expectedArgs, typeMoq.It.isAny()), typeMoq.Times.once()); - if (error) { - assert(false, errorMessage); - } - }); - test('sendCommand should add 
right extra variables to command during debug', async () => { - const deferred2 = createDeferred(); - const RUN_TEST_IDS_PORT_CONST = '5678'; - const error = false; - const errorMessage = ''; - const debugLauncherMock = typeMoq.Mock.ofType(); - let actualLaunchOptions: LaunchOptions = {} as LaunchOptions; - const deferred4 = createDeferred(); - debugLauncherMock - .setup((x) => x.launchDebugger(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns((options, _) => { - actualLaunchOptions = options; - deferred4.resolve(); - return Promise.resolve(); - }); - execService - .setup((x) => x.execObservable(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => typeMoq.Mock.ofType>().object); - const execFactory = typeMoq.Mock.ofType(); - execFactory - .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) - .returns(() => { - deferred2.resolve(); - return Promise.resolve(execService.object); - }); - server = new PythonTestServer(execFactory.object, debugLauncherMock.object); - sinon.stub(server, 'getPort').returns(12345); - // const portServer = server.getPort(); - await server.serverReady(); - const options = { - command: { script: 'myscript', args: ['-foo', 'foo'] }, - workspaceFolder: Uri.file('/foo/bar'), - cwd: '/foo/bar', - uuid: FAKE_UUID, - debugBool: true, - }; - try { - server.sendCommand(options, {}, RUN_TEST_IDS_PORT_CONST); - } catch (e) { - assert(false, `Error sending command, ${e}`); - } - // add in await and trigger - await deferred2.promise; - await deferred4.promise; - mockProc.trigger('close'); - - assert.notDeepEqual(actualLaunchOptions, {}, 'launch options should be set'); - assert.strictEqual(actualLaunchOptions.cwd, '/foo/bar'); - assert.strictEqual(actualLaunchOptions.testProvider, 'unittest'); - assert.strictEqual(actualLaunchOptions.pytestPort, '12345'); - assert.strictEqual(actualLaunchOptions.pytestUUID, 'fake-uuid'); - assert.strictEqual(actualLaunchOptions.runTestIdsPort, '5678'); - - debugLauncherMock.verify((x) => x.launchDebugger(typeMoq.It.isAny(), typeMoq.It.isAny()), typeMoq.Times.once()); - if (error) { - assert(false, errorMessage); - } - }); - - test('sendCommand should write to an output channel if it is provided as an option', async () => { - const output2: string[] = []; - const outChannel = { - appendLine: (str: string) => { - output2.push(str); - }, - } as OutputChannel; - const options = { - command: { - script: 'myscript', - args: ['-foo', 'foo'], - }, - workspaceFolder: Uri.file('/foo/bar'), - cwd: '/foo/bar', - uuid: FAKE_UUID, - outChannel, - }; - deferred = createDeferred(); - const execFactory = typeMoq.Mock.ofType(); - execFactory - .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) - .returns(() => { - deferred.resolve(); - return Promise.resolve(execService.object); - }); - - server = new PythonTestServer(execFactory.object, debugLauncher); - await server.serverReady(); - - server.sendCommand(options, {}); - // add in await and trigger - await deferred.promise; - mockProc.trigger('close'); - - const expected = ['python', 'myscript', '-foo', 'foo'].join(' '); - assert.equal(output2.length, 1); - assert.deepStrictEqual(output2, [expected]); - }); - - test('If script execution fails during sendCommand, an onDataReceived event should be fired with the "error" status', async () => { - let eventData: { status: string; errors: string[] } | undefined; - const deferred2 = createDeferred(); - const deferred3 = createDeferred(); - const stubExecutionService = typeMoq.Mock.ofType(); - stubExecutionService.setup((p) => ((p as 
unknown) as any).then).returns(() => undefined); - stubExecutionService - .setup((x) => x.execObservable(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => { - deferred3.resolve(); - throw new Error('Failed to execute'); - }); - const options = { - command: { script: 'myscript', args: ['-foo', 'foo'] }, - workspaceFolder: Uri.file('/foo/bar'), - cwd: '/foo/bar', - uuid: FAKE_UUID, - }; - const stubExecutionFactory = typeMoq.Mock.ofType(); - stubExecutionFactory - .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) - .returns(() => { - deferred2.resolve(); - return Promise.resolve(stubExecutionService.object); - }); - - server = new PythonTestServer(stubExecutionFactory.object, debugLauncher); - await server.serverReady(); - - server.onDataReceived(({ data }) => { - eventData = JSON.parse(data); - }); - - server.sendCommand(options, {}); - await deferred2.promise; - await deferred3.promise; - assert.notEqual(eventData, undefined); - assert.deepStrictEqual(eventData?.status, 'error'); - assert.deepStrictEqual(eventData?.errors, ['Failed to execute']); - }); -}); diff --git a/src/test/testing/testController/testCancellationRunAdapters.unit.test.ts b/src/test/testing/testController/testCancellationRunAdapters.unit.test.ts index a0fb4eea85894..bddf057d4f0c7 100644 --- a/src/test/testing/testController/testCancellationRunAdapters.unit.test.ts +++ b/src/test/testing/testController/testCancellationRunAdapters.unit.test.ts @@ -11,35 +11,30 @@ import { IConfigurationService, ITestOutputChannel } from '../../../client/commo import { Deferred, createDeferred } from '../../../client/common/utils/async'; import { EXTENSION_ROOT_DIR } from '../../../client/constants'; import { ITestDebugLauncher } from '../../../client/testing/common/types'; -import { ITestServer } from '../../../client/testing/testController/common/types'; import { PytestTestExecutionAdapter } from '../../../client/testing/testController/pytest/pytestExecutionAdapter'; import { UnittestTestExecutionAdapter } from '../../../client/testing/testController/unittest/testExecutionAdapter'; import { MockChildProcess } from '../../mocks/mockChildProcess'; import * as util from '../../../client/testing/testController/common/utils'; +const adapters: Array = ['pytest', 'unittest']; + suite('Execution Flow Run Adapters', () => { - let testServer: typeMoq.IMock; + // define suit level variables let configService: IConfigurationService; - let execFactory = typeMoq.Mock.ofType(); - let adapter: PytestTestExecutionAdapter; - let execService: typeMoq.IMock; - let deferred: Deferred; + let execFactoryStub = typeMoq.Mock.ofType(); + let execServiceStub: typeMoq.IMock; + // let deferred: Deferred; let debugLauncher: typeMoq.IMock; (global as any).EXTENSION_ROOT_DIR = EXTENSION_ROOT_DIR; let myTestPath: string; let mockProc: MockChildProcess; - let utilsStartServerStub: sinon.SinonStub; + let utilsStartTestIdsNamedPipe: sinon.SinonStub; + let utilsStartRunResultNamedPipe: sinon.SinonStub; + let serverDisposeStub: sinon.SinonStub; setup(() => { - testServer = typeMoq.Mock.ofType(); - testServer.setup((t) => t.getPort()).returns(() => 12345); - testServer - .setup((t) => t.onRunDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => ({ - dispose: () => { - /* no-body */ - }, - })); + // general vars + myTestPath = path.join('/', 'my', 'test', 'path', '/'); configService = ({ getSettings: () => ({ testing: { pytestArgs: ['.'], unittestArgs: ['-v', '-s', '.', '-p', 'test*'] }, @@ -47,335 +42,206 @@ suite('Execution Flow Run 
Adapters', () => { isTestExecution: () => false, } as unknown) as IConfigurationService; - // mock out the result resolver + // set up execService and execFactory, all mocked + execServiceStub = typeMoq.Mock.ofType(); + execFactoryStub = typeMoq.Mock.ofType(); - // set up exec service with child process - mockProc = new MockChildProcess('', ['']); - const output = new Observable>(() => { - /* no op */ - }); - execService = typeMoq.Mock.ofType(); - execService - .setup((x) => x.execObservable(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => ({ - proc: mockProc, - out: output, - dispose: () => { - /* no-body */ - }, - })); - execFactory = typeMoq.Mock.ofType(); - utilsStartServerStub = sinon.stub(util, 'startTestIdServer'); + // mocked utility functions that handle pipe related functions + utilsStartTestIdsNamedPipe = sinon.stub(util, 'startTestIdsNamedPipe'); + utilsStartRunResultNamedPipe = sinon.stub(util, 'startRunResultNamedPipe'); + serverDisposeStub = sinon.stub(); + + // debug specific mocks debugLauncher = typeMoq.Mock.ofType(); - execFactory - .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) - .returns(() => Promise.resolve(execService.object)); - deferred = createDeferred(); - execService - .setup((x) => x.exec(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => { - deferred.resolve(); - return Promise.resolve({ stdout: '{}' }); - }); - execFactory.setup((p) => ((p as unknown) as any).then).returns(() => undefined); - execService.setup((p) => ((p as unknown) as any).then).returns(() => undefined); debugLauncher.setup((p) => ((p as unknown) as any).then).returns(() => undefined); - myTestPath = path.join('/', 'my', 'test', 'path', '/'); }); teardown(() => { sinon.restore(); }); - test('PYTEST cancelation token called mid-run resolves correctly', async () => { - // mock test run and cancelation token - const testRunMock = typeMoq.Mock.ofType(); - const cancellationToken = new CancellationTokenSource(); - const { token } = cancellationToken; - testRunMock.setup((t) => t.token).returns(() => token); - // mock exec service and exec factory - const execServiceMock = typeMoq.Mock.ofType(); - execServiceMock - .setup((x) => x.execObservable(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => { - cancellationToken.cancel(); - return { - proc: mockProc, - out: typeMoq.Mock.ofType>>().object, - dispose: () => { - /* no-body */ - }, - }; + adapters.forEach((adapter) => { + test(`Adapter ${adapter}: cancelation token called mid-run resolves correctly`, async () => { + // mock test run and cancelation token + const testRunMock = typeMoq.Mock.ofType(); + const cancellationToken = new CancellationTokenSource(); + const { token } = cancellationToken; + testRunMock.setup((t) => t.token).returns(() => token); + + // // mock exec service and exec factory + execServiceStub + .setup((x) => x.execObservable(typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns(() => { + cancellationToken.cancel(); + return { + proc: mockProc, + out: typeMoq.Mock.ofType>>().object, + dispose: () => { + /* no-body */ + }, + }; + }); + execFactoryStub + .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) + .returns(() => Promise.resolve(execServiceStub.object)); + execFactoryStub.setup((p) => ((p as unknown) as any).then).returns(() => undefined); + execServiceStub.setup((p) => ((p as unknown) as any).then).returns(() => undefined); + + // test ids named pipe mocking + const deferredStartTestIdsNamedPipe = createDeferred(); + utilsStartTestIdsNamedPipe.callsFake(() => { + 
deferredStartTestIdsNamedPipe.resolve(); + return Promise.resolve('named-pipe'); }); - const execFactoryMock = typeMoq.Mock.ofType(); - execFactoryMock - .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) - .returns(() => Promise.resolve(execServiceMock.object)); - execFactoryMock.setup((p) => ((p as unknown) as any).then).returns(() => undefined); - execServiceMock.setup((p) => ((p as unknown) as any).then).returns(() => undefined); - const deferredStartServer = createDeferred(); - utilsStartServerStub.callsFake(() => { - deferredStartServer.resolve(); - return Promise.resolve(54321); - }); - // mock EOT token & ExecClose token - const deferredEOT = createDeferred(); - const deferredExecClose = createDeferred(); - const utilsCreateEOTStub: sinon.SinonStub = sinon.stub(util, 'createTestingDeferred'); - utilsCreateEOTStub.callsFake(() => { - if (utilsCreateEOTStub.callCount === 1) { - return deferredEOT; - } - return deferredExecClose; - }); - // set up test server - testServer - .setup((t) => t.onRunDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => ({ - dispose: () => { - /* no-body */ - }, - })); - testServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => 'uuid123'); - adapter = new PytestTestExecutionAdapter( - testServer.object, - configService, - typeMoq.Mock.ofType().object, - ); - await adapter.runTests( - Uri.file(myTestPath), - [], - false, - testRunMock.object, - execFactoryMock.object, - debugLauncher.object, - ); - // wait for server to start to keep test from failing - await deferredStartServer.promise; + // run result pipe mocking and the related server close dispose + let deferredTillServerCloseTester: Deferred | undefined; + utilsStartRunResultNamedPipe.callsFake((_callback, deferredTillServerClose, _token) => { + deferredTillServerCloseTester = deferredTillServerClose; + return Promise.resolve({ name: 'named-pipes-socket-name', dispose: serverDisposeStub }); + }); + serverDisposeStub.callsFake(() => { + console.log('server disposed'); + if (deferredTillServerCloseTester) { + deferredTillServerCloseTester.resolve(); + } else { + console.log('deferredTillServerCloseTester is undefined'); + throw new Error( + 'deferredTillServerCloseTester is undefined, should be defined from startRunResultNamedPipe', + ); + } + }); - testServer.verify((x) => x.deleteUUID(typeMoq.It.isAny()), typeMoq.Times.once()); - }); - test('PYTEST cancelation token called mid-debug resolves correctly', async () => { - // mock test run and cancelation token - const testRunMock = typeMoq.Mock.ofType(); - const cancellationToken = new CancellationTokenSource(); - const { token } = cancellationToken; - testRunMock.setup((t) => t.token).returns(() => token); - // mock exec service and exec factory - const execServiceMock = typeMoq.Mock.ofType(); - debugLauncher - .setup((dl) => dl.launchDebugger(typeMoq.It.isAny(), typeMoq.It.isAny())) - .callback((_options, callback) => { - if (callback) { - callback(); + // mock EOT token & ExecClose token + const deferredEOT = createDeferred(); + const deferredExecClose = createDeferred(); + const utilsCreateEOTStub: sinon.SinonStub = sinon.stub(util, 'createTestingDeferred'); + utilsCreateEOTStub.callsFake(() => { + if (utilsCreateEOTStub.callCount === 1) { + return deferredEOT; } - }) - .returns(async () => { - cancellationToken.cancel(); - return Promise.resolve(); + return deferredExecClose; }); - const execFactoryMock = typeMoq.Mock.ofType(); - execFactoryMock - .setup((x) => 
x.createActivatedEnvironment(typeMoq.It.isAny())) - .returns(() => Promise.resolve(execServiceMock.object)); - execFactoryMock.setup((p) => ((p as unknown) as any).then).returns(() => undefined); - execServiceMock.setup((p) => ((p as unknown) as any).then).returns(() => undefined); - const deferredStartServer = createDeferred(); - utilsStartServerStub.callsFake(() => { - deferredStartServer.resolve(); - return Promise.resolve(54321); - }); - // mock EOT token & ExecClose token - const deferredEOT = createDeferred(); - const deferredExecClose = createDeferred(); - const utilsCreateEOTStub: sinon.SinonStub = sinon.stub(util, 'createTestingDeferred'); - utilsCreateEOTStub.callsFake(() => { - if (utilsCreateEOTStub.callCount === 1) { - return deferredEOT; - } - return deferredExecClose; + // define adapter and run tests + const testAdapter = createAdapter(adapter, configService, typeMoq.Mock.ofType().object); + await testAdapter.runTests( + Uri.file(myTestPath), + [], + false, + testRunMock.object, + execFactoryStub.object, + debugLauncher.object, + ); + // wait for server to start to keep test from failing + await deferredStartTestIdsNamedPipe.promise; + + // assert the server dispose function was called correctly + sinon.assert.calledOnce(serverDisposeStub); }); - // set up test server - testServer - .setup((t) => t.onRunDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => ({ - dispose: () => { - /* no-body */ - }, - })); - testServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => 'uuid123'); - adapter = new PytestTestExecutionAdapter( - testServer.object, - configService, - typeMoq.Mock.ofType().object, - ); - await adapter.runTests( - Uri.file(myTestPath), - [], - true, - testRunMock.object, - execFactoryMock.object, - debugLauncher.object, - ); - // wait for server to start to keep test from failing - await deferredStartServer.promise; - - testServer.verify((x) => x.deleteUUID(typeMoq.It.isAny()), typeMoq.Times.once()); - }); - test('UNITTEST cancelation token called mid-run resolves correctly', async () => { - // mock test run and cancelation token - const testRunMock = typeMoq.Mock.ofType(); - const cancellationToken = new CancellationTokenSource(); - const { token } = cancellationToken; - testRunMock.setup((t) => t.token).returns(() => token); - - // Stub send command to then have token canceled - const stubTestServer = typeMoq.Mock.ofType(); - stubTestServer - .setup((t) => - t.sendCommand( - typeMoq.It.isAny(), - typeMoq.It.isAny(), - typeMoq.It.isAny(), - typeMoq.It.isAny(), - typeMoq.It.isAny(), - typeMoq.It.isAny(), - ), - ) - .returns(() => { - cancellationToken.cancel(); - return Promise.resolve(); + test(`Adapter ${adapter}: token called mid-debug resolves correctly`, async () => { + // mock test run and cancelation token + const testRunMock = typeMoq.Mock.ofType(); + const cancellationToken = new CancellationTokenSource(); + const { token } = cancellationToken; + testRunMock.setup((t) => t.token).returns(() => token); + + // // mock exec service and exec factory + execServiceStub + .setup((x) => x.execObservable(typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns(() => { + cancellationToken.cancel(); + return { + proc: mockProc, + out: typeMoq.Mock.ofType>>().object, + dispose: () => { + /* no-body */ + }, + }; + }); + execFactoryStub + .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) + .returns(() => Promise.resolve(execServiceStub.object)); + execFactoryStub.setup((p) => ((p as unknown) as any).then).returns(() => undefined); + 
execServiceStub.setup((p) => ((p as unknown) as any).then).returns(() => undefined); + + // test ids named pipe mocking + const deferredStartTestIdsNamedPipe = createDeferred(); + utilsStartTestIdsNamedPipe.callsFake(() => { + deferredStartTestIdsNamedPipe.resolve(); + return Promise.resolve('named-pipe'); }); - stubTestServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => 'uuid123'); - stubTestServer - .setup((t) => t.onRunDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => ({ - dispose: () => { - /* no-body */ - }, - })); - // mock exec service and exec factory - const execServiceMock = typeMoq.Mock.ofType(); - execServiceMock - .setup((x) => x.execObservable(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => { - cancellationToken.cancel(); - return { - proc: mockProc, - out: typeMoq.Mock.ofType>>().object, - dispose: () => { - /* no-body */ - }, - }; + // run result pipe mocking and the related server close dispose + let deferredTillServerCloseTester: Deferred | undefined; + utilsStartRunResultNamedPipe.callsFake((_callback, deferredTillServerClose, _token) => { + deferredTillServerCloseTester = deferredTillServerClose; + return Promise.resolve({ + name: 'named-pipes-socket-name', + dispose: serverDisposeStub, + }); }); - const execFactoryMock = typeMoq.Mock.ofType(); - execFactoryMock - .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) - .returns(() => Promise.resolve(execServiceMock.object)); - execFactoryMock.setup((p) => ((p as unknown) as any).then).returns(() => undefined); - execServiceMock.setup((p) => ((p as unknown) as any).then).returns(() => undefined); - - const deferredStartServer = createDeferred(); - utilsStartServerStub.callsFake(() => { - deferredStartServer.resolve(); - return Promise.resolve(54321); - }); - // mock EOT token & ExecClose token - const deferredEOT = createDeferred(); - const deferredExecClose = createDeferred(); - const utilsCreateEOTStub: sinon.SinonStub = sinon.stub(util, 'createTestingDeferred'); - utilsCreateEOTStub.callsFake(() => { - if (utilsCreateEOTStub.callCount === 1) { - return deferredEOT; - } - return deferredExecClose; - }); - // set up test server - const unittestAdapter = new UnittestTestExecutionAdapter( - stubTestServer.object, - configService, - typeMoq.Mock.ofType().object, - ); - await unittestAdapter.runTests(Uri.file(myTestPath), [], false, testRunMock.object); - // wait for server to start to keep test from failing - await deferredStartServer.promise; - - stubTestServer.verify((x) => x.deleteUUID(typeMoq.It.isAny()), typeMoq.Times.once()); - }); - test('UNITTEST cancelation token called mid-debug resolves correctly', async () => { - // mock test run and cancelation token - const testRunMock = typeMoq.Mock.ofType(); - const cancellationToken = new CancellationTokenSource(); - const { token } = cancellationToken; - testRunMock.setup((t) => t.token).returns(() => token); - - // Stub send command to then have token canceled - const stubTestServer = typeMoq.Mock.ofType(); - stubTestServer - .setup((t) => - t.sendCommand( - typeMoq.It.isAny(), - typeMoq.It.isAny(), - typeMoq.It.isAny(), - typeMoq.It.isAny(), - typeMoq.It.isAny(), - typeMoq.It.isAny(), - ), - ) - .returns(() => { - cancellationToken.cancel(); - return Promise.resolve(); + serverDisposeStub.callsFake(() => { + console.log('server disposed'); + if (deferredTillServerCloseTester) { + deferredTillServerCloseTester.resolve(); + } else { + console.log('deferredTillServerCloseTester is undefined'); + throw new Error( + 
'deferredTillServerCloseTester is undefined, should be defined from startRunResultNamedPipe', + ); + } }); - stubTestServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => 'uuid123'); - stubTestServer - .setup((t) => t.onRunDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => ({ - dispose: () => { - /* no-body */ - }, - })); - // mock exec service and exec factory - const execServiceMock = typeMoq.Mock.ofType(); - debugLauncher - .setup((dl) => dl.launchDebugger(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(async () => { - cancellationToken.cancel(); - return Promise.resolve(); + // mock EOT token & ExecClose token + const deferredEOT = createDeferred(); + const deferredExecClose = createDeferred(); + const utilsCreateEOTStub: sinon.SinonStub = sinon.stub(util, 'createTestingDeferred'); + utilsCreateEOTStub.callsFake(() => { + if (utilsCreateEOTStub.callCount === 1) { + return deferredEOT; + } + return deferredExecClose; }); - const execFactoryMock = typeMoq.Mock.ofType(); - execFactoryMock - .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) - .returns(() => Promise.resolve(execServiceMock.object)); - execFactoryMock.setup((p) => ((p as unknown) as any).then).returns(() => undefined); - execServiceMock.setup((p) => ((p as unknown) as any).then).returns(() => undefined); - const deferredStartServer = createDeferred(); - utilsStartServerStub.callsFake(() => { - deferredStartServer.resolve(); - return Promise.resolve(54321); + // debugLauncher mocked + debugLauncher + .setup((dl) => dl.launchDebugger(typeMoq.It.isAny(), typeMoq.It.isAny())) + .callback((_options, callback) => { + if (callback) { + callback(); + } + }) + .returns(async () => { + cancellationToken.cancel(); + return Promise.resolve(); + }); + + // define adapter and run tests + const testAdapter = createAdapter(adapter, configService, typeMoq.Mock.ofType().object); + await testAdapter.runTests( + Uri.file(myTestPath), + [], + true, + testRunMock.object, + execFactoryStub.object, + debugLauncher.object, + ); + // wait for server to start to keep test from failing + await deferredStartTestIdsNamedPipe.promise; + + // TODO: fix the server disposal so it is called once not twice, + // currently not a problem but would be useful to improve clarity + sinon.assert.called(serverDisposeStub); }); - // mock EOT token & ExecClose token - const deferredEOT = createDeferred(); - const deferredExecClose = createDeferred(); - const utilsCreateEOTStub: sinon.SinonStub = sinon.stub(util, 'createTestingDeferred'); - utilsCreateEOTStub.callsFake(() => { - if (utilsCreateEOTStub.callCount === 1) { - return deferredEOT; - } - return deferredExecClose; - }); - // set up test server - const unittestAdapter = new UnittestTestExecutionAdapter( - stubTestServer.object, - configService, - typeMoq.Mock.ofType().object, - ); - await unittestAdapter.runTests(Uri.file(myTestPath), [], false, testRunMock.object); - // wait for server to start to keep test from failing - await deferredStartServer.promise; - - stubTestServer.verify((x) => x.deleteUUID(typeMoq.It.isAny()), typeMoq.Times.once()); }); }); + +// Helper function to create an adapter based on the specified type +function createAdapter( + adapterType: string, + configService: IConfigurationService, + outputChannel: ITestOutputChannel, +): PytestTestExecutionAdapter | UnittestTestExecutionAdapter { + if (adapterType === 'pytest') return new PytestTestExecutionAdapter(configService, outputChannel); + if (adapterType === 'unittest') return new 
UnittestTestExecutionAdapter(configService, outputChannel); + throw Error('un-compatible adapter type'); +} diff --git a/src/test/testing/testController/unittest/testDiscoveryAdapter.unit.test.ts b/src/test/testing/testController/unittest/testDiscoveryAdapter.unit.test.ts index c5fb0d421da63..7c700fdd4ec4a 100644 --- a/src/test/testing/testController/unittest/testDiscoveryAdapter.unit.test.ts +++ b/src/test/testing/testController/unittest/testDiscoveryAdapter.unit.test.ts @@ -1,92 +1,173 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. +/* eslint-disable @typescript-eslint/no-explicit-any */ import * as assert from 'assert'; import * as path from 'path'; import * as typemoq from 'typemoq'; import { Uri } from 'vscode'; +import { Observable } from 'rxjs'; +import * as sinon from 'sinon'; import { IConfigurationService, ITestOutputChannel } from '../../../../client/common/types'; import { EXTENSION_ROOT_DIR } from '../../../../client/constants'; -import { ITestServer, TestCommandOptions } from '../../../../client/testing/testController/common/types'; import { UnittestTestDiscoveryAdapter } from '../../../../client/testing/testController/unittest/testDiscoveryAdapter'; -import { createDeferred } from '../../../../client/common/utils/async'; +import { Deferred, createDeferred } from '../../../../client/common/utils/async'; +import { MockChildProcess } from '../../../mocks/mockChildProcess'; +import * as util from '../../../../client/testing/testController/common/utils'; +import { + IPythonExecutionFactory, + IPythonExecutionService, + Output, + SpawnOptions, +} from '../../../../client/common/process/types'; suite('Unittest test discovery adapter', () => { let stubConfigSettings: IConfigurationService; let outputChannel: typemoq.IMock; + let mockProc: MockChildProcess; + let execService: typemoq.IMock; + let execFactory = typemoq.Mock.ofType(); + let deferred: Deferred; + let expectedExtraVariables: Record; + let expectedPath: string; + let uri: Uri; + let utilsStartDiscoveryNamedPipeStub: sinon.SinonStub; setup(() => { + expectedPath = path.join('/', 'new', 'cwd'); stubConfigSettings = ({ getSettings: () => ({ testing: { unittestArgs: ['-v', '-s', '.', '-p', 'test*'] }, }), } as unknown) as IConfigurationService; outputChannel = typemoq.Mock.ofType(); - }); - - test('DiscoverTests should send the discovery command to the test server with the correct args', async () => { - let options: TestCommandOptions | undefined; - const deferred = createDeferred(); - const stubTestServer = ({ - sendCommand(opt: TestCommandOptions): Promise { - delete opt.outChannel; - options = opt; + // set up exec service with child process + mockProc = new MockChildProcess('', ['']); + const output = new Observable>(() => { + /* no op */ + }); + execService = typemoq.Mock.ofType(); + execService + .setup((x) => x.execObservable(typemoq.It.isAny(), typemoq.It.isAny())) + .returns(() => { deferred.resolve(); - return Promise.resolve(); - }, - onDiscoveryDataReceived: () => { - // no body - }, - createUUID: () => '123456789', - } as unknown) as ITestServer; + console.log('execObservable is returning'); + return { + proc: mockProc, + out: output, + dispose: () => { + /* no-body */ + }, + }; + }); + execFactory = typemoq.Mock.ofType(); + deferred = createDeferred(); + execFactory + .setup((x) => x.createActivatedEnvironment(typemoq.It.isAny())) + .returns(() => Promise.resolve(execService.object)); + execFactory.setup((p) => ((p as unknown) as any).then).returns(() => 
undefined); + execService.setup((p) => ((p as unknown) as any).then).returns(() => undefined); - const uri = Uri.file('/foo/bar'); - const script = path.join(EXTENSION_ROOT_DIR, 'python_files', 'unittestadapter', 'discovery.py'); + // constants + expectedPath = path.join('/', 'my', 'test', 'path'); + uri = Uri.file(expectedPath); + expectedExtraVariables = { + TEST_RUN_PIPE: 'discoveryResultPipe-mockName', + }; - const adapter = new UnittestTestDiscoveryAdapter(stubTestServer, stubConfigSettings, outputChannel.object); - adapter.discoverTests(uri); + utilsStartDiscoveryNamedPipeStub = sinon.stub(util, 'startDiscoveryNamedPipe'); + utilsStartDiscoveryNamedPipeStub.callsFake(() => + Promise.resolve({ + name: 'discoveryResultPipe-mockName', + dispose: () => { + /* no-op */ + }, + }), + ); + }); + teardown(() => { + sinon.restore(); + }); + test('DiscoverTests should send the discovery command to the test server with the correct args', async () => { + const adapter = new UnittestTestDiscoveryAdapter(stubConfigSettings, outputChannel.object); + adapter.discoverTests(uri, execFactory.object); + const script = path.join(EXTENSION_ROOT_DIR, 'python_files', 'unittestadapter', 'discovery.py'); + const argsExpected = [script, '--udiscovery', '-v', '-s', '.', '-p', 'test*']; + + // must await until the execObservable is called in order to verify it await deferred.promise; - assert.deepStrictEqual(options?.command?.args, ['--udiscovery', '-v', '-s', '.', '-p', 'test*']); - assert.deepStrictEqual(options.workspaceFolder, uri); - assert.deepStrictEqual(options.cwd, uri.fsPath); - assert.deepStrictEqual(options.command.script, script); - assert.deepStrictEqual(options.uuid, '123456789'); + + execService.verify( + (x) => + x.execObservable( + typemoq.It.is>((argsActual) => { + try { + assert.equal(argsActual.length, argsExpected.length); + assert.deepEqual(argsActual, argsExpected); + return true; + } catch (e) { + console.error(e); + throw e; + } + }), + typemoq.It.is((options) => { + try { + assert.deepEqual(options.env, expectedExtraVariables); + assert.equal(options.cwd, expectedPath); + assert.equal(options.throwOnStdErr, true); + return true; + } catch (e) { + console.error(e); + throw e; + } + }), + ), + typemoq.Times.once(), + ); }); test('DiscoverTests should respect settings.testings.cwd when present', async () => { - let options: TestCommandOptions | undefined; + const expectedNewPath = path.join('/', 'new', 'cwd'); stubConfigSettings = ({ getSettings: () => ({ - testing: { unittestArgs: ['-v', '-s', '.', '-p', 'test*'], cwd: '/foo' }, + testing: { unittestArgs: ['-v', '-s', '.', '-p', 'test*'], cwd: expectedNewPath.toString() }, }), } as unknown) as IConfigurationService; - - const deferred = createDeferred(); - const stubTestServer = ({ - sendCommand(opt: TestCommandOptions): Promise { - delete opt.outChannel; - options = opt; - deferred.resolve(); - return Promise.resolve(); - }, - onDiscoveryDataReceived: () => { - // no body - }, - createUUID: () => '123456789', - } as unknown) as ITestServer; - - const uri = Uri.file('/foo/bar'); - const newCwd = '/foo'; + const adapter = new UnittestTestDiscoveryAdapter(stubConfigSettings, outputChannel.object); + adapter.discoverTests(uri, execFactory.object); const script = path.join(EXTENSION_ROOT_DIR, 'python_files', 'unittestadapter', 'discovery.py'); + const argsExpected = [script, '--udiscovery', '-v', '-s', '.', '-p', 'test*']; - const adapter = new UnittestTestDiscoveryAdapter(stubTestServer, stubConfigSettings, outputChannel.object); - 
adapter.discoverTests(uri); + // must await until the execObservable is called in order to verify it await deferred.promise; - assert.deepStrictEqual(options?.command?.args, ['--udiscovery', '-v', '-s', '.', '-p', 'test*']); - assert.deepStrictEqual(options.workspaceFolder, uri); - assert.deepStrictEqual(options.cwd, newCwd); - assert.deepStrictEqual(options.command.script, script); - assert.deepStrictEqual(options.uuid, '123456789'); + + execService.verify( + (x) => + x.execObservable( + typemoq.It.is>((argsActual) => { + try { + assert.equal(argsActual.length, argsExpected.length); + assert.deepEqual(argsActual, argsExpected); + return true; + } catch (e) { + console.error(e); + throw e; + } + }), + typemoq.It.is((options) => { + try { + assert.deepEqual(options.env, expectedExtraVariables); + assert.equal(options.cwd, expectedNewPath); + assert.equal(options.throwOnStdErr, true); + return true; + } catch (e) { + console.error(e); + throw e; + } + }), + ), + typemoq.Times.once(), + ); }); }); diff --git a/src/test/testing/testController/unittest/testExecutionAdapter.unit.test.ts b/src/test/testing/testController/unittest/testExecutionAdapter.unit.test.ts index bb82f49b47773..6d4757eff8d12 100644 --- a/src/test/testing/testController/unittest/testExecutionAdapter.unit.test.ts +++ b/src/test/testing/testController/unittest/testExecutionAdapter.unit.test.ts @@ -1,114 +1,287 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - +/* eslint-disable @typescript-eslint/no-explicit-any */ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. import * as assert from 'assert'; -import * as path from 'path'; -import * as typemoq from 'typemoq'; -import { Uri } from 'vscode'; +import { TestRun, Uri } from 'vscode'; +import * as typeMoq from 'typemoq'; import * as sinon from 'sinon'; +import * as path from 'path'; +import { Observable } from 'rxjs/Observable'; import { IConfigurationService, ITestOutputChannel } from '../../../../client/common/types'; +import { + IPythonExecutionFactory, + IPythonExecutionService, + Output, + SpawnOptions, +} from '../../../../client/common/process/types'; +import { createDeferred, Deferred } from '../../../../client/common/utils/async'; +import { ITestDebugLauncher, LaunchOptions } from '../../../../client/testing/common/types'; +import * as util from '../../../../client/testing/testController/common/utils'; import { EXTENSION_ROOT_DIR } from '../../../../client/constants'; -import { ITestServer, TestCommandOptions } from '../../../../client/testing/testController/common/types'; +import { MockChildProcess } from '../../../mocks/mockChildProcess'; +import { traceInfo } from '../../../../client/logging'; import { UnittestTestExecutionAdapter } from '../../../../client/testing/testController/unittest/testExecutionAdapter'; -import * as util from '../../../../client/testing/testController/common/utils'; suite('Unittest test execution adapter', () => { - let stubConfigSettings: IConfigurationService; - let outputChannel: typemoq.IMock; - + let configService: IConfigurationService; + let execFactory = typeMoq.Mock.ofType(); + let adapter: UnittestTestExecutionAdapter; + let execService: typeMoq.IMock; + let deferred: Deferred; + let deferred4: Deferred; + let debugLauncher: typeMoq.IMock; + (global as any).EXTENSION_ROOT_DIR = EXTENSION_ROOT_DIR; + let myTestPath: string; + let mockProc: MockChildProcess; + let utilsStartTestIdsNamedPipeStub: sinon.SinonStub; + let 
utilsStartRunResultNamedPipeStub: sinon.SinonStub; setup(() => { - stubConfigSettings = ({ + configService = ({ getSettings: () => ({ - testing: { unittestArgs: ['-v', '-s', '.', '-p', 'test*'] }, + testing: { unittestArgs: ['.'] }, }), + isTestExecution: () => false, } as unknown) as IConfigurationService; - outputChannel = typemoq.Mock.ofType(); - sinon.stub(util, 'startTestIdServer').returns(Promise.resolve(54321)); + + // set up exec service with child process + mockProc = new MockChildProcess('', ['']); + const output = new Observable>(() => { + /* no op */ + }); + deferred4 = createDeferred(); + execService = typeMoq.Mock.ofType(); + execService + .setup((x) => x.execObservable(typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns(() => { + deferred4.resolve(); + return { + proc: mockProc, + out: output, + dispose: () => { + /* no-body */ + }, + }; + }); + execFactory = typeMoq.Mock.ofType(); + + // added + utilsStartTestIdsNamedPipeStub = sinon.stub(util, 'startTestIdsNamedPipe'); + debugLauncher = typeMoq.Mock.ofType(); + execFactory + .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) + .returns(() => Promise.resolve(execService.object)); + deferred = createDeferred(); + execService + .setup((x) => x.exec(typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns(() => { + deferred.resolve(); + return Promise.resolve({ stdout: '{}' }); + }); + execFactory.setup((p) => ((p as unknown) as any).then).returns(() => undefined); + execService.setup((p) => ((p as unknown) as any).then).returns(() => undefined); + debugLauncher.setup((p) => ((p as unknown) as any).then).returns(() => undefined); + myTestPath = path.join('/', 'my', 'test', 'path', '/'); + + utilsStartRunResultNamedPipeStub = sinon.stub(util, 'startRunResultNamedPipe'); + utilsStartRunResultNamedPipeStub.callsFake(() => + Promise.resolve({ + name: 'runResultPipe-mockName', + dispose: () => { + /* no-op */ + }, + }), + ); }); teardown(() => { sinon.restore(); }); + test('startTestIdServer called with correct testIds', async () => { + const deferred2 = createDeferred(); + const deferred3 = createDeferred(); + execFactory = typeMoq.Mock.ofType(); + execFactory + .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) + .returns(() => { + deferred2.resolve(); + return Promise.resolve(execService.object); + }); + utilsStartTestIdsNamedPipeStub.callsFake(() => { + deferred3.resolve(); + return Promise.resolve({ + name: 'mockName', + dispose: () => { + /* no-op */ + }, + }); + }); + const testRun = typeMoq.Mock.ofType(); + testRun.setup((t) => t.token).returns(() => ({ onCancellationRequested: () => undefined } as any)); + const uri = Uri.file(myTestPath); + const outputChannel = typeMoq.Mock.ofType(); + adapter = new UnittestTestExecutionAdapter(configService, outputChannel.object); + const testIds = ['test1id', 'test2id']; - test('runTests should send the run command to the test server', async () => { - let options: TestCommandOptions | undefined; - let errorBool = false; - let errorMessage = ''; - const stubTestServer = ({ - sendCommand(opt: TestCommandOptions, runTestIdPort?: string): Promise { - delete opt.outChannel; - options = opt; - if (runTestIdPort === undefined) { - errorBool = true; - errorMessage = 'runTestIdPort is undefined'; - } - return Promise.resolve(); - }, - onRunDataReceived: () => { - // no body - }, - createUUID: () => '123456789', - } as unknown) as ITestServer; + adapter.runTests(uri, testIds, false, testRun.object, execFactory.object); - const uri = Uri.file('/foo/bar'); - const script = 
path.join(EXTENSION_ROOT_DIR, 'python_files', 'unittestadapter', 'execution.py'); + // add in await and trigger + await deferred2.promise; + await deferred3.promise; + mockProc.trigger('close'); - const adapter = new UnittestTestExecutionAdapter(stubTestServer, stubConfigSettings, outputChannel.object); - const testIds = ['test1id', 'test2id']; - adapter.runTests(uri, testIds, false).then(() => { - const expectedOptions: TestCommandOptions = { - workspaceFolder: uri, - command: { script, args: ['--udiscovery', '-v', '-s', '.', '-p', 'test*'] }, - cwd: uri.fsPath, - uuid: '123456789', - debugBool: false, - testIds, - }; - assert.deepStrictEqual(options, expectedOptions); - assert.equal(errorBool, false, errorMessage); + // assert + sinon.assert.calledWithExactly(utilsStartTestIdsNamedPipeStub, testIds); + }); + test('unittest execution called with correct args', async () => { + const deferred2 = createDeferred(); + const deferred3 = createDeferred(); + execFactory = typeMoq.Mock.ofType(); + execFactory + .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) + .returns(() => { + deferred2.resolve(); + return Promise.resolve(execService.object); + }); + utilsStartTestIdsNamedPipeStub.callsFake(() => { + deferred3.resolve(); + return Promise.resolve('testIdPipe-mockName'); }); + const testRun = typeMoq.Mock.ofType(); + testRun.setup((t) => t.token).returns(() => ({ onCancellationRequested: () => undefined } as any)); + const uri = Uri.file(myTestPath); + const outputChannel = typeMoq.Mock.ofType(); + adapter = new UnittestTestExecutionAdapter(configService, outputChannel.object); + adapter.runTests(uri, [], false, testRun.object, execFactory.object); + + await deferred2.promise; + await deferred3.promise; + await deferred4.promise; + mockProc.trigger('close'); + + const pathToPythonFiles = path.join(EXTENSION_ROOT_DIR, 'python_files'); + const pathToExecutionScript = path.join(pathToPythonFiles, 'unittestadapter', 'execution.py'); + const expectedArgs = [pathToExecutionScript, '--udiscovery', '.']; + const expectedExtraVariables = { + PYTHONPATH: myTestPath, + TEST_RUN_PIPE: 'runResultPipe-mockName', + RUN_TEST_IDS_PIPE: 'testIdPipe-mockName', + }; + execService.verify( + (x) => + x.execObservable( + expectedArgs, + typeMoq.It.is((options) => { + assert.equal(options.env?.PYTHONPATH, expectedExtraVariables.PYTHONPATH); + assert.equal(options.env?.TEST_RUN_PIPE, expectedExtraVariables.TEST_RUN_PIPE); + assert.equal(options.env?.RUN_TEST_IDS_PIPE, expectedExtraVariables.RUN_TEST_IDS_PIPE); + assert.equal(options.cwd, uri.fsPath); + assert.equal(options.throwOnStdErr, true); + return true; + }), + ), + typeMoq.Times.once(), + ); }); - test('runTests should respect settings.testing.cwd when present', async () => { - stubConfigSettings = ({ + test('unittest execution respects settings.testing.cwd when present', async () => { + const deferred2 = createDeferred(); + const deferred3 = createDeferred(); + execFactory = typeMoq.Mock.ofType(); + execFactory + .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) + .returns(() => { + deferred2.resolve(); + return Promise.resolve(execService.object); + }); + utilsStartTestIdsNamedPipeStub.callsFake(() => { + deferred3.resolve(); + return Promise.resolve('testIdPipe-mockName'); + }); + const testRun = typeMoq.Mock.ofType(); + testRun.setup((t) => t.token).returns(() => ({ onCancellationRequested: () => undefined } as any)); + const newCwd = path.join('new', 'path'); + configService = ({ getSettings: () => ({ - testing: { unittestArgs: 
['-v', '-s', '.', '-p', 'test*'], cwd: '/foo' }, + testing: { unittestArgs: ['.'], cwd: newCwd }, }), + isTestExecution: () => false, } as unknown) as IConfigurationService; - let options: TestCommandOptions | undefined; - let errorBool = false; - let errorMessage = ''; - const stubTestServer = ({ - sendCommand(opt: TestCommandOptions, runTestIdPort?: string): Promise { - delete opt.outChannel; - options = opt; - if (runTestIdPort === undefined) { - errorBool = true; - errorMessage = 'runTestIdPort is undefined'; - } - return Promise.resolve(); - }, - onRunDataReceived: () => { - // no body - }, - createUUID: () => '123456789', - } as unknown) as ITestServer; + const uri = Uri.file(myTestPath); + const outputChannel = typeMoq.Mock.ofType(); + adapter = new UnittestTestExecutionAdapter(configService, outputChannel.object); + adapter.runTests(uri, [], false, testRun.object, execFactory.object); - const newCwd = '/foo'; - const uri = Uri.file('/foo/bar'); - const script = path.join(EXTENSION_ROOT_DIR, 'python_files', 'unittestadapter', 'execution.py'); + await deferred2.promise; + await deferred3.promise; + await deferred4.promise; + mockProc.trigger('close'); - const adapter = new UnittestTestExecutionAdapter(stubTestServer, stubConfigSettings, outputChannel.object); - const testIds = ['test1id', 'test2id']; - adapter.runTests(uri, testIds, false).then(() => { - const expectedOptions: TestCommandOptions = { - workspaceFolder: uri, - command: { script, args: ['--udiscovery', '-v', '-s', '.', '-p', 'test*'] }, - cwd: newCwd, - uuid: '123456789', - debugBool: false, - testIds, - }; - assert.deepStrictEqual(options, expectedOptions); - assert.equal(errorBool, false, errorMessage); + const pathToPythonFiles = path.join(EXTENSION_ROOT_DIR, 'python_files'); + const pathToExecutionScript = path.join(pathToPythonFiles, 'unittestadapter', 'execution.py'); + const expectedArgs = [pathToExecutionScript, '--udiscovery', '.']; + const expectedExtraVariables = { + PYTHONPATH: newCwd, + TEST_RUN_PIPE: 'runResultPipe-mockName', + RUN_TEST_IDS_PIPE: 'testIdPipe-mockName', + }; + + execService.verify( + (x) => + x.execObservable( + expectedArgs, + typeMoq.It.is((options) => { + assert.equal(options.env?.PYTHONPATH, expectedExtraVariables.PYTHONPATH); + assert.equal(options.env?.TEST_RUN_PIPE, expectedExtraVariables.TEST_RUN_PIPE); + assert.equal(options.env?.RUN_TEST_IDS_PIPE, expectedExtraVariables.RUN_TEST_IDS_PIPE); + assert.equal(options.cwd, newCwd); + assert.equal(options.throwOnStdErr, true); + return true; + }), + ), + typeMoq.Times.once(), + ); + }); + test('Debug launched correctly for unittest', async () => { + const deferred3 = createDeferred(); + const deferredEOT = createDeferred(); + utilsStartTestIdsNamedPipeStub.callsFake(() => { + deferred3.resolve(); + return Promise.resolve('testIdPipe-mockName'); }); + debugLauncher + .setup((dl) => dl.launchDebugger(typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns(async () => { + traceInfo('stubs launch debugger'); + deferredEOT.resolve(); + }); + const utilsCreateEOTStub: sinon.SinonStub = sinon.stub(util, 'createTestingDeferred'); + utilsCreateEOTStub.callsFake(() => deferredEOT); + const testRun = typeMoq.Mock.ofType(); + testRun + .setup((t) => t.token) + .returns( + () => + ({ + onCancellationRequested: () => undefined, + } as any), + ); + const uri = Uri.file(myTestPath); + const outputChannel = typeMoq.Mock.ofType(); + adapter = new UnittestTestExecutionAdapter(configService, outputChannel.object); + await adapter.runTests(uri, [], true, 
testRun.object, execFactory.object, debugLauncher.object); + await deferred3.promise; + debugLauncher.verify( + (x) => + x.launchDebugger( + typeMoq.It.is((launchOptions) => { + assert.equal(launchOptions.cwd, uri.fsPath); + assert.equal(launchOptions.testProvider, 'unittest'); + assert.equal(launchOptions.pytestPort, 'runResultPipe-mockName'); + assert.strictEqual(launchOptions.runTestIdsPort, 'testIdPipe-mockName'); + assert.notEqual(launchOptions.token, undefined); + return true; + }), + typeMoq.It.isAny(), + ), + typeMoq.Times.once(), + ); }); }); diff --git a/src/test/testing/testController/workspaceTestAdapter.unit.test.ts b/src/test/testing/testController/workspaceTestAdapter.unit.test.ts index abee275c1bb64..9a07d4451e850 100644 --- a/src/test/testing/testController/workspaceTestAdapter.unit.test.ts +++ b/src/test/testing/testController/workspaceTestAdapter.unit.test.ts @@ -12,7 +12,7 @@ import { UnittestTestExecutionAdapter } from '../../../client/testing/testContro import { WorkspaceTestAdapter } from '../../../client/testing/testController/workspaceTestAdapter'; import * as Telemetry from '../../../client/telemetry'; import { EventName } from '../../../client/telemetry/constants'; -import { ITestResultResolver, ITestServer } from '../../../client/testing/testController/common/types'; +import { ITestResultResolver } from '../../../client/testing/testController/common/types'; import * as testItemUtilities from '../../../client/testing/testController/common/testItemUtilities'; import * as util from '../../../client/testing/testController/common/utils'; import * as ResultResolver from '../../../client/testing/testController/common/resultResolver'; @@ -20,7 +20,6 @@ import { IPythonExecutionFactory } from '../../../client/common/process/types'; suite('Workspace test adapter', () => { suite('Test discovery', () => { - let stubTestServer: ITestServer; let stubConfigSettings: IConfigurationService; let stubResultResolver: ITestResultResolver; @@ -29,6 +28,7 @@ suite('Workspace test adapter', () => { let outputChannel: typemoq.IMock; let telemetryEvent: { eventName: EventName; properties: Record }[] = []; + let execFactory: typemoq.IMock; // Stubbed test controller (see comment around L.40) let testController: TestController; @@ -41,15 +41,6 @@ suite('Workspace test adapter', () => { }), } as unknown) as IConfigurationService; - stubTestServer = ({ - sendCommand(): Promise { - return Promise.resolve(); - }, - onDataReceived: () => { - // no body - }, - } as unknown) as ITestServer; - stubResultResolver = ({ resolveDiscovery: () => { // no body @@ -128,16 +119,8 @@ suite('Workspace test adapter', () => { test('If discovery failed correctly create error node', async () => { discoverTestsStub.rejects(new Error('foo')); - const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); - const testExecutionAdapter = new UnittestTestExecutionAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); + const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter(stubConfigSettings, outputChannel.object); + const testExecutionAdapter = new UnittestTestExecutionAdapter(stubConfigSettings, outputChannel.object); const uriFoo = Uri.parse('foo'); const workspaceTestAdapter = new WorkspaceTestAdapter( 'unittest', @@ -165,7 +148,7 @@ suite('Workspace test adapter', () => { const buildErrorNodeOptionsStub = sinon.stub(util, 'buildErrorNodeOptions').returns(errorTestItemOptions); const testProvider = 'unittest'; - 
const execFactory = typemoq.Mock.ofType(); + execFactory = typemoq.Mock.ofType(); await workspaceTestAdapter.discoverTests(testController, undefined, execFactory.object); sinon.assert.calledWithMatch(createErrorTestItemStub, sinon.match.any, sinon.match.any); @@ -175,16 +158,8 @@ suite('Workspace test adapter', () => { test("When discovering tests, the workspace test adapter should call the test discovery adapter's discoverTest method", async () => { discoverTestsStub.resolves(); - const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); - const testExecutionAdapter = new UnittestTestExecutionAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); + const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter(stubConfigSettings, outputChannel.object); + const testExecutionAdapter = new UnittestTestExecutionAdapter(stubConfigSettings, outputChannel.object); const workspaceTestAdapter = new WorkspaceTestAdapter( 'unittest', testDiscoveryAdapter, @@ -193,7 +168,7 @@ suite('Workspace test adapter', () => { stubResultResolver, ); - await workspaceTestAdapter.discoverTests(testController); + await workspaceTestAdapter.discoverTests(testController, undefined, execFactory.object); sinon.assert.calledOnce(discoverTestsStub); }); @@ -209,16 +184,8 @@ suite('Workspace test adapter', () => { }), ); - const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); - const testExecutionAdapter = new UnittestTestExecutionAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); + const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter(stubConfigSettings, outputChannel.object); + const testExecutionAdapter = new UnittestTestExecutionAdapter(stubConfigSettings, outputChannel.object); const workspaceTestAdapter = new WorkspaceTestAdapter( 'unittest', testDiscoveryAdapter, @@ -239,16 +206,8 @@ suite('Workspace test adapter', () => { test('If discovery succeeds, send a telemetry event with the "failed" key set to false', async () => { discoverTestsStub.resolves({ status: 'success' }); - const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); - const testExecutionAdapter = new UnittestTestExecutionAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); + const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter(stubConfigSettings, outputChannel.object); + const testExecutionAdapter = new UnittestTestExecutionAdapter(stubConfigSettings, outputChannel.object); const workspaceTestAdapter = new WorkspaceTestAdapter( 'unittest', @@ -258,7 +217,7 @@ suite('Workspace test adapter', () => { stubResultResolver, ); - await workspaceTestAdapter.discoverTests(testController); + await workspaceTestAdapter.discoverTests(testController, undefined, execFactory.object); sinon.assert.calledWith(sendTelemetryStub, EventName.UNITTEST_DISCOVERY_DONE); assert.strictEqual(telemetryEvent.length, 2); @@ -270,16 +229,8 @@ suite('Workspace test adapter', () => { test('If discovery failed, send a telemetry event with the "failed" key set to true, and add an error node to the test controller', async () => { discoverTestsStub.rejects(new Error('foo')); - const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); - const testExecutionAdapter = new UnittestTestExecutionAdapter( - 
stubTestServer, - stubConfigSettings, - outputChannel.object, - ); + const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter(stubConfigSettings, outputChannel.object); + const testExecutionAdapter = new UnittestTestExecutionAdapter(stubConfigSettings, outputChannel.object); const workspaceTestAdapter = new WorkspaceTestAdapter( 'unittest', @@ -299,7 +250,6 @@ suite('Workspace test adapter', () => { }); }); suite('Test execution workspace test adapter', () => { - let stubTestServer: ITestServer; let stubConfigSettings: IConfigurationService; let stubResultResolver: ITestResultResolver; let executionTestsStub: sinon.SinonStub; @@ -323,15 +273,6 @@ suite('Workspace test adapter', () => { }), } as unknown) as IConfigurationService; - stubTestServer = ({ - sendCommand(): Promise { - return Promise.resolve(); - }, - onDataReceived: () => { - // no body - }, - } as unknown) as ITestServer; - stubResultResolver = ({ resolveDiscovery: () => { // no body @@ -405,16 +346,8 @@ suite('Workspace test adapter', () => { sandbox.restore(); }); test('When executing tests, the right tests should be sent to be executed', async () => { - const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); - const testExecutionAdapter = new UnittestTestExecutionAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); + const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter(stubConfigSettings, outputChannel.object); + const testExecutionAdapter = new UnittestTestExecutionAdapter(stubConfigSettings, outputChannel.object); const workspaceTestAdapter = new WorkspaceTestAdapter( 'unittest', testDiscoveryAdapter, @@ -461,16 +394,8 @@ suite('Workspace test adapter', () => { }); test("When executing tests, the workspace test adapter should call the test execute adapter's executionTest method", async () => { - const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); - const testExecutionAdapter = new UnittestTestExecutionAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); + const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter(stubConfigSettings, outputChannel.object); + const testExecutionAdapter = new UnittestTestExecutionAdapter(stubConfigSettings, outputChannel.object); const workspaceTestAdapter = new WorkspaceTestAdapter( 'unittest', testDiscoveryAdapter, @@ -495,16 +420,8 @@ suite('Workspace test adapter', () => { }), ); - const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); - const testExecutionAdapter = new UnittestTestExecutionAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); + const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter(stubConfigSettings, outputChannel.object); + const testExecutionAdapter = new UnittestTestExecutionAdapter(stubConfigSettings, outputChannel.object); const workspaceTestAdapter = new WorkspaceTestAdapter( 'unittest', testDiscoveryAdapter, @@ -525,16 +442,8 @@ suite('Workspace test adapter', () => { test('If execution failed correctly create error node', async () => { executionTestsStub.rejects(new Error('foo')); - const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); - const testExecutionAdapter = new UnittestTestExecutionAdapter( - stubTestServer, - stubConfigSettings, - 
outputChannel.object, - ); + const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter(stubConfigSettings, outputChannel.object); + const testExecutionAdapter = new UnittestTestExecutionAdapter(stubConfigSettings, outputChannel.object); const workspaceTestAdapter = new WorkspaceTestAdapter( 'unittest', @@ -571,16 +480,8 @@ suite('Workspace test adapter', () => { test('If execution failed, send a telemetry event with the "failed" key set to true, and add an error node to the test controller', async () => { executionTestsStub.rejects(new Error('foo')); - const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); - const testExecutionAdapter = new UnittestTestExecutionAdapter( - stubTestServer, - stubConfigSettings, - outputChannel.object, - ); + const testDiscoveryAdapter = new UnittestTestDiscoveryAdapter(stubConfigSettings, outputChannel.object); + const testExecutionAdapter = new UnittestTestExecutionAdapter(stubConfigSettings, outputChannel.object); const workspaceTestAdapter = new WorkspaceTestAdapter( 'unittest',