diff --git a/requirements/base.txt b/requirements/base.txt index 5d6c01573b..42177e9c21 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,13 +6,13 @@ boto3>=1.19.5,==1.* jmespath~=0.10.0 PyYAML~=5.3 cookiecutter~=2.1.1 -aws-sam-translator==1.48.0 +aws-sam-translator==1.50.0 #docker minor version updates can include breaking changes. Auto update micro version only. docker~=4.2.0 dateparser~=1.0 requests==2.25.1 serverlessrepo==0.1.10 -aws_lambda_builders==1.18.0 +aws_lambda_builders==1.19.0 tomlkit==0.7.2 watchdog==2.1.2 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index 12e2bcc7cc..5095fbad8a 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -12,15 +12,15 @@ attrs==20.3.0 \ --hash=sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6 \ --hash=sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700 # via jsonschema -aws-lambda-builders==1.18.0 \ - --hash=sha256:555c1bb72bab633deeb806cc347b40ed865d63166e536c89ff71f0ba739577b1 \ - --hash=sha256:c0bd4b4288f0aa9cba27493065f5fb986425b5b49620b93a391620403eeb97e0 \ - --hash=sha256:c5235699d36b8edda7e649fbc3a23ed868eda1e15c4a83528df33939bdb75754 +aws-lambda-builders==1.19.0 \ + --hash=sha256:32e26425ad626c7e2c41989c894c2c5f70cce7574ed7729e37cdd262a049fd8a \ + --hash=sha256:38fcb9023df09f3c39504498cf45a213a29b176be5cec36126b13b77604731bd \ + --hash=sha256:61e3f1e77b62ab72b97f822c09385ce16dc0e5478b52de7296a79570be41be73 # via aws-sam-cli (setup.py) -aws-sam-translator==1.48.0 \ - --hash=sha256:7171037323dfa30f8f73e9bccb9210e4c384a585e087219a9518a5204f0a2c44 \ - --hash=sha256:be18dfa3dfe7ab291d281667c5f73ac62dbe6bfe86df7d122e4258b906b736f0 \ - --hash=sha256:ca4f8f9910d7713aeaba59346775bfb3198f6acb47c6704572f9bd3fc0fb5bf0 +aws-sam-translator==1.50.0 \ + --hash=sha256:09668d12b5d330412421d30d4a8e826da6fe06f5a451f771c3b37f48f1b25889 \ + --hash=sha256:85bea2739e1b4a61b3e4add8a12f727d7a8e459e3da195dfd0cd2e756be054ec \ + --hash=sha256:d375e9333c0262ed74b6d7ae90938060713ab17341f4e06c5cdbfd755902d9b4 # via aws-sam-cli (setup.py) backports-zoneinfo==0.2.1 \ --hash=sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf \ diff --git a/samcli/__init__.py b/samcli/__init__.py index cf4f9029d9..65e71d76b4 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.53.0" +__version__ = "1.56.0" diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py index 9e2b4aa020..24ca409c64 100644 --- a/samcli/cli/cli_config_file.py +++ b/samcli/cli/cli_config_file.py @@ -128,16 +128,22 @@ def configuration_callback(cmd_name, option_name, saved_callback, provider, ctx, cmd_name = cmd_name or ctx.info_name param.default = None config_env_name = ctx.params.get("config_env") or DEFAULT_ENV + config_file = ctx.params.get("config_file") or DEFAULT_CONFIG_FILE_NAME config_dir = getattr(ctx, "samconfig_dir", None) or os.getcwd() # If --config-file is an absolute path, use it, if not, start from config_dir - config_file_name = config_file if os.path.isabs(config_file) else os.path.join(config_dir, config_file) + config_file_path = config_file if os.path.isabs(config_file) else os.path.join(config_dir, config_file) + if config_file and config_file != DEFAULT_CONFIG_FILE_NAME and not Path(config_file_path).absolute().is_file(): + error_msg = f"Config file {config_file} does not exist or could not be read!" 
+ LOG.debug(error_msg) + raise ConfigException(error_msg) + config = get_ctx_defaults( cmd_name, provider, ctx, config_env_name=config_env_name, - config_file=config_file_name, + config_file=config_file_path, ) ctx.default_map.update(config) diff --git a/samcli/cli/types.py b/samcli/cli/types.py index 2724b928a1..308af34ccd 100644 --- a/samcli/cli/types.py +++ b/samcli/cli/types.py @@ -178,8 +178,16 @@ class CfnTags(click.ParamType): """ Custom Click options type to accept values for tag parameters. tag parameters can be of the type KeyName1=string KeyName2=string + + If multiple_values_per_key is set to True, the returned dictionary will map + each key to a list of corresponding values. + + E.g. Input: KeyName1=Value1 KeyName1=Value2 Output: {KeyName1 : [Value1, Value2]} """ + def __init__(self, multiple_values_per_key=False): + self.multiple_values_per_key = multiple_values_per_key + _EXAMPLE = "KeyName1=string KeyName2=string" # Tags have additional constraints and they allow "+ - = . _ : / @" apart from alpha-numerics. TAG_REGEX = '[A-Za-z0-9\\"_:\\.\\/\\+-\\@=]' @@ -212,7 +220,7 @@ def convert(self, value, param, ctx): parsed, tags = self._space_separated_key_value_parser(val) if parsed: for k in tags: - result[_unquote_wrapped_quotes(k)] = _unquote_wrapped_quotes(tags[k]) + self._add_value(result, _unquote_wrapped_quotes(k), _unquote_wrapped_quotes(tags[k])) else: groups = re.findall(self._pattern, val) @@ -220,8 +228,7 @@ def convert(self, value, param, ctx): fail = True for group in groups: key, v = group - # assign to result['KeyName1'] = string and so on. - result[_unquote_wrapped_quotes(key)] = _unquote_wrapped_quotes(v) + self._add_value(result, _unquote_wrapped_quotes(key), _unquote_wrapped_quotes(v)) if fail: return self.fail( @@ -232,6 +239,17 @@ def convert(self, value, param, ctx): return result + def _add_value(self, result: dict, key: str, new_value: str): + """ + Add a given value to a given key in the result map. 
+ """ + if self.multiple_values_per_key: + if not result.get(key): + result[key] = [] + result[key].append(new_value) + return + result[key] = new_value + @staticmethod def _standard_key_value_parser(tag_value): """ diff --git a/samcli/commands/_utils/experimental.py b/samcli/commands/_utils/experimental.py index b38d004d3c..955628c35e 100644 --- a/samcli/commands/_utils/experimental.py +++ b/samcli/commands/_utils/experimental.py @@ -43,7 +43,9 @@ class ExperimentalFlag: """Class for storing all experimental related ConfigEntries""" All = ExperimentalEntry("experimentalAll", EXPERIMENTAL_ENV_VAR_PREFIX + "FEATURES") - Esbuild = ExperimentalEntry("experimentalEsbuild", EXPERIMENTAL_ENV_VAR_PREFIX + "ESBUILD") + BuildPerformance = ExperimentalEntry( + "experimentalBuildPerformance", EXPERIMENTAL_ENV_VAR_PREFIX + "BUILD_PERFORMANCE" + ) IaCsSupport = { "terraform": ExperimentalEntry( "experimentalTerraformSupport", EXPERIMENTAL_ENV_VAR_PREFIX + "TERRAFORM_SUPPORT" diff --git a/samcli/commands/build/build_context.py b/samcli/commands/build/build_context.py index ab58f9173f..e512671401 100644 --- a/samcli/commands/build/build_context.py +++ b/samcli/commands/build/build_context.py @@ -9,8 +9,9 @@ import click -from samcli.commands._utils.experimental import ExperimentalFlag, prompt_experimental +from samcli.lib.build.bundler import EsbuildBundlerManager from samcli.lib.providers.sam_api_provider import SamApiProvider +from samcli.lib.telemetry.event import EventTracker from samcli.lib.utils.packagetype import IMAGE from samcli.commands._utils.template import get_template_data @@ -34,6 +35,7 @@ BuildError, UnsupportedBuilderLibraryVersionError, ContainerBuildNotSupported, + ApplicationBuildResult, ) from samcli.commands._utils.constants import DEFAULT_BUILD_DIR from samcli.lib.build.workflow_config import UnsupportedRuntimeException @@ -222,6 +224,8 @@ def run(self): if is_sam_template: SamApiProvider.check_implicit_api_resource_ids(self.stacks) + self._stacks = self._handle_build_pre_processing() + try: builder = ApplicationBuilder( self.get_resources_to_build(), @@ -243,29 +247,14 @@ def run(self): raise UserException(str(ex), wrapped_from=ex.__class__.__name__) from ex try: - self._check_esbuild_warning() self._check_exclude_warning() + build_result = builder.build() - artifacts = build_result.artifacts - stack_output_template_path_by_stack_path = { - stack.stack_path: stack.get_output_template_path(self.build_dir) for stack in self.stacks - } - for stack in self.stacks: - modified_template = builder.update_template( - stack, - artifacts, - stack_output_template_path_by_stack_path, - ) - output_template_path = stack.get_output_template_path(self.build_dir) + self._handle_build_post_processing(builder, build_result) - if self._create_auto_dependency_layer: - LOG.debug("Auto creating dependency layer for each function resource into a nested stack") - nested_stack_manager = NestedStackManager( - stack, self._stack_name, self.build_dir, modified_template, build_result - ) - modified_template = nested_stack_manager.generate_auto_dependency_layer_stack() - move_template(stack.location, output_template_path, modified_template) + for f in self.get_resources_to_build().functions: + EventTracker.track_event("BuildFunctionRuntime", f.runtime) click.secho("\nBuild Succeeded", fg="green") @@ -307,6 +296,54 @@ def run(self): wrapped_from = deep_wrap if deep_wrap else ex.__class__.__name__ raise UserException(str(ex), wrapped_from=wrapped_from) from ex + def _handle_build_pre_processing(self) -> 
List[Stack]:
+        """
+        Pre-modify the stacks as required before invoking the build
+        :return: List of modified stacks
+        """
+        stacks = []
+        if any(EsbuildBundlerManager(stack).esbuild_configured() for stack in self.stacks):
+            # esbuild is configured in one of the stacks, will check and update stack metadata accordingly
+            for stack in self.stacks:
+                stacks.append(EsbuildBundlerManager(stack).set_sourcemap_metadata_from_env())
+            self.function_provider.update(stacks, self._use_raw_codeuri, locate_layer_nested=self._locate_layer_nested)
+        return stacks if stacks else self.stacks
+
+    def _handle_build_post_processing(self, builder: ApplicationBuilder, build_result: ApplicationBuildResult) -> None:
+        """
+        Apply any necessary template modifications, then move each stack's template into the build directory
+        :param builder: ApplicationBuilder instance that built the application
+        :param build_result: Result of the application build
+        """
+        artifacts = build_result.artifacts
+
+        stack_output_template_path_by_stack_path = {
+            stack.stack_path: stack.get_output_template_path(self.build_dir) for stack in self.stacks
+        }
+        for stack in self.stacks:
+            modified_template = builder.update_template(
+                stack,
+                artifacts,
+                stack_output_template_path_by_stack_path,
+            )
+            output_template_path = stack.get_output_template_path(self.build_dir)
+
+            stack_name = self._stack_name if self._stack_name else ""
+            if self._create_auto_dependency_layer:
+                LOG.debug("Auto creating dependency layer for each function resource into a nested stack")
+                nested_stack_manager = NestedStackManager(
+                    stack, stack_name, self.build_dir, modified_template, build_result
+                )
+                modified_template = nested_stack_manager.generate_auto_dependency_layer_stack()
+
+            esbuild_manager = EsbuildBundlerManager(stack=stack, template=modified_template)
+            if esbuild_manager.esbuild_configured():
+                modified_template = esbuild_manager.set_sourcemap_env_from_metadata()
+
+            move_template(stack.location, output_template_path, modified_template)
+
     @staticmethod
     def gen_success_msg(artifacts_dir: str, output_template_path: str, is_default_build_dir: bool) -> str:
@@ -589,28 +626,8 @@ def _is_layer_buildable(layer: LayerVersion):
                 return False
         return True
 
-    _ESBUILD_WARNING_MESSAGE = (
-        "Using esbuild for bundling Node.js and TypeScript is a beta feature.\n"
-        "Please confirm if you would like to proceed with using esbuild to build your function.\n"
-        "You can also enable this beta feature with 'sam build --beta-features'."
-    )
-
     _EXCLUDE_WARNING_MESSAGE = "Resource expected to be built, but marked as excluded.\nBuilding anyways..."
- def _check_esbuild_warning(self) -> None: - """ - Prints warning message and confirms that the user wants to enable beta features - """ - resources_to_build = self.get_resources_to_build() - is_building_esbuild = False - for function in resources_to_build.functions: - if function.metadata and function.metadata.get("BuildMethod", "") == "esbuild": - is_building_esbuild = True - break - - if is_building_esbuild: - prompt_experimental(ExperimentalFlag.Esbuild, self._ESBUILD_WARNING_MESSAGE) - def _check_exclude_warning(self) -> None: """ Prints warning message if a single resource to build is also being excluded diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index b29199b42a..aaaed1934f 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -9,6 +9,7 @@ from samcli.cli.cli_config_file import TomlProvider, configuration_option from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args from samcli.commands._utils.cdk_support_decorators import unsupported_command_cdk +from samcli.commands._utils.click_mutex import ClickMutex from samcli.commands._utils.options import ( capabilities_option, guided_deploy_stack_name, @@ -107,6 +108,26 @@ required=False, is_flag=True, help="Preserves the state of previously provisioned resources when an operation fails.", + cls=ClickMutex, + incompatible_params=["on_failure"], +) +@click.option( + "--on-failure", + default="ROLLBACK", + type=click.Choice(["ROLLBACK", "DELETE", "DO_NOTHING"]), + required=False, + help=""" + Provide an action to determine what will happen when a stack fails to create. Three actions are available:\n + - ROLLBACK: This will rollback a stack to a previous known good state.\n + - DELETE: The stack will rollback to a previous state if one exists, otherwise the stack will be deleted.\n + - DO_NOTHING: The stack will not rollback or delete, this is the same as disabling rollback.\n + Default behaviour is ROLLBACK.\n\n + + This option is mutually exclusive with --disable-rollback/--no-disable-rollback. You can provide + --on-failure or --disable-rollback/--no-disable-rollback but not both at the same time. 
+ """, + cls=ClickMutex, + incompatible_params=["disable_rollback", "no_disable_rollback"], ) @stack_name_option(callback=guided_deploy_stack_name) # pylint: disable=E1120 @s3_bucket_option(disable_callback=True) # pylint: disable=E1120 @@ -162,6 +183,7 @@ def cli( config_file, config_env, disable_rollback, + on_failure, ): """ `sam deploy` command entry point @@ -196,6 +218,7 @@ def cli( config_env, resolve_image_repos, disable_rollback, + on_failure, ) # pragma: no cover @@ -228,6 +251,7 @@ def do_cli( config_env, resolve_image_repos, disable_rollback, + on_failure, ): """ Implementation of the ``cli`` method @@ -330,5 +354,6 @@ def do_cli( use_changeset=True, disable_rollback=guided_context.disable_rollback if guided else disable_rollback, poll_delay=poll_delay, + on_failure=on_failure, ) as deploy_context: deploy_context.run() diff --git a/samcli/commands/deploy/deploy_context.py b/samcli/commands/deploy/deploy_context.py index b96374288b..0138ec6c2f 100644 --- a/samcli/commands/deploy/deploy_context.py +++ b/samcli/commands/deploy/deploy_context.py @@ -30,6 +30,7 @@ hide_noecho_parameter_overrides, ) from samcli.lib.deploy.deployer import Deployer +from samcli.lib.deploy.utils import FailureMode from samcli.lib.intrinsic_resolver.intrinsics_symbol_table import IntrinsicsSymbolTable from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.providers.sam_stack_provider import SamLocalStackProvider @@ -73,6 +74,7 @@ def __init__( use_changeset, disable_rollback, poll_delay, + on_failure, ): self.template_file = template_file self.stack_name = stack_name @@ -103,6 +105,7 @@ def __init__( self.use_changeset = use_changeset self.disable_rollback = disable_rollback self.poll_delay = poll_delay + self.on_failure = FailureMode(on_failure) if on_failure else FailureMode.ROLLBACK def __enter__(self): return self @@ -179,20 +182,20 @@ def run(self): def deploy( self, - stack_name, - template_str, - parameters, - capabilities, - no_execute_changeset, - role_arn, - notification_arns, - s3_uploader, - tags, - region, - fail_on_empty_changeset=True, - confirm_changeset=False, - use_changeset=True, - disable_rollback=False, + stack_name: str, + template_str: str, + parameters: List[dict], + capabilities: List[str], + no_execute_changeset: bool, + role_arn: str, + notification_arns: List[str], + s3_uploader: S3Uploader, + tags: List[str], + region: str, + fail_on_empty_changeset: bool = True, + confirm_changeset: bool = False, + use_changeset: bool = True, + disable_rollback: bool = False, ): """ Deploy the stack to cloudformation. 
@@ -264,14 +267,26 @@ def deploy(
             if not click.confirm(f"{self.MSG_CONFIRM_CHANGESET}", default=False):
                 return
 
-            self.deployer.execute_changeset(result["Id"], stack_name, disable_rollback)
-            self.deployer.wait_for_execute(stack_name, changeset_type, disable_rollback)
+            # Stop the rollback in the case of DO_NOTHING, or if DELETE is specified
+            # DO_NOTHING behaves the same as disable_rollback; both preserve the current state of the stack
+            do_disable_rollback = (
+                self.on_failure in [FailureMode.DO_NOTHING, FailureMode.DELETE] or disable_rollback
+            )
+
+            self.deployer.execute_changeset(result["Id"], stack_name, do_disable_rollback)
+            self.deployer.wait_for_execute(stack_name, changeset_type, do_disable_rollback, self.on_failure)
             click.echo(self.MSG_EXECUTE_SUCCESS.format(stack_name=stack_name, region=region))
 
         except deploy_exceptions.ChangeEmptyError as ex:
             if fail_on_empty_changeset:
                 raise
             click.echo(str(ex))
+        except deploy_exceptions.DeployFailedError:
+            # Deployment failed; re-raise unless on_failure is DELETE, in which case roll back and delete the stack
+            if self.on_failure != FailureMode.DELETE:
+                raise
+
+            self.deployer.rollback_delete_stack(stack_name)
 
         else:
             try:
@@ -284,6 +299,7 @@ def deploy(
                     notification_arns=notification_arns,
                     s3_uploader=s3_uploader,
                     tags=tags,
+                    on_failure=self.on_failure,
                 )
                 LOG.debug(result)
diff --git a/samcli/commands/deploy/exceptions.py b/samcli/commands/deploy/exceptions.py
index c8bf1655a1..574754d48c 100644
--- a/samcli/commands/deploy/exceptions.py
+++ b/samcli/commands/deploy/exceptions.py
@@ -75,3 +75,9 @@ def __init__(self):
         )
 
         super().__init__(message=message_fmt)
+
+
+class DeployStackStatusMissingError(UserException):
+    def __init__(self, stack_name):
+        message_fmt = "Unable to find a stack with the name: {msg}. Please check your parameters and try again."
+        super().__init__(message=message_fmt.format(msg=stack_name))
diff --git a/samcli/commands/local/lib/local_lambda.py b/samcli/commands/local/lib/local_lambda.py
index 1204c6eff3..ef768a3d20 100644
--- a/samcli/commands/local/lib/local_lambda.py
+++ b/samcli/commands/local/lib/local_lambda.py
@@ -216,6 +216,10 @@ def get_invoke_config(self, function: Function) -> FunctionConfig:
     def _make_env_vars(self, function: Function) -> EnvironmentVariables:
         """Returns the environment variables configuration for this function
 
+        Priority order for environment variables (high to low):
+        1. Function-specific env vars from the JSON file
+        2. Global env vars from the JSON file
+
         Parameters
         ----------
         function : samcli.commands.local.lib.provider.Function
@@ -234,42 +238,48 @@ def _make_env_vars(self, function: Function) -> EnvironmentVariables:
         """
 
         function_id = function.function_id
-        name = function.name
+        logical_id = function.name
+        function_name = function.functionname
         full_path = function.full_path
 
         variables = None
         if isinstance(function.environment, dict) and "Variables" in function.environment:
            variables = function.environment["Variables"]
         else:
-            LOG.debug("No environment variables found for function '%s'", name)
+            LOG.debug("No environment variables found for function '%s'", logical_id)
 
-        # This could either be in standard format, or a CloudFormation parameter file format.
+        # This could either be in standard format, or a CloudFormation parameter file format, or a mix of both.
         #
         # Standard format is {FunctionName: {key:value}, FunctionName: {key:value}}
         # CloudFormation parameter file is {"Parameters": {key:value}}
+        # Mixed format is {FunctionName: {key:value}, "Parameters": {key:value}}
 
         for env_var_value in self.env_vars_values.values():
             if not isinstance(env_var_value, dict):
-                reason = """
-                    Environment variables must be in either CloudFormation parameter file
-                    format or in {FunctionName: {key:value}} JSON pairs
-                    """
+                reason = "Environment variables {} are in an incorrect format".format(env_var_value)
                 LOG.debug(reason)
                 raise OverridesNotWellDefinedError(reason)
 
+        overrides = {}
+        # Environment variables for specific functions take precedence over
+        # the global "Parameters" section that applies to all resources
         if "Parameters" in self.env_vars_values:
-            LOG.debug("Environment variables overrides data is in CloudFormation parameter file format")
+            LOG.debug("Environment variables data found in the CloudFormation parameter file format")
             # CloudFormation parameter file format
-            overrides = self.env_vars_values["Parameters"]
-        else:
+            parameter_result = self.env_vars_values.get("Parameters", {})
+            overrides.update(parameter_result)
+
+        # Precedence: logical_id -> function_id -> function name -> full_path, customer can use any of them
+        fn_file_env_vars = (
+            self.env_vars_values.get(logical_id, None)
+            or self.env_vars_values.get(function_id, None)
+            or self.env_vars_values.get(function_name, None)
+            or self.env_vars_values.get(full_path, None)
+        )
+        if fn_file_env_vars:
             # Standard format
-            LOG.debug("Environment variables overrides data is standard format")
-            # Precedence: logical_id -> function_id -> full_path, customer can use any of them
-            overrides = (
-                self.env_vars_values.get(name, None)
-                or self.env_vars_values.get(function_id, None)
-                or self.env_vars_values.get(full_path, None)
-            )
+            LOG.debug("Environment variables data found for specific function in standard format")
+            overrides.update(fn_file_env_vars)
 
         shell_env = os.environ
         aws_creds = self.get_aws_creds()
diff --git a/samcli/commands/pipeline/init/interactive_init_flow.py b/samcli/commands/pipeline/init/interactive_init_flow.py
index 6dea4c8a95..2c8159a1e4 100644
--- a/samcli/commands/pipeline/init/interactive_init_flow.py
+++ b/samcli/commands/pipeline/init/interactive_init_flow.py
@@ -113,14 +113,30 @@ def _generate_from_custom_location(
         """
         pipeline_template_git_location: str = click.prompt("Template Git location")
         if os.path.exists(pipeline_template_git_location):
-            return self._generate_from_pipeline_template(Path(pipeline_template_git_location))
+            pipeline_template_local_dir = Path(pipeline_template_git_location)
+        else:
+            with osutils.mkdir_temp(ignore_errors=True) as tempdir:
+                tempdir_path = Path(tempdir)
+                pipeline_template_local_dir = _clone_pipeline_templates(
+                    pipeline_template_git_location, tempdir_path, CUSTOM_PIPELINE_TEMPLATE_REPO_LOCAL_NAME
+                )
 
-        with osutils.mkdir_temp(ignore_errors=True) as tempdir:
-            tempdir_path = Path(tempdir)
-            pipeline_template_local_dir: Path = _clone_pipeline_templates(
-                pipeline_template_git_location, tempdir_path, CUSTOM_PIPELINE_TEMPLATE_REPO_LOCAL_NAME
+        if os.path.exists(pipeline_template_local_dir.joinpath("manifest.yaml")):
+            pipeline_templates_manifest: PipelineTemplatesManifest = _read_app_pipeline_templates_manifest(
+                pipeline_template_local_dir
+            )
+            # The manifest contains multiple pipeline templates, so select one
+            selected_pipeline_template_metadata: PipelineTemplateMetadata = _prompt_pipeline_template(
+                pipeline_templates_manifest
+            )
+
selected_pipeline_template_dir: Path = pipeline_template_local_dir.joinpath( + selected_pipeline_template_metadata.location ) - return self._generate_from_pipeline_template(pipeline_template_local_dir) + else: + # If the repository does not contain a manifest, treat it as a pipeline template directory. + selected_pipeline_template_dir = pipeline_template_local_dir + + return self._generate_from_pipeline_template(selected_pipeline_template_dir) def _prompt_run_bootstrap_within_pipeline_init( self, stage_configuration_names: List[str], number_of_stages: int diff --git a/samcli/commands/sync/command.py b/samcli/commands/sync/command.py index b81e84cfd0..23cb8144f2 100644 --- a/samcli/commands/sync/command.py +++ b/samcli/commands/sync/command.py @@ -30,7 +30,9 @@ ) from samcli.cli.cli_config_file import configuration_option, TomlProvider from samcli.commands._utils.click_mutex import ClickMutex +from samcli.lib.telemetry.event import EventTracker, track_long_event from samcli.commands.sync.sync_context import SyncContext +from samcli.lib.build.bundler import EsbuildBundlerManager from samcli.lib.utils.colors import Colored from samcli.lib.utils.version_checker import check_newer_version from samcli.lib.bootstrap.bootstrap import manage_stack @@ -134,6 +136,7 @@ @capabilities_option(default=DEFAULT_CAPABILITIES) # pylint: disable=E1120 @pass_context @track_command +@track_long_event("SyncUsed", "Start", "SyncUsed", "End") @image_repository_validation @track_template_warnings([CodeDeployWarning.__name__, CodeDeployConditionWarning.__name__]) @check_newer_version @@ -236,8 +239,12 @@ def do_cli( s3_bucket_name = s3_bucket or manage_stack(profile=profile, region=region) + if dependency_layer is True: + dependency_layer = check_enable_dependency_layer(template_file) + build_dir = DEFAULT_BUILD_DIR_WITH_AUTO_DEPENDENCY_LAYER if dependency_layer else DEFAULT_BUILD_DIR LOG.debug("Using build directory as %s", build_dir) + EventTracker.track_event("UsedFeature", "Accelerate") with BuildContext( resource_identifier=None, @@ -308,6 +315,7 @@ def do_cli( signing_profiles=None, disable_rollback=False, poll_delay=poll_delay, + on_failure=None, ) as deploy_context: with SyncContext(dependency_layer, build_context.build_dir, build_context.cache_dir): if watch: @@ -413,3 +421,20 @@ def execute_watch( """ watch_manager = WatchManager(template, build_context, package_context, deploy_context, auto_dependency_layer) watch_manager.start() + + +def check_enable_dependency_layer(template_file: str): + """ + Check if auto dependency layer should be enabled + :param template_file: template file string + :return: True if ADL should be enabled, False otherwise + """ + stacks, _ = SamLocalStackProvider.get_stacks(template_file) + for stack in stacks: + esbuild = EsbuildBundlerManager(stack) + if esbuild.esbuild_configured(): + # Disable ADL if esbuild is configured. 
esbuild already makes the package size + # small enough to ensure that ADL isn't needed to improve performance + click.secho("esbuild is configured, disabling auto dependency layer.", fg="yellow") + return False + return True diff --git a/samcli/lib/bootstrap/nested_stack/nested_stack_manager.py b/samcli/lib/bootstrap/nested_stack/nested_stack_manager.py index 0263470d34..792472852e 100644 --- a/samcli/lib/bootstrap/nested_stack/nested_stack_manager.py +++ b/samcli/lib/bootstrap/nested_stack/nested_stack_manager.py @@ -8,6 +8,7 @@ from pathlib import Path from typing import Dict, Optional, cast +from samcli.commands._utils.experimental import is_experimental_enabled, ExperimentalFlag from samcli.commands._utils.template import move_template from samcli.lib.bootstrap.nested_stack.nested_stack_builder import NestedStackBuilder from samcli.lib.build.app_builder import ApplicationBuildResult @@ -158,9 +159,13 @@ def update_layer_folder( if layer_root_folder.exists(): shutil.rmtree(layer_root_folder) layer_contents_folder = layer_root_folder.joinpath(get_layer_subfolder(function_runtime)) - layer_contents_folder.mkdir(BUILD_DIR_PERMISSIONS, parents=True) + layer_root_folder.mkdir(BUILD_DIR_PERMISSIONS, parents=True) if os.path.isdir(dependencies_dir): - osutils.copytree(dependencies_dir, str(layer_contents_folder)) + if is_experimental_enabled(ExperimentalFlag.BuildPerformance): + osutils.create_symlink_or_copy(dependencies_dir, str(layer_contents_folder)) + else: + layer_contents_folder.mkdir(BUILD_DIR_PERMISSIONS, parents=True) + osutils.copytree(dependencies_dir, str(layer_contents_folder)) NestedStackManager._add_layer_readme_info(str(layer_root_folder), function_logical_id) return str(layer_root_folder) diff --git a/samcli/lib/build/build_graph.py b/samcli/lib/build/build_graph.py index ffd51f4959..5801fcab52 100644 --- a/samcli/lib/build/build_graph.py +++ b/samcli/lib/build/build_graph.py @@ -14,6 +14,7 @@ import tomlkit +from samcli.commands._utils.experimental import is_experimental_enabled, ExperimentalFlag from samcli.lib.build.exceptions import InvalidBuildGraphException from samcli.lib.providers.provider import Function, LayerVersion from samcli.lib.samlib.resource_metadata_normalizer import ( @@ -45,6 +46,7 @@ LAYER_FIELD = "layer" ARCHITECTURE_FIELD = "architecture" HANDLER_FIELD = "handler" +SHARED_CODEURI_SUFFIX = "Shared" def _function_build_definition_to_toml_table( @@ -625,7 +627,12 @@ def get_build_dir(self, artifact_root_dir: str) -> str: Return the directory path relative to root build directory """ self._validate_functions() - return self.functions[0].get_build_dir(artifact_root_dir) + build_dir = self.functions[0].get_build_dir(artifact_root_dir) + if is_experimental_enabled(ExperimentalFlag.BuildPerformance) and len(self.functions) > 1: + # If there are multiple functions with the same build definition, + # just put them into one single shared artifacts directory. 
+            build_dir = f"{build_dir}-{SHARED_CODEURI_SUFFIX}"
+        return build_dir
 
     def get_resource_full_paths(self) -> str:
         """Returns list of functions' full path information as a list of str"""
diff --git a/samcli/lib/build/build_strategy.py b/samcli/lib/build/build_strategy.py
index 25469fa7e6..1407d63b3a 100644
--- a/samcli/lib/build/build_strategy.py
+++ b/samcli/lib/build/build_strategy.py
@@ -10,6 +10,7 @@
 from copy import deepcopy
 from typing import Callable, Dict, List, Any, Optional, cast, Set, Tuple, TypeVar
 
+from samcli.commands._utils.experimental import is_experimental_enabled, ExperimentalFlag
 from samcli.lib.utils import osutils
 from samcli.lib.utils.async_utils import AsyncContext
 from samcli.lib.utils.hash import dir_checksum
@@ -178,12 +179,20 @@ def build_single_function_definition(self, build_definition: FunctionBuildDefini
         if build_definition.packagetype == ZIP:
             for function in build_definition.functions:
                 if function.full_path != single_full_path:
-                    # for zip function we need to copy over the artifacts
-                    # artifacts directory will be created by the builder
-                    artifacts_dir = function.get_build_dir(self._build_dir)
-                    LOG.debug("Copying artifacts from %s to %s", single_build_dir, artifacts_dir)
-                    osutils.copytree(single_build_dir, artifacts_dir)
-                    function_build_results[function.full_path] = artifacts_dir
+                    # for zip functions, reuse the shared artifacts directory
+                    # that was built by the action above
+                    if is_experimental_enabled(ExperimentalFlag.BuildPerformance):
+                        LOG.debug(
+                            "Using previously built shared location %s for function %s", result, function.full_path
+                        )
+                        function_build_results[function.full_path] = result
+                    else:
+                        # for zip function we need to copy over the artifacts
+                        # artifacts directory will be created by the builder
+                        artifacts_dir = function.get_build_dir(self._build_dir)
+                        LOG.debug("Copying artifacts from %s to %s", single_build_dir, artifacts_dir)
+                        osutils.copytree(single_build_dir, artifacts_dir)
+                        function_build_results[function.full_path] = artifacts_dir
         elif build_definition.packagetype == IMAGE:
             for function in build_definition.functions:
                 if function.full_path != single_full_path:
@@ -278,19 +287,37 @@ def build_single_function_definition(self, build_definition: FunctionBuildDefini
                 build_definition.source_hash = source_hash
                 # Since all the build contents are same for a build definition, just copy any one of them into the cache
                 for _, value in build_result.items():
-                    osutils.copytree(value, cache_function_dir)
+                    osutils.copytree(value, str(cache_function_dir))
                     break
             else:
                 LOG.info(
                     "Valid cache found, copying previously built resources for following functions (%s)",
                     build_definition.get_resource_full_paths(),
                 )
-                for function in build_definition.functions:
-                    # artifacts directory will be created by the builder
-                    artifacts_dir = function.get_build_dir(self._build_dir)
-                    LOG.debug("Copying artifacts from %s to %s", cache_function_dir, artifacts_dir)
-                    osutils.copytree(cache_function_dir, artifacts_dir)
-                    function_build_results[function.full_path] = artifacts_dir
+                if is_experimental_enabled(ExperimentalFlag.BuildPerformance):
+                    first_function_artifacts_dir: Optional[str] = None
+                    for function in build_definition.functions:
+                        if not first_function_artifacts_dir:
+                            # artifacts directory will be created by the builder
+                            artifacts_dir = build_definition.get_build_dir(self._build_dir)
+                            LOG.debug("Linking artifacts from %s to %s", cache_function_dir, artifacts_dir)
+                            osutils.create_symlink_or_copy(str(cache_function_dir), artifacts_dir)
+
function_build_results[function.full_path] = artifacts_dir + first_function_artifacts_dir = artifacts_dir + else: + LOG.debug( + "Function (%s) build folder is updated to %s", + function.full_path, + first_function_artifacts_dir, + ) + function_build_results[function.full_path] = first_function_artifacts_dir + else: + for function in build_definition.functions: + # artifacts directory will be created by the builder + artifacts_dir = function.get_build_dir(self._build_dir) + LOG.debug("Copying artifacts from %s to %s", cache_function_dir, artifacts_dir) + osutils.copytree(str(cache_function_dir), artifacts_dir) + function_build_results[function.full_path] = artifacts_dir return function_build_results @@ -317,7 +344,7 @@ def build_single_layer_definition(self, layer_definition: LayerBuildDefinition) layer_definition.source_hash = source_hash # Since all the build contents are same for a build definition, just copy any one of them into the cache for _, value in build_result.items(): - osutils.copytree(value, cache_function_dir) + osutils.copytree(value, str(cache_function_dir)) break else: LOG.info( @@ -325,9 +352,14 @@ def build_single_layer_definition(self, layer_definition: LayerBuildDefinition) layer_definition.get_resource_full_paths(), ) # artifacts directory will be created by the builder - artifacts_dir = str(pathlib.Path(self._build_dir, layer_definition.layer.full_path)) - LOG.debug("Copying artifacts from %s to %s", cache_function_dir, artifacts_dir) - osutils.copytree(cache_function_dir, artifacts_dir) + artifacts_dir = layer_definition.layer.get_build_dir(self._build_dir) + + if is_experimental_enabled(ExperimentalFlag.BuildPerformance): + LOG.debug("Linking artifacts folder from %s to %s", cache_function_dir, artifacts_dir) + osutils.create_symlink_or_copy(str(cache_function_dir), artifacts_dir) + else: + LOG.debug("Copying artifacts from %s to %s", cache_function_dir, artifacts_dir) + osutils.copytree(str(cache_function_dir), artifacts_dir) layer_build_result[layer_definition.layer.full_path] = artifacts_dir return layer_build_result diff --git a/samcli/lib/build/bundler.py b/samcli/lib/build/bundler.py new file mode 100644 index 0000000000..71d3c68556 --- /dev/null +++ b/samcli/lib/build/bundler.py @@ -0,0 +1,189 @@ +""" +Handles bundler properties as needed to modify the build process +""" +import logging +from copy import deepcopy +from typing import Dict, Optional + +from samcli.lib.providers.provider import Stack +from samcli.lib.providers.sam_function_provider import SamFunctionProvider + +LOG = logging.getLogger(__name__) + +ESBUILD_PROPERTY = "esbuild" + + +class EsbuildBundlerManager: + def __init__(self, stack: Stack, template: Optional[Dict] = None): + self._stack = stack + self._previous_template = template or dict() + + def esbuild_configured(self) -> bool: + """ + Checks if esbuild is configured on any resource in a given stack + :return: True if there is a function instance using esbuild as the build method + """ + function_provider = SamFunctionProvider( + [self._stack], use_raw_codeuri=True, ignore_code_extraction_warnings=True + ) + functions = list(function_provider.get_all()) + for function in functions: + if function.metadata and function.metadata.get("BuildMethod", "") == ESBUILD_PROPERTY: + return True + return False + + def set_sourcemap_metadata_from_env(self) -> Stack: + """ + Checks if sourcemaps are set in lambda environment and updates build metadata accordingly. 
:return: Modified stack
+        """
+        modified_stack = deepcopy(self._stack)
+
+        using_source_maps = False
+        stack_resources = modified_stack.resources
+
+        for name, resource in stack_resources.items():
+            metadata = resource.get("Metadata", {})
+
+            if not self._esbuild_in_metadata(metadata):
+                continue
+
+            node_option_set = self._is_node_option_set(resource)
+
+            # read Sourcemap from the resource's build properties
+            build_properties = metadata.get("BuildProperties", {})
+            source_map = build_properties.get("Sourcemap", None)
+
+            # if --enable-source-maps is set but Sourcemap is not, append Sourcemap: true
+            if source_map is None and node_option_set:
+                LOG.info(
+                    "\n--enable-source-maps set without Sourcemap, adding Sourcemap to"
+                    " Metadata BuildProperties for %s",
+                    name,
+                )
+
+                resource.setdefault("Metadata", {})
+                resource["Metadata"].setdefault("BuildProperties", {})
+                resource["Metadata"]["BuildProperties"]["Sourcemap"] = True
+
+                using_source_maps = True
+
+        if using_source_maps:
+            self._warn_using_source_maps()
+
+        return modified_stack
+
+    def set_sourcemap_env_from_metadata(self) -> Dict:
+        """
+        Appends ``NODE_OPTIONS: --enable-source-maps`` to a function's environment
+        variables if ``Sourcemap`` is set to true in its build metadata.
+        :return: Dict containing a deep-copied, updated template
+        """
+        using_source_maps = False
+        invalid_node_option = False
+
+        template = deepcopy(self._previous_template)
+        template_resources = template.get("Resources", {})
+
+        # We check the stack resources since they contain the global values; we modify the template
+        stack_resources = self._stack.resources
+
+        for name, stack_resource in stack_resources.items():
+            metadata = stack_resource.get("Metadata", {})
+
+            if not self._esbuild_in_metadata(metadata):
+                continue
+
+            node_option_set = self._is_node_option_set(stack_resource)
+
+            template_resource = template_resources.get(name, {})
+
+            # check if Sourcemap is provided and append --enable-source-maps if not set
+            build_properties = metadata.get("BuildProperties", {})
+            source_map = build_properties.get("Sourcemap", None)
+
+            if source_map and not node_option_set:
+                LOG.info(
+                    "\nSourcemap set without --enable-source-maps, adding"
+                    " --enable-source-maps to function %s NODE_OPTIONS",
+                    name,
+                )
+
+                template_resource.setdefault("Properties", {})
+                template_resource["Properties"].setdefault("Environment", {})
+                template_resource["Properties"]["Environment"].setdefault("Variables", {})
+                existing_options = template_resource["Properties"]["Environment"]["Variables"].setdefault(
+                    "NODE_OPTIONS", ""
+                )
+
+                # make sure the NODE_OPTIONS is a string
+                if not isinstance(existing_options, str):
+                    invalid_node_option = True
+                else:
+                    template_resource["Properties"]["Environment"]["Variables"]["NODE_OPTIONS"] = " ".join(
+                        [existing_options, "--enable-source-maps"]
+                    )
+
+                using_source_maps = True
+
+        if using_source_maps:
+            self._warn_using_source_maps()
+
+        if invalid_node_option:
+            self._warn_invalid_node_options()
+
+        return template
+
+    @staticmethod
+    def _esbuild_in_metadata(metadata: Dict) -> bool:
+        """
+        Checks if esbuild is configured in the function's metadata
+        :param metadata: dict of metadata properties of a function
+        :return: True if esbuild is configured, False otherwise
+        """
+        return bool(metadata.get("BuildMethod", "") == ESBUILD_PROPERTY)
+
+    @staticmethod
+    def _is_node_option_set(resource: Dict) -> bool:
+        """
+        Checks if the template has NODE_OPTIONS --enable-source-maps set
+
+
Parameters + ---------- + resource : Dict + The resource dictionary to lookup if --enable-source-maps is set + + Returns + ------- + bool + True if --enable-source-maps is set, otherwise false + """ + try: + node_options = resource["Properties"]["Environment"]["Variables"]["NODE_OPTIONS"] + + return "--enable-source-maps" in node_options.split() + except (KeyError, AttributeError): + return False + + @staticmethod + def _warn_invalid_node_options() -> None: + """ + Log warning for invalid node options + """ + LOG.info( + "\nNODE_OPTIONS is not a string! As a result, the NODE_OPTIONS environment variable will " + "not be set correctly, please make sure it is a string. " + "Visit https://nodejs.org/api/cli.html#node_optionsoptions for more details.\n", + ) + + @staticmethod + def _warn_using_source_maps() -> None: + """ + Log warning telling user that node options will be set + :return: + """ + LOG.info( + "\nYou are using source maps, note that this comes with a performance hit!" + " Set Sourcemap to false and remove" + " NODE_OPTIONS: --enable-source-maps to disable source maps.\n", + ) diff --git a/samcli/lib/build/workflow_config.py b/samcli/lib/build/workflow_config.py index 3a006ca2e4..d6b1f55926 100644 --- a/samcli/lib/build/workflow_config.py +++ b/samcli/lib/build/workflow_config.py @@ -4,95 +4,24 @@ import os import logging -from collections import namedtuple from typing import Dict, List, Optional, Tuple, Union, cast -LOG = logging.getLogger(__name__) - -CONFIG = namedtuple( - "Capability", - ["language", "dependency_manager", "application_framework", "manifest_name", "executable_search_paths"], -) - -PYTHON_PIP_CONFIG = CONFIG( - language="python", - dependency_manager="pip", - application_framework=None, - manifest_name="requirements.txt", - executable_search_paths=None, -) - -NODEJS_NPM_CONFIG = CONFIG( - language="nodejs", - dependency_manager="npm", - application_framework=None, - manifest_name="package.json", - executable_search_paths=None, -) - -RUBY_BUNDLER_CONFIG = CONFIG( - language="ruby", - dependency_manager="bundler", - application_framework=None, - manifest_name="Gemfile", - executable_search_paths=None, -) - -JAVA_GRADLE_CONFIG = CONFIG( - language="java", - dependency_manager="gradle", - application_framework=None, - manifest_name="build.gradle", - executable_search_paths=None, -) - -JAVA_KOTLIN_GRADLE_CONFIG = CONFIG( - language="java", - dependency_manager="gradle", - application_framework=None, - manifest_name="build.gradle.kts", - executable_search_paths=None, +from samcli.lib.build.workflows import ( + CONFIG, + PYTHON_PIP_CONFIG, + NODEJS_NPM_CONFIG, + RUBY_BUNDLER_CONFIG, + JAVA_GRADLE_CONFIG, + JAVA_KOTLIN_GRADLE_CONFIG, + JAVA_MAVEN_CONFIG, + DOTNET_CLIPACKAGE_CONFIG, + GO_MOD_CONFIG, + PROVIDED_MAKE_CONFIG, + NODEJS_NPM_ESBUILD_CONFIG, ) +from samcli.lib.telemetry.event import EventTracker -JAVA_MAVEN_CONFIG = CONFIG( - language="java", - dependency_manager="maven", - application_framework=None, - manifest_name="pom.xml", - executable_search_paths=None, -) - -DOTNET_CLIPACKAGE_CONFIG = CONFIG( - language="dotnet", - dependency_manager="cli-package", - application_framework=None, - manifest_name=".csproj", - executable_search_paths=None, -) - -GO_MOD_CONFIG = CONFIG( - language="go", - dependency_manager="modules", - application_framework=None, - manifest_name="go.mod", - executable_search_paths=None, -) - -PROVIDED_MAKE_CONFIG = CONFIG( - language="provided", - dependency_manager=None, - application_framework=None, - manifest_name="Makefile", - 
executable_search_paths=None, -) - -NODEJS_NPM_ESBUILD_CONFIG = CONFIG( - language="nodejs", - dependency_manager="npm-esbuild", - application_framework=None, - manifest_name="package.json", - executable_search_paths=None, -) +LOG = logging.getLogger(__name__) class UnsupportedRuntimeException(Exception): @@ -278,6 +207,9 @@ def get_workflow_config( # Identify workflow configuration from the workflow selector. config = cast(WorkFlowSelector, selector).get_config(code_dir, project_dir) + + EventTracker.track_event("BuildWorkflowUsed", f"{config.language}-{config.dependency_manager}") + return config except ValueError as ex: raise UnsupportedRuntimeException( diff --git a/samcli/lib/build/workflows.py b/samcli/lib/build/workflows.py new file mode 100644 index 0000000000..a413f696c1 --- /dev/null +++ b/samcli/lib/build/workflows.py @@ -0,0 +1,102 @@ +"""Module for storing information about existing workflows.""" + +from collections import namedtuple +from typing import List + +CONFIG = namedtuple( + "Capability", + ["language", "dependency_manager", "application_framework", "manifest_name", "executable_search_paths"], +) + +PYTHON_PIP_CONFIG = CONFIG( + language="python", + dependency_manager="pip", + application_framework=None, + manifest_name="requirements.txt", + executable_search_paths=None, +) + +NODEJS_NPM_CONFIG = CONFIG( + language="nodejs", + dependency_manager="npm", + application_framework=None, + manifest_name="package.json", + executable_search_paths=None, +) + +RUBY_BUNDLER_CONFIG = CONFIG( + language="ruby", + dependency_manager="bundler", + application_framework=None, + manifest_name="Gemfile", + executable_search_paths=None, +) + +JAVA_GRADLE_CONFIG = CONFIG( + language="java", + dependency_manager="gradle", + application_framework=None, + manifest_name="build.gradle", + executable_search_paths=None, +) + +JAVA_KOTLIN_GRADLE_CONFIG = CONFIG( + language="java", + dependency_manager="gradle", + application_framework=None, + manifest_name="build.gradle.kts", + executable_search_paths=None, +) + +JAVA_MAVEN_CONFIG = CONFIG( + language="java", + dependency_manager="maven", + application_framework=None, + manifest_name="pom.xml", + executable_search_paths=None, +) + +DOTNET_CLIPACKAGE_CONFIG = CONFIG( + language="dotnet", + dependency_manager="cli-package", + application_framework=None, + manifest_name=".csproj", + executable_search_paths=None, +) + +GO_MOD_CONFIG = CONFIG( + language="go", + dependency_manager="modules", + application_framework=None, + manifest_name="go.mod", + executable_search_paths=None, +) + +PROVIDED_MAKE_CONFIG = CONFIG( + language="provided", + dependency_manager=None, + application_framework=None, + manifest_name="Makefile", + executable_search_paths=None, +) + +NODEJS_NPM_ESBUILD_CONFIG = CONFIG( + language="nodejs", + dependency_manager="npm-esbuild", + application_framework=None, + manifest_name="package.json", + executable_search_paths=None, +) + +ALL_CONFIGS: List[CONFIG] = [ + PYTHON_PIP_CONFIG, + NODEJS_NPM_CONFIG, + RUBY_BUNDLER_CONFIG, + JAVA_GRADLE_CONFIG, + JAVA_KOTLIN_GRADLE_CONFIG, + JAVA_MAVEN_CONFIG, + DOTNET_CLIPACKAGE_CONFIG, + GO_MOD_CONFIG, + PROVIDED_MAKE_CONFIG, + NODEJS_NPM_ESBUILD_CONFIG, +] diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index f9f60d3288..1c97aff699 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -25,12 +25,13 @@ import botocore -from samcli.lib.deploy.utils import DeployColor +from samcli.lib.deploy.utils import DeployColor, FailureMode from 
samcli.commands.deploy.exceptions import ( DeployFailedError, ChangeSetError, DeployStackOutPutFailedError, DeployBucketInDifferentRegionError, + DeployStackStatusMissingError, ) from samcli.commands._utils.table_print import pprint_column_names, pprint_columns, newline_per_item, MIN_OFFSET from samcli.commands.deploy import exceptions as deploy_exceptions @@ -361,11 +362,14 @@ def get_last_event_time(self, stack_name): table_header=DESCRIBE_STACK_EVENTS_TABLE_HEADER_NAME, display_sleep=True, ) - def describe_stack_events(self, stack_name, time_stamp_marker, **kwargs): + def describe_stack_events( + self, stack_name: str, time_stamp_marker: float, on_failure: FailureMode = FailureMode.ROLLBACK, **kwargs + ): """ Calls CloudFormation to get current stack events :param stack_name: Name or ID of the stack :param time_stamp_marker: last event time on the stack to start streaming events from. + :param on_failure: The action to take if the stack fails to deploy :param kwargs: Other arguments to pass to pprint_columns() """ @@ -379,7 +383,10 @@ def describe_stack_events(self, stack_name, time_stamp_marker, **kwargs): time.sleep(0 if retry_attempts else self.client_sleep) paginator = self._client.get_paginator("describe_stack_events") response_iterator = paginator.paginate(StackName=stack_name) - new_events = deque() # event buffer + + # Event buffer + new_events = deque() # type: deque + for event_items in response_iterator: for event in event_items["StackEvents"]: # Skip already shown old event entries or former deployments @@ -424,6 +431,12 @@ def describe_stack_events(self, stack_name, time_stamp_marker, **kwargs): # Reset retry attempts if iteration is a success to use client_sleep again retry_attempts = 0 except botocore.exceptions.ClientError as ex: + if ( + "Stack with id {0} does not exist".format(stack_name) in str(ex) + and on_failure == FailureMode.DELETE + ): + return + retry_attempts = retry_attempts + 1 if retry_attempts > self.max_attempts: LOG.error("Describing stack events for %s failed: %s", stack_name, str(ex)) @@ -443,7 +456,13 @@ def _is_root_stack_event(event: Dict) -> bool: def _check_stack_not_in_progress(status: str) -> bool: return "IN_PROGRESS" not in status - def wait_for_execute(self, stack_name: str, stack_operation: str, disable_rollback: bool) -> None: + def wait_for_execute( + self, + stack_name: str, + stack_operation: str, + disable_rollback: bool, + on_failure: FailureMode = FailureMode.ROLLBACK, + ) -> None: """ Wait for stack operation to execute and return when execution completes. If the stack has "Outputs," they will be printed. 
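
The `ClientError` early-return added to `describe_stack_events` above handles the race where `--on-failure DELETE` removes the stack while its events are still being polled. Condensed into an illustrative predicate (the helper name is not part of the patch; CloudFormation reports a deleted stack with a `ValidationError` whose message contains "Stack with id <name> does not exist"):

```python
from botocore.exceptions import ClientError

from samcli.lib.deploy.utils import FailureMode

def stack_gone_as_expected(ex: ClientError, stack_name: str, on_failure: FailureMode) -> bool:
    # A "missing stack" error is expected, not retryable, once the DELETE
    # failure mode has already torn the stack down.
    stack_gone = "Stack with id {0} does not exist".format(stack_name) in str(ex)
    return stack_gone and on_failure == FailureMode.DELETE
```
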
@@ -456,6 +475,8 @@ def wait_for_execute(self, stack_name: str, stack_operation: str, disable_rollba The type of the stack operation, 'CREATE' or 'UPDATE' disable_rollback : bool Preserves the state of previously provisioned resources when an operation fails + on_failure : FailureMode + The action to take when the operation fails """ sys.stdout.write( "\n{} - Waiting for stack create/update " @@ -463,7 +484,7 @@ def wait_for_execute(self, stack_name: str, stack_operation: str, disable_rollba ) sys.stdout.flush() - self.describe_stack_events(stack_name, time.time() * 1000) + self.describe_stack_events(stack_name, time.time() * 1000, on_failure) # Pick the right waiter if stack_operation == "CREATE": @@ -481,15 +502,21 @@ def wait_for_execute(self, stack_name: str, stack_operation: str, disable_rollba waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) except botocore.exceptions.WaiterError as ex: LOG.debug("Execute stack waiter exception", exc_info=ex) - if disable_rollback: + if disable_rollback and on_failure is not FailureMode.DELETE: + # This will only display the message if disable rollback is set or if DO_NOTHING is specified msg = self._gen_deploy_failed_with_rollback_disabled_msg(stack_name) LOG.info(self._colored.red(msg)) raise deploy_exceptions.DeployFailedError(stack_name=stack_name, msg=str(ex)) - outputs = self.get_stack_outputs(stack_name=stack_name, echo=False) - if outputs: - self._display_stack_outputs(outputs) + try: + outputs = self.get_stack_outputs(stack_name=stack_name, echo=False) + if outputs: + self._display_stack_outputs(outputs) + except DeployStackOutPutFailedError as ex: + # Show exception if we aren't deleting stacks + if on_failure != FailureMode.DELETE: + raise ex def create_and_wait_for_changeset( self, stack_name, cfn_template, parameter_values, capabilities, role_arn, notification_arns, s3_uploader, tags @@ -542,6 +569,7 @@ def sync( notification_arns: Optional[List[str]], s3_uploader: Optional[S3Uploader], tags: Optional[Dict], + on_failure: FailureMode, ): """ Call the sync command to directly update stack or create stack @@ -556,6 +584,7 @@ def sync( :param notification_arns: Arns for sending notifications :param s3_uploader: S3Uploader object to upload files to S3 buckets :param tags: Array of tags passed to CloudFormation + :param on_failure: FailureMode enum indicating the action to take on stack creation failure :return: """ exists = self.has_stack(stack_name) @@ -584,15 +613,23 @@ def sync( kwargs = self._process_kwargs(kwargs, s3_uploader, capabilities, role_arn, notification_arns) try: + disable_rollback = False + if on_failure == FailureMode.DO_NOTHING: + disable_rollback = True msg = "" if exists: + kwargs["DisableRollback"] = disable_rollback + result = self.update_stack(**kwargs) - self.wait_for_execute(stack_name, "UPDATE", False) + self.wait_for_execute(stack_name, "UPDATE", disable_rollback, on_failure=on_failure) msg = "\nStack update succeeded. Sync infra completed.\n" else: + # Pass string representation of enum + kwargs["OnFailure"] = str(on_failure) + result = self.create_stack(**kwargs) - self.wait_for_execute(stack_name, "CREATE", False) + self.wait_for_execute(stack_name, "CREATE", disable_rollback, on_failure=on_failure) msg = "\nStack creation succeeded. 
Sync infra completed.\n"
 
             LOG.info(self._colored.green(msg))
@@ -642,6 +679,110 @@ def get_stack_outputs(self, stack_name, echo=True):
         except botocore.exceptions.ClientError as ex:
             raise DeployStackOutPutFailedError(stack_name=stack_name, msg=str(ex)) from ex
 
+    def rollback_delete_stack(self, stack_name: str):
+        """
+        Try to roll back the stack to a successful state; if there is no good state, delete the stack
+
+        Parameters
+        ----------
+        :param stack_name: str
+            The name of the stack
+        """
+        kwargs = {
+            "StackName": stack_name,
+        }
+
+        current_state = self._get_stack_status(stack_name)
+
+        try:
+            if current_state == "UPDATE_FAILED":
+                LOG.info("Stack %s failed to update, rolling back stack to previous state...", stack_name)
+
+                self._client.rollback_stack(**kwargs)
+                self.describe_stack_events(stack_name, time.time() * 1000, FailureMode.DELETE)
+                self._rollback_wait(stack_name)
+
+                current_state = self._get_stack_status(stack_name)
+
+            failed_states = ["CREATE_FAILED", "UPDATE_FAILED", "ROLLBACK_COMPLETE", "ROLLBACK_FAILED"]
+
+            if current_state in failed_states:
+                LOG.info("Stack %s failed to create/update correctly, deleting stack", stack_name)
+
+                self._client.delete_stack(**kwargs)
+
+                # only a stack that failed to create will have stack events; deleting
+                # from a ROLLBACK_COMPLETE state will not return anything
+                # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/cloudformation.html#CloudFormation.Client.delete_stack
+                if current_state == "CREATE_FAILED":
+                    self.describe_stack_events(stack_name, time.time() * 1000, FailureMode.DELETE)
+
+                waiter = self._client.get_waiter("stack_delete_complete")
+                waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 30, "MaxAttempts": 120})
+
+                LOG.info("\nStack %s has been deleted", stack_name)
+            else:
+                LOG.info("Stack %s has rolled back successfully", stack_name)
+        except botocore.exceptions.ClientError as ex:
+            raise DeployStackStatusMissingError(stack_name) from ex
+        except botocore.exceptions.WaiterError:
+            LOG.error(
+                "\nStack %s failed to delete properly! Please manually clean up any persistent resources.",
+                stack_name,
+            )
+        except KeyError:
+            LOG.info("Stack %s is not found, skipping", stack_name)
+
+    def _get_stack_status(self, stack_name: str) -> str:
+        """
+        Returns the status of the stack
+
+        Parameters
+        ----------
+        :param stack_name: str
+            The name of the stack
+
+        Returns
+        -------
+        :return: str
+            A string representing the status of the stack
+        """
+        stack = self._client.describe_stacks(StackName=stack_name)
+        stack_status = str(stack["Stacks"][0]["StackStatus"])
+
+        return stack_status
+
+    def _rollback_wait(self, stack_name: str, wait_time: int = 30, max_retries: int = 120):
+        """
+        Manual waiter for rollback status; waits until we get *_ROLLBACK_COMPLETE or ROLLBACK_FAILED
+
+        Parameters
+        ----------
+        :param stack_name: str
+            The name of the stack
+        :param wait_time: int
+            The time to wait between polls, default 30 seconds
+        :param max_retries: int
+            The number of polls before timing out
+        """
+        status = ""
+        retries = 0
+
+        while retries < max_retries:
+            status = self._get_stack_status(stack_name)
+
+            if "ROLLBACK_COMPLETE" in status or status == "ROLLBACK_FAILED":
+                return
+
+            retries = retries + 1
+            time.sleep(wait_time)
+
+        LOG.error(
+            "Stack %s never reached a *_ROLLBACK_COMPLETE or ROLLBACK_FAILED state; got %s instead.",
+            stack_name,
+            status,
+        )
+
     @staticmethod
     def _gen_deploy_failed_with_rollback_disabled_msg(stack_name):
         return """\nFailed to deploy. 
Automatic rollback disabled for this deployment.\n
diff --git a/samcli/lib/deploy/utils.py b/samcli/lib/deploy/utils.py
index 6c556c7068..71b2fc88d4 100644
--- a/samcli/lib/deploy/utils.py
+++ b/samcli/lib/deploy/utils.py
@@ -2,6 +2,7 @@
 Utilities for Deploy
 """
 
+from enum import Enum
 from samcli.lib.utils.colors import Colored
 
 
@@ -32,3 +33,12 @@ def get_stack_events_status_color(self, status):
 
     def get_changeset_action_color(self, action):
         return self.changeset_color_map.get(action, "yellow")
+
+
+class FailureMode(Enum):
+    ROLLBACK = "ROLLBACK"
+    DELETE = "DELETE"
+    DO_NOTHING = "DO_NOTHING"
+
+    def __str__(self):
+        return self.value
diff --git a/samcli/lib/providers/sam_function_provider.py b/samcli/lib/providers/sam_function_provider.py
index 62369b75fe..03f69dff79 100644
--- a/samcli/lib/providers/sam_function_provider.py
+++ b/samcli/lib/providers/sam_function_provider.py
@@ -79,6 +79,26 @@ def stacks(self) -> List[Stack]:
         """
         return self._stacks
 
+    def update(
+        self,
+        stacks: List[Stack],
+        use_raw_codeuri: bool = False,
+        ignore_code_extraction_warnings: bool = False,
+        locate_layer_nested: bool = False,
+    ) -> None:
+        """
+        Hydrate the function provider with updated stacks
+        :param stacks: List of stacks from which functions are extracted
+        :param bool use_raw_codeuri: Do not adjust CodeUri based on the template path; use the raw URI.
+        Note(xinhol): use_raw_codeuri is temporary to fix a bug, and will be removed for a permanent solution.
+        :param bool ignore_code_extraction_warnings: Suppress warnings logged during code extraction
+        :param bool locate_layer_nested: Resolve nested layer references to their actual location in the nested stack
+        """
+        self._stacks = stacks
+        self.functions = SamFunctionProvider._extract_functions(
+            self._stacks, use_raw_codeuri, ignore_code_extraction_warnings, locate_layer_nested
+        )
+
     def get(self, name: str) -> Optional[Function]:
         """
         Returns the function given name or LogicalId of the function. 
Every SAM resource has a logicalId, but it may diff --git a/samcli/lib/sync/sync_flow_executor.py b/samcli/lib/sync/sync_flow_executor.py index 0cba6305cd..082a57c70d 100644 --- a/samcli/lib/sync/sync_flow_executor.py +++ b/samcli/lib/sync/sync_flow_executor.py @@ -10,6 +10,7 @@ from concurrent.futures import ThreadPoolExecutor, Future from botocore.exceptions import ClientError +from samcli.lib.telemetry.event import EventName, EventTracker, EventType from samcli.lib.utils.colors import Colored from samcli.lib.providers.exceptions import MissingLocalDefinition @@ -331,7 +332,13 @@ def _sync_flow_execute_wrapper(sync_flow: SyncFlow) -> SyncFlowResult: SyncFlowException """ dependent_sync_flows = [] + sync_types = EventType.get_accepted_values(EventName.SYNC_FLOW_START) + sync_type: Optional[str] = type(sync_flow).__name__ + if sync_type not in sync_types: + sync_type = None try: + if sync_type: + EventTracker.track_event("SyncFlowStart", sync_type) dependent_sync_flows = sync_flow.execute() except ClientError as e: if e.response.get("Error", dict()).get("Code", "") == "ResourceNotFoundException": @@ -339,4 +346,7 @@ def _sync_flow_execute_wrapper(sync_flow: SyncFlow) -> SyncFlowResult: raise SyncFlowException(sync_flow, e) from e except Exception as e: raise SyncFlowException(sync_flow, e) from e + finally: + if sync_type: + EventTracker.track_event("SyncFlowEnd", sync_type) return SyncFlowResult(sync_flow=sync_flow, dependent_sync_flows=dependent_sync_flows) diff --git a/samcli/lib/telemetry/event.py b/samcli/lib/telemetry/event.py new file mode 100644 index 0000000000..f04a0aaf26 --- /dev/null +++ b/samcli/lib/telemetry/event.py @@ -0,0 +1,297 @@ +""" +Represents Events and their values. +""" + +from datetime import datetime +from enum import Enum +import logging +import threading +from typing import List, Optional + +from samcli.cli.context import Context +from samcli.lib.build.workflows import ALL_CONFIGS +from samcli.lib.telemetry.telemetry import Telemetry +from samcli.local.common.runtime_template import INIT_RUNTIMES + + +LOG = logging.getLogger(__name__) + + +class EventName(Enum): + """Enum for the names of available events to track.""" + + USED_FEATURE = "UsedFeature" + BUILD_FUNCTION_RUNTIME = "BuildFunctionRuntime" + SYNC_USED = "SyncUsed" + SYNC_FLOW_START = "SyncFlowStart" + SYNC_FLOW_END = "SyncFlowEnd" + BUILD_WORKFLOW_USED = "BuildWorkflowUsed" + + +class EventType: + """Class for Events and the types of values they may have.""" + + _SYNC_FLOWS = [ + "AliasVersionSyncFlow", + "AutoDependencyLayerSyncFlow", + "AutoDependencyLayerParentSyncFlow", + "FunctionSyncFlow", + "FunctionLayerReferenceSync", + "GenericApiSyncFlow", + "HttpApiSyncFlow", + "ImageFunctionSyncFlow", + "LayerSyncFlow", + "RestApiSyncFlow", + "StepFunctionsSyncFlow", + "ZipFunctionSyncFlow", + ] + _WORKFLOWS = [f"{config.language}-{config.dependency_manager}" for config in ALL_CONFIGS] + + _event_values = { # Contains allowable values for Events + EventName.USED_FEATURE: [ + "Accelerate", + "CDK", + ], + EventName.BUILD_FUNCTION_RUNTIME: INIT_RUNTIMES, + EventName.SYNC_USED: [ + "Start", + "End", + ], + EventName.SYNC_FLOW_START: _SYNC_FLOWS, + EventName.SYNC_FLOW_END: _SYNC_FLOWS, + EventName.BUILD_WORKFLOW_USED: _WORKFLOWS, + } + + @staticmethod + def get_accepted_values(event_name: EventName) -> List[str]: + """Get all acceptable values for a given Event name.""" + if event_name not in EventType._event_values: + return [] + return EventType._event_values[event_name] + + +class Event: + """Class to 
represent Events that occur in SAM CLI.""" + + event_name: EventName + event_value: str # Validated by EventType.get_accepted_values to never be an arbitrary string + thread_id = threading.get_ident() # The thread ID; used to group Events from the same command run + time_stamp: str + + def __init__(self, event_name: str, event_value: str): + Event._verify_event(event_name, event_value) + self.event_name = EventName(event_name) + self.event_value = event_value + self.time_stamp = str(datetime.utcnow())[:-3] # format microseconds from 6 -> 3 figures to allow SQL casting + + def __eq__(self, other): + return self.event_name == other.event_name and self.event_value == other.event_value + + def __repr__(self): + return ( + f"Event(event_name={self.event_name.value}, " + f"event_value={self.event_value}, " + f"thread_id={self.thread_id}, " + f"time_stamp={self.time_stamp})" + ) + + def to_json(self): + return { + "event_name": self.event_name.value, + "event_value": self.event_value, + "thread_id": self.thread_id, + "time_stamp": self.time_stamp, + } + + @staticmethod + def _verify_event(event_name: str, event_value: str) -> None: + """Raise an EventCreationError if either the event name or value is not valid.""" + if event_name not in Event._get_event_names(): + raise EventCreationError(f"Event '{event_name}' does not exist.") + if event_value not in EventType.get_accepted_values(EventName(event_name)): + raise EventCreationError(f"Event '{event_name}' does not accept value '{event_value}'.") + + @staticmethod + def _get_event_names() -> List[str]: + """Retrieves a list of all valid event names.""" + return [event.value for event in EventName] + + +class EventTracker: + """Class to track and recreate Events as they occur.""" + + _events: List[Event] = [] + _event_lock = threading.Lock() + _session_id: Optional[str] = None + + MAX_EVENTS: int = 50 # Maximum number of events to store before sending + + @staticmethod + def track_event(event_name: str, event_value: str): + """Method to track an event where and when it occurs. + + Place this method in the codepath of the event that you would + like to track. For instance, if you would like to track when + FeatureX is used, append this method to the end of that function. + + Parameters + ---------- + event_name: str + The name of the Event. Must be a valid EventName value, or an + EventCreationError will be thrown. + event_value: str + The value of the Event. Must be a valid EventType value for the + passed event_name, or an EventCreationError will be thrown. + + Examples + -------- + >>> def feature_x(...): + # do things + EventTracker.track_event("UsedFeature", "FeatureX") + + >>> def feature_y(...) 
-> Any: + # do things + EventTracker.track_event("UsedFeature", "FeatureY") + return some_value + """ + try: + should_send: bool = False + with EventTracker._event_lock: + EventTracker._events.append(Event(event_name, event_value)) + # Get the session ID (needed when sending from other threads) + if not EventTracker._session_id: + try: + ctx = Context.get_current_context() + if ctx: + EventTracker._session_id = ctx.session_id + except RuntimeError: + LOG.debug("EventTracker: Unable to obtain session ID") + if len(EventTracker._events) >= EventTracker.MAX_EVENTS: + should_send = True + if should_send: + EventTracker.send_events() + except EventCreationError as e: + LOG.debug("Error occurred while trying to track an event: %s", e) + + @staticmethod + def get_tracked_events() -> List[Event]: + """Retrieve a list of all currently tracked Events.""" + with EventTracker._event_lock: + return EventTracker._events + + @staticmethod + def clear_trackers(): + """Clear the current list of tracked Events before the next session.""" + with EventTracker._event_lock: + EventTracker._events = [] + + @staticmethod + def send_events() -> threading.Thread: + """Start a thread to send the current list of Events via Telemetry.""" + send_thread = threading.Thread(target=EventTracker._send_events_in_thread) + send_thread.start() + return send_thread + + @staticmethod + def _send_events_in_thread(): + """Send the current list of Events via Telemetry.""" + from samcli.lib.telemetry.metric import Metric # pylint: disable=cyclic-import + + msa = {} + + with EventTracker._event_lock: + if not EventTracker._events: # Don't do anything if there are no events to send + return + + msa["events"] = [e.to_json() for e in EventTracker._events] + EventTracker._events = [] # Manual clear_trackers() since we're within the lock + + telemetry = Telemetry() + metric = Metric("events") + metric.add_data("sessionId", EventTracker._session_id) + metric.add_data("metricSpecificAttributes", msa) + telemetry.emit(metric) + + +def track_long_event(start_event_name: str, start_event_value: str, end_event_name: str, end_event_value: str): + """Decorator for tracking events that occur at start and end of a function. + + The decorator tracks two Events total, where the first Event occurs + at the start of the decorated function's execution (prior to its + first line) and the second Event occurs after the function has ended + (after the final line of the function has executed). + If this decorator is being placed in a function that also contains the + `track_command` decorator, ensure that this decorator is placed BELOW + `track_command`. Otherwise, the current list of Events will be sent + before the end_event is added, resulting in an additional 'events' + metric with only that single Event. + + Parameters + ---------- + start_event_name: str + The name of the Event that is executed at the start of the + decorated function's execution. Must be a valid EventName + value or the decorator will not run. + start_event_value: str + The value of the Event that is executed at the start of the + decorated function's execution. Must be a valid EventType + value for the passed `start_event_name` or the decorator + will not run. + end_event_name: str + The name of the Event that is executed at the end of the + decorated function's execution. Must be a valid EventName + value or the decorator will not run. + end_event_value: str + The value of the Event that is executed at the end of the + decorated function's execution. 
Must be a valid EventType + value for the passed `end_event_name` or the decorator + will not run. + + Examples + -------- + >>> @track_long_event("FuncStart", "Func1", "FuncEnd", "Func1") + def func1(...): + # do things + + >>> @track_long_event("FuncStart", "Func2", "FuncEnd", "Func2") + def func2(...): + # do things + """ + should_track = True + try: + # Check that passed values are valid Events + Event(start_event_name, start_event_value) + Event(end_event_name, end_event_value) + except EventCreationError as e: + LOG.debug("Error occurred while trying to track an event: %s\nDecorator not run.", e) + should_track = False + + def decorator_for_events(func): + """The actual decorator""" + + def wrapped(*args, **kwargs): + # Track starting event + if should_track: + EventTracker.track_event(start_event_name, start_event_value) + exception = None + # Run the function + try: + return_value = func(*args, **kwargs) + except Exception as e: + exception = e + # Track ending event + if should_track: + EventTracker.track_event(end_event_name, end_event_value) + EventTracker.send_events() # Ensure Events are sent at the end of execution + if exception: + raise exception + + return return_value + + return wrapped + + return decorator_for_events + + +class EventCreationError(Exception): + """Exception raised when an Event is not properly created."""
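# --- Illustrative sketch (not part of this patch): expected behavior of the
# event module above, shown as a hypothetical snippet rather than shipped code
# or a test from this PR.
from samcli.lib.telemetry.event import Event, EventCreationError, EventTracker

EventTracker.track_event("UsedFeature", "Accelerate")  # valid name/value pair: buffered
try:
    Event("UsedFeature", "NotARealFeature")  # value not in EventType._event_values
except EventCreationError as err:
    print(err)  # Event 'UsedFeature' does not accept value 'NotARealFeature'.

assert len(EventTracker.get_tracked_events()) == 1
EventTracker.send_events()  # flushes the buffer to Telemetry on a background thread
# --- end sketch ---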
diff --git a/samcli/lib/telemetry/metric.py b/samcli/lib/telemetry/metric.py index f6336e2ddd..470882f41b 100644 --- a/samcli/lib/telemetry/metric.py +++ b/samcli/lib/telemetry/metric.py @@ -7,7 +7,8 @@ import uuid import platform import logging -from typing import Optional +import traceback +from typing import Optional, Tuple import click @@ -17,6 +18,8 @@ from samcli.lib.warnings.sam_cli_warning import TemplateWarningsChecker from samcli.commands.exceptions import UserException from samcli.lib.telemetry.cicd import CICDDetector, CICDPlatform +from samcli.lib.telemetry.event import EventTracker +from samcli.lib.telemetry.project_metadata import get_git_remote_origin_url, get_project_name, get_initial_commit_hash from samcli.commands._utils.experimental import get_all_experimental_statues from .telemetry import Telemetry from ..iac.cdk.utils import is_cdk_project @@ -115,6 +118,8 @@ def wrapped(*args, **kwargs): return_value = None exit_reason = "success" exit_code = 0 + stack_trace = None + exception_message = None duration_fn = _timer() try: @@ -131,12 +136,14 @@ def wrapped(*args, **kwargs): exit_reason = type(ex).__name__ else: exit_reason = ex.wrapped_from + stack_trace, exception_message = _get_stack_trace_info(ex) except Exception as ex: exception = ex # Standard Unix practice to return exit code 255 on fatal/unhandled exit. exit_code = 255 exit_reason = type(ex).__name__ + stack_trace, exception_message = _get_stack_trace_info(ex) try: ctx = Context.get_current_context() @@ -144,6 +151,8 @@ def wrapped(*args, **kwargs): try: template_dict = ctx.template_dict project_type = ProjectTypes.CDK.value if is_cdk_project(template_dict) else ProjectTypes.CFN.value + if project_type == ProjectTypes.CDK.value: + EventTracker.track_event("UsedFeature", "CDK") metric_specific_attributes["projectType"] = project_type except AttributeError: LOG.debug("Template is not provided in context, skip adding project type metric") @@ -153,12 +162,18 @@ def wrapped(*args, **kwargs): metric.add_data("debugFlagProvided", bool(ctx.debug)) metric.add_data("region", ctx.region or "") metric.add_data("commandName", ctx.command_path) # Full command path. ex: sam local start-api - if metric_specific_attributes: - metric.add_data("metricSpecificAttributes", metric_specific_attributes) + # Project metadata metrics + metric_specific_attributes["gitOrigin"] = get_git_remote_origin_url() + metric_specific_attributes["projectName"] = get_project_name() + metric_specific_attributes["initialCommit"] = get_initial_commit_hash() + metric.add_data("metricSpecificAttributes", metric_specific_attributes) # Metric about command's execution characteristics metric.add_data("duration", duration_fn()) metric.add_data("exitReason", exit_reason) metric.add_data("exitCode", exit_code) + metric.add_data("stackTrace", stack_trace) + metric.add_data("exceptionMessage", exception_message) + EventTracker.send_events() # Sends Event metrics to Telemetry before commandRun metrics telemetry.emit(metric) except RuntimeError: LOG.debug("Unable to find Click Context for getting session_id.") @@ -170,6 +185,62 @@ def wrapped(*args, **kwargs): return wrapped +def _get_stack_trace_info(exception: Exception) -> Tuple[str, str]: + """ + Takes an Exception instance and extracts the following: + 1. Stack trace in a readable string format with user-sensitive paths cleaned + 2. Exception message including the fully-qualified exception name and value + + Parameters + ---------- + exception : Exception + Exception instance + + Returns + ------- + (str, str) + (stack trace, exception message) + """ + tb_exception = traceback.TracebackException.from_exception(exception) + _clean_stack_summary_paths(tb_exception.stack) + stack_trace = "".join(list(tb_exception.format())) + exception_msg = list(tb_exception.format_exception_only())[-1] + + return (stack_trace, exception_msg) + + +def _clean_stack_summary_paths(stack_summary: traceback.StackSummary) -> None: + """ + Cleans the user-sensitive paths contained within a StackSummary instance + + Parameters + ---------- + stack_summary : traceback.StackSummary + StackSummary instance + """ + for frame in stack_summary: + path = frame.filename + separator = "\\" if "\\" in path else "/" + + # Case 1: If "site-packages" is found within path, replace its leading segment with: /../ or \..\ + # i.e. /python3.8/site-packages/boto3/test.py becomes /../site-packages/boto3/test.py + site_packages_idx = path.rfind("site-packages") + if site_packages_idx != -1: + frame.filename = f"{separator}..{separator}{path[site_packages_idx:]}" + continue + + # Case 2: If "samcli" is found within path, do the same replacement as previous + samcli_idx = path.rfind("samcli") + if samcli_idx != -1: + frame.filename = f"{separator}..{separator}{path[samcli_idx:]}" + continue + + # Case 3: Keep only the last file within the path, and do the same replacement as previous + path_split = path.split(separator) + if len(path_split) > 0: + frame.filename = f"{separator}..{separator}{path_split[-1]}" + + def _timer(): """ Timer to measure the elapsed time between two calls in milliseconds. When you first call this method,
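# --- Illustrative sketch (not part of this patch): what _clean_stack_summary_paths
# above does to a single frame, demonstrated on a toy exception; the paths shown
# in comments are hypothetical.
import traceback

try:
    raise ValueError("boom")
except ValueError as ex:
    tb_exception = traceback.TracebackException.from_exception(ex)
    frame = tb_exception.stack[0]
    path = frame.filename  # e.g. /home/user/venv/lib/python3.8/site-packages/pkg/mod.py
    separator = "\\" if "\\" in path else "/"
    idx = path.rfind("site-packages")
    if idx != -1:
        frame.filename = f"{separator}..{separator}{path[idx:]}"  # /../site-packages/pkg/mod.py
    else:
        frame.filename = f"{separator}..{separator}{path.split(separator)[-1]}"  # /../mod.py
    print("".join(tb_exception.format()))  # stack trace rendered with the scrubbed path
# --- end sketch ---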
+""" + +import hashlib +from os import getcwd +import re +import subprocess +from typing import List, Optional + +from samcli.cli.global_config import GlobalConfig + + +def get_git_remote_origin_url() -> Optional[str]: + """ + Retrieve an encrypted version of the project's git remote origin url, if it exists. + + Returns + ------- + str | None + A SHA256 hexdigest string of the git remote origin url, formatted such that the + encrypted value follows the pattern //.git. + If telemetry is opted out of by the user, or the `.git` folder is not found + (the directory is not a git repository), returns None + """ + if not bool(GlobalConfig().telemetry_enabled): + return None + + git_url = None + try: + runcmd = subprocess.run( + ["git", "config", "--get", "remote.origin.url"], capture_output=True, shell=True, check=True, text=True + ) + metadata = _parse_remote_origin_url(str(runcmd.stdout)) + git_url = "/".join(metadata) + ".git" # Format to //.git + except subprocess.CalledProcessError: + return None # Not a git repo + + return _encrypt_value(git_url) + + +def get_project_name() -> Optional[str]: + """ + Retrieve an encrypted version of the project's name, as defined by the .git folder (or directory name if no .git). + + Returns + ------- + str | None + A SHA256 hexdigest string of either the name of the project, or the name of the + current working directory that the command is running in. + If telemetry is opted out of by the user, returns None + """ + if not bool(GlobalConfig().telemetry_enabled): + return None + + project_name = "" + try: + runcmd = subprocess.run( + ["git", "config", "--get", "remote.origin.url"], capture_output=True, shell=True, check=True, text=True + ) + project_name = _parse_remote_origin_url(str(runcmd.stdout))[2] # dir is git repo, get project name from URL + except subprocess.CalledProcessError: + project_name = getcwd().replace("\\", "/") # dir is not a git repo, get directory name + + return _encrypt_value(project_name) + + +def get_initial_commit_hash() -> Optional[str]: + """ + Retrieve an encrypted version of the project's initial commit hash, if it exists. + + Returns + ------- + str | None + A SHA256 hexdigest string of the git project's initial commit hash. + If telemetry is opted out of by the user, or the `.git` folder is not found + (the directory is not a git repository), returns None. + """ + if not bool(GlobalConfig().telemetry_enabled): + return None + + metadata = None + try: + runcmd = subprocess.run( + ["git", "rev-list", "--max-parents=0", "HEAD"], capture_output=True, shell=True, check=True, text=True + ) + metadata = runcmd.stdout.strip() + except subprocess.CalledProcessError: + return None # Not a git repo + + return _encrypt_value(metadata) + + +def _parse_remote_origin_url(url: str) -> List[str]: + """ + Parse a `git remote origin url` into its hostname, owner, and project name. 
+ + Returns + ------- + List[str] + A list of 3 strings, with indices corresponding to 0:hostname, 1:owner, 2:project_name + """ + pattern = re.compile(r"(?:https?://|git@)(?P<hostname>\S*)(?:/|:)(?P<owner>\S*)/(?P<project_name>\S*)\.git") + return [str(item) for item in pattern.findall(url)[0]] + + +def _encrypt_value(value: str) -> str: + """Hash a string with SHA-256 and return the hexdigest string.""" + h = hashlib.sha256() + h.update(value.encode("utf-8")) + return h.hexdigest()
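# --- Illustrative sketch (not part of this patch): the two helpers above applied
# to a sample remote URL; the URL and printed values are hypothetical examples,
# not fixtures from this PR.
import hashlib
import re

pattern = re.compile(r"(?:https?://|git@)(?P<hostname>\S*)(?:/|:)(?P<owner>\S*)/(?P<project_name>\S*)\.git")
hostname, owner, project_name = pattern.findall("git@github.com:aws/aws-sam-cli.git")[0]
print(hostname, owner, project_name)  # github.com aws aws-sam-cli

url = f"{hostname}/{owner}/{project_name}.git"  # github.com/aws/aws-sam-cli.git
print(hashlib.sha256(url.encode("utf-8")).hexdigest())  # the value actually emitted
# --- end sketch ---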
diff --git a/samcli/lib/utils/osutils.py b/samcli/lib/utils/osutils.py index 0813c98b58..d53dc9ffb5 100644 --- a/samcli/lib/utils/osutils.py +++ b/samcli/lib/utils/osutils.py @@ -186,3 +186,16 @@ def convert_to_unix_line_ending(file_path: str) -> None: content = content.replace(b"\r\n", b"\n") with open(file_path, "wb") as file: file.write(content) + + +def create_symlink_or_copy(source: str, destination: str) -> None: + """Tries to create a symlink; if that fails, copies source into destination""" + LOG.debug("Creating symlink; source: %s, destination: %s", source, destination) + try: + os.symlink(Path(source).absolute(), Path(destination).absolute()) + except OSError as ex: + LOG.warning( + "Symlink operation failed, falling back to copying files", + exc_info=ex if LOG.isEnabledFor(logging.DEBUG) else None, + ) + copytree(source, destination) diff --git a/samcli/lib/utils/tar.py b/samcli/lib/utils/tar.py index 3f4ee3eb74..d080e10294 100644 --- a/samcli/lib/utils/tar.py +++ b/samcli/lib/utils/tar.py @@ -8,7 +8,7 @@ @contextmanager -def create_tarball(tar_paths, tar_filter=None): +def create_tarball(tar_paths, tar_filter=None, mode="w"): """ Context Manager that creates the tarball of the Docker Context to use for building the image @@ -17,6 +17,9 @@ tar_paths dict(str, str) Key representing a full path to the file or directory and the Value representing the path within the tarball + mode str + The mode in which the tarfile is opened. Defaults to "w". + + Yields ------ IO @@ -24,7 +27,7 @@ """ tarballfile = TemporaryFile() - with tarfile.open(fileobj=tarballfile, mode="w") as archive: + with tarfile.open(fileobj=tarballfile, mode=mode) as archive: for path_on_system, path_in_tarball in tar_paths.items(): archive.add(path_on_system, arcname=path_in_tarball, filter=tar_filter) diff --git a/samcli/runtime_config.json b/samcli/runtime_config.json index 47f8cd1dcf..2e3c0ffda0 100644 --- a/samcli/runtime_config.json +++ b/samcli/runtime_config.json @@ -1,3 +1,3 @@ { - "app_template_repo_commit": "dca20ade1b290262b6457006b52abadeacdddaf8" + "app_template_repo_commit": "f7af69d483450d09f1bd5ea300d57e00032370c7" } diff --git a/tests/functional/commands/validate/lib/models/function_with_event_schedule_state.yaml b/tests/functional/commands/validate/lib/models/function_with_event_schedule_state.yaml new file mode 100644 index 0000000000..17a13f6549 --- /dev/null +++ b/tests/functional/commands/validate/lib/models/function_with_event_schedule_state.yaml @@ -0,0 +1,35 @@ +Transform: "AWS::Serverless-2016-10-31" +Parameters: + ScheduleState: + Type: String + Default: Disabled + +Resources: + ScheduledFunction: + Type: 'AWS::Serverless::Function' + Properties: + CodeUri: s3://sam-demo-bucket/hello.zip?versionId=3Tcgv52_0GaDvhDva4YciYeqRyPnpIcO + Handler: hello.handler + Runtime: python3.10 + Events: + Schedule1: + Type: Schedule + Properties: + Schedule: 'rate(1 minute)' + Name: test-schedule + Description: Test Schedule + State: "Enabled" + Schedule2: + Type: Schedule + Properties: + Schedule: 'rate(1 minute)' + Name: test-schedule + Description: Test Schedule + State: !Sub "Enabled" + Schedule3: + Type: Schedule + Properties: + Schedule: 'rate(1 minute)' + Name: test-schedule + Description: Test Schedule + State: !Ref ScheduleState \ No newline at end of file diff --git a/tests/functional/commands/validate/lib/models/function_with_msk.yaml b/tests/functional/commands/validate/lib/models/function_with_msk.yaml new file mode 100644 index 0000000000..6ac5e8ddc8 --- /dev/null +++ b/tests/functional/commands/validate/lib/models/function_with_msk.yaml @@ -0,0 +1,20 @@ +AWSTemplateFormatVersion: '2010-09-09' +Parameters: {} + +Resources: + MyMskStreamProcessor: + Type: AWS::Serverless::Function + Properties: + Runtime: nodejs12.x + Handler: index.handler + CodeUri: s3://sam-demo-bucket/kafka.zip + Events: + MyMskEvent: + Type: MSK + Properties: + StartingPosition: LATEST + Stream: !Sub arn:aws:kafka:${AWS::Region}:012345678901:cluster/mycluster/6cc0432b-8618-4f44-bccc-e1fbd8fb7c4d-2 + Topics: + - "MyDummyTestTopic" + ConsumerGroupId: consumergroup1 + diff --git a/tests/functional/commands/validate/lib/models/function_with_msk_with_intrinsics.yaml b/tests/functional/commands/validate/lib/models/function_with_msk_with_intrinsics.yaml new file mode 100644 index 0000000000..de380a72c2 --- /dev/null +++ b/tests/functional/commands/validate/lib/models/function_with_msk_with_intrinsics.yaml @@ -0,0 +1,35 @@ +AWSTemplateFormatVersion: '2010-09-09' +Parameters: + StartingPositionValue: + Type: String + Default: LATEST + + StreamValue: + Type: String + Default: arn:aws:kafka:us-east-1:012345678901:cluster/mycluster/6cc0432b-8618-4f44-bccc-e1fbd8fb7c4d-2 + + TopicsValue: + Type: CommaDelimitedList + Default: Topic + + ConsumerGroupValue: + Type: String + Default: consumergroup1 + + +Resources: + MyMskStreamProcessor: + Type: AWS::Serverless::Function + Properties: + Runtime: nodejs12.x + Handler: index.handler + CodeUri: 
s3://sam-demo-bucket/kafka.zip + Events: + MyMskEvent: + Type: MSK + Properties: + StartingPosition: !Ref StartingPositionValue + Stream: !Ref StreamValue + Topics: !Ref TopicsValue + ConsumerGroupId: !Ref ConsumerGroupValue + diff --git a/tests/functional/commands/validate/lib/models/function_with_self_managed_kafka.yaml b/tests/functional/commands/validate/lib/models/function_with_self_managed_kafka.yaml index a5ed1dfaf1..ef4b7d5d4a 100644 --- a/tests/functional/commands/validate/lib/models/function_with_self_managed_kafka.yaml +++ b/tests/functional/commands/validate/lib/models/function_with_self_managed_kafka.yaml @@ -23,4 +23,5 @@ Resources: URI: subnet:subnet-12345 - Type: VPC_SECURITY_GROUP URI: security_group:sg-67890 + ConsumerGroupId: consumergroup1 diff --git a/tests/functional/commands/validate/lib/models/self_managed_kafka_with_intrinsics.yaml b/tests/functional/commands/validate/lib/models/self_managed_kafka_with_intrinsics.yaml index 5a0f3b3806..8c13ff6ed5 100644 --- a/tests/functional/commands/validate/lib/models/self_managed_kafka_with_intrinsics.yaml +++ b/tests/functional/commands/validate/lib/models/self_managed_kafka_with_intrinsics.yaml @@ -14,8 +14,12 @@ Parameters: KafkaBootstrapServersValue: Type: CommaDelimitedList Default: abc.xyz.com:9092,123.45.67.89:9096 - - + + ConsumerGroupValue: + Type: String + Default: consumergroup1 + + Resources: KafkaFunction: Type: 'AWS::Serverless::Function' @@ -43,6 +47,8 @@ Resources: - Type: BASIC_AUTH URI: Ref: KafkaUserSecret + ConsumerGroupId: + Ref: ConsumerGroupValue KafkaUserSecret: Type: AWS::SecretsManager::Secret diff --git a/tests/integration/buildcmd/build_integ_base.py b/tests/integration/buildcmd/build_integ_base.py index ec50e2504d..54bd14e679 100644 --- a/tests/integration/buildcmd/build_integ_base.py +++ b/tests/integration/buildcmd/build_integ_base.py @@ -78,6 +78,7 @@ def get_command_list( build_image=None, exclude=None, region=None, + beta_features=False, ): command_list = [self.cmd, "build"] @@ -130,6 +131,9 @@ def get_command_list( if region: command_list += ["--region", region] + if beta_features: + command_list += ["--beta-features"] + return command_list def verify_docker_container_cleanedup(self, runtime): @@ -310,6 +314,8 @@ def _verify_built_artifact(self, build_dir, function_logical_id, expected_files, class BuildIntegEsbuildBase(BuildIntegBase): FUNCTION_LOGICAL_ID = "Function" + # Everything should be minified to one line, plus a second line for the sourcemap mapping + MAX_MINIFIED_LINE_COUNT = 2 def _test_with_default_package_json( self, runtime, use_container, code_uri, expected_files, handler, architecture=None ): overrides = self.get_override(runtime, code_uri, architecture, handler) cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides) - cmdlist.append("--beta-features") - LOG.info("Running Command: {}".format(cmdlist)) run_command(cmdlist, cwd=self.working_dir) @@ -335,10 +339,43 @@ def _test_with_default_package_json( self.built_template, self.FUNCTION_LOGICAL_ID, self._make_parameter_override_arg(overrides), expected ) + self._verify_esbuild_properties(self.default_build_dir, self.FUNCTION_LOGICAL_ID, handler) + if use_container: self.verify_docker_container_cleanedup(runtime) self.verify_pulled_image(runtime, architecture) + def _test_with_various_properties(self, overrides): + overrides = self.get_override(**overrides) + cmdlist = self.get_command_list(parameter_overrides=overrides) + + LOG.info("Running 
Command: {}".format(cmdlist)) + run_command(cmdlist, cwd=self.working_dir) + + expected = {"body": '{"message":"hello world!"}', "statusCode": 200} + if not SKIP_DOCKER_TESTS and overrides["Architectures"] == X86_64: + # ARM64 is not supported yet for invoking + self._verify_invoke_built_function( + self.built_template, self.FUNCTION_LOGICAL_ID, self._make_parameter_override_arg(overrides), expected + ) + + self._verify_esbuild_properties(self.default_build_dir, self.FUNCTION_LOGICAL_ID, overrides["Handler"]) + + def _verify_esbuild_properties(self, build_dir, function_logical_id, handler): + filename = handler.split(".")[0] + resource_artifact_dir = build_dir.joinpath(function_logical_id) + self._verify_sourcemap_created(filename, resource_artifact_dir) + self._verify_function_minified(filename, resource_artifact_dir) + + def _verify_function_minified(self, filename, resource_artifact_dir): + with open(Path(resource_artifact_dir, f"{filename}.js"), "r") as handler_file: + x = len(handler_file.readlines()) + self.assertLessEqual(x, self.MAX_MINIFIED_LINE_COUNT) + + def _verify_sourcemap_created(self, filename, resource_artifact_dir): + all_artifacts = set(os.listdir(str(resource_artifact_dir))) + self.assertIn(f"{filename}.js.map", all_artifacts) + def _verify_built_artifact(self, build_dir, function_logical_id, expected_files): self.assertTrue(build_dir.exists(), "Build directory should be created") @@ -489,7 +526,6 @@ def _verify_built_artifact(self, build_dir, function_logical_id, expected_files) class BuildIntegJavaBase(BuildIntegBase): - FUNCTION_LOGICAL_ID = "Function" def _test_with_building_java( @@ -771,7 +807,22 @@ def _verify_build_artifact(self, build_dir, function_logical_id): build_dir_files = os.listdir(str(build_dir)) self.assertIn("template.yaml", build_dir_files) - self.assertIn(function_logical_id, build_dir_files) + + # confirm function logical id is in the built template + template_dict = {} + with open(Path(build_dir).joinpath("template.yaml"), "r") as template_file: + template_dict = yaml_parse(template_file.read()) + self.assertIn(function_logical_id, template_dict.get("Resources", {}).keys()) + + # confirm build folder for the function exist in the build directory + built_folder = ( + template_dict.get("Resources", {}).get(function_logical_id, {}).get("Properties", {}).get("CodeUri") + ) + if not built_folder: + built_folder = ( + template_dict.get("Resources", {}).get(function_logical_id, {}).get("Properties", {}).get("ContentUri") + ) + self.assertIn(built_folder, build_dir_files) def _verify_process_code_and_output(self, command_result): self.assertEqual(command_result.process.returncode, 0) diff --git a/tests/integration/buildcmd/test_build_cmd.py b/tests/integration/buildcmd/test_build_cmd.py index 3ce39b01e7..b458c2ddcc 100644 --- a/tests/integration/buildcmd/test_build_cmd.py +++ b/tests/integration/buildcmd/test_build_cmd.py @@ -439,6 +439,29 @@ def test_building_default_package_json( self._test_with_default_package_json(runtime, use_container, code_uri, expected_files, handler, architecture) +@skipIf( + ((IS_WINDOWS and RUNNING_ON_CI) and not CI_OVERRIDE), + "Skip build tests on windows when running in CI unless overridden", +) +@parameterized_class( + ("template",), + [ + ("esbuild_templates/template_with_metadata_node_options.yaml",), + ("esbuild_templates/template_with_metadata_global_node_options.yaml",), + ], +) +class TestBuildCommand_EsbuildFunctionProperties(BuildIntegEsbuildBase): + @pytest.mark.flaky(reruns=3) + def 
test_environment_generates_sourcemap(self): + overrides = { + "runtime": "nodejs16.x", + "code_uri": "../Esbuild/TypeScript", + "handler": "app.lambdaHandler", + "architecture": "x86_64", + } + self._test_with_various_properties(overrides) + + class TestBuildCommand_NodeFunctions_With_Specified_Architecture(BuildIntegNodeBase): template = "template_with_architecture.yaml" @@ -1469,8 +1492,11 @@ def _get_python_version(self): ((IS_WINDOWS and RUNNING_ON_CI) and not CI_OVERRIDE), "Skip build tests on windows when running in CI unless overridden", ) +# remove following parameterized_class when BuildImprovements22 experimental flag is removed +@parameterized_class(("beta_features",), [(True,), (False,)]) class TestBuildWithDedupBuilds(DedupBuildIntegBase): template = "dedup-functions-template.yaml" + beta_features = False # parameterized @parameterized.expand( [ @@ -1514,7 +1540,9 @@ def test_dedup_build(self, use_container, code_uri, function1_handler, function2 "Function2Handler": function2_handler, "FunctionRuntime": runtime, } - cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides) + cmdlist = self.get_command_list( + use_container=use_container, parameter_overrides=overrides, beta_features=self.beta_features + ) LOG.info("Running Command: {}".format(cmdlist)) # Built using `native` python-pip builder for a python project. @@ -1569,15 +1597,18 @@ def test_dedup_build(self, use_container): ((IS_WINDOWS and RUNNING_ON_CI) and not CI_OVERRIDE), "Skip build tests on windows when running in CI unless overridden", ) +# remove following parameterized_class when BuildImprovements22 experimental flag is removed +@parameterized_class(("beta_features",), [(True,), (False,)]) class TestBuildWithDedupBuildsMakefile(DedupBuildIntegBase): template = "dedup-functions-makefile-template.yaml" + beta_features = False # parameterized @pytest.mark.flaky(reruns=3) def test_dedup_build_makefile(self): """ Build template above in the container and verify that each function call returns as expected """ - cmdlist = self.get_command_list() + cmdlist = self.get_command_list(beta_features=self.beta_features) LOG.info("Running Command: {}".format(cmdlist)) # Built using `native` python-pip builder for a python project. @@ -1599,8 +1630,11 @@ def _verify_process_code_and_output(self, command_result): ((IS_WINDOWS and RUNNING_ON_CI) and not CI_OVERRIDE), "Skip build tests on windows when running in CI unless overridden", ) +# remove following parameterized_class when BuildImprovements22 experimental flag is removed +@parameterized_class(("beta_features",), [(True,), (False,)]) class TestBuildWithCacheBuilds(CachedBuildIntegBase): template = "dedup-functions-template.yaml" + beta_features = False # parameterized @parameterized.expand( [ @@ -1644,7 +1678,9 @@ def test_cache_build(self, use_container, code_uri, function1_handler, function2 "Function2Handler": function2_handler, "FunctionRuntime": runtime, } - cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides, cached=True) + cmdlist = self.get_command_list( + use_container=use_container, parameter_overrides=overrides, cached=True, beta_features=self.beta_features + ) LOG.info("Running Command: %s", cmdlist) # Built using `native` python-pip builder for a python project. 
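# --- Illustrative sketch (not part of this patch): how the parameterized_class
# decorator used above toggles beta_features. It stamps each attribute onto a
# generated copy of the test class, one copy per parameter tuple; the class and
# test names here are hypothetical.
import unittest
from parameterized import parameterized_class

@parameterized_class(("beta_features",), [(True,), (False,)])
class TestBetaFlag(unittest.TestCase):
    beta_features = False  # overwritten on each generated class

    def test_flag_is_bool(self):
        # Runs twice: once with beta_features=True and once with False,
        # the same way the dedup/cached build tests above exercise both modes.
        self.assertIsInstance(self.beta_features, bool)
# --- end sketch ---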
@@ -1777,8 +1813,11 @@ def test_repeated_cached_build_hits_cache(self, use_container): ((IS_WINDOWS and RUNNING_ON_CI) and not CI_OVERRIDE), "Skip build tests on windows when running in CI unless overridden", ) +# remove following parameterized_class when BuildImprovements22 experimental flag is removed +@parameterized_class(("beta_features",), [(True,), (False,)]) class TestParallelBuilds(DedupBuildIntegBase): template = "dedup-functions-template.yaml" + beta_features = False # parameterized @parameterized.expand( [ @@ -1822,7 +1861,9 @@ def test_dedup_build(self, use_container, code_uri, function1_handler, function2 "Function2Handler": function2_handler, "FunctionRuntime": runtime, } - cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides, parallel=True) + cmdlist = self.get_command_list( + use_container=use_container, parameter_overrides=overrides, parallel=True, beta_features=self.beta_features + ) LOG.info("Running Command: %s", cmdlist) # Built using `native` python-pip builder for a python project. @@ -1840,8 +1881,11 @@ def test_dedup_build(self, use_container, code_uri, function1_handler, function2 ((IS_WINDOWS and RUNNING_ON_CI) and not CI_OVERRIDE), "Skip build tests on windows when running in CI unless overridden", ) +# remove following parameterized_class when BuildImprovements22 experimental flag is removed +@parameterized_class(("beta_features",), [(True,), (False,)]) class TestParallelBuildsJavaWithLayers(DedupBuildIntegBase): template = "template-java-maven-with-layers.yaml" + beta_features = False # parameterized @pytest.mark.flaky(reruns=3) def test_dedup_build(self): @@ -1849,7 +1893,7 @@ def test_dedup_build(self): Build template above and verify that each function call returns as expected """ - cmdlist = self.get_command_list(parallel=True) + cmdlist = self.get_command_list(parallel=True, beta_features=self.beta_features) command_result = run_command(cmdlist, cwd=self.working_dir) self.assertEqual(command_result.process.returncode, 0) diff --git a/tests/integration/deploy/deploy_integ_base.py b/tests/integration/deploy/deploy_integ_base.py index cbce4674ea..c9ec13133e 100644 --- a/tests/integration/deploy/deploy_integ_base.py +++ b/tests/integration/deploy/deploy_integ_base.py @@ -104,9 +104,11 @@ def get_deploy_command_list( signing_profiles=None, resolve_image_repos=False, disable_rollback=False, + on_failure=None, ): command_list = [get_sam_command(), "deploy"] + # Cast all string parameters to preserve behaviour across platforms if guided: command_list = command_list + ["--guided"] if s3_bucket: @@ -165,6 +167,8 @@ def get_deploy_command_list( command_list = command_list + ["--resolve-image-repos"] if disable_rollback: command_list = command_list + ["--disable-rollback"] + if on_failure: + command_list = command_list + ["--on-failure", str(on_failure)] return command_list diff --git a/tests/integration/deploy/test_deploy_command.py b/tests/integration/deploy/test_deploy_command.py index e196bf815e..e35a1baa4f 100644 --- a/tests/integration/deploy/test_deploy_command.py +++ b/tests/integration/deploy/test_deploy_command.py @@ -3,12 +3,12 @@ import tempfile import time import uuid -import json from pathlib import Path from unittest import skipIf import boto3 +import botocore from botocore.exceptions import ClientError import docker from botocore.config import Config @@ -20,7 +20,6 @@ from tests.integration.package.package_integ_base import PackageIntegBase from tests.testing_utils import RUNNING_ON_CI, 
RUNNING_TEST_FOR_MASTER_ON_CI, RUN_BY_CANARY from tests.testing_utils import run_command, run_command_with_input -from samcli.lib.bootstrap.companion_stack.data_types import CompanionStack # Deploy tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. # This is to restrict package tests to run outside of CI/CD, when the branch is not master or tests are not run by Canary @@ -1286,6 +1285,251 @@ def test_deploy_logs_warning_with_cdk_project(self, template_file): self.assertIn(warning_message, deploy_process_execute.stdout) self.assertEqual(deploy_process_execute.process.returncode, 0) + @parameterized.expand(["aws-dynamodb-error.yaml"]) + def test_deploy_on_failure_do_nothing_new_invalid_stack(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + self.stacks.append({"name": stack_name}) + + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix=self.s3_prefix, + s3_bucket=self.s3_bucket.name, + image_repository=self.ecr_repo_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="ShardCountParameter=1", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + on_failure="DO_NOTHING", + ) + + deploy_process_execute = run_command(deploy_command_list) + self.assertEqual(deploy_process_execute.process.returncode, 1) + + stderr = deploy_process_execute.stderr.strip() + self.assertIn( + bytes( + f"Error: Failed to create/update the stack: {stack_name}, Waiter StackCreateComplete failed: " + f'Waiter encountered a terminal failure state: For expression "Stacks[].StackStatus" ' + f'we matched expected path: "CREATE_FAILED" at least once', + encoding="utf-8", + ), + stderr, + ) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_on_failure_do_nothing_existing_stack(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + self.stacks.append({"name": stack_name}) + + # First deploy a simple template that should work + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix=self.s3_prefix, + s3_bucket=self.s3_bucket.name, + image_repository=self.ecr_repo_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + ) + + deploy_process_execute = run_command(deploy_command_list) + self.assertEqual(deploy_process_execute.process.returncode, 0) + + # Failing template + template_path = self.test_data_path.joinpath("aws-dynamodb-error.yaml") + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix=self.s3_prefix, + s3_bucket=self.s3_bucket.name, + image_repository=self.ecr_repo_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="ShardCountParameter=1", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + on_failure="DO_NOTHING", + ) + + deploy_process_execute = run_command(deploy_command_list) + 
self.assertEqual(deploy_process_execute.process.returncode, 1) + + stderr = deploy_process_execute.stderr.strip() + self.assertIn( + bytes( + f"Error: Failed to create/update the stack: {stack_name}, Waiter StackUpdateComplete failed: " + f'Waiter encountered a terminal failure state: For expression "Stacks[].StackStatus" ' + f'we matched expected path: "UPDATE_FAILED" at least once', + encoding="utf-8", + ), + stderr, + ) + + @parameterized.expand(["aws-dynamodb-error.yaml"]) + def test_deploy_on_failure_delete_new_stack(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + self.stacks.append({"name": stack_name}) + + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix=self.s3_prefix, + s3_bucket=self.s3_bucket.name, + image_repository=self.ecr_repo_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="ShardCountParameter=1", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + on_failure="DELETE", + ) + + deploy_process_execute = run_command(deploy_command_list) + self.assertEqual(deploy_process_execute.process.returncode, 0) + + # Check if the stack is deleted from CloudFormation + stack_exists = True + try: + self.cfn_client.describe_stacks(StackName=stack_name) + except botocore.exceptions.ClientError: + stack_exists = False + + self.assertFalse(stack_exists) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_on_failure_delete_existing_stack(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + self.stacks.append({"name": stack_name}) + + # First deploy a simple template that should work + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix=self.s3_prefix, + s3_bucket=self.s3_bucket.name, + image_repository=self.ecr_repo_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + ) + + deploy_process_execute = run_command(deploy_command_list) + self.assertEqual(deploy_process_execute.process.returncode, 0) + + # Failing template + template_path = self.test_data_path.joinpath("aws-dynamodb-error.yaml") + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix=self.s3_prefix, + s3_bucket=self.s3_bucket.name, + image_repository=self.ecr_repo_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="ShardCountParameter=1", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + on_failure="DELETE", + ) + + deploy_process_execute = run_command(deploy_command_list) + self.assertEqual(deploy_process_execute.process.returncode, 0) + + # Check if the stack rolled back successfully + result = self.cfn_client.describe_stacks(StackName=stack_name) + self.assertEqual(str(result["Stacks"][0]["StackStatus"]), "UPDATE_ROLLBACK_COMPLETE") + + @parameterized.expand(["aws-dynamodb-error.yaml"]) + def 
test_deploy_on_failure_delete_existing_stack_fails(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + self.stacks.append({"name": stack_name}) + + # Deploy bad stack with no rollback + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix=self.s3_prefix, + s3_bucket=self.s3_bucket.name, + image_repository=self.ecr_repo_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + disable_rollback=True, + ) + + deploy_process_execute = run_command(deploy_command_list) + + # Failing template + template_path = self.test_data_path.joinpath(template_file) + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix=self.s3_prefix, + s3_bucket=self.s3_bucket.name, + image_repository=self.ecr_repo_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="ShardCountParameter=1", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + on_failure="DELETE", + ) + + deploy_process_execute = run_command(deploy_command_list) + self.assertEqual(deploy_process_execute.process.returncode, 0) + + # Check if the stack is deleted from CloudFormation + stack_exists = True + try: + self.cfn_client.describe_stacks(StackName=stack_name) + except botocore.exceptions.ClientError: + stack_exists = False + + self.assertFalse(stack_exists) + @parameterized.expand(["aws-serverless-function.yaml"]) def test_update_stack_correct_stack_outputs(self, template): template_path = self.test_data_path.joinpath(template) diff --git a/tests/integration/local/invoke/test_integration_cli_images.py b/tests/integration/local/invoke/test_integration_cli_images.py index d87e981ffd..8453f2a040 100644 --- a/tests/integration/local/invoke/test_integration_cli_images.py +++ b/tests/integration/local/invoke/test_integration_cli_images.py @@ -188,6 +188,22 @@ def test_invoke_with_env_vars_with_functionname_defined(self, function_name): process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '"MyVar"') + @parameterized.expand([("EchoGlobalCustomEnvVarFunction")]) + @pytest.mark.flaky(reruns=3) + def test_invoke_with_global_env_vars_function(self, function_name): + command_list = self.get_command_list( + function_name, template_path=self.template_path, event_path=self.event_path, env_var_path=self.env_var_path + ) + + process = Popen(command_list, stdout=PIPE) + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() + self.assertEqual(process_stdout.decode("utf-8"), '"GlobalVar"') + @pytest.mark.flaky(reruns=3) def test_invoke_when_function_writes_stdout(self): command_list = self.get_command_list( diff --git a/tests/integration/local/invoke/test_integrations_cli.py b/tests/integration/local/invoke/test_integrations_cli.py index a8d3cf022e..54849668f7 100644 --- a/tests/integration/local/invoke/test_integrations_cli.py +++ b/tests/integration/local/invoke/test_integrations_cli.py @@ -201,6 +201,22 @@ def test_invoke_with_env_vars_with_functionname_defined(self, function_name): 
process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '"MyVar"') + @parameterized.expand([("EchoGlobalCustomEnvVarFunction")]) + @pytest.mark.flaky(reruns=3) + def test_invoke_with_global_env_vars_function(self, function_name): + command_list = self.get_command_list( + function_name, template_path=self.template_path, event_path=self.event_path, env_var_path=self.env_var_path + ) + + process = Popen(command_list, stdout=PIPE) + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() + self.assertEqual(process_stdout.decode("utf-8"), '"GlobalVar"') + @pytest.mark.flaky(reruns=3) def test_invoke_with_invoke_image_provided(self): command_list = self.get_command_list( diff --git a/tests/integration/pipeline/test_init_command.py b/tests/integration/pipeline/test_init_command.py index 54f7635757..542fd84b33 100644 --- a/tests/integration/pipeline/test_init_command.py +++ b/tests/integration/pipeline/test_init_command.py @@ -106,7 +106,26 @@ def test_failed_when_generated_file_already_exist_not_override(self): # also check the Jenkinsfile is not overridden self.assertEqual("", open("Jenkinsfile", "r").read()) - def test_custom_template(self): + def test_custom_template_with_manifest(self): + generated_file = Path("weather") + self.generated_files.append(generated_file) + + custom_template_path = Path(__file__).parent.parent.joinpath( + Path("testdata", "pipeline", "custom_template_with_manifest") + ) + inputs = ["2", str(custom_template_path), "2", "", "Rainy"] # custom template + + init_command_list = self.get_init_command_list() + init_process_execute = run_command_with_inputs(init_command_list, inputs) + + self.assertEqual(init_process_execute.process.returncode, 0) + + self.assertTrue(generated_file.exists()) + + with open(generated_file, "r") as f: + self.assertEqual("Rainy\n", f.read()) + + def test_custom_template_without_manifest(self): generated_file = Path("weather") self.generated_files.append(generated_file) @@ -139,7 +158,7 @@ def test_with_pipelineconfig_has_all_stage_values(self, with_bootstrap): [default.pipeline_bootstrap] [default.pipeline_bootstrap.parameters] pipeline_user = "arn:aws:iam::123:user/aws-sam-cli-managed-test-pipeline-res-PipelineUser-123" - + [test] [test.pipeline_bootstrap] [test.pipeline_bootstrap.parameters] @@ -148,7 +167,7 @@ def test_with_pipelineconfig_has_all_stage_values(self, with_bootstrap): artifacts_bucket = "test-bucket" image_repository = "test-ecr" region = "us-east-2" - + [prod] [prod.pipeline_bootstrap] [prod.pipeline_bootstrap.parameters] diff --git a/tests/integration/sync/test_sync_adl.py b/tests/integration/sync/test_sync_adl.py index 39cc6517a6..5910fd5d70 100644 --- a/tests/integration/sync/test_sync_adl.py +++ b/tests/integration/sync/test_sync_adl.py @@ -2,7 +2,9 @@ import os.path from unittest import skipIf -from samcli.lib.utils.resources import AWS_LAMBDA_FUNCTION +from samcli.commands._utils.experimental import set_experimental, ExperimentalFlag +from samcli.lib.utils.resources import AWS_LAMBDA_FUNCTION, AWS_LAMBDA_LAYERVERSION +from tests.integration.sync.sync_integ_base import SyncIntegBase from tests.integration.sync.test_sync_code import TestSyncCodeBase, SKIP_SYNC_TESTS, TestSyncCode from tests.integration.sync.test_sync_watch import TestSyncWatchBase from tests.testing_utils import run_command_with_input, read_until_string, IS_WINDOWS @@ -166,3 +168,40 @@ def _verify_lambda_response(_lambda_response): 
self.assertIn("extra_message", lambda_response) self._confirm_lambda_response(self._get_lambda_response(lambda_functions[0]), _verify_lambda_response) + + +@skipIf(SKIP_SYNC_TESTS or IS_WINDOWS, "Skip sync tests in CI/CD only") +class TestDisableAdlForEsbuildFunctions(SyncIntegBase): + template_file = "code/before/template-esbuild.yaml" + dependency_layer = True + + def test_sync_esbuild(self): + template_path = str(self.test_data_path.joinpath(self.template_file)) + stack_name = self._method_to_stack_name(self.id()) + self.stacks.append({"name": stack_name}) + + sync_command_list = self.get_sync_command_list( + template_file=template_path, + code=False, + watch=False, + dependency_layer=self.dependency_layer, + stack_name=stack_name, + parameter_overrides="Parameter=Clarity", + image_repository=self.ecr_repo_name, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key, + capabilities_list=["CAPABILITY_IAM", "CAPABILITY_AUTO_EXPAND"], + tags="integ=true clarity=yes foo_bar=baz", + ) + sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode()) + self.assertEqual(sync_process_execute.process.returncode, 0) + self.assertIn("Sync infra completed.", str(sync_process_execute.stderr)) + + self.stack_resources = self._get_stacks(stack_name) + lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) + for lambda_function in lambda_functions: + lambda_response = json.loads(self._get_lambda_response(lambda_function)) + self.assertEqual(lambda_response.get("message"), "hello world") + + layers = self.stack_resources.get(AWS_LAMBDA_LAYERVERSION) + self.assertIsNone(layers) diff --git a/tests/integration/sync/test_sync_code.py b/tests/integration/sync/test_sync_code.py index e9d237380f..243430c91d 100644 --- a/tests/integration/sync/test_sync_code.py +++ b/tests/integration/sync/test_sync_code.py @@ -618,3 +618,47 @@ def test_sync_code_nested_getattr_layer(self): lambda_response = json.loads(self._get_lambda_response(lambda_function)) self.assertIn("extra_message", lambda_response) self.assertEqual(lambda_response.get("message"), "9") + + +@skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only") +class TestSyncCodeEsbuildFunctionTemplate(TestSyncCodeBase): + template = "template-esbuild.yaml" + folder = "code" + dependency_layer = False + + def test_sync_code_esbuild_function(self): + shutil.rmtree(Path(TestSyncCodeBase.temp_dir).joinpath("esbuild_function"), ignore_errors=True) + shutil.copytree( + self.test_data_path.joinpath("code").joinpath("after").joinpath("esbuild_function"), + Path(TestSyncCodeBase.temp_dir).joinpath("esbuild_function"), + ) + + self.stack_resources = self._get_stacks(TestSyncCodeBase.stack_name) + + # Run code sync + sync_command_list = self.get_sync_command_list( + template_file=TestSyncCodeBase.template_path, + code=True, + watch=False, + resource_list=["AWS::Serverless::Function"], + dependency_layer=self.dependency_layer, + stack_name=TestSyncCodeBase.stack_name, + parameter_overrides="Parameter=Clarity", + image_repository=self.ecr_repo_name, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key, + tags="integ=true clarity=yes foo_bar=baz", + ) + sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode()) + + self.assertEqual(sync_process_execute.process.returncode, 0) + + # CFN Api call here to collect all the stack resources + self.stack_resources = self._get_stacks(TestSyncCodeBase.stack_name) + # Lambda Api call here, which tests both the function and the layer + lambda_functions = 
self.stack_resources.get(AWS_LAMBDA_FUNCTION) + for lambda_function in lambda_functions: + if lambda_function == "HelloWorldFunction": + lambda_response = json.loads(self._get_lambda_response(lambda_function)) + self.assertIn("extra_message", lambda_response) + self.assertEqual(lambda_response.get("message"), "Hello world!") diff --git a/tests/integration/sync/test_sync_infra.py b/tests/integration/sync/test_sync_infra.py index a06e52db6d..9fb0f7dfc4 100644 --- a/tests/integration/sync/test_sync_infra.py +++ b/tests/integration/sync/test_sync_infra.py @@ -403,3 +403,37 @@ def _run_sync_and_validate_lambda_call(self, dependency_layer: bool, template_pa self.assertIn("sum", lambda_response) self.assertEqual(lambda_response.get("message"), "hello world") self.assertEqual(lambda_response.get("sum"), 12) + + +@skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only") +class TestSyncInfraWithEsbuild(SyncIntegBase): + dependency_layer = False + + @parameterized.expand(["code/before/template-esbuild.yaml"]) + def test_sync_infra_esbuild(self, template_file): + template_path = str(self.test_data_path.joinpath(template_file)) + stack_name = self._method_to_stack_name(self.id()) + self.stacks.append({"name": stack_name}) + + sync_command_list = self.get_sync_command_list( + template_file=template_path, + code=False, + watch=False, + dependency_layer=self.dependency_layer, + stack_name=stack_name, + parameter_overrides="Parameter=Clarity", + image_repository=self.ecr_repo_name, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key, + capabilities_list=["CAPABILITY_IAM", "CAPABILITY_AUTO_EXPAND"], + tags="integ=true clarity=yes foo_bar=baz", + ) + sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode()) + self.assertEqual(sync_process_execute.process.returncode, 0) + self.assertIn("Sync infra completed.", str(sync_process_execute.stderr)) + + self.stack_resources = self._get_stacks(stack_name) + lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) + for lambda_function in lambda_functions: + lambda_response = json.loads(self._get_lambda_response(lambda_function)) + self.assertEqual(lambda_response.get("message"), "hello world") diff --git a/tests/integration/sync/test_sync_watch.py b/tests/integration/sync/test_sync_watch.py index 423bc580f5..6fa89b5133 100644 --- a/tests/integration/sync/test_sync_watch.py +++ b/tests/integration/sync/test_sync_watch.py @@ -146,6 +146,47 @@ def _verify_infra_changes(self, resources): self.assertEqual(self._get_sfn_response(state_machine), '"World 2"') +@skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only") +class TestSyncWatchEsbuildBase(TestSyncWatchBase): + @classmethod + def setUpClass(cls): + PackageIntegBase.setUpClass() + cls.test_data_path = Path(__file__).resolve().parents[1].joinpath("testdata", "sync") + + def setUp(self): + super().setUp() + + def _setup_verify_infra(self): + template_path = self.test_dir.joinpath(self.template_before) + self.stacks.append({"name": self.stack_name}) + + # Start watch + sync_command_list = self.get_sync_command_list( + template_file=str(template_path), + code=False, + watch=True, + dependency_layer=self.dependency_layer, + stack_name=self.stack_name, + parameter_overrides="Parameter=Clarity", + image_repository=self.ecr_repo_name, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key, + tags="integ=true clarity=yes foo_bar=baz", + ) + self.watch_process = start_persistent_process(sync_command_list, cwd=self.test_dir) + + read_until_string(self.watch_process, "Enter Y to proceed with the 
command, or enter N to cancel:\n") + self.watch_process.stdin.write("y\n") + + read_until_string(self.watch_process, "\x1b[32mInfra sync completed.\x1b[0m\n", timeout=600) + + self.stack_resources = self._get_stacks(self.stack_name) + lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) + for lambda_function in lambda_functions: + lambda_response = json.loads(self._get_lambda_response(lambda_function)) + self.assertEqual(lambda_response.get("message"), "hello world") + + @parameterized_class( [{"runtime": "python", "dependency_layer": True}, {"runtime": "python", "dependency_layer": False}] ) @@ -345,3 +386,31 @@ def test_sync_watch_code_nested_stack(self): state_machine = self.stack_resources.get(AWS_STEPFUNCTIONS_STATEMACHINE)[0] time.sleep(SFN_SLEEP) self.assertEqual(self._get_sfn_response(state_machine), '"World 2"') + + +class TestSyncWatchCodeEsbuild(TestSyncWatchEsbuildBase): + dependency_layer = False + template_before = str(Path("code", "before", "template-esbuild.yaml")) + + def test_sync_watch_code(self): + self.stack_resources = self._get_stacks(self.stack_name) + + # Test Lambda Function + lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) + for lambda_function in lambda_functions: + lambda_response = json.loads(self._get_lambda_response(lambda_function)) + self.assertNotIn("extra_message", lambda_response) + self.assertEqual(lambda_response.get("message"), "hello world") + + self.update_file( + self.test_dir.joinpath("code", "after", "esbuild_function", "app.ts"), + self.test_dir.joinpath("code", "before", "esbuild_function", "app.ts"), + ) + read_until_string( + self.watch_process, "\x1b[32mFinished syncing Lambda Function HelloWorldFunction.\x1b[0m\n", timeout=30 + ) + lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) + for lambda_function in lambda_functions: + lambda_response = json.loads(self._get_lambda_response(lambda_function)) + self.assertIn("extra_message", lambda_response) + self.assertEqual(lambda_response.get("message"), "Hello world!") diff --git a/tests/integration/telemetry/test_experimental_metric.py b/tests/integration/telemetry/test_experimental_metric.py index d9a829054d..61e3f4147d 100644 --- a/tests/integration/telemetry/test_experimental_metric.py +++ b/tests/integration/telemetry/test_experimental_metric.py @@ -60,10 +60,15 @@ def test_must_send_experimental_metrics_if_experimental_command(self): "metricSpecificAttributes": { "experimentalAll": False, "experimentalEsbuild": False, + "gitOrigin": ANY, + "projectName": ANY, + "initialCommit": ANY, }, "duration": ANY, "exitReason": ANY, "exitCode": ANY, + "stackTrace": ANY, + "exceptionMessage": ANY, } } ] @@ -114,10 +119,15 @@ def test_must_send_experimental_metrics_if_experimental_option(self): "metricSpecificAttributes": { "experimentalAll": True, "experimentalEsbuild": True, + "gitOrigin": ANY, + "projectName": ANY, + "initialCommit": ANY, }, "duration": ANY, "exitReason": ANY, "exitCode": ANY, + "stackTrace": ANY, + "exceptionMessage": ANY, } } ] @@ -151,8 +161,11 @@ def test_must_send_cdk_project_type_metrics(self): process.communicate() all_requests = server.get_all_requests() - self.assertEqual(1, len(all_requests), "Command run metric must be sent") + self.assertGreaterEqual(len(all_requests), 1, "Command run metric must be sent") request = all_requests[0] + for req in all_requests: + if "commandRun" in req["data"]["metrics"][0]: + request = req # We're only testing the commandRun metric self.assertIn("Content-Type", request["headers"]) 
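The telemetry assertions in the hunks around here compare a full expected payload against the captured request, using unittest.mock.ANY as a wildcard for run-specific fields (durations, exit data, the new git attributes). A minimal, self-contained sketch of the pattern, with a hypothetical payload:

```python
from unittest.mock import ANY

# Hypothetical captured telemetry payload; only its shape is under test.
captured = {"commandName": "sam build", "duration": 1432, "exitCode": 0}

# ANY compares equal to anything, so dict equality pins down the keys and
# the literal values we care about while ignoring the variable ones.
expected = {"commandName": "sam build", "duration": ANY, "exitCode": ANY}
assert expected == captured
```

Because ANY matches any value, adding fields such as stackTrace and exceptionMessage to the expected payload keeps the schema assertion strict without making the test flaky.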
self.assertEqual(request["headers"]["Content-Type"], "application/json") @@ -171,10 +184,17 @@ "debugFlagProvided": ANY, "region": ANY, "commandName": ANY, - "metricSpecificAttributes": {"projectType": "CDK"}, + "metricSpecificAttributes": { + "projectType": "CDK", + "gitOrigin": ANY, + "projectName": ANY, + "initialCommit": ANY, + }, "duration": ANY, "exitReason": ANY, "exitCode": ANY, + "stackTrace": ANY, + "exceptionMessage": ANY, } } ] @@ -217,9 +237,12 @@ def test_must_send_not_experimental_metrics_if_not_experimental(self): "debugFlagProvided": ANY, "region": ANY, "commandName": ANY, + "metricSpecificAttributes": ANY, "duration": ANY, "exitReason": ANY, "exitCode": ANY, + "stackTrace": ANY, + "exceptionMessage": ANY, } } ] diff --git a/tests/integration/testdata/buildcmd/Esbuild/Node/package.json b/tests/integration/testdata/buildcmd/Esbuild/Node/package.json index 98b95e1da7..9cf3cb2227 100644 --- a/tests/integration/testdata/buildcmd/Esbuild/Node/package.json +++ b/tests/integration/testdata/buildcmd/Esbuild/Node/package.json @@ -6,9 +6,7 @@ "author": "", "license": "APACHE2.0", "dependencies": { - "minimal-request-promise": "*" - }, - "devDependencies": { + "minimal-request-promise": "*", "esbuild": "^0.14.14" } } \ No newline at end of file diff --git a/tests/integration/testdata/buildcmd/Esbuild/TypeScript/package.json b/tests/integration/testdata/buildcmd/Esbuild/TypeScript/package.json index b03e1b7258..5d85d71bd3 100644 --- a/tests/integration/testdata/buildcmd/Esbuild/TypeScript/package.json +++ b/tests/integration/testdata/buildcmd/Esbuild/TypeScript/package.json @@ -7,9 +7,7 @@ "license": "APACHE2.0", "dependencies": { "minimal-request-promise": "*", - "@types/aws-lambda": "^8.10.92" - }, - "devDependencies": { + "@types/aws-lambda": "^8.10.92", "esbuild": "^0.14.14" } } \ No newline at end of file diff --git a/tests/integration/testdata/buildcmd/esbuild_templates/template_with_metadata_global_node_options.yaml b/tests/integration/testdata/buildcmd/esbuild_templates/template_with_metadata_global_node_options.yaml new file mode 100644 index 0000000000..28d66f2c35 --- /dev/null +++ b/tests/integration/testdata/buildcmd/esbuild_templates/template_with_metadata_global_node_options.yaml @@ -0,0 +1,35 @@ +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 + +Parameters: + Runtime: + Type: String + CodeUri: + Type: String + Handler: + Type: String + Architectures: + Type: String + +Globals: + Function: + Environment: + Variables: + NODE_OPTIONS: --enable-source-maps + +Resources: + Function: + Type: AWS::Serverless::Function + Properties: + Handler: !Ref Handler + Runtime: !Ref Runtime + CodeUri: !Ref CodeUri + Timeout: 600 + Architectures: + - !Ref Architectures + Metadata: + BuildMethod: esbuild + BuildProperties: + Minify: true + Target: "es2020" + Sourcemap: true diff --git a/tests/integration/testdata/buildcmd/esbuild_templates/template_with_metadata_node_options.yaml b/tests/integration/testdata/buildcmd/esbuild_templates/template_with_metadata_node_options.yaml new file mode 100644 index 0000000000..d75cfad192 --- /dev/null +++ b/tests/integration/testdata/buildcmd/esbuild_templates/template_with_metadata_node_options.yaml @@ -0,0 +1,32 @@ +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 + +Parameters: + Runtime: + Type: String + CodeUri: + Type: String + Handler: + Type: String + Architectures: + Type: String + +Resources: + Function: + Type: 
AWS::Serverless::Function + Properties: + Handler: !Ref Handler + Runtime: !Ref Runtime + CodeUri: !Ref CodeUri + Timeout: 600 + Environment: + Variables: + NODE_OPTIONS: --enable-source-maps + Architectures: + - !Ref Architectures + Metadata: + BuildMethod: esbuild + BuildProperties: + Minify: true + Target: "es2020" + Sourcemap: true diff --git a/tests/integration/testdata/buildcmd/template_with_metadata.yaml b/tests/integration/testdata/buildcmd/template_with_metadata.yaml index 14e02fda90..99236aa8d5 100644 --- a/tests/integration/testdata/buildcmd/template_with_metadata.yaml +++ b/tests/integration/testdata/buildcmd/template_with_metadata.yaml @@ -25,6 +25,6 @@ Resources: Metadata: BuildMethod: esbuild BuildProperties: - Minify: false + Minify: true Target: "es2020" Sourcemap: true diff --git a/tests/integration/testdata/invoke/template.yml b/tests/integration/testdata/invoke/template.yml index 615206f4be..e616252c97 100644 --- a/tests/integration/testdata/invoke/template.yml +++ b/tests/integration/testdata/invoke/template.yml @@ -77,6 +77,17 @@ Resources: Variables: CustomEnvVar: "MyOtherVar" Timeout: 600 + + EchoGlobalCustomEnvVarFunction: + Type: AWS::Serverless::Function + Properties: + Handler: main.custom_env_var_echo_hanler + Runtime: python3.6 + CodeUri: . + Environment: + Variables: + CustomEnvVar: "MyOtherVar" + Timeout: 600 EchoCustomEnvVarWithFunctionNameDefinedFunction: Type: AWS::Serverless::Function diff --git a/tests/integration/testdata/invoke/template_image.yaml b/tests/integration/testdata/invoke/template_image.yaml index 7bc106c7cb..ce3b7d0719 100644 --- a/tests/integration/testdata/invoke/template_image.yaml +++ b/tests/integration/testdata/invoke/template_image.yaml @@ -86,6 +86,17 @@ Resources: Variables: CustomEnvVar: "MyOtherVar" Timeout: 600 + + EchoGlobalCustomEnvVarFunction: + Type: AWS::Serverless::Function + Properties: + Handler: main.custom_env_var_echo_hanler + Runtime: python3.6 + CodeUri: . 
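The two new esbuild template fixtures above wire source maps in both directions: Sourcemap: true in the build metadata, and NODE_OPTIONS: --enable-source-maps set either per function or through Globals. A rough sketch of the reconciliation these fixtures exercise (function name and shapes are illustrative, not the EsbuildBundlerManager implementation):

```python
def ensure_source_maps_env(resource: dict) -> dict:
    """If an esbuild function builds with Sourcemap enabled, make sure the
    Node.js runtime actually loads the maps via NODE_OPTIONS."""
    metadata = resource.get("Metadata", {})
    if metadata.get("BuildMethod") != "esbuild":
        return resource
    if not metadata.get("BuildProperties", {}).get("Sourcemap"):
        return resource

    variables = (
        resource.setdefault("Properties", {})
        .setdefault("Environment", {})
        .setdefault("Variables", {})
    )
    node_options = variables.get("NODE_OPTIONS", "")
    if "--enable-source-maps" not in node_options:
        # Append rather than overwrite, so user-supplied options survive.
        variables["NODE_OPTIONS"] = f"{node_options} --enable-source-maps".strip()
    return resource
```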
+ Environment: + Variables: + CustomEnvVar: "MyOtherVar" + Timeout: 600 EchoCustomEnvVarWithFunctionNameDefinedFunction: Type: AWS::Serverless::Function diff --git a/tests/integration/testdata/invoke/vars.json b/tests/integration/testdata/invoke/vars.json index 028f91196c..4a6dc57a93 100644 --- a/tests/integration/testdata/invoke/vars.json +++ b/tests/integration/testdata/invoke/vars.json @@ -2,6 +2,9 @@ "EchoCustomEnvVarFunction": { "CustomEnvVar": "MyVar" }, + "Parameters": { + "CustomEnvVar": "GlobalVar" + }, "EchoCustomEnvVarWithFunctionNameDefinedFunction": { "CustomEnvVar": "MyVar" } diff --git a/tests/integration/testdata/pipeline/custom_template_with_manifest/food/cookiecutter.json b/tests/integration/testdata/pipeline/custom_template_with_manifest/food/cookiecutter.json new file mode 100644 index 0000000000..c7bf08973d --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template_with_manifest/food/cookiecutter.json @@ -0,0 +1,4 @@ +{ + "outputDir": "aws-sam-pipeline", + "food": "" +} diff --git a/tests/integration/testdata/pipeline/custom_template_with_manifest/food/metadata.json b/tests/integration/testdata/pipeline/custom_template_with_manifest/food/metadata.json new file mode 100644 index 0000000000..689fe297f8 --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template_with_manifest/food/metadata.json @@ -0,0 +1,3 @@ +{ + "number_of_stages": 0 +} diff --git a/tests/integration/testdata/pipeline/custom_template_with_manifest/food/questions.json b/tests/integration/testdata/pipeline/custom_template_with_manifest/food/questions.json new file mode 100644 index 0000000000..65cdfa2c1c --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template_with_manifest/food/questions.json @@ -0,0 +1,7 @@ +{ + "questions": [{ + "key": "food", + "question": "What is your favorite food?", + "default": "Pizza" + }] +} diff --git a/tests/integration/testdata/pipeline/custom_template_with_manifest/food/{{cookiecutter.outputDir}}/food b/tests/integration/testdata/pipeline/custom_template_with_manifest/food/{{cookiecutter.outputDir}}/food new file mode 100644 index 0000000000..b32b65249f --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template_with_manifest/food/{{cookiecutter.outputDir}}/food @@ -0,0 +1 @@ +{{cookiecutter.food}} diff --git a/tests/integration/testdata/pipeline/custom_template_with_manifest/manifest.yaml b/tests/integration/testdata/pipeline/custom_template_with_manifest/manifest.yaml new file mode 100644 index 0000000000..bf1b75cead --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template_with_manifest/manifest.yaml @@ -0,0 +1,12 @@ +providers: + - displayName: Food + id: food + - displayName: Weather + id: weather +templates: + - displayName: Food pipeline + provider: food + location: food/ + - displayName: Weather pipeline + provider: weather + location: weather/ diff --git a/tests/integration/testdata/pipeline/custom_template_with_manifest/weather/cookiecutter.json b/tests/integration/testdata/pipeline/custom_template_with_manifest/weather/cookiecutter.json new file mode 100644 index 0000000000..d606a52b05 --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template_with_manifest/weather/cookiecutter.json @@ -0,0 +1,4 @@ +{ + "outputDir": "aws-sam-pipeline", + "weather": "" +} diff --git a/tests/integration/testdata/pipeline/custom_template_with_manifest/weather/metadata.json b/tests/integration/testdata/pipeline/custom_template_with_manifest/weather/metadata.json new file mode 100644 index 
0000000000..689fe297f8 --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template_with_manifest/weather/metadata.json @@ -0,0 +1,3 @@ +{ + "number_of_stages": 0 +} diff --git a/tests/integration/testdata/pipeline/custom_template_with_manifest/weather/questions.json b/tests/integration/testdata/pipeline/custom_template_with_manifest/weather/questions.json new file mode 100644 index 0000000000..fd8264e9a6 --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template_with_manifest/weather/questions.json @@ -0,0 +1,7 @@ +{ + "questions": [{ + "key": "weather", + "question": "How is the weather today?", + "default": "Sunny" + }] +} diff --git a/tests/integration/testdata/pipeline/custom_template_with_manifest/weather/{{cookiecutter.outputDir}}/weather b/tests/integration/testdata/pipeline/custom_template_with_manifest/weather/{{cookiecutter.outputDir}}/weather new file mode 100644 index 0000000000..3501ffd0ae --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template_with_manifest/weather/{{cookiecutter.outputDir}}/weather @@ -0,0 +1 @@ +{{cookiecutter.weather}} diff --git a/tests/integration/testdata/sync/code/after/esbuild_function/app.ts b/tests/integration/testdata/sync/code/after/esbuild_function/app.ts new file mode 100644 index 0000000000..37f827eef4 --- /dev/null +++ b/tests/integration/testdata/sync/code/after/esbuild_function/app.ts @@ -0,0 +1,26 @@ +import axios from "axios"; + +export const lambdaHandler = async (): Promise<object> => { + let response: object; + + try { + response = { + 'statusCode': 200, + 'body': JSON.stringify({ + message: 'Hello world!', + extra_message: "banana" + }) + } + } catch (err) { + console.log(err); + + response = { + 'statusCode': 500, + 'body': JSON.stringify({ + message: 'exception happened' + }) + } + } + + return response; +}; diff --git a/tests/integration/testdata/sync/code/after/esbuild_function/package.json b/tests/integration/testdata/sync/code/after/esbuild_function/package.json new file mode 100644 index 0000000000..dc4c142f42 --- /dev/null +++ b/tests/integration/testdata/sync/code/after/esbuild_function/package.json @@ -0,0 +1,13 @@ +{ + "name": "hello_world", + "version": "1.0.0", + "description": "hello world sample for NodeJS", + "main": "app.js", + "author": "SAM CLI", + "license": "MIT", + "dependencies": { + "axios": ">=0.21.1", + "@faker-js/faker": "7.1.0", + "esbuild": "^0.14.51" + } +} \ No newline at end of file diff --git a/tests/integration/testdata/sync/code/after/esbuild_function/tsconfig.json b/tests/integration/testdata/sync/code/after/esbuild_function/tsconfig.json new file mode 100644 index 0000000000..11ac54ab9c --- /dev/null +++ b/tests/integration/testdata/sync/code/after/esbuild_function/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "es2020", + "strict": true, + "preserveConstEnums": true, + "noEmit": true, + "sourceMap": false, + "module":"es2015", + "moduleResolution":"node", + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + }, + "exclude": ["node_modules", "**/*.test.ts"] +} \ No newline at end of file diff --git a/tests/integration/testdata/sync/code/before/esbuild_function/app.ts b/tests/integration/testdata/sync/code/before/esbuild_function/app.ts new file mode 100644 index 0000000000..a8563066a8 --- /dev/null +++ b/tests/integration/testdata/sync/code/before/esbuild_function/app.ts @@ -0,0 +1,26 @@ +// @ts-ignore +import axios from "axios"; + +export const lambdaHandler = async (): Promise<object> => { + let response: 
object; + + try { + response = { + 'statusCode': 200, + 'body': JSON.stringify({ + message: 'hello world', + }) + } + } catch (err) { + console.log(err); + + response = { + 'statusCode': 500, + 'body': JSON.stringify({ + message: 'exception happened' + }) + } + } + + return response; +}; diff --git a/tests/integration/testdata/sync/code/before/esbuild_function/package.json b/tests/integration/testdata/sync/code/before/esbuild_function/package.json new file mode 100644 index 0000000000..13a2153701 --- /dev/null +++ b/tests/integration/testdata/sync/code/before/esbuild_function/package.json @@ -0,0 +1,13 @@ +{ + "name": "hello_world", + "version": "1.0.0", + "description": "hello world sample for NodeJS", + "main": "app.js", + "author": "SAM CLI", + "license": "MIT", + "dependencies": { + "axios": "^0.27.2", + "esbuild": "^0.14.51" + } + } + \ No newline at end of file diff --git a/tests/integration/testdata/sync/code/before/esbuild_function/tsconfig.json b/tests/integration/testdata/sync/code/before/esbuild_function/tsconfig.json new file mode 100644 index 0000000000..11ac54ab9c --- /dev/null +++ b/tests/integration/testdata/sync/code/before/esbuild_function/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "es2020", + "strict": true, + "preserveConstEnums": true, + "noEmit": true, + "sourceMap": false, + "module":"es2015", + "moduleResolution":"node", + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + }, + "exclude": ["node_modules", "**/*.test.ts"] +} \ No newline at end of file diff --git a/tests/integration/testdata/sync/code/before/template-esbuild.yaml b/tests/integration/testdata/sync/code/before/template-esbuild.yaml new file mode 100644 index 0000000000..79ed202600 --- /dev/null +++ b/tests/integration/testdata/sync/code/before/template-esbuild.yaml @@ -0,0 +1,18 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 + +Globals: + Function: + Timeout: 10 + +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: esbuild_function/ + Handler: app.lambdaHandler + Runtime: nodejs16.x + Metadata: + BuildMethod: esbuild + BuildProperties: + Sourcemap: true \ No newline at end of file diff --git a/tests/unit/cli/test_cli_config_file.py b/tests/unit/cli/test_cli_config_file.py index 2332bd36f9..e09a23edaa 100644 --- a/tests/unit/cli/test_cli_config_file.py +++ b/tests/unit/cli/test_cli_config_file.py @@ -7,7 +7,7 @@ from samcli.commands.exceptions import ConfigException from samcli.cli.cli_config_file import TomlProvider, configuration_option, configuration_callback, get_ctx_defaults -from samcli.lib.config.samconfig import SamConfig, DEFAULT_ENV, DEFAULT_CONFIG_FILE_NAME +from samcli.lib.config.samconfig import DEFAULT_ENV class MockContext: @@ -84,6 +84,7 @@ def setUp(self): self.ctx = MagicMock() self.param = MagicMock() self.value = MagicMock() + self.config_file = "otherconfig.toml" class Dummy: pass @@ -110,6 +111,54 @@ def test_callback_with_valid_config_env(self): self.assertIn(arg, self.saved_callback.call_args[0]) self.assertNotIn(self.value, self.saved_callback.call_args[0]) + def test_callback_with_invalid_config_file(self): + mock_context1 = MockContext(info_name="sam", parent=None) + mock_context2 = MockContext(info_name="local", parent=mock_context1) + mock_context3 = MockContext(info_name="start-api", parent=mock_context2) + self.ctx.parent = mock_context3 + self.ctx.info_name = "test_info" + self.ctx.params = {"config_file": "invalid_config_file"} 
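The config-file unit tests that follow build their fixtures out of relative paths: a scratch directory stands in for the samconfig directory, and the config file itself lives one level above it. A small standalone sketch of that geometry (the file name is borrowed from the test, everything else is illustrative):

```python
import os
import tempfile
from pathlib import Path

# Scratch directory standing in for the directory that holds samconfig.
temp_dir = tempfile.mkdtemp()

# Put the config file one level above, then address it with a relative path,
# mirroring how the valid-path test below reaches "../otherconfig.toml".
config_name = "otherconfig.toml"
config_path = Path(temp_dir).parent / config_name
config_path.touch()

relative = os.path.join("..", config_name)
assert (Path(temp_dir) / relative).resolve() == config_path.resolve()
```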
+ setattr(self.ctx, "samconfig_dir", None) + with self.assertRaises(ConfigException): + configuration_callback( + cmd_name=self.cmd_name, + option_name=self.option_name, + saved_callback=self.saved_callback, + provider=self.provider, + ctx=self.ctx, + param=self.param, + value=self.value, + ) + + def test_callback_with_valid_config_file_path(self): + mock_context1 = MockContext(info_name="sam", parent=None) + mock_context2 = MockContext(info_name="local", parent=mock_context1) + mock_context3 = MockContext(info_name="start-api", parent=mock_context2) + self.ctx.parent = mock_context3 + self.ctx.info_name = "test_info" + # Create a temporary directory. + temp_dir = tempfile.mkdtemp() + # Create a new config file path that is one layer above the temporary directory. + config_file_path = Path(temp_dir).parent.joinpath(self.config_file) + with open(config_file_path, "wb"): + # Set the `samconfig_dir` to be temporary directory that was created. + setattr(self.ctx, "samconfig_dir", temp_dir) + # set a relative path for the config file from `samconfig_dir`. + self.ctx.params = {"config_file": os.path.join("..", self.config_file)} + configuration_callback( + cmd_name=self.cmd_name, + option_name=self.option_name, + saved_callback=self.saved_callback, + provider=self.provider, + ctx=self.ctx, + param=self.param, + value=self.value, + ) + self.assertEqual(self.saved_callback.call_count, 1) + for arg in [self.ctx, self.param, DEFAULT_ENV]: + self.assertIn(arg, self.saved_callback.call_args[0]) + self.assertNotIn(self.value, self.saved_callback.call_args[0]) + def test_configuration_option(self): toml_provider = TomlProvider() click_option = configuration_option(provider=toml_provider) diff --git a/tests/unit/cli/test_types.py b/tests/unit/cli/test_types.py index 353e48eaca..710e513478 100644 --- a/tests/unit/cli/test_types.py +++ b/tests/unit/cli/test_types.py @@ -241,6 +241,64 @@ def test_successful_parsing(self, input, expected): self.assertEqual(result, expected, msg="Failed with Input = " + str(input)) +class TestCfnTagsMultipleValues(TestCase): + """ + Tests for the CfnTags parameter allowing multiple values per key. 
+ """ + + def setUp(self): + self.param_type = CfnTags(multiple_values_per_key=True) + + @parameterized.expand( + [ + # Just a string + ("some string"), + # Wrong notation + # ("a==b"), + # Wrong multi-key notation + # ("a==b,c==d"), + ] + ) + def test_must_fail_on_invalid_format(self, input): + self.param_type.fail = Mock() + self.param_type.convert(input, "param", "ctx") + + self.param_type.fail.assert_called_with(ANY, "param", "ctx") + + @parameterized.expand( + [ + (("a=b",), {"a": ["b"]}), + (("a=b", "c=d"), {"a": ["b"], "c": ["d"]}), + (('"a+-=._:/@"="b+-=._:/@" "--c="="=d/"',), {"a+-=._:/@": ["b+-=._:/@"], "--c=": ["=d/"]}), + (('owner:name="son of anton"',), {"owner:name": ["son of anton"]}), + (("a=012345678901234567890123456789",), {"a": ["012345678901234567890123456789"]}), + ( + ("a=012345678901234567890123456789 name=this-is-a-very-long-tag-value-now-it-should-not-fail"), + { + "a": ["012345678901234567890123456789"], + "name": ["this-is-a-very-long-tag-value-now-it-should-not-fail"], + }, + ), + ( + ("a=012345678901234567890123456789", "c=012345678901234567890123456789"), + {"a": ["012345678901234567890123456789"], "c": ["012345678901234567890123456789"]}, + ), + (("",), {}), + # list as input + ([], {}), + ( + ["stage=int", "company:application=awesome-service", "company:department=engineering"], + {"stage": ["int"], "company:application": ["awesome-service"], "company:department": ["engineering"]}, + ), + (("a=b", "a=d"), {"a": ["b", "d"]}), + (("stage=alpha", "stage=beta", "stage=gamma", "stage=prod"), {"stage": ["alpha", "beta", "gamma", "prod"]}), + ] + ) + def test_successful_parsing(self, input, expected): + result = self.param_type.convert(input, None, None) + self.assertEqual(result, expected, msg="Failed with Input = " + str(input)) + + class TestCodeSignOptionType(TestCase): def setUp(self): self.param_type = SigningProfilesOptionType() diff --git a/tests/unit/commands/_utils/test_experimental.py b/tests/unit/commands/_utils/test_experimental.py index 67709912cf..db14741263 100644 --- a/tests/unit/commands/_utils/test_experimental.py +++ b/tests/unit/commands/_utils/test_experimental.py @@ -12,8 +12,6 @@ prompt_experimental, set_experimental, get_enabled_experimental_flags, - ExperimentalEntry, - ExperimentalFlag, ) from samcli.lib.utils.colors import Colored diff --git a/tests/unit/commands/buildcmd/test_build_context.py b/tests/unit/commands/buildcmd/test_build_context.py index dde06145d2..a517325715 100644 --- a/tests/unit/commands/buildcmd/test_build_context.py +++ b/tests/unit/commands/buildcmd/test_build_context.py @@ -1,11 +1,11 @@ import os from unittest import TestCase -from unittest.mock import patch, Mock, ANY, call +from unittest.mock import MagicMock, patch, Mock, ANY, call from parameterized import parameterized -from samcli.commands._utils.experimental import ExperimentalFlag from samcli.lib.build.build_graph import DEFAULT_DEPENDENCIES_DIR +from samcli.lib.build.bundler import EsbuildBundlerManager from samcli.lib.utils.osutils import BUILD_DIR_PERMISSIONS from samcli.lib.utils.packagetype import ZIP, IMAGE from samcli.local.lambdafn.exceptions import ResourceNotFound @@ -61,6 +61,11 @@ def __init__( self.runtime = runtime +class DummyStack: + def __init__(self, resource): + self.resources = resource + + class TestBuildContext__enter__(TestCase): @patch("samcli.commands.build.build_context.get_template_data") @patch("samcli.commands.build.build_context.SamLocalStackProvider.get_stacks") @@ -636,8 +641,10 @@ def test_must_print_remote_url_warning( 
@patch("samcli.commands.build.build_context.move_template") @patch("samcli.commands.build.build_context.get_template_data") @patch("samcli.commands.build.build_context.os") + @patch("samcli.commands.build.build_context.EsbuildBundlerManager") def test_run_sync_build_context( self, + esbuild_bundler_manager_mock, os_mock, get_template_data_mock, move_template_mock, @@ -662,7 +669,7 @@ def test_run_sync_build_context( root_stack.stack_path: "./build_dir/template.yaml", child_stack.stack_path: "./build_dir/abcd/template.yaml", } - resources_mock.return_value = Mock() + resources_mock.return_value = MagicMock() builder_mock = ApplicationBuilderMock.return_value = Mock() artifacts = "artifacts" @@ -706,9 +713,8 @@ def test_run_sync_build_context( print_success_message=False, ) as build_context: with patch("samcli.commands.build.build_context.BuildContext.gen_success_msg") as mock_message: - with patch("samcli.commands.build.build_context.BuildContext._check_esbuild_warning"): - build_context.run() - mock_message.assert_not_called() + build_context.run() + mock_message.assert_not_called() class TestBuildContext_setup_build_dir(TestCase): @@ -892,9 +898,13 @@ class TestBuildContext_run(TestCase): @patch("samcli.commands.build.build_context.move_template") @patch("samcli.commands.build.build_context.get_template_data") @patch("samcli.commands.build.build_context.os") + @patch("samcli.commands.build.build_context.EsbuildBundlerManager") + @patch("samcli.commands.build.build_context.BuildContext._handle_build_pre_processing") def test_run_build_context( self, auto_dependency_layer, + pre_processing_mock, + esbuild_bundler_manager_mock, os_mock, get_template_data_mock, move_template_mock, @@ -947,6 +957,15 @@ def test_run_build_context( ] nested_stack_manager_mock.return_value = given_nested_stack_manager + pre_processing_mock.return_value = [root_stack, child_stack] + + esbuild_manager = EsbuildBundlerManager(Mock()) + esbuild_manager.set_sourcemap_env_from_metadata = Mock() + esbuild_manager.set_sourcemap_env_from_metadata.side_effect = [modified_template_root, modified_template_child] + esbuild_manager.esbuild_configured = Mock() + esbuild_manager.esbuild_configured.return_value = False + esbuild_bundler_manager_mock.return_value = esbuild_manager + with BuildContext( resource_identifier="function_identifier", template_file="template_file", @@ -1020,12 +1039,10 @@ def test_run_build_context( if auto_dependency_layer: nested_stack_manager_mock.assert_has_calls( [ - call( - root_stack, None, build_context.build_dir, modified_template_root, application_build_result - ), + call(root_stack, "", build_context.build_dir, modified_template_root, application_build_result), call( child_stack, - None, + "", build_context.build_dir, modified_template_child, application_build_result, @@ -1060,10 +1077,12 @@ def test_run_build_context( @patch("samcli.commands.build.build_context.move_template") @patch("samcli.commands.build.build_context.get_template_data") @patch("samcli.commands.build.build_context.os") + @patch("samcli.commands.build.build_context.EsbuildBundlerManager") def test_must_catch_known_exceptions( self, exception, wrapped_exception, + esbuild_bundler_manager_mock, os_mock, get_template_data_mock, move_template_mock, @@ -1139,8 +1158,10 @@ def test_must_catch_known_exceptions( @patch("samcli.commands.build.build_context.move_template") @patch("samcli.commands.build.build_context.get_template_data") @patch("samcli.commands.build.build_context.os") + 
@patch("samcli.commands.build.build_context.EsbuildBundlerManager") def test_must_catch_function_not_found_exception( self, + source_map_mock, os_mock, get_template_data_mock, move_template_mock, @@ -1203,37 +1224,6 @@ def test_must_catch_function_not_found_exception( self.assertEqual(str(ctx.exception), "Function Not Found") -class TestBuildContext_esbuild_warning(TestCase): - @parameterized.expand( - [ - ([], False), - ([DummyFunction("Esbuild", metadata={"BuildMethod": "esbuild"})], True), - ([DummyFunction("NotEsbuild", metadata={"BuildMethod": "Makefile"})], False), - ] - ) - @patch("samcli.commands.build.build_context.prompt_experimental") - def test_check_esbuild_warning(self, functions, should_print, mocked_click): - build_context = BuildContext( - resource_identifier="function_identifier", - template_file="template_file", - base_dir="base_dir", - build_dir="build_dir", - cache_dir="cache_dir", - cached=False, - clean=False, - parallel=False, - mode="mode", - ) - with patch.object(build_context, "get_resources_to_build") as mocked_resources_to_build: - mocked_resources_to_build.return_value = Mock(functions=functions) - build_context._check_esbuild_warning() - - if should_print: - mocked_click.assert_called_with(ExperimentalFlag.Esbuild, BuildContext._ESBUILD_WARNING_MESSAGE) - else: - mocked_click.assert_not_called() - - class TestBuildContext_exclude_warning(TestCase): @parameterized.expand( [ diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index e46bdf99b7..9412adc1d9 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -51,6 +51,7 @@ def setUp(self): self.use_changeset = True self.resolve_image_repos = False self.disable_rollback = False + self.on_failure = None MOCK_SAM_CONFIG.reset_mock() self.companion_stack_manager_patch = patch("samcli.commands.deploy.guided_context.CompanionStackManager") @@ -103,6 +104,7 @@ def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_con config_file=self.config_file, resolve_image_repos=self.resolve_image_repos, disable_rollback=self.disable_rollback, + on_failure=self.on_failure, ) mock_deploy_context.assert_called_with( @@ -129,6 +131,7 @@ def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_con use_changeset=self.use_changeset, disable_rollback=self.disable_rollback, poll_delay=10, + on_failure=self.on_failure, ) context_mock.run.assert_called_with() @@ -217,6 +220,7 @@ def test_all_args_guided_no_to_authorization_confirmation_prompt( config_file=self.config_file, resolve_image_repos=self.resolve_image_repos, disable_rollback=self.disable_rollback, + on_failure=self.on_failure, ) @patch("samcli.commands.package.command.click") @@ -310,6 +314,7 @@ def test_all_args_guided( config_file=self.config_file, resolve_image_repos=self.resolve_image_repos, disable_rollback=self.disable_rollback, + on_failure=self.on_failure, ) mock_deploy_context.assert_called_with( @@ -336,6 +341,7 @@ def test_all_args_guided( use_changeset=self.use_changeset, disable_rollback=True, poll_delay=0.5, + on_failure=self.on_failure, ) context_mock.run.assert_called_with() @@ -454,6 +460,7 @@ def test_all_args_guided_no_save_echo_param_to_config( config_file=self.config_file, resolve_image_repos=self.resolve_image_repos, disable_rollback=self.disable_rollback, + on_failure=self.on_failure, ) mock_deploy_context.assert_called_with( @@ -484,6 +491,7 @@ def test_all_args_guided_no_save_echo_param_to_config( 
use_changeset=self.use_changeset, disable_rollback=True, poll_delay=0.5, + on_failure=self.on_failure, ) context_mock.run.assert_called_with() @@ -610,6 +618,7 @@ def test_all_args_guided_no_params_save_config( signing_profiles=self.signing_profiles, resolve_image_repos=self.resolve_image_repos, disable_rollback=self.disable_rollback, + on_failure=self.on_failure, ) mock_deploy_context.assert_called_with( @@ -636,6 +645,7 @@ def test_all_args_guided_no_params_save_config( use_changeset=self.use_changeset, disable_rollback=True, poll_delay=0.5, + on_failure=self.on_failure, ) context_mock.run.assert_called_with() @@ -747,6 +757,7 @@ def test_all_args_guided_no_params_no_save_config( signing_profiles=self.signing_profiles, resolve_image_repos=self.resolve_image_repos, disable_rollback=self.disable_rollback, + on_failure=self.on_failure, ) mock_deploy_context.assert_called_with( @@ -773,6 +784,7 @@ def test_all_args_guided_no_params_no_save_config( use_changeset=self.use_changeset, disable_rollback=self.disable_rollback, poll_delay=0.5, + on_failure=self.on_failure, ) context_mock.run.assert_called_with() @@ -821,6 +833,7 @@ def test_all_args_resolve_s3( signing_profiles=self.signing_profiles, resolve_image_repos=self.resolve_image_repos, disable_rollback=self.disable_rollback, + on_failure=self.on_failure, ) mock_deploy_context.assert_called_with( @@ -847,6 +860,7 @@ def test_all_args_resolve_s3( use_changeset=self.use_changeset, disable_rollback=self.disable_rollback, poll_delay=0.5, + on_failure=self.on_failure, ) context_mock.run.assert_called_with() @@ -883,6 +897,7 @@ def test_resolve_s3_and_s3_bucket_both_set(self): signing_profiles=self.signing_profiles, resolve_image_repos=self.resolve_image_repos, disable_rollback=self.disable_rollback, + on_failure=self.on_failure, ) @patch("samcli.commands.package.command.click") @@ -933,6 +948,7 @@ def test_all_args_resolve_image_repos( signing_profiles=self.signing_profiles, resolve_image_repos=True, disable_rollback=self.disable_rollback, + on_failure=self.on_failure, ) mock_deploy_context.assert_called_with( @@ -959,6 +975,7 @@ def test_all_args_resolve_image_repos( use_changeset=True, disable_rollback=self.disable_rollback, poll_delay=0.5, + on_failure=self.on_failure, ) context_mock.run.assert_called_with() diff --git a/tests/unit/commands/deploy/test_deploy_context.py b/tests/unit/commands/deploy/test_deploy_context.py index fe01ff57e4..c0c7a9aec9 100644 --- a/tests/unit/commands/deploy/test_deploy_context.py +++ b/tests/unit/commands/deploy/test_deploy_context.py @@ -6,6 +6,8 @@ from samcli.lib.deploy.deployer import Deployer from samcli.commands.deploy.deploy_context import DeployContext from samcli.commands.deploy.exceptions import DeployBucketRequiredError, DeployFailedError, ChangeEmptyError +from samcli.lib.deploy.utils import FailureMode +from samcli.commands.deploy.exceptions import DeployFailedError class TestSamDeployCommand(TestCase): @@ -34,6 +36,7 @@ def setUp(self): use_changeset=True, disable_rollback=False, poll_delay=0.5, + on_failure=None, ) def test_template_improper(self): @@ -185,6 +188,7 @@ def test_sync(self, patched_get_buildable_stacks, patched_auth_required, patched use_changeset=False, disable_rollback=False, poll_delay=0.5, + on_failure=None, ) patched_get_buildable_stacks.return_value = (Mock(), []) patched_auth_required.return_value = [("HelloWorldFunction", False)] @@ -216,3 +220,39 @@ def test_sync(self, patched_get_buildable_stacks, patched_auth_required, patched 
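The deploy command tests in this file now thread an on_failure value through to the deploy context, and the two context tests just below pin down the interesting branches: FailureMode.DELETE triggers rollback_delete_stack after a failed changeset, while FailureMode.DO_NOTHING is forwarded to wait_for_execute so the failed stack is left untouched. A rough sketch of that dispatch (member names come from these tests and the samconfig default; the string values and handler shape are assumptions, not the real Deployer):

```python
from enum import Enum


class FailureMode(Enum):
    ROLLBACK = "ROLLBACK"      # default: let CloudFormation roll back
    DELETE = "DELETE"          # clean up the failed stack entirely
    DO_NOTHING = "DO_NOTHING"  # leave the stack as-is for inspection


def handle_failed_deploy(deployer, stack_name: str, on_failure: FailureMode) -> None:
    # Sketch: only DELETE needs an explicit cleanup call; the other modes
    # are passed along so the waiter knows whether a rollback is expected.
    if on_failure is FailureMode.DELETE:
        deployer.rollback_delete_stack(stack_name)
```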
sync_context.deployer.sync.call_args[1]["role_arn"], "role-arn", ) + + @patch("boto3.Session") + @patch.object(Deployer, "rollback_delete_stack", MagicMock()) + @patch.object( + Deployer, "execute_changeset", MagicMock(side_effect=DeployFailedError("stack-name", "failed to deploy")) + ) + @patch.object(Deployer, "wait_for_execute", MagicMock()) + def test_on_failure_delete_rollback_stack(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + + self.deploy_command_context.on_failure = FailureMode.DELETE + + self.deploy_command_context.run() + + self.assertEqual(self.deploy_command_context.deployer.rollback_delete_stack.call_count, 1) + + @patch("boto3.Session") + @patch.object(Deployer, "execute_changeset", MagicMock()) + @patch.object(Deployer, "wait_for_execute", MagicMock()) + @patch.object(Deployer, "create_and_wait_for_changeset", MagicMock(return_value=({"Id": "test"}, "CREATE"))) + def test_on_failure_do_nothing(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + + self.deploy_command_context.on_failure = FailureMode.DO_NOTHING + + self.deploy_command_context.run() + + self.deploy_command_context.deployer.wait_for_execute.assert_called_with( + ANY, "CREATE", True, FailureMode.DO_NOTHING + ) diff --git a/tests/unit/commands/local/lib/test_local_lambda.py b/tests/unit/commands/local/lib/test_local_lambda.py index 55675b064e..88dbafde2c 100644 --- a/tests/unit/commands/local/lib/test_local_lambda.py +++ b/tests/unit/commands/local/lib/test_local_lambda.py @@ -208,12 +208,17 @@ def setUp(self): ({"function_id": {"a": "b"}}, {"a": "b"}), # Override for the logical_id exists ({"logical_id": {"a": "c"}}, {"a": "c"}), + # Override for the functionname exists + ({"function_name": {"a": "d"}}, {"a": "d"}), # Override for the full_path exists ({posixpath.join("somepath", "function_id"): {"a": "d"}}, {"a": "d"}), # Override for the function does *not* exist - ({"otherfunction": {"c": "d"}}, None), + ({"otherfunction": {"c": "d"}}, {}), # Using a CloudFormation parameter file format ({"Parameters": {"p1": "v1"}}, {"p1": "v1"}), + # Mix of Cloudformation and standard parameter format + ({"Parameters": {"p1": "v1"}, "logical_id": {"a": "b"}}, {"p1": "v1", "a": "b"}), + ({"Parameters": {"p1": "v1"}, "logical_id": {"p1": "v2"}}, {"p1": "v2"}), ] ) @patch("samcli.commands.local.lib.local_lambda.EnvironmentVariables") @@ -353,7 +358,7 @@ def test_must_work_with_invalid_environment_variable(self, environment_variable, function.handler, variables=None, shell_env_values=os_environ, - override_values=None, + override_values={}, aws_creds=self.aws_creds, ) diff --git a/tests/unit/commands/local/lib/test_sam_function_provider.py b/tests/unit/commands/local/lib/test_sam_function_provider.py index a3a20350df..55d385743b 100644 --- a/tests/unit/commands/local/lib/test_sam_function_provider.py +++ b/tests/unit/commands/local/lib/test_sam_function_provider.py @@ -1000,6 +1000,34 @@ def test_get_all_must_return_all_functions(self): self.assertEqual(expected, result) + def test_update_function_provider(self): + updated_template = { + "Resources": { + "SamFunctions": { + "Type": "AWS::Serverless::Function", + "Properties": { + "FunctionName": "SamFunc1", + "CodeUri": "/usr/foo/bar", + "Runtime": "nodejs4.3", + 
"Handler": "index.handler", + }, + }, + "SamFuncWithInlineCode": { + "Type": "AWS::Serverless::Function", + "Properties": { + "FunctionName": "SamFuncWithInlineCode", + "InlineCode": "testcode", + "Runtime": "nodejs4.3", + "Handler": "index.handler", + }, + }, + } + } + updated_stack = Stack("", "", "template.yaml", self.parameter_overrides, updated_template) + self.provider.update([updated_stack]) + functions = list(self.provider.get_all()) + self.assertEqual(len(functions), 2) + class TestSamFunctionProvider_init(TestCase): def setUp(self): diff --git a/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py b/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py index f5a1bae009..5d9311d869 100644 --- a/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py +++ b/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py @@ -244,9 +244,18 @@ def test_generate_pipeline_configuration_file_from_custom_local_existing_path_wi osutils_mock, os_mock, ): + def only_template_manifest_does_not_exist(args): + """ + Mock every file except the template manifest as its "existence" will + result in errors when it can't actually be read by `InteractiveInitFlow`. + """ + if args == Path("/any/existing/local/path/manifest.yaml"): + return False + return True + # setup local_pipeline_templates_path = "/any/existing/local/path" - os_mock.path.exists.return_value = True + os_mock.path.exists.side_effect = only_template_manifest_does_not_exist questions_click_mock.prompt.return_value = "2" # Custom pipeline templates init_click_mock.prompt.return_value = local_pipeline_templates_path # git repo path # trigger diff --git a/tests/unit/commands/samconfig/test_samconfig.py b/tests/unit/commands/samconfig/test_samconfig.py index e5b69e9983..61f625a060 100644 --- a/tests/unit/commands/samconfig/test_samconfig.py +++ b/tests/unit/commands/samconfig/test_samconfig.py @@ -8,7 +8,6 @@ import tempfile from pathlib import Path from contextlib import contextmanager -from samcli.commands._utils.experimental import ExperimentalFlag, set_experimental from samcli.lib.config.samconfig import SamConfig, DEFAULT_ENV from click.testing import CliRunner @@ -686,6 +685,7 @@ def test_deploy(self, do_cli_mock, template_artifacts_mock1, template_artifacts_ "default", False, True, + "ROLLBACK", ) @patch("samcli.commands.deploy.command.do_cli") @@ -803,6 +803,7 @@ def test_deploy_different_parameter_override_format( "default", False, True, + "ROLLBACK", ) @patch("samcli.commands._utils.experimental.is_experimental_enabled") diff --git a/tests/unit/commands/sync/test_command.py b/tests/unit/commands/sync/test_command.py index 706c496b84..6730b259c0 100644 --- a/tests/unit/commands/sync/test_command.py +++ b/tests/unit/commands/sync/test_command.py @@ -3,13 +3,15 @@ from unittest.mock import ANY, MagicMock, Mock, patch from parameterized import parameterized -from samcli.commands.sync.command import do_cli, execute_code_sync, execute_watch +from samcli.commands.sync.command import do_cli, execute_code_sync, execute_watch, check_enable_dependency_layer from samcli.lib.providers.provider import ResourceIdentifier from samcli.commands._utils.constants import ( DEFAULT_BUILD_DIR, DEFAULT_BUILD_DIR_WITH_AUTO_DEPENDENCY_LAYER, DEFAULT_CACHE_DIR, ) +from samcli.lib.providers.sam_stack_provider import SamLocalStackProvider +from tests.unit.commands.buildcmd.test_build_context import DummyStack def get_mock_sam_config(): @@ -61,11 +63,13 @@ def setUp(self): @patch("samcli.commands.build.command.os") 
@patch("samcli.commands.sync.command.manage_stack") @patch("samcli.commands.sync.command.SyncContext") + @patch("samcli.commands.sync.command.check_enable_dependency_layer") def test_infra_must_succeed_sync( self, code, watch, auto_dependency_layer, + check_enable_adl_mock, SyncContextMock, manage_stack_mock, os_mock, @@ -88,6 +92,8 @@ def test_infra_must_succeed_sync( sync_context_mock = Mock() SyncContextMock.return_value.__enter__.return_value = sync_context_mock + check_enable_adl_mock.return_value = auto_dependency_layer + do_cli( self.template_file, False, @@ -174,6 +180,7 @@ def test_infra_must_succeed_sync( signing_profiles=None, disable_rollback=False, poll_delay=10, + on_failure=None, ) build_context_mock.run.assert_called_once_with() package_context_mock.run.assert_called_once_with() @@ -304,6 +311,7 @@ def test_watch_must_succeed_sync( signing_profiles=None, disable_rollback=False, poll_delay=0.5, + on_failure=None, ) execute_watch_mock.assert_called_once_with( self.template_file, build_context_mock, package_context_mock, deploy_context_mock, auto_dependency_layer @@ -321,11 +329,13 @@ def test_watch_must_succeed_sync( @patch("samcli.commands.build.command.os") @patch("samcli.commands.sync.command.manage_stack") @patch("samcli.commands.sync.command.SyncContext") + @patch("samcli.commands.sync.command.check_enable_dependency_layer") def test_code_must_succeed_sync( self, code, watch, auto_dependency_layer, + check_enable_adl_mock, SyncContextMock, manage_stack_mock, os_mock, @@ -348,6 +358,8 @@ def test_code_must_succeed_sync( sync_context_mock = Mock() SyncContextMock.return_value.__enter__.return_value = sync_context_mock + check_enable_adl_mock.return_value = auto_dependency_layer + do_cli( self.template_file, True, @@ -639,3 +651,43 @@ def test_execute_watch( self.template_file, self.build_context, self.package_context, self.deploy_context, auto_dependency_layer ) watch_manager_mock.return_value.start.assert_called_once_with() + + +class TestDisableADL(TestCase): + @parameterized.expand( + [ + ( + { + "test": { + "Properties": { + "Environment": {"Variables": {"NODE_OPTIONS": ["--something"]}}, + }, + "Metadata": {"BuildMethod": "esbuild", "BuildProperties": {"Sourcemap": True}}, + "Type": "AWS::Serverless::Function", + } + }, + False, + ), + ( + { + "test": { + "Properties": { + "Environment": {"Variables": {"NODE_OPTIONS": ["--something"]}}, + }, + "Type": "AWS::Serverless::Function", + } + }, + True, + ), + ] + ) + @patch("samcli.commands.sync.command.SamLocalStackProvider") + def test_disables_adl_for_esbuild(self, stack_resources, expected, provider_mock): + stack = DummyStack(stack_resources) + stack.stack_path = "/path" + stack.location = "/location" + provider_mock.get_stacks.return_value = ( + [stack], + "", + ) + self.assertEqual(check_enable_dependency_layer("/template/file"), expected) diff --git a/tests/unit/lib/bootstrap/nested_stack/test_nested_stack_manager.py b/tests/unit/lib/bootstrap/nested_stack/test_nested_stack_manager.py index 089893d966..193d6db054 100644 --- a/tests/unit/lib/bootstrap/nested_stack/test_nested_stack_manager.py +++ b/tests/unit/lib/bootstrap/nested_stack/test_nested_stack_manager.py @@ -185,6 +185,7 @@ def test_update_layer_folder( ) patched_shutil.rmtree.assert_called_with(layer_root_folder) + layer_root_folder.mkdir.assert_called_with(BUILD_DIR_PERMISSIONS, parents=True) layer_contents_folder.mkdir.assert_called_with(BUILD_DIR_PERMISSIONS, parents=True) patched_osutils.copytree.assert_called_with(dependencies_dir, 
str(layer_contents_folder)) patched_add_layer_readme.assert_called_with(str(layer_root_folder), function_logical_id) @@ -215,7 +216,10 @@ def test_skipping_dependency_copy_when_function_has_no_dependencies( NestedStackManager.update_layer_folder( build_dir, dependencies_dir, layer_logical_id, function_logical_id, function_runtime ) + layer_root_folder.mkdir.assert_called_with(BUILD_DIR_PERMISSIONS, parents=True) + layer_contents_folder.mkdir.assert_not_called() patched_osutils.copytree.assert_not_called() + patched_add_layer_readme.assert_called_with(str(layer_root_folder), function_logical_id) @parameterized.expand([("python3.8", True), ("ruby2.7", False)]) def test_is_runtime_supported(self, runtime, supported): diff --git a/tests/unit/lib/build_module/test_app_builder.py b/tests/unit/lib/build_module/test_app_builder.py index bf93496207..06c93ad4f5 100644 --- a/tests/unit/lib/build_module/test_app_builder.py +++ b/tests/unit/lib/build_module/test_app_builder.py @@ -25,6 +25,7 @@ DockerConnectionError, ) from samcli.commands.local.cli_common.user_exceptions import InvalidFunctionPropertyType +from samcli.lib.telemetry.event import EventName, EventTracker from samcli.lib.utils.architecture import X86_64, ARM64 from samcli.lib.utils.packagetype import IMAGE, ZIP from samcli.lib.utils.stream_writer import StreamWriter @@ -2367,13 +2368,20 @@ def setUp(self): Mock(), "/build/dir", "/base/dir", "/cache/dir", mode="mode", stream_writer=StreamWriter(sys.stderr) ) + def tearDown(self): + EventTracker.clear_trackers() + @parameterized.expand([([],), (["ExpFlag1", "ExpFlag2"],)]) + @patch("samcli.lib.telemetry.event.EventType.get_accepted_values") @patch("samcli.lib.build.app_builder.LambdaBuilder") @patch("samcli.lib.build.app_builder.get_enabled_experimental_flags") - def test_must_use_lambda_builder(self, experimental_flags, experimental_flags_mock, lambda_builder_mock): + def test_must_use_lambda_builder( + self, experimental_flags, experimental_flags_mock, lambda_builder_mock, event_mock + ): experimental_flags_mock.return_value = experimental_flags config_mock = Mock() builder_instance_mock = lambda_builder_mock.return_value = Mock() + event_mock.return_value = ["runtime"] result = self.builder._build_function_in_process( config_mock, @@ -2436,10 +2444,14 @@ def test_must_raise_on_error(self, lambda_builder_mock): True, ) + @patch("samcli.lib.telemetry.event.EventType.get_accepted_values") @patch("samcli.lib.build.app_builder.LambdaBuilder") @patch("samcli.lib.build.app_builder.get_enabled_experimental_flags") - def test_building_with_experimental_flags(self, get_enabled_experimental_flags_mock, lambda_builder_mock): + def test_building_with_experimental_flags( + self, get_enabled_experimental_flags_mock, lambda_builder_mock, event_mock + ): get_enabled_experimental_flags_mock.return_value = ["A", "B", "C"] + event_mock.return_value = ["runtime"] config_mock = Mock() self.builder._build_function_in_process( config_mock, @@ -2491,11 +2503,18 @@ def setUp(self): ) self.builder._parse_builder_response = Mock() + def tearDown(self): + EventTracker.clear_trackers() + + @patch("samcli.lib.telemetry.event.EventType.get_accepted_values") @patch("samcli.lib.build.app_builder.LambdaBuildContainer") @patch("samcli.lib.build.app_builder.lambda_builders_protocol_version") @patch("samcli.lib.build.app_builder.LOG") @patch("samcli.lib.build.app_builder.osutils") - def test_must_build_in_container(self, osutils_mock, LOGMock, protocol_version_mock, LambdaBuildContainerMock): + def 
test_must_build_in_container( + self, osutils_mock, LOGMock, protocol_version_mock, LambdaBuildContainerMock, event_mock + ): + event_mock.return_value = "runtime" config = Mock() log_level = LOGMock.getEffectiveLevel.return_value = "foo" stdout_data = "container stdout response data" diff --git a/tests/unit/lib/build_module/test_build_graph.py b/tests/unit/lib/build_module/test_build_graph.py index 7e1167be24..c1485b0a8d 100644 --- a/tests/unit/lib/build_module/test_build_graph.py +++ b/tests/unit/lib/build_module/test_build_graph.py @@ -1,3 +1,4 @@ +import os.path from unittest import TestCase from unittest.mock import patch, Mock from uuid import uuid4 @@ -935,3 +936,26 @@ def test_two_esbuild_methods_same_handler(self): self.assertEqual(build_definition1, build_definition2) self.assertEqual(len(build_definitions), 1) self.assertEqual(len(build_definition1.functions), 2) + + @parameterized.expand([(True,), (False,)]) + @patch("samcli.lib.build.build_graph.is_experimental_enabled") + def test_build_folder_with_multiple_functions(self, build_improvements_22_enabled, patched_is_experimental): + patched_is_experimental.return_value = build_improvements_22_enabled + build_graph = BuildGraph("build/path") + build_definition = FunctionBuildDefinition( + "runtime", "codeuri", ZIP, ARM64, {}, "handler", "source_hash", "manifest_hash" + ) + function1 = generate_function(runtime=TestBuildGraph.RUNTIME, codeuri=TestBuildGraph.CODEURI, handler="handler") + function2 = generate_function(runtime=TestBuildGraph.RUNTIME, codeuri=TestBuildGraph.CODEURI, handler="handler") + build_graph.put_function_build_definition(build_definition, function1) + build_graph.put_function_build_definition(build_definition, function2) + + if not build_improvements_22_enabled: + self.assertEqual( + build_definition.get_build_dir("build_dir"), build_definition.functions[0].get_build_dir("build_dir") + ) + else: + self.assertEqual( + build_definition.get_build_dir("build_dir"), + build_definition.functions[0].get_build_dir("build_dir") + "-Shared", + ) diff --git a/tests/unit/lib/build_module/test_build_strategy.py b/tests/unit/lib/build_module/test_build_strategy.py index f183b0b7a2..95d2c40aa5 100644 --- a/tests/unit/lib/build_module/test_build_strategy.py +++ b/tests/unit/lib/build_module/test_build_strategy.py @@ -227,6 +227,27 @@ def test_build_layers_and_functions(self, mock_copy_tree): self.function1_2.get_build_dir(given_build_dir), ) + @patch("samcli.lib.build.build_strategy.is_experimental_enabled") + def test_dedup_build_functions_with_symlink(self, patched_is_experimental, mock_copy_tree): + patched_is_experimental.return_value = True + given_build_function = Mock() + given_build_function.inlinecode = None + given_build_layer = Mock() + given_build_dir = "build_dir" + default_build_strategy = DefaultBuildStrategy( + self.build_graph, given_build_dir, given_build_function, given_build_layer + ) + + build_result = default_build_strategy.build() + # with 22 build improvements, functions with same build definitions should point to same artifact folder + self.assertEqual( + build_result.get(self.function_build_definition1.functions[0].full_path), + build_result.get(self.function_build_definition1.functions[1].full_path), + ) + + # assert that copy operation is not called + mock_copy_tree.assert_not_called() + def test_build_single_function_definition_image_functions_with_same_metadata(self, mock_copy_tree): given_build_function = Mock() built_image = Mock() @@ -345,6 +366,86 @@ def 
test_if_cached_valid_when_build_single_function_definition(self, dir_checksu cached_build_strategy.build_single_layer_definition(layer_definition) self.assertEqual(copytree_mock.call_count, 3) + @parameterized.expand([(True,), (False,)]) + @patch("samcli.lib.build.build_strategy.osutils.copytree") + @patch("samcli.lib.build.build_strategy.pathlib.Path.exists") + @patch("samcli.lib.build.build_strategy.dir_checksum") + @patch("samcli.lib.utils.osutils.os") + @patch("samcli.lib.build.build_strategy.is_experimental_enabled") + def test_if_cached_valid_when_build_single_function_definition_with_build_improvements_22( + self, should_raise_os_error, patch_is_experimental, patch_os, dir_checksum_mock, exists_mock, copytree_mock + ): + patch_is_experimental.return_value = True + if should_raise_os_error: + patch_os.symlink.side_effect = OSError() + with osutils.mkdir_temp() as temp_base_dir: + build_dir = Path(temp_base_dir, ".aws-sam", "build") + build_dir.mkdir(parents=True) + cache_dir = Path(temp_base_dir, ".aws-sam", "cache") + cache_dir.mkdir(parents=True) + + exists_mock.return_value = True + dir_checksum_mock.return_value = CachedBuildStrategyTest.SOURCE_HASH + + build_graph_path = Path(build_dir.parent, "build.toml") + build_graph_path.write_text(CachedBuildStrategyTest.BUILD_GRAPH_CONTENTS) + build_graph = BuildGraph(str(build_dir)) + cached_build_strategy = CachedBuildStrategy( + build_graph, DefaultBuildStrategy, temp_base_dir, build_dir, cache_dir + ) + func1 = Mock() + func1.name = "func1_name" + func1.full_path = "func1_full_path" + func1.inlinecode = None + func1.get_build_dir.return_value = "func1/build/dir" + func2 = Mock() + func2.name = "func2_name" + func2.full_path = "func2_full_path" + func2.inlinecode = None + build_definition = build_graph.get_function_build_definitions()[0] + layer_definition = build_graph.get_layer_build_definitions()[0] + build_graph.put_function_build_definition(build_definition, func1) + build_graph.put_function_build_definition(build_definition, func2) + layer = Mock() + layer.name = "layer_name" + layer.full_path = "layer_full_path" + layer.get_build_dir.return_value = "layer/build/dir" + build_graph.put_layer_build_definition(layer_definition, layer) + cached_build_strategy.build_single_function_definition(build_definition) + cached_build_strategy.build_single_layer_definition(layer_definition) + + if should_raise_os_error: + copytree_mock.assert_has_calls( + [ + call( + str(cache_dir.joinpath(build_graph.get_function_build_definitions()[0].uuid)), + build_graph.get_function_build_definitions()[0].functions[0].get_build_dir(build_dir), + ), + call( + str(cache_dir.joinpath(build_graph.get_layer_build_definitions()[0].uuid)), + build_graph.get_layer_build_definitions()[0].layer.get_build_dir(build_dir), + ), + ] + ) + else: + copytree_mock.assert_not_called() + patch_os.symlink.assert_has_calls( + [ + call( + cache_dir.joinpath(build_graph.get_function_build_definitions()[0].uuid), + Path( + build_graph.get_function_build_definitions()[0].functions[0].get_build_dir(build_dir) + ).absolute(), + ), + call( + cache_dir.joinpath(build_graph.get_layer_build_definitions()[0].uuid), + Path( + build_graph.get_layer_build_definitions()[0].layer.get_build_dir(build_dir) + ).absolute(), + ), + ] + ) + @patch("samcli.lib.build.build_strategy.osutils.copytree") @patch("samcli.lib.build.build_strategy.DefaultBuildStrategy.build_single_function_definition") @patch("samcli.lib.build.build_strategy.DefaultBuildStrategy.build_single_layer_definition") diff --git 
a/tests/unit/lib/build_module/test_bundler.py b/tests/unit/lib/build_module/test_bundler.py new file mode 100644 index 0000000000..be436e1080 --- /dev/null +++ b/tests/unit/lib/build_module/test_bundler.py @@ -0,0 +1,173 @@ +from unittest import TestCase +from unittest.mock import patch, Mock + +from parameterized import parameterized + +from samcli.lib.build.bundler import EsbuildBundlerManager +from tests.unit.commands.buildcmd.test_build_context import DummyStack + + +class EsbuildBundler_is_node_option_set(TestCase): + @parameterized.expand( + [ + ( + {"Properties": {"Environment": {"Variables": {"NODE_OPTIONS": "--enable-source-maps"}}}}, + True, + ), + ( + {"Properties": {"Environment": {"Variables": {"NODE_OPTIONS": "nothing"}}}}, + False, + ), + ] + ) + def test_is_node_option_set(self, resource, expected_result): + esbuild_bundler_manager = EsbuildBundlerManager(Mock()) + self.assertEqual(esbuild_bundler_manager._is_node_option_set(resource), expected_result) + + def test_enable_source_map_missing(self): + esbuild_bundler_manager = EsbuildBundlerManager(Mock()) + self.assertFalse(esbuild_bundler_manager._is_node_option_set({"Properties": {}})) + + +class EsbuildBundler_enable_source_maps(TestCase): + @parameterized.expand( + [ + ( + { + "Resources": { + "test": {"Metadata": {"BuildMethod": "esbuild", "BuildProperties": {"Sourcemap": True}}} + } + }, + ), + ( + { + "Resources": { + "test": { + "Properties": {"Environment": {"Variables": {"NODE_OPTIONS": "--something"}}}, + "Metadata": {"BuildMethod": "esbuild", "BuildProperties": {"Sourcemap": True}}, + } + } + }, + ), + ] + ) + def test_enable_source_maps_only_source_map(self, template): + esbuild_manager = EsbuildBundlerManager(stack=DummyStack(template.get("Resources")), template=template) + + updated_template = esbuild_manager.set_sourcemap_env_from_metadata() + + for _, resource in updated_template["Resources"].items(): + self.assertIn("--enable-source-maps", resource["Properties"]["Environment"]["Variables"]["NODE_OPTIONS"]) + + @parameterized.expand( + [ + ({"Resources": {"test": {"Metadata": {"BuildMethod": "esbuild"}}}}, True), + ( + { + "Resources": { + "test": { + "Properties": {"Environment": {"Variables": {"NODE_OPTIONS": "--enable-source-maps"}}}, + "Metadata": {"BuildMethod": "esbuild"}, + } + } + }, + True, + ), + ( + { + "Resources": { + "test": { + "Metadata": {"BuildMethod": "esbuild", "BuildProperties": {"Sourcemap": False}}, + } + } + }, + False, + ), + ( + { + "Globals": {"Environment": {"Variables": {"NODE_OPTIONS": "--enable-source-maps"}}}, + "Resources": { + "test": { + "Properties": {}, + "Metadata": {"BuildMethod": "esbuild"}, + } + }, + }, + True, + ), + ] + ) + def test_enable_source_maps_only_node_options( + self, + template, + expected_value, + ): + esbuild_manager = EsbuildBundlerManager(stack=DummyStack(template.get("Resources")), template=template) + esbuild_manager._is_node_option_set = Mock() + esbuild_manager._is_node_option_set.return_value = True + updated_template = esbuild_manager.set_sourcemap_metadata_from_env() + + for _, resource in updated_template.resources.items(): + self.assertEqual(resource["Metadata"]["BuildProperties"]["Sourcemap"], expected_value) + + def test_warnings_printed(self): + template = { + "Resources": { + "test": { + "Properties": { + "Environment": {"Variables": {"NODE_OPTIONS": ["--something"]}}, + }, + "Metadata": {"BuildMethod": "esbuild", "BuildProperties": {"Sourcemap": True}}, + } + } + } + esbuild_manager = 
EsbuildBundlerManager(stack=DummyStack(template.get("Resources")), template=template) + esbuild_manager._warn_using_source_maps = Mock() + esbuild_manager._warn_invalid_node_options = Mock() + esbuild_manager.set_sourcemap_env_from_metadata() + + esbuild_manager._warn_using_source_maps.assert_called() + esbuild_manager._warn_invalid_node_options.assert_called() + + +class EsbuildBundler_esbuild_configured(TestCase): + @parameterized.expand( + [ + ( + { + "test": { + "Properties": { + "Environment": {"Variables": {"NODE_OPTIONS": ["--something"]}}, + }, + "Metadata": {"BuildMethod": "esbuild", "BuildProperties": {"Sourcemap": True}}, + "Type": "AWS::Serverless::Function", + } + }, + True, + ), + ( + { + "test": { + "Properties": { + "Environment": {"Variables": {"NODE_OPTIONS": ["--something"]}}, + }, + "Metadata": {"BuildMethod": "Makefile", "BuildProperties": {"Sourcemap": True}}, + "Type": "AWS::Serverless::Function", + } + }, + False, + ), + ], + ) + def test_detects_if_esbuild_is_configured(self, stack_resources, expected): + stack = DummyStack(stack_resources) + stack.stack_path = "/path" + stack.location = "/location" + esbuild_manager = EsbuildBundlerManager(stack) + self.assertEqual(esbuild_manager.esbuild_configured(), expected) + + @patch("samcli.lib.providers.sam_function_provider.SamFunctionProvider.__init__", return_value=None) + @patch("samcli.lib.providers.sam_function_provider.SamFunctionProvider.get_all", return_value={}) + def test_use_raw_codeuri_passed(self, get_all_mock, provider_mock): + EsbuildBundlerManager([]).esbuild_configured() + provider_mock.assert_called_with([[]], use_raw_codeuri=True, ignore_code_extraction_warnings=True) diff --git a/tests/unit/lib/build_module/test_workflow_config.py b/tests/unit/lib/build_module/test_workflow_config.py index 52549a3718..e35d6c05fe 100644 --- a/tests/unit/lib/build_module/test_workflow_config.py +++ b/tests/unit/lib/build_module/test_workflow_config.py @@ -7,12 +7,14 @@ UnsupportedRuntimeException, UnsupportedBuilderException, ) +from samcli.lib.telemetry.event import Event, EventTracker class Test_get_workflow_config(TestCase): def setUp(self): self.code_dir = "" self.project_dir = "" + EventTracker.clear_trackers() @parameterized.expand([("python3.6",), ("python3.7",), ("python3.8",)]) def test_must_work_for_python(self, runtime): @@ -23,6 +25,8 @@ def test_must_work_for_python(self, runtime): self.assertEqual(result.application_framework, None) self.assertEqual(result.manifest_name, "requirements.txt") self.assertIsNone(result.executable_search_paths) + self.assertEqual(len(EventTracker.get_tracked_events()), 1) + self.assertIn(Event("BuildWorkflowUsed", "python-pip"), EventTracker.get_tracked_events()) @parameterized.expand([("nodejs12.x",), ("nodejs14.x",), ("nodejs16.x",)]) def test_must_work_for_nodejs(self, runtime): @@ -33,6 +37,8 @@ def test_must_work_for_nodejs(self, runtime): self.assertEqual(result.application_framework, None) self.assertEqual(result.manifest_name, "package.json") self.assertIsNone(result.executable_search_paths) + self.assertEqual(len(EventTracker.get_tracked_events()), 1) + self.assertIn(Event("BuildWorkflowUsed", "nodejs-npm"), EventTracker.get_tracked_events()) @parameterized.expand([("provided",)]) def test_must_work_for_provided(self, runtime): @@ -42,6 +48,8 @@ def test_must_work_for_provided(self, runtime): self.assertEqual(result.application_framework, None) self.assertEqual(result.manifest_name, "Makefile") self.assertIsNone(result.executable_search_paths) + 
self.assertEqual(len(EventTracker.get_tracked_events()), 1) + self.assertIn(Event("BuildWorkflowUsed", "provided-None"), EventTracker.get_tracked_events()) @parameterized.expand([("provided",)]) def test_must_work_for_provided_with_no_specified_workflow(self, runtime): @@ -52,6 +60,8 @@ def test_must_work_for_provided_with_no_specified_workflow(self, runtime): self.assertEqual(result.application_framework, None) self.assertEqual(result.manifest_name, "Makefile") self.assertIsNone(result.executable_search_paths) + self.assertEqual(len(EventTracker.get_tracked_events()), 1) + self.assertIn(Event("BuildWorkflowUsed", "provided-None"), EventTracker.get_tracked_events()) @parameterized.expand([("provided",)]) def test_raise_exception_for_bad_specified_workflow(self, runtime): @@ -66,6 +76,8 @@ def test_must_work_for_ruby(self, runtime): self.assertEqual(result.application_framework, None) self.assertEqual(result.manifest_name, "Gemfile") self.assertIsNone(result.executable_search_paths) + self.assertEqual(len(EventTracker.get_tracked_events()), 1) + self.assertIn(Event("BuildWorkflowUsed", "ruby-bundler"), EventTracker.get_tracked_events()) @parameterized.expand( [("java8", "build.gradle", "gradle"), ("java8", "build.gradle.kts", "gradle"), ("java8", "pom.xml", "maven")] @@ -80,11 +92,14 @@ def test_must_work_for_java(self, runtime, build_file, dep_manager, os_mock): self.assertEqual(result.dependency_manager, dep_manager) self.assertEqual(result.application_framework, None) self.assertEqual(result.manifest_name, build_file) + self.assertEqual(len(EventTracker.get_tracked_events()), 1) if dep_manager == "gradle": self.assertEqual(result.executable_search_paths, [self.code_dir, self.project_dir]) + self.assertIn(Event("BuildWorkflowUsed", "java-gradle"), EventTracker.get_tracked_events()) else: self.assertIsNone(result.executable_search_paths) + self.assertIn(Event("BuildWorkflowUsed", "java-maven"), EventTracker.get_tracked_events()) def test_must_get_workflow_for_esbuild(self): runtime = "nodejs12.x" @@ -94,6 +109,8 @@ def test_must_get_workflow_for_esbuild(self): self.assertEqual(result.application_framework, None) self.assertEqual(result.manifest_name, "package.json") self.assertIsNone(result.executable_search_paths) + self.assertEqual(len(EventTracker.get_tracked_events()), 1) + self.assertIn(Event("BuildWorkflowUsed", "nodejs-npm-esbuild"), EventTracker.get_tracked_events()) @parameterized.expand([("java8", "unknown.manifest")]) @patch("samcli.lib.build.workflow_config.os") diff --git a/tests/unit/lib/deploy/test_deployer.py b/tests/unit/lib/deploy/test_deployer.py index 4c2f2e2b31..d05cba959a 100644 --- a/tests/unit/lib/deploy/test_deployer.py +++ b/tests/unit/lib/deploy/test_deployer.py @@ -1,5 +1,3 @@ -from logging import captureWarnings -from operator import inv from typing import Container, Iterable, Union import uuid import time @@ -15,8 +13,10 @@ ChangeSetError, DeployStackOutPutFailedError, DeployBucketInDifferentRegionError, + DeployStackStatusMissingError, ) from samcli.lib.deploy.deployer import Deployer +from samcli.lib.deploy.utils import FailureMode from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.utils.time import utc_to_timestamp, to_datetime @@ -1024,6 +1024,14 @@ def test_wait_for_execute(self, patched_time): with self.assertRaises(DeployFailedError): self.deployer.wait_for_execute("test", "CREATE", False) + self.deployer._client.get_waiter = MagicMock() + self.deployer.get_stack_outputs = MagicMock( + side_effect=DeployStackOutPutFailedError("test", 
"message"), return_value=None + ) + self.deployer._display_stack_outputs = MagicMock() + with self.assertRaises(DeployStackOutPutFailedError): + self.deployer.wait_for_execute("test", "CREATE", False) + def test_create_and_wait_for_changeset(self): self.deployer.create_changeset = MagicMock(return_value=({"Id": "test"}, "create")) self.deployer.wait_for_changeset = MagicMock() @@ -1159,6 +1167,7 @@ def test_sync_update_stack(self): notification_arns=[], s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), tags={"unit": "true"}, + on_failure=None, ) self.assertEqual(self.deployer._client.update_stack.call_count, 1) @@ -1170,6 +1179,7 @@ def test_sync_update_stack(self): StackName="test", Tags={"unit": "true"}, TemplateURL=ANY, + DisableRollback=False, ) def test_sync_update_stack_exception(self): @@ -1188,6 +1198,7 @@ def test_sync_update_stack_exception(self): notification_arns=[], s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), tags={"unit": "true"}, + on_failure=None, ) def test_sync_create_stack(self): @@ -1204,6 +1215,7 @@ def test_sync_create_stack(self): notification_arns=[], s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), tags={"unit": "true"}, + on_failure=FailureMode.ROLLBACK, ) self.assertEqual(self.deployer._client.create_stack.call_count, 1) @@ -1215,6 +1227,7 @@ def test_sync_create_stack(self): StackName="test", Tags={"unit": "true"}, TemplateURL=ANY, + OnFailure=str(FailureMode.ROLLBACK), ) def test_sync_create_stack_exception(self): @@ -1233,6 +1246,7 @@ def test_sync_create_stack_exception(self): notification_arns=[], s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), tags={"unit": "true"}, + on_failure=None, ) def test_process_kwargs(self): @@ -1248,3 +1262,116 @@ def test_process_kwargs(self): } result = self.deployer._process_kwargs(kwargs, None, capabilities, role_arn, notification_arns) self.assertEqual(expected, result) + + def test_sync_disable_rollback_using_on_failure(self): + self.deployer.has_stack = MagicMock(return_value=True) + self.deployer.wait_for_execute = MagicMock() + self.deployer.sync( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + on_failure=FailureMode.DO_NOTHING, + ) + + self.assertEqual(self.deployer._client.update_stack.call_count, 1) + self.deployer._client.update_stack.assert_called_with( + Capabilities=["CAPABILITY_IAM"], + NotificationARNs=[], + Parameters=[{"ParameterKey": "a", "ParameterValue": "b"}], + RoleARN="role-arn", + StackName="test", + Tags={"unit": "true"}, + TemplateURL=ANY, + DisableRollback=True, + ) + + def test_sync_create_stack_on_failure_delete(self): + self.deployer.has_stack = MagicMock(return_value=False) + self.deployer.wait_for_execute = MagicMock() + self.deployer.sync( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + on_failure=str(FailureMode.DELETE), + ) + + self.assertEqual(self.deployer._client.create_stack.call_count, 1) + self.deployer._client.create_stack.assert_called_with( + 
Capabilities=["CAPABILITY_IAM"], + NotificationARNs=[], + Parameters=[{"ParameterKey": "a", "ParameterValue": "b"}], + RoleARN="role-arn", + StackName="test", + Tags={"unit": "true"}, + TemplateURL=ANY, + OnFailure=str(FailureMode.DELETE), + ) + + def test_rollback_stack_new_stack_failed(self): + self.deployer._client.describe_stacks = MagicMock(return_value={"Stacks": [{"StackStatus": "CREATE_FAILED"}]}) + self.deployer.wait_for_execute = MagicMock() + self.deployer.describe_stack_events = MagicMock() + + self.deployer.rollback_delete_stack("test") + + self.assertEqual(self.deployer._client.rollback_stack.call_count, 0) + self.assertEqual(self.deployer._client.delete_stack.call_count, 1) + + def test_rollback_stack_update_stack_delete(self): + self.deployer._get_stack_status = MagicMock(side_effect=["UPDATE_FAILED", "ROLLBACK_COMPLETE"]) + self.deployer._rollback_wait = MagicMock() + self.deployer.wait_for_execute = MagicMock() + self.deployer.describe_stack_events = MagicMock() + + self.deployer.rollback_delete_stack("test") + + self.assertEqual(self.deployer._client.rollback_stack.call_count, 1) + self.assertEqual(self.deployer._client.delete_stack.call_count, 1) + self.assertEqual(self.deployer._client.describe_stack_events.call_count, 0) + + def test_rollback_invalid_stack_name(self): + self.deployer._client.describe_stacks = MagicMock( + side_effect=ClientError(error_response={"Error": {"Message": "Error"}}, operation_name="describe_stacks") + ) + + with self.assertRaises(ClientError): + self.deployer.rollback_delete_stack("test") + + def test_get_stack_status(self): + self.deployer._client.describe_stacks = MagicMock(return_value={"Stacks": [{"StackStatus": "CREATE_FAILED"}]}) + + result = self.deployer._get_stack_status("test") + + self.assertEqual(result, "CREATE_FAILED") + + @patch("samcli.lib.deploy.deployer.LOG.error") + @patch("samcli.lib.deploy.deployer.time.sleep") + def test_rollback_wait(self, time_mock, log_mock): + self.deployer._get_stack_status = MagicMock(return_value="UPDATE_ROLLBACK_COMPLETE") + + self.deployer._rollback_wait("test") + + self.assertEqual(log_mock.call_count, 0) + + @patch("samcli.lib.deploy.deployer.LOG.error") + @patch("samcli.lib.deploy.deployer.time.sleep") + def test_rollback_wait_timeout(self, time_mock, log_mock): + self.deployer._get_stack_status = MagicMock(return_value="CREATE_FAILED") + + self.deployer._rollback_wait("test") + + self.assertEqual(log_mock.call_count, 1) diff --git a/tests/unit/lib/telemetry/test_event.py b/tests/unit/lib/telemetry/test_event.py new file mode 100644 index 0000000000..f566a68554 --- /dev/null +++ b/tests/unit/lib/telemetry/test_event.py @@ -0,0 +1,244 @@ +""" +Module for testing the event.py methods and classes. 
+""" + +from enum import Enum +import threading +from typing import List, Tuple +from unittest import TestCase +from unittest.mock import ANY, Mock, patch + +from samcli.lib.telemetry.event import Event, EventCreationError, EventTracker, track_long_event + + +class DummyEventName(Enum): + TEST_ONE = "TestOne" + TEST_TWO = "TestTwo" + TEST_THREE = "TestThree" + + +class TestEventCreation(TestCase): + @patch("samcli.lib.telemetry.event.Event._verify_event") + @patch("samcli.lib.telemetry.event.EventType") + @patch("samcli.lib.telemetry.event.EventName") + def test_create_event_exists(self, name_mock, type_mock, verify_mock): + name_mock.return_value = Mock(value="TestOne") + type_mock.get_accepted_values.return_value = ["value1", "value2"] + verify_mock.return_value = None + + test_event = Event("TestOne", "value1") + + name_mock.assert_called_once() + self.assertEqual(test_event.event_name.value, "TestOne") + self.assertEqual(test_event.event_value, "value1") + self.assertEqual(test_event.thread_id, threading.get_ident()) # Should be on the same thread + + @patch("samcli.lib.telemetry.event.EventType") + @patch("samcli.lib.telemetry.event.EventName") + @patch("samcli.lib.telemetry.event.Event._get_event_names") + def test_create_event_value_doesnt_exist(self, name_getter_mock, name_mock, type_mock): + name_getter_mock.return_value = ["TestOne"] + name_mock.return_value = Mock(value="TestOne") + type_mock.get_accepted_values.return_value = ["value1", "value2"] + + with self.assertRaises(EventCreationError) as e: + Event("TestOne", "value3") + + self.assertEqual(e.exception.args[0], "Event 'TestOne' does not accept value 'value3'.") + + def test_create_event_name_doesnt_exist(self): + with self.assertRaises(EventCreationError) as e: + Event("SomeEventThatDoesn'tExist", "value1") + + self.assertEqual(e.exception.args[0], "Event 'SomeEventThatDoesn'tExist' does not exist.") + + @patch("samcli.lib.telemetry.event.Event._verify_event") + @patch("samcli.lib.telemetry.event.EventType") + @patch("samcli.lib.telemetry.event.EventName") + def test_event_to_json(self, name_mock, type_mock, verify_mock): + name_mock.return_value = Mock(value="Testing") + type_mock.get_accepted_values.return_value = ["value1"] + verify_mock.return_value = None + + test_event = Event("Testing", "value1") + + self.assertEqual( + test_event.to_json(), + {"event_name": "Testing", "event_value": "value1", "thread_id": threading.get_ident(), "time_stamp": ANY}, + ) + + +class TestEventTracker(TestCase): + def setUp(self): + EventTracker.clear_trackers() + + @patch("samcli.lib.telemetry.event.EventTracker._event_lock") + @patch("samcli.lib.telemetry.event.Event") + def test_track_event(self, event_mock, lock_mock): + lock_mock.__enter__ = Mock() + lock_mock.__exit__ = Mock() + + # Test that an event can be tracked + dummy_event = Mock(event_name="Test", event_value="SomeValue", thread_id=threading.get_ident(), timestamp=ANY) + event_mock.return_value = dummy_event + + EventTracker.track_event("Test", "SomeValue") + + self.assertEqual(len(EventTracker._events), 1) + self.assertEqual(EventTracker._events[0], dummy_event) + lock_mock.__enter__.assert_called() # Lock should have been accessed + lock_mock.__exit__.assert_called() + lock_mock.__enter__.reset_mock() + lock_mock.__exit__.reset_mock() + + # Test that the Event list will be cleared + EventTracker.clear_trackers() + + self.assertEqual(len(EventTracker._events), 0) + lock_mock.__enter__.assert_called() # Lock should have been accessed + lock_mock.__exit__.assert_called() + 
+ @patch("samcli.lib.telemetry.event.Telemetry") + def test_events_get_sent(self, telemetry_mock): + # Create fake emit to capture tracked events + dummy_telemetry = Mock() + emitted_events = [] + mock_emit = lambda x: emitted_events.append(x) + dummy_telemetry.emit.return_value = None + dummy_telemetry.emit.side_effect = mock_emit + telemetry_mock.return_value = dummy_telemetry + + # Verify that no events are sent if tracker is empty + # Note we are using the in-line version of the method, as the regular send_events will + # simply call this method in a new thread + EventTracker._send_events_in_thread() + + self.assertEqual(emitted_events, []) # No events should have been collected + dummy_telemetry.emit.assert_not_called() # Nothing should have been sent (empty list) + + # Verify that events get sent when they exist in tracker + dummy_event = Mock( + event_name=Mock(value="Test"), event_value="SomeValue", thread_id=threading.get_ident(), time_stamp=ANY + ) + dummy_event.to_json.return_value = Event.to_json(dummy_event) + EventTracker._events.append(dummy_event) + + EventTracker._send_events_in_thread() + + dummy_telemetry.emit.assert_called() + self.assertEqual(len(emitted_events), 1) # The list of metrics (1) is copied into emitted_events + metric_data = emitted_events[0].get_data() + expected_data = { + "requestId": ANY, + "installationId": ANY, + "sessionId": ANY, + "executionEnvironment": ANY, + "ci": ANY, + "pyversion": ANY, + "samcliVersion": ANY, + "metricSpecificAttributes": { + "events": [ + { + "event_name": "Test", + "event_value": "SomeValue", + "thread_id": ANY, + "time_stamp": ANY, + } + ] + }, + } + self.assertEqual(len(metric_data["metricSpecificAttributes"]["events"]), 1) # There is one event captured + self.assertEqual(metric_data, expected_data) + self.assertEqual(len(EventTracker._events), 0) # Events should have been sent and cleared + + @patch( + "samcli.lib.telemetry.event.EventTracker.send_events", + return_value=None, + ) + @patch("samcli.lib.telemetry.event.Event") + def test_track_event_events_sent_when_capacity_reached(self, event_mock, send_mock): + # Create dummy Event creator to bypass verification + def make_mock_event(name, value): + dummy = Mock(event_name=Mock(value=name), event_value=value, thread_id=ANY, time_stamp=ANY) + dummy.to_json.return_value = Event.to_json(dummy) + return dummy + + event_mock.return_value = make_mock_event + + # Fill EventTracker with almost enough events to reach capacity + for i in range(EventTracker.MAX_EVENTS - 1): + EventTracker.track_event(f"Name{i}", f"Value{i}") + + send_mock.assert_not_called() + self.assertEqual(len(EventTracker._events), EventTracker.MAX_EVENTS - 1) + + # Add one more event to trigger sending all events + EventTracker.track_event("TheStrawThat", "BreaksTheCamel'sBack") + + # Wait for all threads to complete + for thread in threading.enumerate(): + if thread is threading.main_thread(): + continue + thread.join() + + send_mock.assert_called() + + +class TestTrackLongEvent(TestCase): + @patch("samcli.lib.telemetry.event.EventTracker.send_events") + @patch("samcli.lib.telemetry.event.EventTracker.track_event") + @patch("samcli.lib.telemetry.event.Event", return_value=None) + def test_long_event_is_tracked(self, event_mock, track_mock, send_mock): + mock_tracker = {} + mock_tracker["tracked_events"]: List[Tuple[str, str]] = [] # Tuple to bypass Event verification + mock_tracker["emitted_events"]: List[Tuple[str, str]] = [] + + def mock_track(name, value): + mock_tracker["tracked_events"].append((name, 
value)) + + def mock_send(): + mock_tracker["emitted_events"] = mock_tracker["tracked_events"] + mock_tracker["tracked_events"] = [] # Mimic clear_trackers() + + track_mock.side_effect = mock_track + send_mock.side_effect = mock_send + + @track_long_event("StartEvent", "StartValue", "EndEvent", "EndValue") + def func(): + self.assertEqual(len(mock_tracker["tracked_events"]), 1, "Starting event not tracked.") + self.assertIn(("StartEvent", "StartValue"), mock_tracker["tracked_events"], "Incorrect starting event.") + + func() + + self.assertEqual(len(mock_tracker["tracked_events"]), 0, "Tracked events not reset; send_events not called.") + self.assertEqual(len(mock_tracker["emitted_events"]), 2, "Unexpected number of emitted events.") + self.assertIn(("StartEvent", "StartValue"), mock_tracker["emitted_events"], "Starting event not tracked.") + self.assertIn(("EndEvent", "EndValue"), mock_tracker["emitted_events"], "Ending event not tracked.") + + @patch("samcli.lib.telemetry.event.EventTracker.send_events") + @patch("samcli.lib.telemetry.event.EventTracker.track_event") + def test_nothing_tracked_if_invalid_events(self, track_mock, send_mock): + mock_tracker = {} + mock_tracker["tracked_events"]: List[Tuple[str, str]] = [] # Tuple to bypass Event verification + mock_tracker["emitted_events"]: List[Tuple[str, str]] = [] + + def mock_track(name, value): + mock_tracker["tracked_events"].append((name, value)) + + def mock_send(): + mock_tracker["emitted_events"] = mock_tracker["tracked_events"] + mock_tracker["tracked_events"] = [] # Mimic clear_trackers() + + track_mock.side_effect = mock_track + send_mock.side_effect = mock_send + + @track_long_event("DefinitelyNotARealEvent", "Nope", "ThisEventDoesntExist", "NuhUh") + def func(): + self.assertEqual(len(mock_tracker["tracked_events"]), 0, "Events should not have been tracked.") + + func() + + self.assertEqual(len(mock_tracker["tracked_events"]), 0, "Events should not have been tracked.") + self.assertEqual(len(mock_tracker["emitted_events"]), 0, "Events should not have been emitted.") + track_mock.assert_not_called() # Tracker should not have been called + send_mock.assert_not_called() # Sender should not have been called diff --git a/tests/unit/lib/telemetry/test_metric.py b/tests/unit/lib/telemetry/test_metric.py index 3cfc680a2e..617eb47d96 100644 --- a/tests/unit/lib/telemetry/test_metric.py +++ b/tests/unit/lib/telemetry/test_metric.py @@ -2,6 +2,7 @@ import platform import time import uuid +import traceback from parameterized import parameterized @@ -9,12 +10,15 @@ from unittest import TestCase from unittest.mock import patch, Mock, ANY, call +from samcli.lib.telemetry.event import EventTracker import samcli.lib.telemetry.metric from samcli.lib.telemetry.cicd import CICDPlatform from samcli.lib.telemetry.metric import ( capture_return_value, _get_metric, + _get_stack_trace_info, + _clean_stack_summary_paths, send_installed_metric, track_command, track_template_warnings, @@ -132,6 +136,7 @@ def setUp(self): GlobalConfigClassMock = Mock() self.telemetry_instance = TelemetryClassMock.return_value = Mock() self.gc_instance_mock = GlobalConfigClassMock.return_value = Mock() + EventTracker.clear_trackers() self.telemetry_class_patcher = patch("samcli.lib.telemetry.metric.Telemetry", TelemetryClassMock) self.gc_patcher = patch("samcli.lib.telemetry.metric.GlobalConfig", GlobalConfigClassMock) @@ -181,9 +186,12 @@ def real_fn(): "debugFlagProvided": False, "region": "myregion", "commandName": "fakesam local invoke", + "metricSpecificAttributes": 
ANY, "duration": ANY, "exitReason": "success", "exitCode": 0, + "stackTrace": None, + "exceptionMessage": None, } args, _ = self.telemetry_instance.emit.call_args_list[0] metric = args[0] @@ -227,8 +235,12 @@ def real_fn(): "Measured duration must be in milliseconds and greater than equal to the sleep duration", ) + @patch("samcli.lib.telemetry.metric._get_stack_trace_info") @patch("samcli.lib.telemetry.metric.Context") - def test_must_record_user_exception(self, ContextMock): + def test_must_record_user_exception(self, ContextMock, get_stack_trace_info_mock): + expected_stack_trace = "Expected stack trace" + expected_exception_message = "Expected exception message" + get_stack_trace_info_mock.return_value = (expected_stack_trace, expected_exception_message) ContextMock.get_current_context.return_value = self.context_mock expected_exception = UserException("Something went wrong") expected_exception.exit_code = 1235 @@ -244,14 +256,26 @@ def real_fn(): "Must re-raise the original exception object " "without modification", ) - expected_attrs = _ignore_common_attributes({"exitReason": "UserException", "exitCode": 1235}) + get_stack_trace_info_mock.assert_called_once() + expected_attrs = _ignore_common_attributes( + { + "exitReason": "UserException", + "exitCode": 1235, + "stackTrace": expected_stack_trace, + "exceptionMessage": expected_exception_message, + } + ) args, _ = self.telemetry_instance.emit.call_args_list[0] metric = args[0] assert metric.get_metric_name() == "commandRun" self.assertGreaterEqual(metric.get_data().items(), expected_attrs.items()) + @patch("samcli.lib.telemetry.metric._get_stack_trace_info") @patch("samcli.lib.telemetry.metric.Context") - def test_must_record_wrapped_user_exception(self, ContextMock): + def test_must_record_wrapped_user_exception(self, ContextMock, get_stack_trace_info_mock): + expected_stack_trace = "Expected stack trace" + expected_exception_message = "Expected exception message" + get_stack_trace_info_mock.return_value = (expected_stack_trace, expected_exception_message) ContextMock.get_current_context.return_value = self.context_mock expected_exception = UserException("Something went wrong", wrapped_from="CustomException") expected_exception.exit_code = 1235 @@ -267,14 +291,26 @@ def real_fn(): "Must re-raise the original exception object " "without modification", ) - expected_attrs = _ignore_common_attributes({"exitReason": "CustomException", "exitCode": 1235}) + get_stack_trace_info_mock.assert_called_once() + expected_attrs = _ignore_common_attributes( + { + "exitReason": "CustomException", + "exitCode": 1235, + "stackTrace": expected_stack_trace, + "exceptionMessage": expected_exception_message, + } + ) args, _ = self.telemetry_instance.emit.call_args_list[0] metric = args[0] assert metric.get_metric_name() == "commandRun" self.assertGreaterEqual(metric.get_data().items(), expected_attrs.items()) + @patch("samcli.lib.telemetry.metric._get_stack_trace_info") @patch("samcli.lib.telemetry.metric.Context") - def test_must_record_any_exceptions(self, ContextMock): + def test_must_record_any_exceptions(self, ContextMock, get_stack_trace_info_mock): + expected_stack_trace = "Expected stack trace" + expected_exception_message = "Expected exception message" + get_stack_trace_info_mock.return_value = (expected_stack_trace, expected_exception_message) ContextMock.get_current_context.return_value = self.context_mock expected_exception = KeyError("IO Error test") @@ -289,8 +325,14 @@ def real_fn(): "Must re-raise the original exception object " "without 
modification", ) + get_stack_trace_info_mock.assert_called_once() expected_attrs = _ignore_common_attributes( - {"exitReason": "KeyError", "exitCode": 255} # Unhandled exceptions always use exit code 255 + { + "exitReason": "KeyError", + "exitCode": 255, + "stackTrace": expected_stack_trace, + "exceptionMessage": expected_exception_message, + } # Unhandled exceptions always use exit code 255 ) args, _ = self.telemetry_instance.emit.call_args_list[0] metric = args[0] @@ -338,6 +380,78 @@ def real_fn(a, b=None): "The command metrics be emitted when used as a decorator", ) + @patch("samcli.lib.telemetry.event.EventTracker.send_events", return_value=None) + @patch("samcli.lib.telemetry.metric.Context") + def test_must_send_events(self, ContextMock, send_mock): + ContextMock.get_current_context.return_value = self.context_mock + + def real_fn(): + pass + + track_command(real_fn)() + + send_mock.assert_called() + + +class TestStackTrace(TestCase): + def setUp(self): + pass + + def tearDown(self): + pass + + def test_must_return_stack_trace_info(self): + exception = Exception("Something went wrong...") + stack_trace, exception_message = _get_stack_trace_info(exception) + self.assertIsInstance(stack_trace, str) + self.assertIsInstance(exception_message, str) + + def test_must_clean_path_preceding_site_packages(self): + stack_summary = traceback.StackSummary.from_list( + [ + ("/python3.8/site-packages/botocore/abc.py", 264, "___iter__", "return func(*args, **kwargs)"), + ("/python3.8/site-packages/samcli/abc.py", 87, "wrapper", "return func(*args, **kwargs)"), + ] + ) + expected_stack_summary = traceback.StackSummary.from_list( + [ + ("/../site-packages/botocore/abc.py", 264, "___iter__", "return func(*args, **kwargs)"), + ("/../site-packages/samcli/abc.py", 87, "wrapper", "return func(*args, **kwargs)"), + ] + ) + _clean_stack_summary_paths(stack_summary) + self.assertEqual(stack_summary, expected_stack_summary) + + def test_must_clean_path_preceding_samcli(self): + stack_summary = traceback.StackSummary.from_list( + [("/aws-sam-cli/samcli/abc.py", 87, "wrapper", "return func(*args, **kwargs)")] + ) + expected_stack_summary = traceback.StackSummary.from_list( + [("/../samcli/abc.py", 87, "wrapper", "return func(*args, **kwargs)")] + ) + _clean_stack_summary_paths(stack_summary) + self.assertEqual(stack_summary, expected_stack_summary) + + def test_must_clean_path_preceding_last_file(self): + stack_summary = traceback.StackSummary.from_list( + [("/test-folder/abc.py", 508, "_api_call", "return self._make_api_call(operation_name, kwargs)")] + ) + expected_stack_summary = traceback.StackSummary.from_list( + [("/../abc.py", 508, "_api_call", "return self._make_api_call(operation_name, kwargs)")] + ) + _clean_stack_summary_paths(stack_summary) + self.assertEqual(stack_summary, expected_stack_summary) + + def test_must_clean_path_preceding_last_file_windows(self): + stack_summary = traceback.StackSummary.from_list( + [("\\test-folder\\abc.py", 508, "_api_call", "return self._make_api_call(operation_name, kwargs)")] + ) + expected_stack_summary = traceback.StackSummary.from_list( + [("\\..\\abc.py", 508, "_api_call", "return self._make_api_call(operation_name, kwargs)")] + ) + _clean_stack_summary_paths(stack_summary) + self.assertEqual(stack_summary, expected_stack_summary) + class TestParameterCapture(TestCase): def setUp(self): diff --git a/tests/unit/lib/telemetry/test_project_metadata.py b/tests/unit/lib/telemetry/test_project_metadata.py new file mode 100644 index 0000000000..b165e912fa --- 
/dev/null +++ b/tests/unit/lib/telemetry/test_project_metadata.py @@ -0,0 +1,119 @@ +""" +Module for testing the project_metadata.py methods. +""" + +import hashlib +from subprocess import CompletedProcess, CalledProcessError +from unittest.mock import patch, Mock +from unittest import TestCase + +from parameterized import parameterized + +from samcli.lib.telemetry.project_metadata import get_git_remote_origin_url, get_project_name, get_initial_commit_hash + + +class TestProjectMetadata(TestCase): + def setUp(self): + self.gc_mock = Mock() + self.global_config_patcher = patch("samcli.lib.telemetry.project_metadata.GlobalConfig", self.gc_mock) + self.global_config_patcher.start() + self.gc_mock.return_value.telemetry_enabled = True + + def tearDown(self): + self.global_config_patcher.stop() + + def test_return_none_when_telemetry_disabled(self): + self.gc_mock.return_value.telemetry_enabled = False + + git_origin = get_git_remote_origin_url() + self.assertIsNone(git_origin) + + project_name = get_project_name() + self.assertIsNone(project_name) + + initial_commit = get_initial_commit_hash() + self.assertIsNone(initial_commit) + + @parameterized.expand( + [ + ("https://github.com/aws/aws-sam-cli.git\n", "github.com/aws/aws-sam-cli.git"), + ("http://github.com/aws/aws-sam-cli.git\n", "github.com/aws/aws-sam-cli.git"), + ("git@github.com:aws/aws-sam-cli.git\n", "github.com/aws/aws-sam-cli.git"), + ("https://github.com/aws/aws-cli.git\n", "github.com/aws/aws-cli.git"), + ("http://not.a.real.site.com/somebody/my-project.git", "not.a.real.site.com/somebody/my-project.git"), + ("git@not.github:person/my-project.git", "not.github/person/my-project.git"), + ] + ) + @patch("samcli.lib.telemetry.project_metadata.subprocess.run") + def test_retrieve_git_origin(self, origin, expected, sp_mock): + sp_mock.return_value = CompletedProcess(["git", "config", "--get", "remote.origin.url"], 0, stdout=origin) + + git_origin = get_git_remote_origin_url() + expected_hash = hashlib.sha256() + expected_hash.update(expected.encode("utf-8")) + self.assertEqual(git_origin, expected_hash.hexdigest()) + + @patch("samcli.lib.telemetry.project_metadata.subprocess.run") + def test_retrieve_git_origin_when_not_a_repo(self, sp_mock): + sp_mock.side_effect = CalledProcessError(128, ["git", "config", "--get", "remote.origin.url"]) + + git_origin = get_git_remote_origin_url() + self.assertIsNone(git_origin) + + @parameterized.expand( + [ + ("https://github.com/aws/aws-sam-cli.git\n", "aws-sam-cli"), + ("http://github.com/aws/aws-sam-cli.git\n", "aws-sam-cli"), + ("git@github.com:aws/aws-sam-cli.git\n", "aws-sam-cli"), + ("https://github.com/aws/aws-cli.git\n", "aws-cli"), + ("http://not.a.real.site.com/somebody/my-project.git", "my-project"), + ("git@not.github:person/my-project.git", "my-project"), + ] + ) + @patch("samcli.lib.telemetry.project_metadata.subprocess.run") + def test_retrieve_project_name_from_git(self, origin, expected, sp_mock): + sp_mock.return_value = CompletedProcess(["git", "config", "--get", "remote.origin.url"], 0, stdout=origin) + + project_name = get_project_name() + expected_hash = hashlib.sha256() + expected_hash.update(expected.encode("utf-8")) + self.assertEqual(project_name, expected_hash.hexdigest()) + + @parameterized.expand( + [ + ("C:/Users/aws/path/to/library/aws-sam-cli"), + ("C:\\Users\\aws\\Windows\\path\\aws-sam-cli"), + ("C:/"), + ("C:\\"), + ("E:/path/to/another/dir"), + ("This/one/doesn't/start/with/a/letter"), + ("/banana"), + ("D:/one/more/just/to/be/safe"), + ] + ) + 
@patch("samcli.lib.telemetry.project_metadata.getcwd") + @patch("samcli.lib.telemetry.project_metadata.subprocess.run") + def test_retrieve_project_name_from_dir(self, cwd, sp_mock, cwd_mock): + sp_mock.side_effect = CalledProcessError(128, ["git", "config", "--get", "remote.origin.url"]) + cwd_mock.return_value = cwd + + project_name = get_project_name() + expected_hash = hashlib.sha256() + expected_hash.update(cwd.replace("\\", "/").encode("utf-8")) + self.assertEqual(project_name, expected_hash.hexdigest()) + + @parameterized.expand( + [ + ("0000000000000000000000000000000000000000"), + ("0123456789abcdef0123456789abcdef01234567"), + ("abababababababababababababababababababab"), + ] + ) + @patch("samcli.lib.telemetry.project_metadata.subprocess.run") + def test_retrieve_initial_commit(self, git_hash, sp_mock): + sp_mock.return_value = CompletedProcess(["git", "rev-list", "--max-parents=0", "HEAD"], 0, stdout=git_hash) + + initial_commit = get_initial_commit_hash() + expected_hash = hashlib.sha256() + expected_hash.update(git_hash.encode("utf-8")) + self.assertEqual(initial_commit, expected_hash.hexdigest()) diff --git a/tests/unit/lib/utils/test_osutils.py b/tests/unit/lib/utils/test_osutils.py index e09e1b47ee..d461fc35a6 100644 --- a/tests/unit/lib/utils/test_osutils.py +++ b/tests/unit/lib/utils/test_osutils.py @@ -100,3 +100,31 @@ def test_must_delete_if_path_exist(self, patched_rmtree, patched_path): rmtree_if_exists(Mock()) patched_rmtree.assert_called_with(mock_path_obj) + + +class Test_create_symlink_or_copy(TestCase): + @patch("samcli.lib.utils.osutils.Path") + @patch("samcli.lib.utils.osutils.os") + @patch("samcli.lib.utils.osutils.copytree") + def test_must_create_symlink_with_absolute_path(self, patched_copy_tree, pathced_os, patched_path): + source_path = "source/path" + destination_path = "destination/path" + osutils.create_symlink_or_copy(source_path, destination_path) + + pathced_os.symlink.assert_called_with( + patched_path(source_path).absolute(), patched_path(destination_path).absolute() + ) + patched_copy_tree.assert_not_called() + + @patch("samcli.lib.utils.osutils.Path") + @patch("samcli.lib.utils.osutils.os") + @patch("samcli.lib.utils.osutils.copytree") + def test_must_copy_if_symlink_fails(self, patched_copy_tree, pathced_os, patched_path): + pathced_os.symlink.side_effect = OSError("Unable to create symlink") + + source_path = "source/path" + destination_path = "destination/path" + osutils.create_symlink_or_copy(source_path, destination_path) + + pathced_os.symlink.assert_called_once() + patched_copy_tree.assert_called_with(source_path, destination_path) diff --git a/tests/unit/lib/utils/test_tar.py b/tests/unit/lib/utils/test_tar.py index e14219d3f4..ebbfb28756 100644 --- a/tests/unit/lib/utils/test_tar.py +++ b/tests/unit/lib/utils/test_tar.py @@ -31,6 +31,32 @@ def test_generating_tarball(self, temporary_file_patch, tarfile_open_patch): temp_file_mock.close.assert_called_once() tarfile_open_patch.assert_called_once_with(fileobj=temp_file_mock, mode="w") + @patch("samcli.lib.utils.tar.tarfile.open") + @patch("samcli.lib.utils.tar.TemporaryFile") + def test_generating_tarball_with_gzip(self, temporary_file_patch, tarfile_open_patch): + temp_file_mock = Mock() + temporary_file_patch.return_value = temp_file_mock + + tarfile_file_mock = Mock() + tarfile_open_patch.return_value.__enter__.return_value = tarfile_file_mock + + with create_tarball({"/some/path": "/layer1", "/some/dockerfile/path": "/Dockerfile"}, mode="w:gz") as acutal: + self.assertEqual(acutal, 
temp_file_mock) + + tarfile_file_mock.add.assert_called() + tarfile_file_mock.add.assert_has_calls( + [ + call("/some/path", arcname="/layer1", filter=None), + call("/some/dockerfile/path", arcname="/Dockerfile", filter=None), + ], + any_order=True, + ) + + temp_file_mock.flush.assert_called_once() + temp_file_mock.seek.assert_called_once_with(0) + temp_file_mock.close.assert_called_once() + tarfile_open_patch.assert_called_once_with(fileobj=temp_file_mock, mode="w:gz") + @patch("samcli.lib.utils.tar.tarfile.open") @patch("samcli.lib.utils.tar.TemporaryFile") def test_generating_tarball_with_filter(self, temporary_file_patch, tarfile_open_patch):