From ba348f59dad69dadeedb0ada962ba78105dd0b81 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Wed, 13 Nov 2019 11:06:58 -0800 Subject: [PATCH 01/45] Deploy off aws cli (#1455) * feat: sam deploy without aws cli pre-installed - Not breaking parameter overrides formats - still requires refactoring and error handling * feat: new click types for deploy parameters * feat: show changeset and stack events - needs refactoring * refactor: move deploy classes to lib - wire up command.py for `sam deploy` - move deploy specific exceptions to inherit from UserException * rebase: latest from `sam package` port * feat: decorator for printing tables - `sam deploy` now has tables while showcasing the changeset and showcasing events happening during deploy. * fix: wrap text on resource status column on `sam deploy` - fixed unit tests - linting fixes - doc strings - further unit tests and integration tests need to be added. * fix: cleaner text formatting for tables * tests: add unit tests for full suite of `sam deploy` * tests: add integration tests for `sam deploy` * tests: regression test suite for `sam deploy` - exercise all command line parameters for `aws` and `sam` * fix: deploy command now showcases stack outputs * fix: address comments * fix: return stack outputs from `get_stack_outputs` * fix: width margins on table prints * fix: address comments - add retries - more regression testing - remove types for capabilities * tests: tests for pprint of tables * usability: add table headers - show cases Add, Modify, Delete with +, * and - --- .pylintrc | 2 +- samcli/cli/types.py | 95 +++- .../_utils/custom_options/__init__.py | 0 .../_utils/custom_options/option_nargs.py | 50 ++ samcli/commands/_utils/options.py | 60 +- samcli/commands/_utils/table_print.py | 110 ++++ samcli/commands/deploy/__init__.py | 54 +- samcli/commands/deploy/command.py | 185 ++++++ samcli/commands/deploy/deploy_context.py | 185 ++++++ samcli/commands/deploy/exceptions.py | 54 ++ samcli/lib/deploy/__init__.py | 0 samcli/lib/deploy/deployer.py | 418 ++++++++++++++ samcli/lib/samlib/cloudformation_command.py | 55 -- samcli/lib/utils/time.py | 10 + tests/integration/deploy/__init__.py | 0 tests/integration/deploy/deploy_integ_base.py | 83 +++ .../integration/deploy/test_deploy_command.py | 84 +++ .../integration/package/package_integ_base.py | 7 +- .../package/aws-serverless-function.yaml | 6 + tests/regression/deploy/__init__.py | 0 .../deploy/regression_deploy_base.py | 106 ++++ .../deploy/test_deploy_regression.py | 154 +++++ tests/unit/cli/test_types.py | 101 +++- .../_utils/custom_options/__init__.py | 0 .../custom_options/test_option_nargs.py | 40 ++ .../unit/commands/_utils/test_table_print.py | 83 +++ tests/unit/commands/deploy/__init__.py | 0 tests/unit/commands/deploy/test_command.py | 71 +++ .../commands/deploy/test_deploy_context.py | 141 +++++ tests/unit/commands/test_deploy.py | 20 - tests/unit/lib/deploy/__init__.py | 0 tests/unit/lib/deploy/test_deployer.py | 535 ++++++++++++++++++ .../lib/samlib/test_cloudformation_command.py | 166 ------ tests/unit/lib/utils/test_time.py | 10 +- 34 files changed, 2554 insertions(+), 331 deletions(-) create mode 100644 samcli/commands/_utils/custom_options/__init__.py create mode 100644 samcli/commands/_utils/custom_options/option_nargs.py create mode 100644 samcli/commands/_utils/table_print.py create mode 100644 samcli/commands/deploy/command.py create mode 100644 samcli/commands/deploy/deploy_context.py create mode 100644 
samcli/commands/deploy/exceptions.py create mode 100644 samcli/lib/deploy/__init__.py create mode 100644 samcli/lib/deploy/deployer.py delete mode 100644 samcli/lib/samlib/cloudformation_command.py create mode 100644 tests/integration/deploy/__init__.py create mode 100644 tests/integration/deploy/deploy_integ_base.py create mode 100644 tests/integration/deploy/test_deploy_command.py create mode 100644 tests/regression/deploy/__init__.py create mode 100644 tests/regression/deploy/regression_deploy_base.py create mode 100644 tests/regression/deploy/test_deploy_regression.py create mode 100644 tests/unit/commands/_utils/custom_options/__init__.py create mode 100644 tests/unit/commands/_utils/custom_options/test_option_nargs.py create mode 100644 tests/unit/commands/_utils/test_table_print.py create mode 100644 tests/unit/commands/deploy/__init__.py create mode 100644 tests/unit/commands/deploy/test_command.py create mode 100644 tests/unit/commands/deploy/test_deploy_context.py delete mode 100644 tests/unit/commands/test_deploy.py create mode 100644 tests/unit/lib/deploy/__init__.py create mode 100644 tests/unit/lib/deploy/test_deployer.py delete mode 100644 tests/unit/lib/samlib/test_cloudformation_command.py diff --git a/.pylintrc b/.pylintrc index 767f748fba..d450445bc8 100644 --- a/.pylintrc +++ b/.pylintrc @@ -59,7 +59,7 @@ confidence= # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" -disable=R0201,W0613,I0021,I0020,W1618,W1619,R0902,R0903,W0231,W0611,R0913,W0703,C0330,R0204,I0011,R0904,C0301 +disable=R0201,W0613,W0640,I0021,I0020,W1618,W1619,R0902,R0903,W0231,W0611,R0913,W0703,C0330,R0204,I0011,R0904,C0301 [REPORTS] diff --git a/samcli/cli/types.py b/samcli/cli/types.py index faacbacf70..e22b2fec26 100644 --- a/samcli/cli/types.py +++ b/samcli/cli/types.py @@ -15,27 +15,51 @@ class CfnParameterOverridesType(click.ParamType): parameters as "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro" """ - __EXAMPLE = "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro" + __EXAMPLE_1 = "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro" + __EXAMPLE_2 = "KeyPairName=MyKey InstanceType=t1.micro" # Regex that parses CloudFormation parameter key-value pairs: https://regex101.com/r/xqfSjW/2 - _pattern = r"(?:ParameterKey=([A-Za-z0-9\"]+),ParameterValue=(\"(?:\\.|[^\"\\]+)*\"|(?:\\.|[^ \"\\]+)+))" + _pattern_1 = r"(?:ParameterKey=([A-Za-z0-9\"]+),ParameterValue=(\"(?:\\.|[^\"\\]+)*\"|(?:\\.|[^ \"\\]+)+))" + _pattern_2 = r"(?:([A-Za-z0-9\"]+)=(\"(?:\\.|[^\"\\]+)*\"|(?:\\.|[^ \"\\]+)+))" + ordered_pattern_match = [_pattern_1, _pattern_2] + + # NOTE(TheSriram): name needs to be added to click.ParamType requires it. name = "" def convert(self, value, param, ctx): result = {} - if not value: - return result - groups = re.findall(self._pattern, value) - if not groups: - return self.fail( - "{} is not in valid format. It must look something like '{}'".format(value, self.__EXAMPLE), param, ctx - ) + # Empty tuple + if value == ("",): + return result - # 'groups' variable is a list of tuples ex: [(key1, value1), (key2, value2)] - for key, param_value in groups: - result[self._unquote(key)] = self._unquote(param_value) + for val in value: + + try: + # NOTE(TheSriram): find the first regex that matched. 
+ # pylint is concerned that we are checking at the same `val` within the loop, + # but that is the point, so disabling it. + pattern = next( + i + for i in filter( + lambda item: re.findall(item, val), self.ordered_pattern_match + ) # pylint: disable=cell-var-from-loop + ) + except StopIteration: + return self.fail( + "{} is not in valid format. It must look something like '{}' or '{}'".format( + val, self.__EXAMPLE_1, self.__EXAMPLE_2 + ), + param, + ctx, + ) + + groups = re.findall(pattern, val) + + # 'groups' variable is a list of tuples ex: [(key1, value1), (key2, value2)] + for key, param_value in groups: + result[self._unquote(key)] = self._unquote(param_value) return result @@ -80,7 +104,7 @@ class CfnMetadataType(click.ParamType): _pattern = r"([A-Za-z0-9\"]+)=([A-Za-z0-9\"]+)" # NOTE(TheSriram): name needs to be added to click.ParamType requires it. - name = "CfnMetadata" + name = "" def convert(self, value, param, ctx): result = {} @@ -103,9 +127,9 @@ def convert(self, value, param, ctx): if not groups: fail = True for group in groups: - key, value = group + key, v = group # assign to result['KeyName1'] = string and so on. - result[key] = value + result[key] = v if fail: return self.fail( @@ -113,3 +137,44 @@ def convert(self, value, param, ctx): ) return result + + +class CfnTags(click.ParamType): + """ + Custom Click options type to accept values for tag parameters. + tag parameters can be of the type KeyName1=string KeyName2=string + """ + + _EXAMPLE = "KeyName1=string KeyName2=string" + + _pattern = r"([A-Za-z0-9\"]+)=([A-Za-z0-9\"]+)" + + # NOTE(TheSriram): name needs to be added to click.ParamType requires it. + name = "" + + def convert(self, value, param, ctx): + result = {} + fail = False + # Empty tuple + if value == ("",): + return result + + for val in value: + + groups = re.findall(self._pattern, val) + + if not groups: + fail = True + for group in groups: + key, v = group + # assign to result['KeyName1'] = string and so on. + result[key] = v + + if fail: + return self.fail( + "{} is not in valid format. It must look something like '{}'".format(value, self._EXAMPLE), + param, + ctx, + ) + + return result diff --git a/samcli/commands/_utils/custom_options/__init__.py b/samcli/commands/_utils/custom_options/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/commands/_utils/custom_options/option_nargs.py b/samcli/commands/_utils/custom_options/option_nargs.py new file mode 100644 index 0000000000..1c310103a1 --- /dev/null +++ b/samcli/commands/_utils/custom_options/option_nargs.py @@ -0,0 +1,50 @@ +""" +Custom Click options for multiple arguments +""" + +import click + + +class OptionNargs(click.Option): + """ + A custom option class that allows parsing for multiple arguments + for an option, when the number of arguments for an option are unknown. + """ + + def __init__(self, *args, **kwargs): + self.nargs = kwargs.pop("nargs", -1) + super(OptionNargs, self).__init__(*args, **kwargs) + self._previous_parser_process = None + self._nargs_parser = None + + def add_to_parser(self, parser, ctx): + def parser_process(value, state): + # look ahead into arguments till we reach the next option. 
+ # the next option starts with a prefix which is either '-' or '--' + next_option = False + value = [value] + + while state.rargs and not next_option: + for prefix in self._nargs_parser.prefixes: + if state.rargs[0].startswith(prefix): + next_option = True + if not next_option: + value.append(state.rargs.pop(0)) + + value = tuple(value) + + # call the actual process + self._previous_parser_process(value, state) + + # Add current option to Parser by calling add_to_parser on the super class. + super(OptionNargs, self).add_to_parser(parser, ctx) + for name in self.opts: + # Get OptionParser object for current option + option_parser = getattr(parser, "_long_opt").get(name) or getattr(parser, "_short_opt").get(name) + if option_parser: + # Monkey patch `process` method for click.parser.Option class. + # This allows for setting multiple parsed values into current option arguments + self._nargs_parser = option_parser + self._previous_parser_process = option_parser.process + option_parser.process = parser_process + break diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 6537f6cabe..71152834d4 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -7,7 +7,9 @@ from functools import partial import click -from samcli.cli.types import CfnParameterOverridesType, CfnMetadataType +from samcli.cli.types import CfnParameterOverridesType, CfnMetadataType, CfnTags +from samcli.commands._utils.custom_options.option_nargs import OptionNargs + _TEMPLATE_OPTION_DEFAULT_VALUE = "template.[yaml|yml]" @@ -113,6 +115,7 @@ def docker_click_options(): def parameter_override_click_option(): return click.option( "--parameter-overrides", + cls=OptionNargs, type=CfnParameterOverridesType(), help="Optional. A string that contains CloudFormation parameter overrides encoded as key=value " "pairs. Use the same format as the AWS CLI, e.g. 'ParameterKey=KeyPairName," @@ -134,3 +137,58 @@ def metadata_click_option(): def metadata_override_option(f): return metadata_click_option()(f) + + +def capabilities_click_option(): + return click.option( + "--capabilities", + cls=OptionNargs, + type=click.STRING, + required=True, + help="A list of capabilities that you must specify" + "before AWS Cloudformation can create certain stacks. Some stack tem-" + "plates might include resources that can affect permissions in your AWS" + "account, for example, by creating new AWS Identity and Access Manage-" + "ment (IAM) users. For those stacks, you must explicitly acknowledge" + "their capabilities by specifying this parameter. The only valid values" + "are CAPABILITY_IAM and CAPABILITY_NAMED_IAM. If you have IAM resources," + "you can specify either capability. If you have IAM resources with cus-" + "tom names, you must specify CAPABILITY_NAMED_IAM. If you don't specify" + "this parameter, this action returns an InsufficientCapabilities error.", + ) + + +def capabilities_override_option(f): + return capabilities_click_option()(f) + + +def tags_click_option(): + return click.option( + "--tags", + cls=OptionNargs, + type=CfnTags(), + required=False, + help="A list of tags to associate with the stack that is created or updated." 
+ "AWS CloudFormation also propagates these tags to resources " + "in the stack if the resource supports it.", + ) + + +def tags_override_option(f): + return tags_click_option()(f) + + +def notification_arns_click_option(): + return click.option( + "--notification-arns", + cls=OptionNargs, + type=click.STRING, + required=False, + help="Amazon Simple Notification Service topic" + "Amazon Resource Names (ARNs) that AWS CloudFormation associates with" + "the stack.", + ) + + +def notification_arns_override_option(f): + return notification_arns_click_option()(f) diff --git a/samcli/commands/_utils/table_print.py b/samcli/commands/_utils/table_print.py new file mode 100644 index 0000000000..76672632ff --- /dev/null +++ b/samcli/commands/_utils/table_print.py @@ -0,0 +1,110 @@ +""" +Utilities for table pretty printing using click +""" +from itertools import count, zip_longest +import textwrap +from functools import wraps + +import click + + +def pprint_column_names(format_string, format_kwargs, margin=None, table_header=None): + """ + + :param format_string: format string to be used that has the strings, minimum width to be replaced + :param format_kwargs: dictionary that is supplied to the format_string to format the string + :param margin: margin that is to be reduced from column width for columnar text. + :param table_header: Supplied table header + :return: boilerplate table string + """ + + min_width = 100 + min_margin = 2 + + def pprint_wrap(func): + # Calculate terminal width, number of columns in the table + width, _ = click.get_terminal_size() + # For UX purposes, set a minimum width for the table to be usable + # and usable_width keeps margins in mind. + width = max(width, min_width) + + total_args = len(format_kwargs) + if not total_args: + raise ValueError("Number of arguments supplied should be > 0 , format_kwargs: {}".format(format_kwargs)) + + # Get width to be a usable number so that we can equally divide the space for all the columns. + # Can be refactored, to allow for modularity in the shaping of the columns. + width = width - (width % total_args) + usable_width_no_margin = int(width) - 1 + usable_width = int((usable_width_no_margin - (margin if margin else min_margin))) + if total_args > int(usable_width / 2): + raise ValueError("Total number of columns exceed available width") + width_per_column = int(usable_width / total_args) + + # The final column should not roll over into the next line + final_arg_width = width_per_column - 1 + + # the format string contains minimumwidth that need to be set. + # eg: "{a:{0}}} {b:<{1}}} {c:{2}}}" + format_args = [width_per_column for _ in range(total_args - 1)] + format_args.extend([final_arg_width]) + + # format arguments are now ready for setting minimumwidth + + @wraps(func) + def wrap(*args, **kwargs): + # The table is setup with the column names, format_string contains the column names. 
+ if table_header: + click.secho("\n" + table_header) + click.secho("-" * usable_width) + click.secho(format_string.format(*format_args, **format_kwargs)) + click.secho("-" * usable_width) + # format_args which have the minimumwidth set per {} in the format_string is passed to the function + # which this decorator wraps, so that the function has access to the correct format_args + kwargs["format_args"] = format_args + kwargs["width"] = width_per_column + kwargs["margin"] = margin if margin else min_margin + result = func(*args, **kwargs) + # Complete the table + click.secho("-" * usable_width) + return result + + return wrap + + return pprint_wrap + + +def wrapped_text_generator(texts, width, margin): + """ + + Return a generator where the contents are wrapped text to a specified width. + + :param texts: list of text that needs to be wrapped at specified width + :param width: width of the text to be wrapped + :param margin: margin to be reduced from width for cleaner UX + :return: generator of wrapped text + """ + for text in texts: + yield textwrap.wrap(text, width=width - margin) + + +def pprint_columns(columns, width, margin, format_string, format_args, columns_dict): + """ + + Print columns based on list of columnar text, associated formatting string and associated format arguments. + + :param columns: list of columnnar text that go into columns as specified by the format_string + :param width: width of the text to be wrapped + :param margin: margin to be reduced from width for cleaner UX + :param format_string: A format string that has both width and text specifiers set. + :param format_args: list of offset specifiers + :param columns_dict: arguments dictionary that have dummy values per column + :return: + """ + for columns_text in zip_longest(*wrapped_text_generator(columns, width, margin), fillvalue=""): + counter = count() + # Generate columnar data that correspond to the column names and update them. + for k, _ in columns_dict.items(): + columns_dict[k] = columns_text[next(counter)] + + click.secho(format_string.format(*format_args, **columns_dict)) diff --git a/samcli/commands/deploy/__init__.py b/samcli/commands/deploy/__init__.py index 7e3bd984ab..ff2b95977d 100644 --- a/samcli/commands/deploy/__init__.py +++ b/samcli/commands/deploy/__init__.py @@ -1,54 +1,6 @@ """ -CLI command for "deploy" command +`sam deploy` command """ -import click - -from samcli.cli.main import pass_context, common_options -from samcli.lib.samlib.cloudformation_command import execute_command -from samcli.commands.exceptions import UserException -from samcli.lib.telemetry.metrics import track_command - - -SHORT_HELP = "Deploy an AWS SAM application. This is an alias for 'aws cloudformation deploy'." - - -HELP_TEXT = """The sam deploy command creates a Cloudformation Stack and deploys your resources. - -\b -e.g. sam deploy --template-file packaged.yaml --stack-name sam-app --capabilities CAPABILITY_IAM - -\b -This is an alias for aws cloudformation deploy. To learn about other parameters you can use, -run aws cloudformation deploy help. -""" - - -@click.command("deploy", short_help=SHORT_HELP, context_settings={"ignore_unknown_options": True}, help=HELP_TEXT) -@click.argument("args", nargs=-1, type=click.UNPROCESSED) -@click.option( - "--template-file", required=True, type=click.Path(), help="The path where your AWS SAM template is located" -) -@click.option( - "--stack-name", - required=True, - help="The name of the AWS CloudFormation stack you're deploying to. 
" - "If you specify an existing stack, the command updates the stack. " - "If you specify a new stack, the command creates it.", -) -@common_options -@pass_context -@track_command -def cli(ctx, args, template_file, stack_name): - - # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing - do_cli(args, template_file, stack_name) # pragma: no cover - - -def do_cli(args, template_file, stack_name): - args = args + ("--stack-name", stack_name) - - try: - execute_command("deploy", args, template_file=template_file) - except OSError as ex: - raise UserException(str(ex)) +# Expose the cli object here +from .command import cli # noqa diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py new file mode 100644 index 0000000000..68314c48b3 --- /dev/null +++ b/samcli/commands/deploy/command.py @@ -0,0 +1,185 @@ +""" +CLI command for "deploy" command +""" + +import click + + +from samcli.commands._utils.options import ( + parameter_override_option, + capabilities_override_option, + tags_override_option, + notification_arns_override_option, +) +from samcli.cli.main import pass_context, common_options, aws_creds_options +from samcli.lib.telemetry.metrics import track_command + + +SHORT_HELP = "Deploy an AWS SAM application." + + +HELP_TEXT = """The sam deploy command creates a Cloudformation Stack and deploys your resources. + +\b +e.g. sam deploy --template-file packaged.yaml --stack-name sam-app --capabilities CAPABILITY_IAM + +\b +""" + + +@click.command( + "deploy", + short_help=SHORT_HELP, + context_settings={"ignore_unknown_options": False, "allow_interspersed_args": True, "allow_extra_args": True}, + help=HELP_TEXT, +) +@click.option( + "--template-file", + "--template", + "-t", + required=True, + type=click.Path(), + help="The path where your AWS SAM template is located", +) +@click.option( + "--stack-name", + required=True, + help="The name of the AWS CloudFormation stack you're deploying to. " + "If you specify an existing stack, the command updates the stack. " + "If you specify a new stack, the command creates it.", +) +@click.option( + "--s3-bucket", + required=False, + help="The name of the S3 bucket where this command uploads your " + "CloudFormation template. This is required the deployments of " + "templates sized greater than 51,200 bytes", +) +@click.option( + "--force-upload", + required=False, + is_flag=True, + help="Indicates whether to override existing files in the S3 bucket. " + "Specify this flag to upload artifacts even if they" + "match existing artifacts in the S3 bucket.", +) +@click.option( + "--s3-prefix", + required=False, + help="A prefix name that the command adds to the " + "artifacts' name when it uploads them to the S3 bucket." + "The prefix name is a path name (folder name) for the S3 bucket.", +) +@click.option( + "--kms-key-id", + required=False, + help="The ID of an AWS KMS key that the command uses" " to encrypt artifacts that are at rest in the S3 bucket.", +) +@click.option( + "--no-execute-changeset", + required=False, + is_flag=True, + help="Indicates whether to execute the" + "change set. Specify this flag if you want to view your stack changes" + "before executing the change set. The command creates an AWS CloudForma-" + "tion change set and then exits without executing the change set. 
if " + "the changeset looks satisfactory, the stack changes can be made by " + "running the same command without specifying `--no-execute-changeset`", +) +@click.option( + "--role-arn", + required=False, + help="The Amazon Resource Name (ARN) of an AWS Identity" + "and Access Management (IAM) role that AWS CloudFormation assumes when" + "executing the change set.", +) +@click.option( + "--fail-on-empty-changeset", + required=False, + is_flag=True, + help="Specify if the CLI should return a non-zero exit code if there are no" + "changes to be made to the stack. The default behavior is to return a" + "non-zero exit code.", +) +@notification_arns_override_option +@tags_override_option +@parameter_override_option +@capabilities_override_option +@aws_creds_options +@common_options +@pass_context +@track_command +def cli( + ctx, + template_file, + stack_name, + s3_bucket, + force_upload, + s3_prefix, + kms_key_id, + parameter_overrides, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + fail_on_empty_changeset, + tags, +): + + # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing + do_cli( + template_file, + stack_name, + s3_bucket, + force_upload, + s3_prefix, + kms_key_id, + parameter_overrides, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + fail_on_empty_changeset, + tags, + ctx.region, + ctx.profile, + ) # pragma: no cover + + +def do_cli( + template_file, + stack_name, + s3_bucket, + force_upload, + s3_prefix, + kms_key_id, + parameter_overrides, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + fail_on_empty_changeset, + tags, + region, + profile, +): + from samcli.commands.deploy.deploy_context import DeployContext + + with DeployContext( + template_file=template_file, + stack_name=stack_name, + s3_bucket=s3_bucket, + force_upload=force_upload, + s3_prefix=s3_prefix, + kms_key_id=kms_key_id, + parameter_overrides=parameter_overrides, + capabilities=capabilities, + no_execute_changeset=no_execute_changeset, + role_arn=role_arn, + notification_arns=notification_arns, + fail_on_empty_changeset=fail_on_empty_changeset, + tags=tags, + region=region, + profile=profile, + ) as deploy_context: + deploy_context.run() diff --git a/samcli/commands/deploy/deploy_context.py b/samcli/commands/deploy/deploy_context.py new file mode 100644 index 0000000000..4eaf60b2cc --- /dev/null +++ b/samcli/commands/deploy/deploy_context.py @@ -0,0 +1,185 @@ +""" +Deploy a SAM stack +""" + +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import os +import logging +import boto3 +import click + +from samcli.commands.deploy import exceptions as deploy_exceptions +from samcli.lib.deploy.deployer import Deployer +from samcli.lib.package.s3_uploader import S3Uploader +from samcli.yamlhelper import yaml_parse + +LOG = logging.getLogger(__name__) + + +class DeployContext: + + MSG_NO_EXECUTE_CHANGESET = "\nChangeset created successfully. 
\n" + + MSG_EXECUTE_SUCCESS = "\nSuccessfully created/updated stack - {stack_name}\n" + + def __init__( + self, + template_file, + stack_name, + s3_bucket, + force_upload, + s3_prefix, + kms_key_id, + parameter_overrides, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + fail_on_empty_changeset, + tags, + region, + profile, + ): + self.template_file = template_file + self.stack_name = stack_name + self.s3_bucket = s3_bucket + self.force_upload = force_upload + self.s3_prefix = s3_prefix + self.kms_key_id = kms_key_id + self.parameter_overrides = parameter_overrides + self.capabilities = capabilities + self.no_execute_changeset = no_execute_changeset + self.role_arn = role_arn + self.notification_arns = notification_arns + self.fail_on_empty_changeset = fail_on_empty_changeset + self.tags = tags + self.region = region + self.profile = profile + self.s3_uploader = None + self.deployer = None + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + def run(self): + + # Parse parameters + with open(self.template_file, "r") as handle: + template_str = handle.read() + + template_dict = yaml_parse(template_str) + + if not isinstance(template_dict, dict): + raise deploy_exceptions.DeployFailedError( + stack_name=self.stack_name, msg="{} not in required format".format(self.template_file) + ) + + parameters = self.merge_parameters(template_dict, self.parameter_overrides) + + template_size = os.path.getsize(self.template_file) + if template_size > 51200 and not self.s3_bucket: + raise deploy_exceptions.DeployBucketRequiredError() + + session = boto3.Session(profile_name=self.profile if self.profile else None) + cloudformation_client = session.client("cloudformation", region_name=self.region if self.region else None) + + if self.s3_bucket: + s3_client = session.client("s3", region_name=self.region if self.region else None) + + self.s3_uploader = S3Uploader(s3_client, self.s3_bucket, self.s3_prefix, self.kms_key_id, self.force_upload) + + self.deployer = Deployer(cloudformation_client) + + return self.deploy( + self.stack_name, + template_str, + parameters, + self.capabilities, + self.no_execute_changeset, + self.role_arn, + self.notification_arns, + self.s3_uploader, + [{"Key": key, "Value": value} for key, value in self.tags.items()] if self.tags else [], + self.fail_on_empty_changeset, + ) + + def deploy( + self, + stack_name, + template_str, + parameters, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + s3_uploader, + tags, + fail_on_empty_changeset=True, + ): + try: + result, changeset_type = self.deployer.create_and_wait_for_changeset( + stack_name=stack_name, + cfn_template=template_str, + parameter_values=parameters, + capabilities=capabilities, + role_arn=role_arn, + notification_arns=notification_arns, + s3_uploader=s3_uploader, + tags=tags, + ) + + if not no_execute_changeset: + self.deployer.execute_changeset(result["Id"], stack_name) + self.deployer.wait_for_execute(stack_name, changeset_type) + click.echo(self.MSG_EXECUTE_SUCCESS.format(stack_name=stack_name)) + else: + click.echo(self.MSG_NO_EXECUTE_CHANGESET.format(changeset_id=result["Id"])) + + except deploy_exceptions.ChangeEmptyError as ex: + if fail_on_empty_changeset: + raise + click.echo(str(ex)) + + def merge_parameters(self, template_dict, parameter_overrides): + """ + CloudFormation CreateChangeset requires a value for every parameter + from the template, either specifying a new value or use previous value. 
+ For convenience, this method will accept new parameter values and + generates a dict of all parameters in a format that ChangeSet API + will accept + + :param parameter_overrides: + :return: + """ + parameter_values = [] + + if not isinstance(template_dict.get("Parameters", None), dict): + return parameter_values + + for key, _ in template_dict["Parameters"].items(): + + obj = {"ParameterKey": key} + + if key in parameter_overrides: + obj["ParameterValue"] = parameter_overrides[key] + else: + obj["UsePreviousValue"] = True + + parameter_values.append(obj) + + return parameter_values diff --git a/samcli/commands/deploy/exceptions.py b/samcli/commands/deploy/exceptions.py new file mode 100644 index 0000000000..851ba9a999 --- /dev/null +++ b/samcli/commands/deploy/exceptions.py @@ -0,0 +1,54 @@ +""" +Exceptions that are raised by sam deploy +""" +from samcli.commands.exceptions import UserException + + +class ChangeEmptyError(UserException): + def __init__(self, stack_name): + self.stack_name = stack_name + message_fmt = "No changes to deploy.Stack {stack_name} is up to date" + super(ChangeEmptyError, self).__init__(message=message_fmt.format(stack_name=self.stack_name)) + + +class ChangeSetError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + message_fmt = "Failed to create changeset for the stack: {stack_name}, {msg}" + super(ChangeSetError, self).__init__(message=message_fmt.format(stack_name=self.stack_name, msg=self.msg)) + + +class DeployFailedError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Failed to create/update the stack: {stack_name}, {msg}" + + super(DeployFailedError, self).__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) + + +class DeployStackOutPutFailedError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Failed to get outputs from stack: {stack_name}, {msg}" + + super(DeployStackOutPutFailedError, self).__init__( + message=message_fmt.format(stack_name=self.stack_name, msg=msg) + ) + + +class DeployBucketRequiredError(UserException): + def __init__(self): + + message_fmt = ( + "Templates with a size greater than 51,200 bytes must be deployed " + "via an S3 Bucket. Please add the --s3-bucket parameter to your " + "command. The local template will be copied to that S3 bucket and " + "then deployed." + ) + + super(DeployBucketRequiredError, self).__init__(message=message_fmt) diff --git a/samcli/lib/deploy/__init__.py b/samcli/lib/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py new file mode 100644 index 0000000000..b7e3a77969 --- /dev/null +++ b/samcli/lib/deploy/deployer.py @@ -0,0 +1,418 @@ +""" +Cloudformation deploy class which also streams events and changeset information +""" + +# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+
+import sys
+import math
+from collections import OrderedDict
+import logging
+import time
+from datetime import datetime
+
+import botocore
+
+from samcli.commands.deploy.exceptions import DeployFailedError, ChangeSetError, DeployStackOutPutFailedError
+from samcli.commands._utils.table_print import pprint_column_names, pprint_columns
+from samcli.commands.deploy import exceptions as deploy_exceptions
+from samcli.lib.package.artifact_exporter import mktempfile, parse_s3_url
+from samcli.lib.utils.time import utc_to_timestamp
+
+LOG = logging.getLogger(__name__)
+
+DESCRIBE_STACK_EVENTS_FORMAT_STRING = (
+    "{ResourceStatus:<{0}} {ResourceType:<{1}} {LogicalResourceId:<{2}} {ResourceStatusReason:<{3}}"
+)
+DESCRIBE_STACK_EVENTS_DEFAULT_ARGS = OrderedDict(
+    {
+        "ResourceStatus": "ResourceStatus",
+        "ResourceType": "ResourceType",
+        "LogicalResourceId": "LogicalResourceId",
+        "ResourceStatusReason": "ResourceStatusReason",
+    }
+)
+
+DESCRIBE_STACK_EVENTS_TABLE_HEADER_NAME = "**CloudFormation events from changeset**"
+
+DESCRIBE_CHANGESET_FORMAT_STRING = "{Operation:<{0}} {LogicalResourceId:<{1}} {ResourceType:<{2}}"
+DESCRIBE_CHANGESET_DEFAULT_ARGS = OrderedDict(
+    {"Operation": "Operation", "LogicalResourceId": "LogicalResourceId", "ResourceType": "ResourceType"}
+)
+
+DESCRIBE_CHANGESET_TABLE_HEADER_NAME = "**CloudFormation stack changeset**"
+
+OUTPUTS_FORMAT_STRING = "{OutputKey:<{0}} {OutputValue:<{1}} {Description:<{2}}"
+OUTPUTS_DEFAULTS_ARGS = OrderedDict(
+    {"OutputKey": "OutputKey", "OutputValue": "OutputValue", "Description": "Description"}
+)
+
+
+class Deployer:
+    def __init__(self, cloudformation_client, changeset_prefix="samcli-deploy"):
+        self._client = cloudformation_client
+        self.changeset_prefix = changeset_prefix
+        # 500ms of sleep time between stack checks and describe stack events.
+        self.client_sleep = 0.5
+        # 2000ms of backoff time, used exponentially when there are exceptions during describe stack events
+        self.backoff = 2
+        # Maximum number of attempts before raising exception back up the chain.
+        self.max_attempts = 3
+
+    def has_stack(self, stack_name):
+        """
+        Checks if a CloudFormation stack with the given name exists
+
+        :param stack_name: Name or ID of the stack
+        :return: True if stack exists. False otherwise
+        """
+        try:
+            resp = self._client.describe_stacks(StackName=stack_name)
+            if not resp["Stacks"]:
+                return False
+
+            # When you run CreateChangeSet on a stack that does not exist,
+            # CloudFormation will create a stack and set its status to
+            # REVIEW_IN_PROGRESS. However, this stack cannot be manipulated
+            # by "update" commands. Under these circumstances, we treat this
+            # stack as if it does not exist and call CreateChangeSet with
+            # ChangeSetType set to CREATE and not UPDATE.
+            stack = resp["Stacks"][0]
+            return stack["StackStatus"] != "REVIEW_IN_PROGRESS"
+
+        except botocore.exceptions.ClientError as e:
+            # If a stack does not exist, describe_stacks will throw an
+            # exception. Unfortunately we don't have a better way than parsing
+            # the exception msg to understand the nature of this exception.
+
+            if "Stack with id {0} does not exist".format(stack_name) in str(e):
+                LOG.debug("Stack with id %s does not exist", stack_name)
+                return False
+
+            # We don't know anything about this exception. 
Don't handle + LOG.debug("Unable to get stack details.", exc_info=e) + raise e + + def create_changeset( + self, stack_name, cfn_template, parameter_values, capabilities, role_arn, notification_arns, s3_uploader, tags + ): + """ + Call Cloudformation to create a changeset and wait for it to complete + + :param stack_name: Name or ID of stack + :param cfn_template: CloudFormation template string + :param parameter_values: Template parameters object + :param capabilities: Array of capabilities passed to CloudFormation + :param tags: Array of tags passed to CloudFormation + :return: + """ + + if not self.has_stack(stack_name): + changeset_type = "CREATE" + # When creating a new stack, UsePreviousValue=True is invalid. + # For such parameters, users should either override with new value, + # or set a Default value in template to successfully create a stack. + parameter_values = [x for x in parameter_values if not x.get("UsePreviousValue", False)] + else: + changeset_type = "UPDATE" + # UsePreviousValue not valid if parameter is new + summary = self._client.get_template_summary(StackName=stack_name) + existing_parameters = [parameter["ParameterKey"] for parameter in summary["Parameters"]] + parameter_values = [ + x + for x in parameter_values + if not (x.get("UsePreviousValue", False) and x["ParameterKey"] not in existing_parameters) + ] + + # Each changeset will get a unique name based on time. + # Description is also setup based on current date and that SAM CLI is used. + kwargs = { + "ChangeSetName": self.changeset_prefix + str(int(time.time())), + "StackName": stack_name, + "TemplateBody": cfn_template, + "ChangeSetType": changeset_type, + "Parameters": parameter_values, + "Capabilities": capabilities, + "Description": "Created by SAM CLI at {0} UTC".format(datetime.utcnow().isoformat()), + "Tags": tags, + } + + # If an S3 uploader is available, use TemplateURL to deploy rather than + # TemplateBody. This is required for large templates. + if s3_uploader: + with mktempfile() as temporary_file: + temporary_file.write(kwargs.pop("TemplateBody")) + temporary_file.flush() + + # TemplateUrl property requires S3 URL to be in path-style format + parts = parse_s3_url( + s3_uploader.upload_with_dedup(temporary_file.name, "template"), version_property="Version" + ) + kwargs["TemplateURL"] = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) + + # don't set these arguments if not specified to use existing values + if role_arn is not None: + kwargs["RoleARN"] = role_arn + if notification_arns is not None: + kwargs["NotificationARNs"] = notification_arns + try: + resp = self._client.create_change_set(**kwargs) + return resp, changeset_type + except Exception as ex: + LOG.debug("Unable to create changeset", exc_info=ex) + raise ChangeSetError(stack_name=stack_name, msg=str(ex)) + + @pprint_column_names( + format_string=DESCRIBE_CHANGESET_FORMAT_STRING, + format_kwargs=DESCRIBE_CHANGESET_DEFAULT_ARGS, + table_header=DESCRIBE_CHANGESET_TABLE_HEADER_NAME, + ) + def describe_changeset(self, change_set_id, stack_name, **kwargs): + """ + Call Cloudformation to describe a changeset + + :param change_set_id: ID of the changeset + :param stack_name: Name of the CloudFormation stack + :return: dictionary of changes described in the changeset. 
+ """ + paginator = self._client.get_paginator("describe_change_set") + response_iterator = paginator.paginate(ChangeSetName=change_set_id, StackName=stack_name) + changes = {"Add": [], "Modify": [], "Remove": []} + changes_showcase = {"Add": "+ Add", "Modify": "* Modify", "Remove": "- Delete"} + changeset = False + for item in response_iterator: + cf_changes = item.get("Changes") + for change in cf_changes: + changeset = True + resource_props = change.get("ResourceChange") + action = resource_props.get("Action") + changes[action].append( + { + "LogicalResourceId": resource_props.get("LogicalResourceId"), + "ResourceType": resource_props.get("ResourceType"), + } + ) + + for k, v in changes.items(): + for value in v: + pprint_columns( + columns=[changes_showcase.get(k, k), value["LogicalResourceId"], value["ResourceType"]], + width=kwargs["width"], + margin=kwargs["margin"], + format_string=DESCRIBE_CHANGESET_FORMAT_STRING, + format_args=kwargs["format_args"], + columns_dict=DESCRIBE_CHANGESET_DEFAULT_ARGS.copy(), + ) + + if not changeset: + # There can be cases where there are no changes, + # but could be an an addition of a SNS notification topic. + pprint_columns( + columns=["-", "-", "-"], + width=kwargs["width"], + margin=kwargs["margin"], + format_string=DESCRIBE_CHANGESET_FORMAT_STRING, + format_args=kwargs["format_args"], + columns_dict=DESCRIBE_CHANGESET_DEFAULT_ARGS.copy(), + ) + + return changes + + def wait_for_changeset(self, changeset_id, stack_name): + """ + Waits until the changeset creation completes + + :param changeset_id: ID or name of the changeset + :param stack_name: Stack name + :return: Latest status of the create-change-set operation + """ + sys.stdout.write("\nWaiting for changeset to be created..\n") + sys.stdout.flush() + + # Wait for changeset to be created + waiter = self._client.get_waiter("change_set_create_complete") + # Poll every 5 seconds. Changeset creation should be fast + waiter_config = {"Delay": 5} + try: + waiter.wait(ChangeSetName=changeset_id, StackName=stack_name, WaiterConfig=waiter_config) + except botocore.exceptions.WaiterError as ex: + LOG.debug("Create changeset waiter exception", exc_info=ex) + + resp = ex.last_response + status = resp["Status"] + reason = resp["StatusReason"] + + if ( + status == "FAILED" + and "The submitted information didn't contain changes." in reason + or "No updates are to be performed" in reason + ): + raise deploy_exceptions.ChangeEmptyError(stack_name=stack_name) + + raise ChangeSetError( + stack_name=stack_name, msg="ex: {0} Status: {1}. 
Reason: {2}".format(ex, status, reason) + ) + + def execute_changeset(self, changeset_id, stack_name): + """ + Calls CloudFormation to execute changeset + + :param changeset_id: ID of the changeset + :param stack_name: Name or ID of the stack + :return: Response from execute-change-set call + """ + try: + return self._client.execute_change_set(ChangeSetName=changeset_id, StackName=stack_name) + except botocore.exceptions.ClientError as ex: + raise DeployFailedError(stack_name=stack_name, msg=str(ex)) + + def get_last_event_time(self, stack_name): + """ + Finds the last event time stamp thats present for the stack, if not get the current time + :param stack_name: Name or ID of the stack + :return: unix epoch + """ + try: + return utc_to_timestamp( + self._client.describe_stack_events(StackName=stack_name)["StackEvents"][0]["Timestamp"] + ) + except KeyError: + return time.time() + + @pprint_column_names( + format_string=DESCRIBE_STACK_EVENTS_FORMAT_STRING, + format_kwargs=DESCRIBE_STACK_EVENTS_DEFAULT_ARGS, + table_header=DESCRIBE_STACK_EVENTS_TABLE_HEADER_NAME, + ) + def describe_stack_events(self, stack_name, time_stamp_marker, **kwargs): + """ + Calls CloudFormation to get current stack events + :param stack_name: Name or ID of the stack + :param time_stamp_marker: last event time on the stack to start streaming events from. + :return: + """ + + stack_change_in_progress = True + events = set() + retry_attempts = 0 + + while stack_change_in_progress and retry_attempts <= self.max_attempts: + try: + + # Only sleep if there have been no retry_attempts + time.sleep(self.client_sleep if retry_attempts == 0 else 0) + describe_stacks_resp = self._client.describe_stacks(StackName=stack_name) + paginator = self._client.get_paginator("describe_stack_events") + response_iterator = paginator.paginate(StackName=stack_name) + stack_status = describe_stacks_resp["Stacks"][0]["StackStatus"] + for event_items in response_iterator: + for event in event_items["StackEvents"]: + if event["EventId"] not in events and utc_to_timestamp(event["Timestamp"]) > time_stamp_marker: + events.add(event["EventId"]) + + pprint_columns( + columns=[ + event["ResourceStatus"], + event["ResourceType"], + event["LogicalResourceId"], + event.get("ResourceStatusReason", "-"), + ], + width=kwargs["width"], + margin=kwargs["margin"], + format_string=DESCRIBE_STACK_EVENTS_FORMAT_STRING, + format_args=kwargs["format_args"], + columns_dict=DESCRIBE_STACK_EVENTS_DEFAULT_ARGS.copy(), + ) + + if self._check_stack_complete(stack_status): + stack_change_in_progress = False + break + except botocore.exceptions.ClientError: + retry_attempts = retry_attempts + 1 + if retry_attempts > self.max_attempts: + raise + # Sleep in exponential backoff mode + time.sleep(math.pow(self.backoff, retry_attempts)) + + def _check_stack_complete(self, status): + return "COMPLETE" in status and "CLEANUP" not in status + + def wait_for_execute(self, stack_name, changeset_type): + + sys.stdout.write("\nWaiting for stack create/update to complete\n") + sys.stdout.flush() + + self.describe_stack_events(stack_name, self.get_last_event_time(stack_name)) + + # Pick the right waiter + if changeset_type == "CREATE": + waiter = self._client.get_waiter("stack_create_complete") + elif changeset_type == "UPDATE": + waiter = self._client.get_waiter("stack_update_complete") + else: + raise RuntimeError("Invalid changeset type {0}".format(changeset_type)) + + # Poll every 5 seconds. 
Optimizing for the case when the stack has only + # minimal changes, such the Code for Lambda Function + waiter_config = {"Delay": 5, "MaxAttempts": 720} + + try: + waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) + except botocore.exceptions.WaiterError as ex: + LOG.debug("Execute changeset waiter exception", exc_info=ex) + + raise deploy_exceptions.DeployFailedError(stack_name=stack_name, msg=str(ex)) + + self.get_stack_outputs(stack_name=stack_name) + + def create_and_wait_for_changeset( + self, stack_name, cfn_template, parameter_values, capabilities, role_arn, notification_arns, s3_uploader, tags + ): + try: + result, changeset_type = self.create_changeset( + stack_name, cfn_template, parameter_values, capabilities, role_arn, notification_arns, s3_uploader, tags + ) + self.wait_for_changeset(result["Id"], stack_name) + self.describe_changeset(result["Id"], stack_name) + return result, changeset_type + except botocore.exceptions.ClientError as ex: + raise DeployFailedError(stack_name=stack_name, msg=str(ex)) + + @pprint_column_names(format_string=OUTPUTS_FORMAT_STRING, format_kwargs=OUTPUTS_DEFAULTS_ARGS) + def _stack_outputs(self, stack_outputs, **kwargs): + for output in stack_outputs: + pprint_columns( + columns=[output["OutputKey"], output["OutputValue"], output.get("Description", "-")], + width=kwargs["width"], + margin=kwargs["margin"], + format_string=OUTPUTS_FORMAT_STRING, + format_args=kwargs["format_args"], + columns_dict=OUTPUTS_DEFAULTS_ARGS.copy(), + ) + + def get_stack_outputs(self, stack_name, echo=True): + try: + stacks_description = self._client.describe_stacks(StackName=stack_name) + try: + outputs = stacks_description["Stacks"][0]["Outputs"] + if echo: + sys.stdout.write("\nStack {stack_name} outputs:\n".format(stack_name=stack_name)) + sys.stdout.flush() + self._stack_outputs(stack_outputs=outputs) + return outputs + except KeyError: + return None + + except botocore.exceptions.ClientError as ex: + raise DeployStackOutPutFailedError(stack_name=stack_name, msg=str(ex)) diff --git a/samcli/lib/samlib/cloudformation_command.py b/samcli/lib/samlib/cloudformation_command.py deleted file mode 100644 index e9bdbb4304..0000000000 --- a/samcli/lib/samlib/cloudformation_command.py +++ /dev/null @@ -1,55 +0,0 @@ -""" -Utility to call cloudformation command with args -""" - -import os -import logging -import platform -import subprocess -import sys - -from samcli.cli.global_config import GlobalConfig - -LOG = logging.getLogger(__name__) - - -def execute_command(command, args, template_file): - LOG.debug("%s command is called", command) - try: - aws_cmd = find_executable("aws") - - # Add SAM CLI information for AWS CLI to know about the caller. 
- gc = GlobalConfig() - env = os.environ.copy() - if gc.telemetry_enabled: - env["AWS_EXECUTION_ENV"] = "SAM-" + gc.installation_id - - args = list(args) - if template_file: - # Since --template-file was parsed separately, add it here manually - args.extend(["--template-file", template_file]) - - subprocess.check_call([aws_cmd, "cloudformation", command] + args, env=env) - LOG.debug("%s command successful", command) - except subprocess.CalledProcessError as e: - # Underlying aws command will print the exception to the user - LOG.debug("Exception: %s", e) - sys.exit(e.returncode) - - -def find_executable(execname): - - if platform.system().lower() == "windows": - options = ["{}.cmd".format(execname), "{}.exe".format(execname), execname] - else: - options = [execname] - - for name in options: - try: - subprocess.Popen([name], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - # No exception. Let's pick this - return name - except OSError as ex: - LOG.debug("Unable to find executable %s", name, exc_info=ex) - - raise OSError("Cannot find AWS CLI installation, was looking at executables with names: {}".format(options)) diff --git a/samcli/lib/utils/time.py b/samcli/lib/utils/time.py index 3989cc03ad..02b078337f 100644 --- a/samcli/lib/utils/time.py +++ b/samcli/lib/utils/time.py @@ -65,6 +65,16 @@ def to_timestamp(some_time): return int((some_time - datetime.datetime(1970, 1, 1)).total_seconds() * 1000.0) +def utc_to_timestamp(utc): + """ + Converts utc timestamp with tz_info set to utc to Unix timestamp + :param utc: datetime.datetime + :return: UNIX timestamp + """ + + return to_timestamp(utc.replace(tzinfo=None)) + + def to_utc(some_time): """ Convert the given date to UTC, if the date contains a timezone. diff --git a/tests/integration/deploy/__init__.py b/tests/integration/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/integration/deploy/deploy_integ_base.py b/tests/integration/deploy/deploy_integ_base.py new file mode 100644 index 0000000000..1e68f8878f --- /dev/null +++ b/tests/integration/deploy/deploy_integ_base.py @@ -0,0 +1,83 @@ +import os +import uuid +import json +import time +from pathlib import Path +from unittest import TestCase + +import boto3 + + +class DeployIntegBase(TestCase): + @classmethod + def setUpClass(cls): + pass + + def setUp(self): + super(DeployIntegBase, self).setUp() + + def tearDown(self): + super(DeployIntegBase, self).tearDown() + + def base_command(self): + command = "sam" + if os.getenv("SAM_CLI_DEV"): + command = "samdev" + + return command + + def get_deploy_command_list( + self, + s3_bucket=None, + stack_name=None, + template=None, + template_file=None, + s3_prefix=None, + capabilities=None, + force_upload=False, + notification_arns=None, + fail_on_empty_changeset=False, + no_execute_changeset=False, + parameter_overrides=None, + role_arn=None, + kms_key_id=None, + tags=None, + profile=None, + region=None, + ): + command_list = [self.base_command(), "deploy"] + + if s3_bucket: + command_list = command_list + ["--s3-bucket", str(s3_bucket)] + if capabilities: + command_list = command_list + ["--capabilities", str(capabilities)] + if parameter_overrides: + command_list = command_list + ["--parameter-overrides", str(parameter_overrides)] + if role_arn: + command_list = command_list + ["--role-arn", str(role_arn)] + if notification_arns: + command_list = command_list + ["--notification-arns", str(notification_arns)] + if stack_name: + command_list = command_list + ["--stack-name", str(stack_name)] + if template: + 
command_list = command_list + ["--template", str(template)] + if template_file: + command_list = command_list + ["--template-file", str(template_file)] + if s3_prefix: + command_list = command_list + ["--s3-prefix", str(s3_prefix)] + if kms_key_id: + command_list = command_list + ["--kms-key-id", str(kms_key_id)] + if no_execute_changeset: + command_list = command_list + ["--no-execute-changeset"] + if force_upload: + command_list = command_list + ["--force-upload"] + if fail_on_empty_changeset: + command_list = command_list + ["--fail-on-empty-changeset"] + if tags: + command_list = command_list + ["--tags", str(tags)] + if region: + command_list = command_list + ["--region", str(region)] + if profile: + command_list = command_list + ["--profile", str(profile)] + + return command_list diff --git a/tests/integration/deploy/test_deploy_command.py b/tests/integration/deploy/test_deploy_command.py new file mode 100644 index 0000000000..a2bfb79f68 --- /dev/null +++ b/tests/integration/deploy/test_deploy_command.py @@ -0,0 +1,84 @@ +import os +import tempfile +import uuid +from subprocess import Popen, PIPE +from unittest import skipIf + +import boto3 +from parameterized import parameterized + +from tests.integration.deploy.deploy_integ_base import DeployIntegBase +from tests.integration.package.package_integ_base import PackageIntegBase +from tests.testing_utils import RUNNING_ON_CI, RUNNING_TEST_FOR_MASTER_ON_CI + +# Deploy tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. +# This is to restrict package tests to run outside of CI/CD and when the branch is not master. +SKIP_DEPLOY_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI + + +@skipIf(SKIP_DEPLOY_TESTS, "Skip deploy tests in CI/CD only") +class TestDeploy(PackageIntegBase, DeployIntegBase): + def setUp(self): + self.cf_client = boto3.client("cloudformation") + self.sns_arn = os.environ.get("AWS_SNS") + self.stack_names = [] + super(TestDeploy, self).setUp() + + def tearDown(self): + for stack_name in self.stack_names: + self.cf_client.delete_stack(StackName=stack_name) + super(TestDeploy, self).tearDown() + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_all_args(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + with tempfile.NamedTemporaryFile(delete=False) as output_template_file: + # Package necessary artifacts. + package_command_list = self.get_command_list( + s3_bucket=self.s3_bucket.name, template=template_path, output_template_file=output_template_file.name + ) + + package_process = Popen(package_command_list, stdout=PIPE) + package_process.wait() + + self.assertEqual(package_process.returncode, 0) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Deploy and only show changeset. + deploy_command_list_no_execute = self.get_deploy_command_list( + template_file=output_template_file.name, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + s3_bucket=self.s3_bucket.name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=True, + tags="integ=true clarity=yes", + ) + + deploy_process_no_execute = Popen(deploy_command_list_no_execute, stdout=PIPE) + deploy_process_no_execute.wait() + self.assertEqual(deploy_process_no_execute.returncode, 0) + + # Deploy the given stack with the changeset. 
+ deploy_command_list_execute = self.get_deploy_command_list( + template_file=output_template_file.name, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + s3_bucket=self.s3_bucket.name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + tags="integ=true clarity=yes", + ) + + deploy_process = Popen(deploy_command_list_execute, stdout=PIPE) + deploy_process.wait() + self.assertEqual(deploy_process.returncode, 0) diff --git a/tests/integration/package/package_integ_base.py b/tests/integration/package/package_integ_base.py index 35d8ff47c0..b39115eb30 100644 --- a/tests/integration/package/package_integ_base.py +++ b/tests/integration/package/package_integ_base.py @@ -1,7 +1,6 @@ import os import uuid import json -import tempfile import time from pathlib import Path from unittest import TestCase @@ -26,6 +25,12 @@ def setUpClass(cls): # Given 3 seconds for all the bucket creation to complete time.sleep(3) + def setUp(self): + super(PackageIntegBase, self).setUp() + + def tearDown(self): + super(PackageIntegBase, self).tearDown() + @classmethod def tearDownClass(cls): cls.s3_bucket.objects.all().delete() diff --git a/tests/integration/testdata/package/aws-serverless-function.yaml b/tests/integration/testdata/package/aws-serverless-function.yaml index 1691cffe8e..ef8b30c245 100644 --- a/tests/integration/testdata/package/aws-serverless-function.yaml +++ b/tests/integration/testdata/package/aws-serverless-function.yaml @@ -2,6 +2,12 @@ AWSTemplateFormatVersion : '2010-09-09' Transform: AWS::Serverless-2016-10-31 Description: A hello world application. +Parameters: + Parameter: + Type: String + Default: Sample + Description: A custom parameter + Resources: HelloWorldFunction: Type: AWS::Serverless::Function diff --git a/tests/regression/deploy/__init__.py b/tests/regression/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/regression/deploy/regression_deploy_base.py b/tests/regression/deploy/regression_deploy_base.py new file mode 100644 index 0000000000..c564128243 --- /dev/null +++ b/tests/regression/deploy/regression_deploy_base.py @@ -0,0 +1,106 @@ +import os +import uuid +import json +import tempfile +import time +from pathlib import Path +from subprocess import Popen, PIPE +from unittest import TestCase + +import boto3 + + +class DeployRegressionBase(TestCase): + @classmethod + def setUpClass(cls): + pass + + @classmethod + def tearDownClass(cls): + pass + + def base_command(self, base): + command = [base] + if os.getenv("SAM_CLI_DEV") and base == "sam": + command = ["samdev"] + elif base == "aws": + command = [base, "cloudformation"] + + return command + + def get_deploy_command_list( + self, + base="sam", + s3_bucket=None, + stack_name=None, + template=None, + template_file=None, + s3_prefix=None, + capabilities=None, + force_upload=False, + notification_arns=None, + fail_on_empty_changeset=False, + no_execute_changeset=False, + parameter_overrides=None, + role_arn=None, + kms_key_id=None, + tags=None, + profile=None, + region=None, + ): + command_list = self.base_command(base=base) + + command_list = command_list + ["deploy"] + + if s3_bucket: + command_list = command_list + ["--s3-bucket", str(s3_bucket)] + if capabilities: + command_list = command_list + ["--capabilities", str(capabilities)] + if parameter_overrides: + command_list = command_list + ["--parameter-overrides", str(parameter_overrides)] + if role_arn: + command_list = 
command_list + ["--role-arn", str(role_arn)] + if notification_arns: + command_list = command_list + ["--notification-arns", str(notification_arns)] + if stack_name: + command_list = command_list + ["--stack-name", str(stack_name)] + if template: + command_list = command_list + ["--template", str(template)] + if template_file: + command_list = command_list + ["--template-file", str(template_file)] + if s3_prefix: + command_list = command_list + ["--s3-prefix", str(s3_prefix)] + if kms_key_id: + command_list = command_list + ["--kms-key-id", str(kms_key_id)] + if no_execute_changeset: + command_list = command_list + ["--no-execute-changeset"] + if force_upload: + command_list = command_list + ["--force-upload"] + if fail_on_empty_changeset: + command_list = command_list + ["--fail-on-empty-changeset"] + if tags: + command_list = command_list + ["--tags", str(tags)] + if region: + command_list = command_list + ["--region", str(region)] + if profile: + command_list = command_list + ["--profile", str(profile)] + + return command_list + + def deploy_regression_check(self, args, sam_return_code=0, aws_return_code=0, commands=[]): + sam_stack_name = args.get("sam_stack_name", None) + aws_stack_name = args.get("aws_stack_name", None) + if sam_stack_name: + del args["sam_stack_name"] + if aws_stack_name: + del args["aws_stack_name"] + + aws_command_list = self.get_deploy_command_list(base="aws", stack_name=aws_stack_name, **args) + process = Popen(aws_command_list, stdout=PIPE) + process.wait() + self.assertEqual(process.returncode, aws_return_code) + + sam_command_list = self.get_deploy_command_list(stack_name=sam_stack_name, **args) + process = Popen(sam_command_list, stdout=PIPE) + process.wait() + self.assertEqual(process.returncode, sam_return_code) diff --git a/tests/regression/deploy/test_deploy_regression.py b/tests/regression/deploy/test_deploy_regression.py new file mode 100644 index 0000000000..8280189b01 --- /dev/null +++ b/tests/regression/deploy/test_deploy_regression.py @@ -0,0 +1,154 @@ +import os +import tempfile +import uuid +from subprocess import Popen, PIPE +from unittest import skipIf + +import boto3 +from parameterized import parameterized + +from tests.regression.deploy.regression_deploy_base import DeployRegressionBase +from tests.regression.package.regression_package_base import PackageRegressionBase +from tests.testing_utils import RUNNING_ON_CI, RUNNING_TEST_FOR_MASTER_ON_CI + +# Package Regression tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. +# This is to restrict package tests to run outside of CI/CD and when the branch is not master. 
+SKIP_DEPLOY_REGRESSION_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI + +# Only testing return codes to be equivalent + + +@skipIf(SKIP_DEPLOY_REGRESSION_TESTS, "Skip deploy regression tests in CI/CD only") +class TestDeployRegression(PackageRegressionBase, DeployRegressionBase): + def setUp(self): + self.sns_arn = os.environ.get("AWS_SNS") + self.kms_key = os.environ.get("AWS_KMS_KEY") + self.stack_names = [] + self.cf_client = boto3.client("cloudformation") + super(TestDeployRegression, self).setUp() + + def tearDown(self): + for stack_name in self.stack_names: + self.cf_client.delete_stack(StackName=stack_name) + super(TestDeployRegression, self).tearDown() + + def prepare_package(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + output_template_file = tempfile.NamedTemporaryFile(delete=False) + package_command_list = self.get_command_list( + s3_bucket=self.s3_bucket.name, template_file=template_path, output_template_file=output_template_file.name + ) + + package_process = Popen(package_command_list, stdout=PIPE) + package_process.wait() + self.assertEqual(package_process.returncode, 0) + return output_template_file.name + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_with_all_args(self, template_file): + + output_template_file = self.prepare_package(template_file=template_file) + + sam_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(sam_stack_name) + + aws_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(aws_stack_name) + + arguments = { + "template_file": output_template_file, + "aws_stack_name": aws_stack_name, + "sam_stack_name": sam_stack_name, + "capabilities": "CAPABILITY_IAM", + "s3_prefix": "regress_deploy", + "force_upload": True, + "notification_arns": self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + + self.deploy_regression_check(arguments) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_with_no_stack_name(self, template_file): + output_template_file = self.prepare_package(template_file=template_file) + + arguments = { + "template_file": output_template_file, + "capabilities": "CAPABILITY_IAM", + "s3_prefix": "regress_deploy", + "force_upload": True, + "notification_arns": self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + + self.deploy_regression_check(arguments, sam_return_code=2, aws_return_code=2) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_with_no_capabilities(self, template_file): + output_template_file = self.prepare_package(template_file=template_file) + + sam_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(sam_stack_name) + + aws_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(aws_stack_name) + + arguments = { + "template_file": output_template_file, + "aws_stack_name": aws_stack_name, + "sam_stack_name": sam_stack_name, + "s3_prefix": "regress_deploy", + "force_upload": True, + "notification_arns": self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + + self.deploy_regression_check(arguments, sam_return_code=2, aws_return_code=255) + + def test_deploy_with_no_template_file(self): + sam_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + 
self.stack_names.append(sam_stack_name) + + aws_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(aws_stack_name) + + arguments = { + "aws_stack_name": aws_stack_name, + "sam_stack_name": sam_stack_name, + "s3_prefix": "regress_deploy", + "force_upload": True, + "notification_arns": self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + + self.deploy_regression_check(arguments, sam_return_code=2, aws_return_code=2) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_with_no_changes(self, template_file): + output_template_file = self.prepare_package(template_file=template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + arguments = { + "template_file": output_template_file, + "capabilities": "CAPABILITY_IAM", + "sam_stack_name": stack_name, + "aws_stack_name": stack_name, + "s3_prefix": "regress_deploy", + "force_upload": True, + "notification_arns": self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + + self.deploy_regression_check(arguments, sam_return_code=0, aws_return_code=0) diff --git a/tests/unit/cli/test_types.py b/tests/unit/cli/test_types.py index b0cd383db9..985c3e66c1 100644 --- a/tests/unit/cli/test_types.py +++ b/tests/unit/cli/test_types.py @@ -2,7 +2,7 @@ from unittest.mock import Mock, ANY from nose_parameterized import parameterized -from samcli.cli.types import CfnParameterOverridesType +from samcli.cli.types import CfnParameterOverridesType, CfnTags from samcli.cli.types import CfnMetadataType @@ -12,19 +12,19 @@ def setUp(self): @parameterized.expand( [ - ("some string"), + (("some string"),), # Key must not contain spaces - ('ParameterKey="Ke y",ParameterValue=Value'), + (('ParameterKey="Ke y",ParameterValue=Value'),), # No value - ("ParameterKey=Key,ParameterValue="), + (("ParameterKey=Key,ParameterValue="),), # No key - ("ParameterKey=,ParameterValue=Value"), + (("ParameterKey=,ParameterValue=Value"),), # Case sensitive - ("parameterkey=Key,ParameterValue=Value"), + (("parameterkey=Key,ParameterValue=Value"),), # No space after comma - ("ParameterKey=Key, ParameterValue=Value"), + (("ParameterKey=Key, ParameterValue=Value"),), # Bad separator - ("ParameterKey:Key,ParameterValue:Value"), + (("ParameterKey:Key,ParameterValue:Value"),), ] ) def test_must_fail_on_invalid_format(self, input): @@ -36,44 +36,44 @@ def test_must_fail_on_invalid_format(self, input): @parameterized.expand( [ ( - "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro", + ("ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro",), {"KeyPairName": "MyKey", "InstanceType": "t1.micro"}, ), - ('ParameterKey="Key",ParameterValue=Val\\ ue', {"Key": "Val ue"}), - ('ParameterKey="Key",ParameterValue="Val\\"ue"', {"Key": 'Val"ue'}), - ("ParameterKey=Key,ParameterValue=Value", {"Key": "Value"}), - ('ParameterKey=Key,ParameterValue=""', {"Key": ""}), + (('ParameterKey="Key",ParameterValue=Val\\ ue',), {"Key": "Val ue"}), + (('ParameterKey="Key",ParameterValue="Val\\"ue"',), {"Key": 'Val"ue'}), + (("ParameterKey=Key,ParameterValue=Value",), {"Key": "Value"}), + (('ParameterKey=Key,ParameterValue=""',), {"Key": ""}), ( # Trailing and leading whitespaces - " ParameterKey=Key,ParameterValue=Value ParameterKey=Key2,ParameterValue=Value2 ", + (" 
ParameterKey=Key,ParameterValue=Value ParameterKey=Key2,ParameterValue=Value2 ",), {"Key": "Value", "Key2": "Value2"}, ), ( # Quotes at the end - 'ParameterKey=Key,ParameterValue=Value\\"', + ('ParameterKey=Key,ParameterValue=Value\\"',), {"Key": 'Value"'}, ), ( # Quotes at the start - 'ParameterKey=Key,ParameterValue=\\"Value', + ('ParameterKey=Key,ParameterValue=\\"Value',), {"Key": '"Value'}, ), ( # Value is spacial characters - "ParameterKey=Key,ParameterValue==-_)(*&^%$#@!`~:;,. ParameterKey=Key2,ParameterValue=Value2", + ("ParameterKey=Key,ParameterValue==-_)(*&^%$#@!`~:;,. ParameterKey=Key2,ParameterValue=Value2",), {"Key": "=-_)(*&^%$#@!`~:;,.", "Key2": "Value2"}, ), - ('ParameterKey=Key1230,ParameterValue="{\\"a\\":\\"b\\"}"', {"Key1230": '{"a":"b"}'}), + (('ParameterKey=Key1230,ParameterValue="{\\"a\\":\\"b\\"}"',), {"Key1230": '{"a":"b"}'}), ( # Must ignore empty inputs - "", + ("",), {}, ), ] ) def test_successful_parsing(self, input, expected): result = self.param_type.convert(input, None, None) - self.assertEqual(result, expected, msg="Failed with Input = " + input) + self.assertEqual(result, expected, msg="Failed with Input = " + str(input)) class TestCfnMetadataType(TestCase): @@ -120,3 +120,64 @@ def test_must_fail_on_invalid_format(self, input): def test_successful_parsing(self, input, expected): result = self.param_type.convert(input, None, None) self.assertEqual(result, expected, msg="Failed with Input = " + input) + + +class TestCfnTags(TestCase): + def setUp(self): + self.param_type = CfnTags() + + @parameterized.expand( + [ + # Just a string + ("some string"), + # Wrong notation + ("a==b"), + # Wrong multi-key notation + ("a==b,c==d"), + ] + ) + def test_must_fail_on_invalid_format(self, input): + self.param_type.fail = Mock() + self.param_type.convert(input, "param", "ctx") + + self.param_type.fail.assert_called_with(ANY, "param", "ctx") + + @parameterized.expand([(("a=b",), {"a": "b"}), (("a=b", "c=d"), {"a": "b", "c": "d"}), (("",), {})]) + def test_successful_parsing(self, input, expected): + result = self.param_type.convert(input, None, None) + self.assertEqual(result, expected, msg="Failed with Input = " + str(input)) + + +# class TestCfnCapabilitiesType(TestCase): +# def setUp(self): +# self.param_type = CfnCapabilitiesType() +# +# @parameterized.expand( +# [ +# # Just a string +# ("some string"), +# # tuple of string +# ("some string",), +# # non-tuple valid string +# "CAPABILITY_NAMED_IAM", +# ] +# ) +# def test_must_fail_on_invalid_format(self, input): +# self.param_type.fail = Mock() +# self.param_type.convert(input, "param", "ctx") +# +# self.param_type.fail.assert_called_with(ANY, "param", "ctx") +# +# @parameterized.expand( +# [ +# (("CAPABILITY_AUTO_EXPAND",), ("CAPABILITY_AUTO_EXPAND",)), +# (("CAPABILITY_AUTO_EXPAND", "CAPABILITY_NAMED_IAM"), ("CAPABILITY_AUTO_EXPAND", "CAPABILITY_NAMED_IAM")), +# ( +# ("CAPABILITY_AUTO_EXPAND", "CAPABILITY_NAMED_IAM", "CAPABILITY_IAM"), +# ("CAPABILITY_AUTO_EXPAND", "CAPABILITY_NAMED_IAM", "CAPABILITY_IAM"), +# ), +# ] +# ) +# def test_successful_parsing(self, input, expected): +# result = self.param_type.convert(input, None, None) +# self.assertEqual(result, expected, msg="Failed with Input = " + str(input)) diff --git a/tests/unit/commands/_utils/custom_options/__init__.py b/tests/unit/commands/_utils/custom_options/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/_utils/custom_options/test_option_nargs.py 
b/tests/unit/commands/_utils/custom_options/test_option_nargs.py new file mode 100644 index 0000000000..18472b3e97 --- /dev/null +++ b/tests/unit/commands/_utils/custom_options/test_option_nargs.py @@ -0,0 +1,40 @@ +from unittest import TestCase +from unittest.mock import MagicMock + +from samcli.commands._utils.custom_options.option_nargs import OptionNargs + + +class MockRArgs: + def __init__(self, rargs): + self.rargs = rargs + + +class TestOptionNargs(TestCase): + def setUp(self): + self.name = "test" + self.opt = "--use" + self.prefixes = ["--", "-"] + self.arg = "first" + self.rargs_list = ["second", "third", "--nextopt"] + self.expected_args = tuple([self.arg] + self.rargs_list[:-1]) + self.option_nargs = OptionNargs(param_decls=(self.name, self.opt)) + + def test_option(self): + parser = MagicMock() + ctx = MagicMock() + self.option_nargs.add_to_parser(parser=parser, ctx=ctx) + # Get option parser + + parser._long_opt.get.assert_called_with(self.opt) + self.assertEqual(self.option_nargs._nargs_parser, parser._long_opt.get()) + + # set prefixes + self.option_nargs._nargs_parser.prefixes = self.prefixes + + # create new state with remaining args + state = MockRArgs(self.rargs_list) + # call process with the monkey patched `parser_process` within `add_to_process` + parser._long_opt.get().process(self.arg, state) + + # finally call parser.process with ("first", "second", "third") + self.option_nargs._previous_parser_process.assert_called_with(self.expected_args, state) diff --git a/tests/unit/commands/_utils/test_table_print.py b/tests/unit/commands/_utils/test_table_print.py new file mode 100644 index 0000000000..518a30e43a --- /dev/null +++ b/tests/unit/commands/_utils/test_table_print.py @@ -0,0 +1,83 @@ +import io +from contextlib import redirect_stdout +from collections import OrderedDict +from unittest import TestCase + +from samcli.commands._utils.table_print import pprint_column_names, pprint_columns + +TABLE_FORMAT_STRING = "{Alpha:<{0}} {Beta:<{1}} {Gamma:<{2}}" +TABLE_FORMAT_ARGS = OrderedDict({"Alpha": "Alpha", "Beta": "Beta", "Gamma": "Gamma"}) + + +class TestTablePrint(TestCase): + def setUp(self): + self.redirect_out = io.StringIO() + + def test_pprint_column_names(self): + @pprint_column_names(TABLE_FORMAT_STRING, TABLE_FORMAT_ARGS) + def to_be_decorated(*args, **kwargs): + pass + + with redirect_stdout(self.redirect_out): + to_be_decorated() + output = ( + "------------------------------------------------------------------------------------------------\n" + "Alpha Beta Gamma \n" + "------------------------------------------------------------------------------------------------\n" + "------------------------------------------------------------------------------------------------\n" + ) + + self.assertEqual(output, self.redirect_out.getvalue()) + + def test_pprint_column_names_and_text(self): + @pprint_column_names(TABLE_FORMAT_STRING, TABLE_FORMAT_ARGS) + def to_be_decorated(*args, **kwargs): + pprint_columns( + columns=["A", "B", "C"], + width=kwargs["width"], + margin=kwargs["margin"], + format_args=kwargs["format_args"], + format_string=TABLE_FORMAT_STRING, + columns_dict=TABLE_FORMAT_ARGS.copy(), + ) + + with redirect_stdout(self.redirect_out): + to_be_decorated() + + output = ( + "------------------------------------------------------------------------------------------------\n" + "Alpha Beta Gamma \n" + "------------------------------------------------------------------------------------------------\n" + "A B C \n" + 
"------------------------------------------------------------------------------------------------\n" + ) + self.assertEqual(output, self.redirect_out.getvalue()) + + def test_pprint_exceptions_with_no_column_names(self): + with self.assertRaises(ValueError): + + @pprint_column_names(TABLE_FORMAT_STRING, {}) + def to_be_decorated(*args, **kwargs): + pprint_columns( + columns=["A", "B", "C"], + width=kwargs["width"], + margin=kwargs["margin"], + format_args=kwargs["format_args"], + format_string=TABLE_FORMAT_STRING, + columns_dict=TABLE_FORMAT_ARGS.copy(), + ) + + def test_pprint_exceptions_with_too_many_column_names(self): + massive_dictionary = {str(i): str(i) for i in range(100)} + with self.assertRaises(ValueError): + + @pprint_column_names(TABLE_FORMAT_STRING, massive_dictionary) + def to_be_decorated(*args, **kwargs): + pprint_columns( + columns=["A", "B", "C"], + width=kwargs["width"], + margin=kwargs["margin"], + format_args=kwargs["format_args"], + format_string=TABLE_FORMAT_STRING, + columns_dict=TABLE_FORMAT_ARGS.copy(), + ) diff --git a/tests/unit/commands/deploy/__init__.py b/tests/unit/commands/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py new file mode 100644 index 0000000000..4a39696eb2 --- /dev/null +++ b/tests/unit/commands/deploy/test_command.py @@ -0,0 +1,71 @@ +from unittest import TestCase +from unittest.mock import patch, Mock + +from samcli.commands.deploy.command import do_cli + + +class TestDeployliCommand(TestCase): + def setUp(self): + + self.template_file = "input-template-file" + self.stack_name = "stack-name" + self.s3_bucket = "s3-bucket" + self.s3_prefix = "s3-prefix" + self.kms_key_id = "kms-key-id" + self.no_execute_changeset = False + self.notification_arns = [] + self.parameter_overrides = {"a": "b"} + self.capabilities = "CAPABILITY_IAM" + self.tags = {"c": "d"} + self.fail_on_empty_changset = True + self.role_arn = "role_arn" + self.force_upload = False + self.metadata = {"abc": "def"} + self.region = None + self.profile = None + + @patch("samcli.commands.deploy.command.click") + @patch("samcli.commands.deploy.deploy_context.DeployContext") + def test_all_args(self, deploy_command_context, click_mock): + + context_mock = Mock() + deploy_command_context.return_value.__enter__.return_value = context_mock + + do_cli( + template_file=self.template_file, + stack_name=self.stack_name, + s3_bucket=self.s3_bucket, + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region=self.region, + profile=self.profile, + ) + + deploy_command_context.assert_called_with( + template_file=self.template_file, + stack_name=self.stack_name, + s3_bucket=self.s3_bucket, + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region=self.region, + profile=self.profile, + ) + + context_mock.run.assert_called_with() + 
self.assertEqual(context_mock.run.call_count, 1) diff --git a/tests/unit/commands/deploy/test_deploy_context.py b/tests/unit/commands/deploy/test_deploy_context.py new file mode 100644 index 0000000000..0813553904 --- /dev/null +++ b/tests/unit/commands/deploy/test_deploy_context.py @@ -0,0 +1,141 @@ +"""Test sam deploy command""" +from unittest import TestCase +from unittest.mock import patch, MagicMock +import tempfile + +from samcli.lib.deploy.deployer import Deployer +from samcli.commands.deploy.deploy_context import DeployContext +from samcli.commands.deploy.exceptions import DeployBucketRequiredError, DeployFailedError, ChangeEmptyError + + +class TestPackageCommand(TestCase): + def setUp(self): + self.deploy_command_context = DeployContext( + template_file="template-file", + stack_name="stack-name", + s3_bucket="s3-bucket", + force_upload=True, + s3_prefix="s3-prefix", + kms_key_id="kms-key-id", + parameter_overrides={"a": "b"}, + capabilities="CAPABILITY_IAM", + no_execute_changeset=False, + role_arn="role-arn", + notification_arns=[], + fail_on_empty_changeset=False, + tags={"a": "b"}, + region=None, + profile=None, + ) + + def test_template_improper(self): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + with self.assertRaises(DeployFailedError): + self.deploy_command_context.template_file = template_file.name + self.deploy_command_context.run() + + def test_template_size_large_no_s3_bucket(self): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b" " * 51200) + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + self.deploy_command_context.s3_bucket = None + with self.assertRaises(DeployBucketRequiredError): + self.deploy_command_context.run() + + @patch("boto3.Session") + @patch.object(Deployer, "create_and_wait_for_changeset", MagicMock(return_value=({"Id": "test"}, "CREATE"))) + @patch.object(Deployer, "execute_changeset", MagicMock()) + @patch.object(Deployer, "wait_for_execute", MagicMock()) + def test_template_size_large_and_s3_bucket(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b" " * 51200) + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + self.deploy_command_context.run() + + @patch("boto3.Session") + def test_template_valid(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + + self.deploy_command_context.deploy = MagicMock() + self.deploy_command_context.run() + + @patch("boto3.Session") + @patch.object( + Deployer, "create_and_wait_for_changeset", MagicMock(side_effect=ChangeEmptyError(stack_name="stack-name")) + ) + def test_template_valid_change_empty(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.fail_on_empty_changeset = True + self.deploy_command_context.template_file = template_file.name + + with self.assertRaises(ChangeEmptyError): + self.deploy_command_context.run() + + @patch("boto3.Session") + @patch.object( + Deployer, "create_and_wait_for_changeset", MagicMock(side_effect=ChangeEmptyError(stack_name="stack-name")) + ) + def test_template_valid_change_empty_no_fail_on_empty_changeset(self, patched_boto): + with 
tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + + self.deploy_command_context.run() + + @patch("boto3.Session") + @patch.object(Deployer, "create_and_wait_for_changeset", MagicMock(return_value=({"Id": "test"}, "CREATE"))) + @patch.object(Deployer, "execute_changeset", MagicMock()) + @patch.object(Deployer, "wait_for_execute", MagicMock()) + def test_template_valid_execute_changeset(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + + self.deploy_command_context.run() + self.assertEqual(self.deploy_command_context.deployer.create_and_wait_for_changeset.call_count, 1) + self.assertEqual(self.deploy_command_context.deployer.execute_changeset.call_count, 1) + self.assertEqual(self.deploy_command_context.deployer.wait_for_execute.call_count, 1) + self.assertEqual(self.deploy_command_context.deployer.get_stack_outputs.call_count, 1) + + @patch("boto3.Session") + @patch.object(Deployer, "create_and_wait_for_changeset", MagicMock(return_value=({"Id": "test"}, "CREATE"))) + @patch.object(Deployer, "execute_changeset", MagicMock()) + @patch.object(Deployer, "wait_for_execute", MagicMock()) + def test_template_valid_no_execute_changeset(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + self.deploy_command_context.no_execute_changeset = True + + self.deploy_command_context.run() + self.assertEqual(self.deploy_command_context.deployer.create_and_wait_for_changeset.call_count, 1) + self.assertEqual(self.deploy_command_context.deployer.execute_changeset.call_count, 0) + self.assertEqual(self.deploy_command_context.deployer.wait_for_execute.call_count, 0) + + @patch("boto3.Session") + @patch.object(Deployer, "create_and_wait_for_changeset", MagicMock(return_value=({"Id": "test"}, "CREATE"))) + @patch.object(Deployer, "execute_changeset", MagicMock()) + @patch.object(Deployer, "wait_for_execute", MagicMock()) + def test_template_valid_execute_changeset(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b'{"Parameters": {"a":"b","c":"d"}}') + template_file.flush() + self.deploy_command_context.template_file = template_file.name + self.deploy_command_context.run() + self.assertEqual(self.deploy_command_context.deployer.create_and_wait_for_changeset.call_count, 1) + self.assertEqual( + self.deploy_command_context.deployer.create_and_wait_for_changeset.call_args[1]["parameter_values"], + [{"ParameterKey": "a", "ParameterValue": "b"}, {"ParameterKey": "c", "UsePreviousValue": True}], + ) diff --git a/tests/unit/commands/test_deploy.py b/tests/unit/commands/test_deploy.py deleted file mode 100644 index 90387d5095..0000000000 --- a/tests/unit/commands/test_deploy.py +++ /dev/null @@ -1,20 +0,0 @@ -""" -Tests Deploy CLI command -""" - -from unittest import TestCase -from unittest.mock import patch - -from samcli.commands.deploy import do_cli as deploy_cli - - -class TestCli(TestCase): - def setUp(self): - self.args = ("--force-upload",) - self.expected_args = self.args + ("--stack-name", "stackName") - - @patch("samcli.commands.deploy.execute_command") - def test_deploy_must_pass_args(self, execute_command_mock): - 
execute_command_mock.return_value = True - deploy_cli(self.args, "file.yaml", "stackName") - execute_command_mock.assert_called_with("deploy", self.expected_args, template_file="file.yaml") diff --git a/tests/unit/lib/deploy/__init__.py b/tests/unit/lib/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/deploy/test_deployer.py b/tests/unit/lib/deploy/test_deployer.py new file mode 100644 index 0000000000..0d5b2d3f32 --- /dev/null +++ b/tests/unit/lib/deploy/test_deployer.py @@ -0,0 +1,535 @@ +import uuid +import time +from datetime import datetime, timedelta +from unittest import TestCase +from unittest.mock import patch, MagicMock, ANY + +from botocore.exceptions import ClientError, WaiterError + +from samcli.commands.deploy.exceptions import DeployFailedError, ChangeSetError, DeployStackOutPutFailedError +from samcli.lib.deploy.deployer import Deployer +from samcli.lib.package.s3_uploader import S3Uploader +from samcli.lib.utils.time import utc_to_timestamp, to_datetime + + +class MockPaginator: + def __init__(self, resp): + self.resp = resp + + def paginate(self, ChangeSetName=None, StackName=None): + return self.resp + + +class MockChangesetWaiter: + def __init__(self, ex=None): + self.ex = ex + + def wait(self, ChangeSetName, StackName, WaiterConfig): + if self.ex: + raise self.ex + return + + +class MockCreateUpdateWaiter: + def __init__(self, ex=None): + self.ex = ex + + def wait(self, StackName, WaiterConfig): + if self.ex: + raise self.ex + return + + +class TestDeployer(TestCase): + def setUp(self): + self.session = MagicMock() + self.cloudformation_client = self.session.client("cloudformation") + self.s3_client = self.session.client("s3") + self.deployer = Deployer(self.cloudformation_client) + + def test_deployer_init(self): + self.assertEqual(self.deployer._client, self.cloudformation_client) + self.assertEqual(self.deployer.changeset_prefix, "samcli-deploy") + + def test_deployer_has_no_stack(self): + self.deployer._client.describe_stacks = MagicMock(return_value={"Stacks": []}) + self.assertEqual(self.deployer.has_stack("test"), False) + + def test_deployer_has_stack_in_review(self): + self.deployer._client.describe_stacks = MagicMock( + return_value={"Stacks": [{"StackStatus": "REVIEW_IN_PROGRESS"}]} + ) + self.assertEqual(self.deployer.has_stack("test"), False) + + def test_deployer_has_stack_exception_non_exsistent(self): + self.deployer._client.describe_stacks = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Stack with id test does not exist"}}, + operation_name="stack_status", + ) + ) + self.assertEqual(self.deployer.has_stack("test"), False) + + def test_deployer_has_stack_exception(self): + self.deployer._client.describe_stacks = MagicMock( + side_effect=ClientError(error_response={"Error": {"Message": "Error"}}, operation_name="stack_status") + ) + with self.assertRaises(ClientError): + self.deployer.has_stack("test") + + def test_create_changeset(self): + self.deployer.has_stack = MagicMock(return_value=False) + self.deployer.create_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + self.assertEqual(self.deployer._client.create_change_set.call_count, 1) + 
self.deployer._client.create_change_set.assert_called_with( + Capabilities=["CAPABILITY_IAM"], + ChangeSetName=ANY, + ChangeSetType="CREATE", + Description=ANY, + NotificationARNs=[], + Parameters=[{"ParameterKey": "a", "ParameterValue": "b"}], + RoleARN="role-arn", + StackName="test", + Tags={"unit": "true"}, + TemplateURL=ANY, + ) + + def test_update_changeset(self): + self.deployer.has_stack = MagicMock(return_value=True) + self.deployer.create_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + self.assertEqual(self.deployer._client.create_change_set.call_count, 1) + self.deployer._client.create_change_set.assert_called_with( + Capabilities=["CAPABILITY_IAM"], + ChangeSetName=ANY, + ChangeSetType="UPDATE", + Description=ANY, + NotificationARNs=[], + Parameters=[{"ParameterKey": "a", "ParameterValue": "b"}], + RoleARN="role-arn", + StackName="test", + Tags={"unit": "true"}, + TemplateURL=ANY, + ) + + def test_create_changeset_exception(self): + self.deployer.has_stack = MagicMock(return_value=False) + self.deployer._client.create_change_set = MagicMock(side_effect=Exception) + with self.assertRaises(ChangeSetError): + self.deployer.create_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + def test_describe_changeset_with_changes(self): + response = [ + { + "Changes": [ + {"ResourceChange": {"LogicalResourceId": "resource_id1", "ResourceType": "s3", "Action": "Add"}} + ] + }, + { + "Changes": [ + {"ResourceChange": {"LogicalResourceId": "resource_id2", "ResourceType": "kms", "Action": "Add"}} + ] + }, + { + "Changes": [ + {"ResourceChange": {"LogicalResourceId": "resource_id3", "ResourceType": "lambda", "Action": "Add"}} + ] + }, + ] + self.deployer._client.get_paginator = MagicMock(return_value=MockPaginator(resp=response)) + changes = self.deployer.describe_changeset("change_id", "test") + self.assertEqual( + changes, + { + "Add": [ + {"LogicalResourceId": "resource_id1", "ResourceType": "s3"}, + {"LogicalResourceId": "resource_id2", "ResourceType": "kms"}, + {"LogicalResourceId": "resource_id3", "ResourceType": "lambda"}, + ], + "Modify": [], + "Remove": [], + }, + ) + + def test_describe_changeset_with_no_changes(self): + response = [{"Changes": []}] + self.deployer._client.get_paginator = MagicMock(return_value=MockPaginator(resp=response)) + changes = self.deployer.describe_changeset("change_id", "test") + self.assertEqual(changes, {"Add": [], "Modify": [], "Remove": []}) + + def test_wait_for_changeset(self): + self.deployer._client.get_waiter = MagicMock(return_value=MockChangesetWaiter()) + self.deployer.wait_for_changeset("test-id", "test-stack") + + def test_wait_for_changeset_exception_ChangeEmpty(self): + self.deployer._client.get_waiter = MagicMock( + return_value=MockChangesetWaiter( + ex=WaiterError( + name="wait_for_changeset", + reason="unit-test", + last_response={"Status": "Failed", "StatusReason": "It's a unit test"}, + ) + ) + ) + with 
self.assertRaises(ChangeSetError): + self.deployer.wait_for_changeset("test-id", "test-stack") + + def test_execute_changeset(self): + self.deployer.execute_changeset("id", "test") + self.deployer._client.execute_change_set.assert_called_with(ChangeSetName="id", StackName="test") + + def test_execute_changeset_exception(self): + self.deployer._client.execute_change_set = MagicMock( + side_effect=ClientError(error_response={"Error": {"Message": "Error"}}, operation_name="execute_changeset") + ) + with self.assertRaises(DeployFailedError): + self.deployer.execute_changeset("id", "test") + + def test_get_last_event_time(self): + timestamp = datetime.utcnow() + self.deployer._client.describe_stack_events = MagicMock( + return_value={"StackEvents": [{"Timestamp": timestamp}]} + ) + self.assertEqual(self.deployer.get_last_event_time("test"), utc_to_timestamp(timestamp)) + + def test_get_last_event_time_unknown_last_time(self): + current_timestamp = datetime.utcnow() + self.deployer._client.describe_stack_events = MagicMock(side_effect=KeyError) + # Convert to milliseconds from seconds + last_stack_event_timestamp = to_datetime(self.deployer.get_last_event_time("test") * 1000) + self.assertEqual(last_stack_event_timestamp.year, current_timestamp.year) + self.assertEqual(last_stack_event_timestamp.month, current_timestamp.month) + self.assertEqual(last_stack_event_timestamp.day, current_timestamp.day) + self.assertEqual(last_stack_event_timestamp.hour, current_timestamp.hour) + self.assertEqual(last_stack_event_timestamp.minute, current_timestamp.minute) + self.assertEqual(last_stack_event_timestamp.second, current_timestamp.second) + + @patch("time.sleep") + def test_describe_stack_events(self, patched_time): + current_timestamp = datetime.utcnow() + + self.deployer._client.describe_stacks = MagicMock( + side_effect=[ + {"Stacks": [{"StackStatus": "CREATE_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_COMPLETE_CLEANUP_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_COMPLETE"}]}, + ] + ) + self.deployer._client.get_paginator = MagicMock( + return_value=MockPaginator( + [ + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_IN_PROGRESS", + "ResourceType": "s3", + "LogicalResourceId": "mybucket", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_IN_PROGRESS", + "ResourceType": "kms", + "LogicalResourceId": "mykms", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_COMPLETE", + "ResourceType": "s3", + "LogicalResourceId": "mybucket", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_COMPLETE", + "ResourceType": "kms", + "LogicalResourceId": "mykms", + } + ] + }, + ] + ) + ) + + self.deployer.describe_stack_events("test", time.time() - 1) + + @patch("time.sleep") + def test_describe_stack_events_exceptions(self, patched_time): + + self.deployer._client.describe_stacks = MagicMock( + side_effect=[ + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, 
operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ] + ) + with self.assertRaises(ClientError): + self.deployer.describe_stack_events("test", time.time()) + + @patch("time.sleep") + def test_describe_stack_events_resume_after_exceptions(self, patched_time): + current_timestamp = datetime.utcnow() + + self.deployer._client.describe_stacks = MagicMock( + side_effect=[ + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + {"Stacks": [{"StackStatus": "CREATE_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_COMPLETE_CLEANUP_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_COMPLETE"}]}, + ] + ) + + self.deployer._client.get_paginator = MagicMock( + return_value=MockPaginator( + [ + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_IN_PROGRESS", + "ResourceType": "s3", + "LogicalResourceId": "mybucket", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_IN_PROGRESS", + "ResourceType": "kms", + "LogicalResourceId": "mykms", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_COMPLETE", + "ResourceType": "s3", + "LogicalResourceId": "mybucket", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_COMPLETE", + "ResourceType": "kms", + "LogicalResourceId": "mykms", + } + ] + }, + ] + ) + ) + + self.deployer.describe_stack_events("test", time.time()) + + def test_check_stack_status(self): + self.assertEqual(self.deployer._check_stack_complete("CREATE_COMPLETE"), True) + self.assertEqual(self.deployer._check_stack_complete("CREATE_FAILED"), False) + self.assertEqual(self.deployer._check_stack_complete("CREATE_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("DELETE_COMPLETE"), True) + self.assertEqual(self.deployer._check_stack_complete("DELETE_FAILED"), False) + self.assertEqual(self.deployer._check_stack_complete("DELETE_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("REVIEW_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("ROLLBACK_COMPLETE"), True) + self.assertEqual(self.deployer._check_stack_complete("ROLLBACK_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_COMPLETE"), True) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_COMPLETE_CLEANUP_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_ROLLBACK_FAILED"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_ROLLBACK_IN_PROGRESS"), False) + + @patch("time.sleep") + def test_wait_for_execute(self, patched_time): + self.deployer.describe_stack_events = MagicMock() + 
self.deployer._client.get_waiter = MagicMock(return_value=MockCreateUpdateWaiter()) + self.deployer.wait_for_execute("test", "CREATE") + self.deployer.wait_for_execute("test", "UPDATE") + with self.assertRaises(RuntimeError): + self.deployer.wait_for_execute("test", "DESTRUCT") + + self.deployer._client.get_waiter = MagicMock( + return_value=MockCreateUpdateWaiter( + ex=WaiterError( + name="create_changeset", + reason="unit-test", + last_response={"Status": "Failed", "StatusReason": "It's a unit test"}, + ) + ) + ) + with self.assertRaises(DeployFailedError): + self.deployer.wait_for_execute("test", "CREATE") + + def test_create_and_wait_for_changeset(self): + self.deployer.create_changeset = MagicMock(return_value=({"Id": "test"}, "create")) + self.deployer.wait_for_changeset = MagicMock() + self.deployer.describe_changeset = MagicMock() + + result = self.deployer.create_and_wait_for_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + self.assertEqual(result, ({"Id": "test"}, "create")) + + def test_create_and_wait_for_changeset_exception(self): + self.deployer.create_changeset = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Something Wrong"}}, operation_name="create_changeset" + ) + ) + with self.assertRaises(DeployFailedError): + self.deployer.create_and_wait_for_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + def test_get_stack_outputs(self): + outputs = { + "Stacks": [ + { + "Outputs": [ + {"OutputKey": "Key1", "OutputValue": "Value1", "Description": "output for s3"}, + {"OutputKey": "Key2", "OutputValue": "Value2", "Description": "output for kms"}, + ] + } + ] + } + self.deployer._client.describe_stacks = MagicMock(return_value=outputs) + + self.assertEqual(outputs["Stacks"][0]["Outputs"], self.deployer.get_stack_outputs(stack_name="test")) + self.deployer._client.describe_stacks.assert_called_with(StackName="test") + + @patch("samcli.lib.deploy.deployer.pprint_columns") + def test_get_stack_outputs_no_echo(self, mock_pprint_columns): + outputs = { + "Stacks": [ + { + "Outputs": [ + {"OutputKey": "Key1", "OutputValue": "Value1", "Description": "output for s3"}, + {"OutputKey": "Key2", "OutputValue": "Value2", "Description": "output for kms"}, + ] + } + ] + } + self.deployer._client.describe_stacks = MagicMock(return_value=outputs) + + self.assertEqual( + outputs["Stacks"][0]["Outputs"], self.deployer.get_stack_outputs(stack_name="test", echo=False) + ) + self.deployer._client.describe_stacks.assert_called_with(StackName="test") + self.assertEqual(mock_pprint_columns.call_count, 0) + + def test_get_stack_outputs_no_outputs_no_exception(self): + outputs = {"Stacks": [{"SomeOtherKey": "Value"}]} + self.deployer._client.describe_stacks = MagicMock(return_value=outputs) + + self.assertEqual(None, self.deployer.get_stack_outputs(stack_name="test")) + self.deployer._client.describe_stacks.assert_called_with(StackName="test") + + 
def test_get_stack_outputs_exception(self): + self.deployer._client.describe_stacks = MagicMock( + side_effect=ClientError(error_response={"Error": {"Message": "Error"}}, operation_name="describe_stacks") + ) + + with self.assertRaises(DeployStackOutPutFailedError): + self.deployer.get_stack_outputs(stack_name="test") diff --git a/tests/unit/lib/samlib/test_cloudformation_command.py b/tests/unit/lib/samlib/test_cloudformation_command.py deleted file mode 100644 index e846570c96..0000000000 --- a/tests/unit/lib/samlib/test_cloudformation_command.py +++ /dev/null @@ -1,166 +0,0 @@ -""" -Tests Deploy CLI -""" - -import os -from subprocess import CalledProcessError, PIPE - -from unittest import TestCase -from unittest.mock import patch, call, ANY - -from samcli.lib.samlib.cloudformation_command import execute_command, find_executable - - -class TestExecuteCommand(TestCase): - def setUp(self): - self.args = ("--arg1", "value1", "different args", "more") - - @patch("subprocess.check_call") - @patch("samcli.lib.samlib.cloudformation_command.find_executable") - def test_must_add_template_file(self, find_executable_mock, check_call_mock): - find_executable_mock.return_value = "mycmd" - check_call_mock.return_value = True - execute_command("command", self.args, "/path/to/template") - - check_call_mock.assert_called_with( - ["mycmd", "cloudformation", "command"] - + ["--arg1", "value1", "different args", "more", "--template-file", "/path/to/template"], - env=ANY, - ) - - @patch("subprocess.check_call") - @patch("samcli.lib.samlib.cloudformation_command.find_executable") - @patch("samcli.lib.samlib.cloudformation_command.GlobalConfig") - def test_must_add_sam_cli_info_to_execution_env_var_if_telemetry_is_on( - self, global_config_mock, find_executable_mock, check_call_mock - ): - installation_id = "testtest" - global_config_mock.return_value.installation_id = installation_id - global_config_mock.return_value.telemetry_enabled = True - - expected_env = os.environ.copy() - expected_env["AWS_EXECUTION_ENV"] = "SAM-" + installation_id - - find_executable_mock.return_value = "mycmd" - check_call_mock.return_value = True - execute_command("command", self.args, "/path/to/template") - - check_call_mock.assert_called() - kwargs = check_call_mock.call_args[1] - self.assertIn("env", kwargs) - self.assertEqual(kwargs["env"], expected_env) - - @patch("subprocess.check_call") - @patch("samcli.lib.samlib.cloudformation_command.find_executable") - @patch("samcli.lib.samlib.cloudformation_command.GlobalConfig") - def test_must_not_set_exec_env(self, global_config_mock, find_executable_mock, check_call_mock): - global_config_mock.return_value.telemetry_enabled = False - - # Expected to pass just a copy of the environment variables without modification - expected_env = os.environ.copy() - - find_executable_mock.return_value = "mycmd" - check_call_mock.return_value = True - execute_command("command", self.args, "/path/to/template") - - check_call_mock.assert_called() - kwargs = check_call_mock.call_args[1] - self.assertIn("env", kwargs) - self.assertEqual(kwargs["env"], expected_env) - - @patch("sys.exit") - @patch("subprocess.check_call") - @patch("samcli.lib.samlib.cloudformation_command.find_executable") - def test_command_must_exit_with_status_code(self, find_executable_mock, check_call_mock, exit_mock): - find_executable_mock.return_value = "mycmd" - check_call_mock.side_effect = CalledProcessError(2, "Error") - exit_mock.return_value = True - execute_command("command", self.args, None) - 
exit_mock.assert_called_with(2) - - -class TestFindExecutable(TestCase): - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_use_raw_name(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "Linux" - execname = "foo" - - find_executable(execname) - - self.assertEqual(popen_mock.mock_calls, [call([execname], stdout=PIPE, stderr=PIPE)]) - - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_use_name_with_cmd_extension_on_windows(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "windows" - execname = "foo" - expected = "foo.cmd" - - result = find_executable(execname) - self.assertEqual(result, expected) - - self.assertEqual(popen_mock.mock_calls, [call(["foo.cmd"], stdout=PIPE, stderr=PIPE)]) - - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_use_name_with_exe_extension_on_windows(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "windows" - execname = "foo" - expected = "foo.exe" - - popen_mock.side_effect = [OSError, "success"] # fail on .cmd extension - - result = find_executable(execname) - self.assertEqual(result, expected) - - self.assertEqual( - popen_mock.mock_calls, - [call(["foo.cmd"], stdout=PIPE, stderr=PIPE), call(["foo.exe"], stdout=PIPE, stderr=PIPE)], - ) - - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_use_name_with_no_extension_on_windows(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "windows" - execname = "foo" - expected = "foo" - - popen_mock.side_effect = [OSError, OSError, "success"] # fail on .cmd and .exe extension - - result = find_executable(execname) - self.assertEqual(result, expected) - - self.assertEqual( - popen_mock.mock_calls, - [ - call(["foo.cmd"], stdout=PIPE, stderr=PIPE), - call(["foo.exe"], stdout=PIPE, stderr=PIPE), - call(["foo"], stdout=PIPE, stderr=PIPE), - ], - ) - - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_raise_error_if_executable_not_found(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "windows" - execname = "foo" - - popen_mock.side_effect = [OSError, OSError, OSError, "success"] # fail on all executable names - - with self.assertRaises(OSError) as ctx: - find_executable(execname) - - expected = "Cannot find AWS CLI installation, was looking at executables with names: {}".format( - ["foo.cmd", "foo.exe", "foo"] - ) - self.assertEqual(expected, str(ctx.exception)) - - self.assertEqual( - popen_mock.mock_calls, - [ - call(["foo.cmd"], stdout=PIPE, stderr=PIPE), - call(["foo.exe"], stdout=PIPE, stderr=PIPE), - call(["foo"], stdout=PIPE, stderr=PIPE), - ], - ) diff --git a/tests/unit/lib/utils/test_time.py b/tests/unit/lib/utils/test_time.py index 14b879b0e7..df63d2cc81 100644 --- a/tests/unit/lib/utils/test_time.py +++ b/tests/unit/lib/utils/test_time.py @@ -1,8 +1,10 @@ +import time import datetime from unittest import TestCase -from samcli.lib.utils.time import to_timestamp, timestamp_to_iso, parse_date, to_utc +from samcli.lib.utils.time import to_timestamp, timestamp_to_iso, parse_date, to_utc, utc_to_timestamp +from dateutil.tz import tzutc class TestTimestampToIso(TestCase): @@ -26,6 +28,12 @@ def test_must_convert_to_timestamp(self): self.assertEqual(expected, to_timestamp(date)) + def test_convert_utc_to_timestamp(self): + timestamp = time.time() + utc = datetime.datetime.utcfromtimestamp(timestamp) + # compare in milliseconds + self.assertEqual(int(timestamp * 1000), 
utc_to_timestamp(utc))
+
 
 class TestToUtc(TestCase):
     def test_with_timezone(self):

From 250941cf6dff729b95d730aec948b47470c156f0 Mon Sep 17 00:00:00 2001
From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com>
Date: Wed, 13 Nov 2019 11:07:25 -0800
Subject: [PATCH 02/45] design: samconfig (#1503)

* design: sam-app-config

- an app level configuration that provides parameter pass throughs for
  sam cli commands

* design: move config file location to project root

* design: add open questions

* fix: add `identifiers` to use configuration file.

* fix: address comments

- todo add more scope

* fix: address future and scope

* phases: implementation phases

* fix: add docs section for samconfig
---
 designs/sam-config.md   | 393 ++++++++++++++++++++++++++++++++++++++++
 docs/sam-config-docs.md |  97 ++++++++++
 2 files changed, 490 insertions(+)
 create mode 100644 designs/sam-config.md
 create mode 100644 docs/sam-config-docs.md

diff --git a/designs/sam-config.md b/designs/sam-config.md
new file mode 100644
index 0000000000..39b2bdb91a
--- /dev/null
+++ b/designs/sam-config.md
@@ -0,0 +1,393 @@
+SAM Config
+====================================
+
+
+What is the problem?
+--------------------
+
+Today users of SAM CLI need to invoke the CLI directly with all parameters supplied to its commands.
+
+For example: `sam build --use-container --debug`
+
+But often, during the lifecycle of building and deploying a serverless application, the same commands get run repeatedly to build, package and deploy before the application reaches its final form.
+
+These CLI commands are often long and have many changing parts.
+
+Have a look at the following series of workflows:
+
+
+* `sam build --use-container --template ... --parameter-overrides=... --skip-pull-image --manifest ...`
+
+* `sam package --s3-bucket ... --template-file ... --output-template-file ... --s3-prefix ... --kms-key-id ...`
+
+* `sam deploy --template-file ... --stack-name ... --capabilities ... --tags ... --parameter-overrides ... --kms-key-id ...`
+
+If this could be condensed into a series of workflows that look like:
+
+* `sam build`
+* `sam package`
+* `sam deploy`
+
+That would be a huge user experience win.
+
+Tenets
+-------------------------------
+
+* Resolution of command line parameters should always favor explicit over implicit. A native command line parameter specified directly on the command line should override a parameter specified in the configuration file.
+
+What will be changed?
+---------------------
+
+The suite of commands supported by SAM CLI would be aided by looking for a configuration file that is located under the `.aws-sam/` directory at the project root (where `template.yaml` is located) by default.
+
+`.aws-sam/samconfig.toml`
+
+
+This configuration file would be used for specifying the parameters that each of the SAM CLI commands uses and would be in TOML format.
+
+Running a SAM CLI command now automatically looks for the `.aws-sam/samconfig.toml` file and, if it finds it, passes the configured parameters through to the CLI.
+
+```
+sam build
+Default Config file location: .aws-sam/samconfig.toml
+..
+..
+..
+```
+
+Why samconfig under `.aws-sam`
+---------------------------------
+
+The `.aws-sam` directory within the project directory is created with normal 755 permissions by default, without any special permissions. `sam build` only creates a build directory within `.aws-sam` as `.aws-sam/build`. This directory is erased and re-built on every build, but the top-level directory is left unaffected.
+
+The `.gitignore` shipped with the init apps also only ignores `.aws-sam/build` and nothing else.
+
+
+Config file versioning
+-----------------------
+
+The configuration file `samconfig.toml` will come with a top-level version key that specifies the version of the configuration file. This version can then be used to determine if a given configuration file works with a given version of SAM CLI.
+
+It also paves the way forward for when major changes need to be made to the configuration file and a version bump is required.
+
+```
+version = 0.1
+```
+
+
+Overrides
+----------
+
+The default location of `.aws-sam/samconfig.toml` can be replaced by setting an environment variable called `SAM_CLI_CONFIG`.
+
+`
+export SAM_CLI_CONFIG=~/Users/username/mysamconfig.toml
+`
+
+Users can pass `--env` to select the section of the configuration file that will be scanned to pass parameters through.
+
+By default, the `default` section of the configuration is chosen.
+
+```
+version = 0.1
+
+[default]
+
+[default.build]
+[default.build.parameters]
+profile="srirammv"
+debug=true
+skip_pull_image=true
+use_container=true
+
+[default.package]
+[default.package.parameters]
+profile="srirammv"
+region="us-east-1"
+s3_bucket="sam-bucket"
+output_template_file="packaged.yaml"
+
+[default.deploy]
+[default.deploy.parameters]
+stack_name="using_config_file"
+capabilities="CAPABILITY_IAM"
+region="us-east-1"
+profile="srirammv"
+
+```
+
+If a custom environment is specified, that environment's section is looked up in the `samconfig.toml` file instead.
+
+`sam build --env dev`
+
+Sample configuration file:
+
+```
+version = 0.1
+
+[default.build.parameters]
+profile="srirammv"
+debug=true
+skip_pull_image=true
+use_container=true
+
+[default.package.parameters]
+profile="srirammv"
+region="us-east-1"
+s3_bucket="sam-bucket"
+output_template_file="packaged.yaml"
+
+[default.deploy.parameters]
+stack_name="using_config_file"
+capabilities="CAPABILITY_IAM"
+region="us-east-1"
+profile="srirammv"
+
+
+[dev.build.parameters]
+profile="srirammv"
+debug=true
+skip_pull_image=true
+use_container=true
+
+[dev.package.parameters]
+profile="srirammv"
+region="us-east-1"
+s3_bucket="sam-bucket"
+output_template_file="packaged.yaml"
+
+[dev.deploy.parameters]
+stack_name="using_config_file"
+capabilities="CAPABILITY_IAM"
+region="us-east-1"
+profile="srirammv"
+```
+
+
+The configuration file could then potentially be initialized:
+
+* all sam init projects could come with a sample samconfig file
+
+Showcase configuration values
+-----------------------------
+
+On running SAM CLI commands with `--debug`, SAM CLI can output the values read from the configuration file. This way the user is always informed of the total set of parameters being used by SAM CLI when they need to debug what parameters are actually being passed to the `sam` commands.
+
+
+Config file in Git Repos
+------------------------
+
+The `samconfig.toml` file can be checked into a git repo, so that it is ready to use on cloning the repo. If the configuration file does not contain all the necessary parameters, the command fails just as if one had specified the same arguments on the command line directly.
+
+Optionally, if multiple configuration files are checked in, one can change the `SAM_CLI_CONFIG` environment variable to point to a different configuration file.
+
+`--env` can also be passed in to deal with custom environments defined in the configuration file.
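+
+To make the section layout above concrete, here is a small illustrative sketch, not part of the design itself, of how a command could read its parameters for a given environment. The helper name is hypothetical; it only assumes the `toml` package listed as a new dependency below and the `[<env>.<command>.parameters]` layout shown in the samples.
+
+```
+import toml
+
+
+def read_config_parameters(config_path=".aws-sam/samconfig.toml", env="default", command="build"):
+    """Return the [<env>.<command>.parameters] table, or an empty dict if it is absent."""
+    with open(config_path) as f:
+        document = toml.load(f)
+    return document.get(env, {}).get(command, {}).get("parameters", {})
+
+
+# For example, the parameters that `sam build --env dev` would pick up:
+print(read_config_parameters(env="dev", command="build"))
+```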
+
+Error Messages
+---------------
+
+When a custom env is passed in and no such environment is found, the error message can highlight all the environments that were found in the given configuration file.
+
+`
+sam build --env devo
+Error: Environment 'devo' was not found in .aws-sam/samconfig.toml. Possible environments are: ['dev', 'prod']
+`
+
+Future
+----------
+
+In the future, based on the file names of the configuration files, the environment could also be inferred.
+
+```
+.aws-sam/samconfig-dev.toml
+.aws-sam/samconfig-beta.toml
+.aws-sam/samconfig-prod.toml
+```
+
+`--env dev` will refer to `.aws-sam/samconfig-dev.toml`, and so on.
+
+If multiple default file locations are added to the lookup order for `samconfig.toml`, multiple config files can be merged together.
+
+For example, if the hierarchy of lookup for configuration files is: $SAM_CLI_CONFIG -> `.aws-sam/samconfig.toml` -> `~/.aws-sam/samconfig.toml`,
+
+the resulting configuration would be a merge of all the sections that are relevant for the command that was run.
+
+This way, configuration that might be global can be placed in `~/.aws-sam/samconfig.toml`:
+
+```
+version = 0.1
+[default.build.parameters]
+use_container = true
+skip_pull_image = true
+```
+
+Project-specific configuration placed in `.aws-sam/samconfig.toml`:
+
+```
+version = 0.1
+[default.build.parameters]
+parameter_overrides="ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro"
+```
+
+Eventual merged configuration read in-memory during `sam build`:
+
+```
+version = 0.1
+[default.build.parameters]
+use_container = true
+skip_pull_image = true
+parameter_overrides="ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro"
+```
+
+Open Questions
+-------------------------------
+
+* Potentially, every sam command could have functionality to export a series of command line parameters into a configuration file.
+
+
+Out-of-Scope
+------------
+
+* Not focusing on a global configuration. SAM CLI already has a notion of a global config at `~/.aws-sam/metadata.json`
+
+User Experience Walkthrough
+---------------------------
+
+Once a configuration file is appropriately populated, day-to-day workflows per application developed with SAM CLI become much simpler.
+
+* `sam build` -> `sam package` -> `sam deploy`
+* `sam build` -> `sam local invoke`
+* `sam build` -> `sam package` -> `sam publish`
+
+Implementation
+==============
+
+CLI Changes
+-----------
+
+A new command line argument, `--env`, is added per command to specify a non-default environment section within a config file.
+
+
+### Breaking Change
+
+* No breaking changes to the CLI; in the absence of the configuration file, the CLI continues to work as before.
+
+Design
+------
+
+*Explain how this feature will be implemented. Highlight the components
+of your implementation, relationships* *between components, constraints,
+etc.*
+
+A custom decorator built on `click.option` reads the sections of a configuration file that are pertinent to a particular command and populates click's `default_map` context.
+
+The configuration file parser is a custom provider that can be made to understand any configuration file format in a pluggable manner.
+
+This decorator benefits from the same type checking that some SAM CLI parameters already use.
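+
+For example, a command would opt in by stacking the new decorator on top of its existing click command. The snippet below is a minimal sketch of that wiring (the `--use-container` flag is used only as an illustration):
+
+```
+import click
+from samcli.cli.cli_config_file import configuration_option, TomlProvider
+
+
+@configuration_option(provider=TomlProvider(section="parameters"))
+@click.command("build", short_help="Build your Lambda function code")
+@click.option("--use-container", is_flag=True)
+def cli(use_container):
+    # When a samconfig.toml is present, the decorator fills click's default_map
+    # from the section matching this command, so --use-container picks up the
+    # configured value unless it is passed explicitly on the command line.
+    click.echo(use_container)
+```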
+
+A custom callback function (`configuration_callback`) for the click option takes in a custom configuration parser (`provider`) and has rules in place for how the corresponding configuration is retrieved and which parts of it the configuration parser has access to read.
+
+```
+provider = attrs.pop("provider", TomlProvider(rules=DefaultRules(), command="build", section="parameters"))
+attrs["type"] = click.STRING
+saved_callback = attrs.pop("callback", None)
+partial_callback = functools.partial(configuration_callback, cmd_name, option_name, env_name, saved_callback, provider)
+attrs["callback"] = partial_callback
+click.option(*param_decls, **attrs)(f)
+
+```
+
+Phases
+------
+
+The design can be built in phases.
+
+* No option to specify configuration file or env name
+* Specify configuration file with an environment variable
+* Read `--env` to make sure we can select an appropriate portion of the configuration file.
+
+
+`.samrc` Changes
+----------------
+
+This design emphasizes parameter pass-throughs with a configuration file and does not change the core workings of SAM CLI itself. SAM CLI continues to work just as it did, with efficiency gains in usability.
+
+Security
+--------
+
+*Tip: How does this change impact security? Answer the following
+questions to help answer this question better:*
+
+**What new dependencies (libraries/cli) does this change require?**
+
+toml
+
+**What other Docker container images are you using?**
+
+N/A
+
+**Are you creating a new HTTP endpoint? If so explain how it will be
+created & used**
+
+N/A
+
+**Are you connecting to a remote API? If so explain how is this
+connection secured**
+
+N/A
+
+**Are you reading/writing to a temporary folder? If so, what is this
+used for and when do you clean up?**
+
+N/A. But we do read from a configuration file that is either at a default location or specified by the user via an environment variable.
+
+**How do you validate new .samrc configuration?**
+
+
+
+What is your Testing Plan (QA)?
+===============================
+
+Goal
+----
+
+Configuration files are tested alongside SAM CLI and are expected to work seamlessly, with meaningful error messages to steer users towards using the configuration file to manage their app workflows.
+
+Pre-requisites
+--------------
+
+N/A
+
+Test Scenarios/Cases
+--------------------
+
+* Integration tests for every command with `env` based overrides and command line overrides, on an existing sam configuration file and on a custom configuration file supplied through environment variables.
+* Tested to work on all platforms
+
+Expected Results
+----------------
+* Works on all platforms
+* Resolution of parameters follows:
+ * CLI parameters -> Config file parameters + +Documentation Changes +===================== + +* Addition of a new `--env` parameter per command + +Related Open Issues +============ +* https://github.com/awslabs/aws-sam-cli/issues/975 +* https://github.com/awslabs/aws-sam-cli/issues/748 + +Task Breakdown +============== + +- \[x\] Send a Pull Request with this design document +- \[ \] Build the command line interface +- \[ \] Build the underlying library +- \[ \] Unit tests +- \[ \] Functional Tests +- \[ \] Integration tests +- \[ \] Run all tests on Windows +- \[ \] Update documentation diff --git a/docs/sam-config-docs.md b/docs/sam-config-docs.md new file mode 100644 index 0000000000..944a78c288 --- /dev/null +++ b/docs/sam-config-docs.md @@ -0,0 +1,97 @@ +`samconfig.toml` +-------------------------- + +This doc goes through the different sections of the configuration file and explains them + +``` +version = 0.1 + +[default.build.paramaters] +profile="srirammv" +debug=true +skip_pull_image=true +use_container=true + +[default.local_start_api.paramaters] +port=5400 + +[default.package.parameters] +profile="srirammv" +region="us-east-1" +s3_bucket="sam-bucket" +output_template_file="packaged.yaml" + +[default.deploy.parameters] +stack_name="using_config_file" +capabilities="CAPABILITY_IAM" +region="us-east-1" +profile="srirammv" +``` + +Version +------- + +`version` denotes the version of the `samconfig.toml` configuration file + +Env +---------- + +The default chosen env (environment) is denoted as `default` + +Command +----------- +The nested sections under `default` are reflected as `default.[SAM COMMAND]` + +these commands should not have spaces or hyphens, both " " and "-" will be converted to underscores "_" + +Therefore the sections for commands would like + +``` +[default.init] +[default.validate] +[default.build] +[default.local_generate_event_s3_delete] +[default.local_invoke] +[default.local_start_api] +[default.local_start_lambda] +[default.package] +[default.deploy] +[default.logs] +[default.publish] +``` + +Note: +sam local generate-event has a ton of options within it, but the above rules apply. + +Some examples: + +``` +[default.local.generate_event_alexa_skills_kit_intent_answer] +[default.local.generate_event_codepipeline_job] +``` + +Parameters +---------- +Since this configuration file is TOML, the parameters have types built-in. + +### Specifying a number + +``` +[default.local_start_api.paramaters] +port=5400 +``` + +### Specifying a string + +``` +[default.deploy.parameters] +stack_name="using_config_file" +``` + +### Specifying a flag + +``` +[default.build.parameters] +debug=true +``` + From c7fffc4c6b4c8305ce4c4e445cbf41f584dfbe1f Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Thu, 14 Nov 2019 21:33:09 -0800 Subject: [PATCH 03/45] feat: configuration file for sam cli commands (#1502) * feat: configuration file for sam cli commands - add passthroughs to the CLI command line interface via a configuration file - local defaults are set at: `.aws-sam/samconfig.toml` - This commit contains code copied and modified from https://github.com/phha/click_config_file/blob/master/click_config_file.py under MIT license * fix: add identifier name argument instead of configuration files. 
* rework: change command key construction logic - descope environment variables - move samconfig.toml back to project root * docstring: TomlProvider class * fix: allow spaces on certain deploy options * fix: REGEX for parameter overrides * fix: infer config path name from ctx * fix: set abs path to `ctx.config_path` * fix: set dirname for config_path not template file name. --- requirements/base.txt | 3 +- requirements/isolated.txt | 1 + samcli/cli/cli_config_file.py | 188 ++++++++++++++++++ samcli/cli/types.py | 21 +- samcli/commands/_utils/options.py | 11 +- samcli/commands/build/command.py | 6 +- samcli/commands/deploy/command.py | 13 +- samcli/commands/init/__init__.py | 2 + .../local/generate_event/event_generation.py | 8 +- samcli/commands/local/invoke/cli.py | 6 +- samcli/commands/local/start_api/cli.py | 6 +- samcli/commands/local/start_lambda/cli.py | 6 +- samcli/commands/logs/command.py | 2 + samcli/commands/package/command.py | 20 +- samcli/commands/publish/command.py | 6 +- samcli/commands/validate/validate.py | 6 +- tests/unit/cli/test_cli_config_file.py | 140 +++++++++++++ tests/unit/cli/test_types.py | 56 +----- tests/unit/commands/_utils/test_options.py | 21 ++ .../generate_event/test_event_generation.py | 6 +- 20 files changed, 437 insertions(+), 91 deletions(-) create mode 100644 samcli/cli/cli_config_file.py create mode 100644 tests/unit/cli/test_cli_config_file.py diff --git a/requirements/base.txt b/requirements/base.txt index 8cd89436f2..715875da2a 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -13,4 +13,5 @@ requests==2.22.0 serverlessrepo==0.1.9 aws_lambda_builders==0.6.0 # https://github.com/mhammond/pywin32/issues/1439 -pywin32 < 226; sys_platform == 'win32' \ No newline at end of file +pywin32 < 226; sys_platform == 'win32' +toml==0.10.0 \ No newline at end of file diff --git a/requirements/isolated.txt b/requirements/isolated.txt index 0dbc04bb8e..f04fc7aa6f 100644 --- a/requirements/isolated.txt +++ b/requirements/isolated.txt @@ -32,6 +32,7 @@ requests==2.22.0 s3transfer==0.2.1 serverlessrepo==0.1.9 six==1.11.0 +toml==0.10.0 tzlocal==2.0.0 urllib3==1.25.3 websocket-client==0.56.0 diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py new file mode 100644 index 0000000000..d4cb45aec3 --- /dev/null +++ b/samcli/cli/cli_config_file.py @@ -0,0 +1,188 @@ +""" +CLI configuration decorator to use TOML configuration files for click commands. +""" + +## This section contains code copied and modified from [click_config_file][https://github.com/phha/click_config_file/blob/master/click_config_file.py] +## SPDX-License-Identifier: MIT + +import functools +import os +import logging + +import click +import toml + +__all__ = ("TomlProvider", "configuration_option", "get_ctx_defaults") + +LOG = logging.getLogger("samcli") +DEFAULT_CONFIG_FILE_NAME = "samconfig.toml" +DEFAULT_IDENTIFER = "default" + + +class TomlProvider: + """ + A parser for toml configuration files + :param cmd: sam command name as defined by click + :param section: section defined in the configuration file nested within `cmd` + """ + + def __init__(self, section=None): + self.section = section + + def __call__(self, file_path, config_env, cmd_name): + """ + Get resolved config based on the `file_path` for the configuration file, + `config_env` targeted inside the config file and corresponding `cmd_name` + as denoted by `click`. + + :param file_path: The path to the configuration file + :param config_env: The name of the sectional config_env within configuration file. 
+ :param cmd_name: sam command name as defined by click + :returns dictionary containing the configuration parameters under specified config_env + """ + resolved_config = {} + try: + config = toml.load(file_path) + except Exception as ex: + LOG.error("Error reading configuration file :%s %s", file_path, str(ex)) + return resolved_config + if self.section: + try: + resolved_config = self._get_config_env(config, config_env)[cmd_name][self.section] + except KeyError: + LOG.debug( + "Error reading configuration file at %s with config_env %s, command %s, section %s", + file_path, + config_env, + cmd_name, + self.section, + ) + return resolved_config + + def _get_config_env(self, config, config_env): + """ + + :param config: loaded TOML configuration file into dictionary representation + :param config_env: top level section defined within TOML configuration file + :return: + """ + return config.get(config_env, config.get(DEFAULT_IDENTIFER, {})) + + +def configuration_callback(cmd_name, option_name, config_env_name, saved_callback, provider, ctx, param, value): + """ + Callback for reading the config file. + + Also takes care of calling user specified custom callback afterwards. + + :param cmd_name: `sam` command name derived from click. + :param option_name: The name of the option. This is used for error messages. + :param config_env_name: `top` level section within configuration file + :param saved_callback: User-specified callback to be called later. + :param provider: A callable that parses the configuration file and returns a dictionary + of the configuration parameters. Will be called as + `provider(file_path, config_env, cmd_name)`. + :param ctx: Click context + :param param: Click parameter + :param value: Specified value for config_env + :returns specified callback or the specified value for config_env. + """ + + # ctx, param and value are default arguments for click specified callbacks. + ctx.default_map = ctx.default_map or {} + cmd_name = cmd_name or ctx.info_name + param.default = DEFAULT_IDENTIFER + config_env_name = value or config_env_name + config = get_ctx_defaults(cmd_name, provider, ctx, config_env_name=config_env_name) + ctx.default_map.update(config) + + return saved_callback(ctx, param, value) if saved_callback else value + + +def get_ctx_defaults(cmd_name, provider, ctx, config_env_name=DEFAULT_IDENTIFER): + """ + Get the set of the parameters that are needed to be set into the click command. + This function also figures out the command name by looking up current click context's parent + and constructing the parsed command name that is used in default configuration file. + If a given cmd_name is start-api, the parsed name is "local_start_api". + provider is called with `config_file`, `config_env_name` and `parsed_cmd_name`. + + :param cmd_name: `sam` command name + :param provider: provider to be called for reading configuration file + :param ctx: Click context + :param config_env_name: config-env within configuration file + :return: dictionary of defaults for parameters + """ + + cwd = getattr(ctx, "config_path", None) + config_file = os.path.join(cwd if cwd else os.getcwd(), DEFAULT_CONFIG_FILE_NAME) + config = {} + if os.path.isfile(config_file): + LOG.debug("Config file location: %s", os.path.abspath(config_file)) + + # Find parent of current context + _parent = ctx.parent + _cmd_names = [] + # Need to find the total set of commands that current command is part of. 
+ if cmd_name != ctx.info_name: + _cmd_names = [cmd_name] + _cmd_names.append(ctx.info_name) + # Go through all parents till a parent of a context exists. + while _parent.parent: + info_name = _parent.info_name + _cmd_names.append(info_name) + _parent = _parent.parent + + # construct a parsed name that is of the format: a_b_c_d + parsed_cmd_name = "_".join(reversed([cmd.replace("-", "_").replace(" ", "_") for cmd in _cmd_names])) + + config = provider(config_file, config_env_name, parsed_cmd_name) + + return config + + +def configuration_option(*param_decls, **attrs): + """ + Adds configuration file support to a click application. + + This will create an option of type `STRING` expecting the config_env in the + configuration file, by default this config_env is `default`. When specified, + the requisite portion of the configuration file is considered as the + source of truth. + + The default name of the option is `--config-env`. + + This decorator accepts the same arguments as `click.option`. + In addition, the following keyword arguments are available: + :param cmd_name: The command name. Default: `ctx.info_name` + :param config_env_name: The config_env name. This is used to determine which part of the configuration + needs to be read. + :param provider: A callable that parses the configuration file and returns a dictionary + of the configuration parameters. Will be called as + `provider(file_path, config_env, cmd_name) + """ + param_decls = param_decls or ("--config-env",) + option_name = param_decls[0] + + def decorator(f): + + attrs.setdefault("is_eager", True) + attrs.setdefault("help", "Read config-env from Configuration File.") + attrs.setdefault("expose_value", False) + # --config-env is hidden and can potentially be opened up in the future. + attrs.setdefault("hidden", True) + # explicitly ignore values passed to --config-env, can be opened up in the future. + config_env_name = DEFAULT_IDENTIFER + provider = attrs.pop("provider") + attrs["type"] = click.STRING + saved_callback = attrs.pop("callback", None) + partial_callback = functools.partial( + configuration_callback, None, option_name, config_env_name, saved_callback, provider + ) + attrs["callback"] = partial_callback + return click.option(*param_decls, **attrs)(f) + + return decorator + + +# End section copied from [[click_config_file][https://github.com/phha/click_config_file/blob/master/click_config_file.py] diff --git a/samcli/cli/types.py b/samcli/cli/types.py index e22b2fec26..58ba341ba6 100644 --- a/samcli/cli/types.py +++ b/samcli/cli/types.py @@ -18,9 +18,17 @@ class CfnParameterOverridesType(click.ParamType): __EXAMPLE_1 = "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro" __EXAMPLE_2 = "KeyPairName=MyKey InstanceType=t1.micro" - # Regex that parses CloudFormation parameter key-value pairs: https://regex101.com/r/xqfSjW/2 - _pattern_1 = r"(?:ParameterKey=([A-Za-z0-9\"]+),ParameterValue=(\"(?:\\.|[^\"\\]+)*\"|(?:\\.|[^ \"\\]+)+))" - _pattern_2 = r"(?:([A-Za-z0-9\"]+)=(\"(?:\\.|[^\"\\]+)*\"|(?:\\.|[^ \"\\]+)+))" + # Regex that parses CloudFormation parameter key-value pairs: + # https://regex101.com/r/xqfSjW/2 + # https://regex101.com/r/xqfSjW/5 + + # If Both ParameterKey pattern and KeyPairName=MyKey, should not be fixed. if they are it can + # result in unpredicatable behavior. 
+ KEY_REGEX = '([A-Za-z0-9\\"]+)' + VALUE_REGEX = '(\\"(?:\\\\.|[^\\"\\\\]+)*\\"|(?:\\\\.|[^ \\"\\\\]+)+))' + + _pattern_1 = r"(?:ParameterKey={key},ParameterValue={value}".format(key=KEY_REGEX, value=VALUE_REGEX) + _pattern_2 = r"(?:(?: ){key}={value}".format(key=KEY_REGEX, value=VALUE_REGEX) ordered_pattern_match = [_pattern_1, _pattern_2] @@ -34,7 +42,11 @@ def convert(self, value, param, ctx): if value == ("",): return result + value = (value,) if isinstance(value, str) else value for val in value: + val.strip() + # Add empty string to start of the string to help match `_pattern2` + val = " " + val try: # NOTE(TheSriram): find the first regex that matched. @@ -159,6 +171,9 @@ def convert(self, value, param, ctx): if value == ("",): return result + # if value comes in a via configuration file, we should still convert it. + # value = (value, ) if not isinstance(value, tuple) else value + for val in value: groups = re.findall(self._pattern, val) diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 71152834d4..bc8699779c 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -7,6 +7,7 @@ from functools import partial import click +from click.types import FuncParamType from samcli.cli.types import CfnParameterOverridesType, CfnMetadataType, CfnTags from samcli.commands._utils.custom_options.option_nargs import OptionNargs @@ -43,8 +44,10 @@ def get_or_default_template_file_name(ctx, param, provided_value, include_build) if os.path.exists(option): provided_value = option break - result = os.path.abspath(provided_value) + + if ctx: + setattr(ctx, "config_path", os.path.dirname(result)) LOG.debug("Using SAM Template at %s", result) return result @@ -74,6 +77,7 @@ def template_click_option(include_build=True): Click Option for template option """ return click.option( + "--template-file", "--template", "-t", default=_TEMPLATE_OPTION_DEFAULT_VALUE, @@ -81,6 +85,7 @@ def template_click_option(include_build=True): envvar="SAM_TEMPLATE_FILE", callback=partial(get_or_default_template_file_name, include_build=include_build), show_default=True, + is_eager=True, help="AWS SAM template file", ) @@ -143,7 +148,7 @@ def capabilities_click_option(): return click.option( "--capabilities", cls=OptionNargs, - type=click.STRING, + type=FuncParamType(lambda value: value.split(" ")), required=True, help="A list of capabilities that you must specify" "before AWS Cloudformation can create certain stacks. 
Some stack tem-" @@ -182,7 +187,7 @@ def notification_arns_click_option(): return click.option( "--notification-arns", cls=OptionNargs, - type=click.STRING, + type=FuncParamType(lambda value: value.split(" ")), required=False, help="Amazon Simple Notification Service topic" "Amazon Resource Names (ARNs) that AWS CloudFormation associates with" diff --git a/samcli/commands/build/command.py b/samcli/commands/build/command.py index b6f9c67f9f..31507a2f32 100644 --- a/samcli/commands/build/command.py +++ b/samcli/commands/build/command.py @@ -10,6 +10,7 @@ parameter_override_option from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options from samcli.lib.telemetry.metrics import track_command +from samcli.cli.cli_config_file import configuration_option, TomlProvider LOG = logging.getLogger(__name__) @@ -53,6 +54,7 @@ """ +@configuration_option(provider=TomlProvider(section="parameters")) @click.command("build", help=HELP_TEXT, short_help="Build your Lambda function code") @click.option('--build-dir', '-b', default=DEFAULT_BUILD_DIR, @@ -82,7 +84,7 @@ @track_command def cli(ctx, function_identifier, - template, + template_file, base_dir, build_dir, use_container, @@ -95,7 +97,7 @@ def cli(ctx, mode = _get_mode_value_from_envvar("SAM_BUILD_MODE", choices=["debug"]) - do_cli(function_identifier, template, base_dir, build_dir, True, use_container, manifest, docker_network, + do_cli(function_identifier, template_file, base_dir, build_dir, True, use_container, manifest, docker_network, skip_pull_image, parameter_overrides, mode) # pragma: no cover diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 68314c48b3..e31a336f5d 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -4,12 +4,13 @@ import click - +from samcli.cli.cli_config_file import configuration_option, TomlProvider from samcli.commands._utils.options import ( parameter_override_option, capabilities_override_option, tags_override_option, notification_arns_override_option, + template_click_option, ) from samcli.cli.main import pass_context, common_options, aws_creds_options from samcli.lib.telemetry.metrics import track_command @@ -27,20 +28,14 @@ """ +@configuration_option(provider=TomlProvider(section="parameters")) @click.command( "deploy", short_help=SHORT_HELP, context_settings={"ignore_unknown_options": False, "allow_interspersed_args": True, "allow_extra_args": True}, help=HELP_TEXT, ) -@click.option( - "--template-file", - "--template", - "-t", - required=True, - type=click.Path(), - help="The path where your AWS SAM template is located", -) +@template_click_option(include_build=False) @click.option( "--stack-name", required=True, diff --git a/samcli/commands/init/__init__.py b/samcli/commands/init/__init__.py index 798e341366..d738abf1a2 100644 --- a/samcli/commands/init/__init__.py +++ b/samcli/commands/init/__init__.py @@ -8,6 +8,7 @@ import click +from samcli.cli.cli_config_file import configuration_option, TomlProvider from samcli.commands.exceptions import UserException from samcli.cli.main import pass_context, common_options, global_cfg from samcli.local.common.runtime_template import RUNTIMES, SUPPORTED_DEP_MANAGERS @@ -55,6 +56,7 @@ """ +@configuration_option(provider=TomlProvider(section="parameters")) @click.command( "init", help=HELP_TEXT, diff --git a/samcli/commands/local/generate_event/event_generation.py b/samcli/commands/local/generate_event/event_generation.py index 12379c5892..e45ea2cd43 100644 --- 
a/samcli/commands/local/generate_event/event_generation.py +++ b/samcli/commands/local/generate_event/event_generation.py @@ -3,10 +3,12 @@ """ import functools + import click -from samcli.cli.options import debug_option import samcli.commands.local.lib.generated_sample_events.events as events +from samcli.cli.cli_config_file import TomlProvider, get_ctx_defaults +from samcli.cli.options import debug_option from samcli.lib.telemetry.metrics import track_command @@ -150,9 +152,13 @@ def get_command(self, ctx, cmd_name): command_callback = functools.partial( self.cmd_implementation, self.events_lib, self.top_level_cmd_name, cmd_name ) + + config = get_ctx_defaults(cmd_name=cmd_name, provider=TomlProvider(section="parameters"), ctx=ctx) + cmd = click.Command( name=cmd_name, short_help=self.subcmd_definition[cmd_name]["help"], + context_settings={"default_map": config}, params=parameters, callback=command_callback, ) diff --git a/samcli/commands/local/invoke/cli.py b/samcli/commands/local/invoke/cli.py index 311420b9a7..15705935cb 100644 --- a/samcli/commands/local/invoke/cli.py +++ b/samcli/commands/local/invoke/cli.py @@ -8,6 +8,7 @@ from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options from samcli.commands.local.cli_common.options import invoke_common_options from samcli.lib.telemetry.metrics import track_command +from samcli.cli.cli_config_file import configuration_option, TomlProvider LOG = logging.getLogger(__name__) @@ -30,6 +31,7 @@ @click.command("invoke", help=HELP_TEXT, short_help="Invokes a local Lambda function once.") +@configuration_option(provider=TomlProvider(section="parameters")) @click.option( "--event", "-e", @@ -47,7 +49,7 @@ def cli( ctx, function_identifier, - template, + template_file, event, no_event, env_vars, @@ -68,7 +70,7 @@ def cli( do_cli( ctx, function_identifier, - template, + template_file, event, no_event, env_vars, diff --git a/samcli/commands/local/start_api/cli.py b/samcli/commands/local/start_api/cli.py index 83f699e33b..bfb7447fbe 100644 --- a/samcli/commands/local/start_api/cli.py +++ b/samcli/commands/local/start_api/cli.py @@ -8,6 +8,7 @@ from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options from samcli.commands.local.cli_common.options import invoke_common_options, service_common_options from samcli.lib.telemetry.metrics import track_command +from samcli.cli.cli_config_file import configuration_option, TomlProvider LOG = logging.getLogger(__name__) @@ -31,6 +32,7 @@ short_help="Sets up a local endpoint you can use to test your API. 
Supports hot-reloading " "so you don't need to restart this service when you make changes to your function.", ) +@configuration_option(provider=TomlProvider(section="parameters")) @service_common_options(3000) @click.option( "--static-dir", @@ -50,7 +52,7 @@ def cli( port, static_dir, # Common Options for Lambda Invoke - template, + template_file, env_vars, debug_port, debug_args, @@ -70,7 +72,7 @@ def cli( host, port, static_dir, - template, + template_file, env_vars, debug_port, debug_args, diff --git a/samcli/commands/local/start_lambda/cli.py b/samcli/commands/local/start_lambda/cli.py index b607febe2e..1e171fa5e7 100644 --- a/samcli/commands/local/start_lambda/cli.py +++ b/samcli/commands/local/start_lambda/cli.py @@ -8,6 +8,7 @@ from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options from samcli.commands.local.cli_common.options import invoke_common_options, service_common_options from samcli.lib.telemetry.metrics import track_command +from samcli.cli.cli_config_file import configuration_option, TomlProvider LOG = logging.getLogger(__name__) @@ -49,6 +50,7 @@ help=HELP_TEXT, short_help="Starts a local endpoint you can use to invoke your local Lambda functions.", ) +@configuration_option(provider=TomlProvider(section="parameters")) @service_common_options(3001) @invoke_common_options @cli_framework_options @@ -61,7 +63,7 @@ def cli( host, port, # Common Options for Lambda Invoke - template, + template_file, env_vars, debug_port, debug_args, @@ -80,7 +82,7 @@ def cli( ctx, host, port, - template, + template_file, env_vars, debug_port, debug_args, diff --git a/samcli/commands/logs/command.py b/samcli/commands/logs/command.py index f6b479cb46..9dd1f6620f 100644 --- a/samcli/commands/logs/command.py +++ b/samcli/commands/logs/command.py @@ -7,6 +7,7 @@ from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options from samcli.lib.telemetry.metrics import track_command +from samcli.cli.cli_config_file import configuration_option, TomlProvider LOG = logging.getLogger(__name__) @@ -32,6 +33,7 @@ @click.command("logs", help=HELP_TEXT, short_help="Fetch logs for a function") +@configuration_option(provider=TomlProvider(section="parameters")) @click.option( "--name", "-n", diff --git a/samcli/commands/package/command.py b/samcli/commands/package/command.py index 3966bd2460..5d6e289aba 100644 --- a/samcli/commands/package/command.py +++ b/samcli/commands/package/command.py @@ -5,11 +5,13 @@ import click +from samcli.cli.cli_config_file import TomlProvider, configuration_option from samcli.cli.main import pass_context, common_options, aws_creds_options from samcli.commands._utils.options import ( metadata_override_option, _TEMPLATE_OPTION_DEFAULT_VALUE, get_or_default_template_file_name, + template_click_option, ) from samcli.commands._utils.resources import resources_generator from samcli.lib.telemetry.metrics import track_command @@ -40,18 +42,8 @@ def resources_and_properties_help_string(): @click.command("package", short_help=SHORT_HELP, help=HELP_TEXT, context_settings=dict(max_content_width=120)) -# TODO(TheSriram): Move to template_common_option across aws-sam-cli -@click.option( - "--template", - "--template-file", - "-t", - default=_TEMPLATE_OPTION_DEFAULT_VALUE, - type=click.Path(), - envvar="SAM_TEMPLATE_FILE", - callback=partial(get_or_default_template_file_name, include_build=True), - show_default=True, - help="AWS SAM template file", -) 
+@configuration_option(provider=TomlProvider(section="parameters")) +@template_click_option(include_build=True) @click.option( "--s3-bucket", required=True, @@ -97,12 +89,12 @@ def resources_and_properties_help_string(): @aws_creds_options @pass_context @track_command -def cli(ctx, template, s3_bucket, s3_prefix, kms_key_id, output_template_file, use_json, force_upload, metadata): +def cli(ctx, template_file, s3_bucket, s3_prefix, kms_key_id, output_template_file, use_json, force_upload, metadata): # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing do_cli( - template, + template_file, s3_bucket, s3_prefix, kms_key_id, diff --git a/samcli/commands/publish/command.py b/samcli/commands/publish/command.py index 643895327e..54802aec5c 100644 --- a/samcli/commands/publish/command.py +++ b/samcli/commands/publish/command.py @@ -11,6 +11,7 @@ from samcli.commands._utils.options import template_common_option from samcli.commands._utils.template import get_template_data from samcli.lib.telemetry.metrics import track_command +from samcli.cli.cli_config_file import configuration_option, TomlProvider LOG = logging.getLogger(__name__) @@ -40,16 +41,17 @@ @click.command("publish", help=HELP_TEXT, short_help=SHORT_HELP) +@configuration_option(provider=TomlProvider(section="parameters")) @template_common_option @click.option("--semantic-version", help=SEMANTIC_VERSION_HELP) @aws_creds_options @cli_framework_options @pass_context @track_command -def cli(ctx, template, semantic_version): +def cli(ctx, template_file, semantic_version): # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing - do_cli(ctx, template, semantic_version) # pragma: no cover + do_cli(ctx, template_file, semantic_version) # pragma: no cover def do_cli(ctx, template, semantic_version): diff --git a/samcli/commands/validate/validate.py b/samcli/commands/validate/validate.py index 4b554b0686..ab908ea2e9 100644 --- a/samcli/commands/validate/validate.py +++ b/samcli/commands/validate/validate.py @@ -10,19 +10,21 @@ from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options from samcli.commands._utils.options import template_option_without_build from samcli.lib.telemetry.metrics import track_command +from samcli.cli.cli_config_file import configuration_option, TomlProvider @click.command("validate", short_help="Validate an AWS SAM template.") +@configuration_option(provider=TomlProvider(section="parameters")) @template_option_without_build @aws_creds_options @cli_framework_options @pass_context @track_command -def cli(ctx, template): +def cli(ctx, template_file): # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing - do_cli(ctx, template) # pragma: no cover + do_cli(ctx, template_file) # pragma: no cover def do_cli(ctx, template): diff --git a/tests/unit/cli/test_cli_config_file.py b/tests/unit/cli/test_cli_config_file.py new file mode 100644 index 0000000000..d3764d1365 --- /dev/null +++ b/tests/unit/cli/test_cli_config_file.py @@ -0,0 +1,140 @@ +import os +import tempfile + +from unittest import TestCase +from unittest.mock import MagicMock, patch + + +from samcli.cli.cli_config_file import ( + TomlProvider, + configuration_option, + configuration_callback, + get_ctx_defaults, + DEFAULT_CONFIG_FILE_NAME, +) + + +class MockContext: + def __init__(self, info_name, parent): + self.info_name = info_name + self.parent = parent + + +class TestTomlProvider(TestCase): + def setUp(self): + self.toml_provider = TomlProvider() + self.config_env = "config_env" + self.parameters = "parameters" + self.cmd_name = "topic" + + def test_toml_valid_with_section(self): + with tempfile.NamedTemporaryFile(delete=False) as toml_file: + toml_file.write(b"[config_env.topic.parameters]\nword='clarity'\n") + toml_file.flush() + self.assertEqual( + TomlProvider(section=self.parameters)(toml_file.name, self.config_env, self.cmd_name), + {"word": "clarity"}, + ) + + def test_toml_invalid_empty_dict(self): + with tempfile.NamedTemporaryFile(delete=False) as toml_file: + toml_file.write(b"[topic]\nword=clarity\n") + toml_file.flush() + self.assertEqual(self.toml_provider(toml_file.name, self.config_env, self.cmd_name), {}) + + +class TestCliConfiguration(TestCase): + def setUp(self): + self.cmd_name = "test_cmd" + self.option_name = "test_option" + self.config_env = "test_config_env" + self.saved_callback = MagicMock() + self.provider = MagicMock() + self.ctx = MagicMock() + self.param = MagicMock() + self.value = MagicMock() + + class Dummy: + pass + + @patch("samcli.cli.cli_config_file.os.path.isfile", return_value=True) + @patch("samcli.cli.cli_config_file.os.path.join", return_value=MagicMock()) + @patch("samcli.cli.cli_config_file.os.path.abspath", return_value=MagicMock()) + def test_callback_with_valid_config_env(self, mock_os_path_is_file, mock_os_path_join, mock_os_path_abspath): + mock_context1 = MockContext(info_name="sam", parent=None) + mock_context2 = MockContext(info_name="local", parent=mock_context1) + mock_context3 = MockContext(info_name="start-api", parent=mock_context2) + self.ctx.parent = mock_context3 + self.ctx.info_name = "test_info" + configuration_callback( + cmd_name=self.cmd_name, + option_name=self.option_name, + config_env_name=self.config_env, + saved_callback=self.saved_callback, + provider=self.provider, + ctx=self.ctx, + param=self.param, + value=self.value, + ) + self.assertEqual(self.saved_callback.call_count, 1) + for arg in [self.ctx, self.param, self.value]: + self.assertIn(arg, self.saved_callback.call_args[0]) + + @patch("samcli.cli.cli_config_file.os.path.isfile", return_value=False) + @patch("samcli.cli.cli_config_file.os.path.join", return_value=MagicMock()) + def test_callback_with_config_file_not_file(self, mock_os_isfile, mock_os_path_join): + configuration_callback( + cmd_name=self.cmd_name, + option_name=self.option_name, + config_env_name=self.config_env, + saved_callback=self.saved_callback, + provider=self.provider, + ctx=self.ctx, + param=self.param, + value=self.value, + ) + self.assertEqual(self.provider.call_count, 0) + self.assertEqual(self.saved_callback.call_count, 1) + for arg in [self.ctx, self.param, self.value]: + self.assertIn(arg, 
self.saved_callback.call_args[0]) + self.assertEqual(mock_os_isfile.call_count, 1) + self.assertEqual(mock_os_path_join.call_count, 1) + + def test_configuration_option(self): + toml_provider = TomlProvider() + click_option = configuration_option(provider=toml_provider) + clc = click_option(self.Dummy()) + self.assertEqual(clc.__click_params__[0].is_eager, True) + self.assertEqual(clc.__click_params__[0].help, "Read config-env from Configuration File.") + self.assertEqual(clc.__click_params__[0].hidden, True) + self.assertEqual(clc.__click_params__[0].expose_value, False) + self.assertEqual(clc.__click_params__[0].callback.args, (None, "--config-env", "default", None, toml_provider)) + + @patch("samcli.cli.cli_config_file.os.path.isfile", return_value=True) + def test_get_ctx_defaults_non_nested(self, mock_os_file): + provider = MagicMock() + + mock_context1 = MockContext(info_name="sam", parent=None) + mock_context2 = MockContext(info_name="local", parent=mock_context1) + mock_context3 = MockContext(info_name="start-api", parent=mock_context2) + + get_ctx_defaults("start-api", provider, mock_context3) + + provider.assert_called_with(os.path.join(os.getcwd(), DEFAULT_CONFIG_FILE_NAME), "default", "local_start_api") + + @patch("samcli.cli.cli_config_file.os.path.isfile", return_value=True) + def test_get_ctx_defaults_nested(self, mock_os_file): + provider = MagicMock() + + mock_context1 = MockContext(info_name="sam", parent=None) + mock_context2 = MockContext(info_name="local", parent=mock_context1) + mock_context3 = MockContext(info_name="generate-event", parent=mock_context2) + mock_context4 = MockContext(info_name="alexa-skills-kit", parent=mock_context3) + + get_ctx_defaults("intent-answer", provider, mock_context4) + + provider.assert_called_with( + os.path.join(os.getcwd(), DEFAULT_CONFIG_FILE_NAME), + "default", + "local_generate_event_alexa_skills_kit_intent_answer", + ) diff --git a/tests/unit/cli/test_types.py b/tests/unit/cli/test_types.py index 985c3e66c1..54f57af616 100644 --- a/tests/unit/cli/test_types.py +++ b/tests/unit/cli/test_types.py @@ -12,19 +12,12 @@ def setUp(self): @parameterized.expand( [ - (("some string"),), - # Key must not contain spaces - (('ParameterKey="Ke y",ParameterValue=Value'),), - # No value - (("ParameterKey=Key,ParameterValue="),), - # No key - (("ParameterKey=,ParameterValue=Value"),), - # Case sensitive - (("parameterkey=Key,ParameterValue=Value"),), - # No space after comma - (("ParameterKey=Key, ParameterValue=Value"),), + # Random string + ("some string",), + # Only commas + (",,",), # Bad separator - (("ParameterKey:Key,ParameterValue:Value"),), + ("ParameterKey:Key,ParameterValue:Value",), ] ) def test_must_fail_on_invalid_format(self, input): @@ -39,6 +32,10 @@ def test_must_fail_on_invalid_format(self, input): ("ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro",), {"KeyPairName": "MyKey", "InstanceType": "t1.micro"}, ), + (("KeyPairName=MyKey InstanceType=t1.micro",), {"KeyPairName": "MyKey", "InstanceType": "t1.micro"}), + (("KeyPairName=MyKey, InstanceType=t1.micro,",), {"KeyPairName": "MyKey,", "InstanceType": "t1.micro,"}), + (('ParameterKey="Ke y",ParameterValue=Value',), {"ParameterKey": "Ke y"}), + ((("ParameterKey=Key,ParameterValue="),), {"ParameterKey": "Key,ParameterValue="}), (('ParameterKey="Key",ParameterValue=Val\\ ue',), {"Key": "Val ue"}), (('ParameterKey="Key",ParameterValue="Val\\"ue"',), {"Key": 'Val"ue'}), (("ParameterKey=Key,ParameterValue=Value",), {"Key": "Value"}), @@ 
-146,38 +143,3 @@ def test_must_fail_on_invalid_format(self, input): def test_successful_parsing(self, input, expected): result = self.param_type.convert(input, None, None) self.assertEqual(result, expected, msg="Failed with Input = " + str(input)) - - -# class TestCfnCapabilitiesType(TestCase): -# def setUp(self): -# self.param_type = CfnCapabilitiesType() -# -# @parameterized.expand( -# [ -# # Just a string -# ("some string"), -# # tuple of string -# ("some string",), -# # non-tuple valid string -# "CAPABILITY_NAMED_IAM", -# ] -# ) -# def test_must_fail_on_invalid_format(self, input): -# self.param_type.fail = Mock() -# self.param_type.convert(input, "param", "ctx") -# -# self.param_type.fail.assert_called_with(ANY, "param", "ctx") -# -# @parameterized.expand( -# [ -# (("CAPABILITY_AUTO_EXPAND",), ("CAPABILITY_AUTO_EXPAND",)), -# (("CAPABILITY_AUTO_EXPAND", "CAPABILITY_NAMED_IAM"), ("CAPABILITY_AUTO_EXPAND", "CAPABILITY_NAMED_IAM")), -# ( -# ("CAPABILITY_AUTO_EXPAND", "CAPABILITY_NAMED_IAM", "CAPABILITY_IAM"), -# ("CAPABILITY_AUTO_EXPAND", "CAPABILITY_NAMED_IAM", "CAPABILITY_IAM"), -# ), -# ] -# ) -# def test_successful_parsing(self, input, expected): -# result = self.param_type.convert(input, None, None) -# self.assertEqual(result, expected, msg="Failed with Input = " + str(input)) diff --git a/tests/unit/commands/_utils/test_options.py b/tests/unit/commands/_utils/test_options.py index 43276c824c..cbcd8b3911 100644 --- a/tests/unit/commands/_utils/test_options.py +++ b/tests/unit/commands/_utils/test_options.py @@ -9,6 +9,10 @@ from samcli.commands._utils.options import get_or_default_template_file_name, _TEMPLATE_OPTION_DEFAULT_VALUE +class Mock: + pass + + class TestGetOrDefaultTemplateFileName(TestCase): def test_must_return_abspath_of_user_provided_value(self): filename = "foo.txt" @@ -50,3 +54,20 @@ def test_must_return_built_template(self, os_mock): result = get_or_default_template_file_name(None, None, _TEMPLATE_OPTION_DEFAULT_VALUE, include_build=True) self.assertEqual(result, "absPath") os_mock.path.abspath.assert_called_with(expected) + + @patch("samcli.commands._utils.options.os") + def test_verify_ctx(self, os_mock): + + ctx = Mock() + + expected = os.path.join(".aws-sam", "build", "template.yaml") + + os_mock.path.exists.return_value = True + os_mock.path.join = os.path.join # Use the real method + os_mock.path.abspath.return_value = "a/b/c/absPath" + os_mock.path.dirname.return_value = "a/b/c" + + result = get_or_default_template_file_name(ctx, None, _TEMPLATE_OPTION_DEFAULT_VALUE, include_build=True) + self.assertEqual(result, "a/b/c/absPath") + self.assertEqual(ctx.config_path, "a/b/c") + os_mock.path.abspath.assert_called_with(expected) diff --git a/tests/unit/commands/local/generate_event/test_event_generation.py b/tests/unit/commands/local/generate_event/test_event_generation.py index d719fb45db..74e998ad52 100644 --- a/tests/unit/commands/local/generate_event/test_event_generation.py +++ b/tests/unit/commands/local/generate_event/test_event_generation.py @@ -124,7 +124,11 @@ def test_subcommand_get_command_return_value(self, click_mock, functools_mock, o s = EventTypeSubCommand(self.events_lib_mock, "hello", all_commands) s.get_command(None, "hi") click_mock.Command.assert_called_once_with( - name="hi", short_help="Generates a hello Event", params=[], callback=callback_object_mock + name="hi", + short_help="Generates a hello Event", + params=[], + callback=callback_object_mock, + context_settings={"default_map": {}}, ) def test_subcommand_list_return_value(self): 
From 5854f2a4f2a8f500a7716413e5d697cf50901e72 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Thu, 14 Nov 2019 21:35:53 -0800 Subject: [PATCH 04/45] design: `sam deploy` also packages built artifacts (#1521) --- designs/package_during_deploy.md | 178 +++++++++++++++++++++++++++++++ 1 file changed, 178 insertions(+) create mode 100644 designs/package_during_deploy.md diff --git a/designs/package_during_deploy.md b/designs/package_during_deploy.md new file mode 100644 index 0000000000..e67a3bff04 --- /dev/null +++ b/designs/package_during_deploy.md @@ -0,0 +1,178 @@ +Package during `sam deploy` +==================================== + + +What is the problem? +-------------------- + +Today while using `sam deploy` the specified `--template-file` or `--template` is expected to have packaged artifacts references in the given template file. + +This is driven by the following workflow. + +`sam build` -> `sam package --s3-bucket ... --s3-prefix ... --output-template-file packaged.yaml`. + +This workflow builds the requisite code and packages those built artifacts into an s3 bucket, optionally under a given s3 prefix. + +If a developer can optionally cut through this process without requiring an explicit `package` command, but rather have `sam deploy` package to a given s3 bucket, it cuts the number of steps before needing to deploy and test in the cloud. + +This also reduces friction in the `author` and `test` loop. + +Ideal end state. + +`sam build` -> `sam deploy ..... --s3-bucket ....` + + +What will be changed? +--------------------- + +Addition of extra parameters that are currently supported by `sam package` over to `sam deploy` with all of them being optional. + +Additional parameters that need to be on `sam deploy` that are not on `sam package`. + +* `--metadata` +* `--use-json` + +Parameters that dont need to be added. + +* `--output-template-file` + * An explicit `output-template-file` is created on the fly during packaging in the deploy phase. + +If the expectation is to package and deploy in one command, One can now do. + +`sam deploy --stack-name sam-package-on-deploy --capabilities CAPABILITY_IAM --s3-bucket sam-package-bucket` + +There is no explicit need to pass in a `--template-file` or `--template` parameter, if one is not passed in it to defaults to trying to find the template.yaml that was generated by `sam build` which is located at `.aws-sam/build/template.yaml` + +The old method of deploying pre-packaged artifacts will continue to work as before. + +* `sam deploy --template-file packaged.yaml --stack-name sam-package-on-deploy --capabilities CAPABILITY_IAM` + +If a deployment is done without running `sam build` prior we still go ahead and deploy with the given `template.yaml` in the project root. This might still result in a successful deploy, but not a deploy with the correct build artifacts. + + +Future of `sam package`? +--------------------- + +* `sam package` will continue to exist in the state it is today and will continue to be improved upon separately. + +Success criteria for the change +------------------------------- + +User do not require to run `sam package` as part of their author test loop, except for CI/CD purposes, where `sam package` can be run and the packaged template file can be passed to cloudformation deploy actions. + + +Out-of-Scope +------------ + +The s3 bucket where the packaged artifacts go is not abstracted in this design. 
In the future, the s3 bucket could be specified via a configuration file. + +This is currently in design in : https://github.com/awslabs/aws-sam-cli/pull/1503 + +User Experience Walkthrough +--------------------------- + +`sam build` -> `sam deploy` + +`sam build` -> `sam package` -> `sam deploy` + +Provide identical experiences in terms of a deploying the same stack, with exactly same artifacts. + + +Implementation +============== + +CLI Changes +----------- + +* Add new arguments `--metadata`, `--use-json` and modify existing `--template-file` or `--template` to look for a default `template.yaml` that exists under `.aws-sam/build/` + +### Breaking Change + +* Not a breaking change , but there are optional behavorial changes that a user can subscribe into by supplying a non-packaged template file and an s3 bucket. + +Design +------ + +* Changes to Deploy command's click options +* Attempt to package on every deploy if an appropriate s3 bucket is specified and deploy using the output template file during package. +* If a pre-packaged template is specified, an attempt to package does not change the template and the same template is used for deploy. +* The parameters that share the same name across package and deploy are collapsed together. eg: `--kms-key-id` , if a kms-key-id is specified that same key is used across both packaging and deploy purposes. + +`.samrc` Changes +---------------- + +None + +Security +-------- + +**What new dependencies (libraries/cli) does this change require?** +N/A + +**What other Docker container images are you using?** +N/A + +**Are you creating a new HTTP endpoint? If so explain how it will be +created & used** +N/A + +**Are you connecting to a remote API? If so explain how is this +connection secured** +N/A + +**Are you reading/writing to a temporary folder? If so, what is this +used for and when do you clean up?** + +Possibly reading from a configuration file in the future. + +**How do you validate new .samrc configuration?** + +N/A + +What is your Testing Plan (QA)? +=============================== + +Goal +---- +* Regression tests on previous functionality of `sam deploy` +* Integration tests on automatic packaging on `sam deploy` + +Pre-requesites +-------------- +N/A + +Test Scenarios/Cases +-------------------- +* Re-deploy a stack that was deployed with a packaged template before hand using the new sam deploy menthod. + +`sam deploy --template-file packaged.yaml --stack-name sam-stack --capabilities CAPABILITY_IAM` + +`sam deploy --stack-name sam-stack --capabilities CAPABILITY_IAM` + +The new stack should not have any changes. + + +Expected Results +---------------- + +* Regresssion and Integration tests pass. + +Documentation Changes +===================== +* Required nature of `--template-file`, `--template` parameter has a series of defaults that are looked at during `sam deploy` similair to `sam package`. +* If `--template-file` or `--template` points to a non-packaged template-file, `--s3-bucket` becomes required to be able to effectively package and deploy in one command using `sam deploy`. 
+ +Open Issues +============ + +Task Breakdown +============== + +- \[x\] Send a Pull Request with this design document +- \[ \] Build the command line interface +- \[ \] Build the underlying library +- \[ \] Unit tests +- \[ \] Functional Tests +- \[ \] Integration tests +- \[ \] Run all tests on Windows +- \[ \] Update documentation From 0451b59b5323f659f2a00a06259557d277a3a6a1 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Thu, 14 Nov 2019 23:14:45 -0800 Subject: [PATCH 05/45] Package and deploy in one command (#1532) * feat: beginnings of package and deploy together * fix: plumb through configuration - need to add additional parameters over from package. * fix: conditional check to print text on package during deploy * fix: exceptions for `package` * lint: local variables rule - `samcli/commands/deploy/command.py` has high number of local variables, because of the nature of command, it has high number of arguments. --- .pylintrc | 2 +- samcli/cli/types.py | 5 +- samcli/commands/_utils/options.py | 6 +- samcli/commands/deploy/command.py | 77 ++++++++++++++++------ samcli/commands/package/command.py | 12 +--- samcli/commands/package/exceptions.py | 9 +++ samcli/commands/package/package_context.py | 6 +- samcli/lib/package/artifact_exporter.py | 2 - samcli/lib/package/s3_uploader.py | 11 +--- tests/unit/commands/deploy/test_command.py | 16 +++-- tests/unit/lib/package/test_s3_uploader.py | 3 +- 11 files changed, 93 insertions(+), 56 deletions(-) diff --git a/.pylintrc b/.pylintrc index d450445bc8..70776ecaa2 100644 --- a/.pylintrc +++ b/.pylintrc @@ -59,7 +59,7 @@ confidence= # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" -disable=R0201,W0613,W0640,I0021,I0020,W1618,W1619,R0902,R0903,W0231,W0611,R0913,W0703,C0330,R0204,I0011,R0904,C0301 +disable=R0201,W0613,W0640,I0021,I0020,W1618,W1619,R0902,R0903,W0231,W0611,R0913,W0703,C0330,R0204,I0011,R0904,R0914,C0301 [REPORTS] diff --git a/samcli/cli/types.py b/samcli/cli/types.py index 58ba341ba6..31cc181302 100644 --- a/samcli/cli/types.py +++ b/samcli/cli/types.py @@ -22,8 +22,9 @@ class CfnParameterOverridesType(click.ParamType): # https://regex101.com/r/xqfSjW/2 # https://regex101.com/r/xqfSjW/5 - # If Both ParameterKey pattern and KeyPairName=MyKey, should not be fixed. if they are it can - # result in unpredicatable behavior. + # If Both ParameterKey pattern and KeyPairName=MyKey should not be present + # while adding parameter overrides, if they are, it + # can result in unpredicatable behavior. KEY_REGEX = '([A-Za-z0-9\\"]+)' VALUE_REGEX = '(\\"(?:\\\\.|[^\\"\\\\]+)*\\"|(?:\\\\.|[^ \\"\\\\]+)+))' diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index bc8699779c..24be5a8e93 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -122,6 +122,7 @@ def parameter_override_click_option(): "--parameter-overrides", cls=OptionNargs, type=CfnParameterOverridesType(), + default={}, help="Optional. A string that contains CloudFormation parameter overrides encoded as key=value " "pairs. Use the same format as the AWS CLI, e.g. 
'ParameterKey=KeyPairName," "ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro'", @@ -148,8 +149,7 @@ def capabilities_click_option(): return click.option( "--capabilities", cls=OptionNargs, - type=FuncParamType(lambda value: value.split(" ")), - required=True, + type=FuncParamType(lambda value: value.split(" ") if not isinstance(value, tuple) else value), help="A list of capabilities that you must specify" "before AWS Cloudformation can create certain stacks. Some stack tem-" "plates might include resources that can affect permissions in your AWS" @@ -187,7 +187,7 @@ def notification_arns_click_option(): return click.option( "--notification-arns", cls=OptionNargs, - type=FuncParamType(lambda value: value.split(" ")), + type=FuncParamType(lambda value: value.split(" ") if not isinstance(value, tuple) else value), required=False, help="Amazon Simple Notification Service topic" "Amazon Resource Names (ARNs) that AWS CloudFormation associates with" diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index e31a336f5d..ae6130bb91 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -2,16 +2,19 @@ CLI command for "deploy" command """ +import tempfile + import click -from samcli.cli.cli_config_file import configuration_option, TomlProvider from samcli.commands._utils.options import ( parameter_override_option, capabilities_override_option, tags_override_option, notification_arns_override_option, template_click_option, + metadata_override_option, ) +from samcli.cli.cli_config_file import configuration_option, TomlProvider from samcli.cli.main import pass_context, common_options, aws_creds_options from samcli.lib.telemetry.metrics import track_command @@ -28,14 +31,14 @@ """ -@configuration_option(provider=TomlProvider(section="parameters")) @click.command( "deploy", short_help=SHORT_HELP, context_settings={"ignore_unknown_options": False, "allow_interspersed_args": True, "allow_extra_args": True}, help=HELP_TEXT, ) -@template_click_option(include_build=False) +@configuration_option(provider=TomlProvider(section="parameters")) +@template_click_option(include_build=True) @click.option( "--stack-name", required=True, @@ -96,6 +99,14 @@ "changes to be made to the stack. The default behavior is to return a" "non-zero exit code.", ) +@click.option( + "--use-json", + required=False, + is_flag=True, + help="Indicates whether to use JSON as the format for " + "the output AWS CloudFormation template. YAML is used by default.", +) +@metadata_override_option @notification_arns_override_option @tags_override_option @parameter_override_option @@ -118,7 +129,9 @@ def cli( role_arn, notification_arns, fail_on_empty_changeset, + use_json, tags, + metadata, ): # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing @@ -135,7 +148,9 @@ def cli( role_arn, notification_arns, fail_on_empty_changeset, + use_json, tags, + metadata, ctx.region, ctx.profile, ) # pragma: no cover @@ -154,27 +169,47 @@ def do_cli( role_arn, notification_arns, fail_on_empty_changeset, + use_json, tags, + metadata, region, profile, ): + from samcli.commands.package.package_context import PackageContext from samcli.commands.deploy.deploy_context import DeployContext - with DeployContext( - template_file=template_file, - stack_name=stack_name, - s3_bucket=s3_bucket, - force_upload=force_upload, - s3_prefix=s3_prefix, - kms_key_id=kms_key_id, - parameter_overrides=parameter_overrides, - capabilities=capabilities, - no_execute_changeset=no_execute_changeset, - role_arn=role_arn, - notification_arns=notification_arns, - fail_on_empty_changeset=fail_on_empty_changeset, - tags=tags, - region=region, - profile=profile, - ) as deploy_context: - deploy_context.run() + with tempfile.NamedTemporaryFile() as output_template_file: + + with PackageContext( + template_file=template_file, + s3_bucket=s3_bucket, + s3_prefix=s3_prefix, + output_template_file=output_template_file.name, + kms_key_id=kms_key_id, + use_json=use_json, + force_upload=force_upload, + metadata=metadata, + on_deploy=True, + region=region, + profile=profile, + ) as package_context: + package_context.run() + + with DeployContext( + template_file=output_template_file.name, + stack_name=stack_name, + s3_bucket=s3_bucket, + force_upload=force_upload, + s3_prefix=s3_prefix, + kms_key_id=kms_key_id, + parameter_overrides=parameter_overrides, + capabilities=capabilities, + no_execute_changeset=no_execute_changeset, + role_arn=role_arn, + notification_arns=notification_arns, + fail_on_empty_changeset=fail_on_empty_changeset, + tags=tags, + region=region, + profile=profile, + ) as deploy_context: + deploy_context.run() diff --git a/samcli/commands/package/command.py b/samcli/commands/package/command.py index 5d6e289aba..b6cd4c9b4c 100644 --- a/samcli/commands/package/command.py +++ b/samcli/commands/package/command.py @@ -1,18 +1,12 @@ """ CLI command for "package" command """ -from functools import partial - import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option + +from samcli.cli.cli_config_file import configuration_option, TomlProvider from samcli.cli.main import pass_context, common_options, aws_creds_options -from samcli.commands._utils.options import ( - metadata_override_option, - _TEMPLATE_OPTION_DEFAULT_VALUE, - get_or_default_template_file_name, - template_click_option, -) +from samcli.commands._utils.options import metadata_override_option, template_click_option from samcli.commands._utils.resources import resources_generator from samcli.lib.telemetry.metrics import track_command diff --git a/samcli/commands/package/exceptions.py b/samcli/commands/package/exceptions.py index c0aa9bd7f4..fac98490b7 100644 --- a/samcli/commands/package/exceptions.py +++ b/samcli/commands/package/exceptions.py @@ -72,3 +72,12 @@ def __init__(self, template_file, ex): super(PackageFailedError, self).__init__( message=message_fmt.format(template_file=self.template_file, ex=self.ex) ) + + +class NoSuchBucketError(UserException): + def __init__(self, **kwargs): + self.kwargs = kwargs + + message_fmt = "\n S3 Bucket does not exist." 
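To make the package-then-deploy chaining above concrete, the flow `do_cli` wires up is roughly the following sketch. Class names and keyword arguments mirror this patch; the values are placeholders, and actually running it would require AWS credentials, an existing S3 bucket, and a real SAM template.

import tempfile

from samcli.commands.package.package_context import PackageContext
from samcli.commands.deploy.deploy_context import DeployContext

# `sam deploy` now packages into a throwaway template file and immediately
# deploys that packaged template, so a separate `sam package` step is no longer required.
with tempfile.NamedTemporaryFile() as output_template_file:
    with PackageContext(
        template_file="template.yaml",            # placeholder template path
        s3_bucket="my-deployment-bucket",         # placeholder bucket name
        s3_prefix=None,
        output_template_file=output_template_file.name,
        kms_key_id=None,
        use_json=False,
        force_upload=False,
        metadata=None,
        on_deploy=True,                           # suppresses the "Execute the following command" message
        region="us-east-1",
        profile=None,
    ) as package_context:
        package_context.run()

    with DeployContext(
        template_file=output_template_file.name,  # deploy consumes the packaged template
        stack_name="my-stack",
        s3_bucket="my-deployment-bucket",
        force_upload=False,
        s3_prefix=None,
        kms_key_id=None,
        parameter_overrides={},
        capabilities=["CAPABILITY_IAM"],
        no_execute_changeset=False,
        role_arn=None,
        notification_arns=None,
        fail_on_empty_changeset=True,
        tags=None,
        region="us-east-1",
        profile=None,
    ) as deploy_context:
        deploy_context.run()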
+ + super(NoSuchBucketError, self).__init__(message=message_fmt.format(**self.kwargs)) diff --git a/samcli/commands/package/package_context.py b/samcli/commands/package/package_context.py index 27c9332ab6..7e985ab715 100644 --- a/samcli/commands/package/package_context.py +++ b/samcli/commands/package/package_context.py @@ -34,7 +34,7 @@ class PackageContext: MSG_PACKAGED_TEMPLATE_WRITTEN = ( - "Successfully packaged artifacts and wrote output template " + "\nSuccessfully packaged artifacts and wrote output template " "to file {output_file_name}." "\n" "Execute the following command to deploy the packaged template" @@ -56,6 +56,7 @@ def __init__( metadata, region, profile, + on_deploy=False, ): self.template_file = template_file self.s3_bucket = s3_bucket @@ -67,6 +68,7 @@ def __init__( self.metadata = metadata self.region = region self.profile = profile + self.on_deploy = on_deploy self.s3_uploader = None def __enter__(self): @@ -91,7 +93,7 @@ def run(self): self.write_output(self.output_template_file, exported_str) - if self.output_template_file: + if self.output_template_file and not self.on_deploy: msg = self.MSG_PACKAGED_TEMPLATE_WRITTEN.format( output_file_name=self.output_template_file, output_file_path=os.path.abspath(self.output_template_file), diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index 8b11ea8b02..3fdada7ff2 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -171,7 +171,6 @@ def zip_folder(folder_path): :param folder_path: :return: Name of the zipfile """ - filename = os.path.join(tempfile.gettempdir(), "data-" + uuid.uuid4().hex) zipfile_name = make_zip(filename, folder_path) @@ -539,7 +538,6 @@ def __init__( """ Reads the template and makes it ready for export """ - if not (is_local_folder(parent_dir) and os.path.isabs(parent_dir)): raise ValueError("parent_dir parameter must be " "an absolute path to a folder {0}".format(parent_dir)) diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 8b55e24a39..cf5484e025 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -27,19 +27,11 @@ from boto3.s3 import transfer +from samcli.commands.package.exceptions import NoSuchBucketError LOG = logging.getLogger(__name__) -class NoSuchBucketError(Exception): - def __init__(self, **kwargs): - msg = self.fmt.format(**kwargs) - Exception.__init__(self, msg) - self.kwargs = kwargs - - fmt = "S3 Bucket does not exist. " "Execute the command to create a new bucket" "\n" "aws s3 mb s3://{bucket_name}" - - class S3Uploader: """ Class to upload objects to S3 bucket that use versioning. If bucket @@ -125,7 +117,6 @@ def upload_with_dedup(self, file_name, extension=None): # uploads of same object. Uploader will check if the file exists in S3 # and re-upload only if necessary. 
So the template points to same file # in multiple places, this will upload only once - filemd5 = self.file_checksum(file_name) remote_path = filemd5 if extension: diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index 4a39696eb2..c51300dcf1 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -1,5 +1,5 @@ from unittest import TestCase -from unittest.mock import patch, Mock +from unittest.mock import patch, Mock, ANY from samcli.commands.deploy.command import do_cli @@ -23,13 +23,17 @@ def setUp(self): self.metadata = {"abc": "def"} self.region = None self.profile = None + self.use_json = True + self.metadata = {} + @patch("samcli.commands.package.command.click") + @patch("samcli.commands.package.package_context.PackageContext") @patch("samcli.commands.deploy.command.click") @patch("samcli.commands.deploy.deploy_context.DeployContext") - def test_all_args(self, deploy_command_context, click_mock): + def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_context, mock_package_click): context_mock = Mock() - deploy_command_context.return_value.__enter__.return_value = context_mock + mock_deploy_context.return_value.__enter__.return_value = context_mock do_cli( template_file=self.template_file, @@ -47,10 +51,12 @@ def test_all_args(self, deploy_command_context, click_mock): tags=self.tags, region=self.region, profile=self.profile, + use_json=self.use_json, + metadata=self.metadata, ) - deploy_command_context.assert_called_with( - template_file=self.template_file, + mock_deploy_context.assert_called_with( + template_file=ANY, stack_name=self.stack_name, s3_bucket=self.s3_bucket, force_upload=self.force_upload, diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py index 1b47871e0e..40f22f7981 100644 --- a/tests/unit/lib/package/test_s3_uploader.py +++ b/tests/unit/lib/package/test_s3_uploader.py @@ -7,7 +7,8 @@ from pathlib import Path from botocore.exceptions import ClientError -from samcli.lib.package.s3_uploader import S3Uploader, NoSuchBucketError +from samcli.commands.package.exceptions import NoSuchBucketError +from samcli.lib.package.s3_uploader import S3Uploader class TestS3Uploader(TestCase): From 036b41b208e079ca7a3f4187141b6087ee06264b Mon Sep 17 00:00:00 2001 From: Alex Wood Date: Thu, 14 Nov 2019 23:20:32 -0800 Subject: [PATCH 06/45] Managed S3 Bucket via optional bootstrap command (#1526) * WIP: Managed S3 Stack * Managed S3 Bucket Command With Tests Missing: Integration with config files. We may also want to move some of the echo commands to debug logging. 
* Setup Design Doc * Setup Design Document * Rename to and remove CLI interface * Fix lint errors * Adding metadata to stack * fixing black formatting --- designs/sam_setup_cmd.md | 25 +++ samcli/cli/command.py | 2 + samcli/commands/bootstrap/__init__.py | 6 + samcli/commands/bootstrap/command.py | 30 ++++ samcli/lib/bootstrap/__init__.py | 0 samcli/lib/bootstrap/bootstrap.py | 121 +++++++++++++ tests/unit/lib/bootstrap/test_bootstrap.py | 190 +++++++++++++++++++++ 7 files changed, 374 insertions(+) create mode 100644 designs/sam_setup_cmd.md create mode 100644 samcli/commands/bootstrap/__init__.py create mode 100644 samcli/commands/bootstrap/command.py create mode 100644 samcli/lib/bootstrap/__init__.py create mode 100644 samcli/lib/bootstrap/bootstrap.py create mode 100644 tests/unit/lib/bootstrap/test_bootstrap.py diff --git a/designs/sam_setup_cmd.md b/designs/sam_setup_cmd.md new file mode 100644 index 0000000000..1362edff8d --- /dev/null +++ b/designs/sam_setup_cmd.md @@ -0,0 +1,25 @@ +# `sam setup` command + +As a part of packaging Lambda functions for deployment to AWS, users of the AWS SAM CLI currently need to provide an S3 bucket to store their code artifacts in. This creates a number of extra setup steps today, from users needing to go and set up an S3 bucket, to needing to track which bucket is appropriate for a given region (S3 bucket region must match CloudFormation deployment region). This project aims to simplify this experience. + +## Goals + +1. AWS SAM CLI users should be able to set up an S3 bucket for their SAM project entirely through the AWS SAM CLI. +2. The AWS SAM CLI, in setting up such a bucket, should choose an appropriate region and populate the users’s SAM CLI config file in their project. +3. A user doing the interactive deploy experience should be able to be completely separated from the S3 bucket used for source code storage, if the user does not wish to directly configure their source bucket. + +## Design + +We propose creating a new SAM CLI command, sam setup for this process. The underlying functionality would also be accessible to other commands, such as package itself. + +The `sam setup` command would have the following parameters: + +* `--region` This parameter is **CONDITIONALLY REQUIRED**, because the primary goal of this command is to ensure that the user’s region has an S3 bucket set up. We will also accept the `AWS_REGION` environment variable, or the default region in a user’s profile. In short, a region must be provided in some way, or we will fail. +* `--profile` This is associated with a user’s AWS profile, and defaults to `"default"` if not provided. It will be used for sourcing credentials for CloudFormation commands used when setting up the bucket, and for doing S3 ListBucket calls to see if a suitable bucket already exists. + +## Challenges + +Both S3 buckets and CloudFormation stacks do not have sufficiently efficient ways to search by tags. Simply put, there’s likely to be some computational inefficiency as up to hundreds of API calls might be required to identify an existing bucket that was created to be a source bucket. This means that to avoid severe performance issues, we need to make compromises. Proposed: + +* The default managed bucket uses a fixed stack name per region, such as “aws-sam-cli-managed-source-bucket”. If the user for some reason has a stack with that name, then we cannot support a managed bucket for them. 
+* Alternatively, when doing sam setup, the user providing a bucket name would mean that we just check for it to exist and if it does and is in the correct region, populate the config file. diff --git a/samcli/cli/command.py b/samcli/cli/command.py index f56e93fb80..4e2187de1d 100644 --- a/samcli/cli/command.py +++ b/samcli/cli/command.py @@ -21,6 +21,8 @@ "samcli.commands.deploy", "samcli.commands.logs", "samcli.commands.publish", + # We intentionally do not expose the `bootstrap` command for now. We might open it up later + # "samcli.commands.bootstrap", ] diff --git a/samcli/commands/bootstrap/__init__.py b/samcli/commands/bootstrap/__init__.py new file mode 100644 index 0000000000..e432ed5341 --- /dev/null +++ b/samcli/commands/bootstrap/__init__.py @@ -0,0 +1,6 @@ +""" +`sam setup` command +""" + +# Expose the cli object here +from .command import cli # noqa diff --git a/samcli/commands/bootstrap/command.py b/samcli/commands/bootstrap/command.py new file mode 100644 index 0000000000..6da1afb4ba --- /dev/null +++ b/samcli/commands/bootstrap/command.py @@ -0,0 +1,30 @@ +""" +CLI command for "bootstrap", which sets up a SAM development environment +""" +import click + +from samcli.cli.main import pass_context, common_options, aws_creds_options +from samcli.lib.telemetry.metrics import track_command +from samcli.lib.bootstrap import bootstrap + +SHORT_HELP = "Set up development environment for AWS SAM applications." + +HELP_TEXT = """ +Sets up a development environment for AWS SAM applications. + +Currently this creates, if one does not exist, a managed S3 bucket for your account in your working AWS region. +""" + + +@click.command("bootstrap", short_help=SHORT_HELP, help=HELP_TEXT, context_settings=dict(max_content_width=120)) +@common_options +@aws_creds_options +@pass_context +@track_command +def cli(ctx): + do_cli(ctx.region, ctx.profile) # pragma: no cover + + +def do_cli(region, profile): + bucket_name = bootstrap.manage_stack(profile=profile, region=region) + click.echo("Source Bucket: " + bucket_name) diff --git a/samcli/lib/bootstrap/__init__.py b/samcli/lib/bootstrap/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py new file mode 100644 index 0000000000..a4d513df5f --- /dev/null +++ b/samcli/lib/bootstrap/bootstrap.py @@ -0,0 +1,121 @@ +""" +Bootstrap's user's development environment by creating cloud resources required by SAM CLI +""" + +import json +import logging +import boto3 + +from botocore.config import Config +from botocore.exceptions import ClientError + +from samcli import __version__ +from samcli.cli.global_config import GlobalConfig +from samcli.commands.exceptions import UserException + + +LOG = logging.getLogger(__name__) +SAM_CLI_STACK_NAME = "aws-sam-cli-managed-stack" + + +def manage_stack(profile, region): + session = boto3.Session(profile_name=profile if profile else None) + cloudformation_client = session.client("cloudformation", config=Config(region_name=region if region else None)) + + return _create_or_get_stack(cloudformation_client) + + +def _create_or_get_stack(cloudformation_client): + stack = None + try: + ds_resp = cloudformation_client.describe_stacks(StackName=SAM_CLI_STACK_NAME) + stacks = ds_resp["Stacks"] + stack = stacks[0] + LOG.info("Found managed SAM CLI stack.") + except ClientError: + LOG.info("Managed SAM CLI stack not found, creating.") + stack = _create_stack(cloudformation_client) # exceptions are not captured from subcommands + # Sanity 
check for non-none stack? Sanity check for tag? + tags = stack["Tags"] + try: + sam_cli_tag = next(t for t in tags if t["Key"] == "ManagedStackSource") + if not sam_cli_tag["Value"] == "AwsSamCli": + msg = ( + "Stack " + + SAM_CLI_STACK_NAME + + " ManagedStackSource tag shows " + + sam_cli_tag["Value"] + + " which does not match the AWS SAM CLI generated tag value of AwsSamCli. " + "Failing as the stack was likely not created by the AWS SAM CLI." + ) + raise UserException(msg) + except StopIteration: + msg = ( + "Stack " + SAM_CLI_STACK_NAME + " exists, but the ManagedStackSource tag is missing. " + "Failing as the stack was likely not created by the AWS SAM CLI." + ) + raise UserException(msg) + outputs = stack["Outputs"] + try: + bucket_name = next(o for o in outputs if o["OutputKey"] == "SourceBucket")["OutputValue"] + except StopIteration: + msg = ( + "Stack " + SAM_CLI_STACK_NAME + " exists, but is missing the managed source bucket key. " + "Failing as this stack was likely not created by the AWS SAM CLI." + ) + raise UserException(msg) + # This bucket name is what we would write to a config file + return bucket_name + + +def _create_stack(cloudformation_client): + change_set_name = "InitialCreation" + change_set_resp = cloudformation_client.create_change_set( + StackName=SAM_CLI_STACK_NAME, + TemplateBody=_get_stack_template(), + Tags=[{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + ChangeSetType="CREATE", + ChangeSetName=change_set_name, # this must be unique for the stack, but we only create so that's fine + ) + stack_id = change_set_resp["StackId"] + LOG.info("Waiting for managed stack change set to create.") + change_waiter = cloudformation_client.get_waiter("change_set_create_complete") + change_waiter.wait( + ChangeSetName=change_set_name, StackName=SAM_CLI_STACK_NAME, WaiterConfig={"Delay": 15, "MaxAttempts": 60} + ) + cloudformation_client.execute_change_set(ChangeSetName=change_set_name, StackName=SAM_CLI_STACK_NAME) + LOG.info("Waiting for managed stack to be created.") + stack_waiter = cloudformation_client.get_waiter("stack_create_complete") + stack_waiter.wait(StackName=stack_id, WaiterConfig={"Delay": 15, "MaxAttempts": 60}) + LOG.info("Managed SAM CLI stack creation complete.") + ds_resp = cloudformation_client.describe_stacks(StackName=SAM_CLI_STACK_NAME) + stacks = ds_resp["Stacks"] + return stacks[0] + + +def _get_stack_template(): + gc = GlobalConfig() + info = {"version": __version__, "installationId": gc.installation_id} + + template = """ + AWSTemplateFormatVersion : '2010-09-09' + Transform: AWS::Serverless-2016-10-31 + Description: Managed Stack for AWS SAM CLI + + Metadata: + SamCliInfo: {info} + + Resources: + SamCliSourceBucket: + Type: AWS::S3::Bucket + Properties: + Tags: + - Key: ManagedStackSource + Value: AwsSamCli + + Outputs: + SourceBucket: + Value: !Ref SamCliSourceBucket + """ + + return template.format(info=json.dumps(info)) diff --git a/tests/unit/lib/bootstrap/test_bootstrap.py b/tests/unit/lib/bootstrap/test_bootstrap.py new file mode 100644 index 0000000000..fd548b1b66 --- /dev/null +++ b/tests/unit/lib/bootstrap/test_bootstrap.py @@ -0,0 +1,190 @@ +from unittest import TestCase + +import botocore.session + +from botocore.exceptions import ClientError +from botocore.stub import Stubber + +from samcli.commands.exceptions import UserException +from samcli.lib.bootstrap.bootstrap import _create_or_get_stack, _get_stack_template, SAM_CLI_STACK_NAME + + +class TestBootstrapManagedStack(TestCase): + def _stubbed_cf_client(self): + cf = 
botocore.session.get_session().create_client("cloudformation") + return [cf, Stubber(cf)] + + def test_new_stack(self): + stub_cf, stubber = self._stubbed_cf_client() + # first describe_stacks call will fail + ds_params = {"StackName": SAM_CLI_STACK_NAME} + stubber.add_client_error("describe_stacks", service_error_code="ClientError", expected_params=ds_params) + # creating change set + ccs_params = { + "StackName": SAM_CLI_STACK_NAME, + "TemplateBody": _get_stack_template(), + "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + "ChangeSetType": "CREATE", + "ChangeSetName": "InitialCreation", + } + ccs_resp = {"Id": "id", "StackId": "aws-sam-cli-managed-stack"} + stubber.add_response("create_change_set", ccs_resp, ccs_params) + # describe change set creation status for waiter + dcs_params = {"ChangeSetName": "InitialCreation", "StackName": SAM_CLI_STACK_NAME} + dcs_resp = {"Status": "CREATE_COMPLETE"} + stubber.add_response("describe_change_set", dcs_resp, dcs_params) + # executing change set + ecs_params = {"ChangeSetName": "InitialCreation", "StackName": SAM_CLI_STACK_NAME} + ecs_resp = {} + stubber.add_response("execute_change_set", ecs_resp, ecs_params) + # two describe_stacks calls will succeed - one for waiter, one direct + post_create_ds_resp = { + "Stacks": [ + { + "StackName": SAM_CLI_STACK_NAME, + "CreationTime": "2019-11-13", + "StackStatus": "CREATE_COMPLETE", + "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + "Outputs": [{"OutputKey": "SourceBucket", "OutputValue": "generated-src-bucket"}], + } + ] + } + stubber.add_response("describe_stacks", post_create_ds_resp, ds_params) + stubber.add_response("describe_stacks", post_create_ds_resp, ds_params) + stubber.activate() + _create_or_get_stack(stub_cf) + stubber.assert_no_pending_responses() + stubber.deactivate() + + def test_stack_exists(self): + stub_cf, stubber = self._stubbed_cf_client() + ds_resp = { + "Stacks": [ + { + "StackName": SAM_CLI_STACK_NAME, + "CreationTime": "2019-11-13", + "StackStatus": "CREATE_COMPLETE", + "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + "Outputs": [{"OutputKey": "SourceBucket", "OutputValue": "generated-src-bucket"}], + } + ] + } + ds_params = {"StackName": SAM_CLI_STACK_NAME} + stubber.add_response("describe_stacks", ds_resp, ds_params) + stubber.activate() + _create_or_get_stack(stub_cf) + stubber.assert_no_pending_responses() + stubber.deactivate() + + def test_stack_missing_bucket(self): + stub_cf, stubber = self._stubbed_cf_client() + ds_resp = { + "Stacks": [ + { + "StackName": SAM_CLI_STACK_NAME, + "CreationTime": "2019-11-13", + "StackStatus": "CREATE_COMPLETE", + "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + "Outputs": [], + } + ] + } + ds_params = {"StackName": SAM_CLI_STACK_NAME} + stubber.add_response("describe_stacks", ds_resp, ds_params) + stubber.activate() + with self.assertRaises(UserException): + _create_or_get_stack(stub_cf) + stubber.assert_no_pending_responses() + stubber.deactivate() + + def test_stack_missing_tag(self): + stub_cf, stubber = self._stubbed_cf_client() + ds_resp = { + "Stacks": [ + { + "StackName": SAM_CLI_STACK_NAME, + "CreationTime": "2019-11-13", + "StackStatus": "CREATE_COMPLETE", + "Tags": [], + "Outputs": [{"OutputKey": "SourceBucket", "OutputValue": "generated-src-bucket"}], + } + ] + } + ds_params = {"StackName": SAM_CLI_STACK_NAME} + stubber.add_response("describe_stacks", ds_resp, ds_params) + stubber.activate() + with self.assertRaises(UserException): + _create_or_get_stack(stub_cf) 
+ stubber.assert_no_pending_responses() + stubber.deactivate() + + def test_stack_wrong_tag(self): + stub_cf, stubber = self._stubbed_cf_client() + ds_resp = { + "Stacks": [ + { + "StackName": SAM_CLI_STACK_NAME, + "CreationTime": "2019-11-13", + "StackStatus": "CREATE_COMPLETE", + "Tags": [{"Key": "ManagedStackSource", "Value": "WHY WOULD YOU EVEN DO THIS"}], + "Outputs": [{"OutputKey": "SourceBucket", "OutputValue": "generated-src-bucket"}], + } + ] + } + ds_params = {"StackName": SAM_CLI_STACK_NAME} + stubber.add_response("describe_stacks", ds_resp, ds_params) + stubber.activate() + with self.assertRaises(UserException): + _create_or_get_stack(stub_cf) + stubber.assert_no_pending_responses() + stubber.deactivate() + + def test_change_set_creation_fails(self): + stub_cf, stubber = self._stubbed_cf_client() + # first describe_stacks call will fail + ds_params = {"StackName": SAM_CLI_STACK_NAME} + stubber.add_client_error("describe_stacks", service_error_code="ClientError", expected_params=ds_params) + # creating change set - fails + ccs_params = { + "StackName": SAM_CLI_STACK_NAME, + "TemplateBody": _get_stack_template(), + "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + "ChangeSetType": "CREATE", + "ChangeSetName": "InitialCreation", + } + stubber.add_client_error("create_change_set", service_error_code="ClientError", expected_params=ccs_params) + stubber.activate() + with self.assertRaises(ClientError): + _create_or_get_stack(stub_cf) + stubber.assert_no_pending_responses() + stubber.deactivate() + + def test_change_set_execution_fails(self): + stub_cf, stubber = self._stubbed_cf_client() + # first describe_stacks call will fail + ds_params = {"StackName": SAM_CLI_STACK_NAME} + stubber.add_client_error("describe_stacks", service_error_code="ClientError", expected_params=ds_params) + # creating change set + ccs_params = { + "StackName": SAM_CLI_STACK_NAME, + "TemplateBody": _get_stack_template(), + "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + "ChangeSetType": "CREATE", + "ChangeSetName": "InitialCreation", + } + ccs_resp = {"Id": "id", "StackId": "aws-sam-cli-managed-stack"} + stubber.add_response("create_change_set", ccs_resp, ccs_params) + # describe change set creation status for waiter + dcs_params = {"ChangeSetName": "InitialCreation", "StackName": SAM_CLI_STACK_NAME} + dcs_resp = {"Status": "CREATE_COMPLETE"} + stubber.add_response("describe_change_set", dcs_resp, dcs_params) + # executing change set - fails + ecs_params = {"ChangeSetName": "InitialCreation", "StackName": SAM_CLI_STACK_NAME} + stubber.add_client_error( + "execute_change_set", service_error_code="InsufficientCapabilities", expected_params=ecs_params + ) + stubber.activate() + with self.assertRaises(ClientError): + _create_or_get_stack(stub_cf) + stubber.assert_no_pending_responses() + stubber.deactivate() From 31c7d35b81f9317fc1d5f6b25ecca2c5331a2bd5 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Fri, 15 Nov 2019 12:45:09 -0800 Subject: [PATCH 07/45] Colors for deploy (#1535) * feat: colors for deploy command * lint: fixes --- samcli/commands/_utils/table_print.py | 18 +++++++------- samcli/lib/deploy/deployer.py | 7 +++++- samcli/lib/deploy/utils.py | 34 +++++++++++++++++++++++++++ 3 files changed, 50 insertions(+), 9 deletions(-) create mode 100644 samcli/lib/deploy/utils.py diff --git a/samcli/commands/_utils/table_print.py b/samcli/commands/_utils/table_print.py index 76672632ff..6f161882cb 100644 --- 
a/samcli/commands/_utils/table_print.py +++ b/samcli/commands/_utils/table_print.py @@ -8,13 +8,14 @@ import click -def pprint_column_names(format_string, format_kwargs, margin=None, table_header=None): +def pprint_column_names(format_string, format_kwargs, margin=None, table_header=None, color="yellow"): """ :param format_string: format string to be used that has the strings, minimum width to be replaced :param format_kwargs: dictionary that is supplied to the format_string to format the string :param margin: margin that is to be reduced from column width for columnar text. :param table_header: Supplied table header + :param color: color supplied for table headers and column names. :return: boilerplate table string """ @@ -55,10 +56,10 @@ def pprint_wrap(func): def wrap(*args, **kwargs): # The table is setup with the column names, format_string contains the column names. if table_header: - click.secho("\n" + table_header) - click.secho("-" * usable_width) - click.secho(format_string.format(*format_args, **format_kwargs)) - click.secho("-" * usable_width) + click.secho("\n" + table_header, fg=color) + click.secho("-" * usable_width, fg=color) + click.secho(format_string.format(*format_args, **format_kwargs), fg=color) + click.secho("-" * usable_width, fg=color) # format_args which have the minimumwidth set per {} in the format_string is passed to the function # which this decorator wraps, so that the function has access to the correct format_args kwargs["format_args"] = format_args @@ -66,7 +67,7 @@ def wrap(*args, **kwargs): kwargs["margin"] = margin if margin else min_margin result = func(*args, **kwargs) # Complete the table - click.secho("-" * usable_width) + click.secho("-" * usable_width, fg=color) return result return wrap @@ -88,7 +89,7 @@ def wrapped_text_generator(texts, width, margin): yield textwrap.wrap(text, width=width - margin) -def pprint_columns(columns, width, margin, format_string, format_args, columns_dict): +def pprint_columns(columns, width, margin, format_string, format_args, columns_dict, color="yellow"): """ Print columns based on list of columnar text, associated formatting string and associated format arguments. @@ -99,6 +100,7 @@ def pprint_columns(columns, width, margin, format_string, format_args, columns_d :param format_string: A format string that has both width and text specifiers set. :param format_args: list of offset specifiers :param columns_dict: arguments dictionary that have dummy values per column + :param color: color supplied for rows within the table. 
:return: """ for columns_text in zip_longest(*wrapped_text_generator(columns, width, margin), fillvalue=""): @@ -107,4 +109,4 @@ def pprint_columns(columns, width, margin, format_string, format_args, columns_d for k, _ in columns_dict.items(): columns_dict[k] = columns_text[next(counter)] - click.secho(format_string.format(*format_args, **columns_dict)) + click.secho(format_string.format(*format_args, **columns_dict), fg=color) diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index b7e3a77969..74a50b3583 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -24,6 +24,7 @@ import botocore +from samcli.lib.deploy.utils import DeployColor from samcli.commands.deploy.exceptions import DeployFailedError, ChangeSetError, DeployStackOutPutFailedError from samcli.commands._utils.table_print import pprint_column_names, pprint_columns from samcli.commands.deploy import exceptions as deploy_exceptions @@ -69,6 +70,7 @@ def __init__(self, cloudformation_client, changeset_prefix="samcli-deploy"): self.backoff = 2 # Maximum number of attempts before raising exception back up the chain. self.max_attempts = 3 + self.deploy_color = DeployColor() def has_stack(self, stack_name): """ @@ -206,6 +208,7 @@ def describe_changeset(self, change_set_id, stack_name, **kwargs): for k, v in changes.items(): for value in v: + row_color = self.deploy_color.get_changeset_action_color(action=k) pprint_columns( columns=[changes_showcase.get(k, k), value["LogicalResourceId"], value["ResourceType"]], width=kwargs["width"], @@ -213,6 +216,7 @@ def describe_changeset(self, change_set_id, stack_name, **kwargs): format_string=DESCRIBE_CHANGESET_FORMAT_STRING, format_args=kwargs["format_args"], columns_dict=DESCRIBE_CHANGESET_DEFAULT_ARGS.copy(), + color=row_color, ) if not changeset: @@ -320,7 +324,7 @@ def describe_stack_events(self, stack_name, time_stamp_marker, **kwargs): for event in event_items["StackEvents"]: if event["EventId"] not in events and utc_to_timestamp(event["Timestamp"]) > time_stamp_marker: events.add(event["EventId"]) - + row_color = self.deploy_color.get_stack_events_status_color(status=event["ResourceStatus"]) pprint_columns( columns=[ event["ResourceStatus"], @@ -333,6 +337,7 @@ def describe_stack_events(self, stack_name, time_stamp_marker, **kwargs): format_string=DESCRIBE_STACK_EVENTS_FORMAT_STRING, format_args=kwargs["format_args"], columns_dict=DESCRIBE_STACK_EVENTS_DEFAULT_ARGS.copy(), + color=row_color, ) if self._check_stack_complete(stack_status): diff --git a/samcli/lib/deploy/utils.py b/samcli/lib/deploy/utils.py new file mode 100644 index 0000000000..6c556c7068 --- /dev/null +++ b/samcli/lib/deploy/utils.py @@ -0,0 +1,34 @@ +""" +Utilities for Deploy +""" + +from samcli.lib.utils.colors import Colored + + +class DeployColor: + def __init__(self): + self._color = Colored() + self.changeset_color_map = {"Add": "green", "Modify": "yellow", "Remove": "red"} + self.status_color_map = { + "CREATE_COMPLETE": "green", + "CREATE_FAILED": "red", + "CREATE_IN_PROGRESS": "yellow", + "DELETE_COMPLETE": "green", + "DELETE_FAILED": "red", + "DELETE_IN_PROGRESS": "red", + "REVIEW_IN_PROGRESS": "yellow", + "ROLLBACK_COMPLETE": "red", + "ROLLBACK_IN_PROGRESS": "red", + "UPDATE_COMPLETE": "green", + "UPDATE_COMPLETE_CLEANUP_IN_PROGRESS": "yellow", + "UPDATE_IN_PROGRESS": "yellow", + "UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS": "red", + "UPDATE_ROLLBACK_FAILED": "red", + "UPDATE_ROLLBACK_IN_PROGRESS": "red", + } + + def get_stack_events_status_color(self, 
status): + return self.status_color_map.get(status, "yellow") + + def get_changeset_action_color(self, action): + return self.changeset_color_map.get(action, "yellow") From f0edb60d78c8ab82c4bee16344a44a7006fb68f5 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Fri, 15 Nov 2019 12:45:45 -0800 Subject: [PATCH 08/45] fix: deploy error on not supplying s3 bucket (#1534) --- samcli/commands/package/exceptions.py | 11 ++++++++++- samcli/lib/package/s3_uploader.py | 4 +++- tests/unit/lib/package/test_s3_uploader.py | 16 +++++++++++++++- 3 files changed, 28 insertions(+), 3 deletions(-) diff --git a/samcli/commands/package/exceptions.py b/samcli/commands/package/exceptions.py index fac98490b7..7775949fcd 100644 --- a/samcli/commands/package/exceptions.py +++ b/samcli/commands/package/exceptions.py @@ -78,6 +78,15 @@ class NoSuchBucketError(UserException): def __init__(self, **kwargs): self.kwargs = kwargs - message_fmt = "\n S3 Bucket does not exist." + message_fmt = "\nS3 Bucket does not exist." super(NoSuchBucketError, self).__init__(message=message_fmt.format(**self.kwargs)) + + +class BucketNotSpecifiedError(UserException): + def __init__(self, **kwargs): + self.kwargs = kwargs + + message_fmt = "\nS3 Bucket not specified, use --s3-bucket to specify a bucket name" + + super(BucketNotSpecifiedError, self).__init__(message=message_fmt.format(**self.kwargs)) diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index cf5484e025..d89710c0df 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -27,7 +27,7 @@ from boto3.s3 import transfer -from samcli.commands.package.exceptions import NoSuchBucketError +from samcli.commands.package.exceptions import NoSuchBucketError, BucketNotSpecifiedError LOG = logging.getLogger(__name__) @@ -134,6 +134,8 @@ def file_exists(self, remote_path): try: # Find the object that matches this ETag + if not self.bucket_name: + raise BucketNotSpecifiedError() self.s3.head_object(Bucket=self.bucket_name, Key=remote_path) return True except botocore.exceptions.ClientError: diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py index 40f22f7981..a2aef1e008 100644 --- a/tests/unit/lib/package/test_s3_uploader.py +++ b/tests/unit/lib/package/test_s3_uploader.py @@ -7,7 +7,7 @@ from pathlib import Path from botocore.exceptions import ClientError -from samcli.commands.package.exceptions import NoSuchBucketError +from samcli.commands.package.exceptions import NoSuchBucketError, BucketNotSpecifiedError from samcli.lib.package.s3_uploader import S3Uploader @@ -144,6 +144,20 @@ def test_s3_upload(self): s3_url = s3_uploader.upload(f.name, remote_path) self.assertEqual(s3_url, "s3://{0}/{1}/{2}".format(self.bucket_name, self.prefix, remote_path)) + def test_s3_upload_no_bucket(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=None, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + ) + s3_uploader.artifact_metadata = {"a": "b"} + remote_path = Path.joinpath(Path(os.getcwd()), Path("tmp")) + with self.assertRaises(BucketNotSpecifiedError): + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + s3_uploader.upload(f.name, remote_path) + def test_s3_upload_with_dedup(self): s3_uploader = S3Uploader( s3_client=self.s3, From 9940d279a171eb21783b2007702d183da8034c43 Mon Sep 17 00:00:00 2001 From: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Date: 
Fri, 15 Nov 2019 14:52:21 -0600 Subject: [PATCH 09/45] fix: override botocore logging to a NullHandler (#1538) --- samcli/cli/main.py | 2 ++ samcli/lib/utils/sam_logging.py | 19 +++++++++++++++++++ tests/unit/lib/utils/test_sam_logging.py | 10 ++++++++++ 3 files changed, 31 insertions(+) diff --git a/samcli/cli/main.py b/samcli/cli/main.py index c4e0279d6d..ede907bdbf 100644 --- a/samcli/cli/main.py +++ b/samcli/cli/main.py @@ -97,6 +97,8 @@ def cli(ctx): sam_cli_logger = logging.getLogger("samcli") sam_cli_formatter = logging.Formatter("%(message)s") lambda_builders_logger = logging.getLogger("aws_lambda_builders") + botocore_logger = logging.getLogger("botocore") SamCliLogger.configure_logger(sam_cli_logger, sam_cli_formatter, logging.INFO) SamCliLogger.configure_logger(lambda_builders_logger, sam_cli_formatter, logging.INFO) + SamCliLogger.configure_null_logger(botocore_logger) diff --git a/samcli/lib/utils/sam_logging.py b/samcli/lib/utils/sam_logging.py index bf40d2ccaf..2222640ab4 100644 --- a/samcli/lib/utils/sam_logging.py +++ b/samcli/lib/utils/sam_logging.py @@ -28,3 +28,22 @@ def configure_logger(logger, formatter, level): logger.setLevel(level) logger.propagate = False logger.addHandler(log_stream_handler) + + @staticmethod + def configure_null_logger(logger): + """ + Configure a Logger with a NullHandler + + Useful for libraries that do not follow: https://docs.python.org/3.6/howto/logging.html#configuring-logging-for-a-library + + Parameters + ---------- + logger logging.getLogger + Logger to configure + + Returns + ------- + None + """ + logger.propagate = False + logger.addHandler(logging.NullHandler()) diff --git a/tests/unit/lib/utils/test_sam_logging.py b/tests/unit/lib/utils/test_sam_logging.py index b2fb1654ce..9a4824478a 100644 --- a/tests/unit/lib/utils/test_sam_logging.py +++ b/tests/unit/lib/utils/test_sam_logging.py @@ -22,3 +22,13 @@ def test_configure_samcli_logger(self, logging_patch): logger_mock.addHandler.assert_called_once_with(stream_handler_mock) stream_handler_mock.setLevel.assert_called_once_with(2) stream_handler_mock.setFormatter.assert_called_once_with(formatter_mock) + + @patch("samcli.lib.utils.sam_logging.logging") + def test_configure_samcli_logger(self, logging_patch): + logger_mock = Mock() + + SamCliLogger.configure_null_logger(logger_mock) + + self.assertFalse(logger_mock.propagate) + + logger_mock.addHandler.assert_called_once_with(logging_patch.NullHandler()) From f20fde5380df4fc65c1df40f17dc4b6fe61b6a54 Mon Sep 17 00:00:00 2001 From: Sanath Kumar Ramesh Date: Fri, 15 Nov 2019 01:38:53 -0800 Subject: [PATCH 10/45] Guided deployment when flag is specified --- samcli/commands/_utils/options.py | 2 + samcli/commands/deploy/command.py | 89 +++++++++++++++++++++++- samcli/commands/deploy/deploy_context.py | 24 +++++-- samcli/commands/deploy/exceptions.py | 2 +- 4 files changed, 109 insertions(+), 8 deletions(-) diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 24be5a8e93..526273c240 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -149,6 +149,8 @@ def capabilities_click_option(): return click.option( "--capabilities", cls=OptionNargs, + default=("CAPABILITY_IAM",), + required=False, type=FuncParamType(lambda value: value.split(" ") if not isinstance(value, tuple) else value), help="A list of capabilities that you must specify" "before AWS Cloudformation can create certain stacks. 
Some stack tem-" diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index ae6130bb91..2566d9a067 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -3,7 +3,7 @@ """ import tempfile - +import json import click from samcli.commands._utils.options import ( @@ -17,6 +17,8 @@ from samcli.cli.cli_config_file import configuration_option, TomlProvider from samcli.cli.main import pass_context, common_options, aws_creds_options from samcli.lib.telemetry.metrics import track_command +from samcli.lib.utils.colors import Colored +from samcli.lib.bootstrap.bootstrap import manage_stack SHORT_HELP = "Deploy an AWS SAM application." @@ -31,6 +33,24 @@ """ +def prompt_callback(msg, default): + def callback(ctx, param, value): + interactive = ctx.params.get("interactive") + + if interactive: + param.prompt = msg + param.default = value or default + return param.prompt_for_value(ctx) + elif value: + # Value provided + No Interactive. Return the value + return value + else: + # Value not provided + No Interactive + raise click.exceptions.MissingParameter(param=param, ctx=ctx) + + return callback + + @click.command( "deploy", short_help=SHORT_HELP, @@ -41,7 +61,8 @@ @template_click_option(include_build=True) @click.option( "--stack-name", - required=True, + required=False, + default="sam-app", help="The name of the AWS CloudFormation stack you're deploying to. " "If you specify an existing stack, the command updates the stack. " "If you specify a new stack, the command creates it.", @@ -106,6 +127,14 @@ help="Indicates whether to use JSON as the format for " "the output AWS CloudFormation template. YAML is used by default.", ) +@click.option( + "--interactive", + "-i", + required=False, + is_flag=True, + is_eager=True, + help="Specify this flag to allow SAM CLI to guide you through the deployment using interactive prompts.", +) @metadata_override_option @notification_arns_override_option @tags_override_option @@ -132,6 +161,7 @@ def cli( use_json, tags, metadata, + interactive, ): # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing @@ -151,6 +181,7 @@ def cli( use_json, tags, metadata, + interactive, ctx.region, ctx.profile, ) # pragma: no cover @@ -172,12 +203,23 @@ def do_cli( use_json, tags, metadata, + interactive, region, profile, ): from samcli.commands.package.package_context import PackageContext from samcli.commands.deploy.deploy_context import DeployContext + confirm_changeset = False + if interactive: + stack_name, s3_bucket, region, profile, confirm_changeset = guided_deploy( + stack_name, s3_bucket, region, profile + ) + + # We print deploy args only on interactive. + # Should we print this always? 
+ print_deploy_args(stack_name, s3_bucket, region, profile, capabilities, parameter_overrides, confirm_changeset) + with tempfile.NamedTemporaryFile() as output_template_file: with PackageContext( @@ -211,5 +253,48 @@ def do_cli( tags=tags, region=region, profile=profile, + confirm_changeset=confirm_changeset, ) as deploy_context: deploy_context.run() + + +def guided_deploy(stack_name, s3_bucket, region, profile): + default_region = region or "us-east-1" + default_profile = profile or "default" + + color = Colored() + tick = color.yellow("✓") + + click.echo(color.yellow("\nDeploy Arguments\n================")) + + stack_name = click.prompt(f"{tick} Stack Name", default=stack_name, type=click.STRING) + confirm_changeset = click.confirm(f"{tick} Confirm changeset before deploy", default=True) + region = click.prompt(f"{tick} AWS Region", default=default_region, type=click.STRING) + profile = click.prompt(f"{tick} AWS Profile", default=default_profile, type=click.STRING) + + save_to_samconfig = click.confirm(f"{tick} Save values to samconfig.toml", default=True) + + if not s3_bucket: + click.echo(color.yellow("\nConfiguring Deployment S3 Bucket\n================================")) + s3_bucket = manage_stack(profile, region) + click.echo(f"{tick} Using Deployment Bucket: {s3_bucket}") + click.echo("You may specify a different default deployment bucket in samconfig.toml") + + return stack_name, s3_bucket, region, profile, confirm_changeset + + +def print_deploy_args(stack_name, s3_bucket, region, profile, capabilities, parameter_overrides, confirm_changeset): + + param_overrides_string = json.dumps(parameter_overrides, indent=2) + capabilities_string = json.dumps(capabilities) + + click.secho("\nDeploying with following values\n===============================", fg="yellow") + click.echo(f"Stack Name : {stack_name}") + click.echo(f"Region : {region}") + click.echo(f"Profile : {profile}") + click.echo(f"Deployment S3 Bucket : {s3_bucket}") + click.echo(f"Parameter Overrides : {param_overrides_string}") + click.echo(f"Capabilities : {capabilities_string}") + click.echo(f"Confirm Changeset : {confirm_changeset}") + + click.secho("\nInitiating Deployment\n=====================", fg="yellow") diff --git a/samcli/commands/deploy/deploy_context.py b/samcli/commands/deploy/deploy_context.py index 4eaf60b2cc..c95a4f0969 100644 --- a/samcli/commands/deploy/deploy_context.py +++ b/samcli/commands/deploy/deploy_context.py @@ -24,6 +24,7 @@ from samcli.lib.deploy.deployer import Deployer from samcli.lib.package.s3_uploader import S3Uploader from samcli.yamlhelper import yaml_parse +from samcli.lib.utils.colors import Colored LOG = logging.getLogger(__name__) @@ -34,6 +35,8 @@ class DeployContext: MSG_EXECUTE_SUCCESS = "\nSuccessfully created/updated stack - {stack_name}\n" + MSG_CONFIRM_CHANGESET = "Do you want to deploy this changeset?" 
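As an aside on the --capabilities and --notification-arns options touched earlier in this patch: their converter accepts either the raw space-separated string typed on the command line or a value already collected as a tuple (for example the new ("CAPABILITY_IAM",) default). A standalone sketch of the same lambda:

def convert(value):
    # Same logic as the FuncParamType lambda used by --capabilities / --notification-arns
    return value.split(" ") if not isinstance(value, tuple) else value

print(convert(("CAPABILITY_IAM",)))                    # ('CAPABILITY_IAM',) - tuple default passes through
print(convert("CAPABILITY_IAM CAPABILITY_NAMED_IAM"))  # ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM']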
+ def __init__( self, template_file, @@ -51,6 +54,7 @@ def __init__( tags, region, profile, + confirm_changeset, ): self.template_file = template_file self.stack_name = stack_name @@ -69,6 +73,7 @@ def __init__( self.profile = profile self.s3_uploader = None self.deployer = None + self.confirm_changeset = confirm_changeset def __enter__(self): return self @@ -116,6 +121,7 @@ def run(self): self.s3_uploader, [{"Key": key, "Value": value} for key, value in self.tags.items()] if self.tags else [], self.fail_on_empty_changeset, + self.confirm_changeset, ) def deploy( @@ -130,6 +136,7 @@ def deploy( s3_uploader, tags, fail_on_empty_changeset=True, + confirm_changeset=False, ): try: result, changeset_type = self.deployer.create_and_wait_for_changeset( @@ -143,12 +150,19 @@ def deploy( tags=tags, ) - if not no_execute_changeset: - self.deployer.execute_changeset(result["Id"], stack_name) - self.deployer.wait_for_execute(stack_name, changeset_type) - click.echo(self.MSG_EXECUTE_SUCCESS.format(stack_name=stack_name)) - else: + if no_execute_changeset: click.echo(self.MSG_NO_EXECUTE_CHANGESET.format(changeset_id=result["Id"])) + return + + if confirm_changeset: + color = Colored() + tick = color.yellow("✓") + if not click.confirm(f"{tick} {self.MSG_CONFIRM_CHANGESET}", default=False): + return + + self.deployer.execute_changeset(result["Id"], stack_name) + self.deployer.wait_for_execute(stack_name, changeset_type) + click.echo(self.MSG_EXECUTE_SUCCESS.format(stack_name=stack_name)) except deploy_exceptions.ChangeEmptyError as ex: if fail_on_empty_changeset: diff --git a/samcli/commands/deploy/exceptions.py b/samcli/commands/deploy/exceptions.py index 851ba9a999..7ee0f6e4f5 100644 --- a/samcli/commands/deploy/exceptions.py +++ b/samcli/commands/deploy/exceptions.py @@ -7,7 +7,7 @@ class ChangeEmptyError(UserException): def __init__(self, stack_name): self.stack_name = stack_name - message_fmt = "No changes to deploy.Stack {stack_name} is up to date" + message_fmt = "No changes to deploy. Stack {stack_name} is up to date" super(ChangeEmptyError, self).__init__(message=message_fmt.format(stack_name=self.stack_name)) From 0f19612c6e724a6665d1ae83b6d7c3969fcf5936 Mon Sep 17 00:00:00 2001 From: Sanath Kumar Ramesh Date: Fri, 15 Nov 2019 01:49:12 -0800 Subject: [PATCH 11/45] fixing linter --- samcli/commands/deploy/command.py | 20 +------------------- 1 file changed, 1 insertion(+), 19 deletions(-) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 2566d9a067..5ddd20fe3c 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -33,24 +33,6 @@ """ -def prompt_callback(msg, default): - def callback(ctx, param, value): - interactive = ctx.params.get("interactive") - - if interactive: - param.prompt = msg - param.default = value or default - return param.prompt_for_value(ctx) - elif value: - # Value provided + No Interactive. 
Return the value - return value - else: - # Value not provided + No Interactive - raise click.exceptions.MissingParameter(param=param, ctx=ctx) - - return callback - - @click.command( "deploy", short_help=SHORT_HELP, @@ -272,7 +254,7 @@ def guided_deploy(stack_name, s3_bucket, region, profile): region = click.prompt(f"{tick} AWS Region", default=default_region, type=click.STRING) profile = click.prompt(f"{tick} AWS Profile", default=default_profile, type=click.STRING) - save_to_samconfig = click.confirm(f"{tick} Save values to samconfig.toml", default=True) + _ = click.confirm(f"{tick} Save values to samconfig.toml", default=True) if not s3_bucket: click.echo(color.yellow("\nConfiguring Deployment S3 Bucket\n================================")) From 9b4e6b75474d4c64ba0c96214c72b7025b56ec16 Mon Sep 17 00:00:00 2001 From: Sanath Kumar Ramesh Date: Fri, 15 Nov 2019 15:07:11 -0800 Subject: [PATCH 12/45] feat: save params to config file on interactive deploy --- requirements/base.txt | 2 +- requirements/isolated.txt | 2 +- samcli/cli/cli_config_file.py | 93 ++++++++------------- samcli/cli/context.py | 17 ++++ samcli/commands/_utils/options.py | 3 +- samcli/commands/deploy/command.py | 42 +++++++++- samcli/lib/config/__init__.py | 0 samcli/lib/config/samconfig.py | 129 ++++++++++++++++++++++++++++++ 8 files changed, 222 insertions(+), 66 deletions(-) create mode 100644 samcli/lib/config/__init__.py create mode 100644 samcli/lib/config/samconfig.py diff --git a/requirements/base.txt b/requirements/base.txt index 715875da2a..fd6e4eef24 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -14,4 +14,4 @@ serverlessrepo==0.1.9 aws_lambda_builders==0.6.0 # https://github.com/mhammond/pywin32/issues/1439 pywin32 < 226; sys_platform == 'win32' -toml==0.10.0 \ No newline at end of file +tomlkit==0.5.8 \ No newline at end of file diff --git a/requirements/isolated.txt b/requirements/isolated.txt index f04fc7aa6f..82353dcb93 100644 --- a/requirements/isolated.txt +++ b/requirements/isolated.txt @@ -32,7 +32,7 @@ requests==2.22.0 s3transfer==0.2.1 serverlessrepo==0.1.9 six==1.11.0 -toml==0.10.0 +tomlkit==0.5.8 tzlocal==2.0.0 urllib3==1.25.3 websocket-client==0.56.0 diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py index d4cb45aec3..972896d358 100644 --- a/samcli/cli/cli_config_file.py +++ b/samcli/cli/cli_config_file.py @@ -6,17 +6,15 @@ ## SPDX-License-Identifier: MIT import functools -import os import logging import click -import toml +from samcli.cli.context import get_cmd_names +from samcli.lib.config.samconfig import SamConfig, DEFAULT_ENV __all__ = ("TomlProvider", "configuration_option", "get_ctx_defaults") -LOG = logging.getLogger("samcli") -DEFAULT_CONFIG_FILE_NAME = "samconfig.toml" -DEFAULT_IDENTIFER = "default" +LOG = logging.getLogger(__name__) class TomlProvider: @@ -29,44 +27,43 @@ class TomlProvider: def __init__(self, section=None): self.section = section - def __call__(self, file_path, config_env, cmd_name): + def __call__(self, config_dir, config_env, cmd_names): """ Get resolved config based on the `file_path` for the configuration file, `config_env` targeted inside the config file and corresponding `cmd_name` as denoted by `click`. - :param file_path: The path to the configuration file :param config_env: The name of the sectional config_env within configuration file. 
- :param cmd_name: sam command name as defined by click + :param cmd_names list(str): sam command name as defined by click :returns dictionary containing the configuration parameters under specified config_env """ + resolved_config = {} - try: - config = toml.load(file_path) - except Exception as ex: - LOG.error("Error reading configuration file :%s %s", file_path, str(ex)) + + samconfig = SamConfig(config_dir) + LOG.debug("Config file location: %s", samconfig.path()) + + if not samconfig.exists(): + LOG.debug("Config file does not exist") return resolved_config - if self.section: - try: - resolved_config = self._get_config_env(config, config_env)[cmd_name][self.section] - except KeyError: - LOG.debug( - "Error reading configuration file at %s with config_env %s, command %s, section %s", - file_path, - config_env, - cmd_name, - self.section, - ) - return resolved_config - def _get_config_env(self, config, config_env): - """ + try: + LOG.debug("Getting configuration value for %s %s %s", cmd_names, self.section, config_env) + resolved_config = samconfig.get_all(cmd_names, self.section, env=config_env) + except KeyError: + LOG.debug( + "Error reading configuration file at %s with config_env=%s, command=%s, section=%s", + samconfig.path(), + config_dir, + config_env, + cmd_names, + self.section, + ) + except Exception as ex: + LOG.error("Error reading configuration file: %s %s", samconfig.path(), str(ex)) + raise ex - :param config: loaded TOML configuration file into dictionary representation - :param config_env: top level section defined within TOML configuration file - :return: - """ - return config.get(config_env, config.get(DEFAULT_IDENTIFER, {})) + return resolved_config def configuration_callback(cmd_name, option_name, config_env_name, saved_callback, provider, ctx, param, value): @@ -91,7 +88,7 @@ def configuration_callback(cmd_name, option_name, config_env_name, saved_callbac # ctx, param and value are default arguments for click specified callbacks. ctx.default_map = ctx.default_map or {} cmd_name = cmd_name or ctx.info_name - param.default = DEFAULT_IDENTIFER + param.default = None config_env_name = value or config_env_name config = get_ctx_defaults(cmd_name, provider, ctx, config_env_name=config_env_name) ctx.default_map.update(config) @@ -99,7 +96,7 @@ def configuration_callback(cmd_name, option_name, config_env_name, saved_callbac return saved_callback(ctx, param, value) if saved_callback else value -def get_ctx_defaults(cmd_name, provider, ctx, config_env_name=DEFAULT_IDENTIFER): +def get_ctx_defaults(cmd_name, provider, ctx, config_env_name): """ Get the set of the parameters that are needed to be set into the click command. This function also figures out the command name by looking up current click context's parent @@ -114,31 +111,9 @@ def get_ctx_defaults(cmd_name, provider, ctx, config_env_name=DEFAULT_IDENTIFER) :return: dictionary of defaults for parameters """ - cwd = getattr(ctx, "config_path", None) - config_file = os.path.join(cwd if cwd else os.getcwd(), DEFAULT_CONFIG_FILE_NAME) - config = {} - if os.path.isfile(config_file): - LOG.debug("Config file location: %s", os.path.abspath(config_file)) - - # Find parent of current context - _parent = ctx.parent - _cmd_names = [] - # Need to find the total set of commands that current command is part of. - if cmd_name != ctx.info_name: - _cmd_names = [cmd_name] - _cmd_names.append(ctx.info_name) - # Go through all parents till a parent of a context exists. 
- while _parent.parent: - info_name = _parent.info_name - _cmd_names.append(info_name) - _parent = _parent.parent - - # construct a parsed name that is of the format: a_b_c_d - parsed_cmd_name = "_".join(reversed([cmd.replace("-", "_").replace(" ", "_") for cmd in _cmd_names])) - - config = provider(config_file, config_env_name, parsed_cmd_name) - - return config + # `config_dir` will be a directory relative to SAM template, if it is available. If not it's relative to cwd + config_dir = getattr(ctx, "samconfig_dir", None) or SamConfig.config_dir() + return provider(config_dir, config_env_name, get_cmd_names(cmd_name, ctx)) def configuration_option(*param_decls, **attrs): @@ -172,7 +147,7 @@ def decorator(f): # --config-env is hidden and can potentially be opened up in the future. attrs.setdefault("hidden", True) # explicitly ignore values passed to --config-env, can be opened up in the future. - config_env_name = DEFAULT_IDENTIFER + config_env_name = DEFAULT_ENV provider = attrs.pop("provider") attrs["type"] = click.STRING saved_callback = attrs.pop("callback", None) diff --git a/samcli/cli/context.py b/samcli/cli/context.py index 488c6a1ee0..4844d91d4f 100644 --- a/samcli/cli/context.py +++ b/samcli/cli/context.py @@ -146,3 +146,20 @@ def _refresh_session(self): boto3.setup_default_session(region_name=self._aws_region, profile_name=self._aws_profile) except botocore.exceptions.ProfileNotFound as ex: raise CredentialsError(str(ex)) + + +def get_cmd_names(cmd_name, ctx): + # Find parent of current context + _parent = ctx.parent + _cmd_names = [] + # Need to find the total set of commands that current command is part of. + if cmd_name != ctx.info_name: + _cmd_names = [cmd_name] + _cmd_names.append(ctx.info_name) + # Go through all parents till a parent of a context exists. + while _parent.parent: + info_name = _parent.info_name + _cmd_names.append(info_name) + _parent = _parent.parent + + return _cmd_names diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 526273c240..5581fad81e 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -10,6 +10,7 @@ from click.types import FuncParamType from samcli.cli.types import CfnParameterOverridesType, CfnMetadataType, CfnTags from samcli.commands._utils.custom_options.option_nargs import OptionNargs +from samcli.lib.config.samconfig import SamConfig _TEMPLATE_OPTION_DEFAULT_VALUE = "template.[yaml|yml]" @@ -47,7 +48,7 @@ def get_or_default_template_file_name(ctx, param, provided_value, include_build) result = os.path.abspath(provided_value) if ctx: - setattr(ctx, "config_path", os.path.dirname(result)) + setattr(ctx, "samconfig_dir", SamConfig.config_dir(result)) LOG.debug("Using SAM Template at %s", result) return result diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 5ddd20fe3c..5eeb33d676 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -19,6 +19,8 @@ from samcli.lib.telemetry.metrics import track_command from samcli.lib.utils.colors import Colored from samcli.lib.bootstrap.bootstrap import manage_stack +from samcli.lib.config.samconfig import SamConfig +from samcli.cli.context import get_cmd_names SHORT_HELP = "Deploy an AWS SAM application." 
@@ -32,6 +34,8 @@ \b """ +CONFIG_SECTION = "parameters" + @click.command( "deploy", @@ -39,7 +43,7 @@ context_settings={"ignore_unknown_options": False, "allow_interspersed_args": True, "allow_extra_args": True}, help=HELP_TEXT, ) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=TomlProvider(section=CONFIG_SECTION)) @template_click_option(include_build=True) @click.option( "--stack-name", @@ -194,10 +198,20 @@ def do_cli( confirm_changeset = False if interactive: - stack_name, s3_bucket, region, profile, confirm_changeset = guided_deploy( + stack_name, s3_bucket, region, profile, confirm_changeset, save_to_config = guided_deploy( stack_name, s3_bucket, region, profile ) + if save_to_config: + save_config( + template_file, + stack_name=stack_name, + s3_bucket=s3_bucket, + region=region, + profile=profile, + confirm_changeset=confirm_changeset, + ) + # We print deploy args only on interactive. # Should we print this always? print_deploy_args(stack_name, s3_bucket, region, profile, capabilities, parameter_overrides, confirm_changeset) @@ -254,7 +268,7 @@ def guided_deploy(stack_name, s3_bucket, region, profile): region = click.prompt(f"{tick} AWS Region", default=default_region, type=click.STRING) profile = click.prompt(f"{tick} AWS Profile", default=default_profile, type=click.STRING) - _ = click.confirm(f"{tick} Save values to samconfig.toml", default=True) + save_to_config = click.confirm(f"{tick} Save values to samconfig.toml", default=True) if not s3_bucket: click.echo(color.yellow("\nConfiguring Deployment S3 Bucket\n================================")) @@ -262,7 +276,7 @@ def guided_deploy(stack_name, s3_bucket, region, profile): click.echo(f"{tick} Using Deployment Bucket: {s3_bucket}") click.echo("You may specify a different default deployment bucket in samconfig.toml") - return stack_name, s3_bucket, region, profile, confirm_changeset + return stack_name, s3_bucket, region, profile, confirm_changeset, save_to_config def print_deploy_args(stack_name, s3_bucket, region, profile, capabilities, parameter_overrides, confirm_changeset): @@ -280,3 +294,23 @@ def print_deploy_args(stack_name, s3_bucket, region, profile, capabilities, para click.echo(f"Confirm Changeset : {confirm_changeset}") click.secho("\nInitiating Deployment\n=====================", fg="yellow") + + +def save_config(template_file, **kwargs): + color = Colored() + tick = color.yellow("✓") + + click.echo(f"\n{tick} Saving arguments to config file") + + section = CONFIG_SECTION + config_dir = SamConfig.config_dir(template_file) + + ctx = click.get_current_context() + cmd_names = get_cmd_names(ctx.info_name, ctx) + + samconfig = SamConfig(config_dir) + + for key, value in kwargs.items(): + samconfig.put(cmd_names, section, key, value) + + samconfig.flush() diff --git a/samcli/lib/config/__init__.py b/samcli/lib/config/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/config/samconfig.py b/samcli/lib/config/samconfig.py new file mode 100644 index 0000000000..392ff8bac6 --- /dev/null +++ b/samcli/lib/config/samconfig.py @@ -0,0 +1,129 @@ +import os +import logging +import tomlkit + +from pathlib import Path + +LOG = logging.getLogger(__name__) + +DEFAULT_CONFIG_FILE_NAME = "samconfig.toml" +DEFAULT_ENV = "default" + + +class SamConfig: + """ + Class to interface with `samconfig.toml` file. 
+ """ + + document = None + + def __init__(self, config_dir, filename=None): + """ + Initialize the class + + Parameters + ---------- + config_dir : string + Directory where the configuration file needs to be stored + + filename : string + Optional. Name of the configuration file. It is recommended to stick with default so in the future we + could automatically support auto-resolving multiple config files within same directory. + """ + self.filepath = Path(config_dir, filename or DEFAULT_CONFIG_FILE_NAME) + + def get_all(self, cmd_names, section, env=DEFAULT_ENV): + """ + Gets a value from the configuration file for the given environment, command and section + + Parameters + ---------- + cmd_names : list(str) + List of representing the entire command. Ex: ["local", "generate-event", "s3", "put"] + + section : str + Specific section within the command to look into Ex: `parameters` + + env : str + Optional, Name of the environment + + Returns + ------- + dict + Dictionary of configuration options in the file. None, if the config doesn't exist. + + Raises + ------ + KeyError + If the config file does *not* have the specific section + """ + + env = env or DEFAULT_ENV + + self._read() + return self.document[env][self._to_key(cmd_names)][section] + + def put(self, cmd_names, section, key, value, env=DEFAULT_ENV): + """ + Writes the `key=value` under the given section. You have to call the `flush()` method after `put()` in + order to write the values back to the config file. Otherwise they will be just saved in-memory, available + for future access, but never saved back to the file. + + Parameters + ---------- + cmd_names : list(str) + List of representing the entire command. Ex: ["local", "generate-event", "s3", "put"] + + section : str + Specific section within the command to look into Ex: `parameters` + + key : str + Key to write the data under + + value + Value to write. Could be any of the supported TOML types. + + env : str + Optional, Name of the environment + """ + + self._read() + self.document[env][self._to_key(cmd_names)][section][key] = value + + def flush(self): + self._write() + + def exists(self): + return self.filepath.exists() + + def path(self): + return str(self.filepath) + + @staticmethod + def config_dir(template_file_path=None): + """ + SAM Config file is always relative to the SAM Template. 
If it the template is not + given, then it is relative to cwd() + """ + if template_file_path: + return os.path.dirname(template_file_path) + else: + return os.getcwd() + + def _read(self): + if self.document: + return self.document + + txt = self.filepath.read_text() + self.document = tomlkit.loads(txt) + + def _write(self): + if not self.document: + return + + self.filepath.write_text(tomlkit.dumps(self.document)) + + @staticmethod + def _to_key(cmd_names): + # construct a parsed name that is of the format: a_b_c_d + return "_".join(reversed([cmd.replace("-", "_").replace(" ", "_") for cmd in cmd_names])) From 162e57e7a8ebd3b26950c58f22220075334cdeed Mon Sep 17 00:00:00 2001 From: Sanath Kumar Ramesh Date: Fri, 15 Nov 2019 15:34:49 -0800 Subject: [PATCH 13/45] fixing unit tests --- samcli/cli/cli_config_file.py | 2 - samcli/cli/context.py | 17 +++++ .../local/generate_event/event_generation.py | 8 +- samcli/lib/config/samconfig.py | 32 +++++++- tests/unit/cli/test_cli_config_file.py | 74 ++++++------------- tests/unit/commands/_utils/test_options.py | 2 +- tests/unit/commands/deploy/test_command.py | 2 + .../commands/deploy/test_deploy_context.py | 1 + 8 files changed, 77 insertions(+), 61 deletions(-) diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py index 972896d358..c272495fa1 100644 --- a/samcli/cli/cli_config_file.py +++ b/samcli/cli/cli_config_file.py @@ -54,14 +54,12 @@ def __call__(self, config_dir, config_env, cmd_names): LOG.debug( "Error reading configuration file at %s with config_env=%s, command=%s, section=%s", samconfig.path(), - config_dir, config_env, cmd_names, self.section, ) except Exception as ex: LOG.error("Error reading configuration file: %s %s", samconfig.path(), str(ex)) - raise ex return resolved_config diff --git a/samcli/cli/context.py b/samcli/cli/context.py index 4844d91d4f..533ecd5ca1 100644 --- a/samcli/cli/context.py +++ b/samcli/cli/context.py @@ -149,6 +149,21 @@ def _refresh_session(self): def get_cmd_names(cmd_name, ctx): + """ + Given the click core context, return a list representing all the subcommands passed to the CLI + + Parameters + ---------- + cmd_name : name of current command + + ctx : click.Context + + Returns + ------- + list(str) + List containing subcommand names. Ex: ["local", "start-api"] + + """ # Find parent of current context _parent = ctx.parent _cmd_names = [] @@ -162,4 +177,6 @@ def get_cmd_names(cmd_name, ctx): _cmd_names.append(info_name) _parent = _parent.parent + # Make sure the output reads natural. 
Ex: ["local", "start-api"] + _cmd_names.reverse() return _cmd_names diff --git a/samcli/commands/local/generate_event/event_generation.py b/samcli/commands/local/generate_event/event_generation.py index e45ea2cd43..42f943273f 100644 --- a/samcli/commands/local/generate_event/event_generation.py +++ b/samcli/commands/local/generate_event/event_generation.py @@ -10,6 +10,7 @@ from samcli.cli.cli_config_file import TomlProvider, get_ctx_defaults from samcli.cli.options import debug_option from samcli.lib.telemetry.metrics import track_command +import samcli.lib.config.samconfig as samconfig class ServiceCommand(click.MultiCommand): @@ -153,7 +154,12 @@ def get_command(self, ctx, cmd_name): self.cmd_implementation, self.events_lib, self.top_level_cmd_name, cmd_name ) - config = get_ctx_defaults(cmd_name=cmd_name, provider=TomlProvider(section="parameters"), ctx=ctx) + config = get_ctx_defaults( + cmd_name=cmd_name, + provider=TomlProvider(section="parameters"), + ctx=ctx, + config_env_name=samconfig.DEFAULT_ENV, + ) cmd = click.Command( name=cmd_name, diff --git a/samcli/lib/config/samconfig.py b/samcli/lib/config/samconfig.py index 392ff8bac6..3741775d83 100644 --- a/samcli/lib/config/samconfig.py +++ b/samcli/lib/config/samconfig.py @@ -1,9 +1,14 @@ +""" +Class representing the samconfig.toml +""" + import os import logging -import tomlkit from pathlib import Path +import tomlkit + LOG = logging.getLogger(__name__) DEFAULT_CONFIG_FILE_NAME = "samconfig.toml" @@ -56,6 +61,9 @@ def get_all(self, cmd_names, section, env=DEFAULT_ENV): ------ KeyError If the config file does *not* have the specific section + + tomlkit.exceptions.TOMLKitError + If the configuration file is invalid """ env = env or DEFAULT_ENV @@ -85,12 +93,26 @@ def put(self, cmd_names, section, key, value, env=DEFAULT_ENV): env : str Optional, Name of the environment + + Raises + ------ + tomlkit.exceptions.TOMLKitError + If the data is invalid """ self._read() self.document[env][self._to_key(cmd_names)][section][key] = value def flush(self): + """ + Write the data back to file + + Raises + ------ + tomlkit.exceptions.TOMLKitError + If the data is invalid + + """ self._write() def exists(self): @@ -107,8 +129,8 @@ def config_dir(template_file_path=None): """ if template_file_path: return os.path.dirname(template_file_path) - else: - return os.getcwd() + + return os.getcwd() def _read(self): if self.document: @@ -117,6 +139,8 @@ def _read(self): txt = self.filepath.read_text() self.document = tomlkit.loads(txt) + return self.document + def _write(self): if not self.document: return @@ -126,4 +150,4 @@ def _write(self): @staticmethod def _to_key(cmd_names): # construct a parsed name that is of the format: a_b_c_d - return "_".join(reversed([cmd.replace("-", "_").replace(" ", "_") for cmd in cmd_names])) + return "_".join([cmd.replace("-", "_").replace(" ", "_") for cmd in cmd_names]) diff --git a/tests/unit/cli/test_cli_config_file.py b/tests/unit/cli/test_cli_config_file.py index d3764d1365..0aed94ee74 100644 --- a/tests/unit/cli/test_cli_config_file.py +++ b/tests/unit/cli/test_cli_config_file.py @@ -1,17 +1,12 @@ -import os import tempfile +from pathlib import Path from unittest import TestCase from unittest.mock import MagicMock, patch -from samcli.cli.cli_config_file import ( - TomlProvider, - configuration_option, - configuration_callback, - get_ctx_defaults, - DEFAULT_CONFIG_FILE_NAME, -) +from samcli.cli.cli_config_file import TomlProvider, configuration_option, configuration_callback, get_ctx_defaults +from 
samcli.lib.config.samconfig import SamConfig class MockContext: @@ -28,19 +23,19 @@ def setUp(self): self.cmd_name = "topic" def test_toml_valid_with_section(self): - with tempfile.NamedTemporaryFile(delete=False) as toml_file: - toml_file.write(b"[config_env.topic.parameters]\nword='clarity'\n") - toml_file.flush() - self.assertEqual( - TomlProvider(section=self.parameters)(toml_file.name, self.config_env, self.cmd_name), - {"word": "clarity"}, - ) + config_dir = tempfile.gettempdir() + configpath = Path(config_dir, "samconfig.toml") + configpath.write_text("[config_env.topic.parameters]\nword='clarity'\n") + self.assertEqual( + TomlProvider(section=self.parameters)(config_dir, self.config_env, [self.cmd_name]), {"word": "clarity"} + ) def test_toml_invalid_empty_dict(self): - with tempfile.NamedTemporaryFile(delete=False) as toml_file: - toml_file.write(b"[topic]\nword=clarity\n") - toml_file.flush() - self.assertEqual(self.toml_provider(toml_file.name, self.config_env, self.cmd_name), {}) + config_dir = tempfile.gettempdir() + configpath = Path(config_dir, "samconfig.toml") + configpath.write_text("[topic]\nword=clarity\n") + + self.assertEqual(self.toml_provider(config_dir, self.config_env, [self.cmd_name]), {}) class TestCliConfiguration(TestCase): @@ -57,10 +52,7 @@ def setUp(self): class Dummy: pass - @patch("samcli.cli.cli_config_file.os.path.isfile", return_value=True) - @patch("samcli.cli.cli_config_file.os.path.join", return_value=MagicMock()) - @patch("samcli.cli.cli_config_file.os.path.abspath", return_value=MagicMock()) - def test_callback_with_valid_config_env(self, mock_os_path_is_file, mock_os_path_join, mock_os_path_abspath): + def test_callback_with_valid_config_env(self): mock_context1 = MockContext(info_name="sam", parent=None) mock_context2 = MockContext(info_name="local", parent=mock_context1) mock_context3 = MockContext(info_name="start-api", parent=mock_context2) @@ -80,26 +72,6 @@ def test_callback_with_valid_config_env(self, mock_os_path_is_file, mock_os_path for arg in [self.ctx, self.param, self.value]: self.assertIn(arg, self.saved_callback.call_args[0]) - @patch("samcli.cli.cli_config_file.os.path.isfile", return_value=False) - @patch("samcli.cli.cli_config_file.os.path.join", return_value=MagicMock()) - def test_callback_with_config_file_not_file(self, mock_os_isfile, mock_os_path_join): - configuration_callback( - cmd_name=self.cmd_name, - option_name=self.option_name, - config_env_name=self.config_env, - saved_callback=self.saved_callback, - provider=self.provider, - ctx=self.ctx, - param=self.param, - value=self.value, - ) - self.assertEqual(self.provider.call_count, 0) - self.assertEqual(self.saved_callback.call_count, 1) - for arg in [self.ctx, self.param, self.value]: - self.assertIn(arg, self.saved_callback.call_args[0]) - self.assertEqual(mock_os_isfile.call_count, 1) - self.assertEqual(mock_os_path_join.call_count, 1) - def test_configuration_option(self): toml_provider = TomlProvider() click_option = configuration_option(provider=toml_provider) @@ -110,20 +82,18 @@ def test_configuration_option(self): self.assertEqual(clc.__click_params__[0].expose_value, False) self.assertEqual(clc.__click_params__[0].callback.args, (None, "--config-env", "default", None, toml_provider)) - @patch("samcli.cli.cli_config_file.os.path.isfile", return_value=True) - def test_get_ctx_defaults_non_nested(self, mock_os_file): + def test_get_ctx_defaults_non_nested(self): provider = MagicMock() mock_context1 = MockContext(info_name="sam", parent=None) mock_context2 = 
MockContext(info_name="local", parent=mock_context1) mock_context3 = MockContext(info_name="start-api", parent=mock_context2) - get_ctx_defaults("start-api", provider, mock_context3) + get_ctx_defaults("start-api", provider, mock_context3, "default") - provider.assert_called_with(os.path.join(os.getcwd(), DEFAULT_CONFIG_FILE_NAME), "default", "local_start_api") + provider.assert_called_with(SamConfig.config_dir(), "default", ["local", "start-api"]) - @patch("samcli.cli.cli_config_file.os.path.isfile", return_value=True) - def test_get_ctx_defaults_nested(self, mock_os_file): + def test_get_ctx_defaults_nested(self): provider = MagicMock() mock_context1 = MockContext(info_name="sam", parent=None) @@ -131,10 +101,8 @@ def test_get_ctx_defaults_nested(self, mock_os_file): mock_context3 = MockContext(info_name="generate-event", parent=mock_context2) mock_context4 = MockContext(info_name="alexa-skills-kit", parent=mock_context3) - get_ctx_defaults("intent-answer", provider, mock_context4) + get_ctx_defaults("intent-answer", provider, mock_context4, "default") provider.assert_called_with( - os.path.join(os.getcwd(), DEFAULT_CONFIG_FILE_NAME), - "default", - "local_generate_event_alexa_skills_kit_intent_answer", + SamConfig.config_dir(), "default", ["local", "generate-event", "alexa-skills-kit", "intent-answer"] ) diff --git a/tests/unit/commands/_utils/test_options.py b/tests/unit/commands/_utils/test_options.py index cbcd8b3911..b2e82d6618 100644 --- a/tests/unit/commands/_utils/test_options.py +++ b/tests/unit/commands/_utils/test_options.py @@ -69,5 +69,5 @@ def test_verify_ctx(self, os_mock): result = get_or_default_template_file_name(ctx, None, _TEMPLATE_OPTION_DEFAULT_VALUE, include_build=True) self.assertEqual(result, "a/b/c/absPath") - self.assertEqual(ctx.config_path, "a/b/c") + self.assertEqual(ctx.samconfig_dir, "a/b/c") os_mock.path.abspath.assert_called_with(expected) diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index c51300dcf1..4c6d601b60 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -25,6 +25,7 @@ def setUp(self): self.profile = None self.use_json = True self.metadata = {} + self.interactive = True @patch("samcli.commands.package.command.click") @patch("samcli.commands.package.package_context.PackageContext") @@ -53,6 +54,7 @@ def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_con profile=self.profile, use_json=self.use_json, metadata=self.metadata, + interactive=self.interactive, ) mock_deploy_context.assert_called_with( diff --git a/tests/unit/commands/deploy/test_deploy_context.py b/tests/unit/commands/deploy/test_deploy_context.py index 0813553904..9ee1373308 100644 --- a/tests/unit/commands/deploy/test_deploy_context.py +++ b/tests/unit/commands/deploy/test_deploy_context.py @@ -26,6 +26,7 @@ def setUp(self): tags={"a": "b"}, region=None, profile=None, + confirm_changeset=True, ) def test_template_improper(self): From 15c50d696cfb545bc21e32a1355a7ae8df9018cb Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan Date: Mon, 18 Nov 2019 14:40:46 -0800 Subject: [PATCH 14/45] fix: pass `--confirm-changeset` through to deploy command - fix location of samconfig.toml - fix unit tests - unit tests for samconfig - additional unit tests for deploy --- samcli/cli/cli_config_file.py | 3 +- samcli/cli/context.py | 5 ++ samcli/commands/_utils/options.py | 6 +- samcli/commands/deploy/command.py | 30 ++++--- samcli/lib/config/samconfig.py | 4 +- 
tests/unit/commands/deploy/test_command.py | 84 ++++++++++++++++++- .../commands/deploy/test_deploy_context.py | 2 +- tests/unit/lib/config/__init__.py | 0 tests/unit/lib/config/test_samconfig.py | 34 ++++++++ 9 files changed, 152 insertions(+), 16 deletions(-) create mode 100644 tests/unit/lib/config/__init__.py create mode 100644 tests/unit/lib/config/test_samconfig.py diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py index c272495fa1..1b67a545d7 100644 --- a/samcli/cli/cli_config_file.py +++ b/samcli/cli/cli_config_file.py @@ -5,6 +5,7 @@ ## This section contains code copied and modified from [click_config_file][https://github.com/phha/click_config_file/blob/master/click_config_file.py] ## SPDX-License-Identifier: MIT +import os import functools import logging @@ -110,7 +111,7 @@ def get_ctx_defaults(cmd_name, provider, ctx, config_env_name): """ # `config_dir` will be a directory relative to SAM template, if it is available. If not it's relative to cwd - config_dir = getattr(ctx, "samconfig_dir", None) or SamConfig.config_dir() + config_dir = getattr(ctx, "samconfig_dir", None) or os.getcwd() return provider(config_dir, config_env_name, get_cmd_names(cmd_name, ctx)) diff --git a/samcli/cli/context.py b/samcli/cli/context.py index 533ecd5ca1..a72f9ed923 100644 --- a/samcli/cli/context.py +++ b/samcli/cli/context.py @@ -164,6 +164,11 @@ def get_cmd_names(cmd_name, ctx): List containing subcommand names. Ex: ["local", "start-api"] """ + if not ctx: + return [] + + if ctx and not getattr(ctx, "parent", None): + return [ctx.info_name] # Find parent of current context _parent = ctx.parent _cmd_names = [] diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 5581fad81e..590cabec7e 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -31,6 +31,8 @@ def get_or_default_template_file_name(ctx, param, provided_value, include_build) :return: Actual value to be used in the CLI """ + original_template_path = os.path.abspath(provided_value) + search_paths = ["template.yaml", "template.yml"] if include_build: @@ -48,7 +50,9 @@ def get_or_default_template_file_name(ctx, param, provided_value, include_build) result = os.path.abspath(provided_value) if ctx: - setattr(ctx, "samconfig_dir", SamConfig.config_dir(result)) + # sam configuration file should always be relative to the supplied original template and should not to be set + # to be .aws-sam/build/ + setattr(ctx, "samconfig_dir", os.path.dirname(original_template_path)) LOG.debug("Using SAM Template at %s", result) return result diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 5eeb33d676..1e132faf16 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -1,7 +1,7 @@ """ CLI command for "deploy" command """ - +import os import tempfile import json import click @@ -78,7 +78,7 @@ @click.option( "--kms-key-id", required=False, - help="The ID of an AWS KMS key that the command uses" " to encrypt artifacts that are at rest in the S3 bucket.", + help="The ID of an AWS KMS key that the command uses to encrypt artifacts that are at rest in the S3 bucket.", ) @click.option( "--no-execute-changeset", @@ -106,6 +106,12 @@ "changes to be made to the stack. 
The default behavior is to return a" "non-zero exit code.", ) +@click.option( + "--confirm-changeset", + required=False, + is_flag=True, + help="Prompt to confirm if the computed changeset is to be deployed by SAM CLI.", +) @click.option( "--use-json", required=False, @@ -148,6 +154,7 @@ def cli( tags, metadata, interactive, + confirm_changeset, ): # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing @@ -168,6 +175,7 @@ def cli( tags, metadata, interactive, + confirm_changeset, ctx.region, ctx.profile, ) # pragma: no cover @@ -190,16 +198,16 @@ def do_cli( tags, metadata, interactive, + confirm_changeset, region, profile, ): from samcli.commands.package.package_context import PackageContext from samcli.commands.deploy.deploy_context import DeployContext - confirm_changeset = False if interactive: stack_name, s3_bucket, region, profile, confirm_changeset, save_to_config = guided_deploy( - stack_name, s3_bucket, region, profile + stack_name, s3_bucket, region, profile, confirm_changeset ) if save_to_config: @@ -254,7 +262,7 @@ def do_cli( deploy_context.run() -def guided_deploy(stack_name, s3_bucket, region, profile): +def guided_deploy(stack_name, s3_bucket, region, profile, confirm_changeset): default_region = region or "us-east-1" default_profile = profile or "default" @@ -272,7 +280,7 @@ def guided_deploy(stack_name, s3_bucket, region, profile): if not s3_bucket: click.echo(color.yellow("\nConfiguring Deployment S3 Bucket\n================================")) - s3_bucket = manage_stack(profile, region) + s3_bucket = manage_stack(profile=profile, region=region) click.echo(f"{tick} Using Deployment Bucket: {s3_bucket}") click.echo("You may specify a different default deployment bucket in samconfig.toml") @@ -303,12 +311,14 @@ def save_config(template_file, **kwargs): click.echo(f"\n{tick} Saving arguments to config file") section = CONFIG_SECTION - config_dir = SamConfig.config_dir(template_file) - ctx = click.get_current_context() - cmd_names = get_cmd_names(ctx.info_name, ctx) - samconfig = SamConfig(config_dir) + samconfig_dir = getattr(ctx, "samconfig_dir", None) + samconfig = SamConfig( + config_dir=samconfig_dir if samconfig_dir else SamConfig.config_dir(template_file_path=template_file) + ) + + cmd_names = get_cmd_names(ctx.info_name, ctx) for key, value in kwargs.items(): samconfig.put(cmd_names, section, key, value) diff --git a/samcli/lib/config/samconfig.py b/samcli/lib/config/samconfig.py index 3741775d83..e4b9bc705f 100644 --- a/samcli/lib/config/samconfig.py +++ b/samcli/lib/config/samconfig.py @@ -36,6 +36,8 @@ def __init__(self, config_dir, filename=None): could automatically support auto-resolving multiple config files within same directory. 
""" self.filepath = Path(config_dir, filename or DEFAULT_CONFIG_FILE_NAME) + if not self.filepath.exists(): + open(self.filepath, "a+").close() def get_all(self, cmd_names, section, env=DEFAULT_ENV): """ @@ -101,7 +103,7 @@ def put(self, cmd_names, section, key, value, env=DEFAULT_ENV): """ self._read() - self.document[env][self._to_key(cmd_names)][section][key] = value + self.document.update({env: {self._to_key(cmd_names): {section: {key: value}}}}) def flush(self): """ diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index 4c6d601b60..e18cc73524 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -1,5 +1,5 @@ from unittest import TestCase -from unittest.mock import patch, Mock, ANY +from unittest.mock import patch, Mock, ANY, MagicMock from samcli.commands.deploy.command import do_cli @@ -25,7 +25,8 @@ def setUp(self): self.profile = None self.use_json = True self.metadata = {} - self.interactive = True + self.interactive = False + self.confirm_changeset = False @patch("samcli.commands.package.command.click") @patch("samcli.commands.package.package_context.PackageContext") @@ -55,6 +56,7 @@ def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_con use_json=self.use_json, metadata=self.metadata, interactive=self.interactive, + confirm_changeset=self.confirm_changeset, ) mock_deploy_context.assert_called_with( @@ -73,7 +75,85 @@ def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_con tags=self.tags, region=self.region, profile=self.profile, + confirm_changeset=self.confirm_changeset, ) context_mock.run.assert_called_with() self.assertEqual(context_mock.run.call_count, 1) + + @patch("samcli.commands.package.command.click") + @patch("samcli.commands.package.package_context.PackageContext") + @patch("samcli.commands.deploy.command.click") + @patch("samcli.commands.deploy.deploy_context.DeployContext") + @patch("samcli.commands.deploy.command.save_config") + @patch("samcli.commands.deploy.command.manage_stack") + def test_all_args_interactive( + self, + mock_managed_stack, + mock_save_config, + mock_deploy_context, + mock_deploy_click, + mock_package_context, + mock_package_click, + ): + + context_mock = Mock() + mock_deploy_context.return_value.__enter__.return_value = context_mock + mock_deploy_click.prompt = MagicMock(side_effect=["sam-app", "us-east-1", "default"]) + mock_deploy_click.confirm = MagicMock(side_effect=[True, True]) + + mock_managed_stack.return_value = "managed-s3-bucket" + mock_save_config.return_value = True + + do_cli( + template_file=self.template_file, + stack_name=self.stack_name, + s3_bucket=None, + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region=self.region, + profile=self.profile, + use_json=self.use_json, + metadata=self.metadata, + interactive=True, + confirm_changeset=True, + ) + + mock_deploy_context.assert_called_with( + template_file=ANY, + stack_name="sam-app", + s3_bucket="managed-s3-bucket", + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + 
no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region="us-east-1", + profile="default", + confirm_changeset=True, + ) + + context_mock.run.assert_called_with() + mock_save_config.assert_called_with( + "input-template-file", + confirm_changeset=True, + profile="default", + region="us-east-1", + s3_bucket="managed-s3-bucket", + stack_name="sam-app", + ) + mock_managed_stack.assert_called_with(profile="default", region="us-east-1") + self.assertEqual(context_mock.run.call_count, 1) diff --git a/tests/unit/commands/deploy/test_deploy_context.py b/tests/unit/commands/deploy/test_deploy_context.py index 9ee1373308..d2859537ae 100644 --- a/tests/unit/commands/deploy/test_deploy_context.py +++ b/tests/unit/commands/deploy/test_deploy_context.py @@ -26,7 +26,7 @@ def setUp(self): tags={"a": "b"}, region=None, profile=None, - confirm_changeset=True, + confirm_changeset=False, ) def test_template_improper(self): diff --git a/tests/unit/lib/config/__init__.py b/tests/unit/lib/config/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/config/test_samconfig.py b/tests/unit/lib/config/test_samconfig.py new file mode 100644 index 0000000000..c39e8f494d --- /dev/null +++ b/tests/unit/lib/config/test_samconfig.py @@ -0,0 +1,34 @@ +import os +from pathlib import Path + +from unittest import TestCase + +from samcli.lib.config.samconfig import SamConfig, DEFAULT_CONFIG_FILE_NAME + + +class TestSamConfig(TestCase): + def setUp(self): + self.config_dir = os.getcwd() + self.samconfig = SamConfig(self.config_dir) + open(self.samconfig.path(), "w").close() + + def tearDown(self): + if self.samconfig.exists(): + os.remove(self.samconfig.path()) + + def _setup_config(self): + self.samconfig.put(cmd_names=["local", "start", "api"], section="parameters", key="port", value=5401) + self.samconfig.flush() + + def test_init(self): + self.assertEqual(self.samconfig.filepath, Path(self.config_dir, DEFAULT_CONFIG_FILE_NAME)) + + def test_check_config_get(self): + self._setup_config() + self.assertEqual( + {"port": 5401}, self.samconfig.get_all(cmd_names=["local", "start", "api"], section="parameters") + ) + + def test_check_config_exists(self): + self._setup_config() + self.assertTrue(self.samconfig.exists()) From 389face37af80ec9d2dcd8872a3d07e5eabb5dd1 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan Date: Mon, 18 Nov 2019 16:31:47 -0800 Subject: [PATCH 15/45] ux: improvements on sam deploy --interactive --- samcli/commands/_utils/options.py | 11 ++- samcli/commands/deploy/command.py | 80 +++++++++++++++------- samcli/lib/bootstrap/bootstrap.py | 10 +-- tests/unit/commands/deploy/test_command.py | 5 +- 4 files changed, 71 insertions(+), 35 deletions(-) diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 590cabec7e..31a5f79971 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -156,7 +156,7 @@ def capabilities_click_option(): cls=OptionNargs, default=("CAPABILITY_IAM",), required=False, - type=FuncParamType(lambda value: value.split(" ") if not isinstance(value, tuple) else value), + type=FuncParamType(func=_space_separated_list_func_type), help="A list of capabilities that you must specify" "before AWS Cloudformation can create certain stacks. 
Some stack tem-" "plates might include resources that can affect permissions in your AWS" @@ -194,7 +194,7 @@ def notification_arns_click_option(): return click.option( "--notification-arns", cls=OptionNargs, - type=FuncParamType(lambda value: value.split(" ") if not isinstance(value, tuple) else value), + type=FuncParamType(func=_space_separated_list_func_type), required=False, help="Amazon Simple Notification Service topic" "Amazon Resource Names (ARNs) that AWS CloudFormation associates with" @@ -204,3 +204,10 @@ def notification_arns_click_option(): def notification_arns_override_option(f): return notification_arns_click_option()(f) + + +def _space_separated_list_func_type(value): + return value.split(" ") if not isinstance(value, tuple) else value + + +_space_separated_list_func_type.__name__ = "LIST" diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 1e132faf16..a923a23022 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -5,6 +5,7 @@ import tempfile import json import click +from click.types import FuncParamType from samcli.commands._utils.options import ( parameter_override_option, @@ -13,6 +14,7 @@ notification_arns_override_option, template_click_option, metadata_override_option, + _space_separated_list_func_type, ) from samcli.cli.cli_config_file import configuration_option, TomlProvider from samcli.cli.main import pass_context, common_options, aws_creds_options @@ -205,8 +207,11 @@ def do_cli( from samcli.commands.package.package_context import PackageContext from samcli.commands.deploy.deploy_context import DeployContext + changeset_decision = False + _capabilities = ("CAPABILITY_IAM",) + if interactive: - stack_name, s3_bucket, region, profile, confirm_changeset, save_to_config = guided_deploy( + stack_name, s3_bucket, region, profile, changeset_decision, _capabilities, save_to_config = guided_deploy( stack_name, s3_bucket, region, profile, confirm_changeset ) @@ -218,11 +223,20 @@ def do_cli( region=region, profile=profile, confirm_changeset=confirm_changeset, + capabilities=_capabilities, ) # We print deploy args only on interactive. # Should we print this always? 
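        # NOTE: when --interactive is used, the guided values collected above
        # (changeset_decision and _capabilities) are the ones handed to
        # print_deploy_args and DeployContext below, rather than the raw CLI flags.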
- print_deploy_args(stack_name, s3_bucket, region, profile, capabilities, parameter_overrides, confirm_changeset) + print_deploy_args( + stack_name=stack_name, + s3_bucket=s3_bucket, + region=region, + profile=profile, + capabilities=_capabilities, + parameter_overrides=parameter_overrides, + confirm_changeset=changeset_decision, + ) with tempfile.NamedTemporaryFile() as output_template_file: @@ -249,7 +263,7 @@ def do_cli( s3_prefix=s3_prefix, kms_key_id=kms_key_id, parameter_overrides=parameter_overrides, - capabilities=capabilities, + capabilities=_capabilities if interactive else capabilities, no_execute_changeset=no_execute_changeset, role_arn=role_arn, notification_arns=notification_arns, @@ -257,7 +271,7 @@ def do_cli( tags=tags, region=region, profile=profile, - confirm_changeset=confirm_changeset, + confirm_changeset=changeset_decision, ) as deploy_context: deploy_context.run() @@ -265,26 +279,37 @@ def do_cli( def guided_deploy(stack_name, s3_bucket, region, profile, confirm_changeset): default_region = region or "us-east-1" default_profile = profile or "default" + capabilities = ("CAPABILITY_IAM",) color = Colored() tick = color.yellow("✓") - click.echo(color.yellow("\nDeploy Arguments\n================")) + click.echo( + color.yellow("\n\tSetting default arguments for 'sam deploy'\n\t=========================================") + ) - stack_name = click.prompt(f"{tick} Stack Name", default=stack_name, type=click.STRING) - confirm_changeset = click.confirm(f"{tick} Confirm changeset before deploy", default=True) - region = click.prompt(f"{tick} AWS Region", default=default_region, type=click.STRING) - profile = click.prompt(f"{tick} AWS Profile", default=default_profile, type=click.STRING) + stack_name = click.prompt(f"\t{tick} Stack Name", default=stack_name, type=click.STRING) + region = click.prompt(f"\t{tick} AWS Region", default=default_region, type=click.STRING) + profile = click.prompt(f"\t{tick} AWS Profile", default=default_profile, type=click.STRING) + click.secho("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy") + confirm_changeset = click.confirm(f"\t{tick} Confirm changes before deploy", default=confirm_changeset) + click.secho("\t#SAM needs permission to be able to create roles to connect to the resources in your template") + capabilities_confirm = click.confirm(f"\t{tick} Allow SAM CLI IAM role creation", default=True) + + if not capabilities_confirm: + capabilities = click.prompt( + f"\t{tick} Capabilities", default=capabilities, type=FuncParamType(func=_space_separated_list_func_type) + ) - save_to_config = click.confirm(f"{tick} Save values to samconfig.toml", default=True) + save_to_config = click.confirm(f"\t{tick} Save arguments to samconfig.toml", default=True) if not s3_bucket: - click.echo(color.yellow("\nConfiguring Deployment S3 Bucket\n================================")) + click.echo(color.yellow("\n\tConfiguring Deployment S3 Bucket\n\t================================")) s3_bucket = manage_stack(profile=profile, region=region) - click.echo(f"{tick} Using Deployment Bucket: {s3_bucket}") - click.echo("You may specify a different default deployment bucket in samconfig.toml") + click.echo(f"\t{tick} Using Deployment Bucket: {s3_bucket}") + click.echo("\tYou may specify a different default deployment bucket in samconfig.toml") - return stack_name, s3_bucket, region, profile, confirm_changeset, save_to_config + return stack_name, s3_bucket, region, profile, confirm_changeset, capabilities, save_to_config def 
print_deploy_args(stack_name, s3_bucket, region, profile, capabilities, parameter_overrides, confirm_changeset): @@ -292,24 +317,20 @@ def print_deploy_args(stack_name, s3_bucket, region, profile, capabilities, para param_overrides_string = json.dumps(parameter_overrides, indent=2) capabilities_string = json.dumps(capabilities) - click.secho("\nDeploying with following values\n===============================", fg="yellow") - click.echo(f"Stack Name : {stack_name}") - click.echo(f"Region : {region}") - click.echo(f"Profile : {profile}") - click.echo(f"Deployment S3 Bucket : {s3_bucket}") - click.echo(f"Parameter Overrides : {param_overrides_string}") - click.echo(f"Capabilities : {capabilities_string}") - click.echo(f"Confirm Changeset : {confirm_changeset}") - - click.secho("\nInitiating Deployment\n=====================", fg="yellow") + click.secho("\n\tDeploying with following values\n\t===============================", fg="yellow") + click.echo(f"\tStack Name : {stack_name}") + click.echo(f"\tRegion : {region}") + click.echo(f"\tProfile : {profile}") + click.echo(f"\tDeployment S3 Bucket : {s3_bucket}") + click.echo(f"\tParameter Overrides : {param_overrides_string}") + click.echo(f"\tCapabilities : {capabilities_string}") + click.echo(f"\tConfirm Changeset : {confirm_changeset}") def save_config(template_file, **kwargs): color = Colored() tick = color.yellow("✓") - click.echo(f"\n{tick} Saving arguments to config file") - section = CONFIG_SECTION ctx = click.get_current_context() @@ -321,6 +342,13 @@ def save_config(template_file, **kwargs): cmd_names = get_cmd_names(ctx.info_name, ctx) for key, value in kwargs.items(): + if isinstance(value, (list, tuple)): + value = " ".join(val for val in value) samconfig.put(cmd_names, section, key, value) samconfig.flush() + + click.echo(f"\n\t{tick} Saved arguments to config file") + click.echo("\tRunning 'sam deploy' for future deployments will use the parameters saved above.") + click.echo("\tThe above parameters can be changed by modifying samconfig.toml") + click.echo("\tLearn more about samconfig.toml syntax http://url") diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py index a4d513df5f..e093c0fe37 100644 --- a/samcli/lib/bootstrap/bootstrap.py +++ b/samcli/lib/bootstrap/bootstrap.py @@ -31,9 +31,9 @@ def _create_or_get_stack(cloudformation_client): ds_resp = cloudformation_client.describe_stacks(StackName=SAM_CLI_STACK_NAME) stacks = ds_resp["Stacks"] stack = stacks[0] - LOG.info("Found managed SAM CLI stack.") + LOG.info("\tFound managed SAM CLI stack.") except ClientError: - LOG.info("Managed SAM CLI stack not found, creating.") + LOG.info("\tManaged SAM CLI stack not found, creating.") stack = _create_stack(cloudformation_client) # exceptions are not captured from subcommands # Sanity check for non-none stack? Sanity check for tag? 
tags = stack["Tags"] @@ -78,16 +78,16 @@ def _create_stack(cloudformation_client): ChangeSetName=change_set_name, # this must be unique for the stack, but we only create so that's fine ) stack_id = change_set_resp["StackId"] - LOG.info("Waiting for managed stack change set to create.") + LOG.info("\tWaiting for managed stack change set to create.") change_waiter = cloudformation_client.get_waiter("change_set_create_complete") change_waiter.wait( ChangeSetName=change_set_name, StackName=SAM_CLI_STACK_NAME, WaiterConfig={"Delay": 15, "MaxAttempts": 60} ) cloudformation_client.execute_change_set(ChangeSetName=change_set_name, StackName=SAM_CLI_STACK_NAME) - LOG.info("Waiting for managed stack to be created.") + LOG.info("\tWaiting for managed stack to be created.") stack_waiter = cloudformation_client.get_waiter("stack_create_complete") stack_waiter.wait(StackName=stack_id, WaiterConfig={"Delay": 15, "MaxAttempts": 60}) - LOG.info("Managed SAM CLI stack creation complete.") + LOG.info("\tManaged SAM CLI stack creation complete.") ds_resp = cloudformation_client.describe_stacks(StackName=SAM_CLI_STACK_NAME) stacks = ds_resp["Stacks"] return stacks[0] diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index e18cc73524..5d09ec6a8a 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -15,7 +15,7 @@ def setUp(self): self.no_execute_changeset = False self.notification_arns = [] self.parameter_overrides = {"a": "b"} - self.capabilities = "CAPABILITY_IAM" + self.capabilities = ("CAPABILITY_IAM",) self.tags = {"c": "d"} self.fail_on_empty_changset = True self.role_arn = "role_arn" @@ -100,7 +100,7 @@ def test_all_args_interactive( context_mock = Mock() mock_deploy_context.return_value.__enter__.return_value = context_mock mock_deploy_click.prompt = MagicMock(side_effect=["sam-app", "us-east-1", "default"]) - mock_deploy_click.confirm = MagicMock(side_effect=[True, True]) + mock_deploy_click.confirm = MagicMock(side_effect=[True, True, True]) mock_managed_stack.return_value = "managed-s3-bucket" mock_save_config.return_value = True @@ -149,6 +149,7 @@ def test_all_args_interactive( context_mock.run.assert_called_with() mock_save_config.assert_called_with( "input-template-file", + capabilities=("CAPABILITY_IAM",), confirm_changeset=True, profile="default", region="us-east-1", From 02ea704fa06c938f6598220936df0f7f9951c99a Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan Date: Mon, 18 Nov 2019 22:14:43 -0800 Subject: [PATCH 16/45] fix: `samconfig.get_all` - change to normal dict, for click to understand defaults - remove debug traceback - interactive improvements --- samcli/cli/cli_config_file.py | 6 ++++- samcli/commands/_utils/options.py | 2 -- samcli/commands/deploy/command.py | 31 +++++++++++++++------- samcli/lib/config/samconfig.py | 6 ++++- samcli/lib/deploy/deployer.py | 1 - tests/unit/commands/deploy/test_command.py | 8 +++--- 6 files changed, 35 insertions(+), 19 deletions(-) diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py index 1b67a545d7..b96ca51432 100644 --- a/samcli/cli/cli_config_file.py +++ b/samcli/cli/cli_config_file.py @@ -50,7 +50,11 @@ def __call__(self, config_dir, config_env, cmd_names): try: LOG.debug("Getting configuration value for %s %s %s", cmd_names, self.section, config_env) - resolved_config = samconfig.get_all(cmd_names, self.section, env=config_env) + + # NOTE(TheSriram): change from tomlkit table type to normal dictionary, + # so 
that click defaults work out of the box. + resolved_config = {k: v for k, v in samconfig.get_all(cmd_names, self.section, env=config_env).items()} + except KeyError: LOG.debug( "Error reading configuration file at %s with config_env=%s, command=%s, section=%s", diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 31a5f79971..20b46cf1c8 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -10,7 +10,6 @@ from click.types import FuncParamType from samcli.cli.types import CfnParameterOverridesType, CfnMetadataType, CfnTags from samcli.commands._utils.custom_options.option_nargs import OptionNargs -from samcli.lib.config.samconfig import SamConfig _TEMPLATE_OPTION_DEFAULT_VALUE = "template.[yaml|yml]" @@ -154,7 +153,6 @@ def capabilities_click_option(): return click.option( "--capabilities", cls=OptionNargs, - default=("CAPABILITY_IAM",), required=False, type=FuncParamType(func=_space_separated_list_func_type), help="A list of capabilities that you must specify" diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index a923a23022..0cbafab207 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -1,7 +1,6 @@ """ CLI command for "deploy" command """ -import os import tempfile import json import click @@ -207,8 +206,9 @@ def do_cli( from samcli.commands.package.package_context import PackageContext from samcli.commands.deploy.deploy_context import DeployContext - changeset_decision = False - _capabilities = ("CAPABILITY_IAM",) + # set capabilities and changeset decision to None, before interactive gets input from the user + changeset_decision = None + _capabilities = None if interactive: stack_name, s3_bucket, region, profile, changeset_decision, _capabilities, save_to_config = guided_deploy( @@ -271,15 +271,15 @@ def do_cli( tags=tags, region=region, profile=profile, - confirm_changeset=changeset_decision, + confirm_changeset=changeset_decision if interactive else confirm_changeset, ) as deploy_context: deploy_context.run() def guided_deploy(stack_name, s3_bucket, region, profile, confirm_changeset): default_region = region or "us-east-1" - default_profile = profile or "default" - capabilities = ("CAPABILITY_IAM",) + default_capabilities = ("CAPABILITY_IAM",) + input_capabilities = None color = Colored() tick = color.yellow("✓") @@ -290,15 +290,16 @@ def guided_deploy(stack_name, s3_bucket, region, profile, confirm_changeset): stack_name = click.prompt(f"\t{tick} Stack Name", default=stack_name, type=click.STRING) region = click.prompt(f"\t{tick} AWS Region", default=default_region, type=click.STRING) - profile = click.prompt(f"\t{tick} AWS Profile", default=default_profile, type=click.STRING) click.secho("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy") confirm_changeset = click.confirm(f"\t{tick} Confirm changes before deploy", default=confirm_changeset) click.secho("\t#SAM needs permission to be able to create roles to connect to the resources in your template") capabilities_confirm = click.confirm(f"\t{tick} Allow SAM CLI IAM role creation", default=True) if not capabilities_confirm: - capabilities = click.prompt( - f"\t{tick} Capabilities", default=capabilities, type=FuncParamType(func=_space_separated_list_func_type) + input_capabilities = click.prompt( + f"\t{tick} Capabilities", + default=default_capabilities, + type=FuncParamType(func=_space_separated_list_func_type), ) save_to_config = click.confirm(f"\t{tick} 
Save arguments to samconfig.toml", default=True) @@ -309,7 +310,15 @@ def guided_deploy(stack_name, s3_bucket, region, profile, confirm_changeset): click.echo(f"\t{tick} Using Deployment Bucket: {s3_bucket}") click.echo("\tYou may specify a different default deployment bucket in samconfig.toml") - return stack_name, s3_bucket, region, profile, confirm_changeset, capabilities, save_to_config + return ( + stack_name, + s3_bucket, + region, + profile, + confirm_changeset, + input_capabilities if input_capabilities else default_capabilities, + save_to_config, + ) def print_deploy_args(stack_name, s3_bucket, region, profile, capabilities, parameter_overrides, confirm_changeset): @@ -326,6 +335,8 @@ def print_deploy_args(stack_name, s3_bucket, region, profile, capabilities, para click.echo(f"\tCapabilities : {capabilities_string}") click.echo(f"\tConfirm Changeset : {confirm_changeset}") + click.secho("\n\tInitiating Deployment\n\t=====================", fg="yellow") + def save_config(template_file, **kwargs): color = Colored() diff --git a/samcli/lib/config/samconfig.py b/samcli/lib/config/samconfig.py index e4b9bc705f..bcad3a34de 100644 --- a/samcli/lib/config/samconfig.py +++ b/samcli/lib/config/samconfig.py @@ -103,7 +103,11 @@ def put(self, cmd_names, section, key, value, env=DEFAULT_ENV): """ self._read() - self.document.update({env: {self._to_key(cmd_names): {section: {key: value}}}}) + if not self.document: + # Empty document prepare the initial structure. + self.document.update({env: {self._to_key(cmd_names): {section: {key: value}}}}) + # Only update appropriate key value pairs within a section + self.document[env][self._to_key(cmd_names)][section].update({key: value}) def flush(self): """ diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index 74a50b3583..c73d2d1aa0 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -251,7 +251,6 @@ def wait_for_changeset(self, changeset_id, stack_name): try: waiter.wait(ChangeSetName=changeset_id, StackName=stack_name, WaiterConfig=waiter_config) except botocore.exceptions.WaiterError as ex: - LOG.debug("Create changeset waiter exception", exc_info=ex) resp = ex.last_response status = resp["Status"] diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index 5d09ec6a8a..a8d8a92d76 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -99,7 +99,7 @@ def test_all_args_interactive( context_mock = Mock() mock_deploy_context.return_value.__enter__.return_value = context_mock - mock_deploy_click.prompt = MagicMock(side_effect=["sam-app", "us-east-1", "default"]) + mock_deploy_click.prompt = MagicMock(side_effect=["sam-app", "us-east-1"]) mock_deploy_click.confirm = MagicMock(side_effect=[True, True, True]) mock_managed_stack.return_value = "managed-s3-bucket" @@ -142,7 +142,7 @@ def test_all_args_interactive( fail_on_empty_changeset=self.fail_on_empty_changset, tags=self.tags, region="us-east-1", - profile="default", + profile=self.profile, confirm_changeset=True, ) @@ -151,10 +151,10 @@ def test_all_args_interactive( "input-template-file", capabilities=("CAPABILITY_IAM",), confirm_changeset=True, - profile="default", + profile=self.profile, region="us-east-1", s3_bucket="managed-s3-bucket", stack_name="sam-app", ) - mock_managed_stack.assert_called_with(profile="default", region="us-east-1") + mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") 
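        # The guided flow is expected to create or reuse the managed deployment bucket
        # via manage_stack() and pass its return value ("managed-s3-bucket") through
        # to both DeployContext and save_config, as asserted above.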
self.assertEqual(context_mock.run.call_count, 1) From 8a1d190885d122e73ee4349bf2c1110cd09bc5bb Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan Date: Mon, 18 Nov 2019 23:56:49 -0800 Subject: [PATCH 17/45] feat: allow `--interactive` to ask for parameters - if parameters are present on the tempalte, ask for parameters --- samcli/commands/_utils/template.py | 17 ++++ samcli/commands/deploy/command.py | 36 ++++++-- tests/unit/commands/_utils/test_template.py | 21 +++++ tests/unit/commands/deploy/test_command.py | 92 ++++++++++++++++++++- 4 files changed, 157 insertions(+), 9 deletions(-) diff --git a/samcli/commands/_utils/template.py b/samcli/commands/_utils/template.py index 164a90cbf2..0fb7a5783e 100644 --- a/samcli/commands/_utils/template.py +++ b/samcli/commands/_utils/template.py @@ -204,3 +204,20 @@ def _resolve_relative_to(path, original_root, new_root): return os.path.relpath( os.path.normpath(os.path.join(original_root, path)), new_root # Absolute original path w.r.t ``original_root`` ) # Resolve the original path with respect to ``new_root`` + + +def get_template_parameters(template_file): + """ + Get Parameters from a template file. + + Parameters + ---------- + template_file : string + Path to the template to read + + Returns + ------- + Template Parameters as a dictionary + """ + template_dict = get_template_data(template_file=template_file) + return template_dict.get("Parameters", dict()) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 0cbafab207..6239881a8d 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -15,6 +15,7 @@ metadata_override_option, _space_separated_list_func_type, ) +from samcli.commands._utils.template import get_template_parameters from samcli.cli.cli_config_file import configuration_option, TomlProvider from samcli.cli.main import pass_context, common_options, aws_creds_options from samcli.lib.telemetry.metrics import track_command @@ -209,10 +210,14 @@ def do_cli( # set capabilities and changeset decision to None, before interactive gets input from the user changeset_decision = None _capabilities = None + _parameter_overrides = None if interactive: - stack_name, s3_bucket, region, profile, changeset_decision, _capabilities, save_to_config = guided_deploy( - stack_name, s3_bucket, region, profile, confirm_changeset + + _parameter_override_keys = get_template_parameters(template_file=template_file) + + stack_name, s3_bucket, region, profile, changeset_decision, _capabilities, _parameter_overrides, save_to_config = guided_deploy( + stack_name, s3_bucket, region, profile, confirm_changeset, _parameter_override_keys, parameter_overrides ) if save_to_config: @@ -224,6 +229,7 @@ def do_cli( profile=profile, confirm_changeset=confirm_changeset, capabilities=_capabilities, + parameter_overrides=_parameter_overrides, ) # We print deploy args only on interactive. 
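# --- Illustrative sketch (not part of this patch) --------------------------
# get_template_parameters() above returns the "Parameters" block of the SAM
# template as a dict. A minimal stand-alone version of the guided prompting
# loop built on top of it could look like the following; the parameter name
# and defaults below are invented for illustration only.
import click

def prompt_for_parameters(parameter_override_keys, existing_overrides):
    overrides = {}
    for parameter_key in parameter_override_keys:
        overrides[parameter_key] = click.prompt(
            f"Parameter {parameter_key}",
            default=existing_overrides.get(parameter_key, "default"),
            type=click.STRING,
        )
    return overrides

if __name__ == "__main__":
    print(prompt_for_parameters({"InstanceType": {"Type": "String"}}, {"InstanceType": "t3.micro"}))
# ----------------------------------------------------------------------------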
@@ -234,7 +240,7 @@ def do_cli( region=region, profile=profile, capabilities=_capabilities, - parameter_overrides=parameter_overrides, + parameter_overrides=_parameter_overrides, confirm_changeset=changeset_decision, ) @@ -262,7 +268,7 @@ def do_cli( force_upload=force_upload, s3_prefix=s3_prefix, kms_key_id=kms_key_id, - parameter_overrides=parameter_overrides, + parameter_overrides=_parameter_overrides if interactive else parameter_overrides, capabilities=_capabilities if interactive else capabilities, no_execute_changeset=no_execute_changeset, role_arn=role_arn, @@ -276,10 +282,13 @@ def do_cli( deploy_context.run() -def guided_deploy(stack_name, s3_bucket, region, profile, confirm_changeset): +def guided_deploy( + stack_name, s3_bucket, region, profile, confirm_changeset, parameter_override_keys, parameter_overrides +): default_region = region or "us-east-1" default_capabilities = ("CAPABILITY_IAM",) input_capabilities = None + input_parameter_overrides = {} color = Colored() tick = color.yellow("✓") @@ -290,6 +299,14 @@ def guided_deploy(stack_name, s3_bucket, region, profile, confirm_changeset): stack_name = click.prompt(f"\t{tick} Stack Name", default=stack_name, type=click.STRING) region = click.prompt(f"\t{tick} AWS Region", default=default_region, type=click.STRING) + if parameter_override_keys: + for parameter_key in parameter_override_keys.keys(): + input_parameter_overrides[parameter_key] = click.prompt( + f"\t\tParameter {parameter_key}", + default=parameter_overrides.get(parameter_key, "default"), + type=click.STRING, + ) + click.secho("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy") confirm_changeset = click.confirm(f"\t{tick} Confirm changes before deploy", default=confirm_changeset) click.secho("\t#SAM needs permission to be able to create roles to connect to the resources in your template") @@ -317,13 +334,14 @@ def guided_deploy(stack_name, s3_bucket, region, profile, confirm_changeset): profile, confirm_changeset, input_capabilities if input_capabilities else default_capabilities, + input_parameter_overrides if input_parameter_overrides else parameter_overrides, save_to_config, ) def print_deploy_args(stack_name, s3_bucket, region, profile, capabilities, parameter_overrides, confirm_changeset): - param_overrides_string = json.dumps(parameter_overrides, indent=2) + param_overrides_string = parameter_overrides capabilities_string = json.dumps(capabilities) click.secho("\n\tDeploying with following values\n\t===============================", fg="yellow") @@ -338,7 +356,7 @@ def print_deploy_args(stack_name, s3_bucket, region, profile, capabilities, para click.secho("\n\tInitiating Deployment\n\t=====================", fg="yellow") -def save_config(template_file, **kwargs): +def save_config(template_file, parameter_overrides, **kwargs): color = Colored() tick = color.yellow("✓") @@ -357,6 +375,10 @@ def save_config(template_file, **kwargs): value = " ".join(val for val in value) samconfig.put(cmd_names, section, key, value) + if parameter_overrides: + parameter_overrides_value = " ".join([f"{key}={value}" for key, value in parameter_overrides.items()]) + samconfig.put(cmd_names, section, "parameter_overrides", parameter_overrides_value) + samconfig.flush() click.echo(f"\n\t{tick} Saved arguments to config file") diff --git a/tests/unit/commands/_utils/test_template.py b/tests/unit/commands/_utils/test_template.py index fe80c7dc79..4a773f2b37 100644 --- a/tests/unit/commands/_utils/test_template.py +++ 
b/tests/unit/commands/_utils/test_template.py @@ -12,6 +12,7 @@ RESOURCES_WITH_LOCAL_PATHS, _update_relative_paths, move_template, + get_template_parameters, ) @@ -45,6 +46,26 @@ def test_must_read_file_and_parse(self, pathlib_mock, yaml_parse_mock): m.assert_called_with(filename, "r") yaml_parse_mock.assert_called_with(file_data) + @patch("samcli.commands._utils.template.yaml_parse") + @patch("samcli.commands._utils.template.pathlib") + def test_must_read_file_and_get_parameters(self, pathlib_mock, yaml_parse_mock): + filename = "filename" + file_data = "contents of the file" + parse_result = {"Parameters": {"Myparameter": "String"}} + + pathlib_mock.Path.return_value.exists.return_value = True # Fake that the file exists + + m = mock_open(read_data=file_data) + yaml_parse_mock.return_value = parse_result + + with patch("samcli.commands._utils.template.open", m): + result = get_template_parameters(filename) + + self.assertEqual(result, {"Myparameter": "String"}) + + m.assert_called_with(filename, "r") + yaml_parse_mock.assert_called_with(file_data) + @parameterized.expand([param(ValueError()), param(yaml.YAMLError())]) @patch("samcli.commands._utils.template.yaml_parse") @patch("samcli.commands._utils.template.pathlib") diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index a8d8a92d76..74a74156a4 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -87,8 +87,10 @@ def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_con @patch("samcli.commands.deploy.deploy_context.DeployContext") @patch("samcli.commands.deploy.command.save_config") @patch("samcli.commands.deploy.command.manage_stack") + @patch("samcli.commands.deploy.command.get_template_parameters") def test_all_args_interactive( self, + mock_get_template_parameters, mock_managed_stack, mock_save_config, mock_deploy_context, @@ -98,9 +100,94 @@ def test_all_args_interactive( ): context_mock = Mock() + mock_get_template_parameters.return_value = {"Myparameter": {"Type": "String"}} mock_deploy_context.return_value.__enter__.return_value = context_mock - mock_deploy_click.prompt = MagicMock(side_effect=["sam-app", "us-east-1"]) - mock_deploy_click.confirm = MagicMock(side_effect=[True, True, True]) + mock_deploy_click.prompt = MagicMock( + side_effect=["sam-app", "us-east-1", "InteractiveParameter", ("CAPABILITY_IAM",)] + ) + mock_deploy_click.confirm = MagicMock(side_effect=[True, False, True]) + + mock_managed_stack.return_value = "managed-s3-bucket" + mock_save_config.return_value = True + + do_cli( + template_file=self.template_file, + stack_name=self.stack_name, + s3_bucket=None, + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region=self.region, + profile=self.profile, + use_json=self.use_json, + metadata=self.metadata, + interactive=True, + confirm_changeset=True, + ) + + mock_deploy_context.assert_called_with( + template_file=ANY, + stack_name="sam-app", + s3_bucket="managed-s3-bucket", + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides={"Myparameter": "InteractiveParameter"}, + capabilities=self.capabilities, + 
no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region="us-east-1", + profile=self.profile, + confirm_changeset=True, + ) + + context_mock.run.assert_called_with() + mock_save_config.assert_called_with( + "input-template-file", + capabilities=("CAPABILITY_IAM",), + confirm_changeset=True, + profile=self.profile, + region="us-east-1", + s3_bucket="managed-s3-bucket", + stack_name="sam-app", + parameter_overrides={"Myparameter": "InteractiveParameter"}, + ) + mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") + self.assertEqual(context_mock.run.call_count, 1) + + @patch("samcli.commands.package.command.click") + @patch("samcli.commands.package.package_context.PackageContext") + @patch("samcli.commands.deploy.command.click") + @patch("samcli.commands.deploy.deploy_context.DeployContext") + @patch("samcli.commands.deploy.command.save_config") + @patch("samcli.commands.deploy.command.manage_stack") + @patch("samcli.commands.deploy.command.get_template_parameters") + def test_all_args_interactive_no_params( + self, + mock_get_template_parameters, + mock_managed_stack, + mock_save_config, + mock_deploy_context, + mock_deploy_click, + mock_package_context, + mock_package_click, + ): + + context_mock = Mock() + mock_get_template_parameters.return_value = {} + mock_deploy_context.return_value.__enter__.return_value = context_mock + mock_deploy_click.prompt = MagicMock(side_effect=["sam-app", "us-east-1", ("CAPABILITY_IAM",)]) + mock_deploy_click.confirm = MagicMock(side_effect=[True, False, True]) mock_managed_stack.return_value = "managed-s3-bucket" mock_save_config.return_value = True @@ -155,6 +242,7 @@ def test_all_args_interactive( region="us-east-1", s3_bucket="managed-s3-bucket", stack_name="sam-app", + parameter_overrides=self.parameter_overrides, ) mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") self.assertEqual(context_mock.run.call_count, 1) From 66d4e343fba1adc64170d709568fcedd070f2623 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan Date: Tue, 19 Nov 2019 15:35:25 -0800 Subject: [PATCH 18/45] fix: interactive deploy specifies parameter defaults --- samcli/commands/deploy/command.py | 17 +++-- tests/unit/commands/deploy/test_command.py | 87 +++++++++++++++++++--- 2 files changed, 85 insertions(+), 19 deletions(-) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 6239881a8d..b673f88d49 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -227,7 +227,7 @@ def do_cli( s3_bucket=s3_bucket, region=region, profile=profile, - confirm_changeset=confirm_changeset, + confirm_changeset=changeset_decision, capabilities=_capabilities, parameter_overrides=_parameter_overrides, ) @@ -238,7 +238,6 @@ def do_cli( stack_name=stack_name, s3_bucket=s3_bucket, region=region, - profile=profile, capabilities=_capabilities, parameter_overrides=_parameter_overrides, confirm_changeset=changeset_decision, @@ -300,10 +299,12 @@ def guided_deploy( stack_name = click.prompt(f"\t{tick} Stack Name", default=stack_name, type=click.STRING) region = click.prompt(f"\t{tick} AWS Region", default=default_region, type=click.STRING) if parameter_override_keys: - for parameter_key in parameter_override_keys.keys(): + for parameter_key, parameter_properties in parameter_override_keys.items(): input_parameter_overrides[parameter_key] = 
click.prompt( - f"\t\tParameter {parameter_key}", - default=parameter_overrides.get(parameter_key, "default"), + f"\t{tick} Parameter {parameter_key}", + default=parameter_overrides.get( + parameter_key, parameter_properties.get("Default", "No default specified") + ), type=click.STRING, ) @@ -339,7 +340,7 @@ def guided_deploy( ) -def print_deploy_args(stack_name, s3_bucket, region, profile, capabilities, parameter_overrides, confirm_changeset): +def print_deploy_args(stack_name, s3_bucket, region, capabilities, parameter_overrides, confirm_changeset): param_overrides_string = parameter_overrides capabilities_string = json.dumps(capabilities) @@ -347,7 +348,6 @@ def print_deploy_args(stack_name, s3_bucket, region, profile, capabilities, para click.secho("\n\tDeploying with following values\n\t===============================", fg="yellow") click.echo(f"\tStack Name : {stack_name}") click.echo(f"\tRegion : {region}") - click.echo(f"\tProfile : {profile}") click.echo(f"\tDeployment S3 Bucket : {s3_bucket}") click.echo(f"\tParameter Overrides : {param_overrides_string}") click.echo(f"\tCapabilities : {capabilities_string}") @@ -373,7 +373,8 @@ def save_config(template_file, parameter_overrides, **kwargs): for key, value in kwargs.items(): if isinstance(value, (list, tuple)): value = " ".join(val for val in value) - samconfig.put(cmd_names, section, key, value) + if value: + samconfig.put(cmd_names, section, key, value) if parameter_overrides: parameter_overrides_value = " ".join([f"{key}={value}" for key, value in parameter_overrides.items()]) diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index 74a74156a4..7eb04ec956 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -2,6 +2,7 @@ from unittest.mock import patch, Mock, ANY, MagicMock from samcli.commands.deploy.command import do_cli +from samcli.lib.config.samconfig import SamConfig class TestDeployliCommand(TestCase): @@ -169,14 +170,16 @@ def test_all_args_interactive( @patch("samcli.commands.package.package_context.PackageContext") @patch("samcli.commands.deploy.command.click") @patch("samcli.commands.deploy.deploy_context.DeployContext") - @patch("samcli.commands.deploy.command.save_config") @patch("samcli.commands.deploy.command.manage_stack") @patch("samcli.commands.deploy.command.get_template_parameters") - def test_all_args_interactive_no_params( + @patch("samcli.commands.deploy.command.SamConfig") + @patch("samcli.commands.deploy.command.get_cmd_names") + def test_all_args_interactive_no_params_save_config( self, + mock_get_cmd_names, + mock_sam_config, mock_get_template_parameters, mock_managed_stack, - mock_save_config, mock_deploy_context, mock_deploy_click, mock_package_context, @@ -184,13 +187,13 @@ def test_all_args_interactive_no_params( ): context_mock = Mock() + mock_get_template_parameters.return_value = {} mock_deploy_context.return_value.__enter__.return_value = context_mock mock_deploy_click.prompt = MagicMock(side_effect=["sam-app", "us-east-1", ("CAPABILITY_IAM",)]) mock_deploy_click.confirm = MagicMock(side_effect=[True, False, True]) - + mock_get_cmd_names.return_value = ["deploy"] mock_managed_stack.return_value = "managed-s3-bucket" - mock_save_config.return_value = True do_cli( template_file=self.template_file, @@ -234,15 +237,77 @@ def test_all_args_interactive_no_params( ) context_mock.run.assert_called_with() - mock_save_config.assert_called_with( - "input-template-file", - 
capabilities=("CAPABILITY_IAM",), - confirm_changeset=True, + mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") + self.assertEqual(context_mock.run.call_count, 1) + + @patch("samcli.commands.package.command.click") + @patch("samcli.commands.package.package_context.PackageContext") + @patch("samcli.commands.deploy.command.click") + @patch("samcli.commands.deploy.deploy_context.DeployContext") + @patch("samcli.commands.deploy.command.save_config") + @patch("samcli.commands.deploy.command.manage_stack") + @patch("samcli.commands.deploy.command.get_template_parameters") + def test_all_args_interactive_no_params_no_save_config( + self, + mock_get_template_parameters, + mock_managed_stack, + mock_save_config, + mock_deploy_context, + mock_deploy_click, + mock_package_context, + mock_package_click, + ): + + context_mock = Mock() + mock_get_template_parameters.return_value = {} + mock_deploy_context.return_value.__enter__.return_value = context_mock + mock_deploy_click.prompt = MagicMock(side_effect=["sam-app", "us-east-1", ("CAPABILITY_IAM",)]) + mock_deploy_click.confirm = MagicMock(side_effect=[True, False, False]) + + mock_managed_stack.return_value = "managed-s3-bucket" + + do_cli( + template_file=self.template_file, + stack_name=self.stack_name, + s3_bucket=None, + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region=self.region, profile=self.profile, - region="us-east-1", - s3_bucket="managed-s3-bucket", + use_json=self.use_json, + metadata=self.metadata, + interactive=True, + confirm_changeset=True, + ) + + mock_deploy_context.assert_called_with( + template_file=ANY, stack_name="sam-app", + s3_bucket="managed-s3-bucket", + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region="us-east-1", + profile=self.profile, + confirm_changeset=True, ) + + context_mock.run.assert_called_with() + self.assertEqual(mock_save_config.call_count, 0) mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") self.assertEqual(context_mock.run.call_count, 1) From 29374d52798fe8ec0d368d06141bc34bd154eb30 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan Date: Tue, 19 Nov 2019 18:52:33 -0800 Subject: [PATCH 19/45] fix: always use a managed stack on deploy interactive --- samcli/commands/deploy/command.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index b673f88d49..6a5740c41d 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -322,11 +322,10 @@ def guided_deploy( save_to_config = click.confirm(f"\t{tick} Save arguments to samconfig.toml", default=True) - if not s3_bucket: - click.echo(color.yellow("\n\tConfiguring Deployment S3 Bucket\n\t================================")) - s3_bucket = manage_stack(profile=profile, region=region) - click.echo(f"\t{tick} Using Deployment Bucket: {s3_bucket}") 
- click.echo("\tYou may specify a different default deployment bucket in samconfig.toml") + click.echo(color.yellow("\n\tConfiguring Deployment S3 Bucket\n\t================================")) + s3_bucket = manage_stack(profile=profile, region=region) + click.echo(f"\t{tick} Using Deployment Bucket: {s3_bucket}") + click.echo("\tYou may specify a different default deployment bucket in samconfig.toml") return ( stack_name, From cc8cf3b5451b4595e6287512ed441012559e3bb4 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan Date: Tue, 19 Nov 2019 19:58:29 -0800 Subject: [PATCH 20/45] fix: smoke tests for deploy --- samcli/lib/deploy/deployer.py | 7 +++++++ tests/unit/lib/deploy/test_deployer.py | 12 +++++++++--- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index c73d2d1aa0..7275e3ac7b 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -101,7 +101,14 @@ def has_stack(self, stack_name): if "Stack with id {0} does not exist".format(stack_name) in str(e): LOG.debug("Stack with id %s does not exist", stack_name) return False + except botocore.exceptions.BotoCoreError as e: + # If there are credentials, environment errors, + # catch that and throw a deploy failed error. + LOG.debug("Botocore Exception : %s", str(e)) + raise DeployFailedError(stack_name=stack_name, msg=str(e)) + + except Exception as e: # We don't know anything about this exception. Don't handle LOG.debug("Unable to get stack details.", exc_info=e) raise e diff --git a/tests/unit/lib/deploy/test_deployer.py b/tests/unit/lib/deploy/test_deployer.py index 0d5b2d3f32..cc9628f28b 100644 --- a/tests/unit/lib/deploy/test_deployer.py +++ b/tests/unit/lib/deploy/test_deployer.py @@ -4,7 +4,7 @@ from unittest import TestCase from unittest.mock import patch, MagicMock, ANY -from botocore.exceptions import ClientError, WaiterError +from botocore.exceptions import ClientError, WaiterError, BotoCoreError from samcli.commands.deploy.exceptions import DeployFailedError, ChangeSetError, DeployStackOutPutFailedError from samcli.lib.deploy.deployer import Deployer @@ -72,9 +72,15 @@ def test_deployer_has_stack_exception_non_exsistent(self): def test_deployer_has_stack_exception(self): self.deployer._client.describe_stacks = MagicMock( - side_effect=ClientError(error_response={"Error": {"Message": "Error"}}, operation_name="stack_status") + side_effect=Exception() ) - with self.assertRaises(ClientError): + with self.assertRaises(Exception): + self.deployer.has_stack("test") + + def test_deployer_has_stack_exception_botocore(self): + self.deployer._client.describe_stacks = MagicMock( + side_effect=BotoCoreError()) + with self.assertRaises(DeployFailedError): self.deployer.has_stack("test") def test_create_changeset(self): From 8af88172853c064f40e89534e5a929d3fbf0baba Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Tue, 19 Nov 2019 22:06:59 -0800 Subject: [PATCH 21/45] fix: usability fixes on deploy --guided (#1555) - change sam build output text --- samcli/cli/cli_config_file.py | 2 +- samcli/commands/_utils/table_print.py | 2 +- samcli/commands/build/command.py | 8 +- samcli/commands/deploy/command.py | 111 ++++++++++++--------- samcli/commands/deploy/deploy_context.py | 9 +- samcli/commands/deploy/exceptions.py | 6 ++ samcli/lib/config/samconfig.py | 11 ++ samcli/lib/deploy/deployer.py | 10 +- tests/unit/commands/deploy/test_command.py | 32 ++++-- 
tests/unit/lib/deploy/test_deployer.py | 7 +- 10 files changed, 123 insertions(+), 75 deletions(-) diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py index b96ca51432..7191af3443 100644 --- a/samcli/cli/cli_config_file.py +++ b/samcli/cli/cli_config_file.py @@ -64,7 +64,7 @@ def __call__(self, config_dir, config_env, cmd_names): self.section, ) except Exception as ex: - LOG.error("Error reading configuration file: %s %s", samconfig.path(), str(ex)) + LOG.debug("Error reading configuration file: %s %s", samconfig.path(), str(ex)) return resolved_config diff --git a/samcli/commands/_utils/table_print.py b/samcli/commands/_utils/table_print.py index 6f161882cb..d22f4a0c43 100644 --- a/samcli/commands/_utils/table_print.py +++ b/samcli/commands/_utils/table_print.py @@ -56,7 +56,7 @@ def pprint_wrap(func): def wrap(*args, **kwargs): # The table is setup with the column names, format_string contains the column names. if table_header: - click.secho("\n" + table_header, fg=color) + click.secho("\n" + table_header) click.secho("-" * usable_width, fg=color) click.secho(format_string.format(*format_args, **format_kwargs), fg=color) click.secho("-" * usable_width, fg=color) diff --git a/samcli/commands/build/command.py b/samcli/commands/build/command.py index 31507a2f32..c6ff97914f 100644 --- a/samcli/commands/build/command.py +++ b/samcli/commands/build/command.py @@ -192,9 +192,9 @@ def gen_success_msg(artifacts_dir, output_template_path, is_default_build_dir): if not is_default_build_dir: invoke_cmd += " -t {}".format(output_template_path) - package_cmd = "sam package --s3-bucket " + deploy_cmd = "sam deploy --guided" if not is_default_build_dir: - package_cmd += " --template-file {}".format(output_template_path) + deploy_cmd += " --template-file {}".format(output_template_path) msg = """\nBuilt Artifacts : {artifacts_dir} Built Template : {template} @@ -202,9 +202,9 @@ def gen_success_msg(artifacts_dir, output_template_path, is_default_build_dir): Commands you can use next ========================= [*] Invoke Function: {invokecmd} -[*] Package: {packagecmd} +[*] Deploy: {deploycmd} """.format(invokecmd=invoke_cmd, - packagecmd=package_cmd, + deploycmd=deploy_cmd, artifacts_dir=artifacts_dir, template=output_template_path) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 6a5740c41d..4da0f09758 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -1,11 +1,15 @@ """ CLI command for "deploy" command """ -import tempfile import json +import tempfile + import click from click.types import FuncParamType +from samcli.cli.cli_config_file import configuration_option, TomlProvider +from samcli.cli.context import get_cmd_names +from samcli.cli.main import pass_context, common_options, aws_creds_options from samcli.commands._utils.options import ( parameter_override_option, capabilities_override_option, @@ -16,14 +20,11 @@ _space_separated_list_func_type, ) from samcli.commands._utils.template import get_template_parameters -from samcli.cli.cli_config_file import configuration_option, TomlProvider -from samcli.cli.main import pass_context, common_options, aws_creds_options -from samcli.lib.telemetry.metrics import track_command -from samcli.lib.utils.colors import Colored +from samcli.commands.deploy.exceptions import GuidedDeployFailedError from samcli.lib.bootstrap.bootstrap import manage_stack from samcli.lib.config.samconfig import SamConfig -from samcli.cli.context import get_cmd_names - +from 
samcli.lib.telemetry.metrics import track_command +from samcli.lib.utils.colors import Colored SHORT_HELP = "Deploy an AWS SAM application." @@ -122,12 +123,12 @@ "the output AWS CloudFormation template. YAML is used by default.", ) @click.option( - "--interactive", - "-i", + "--guided", + "-g", required=False, is_flag=True, is_eager=True, - help="Specify this flag to allow SAM CLI to guide you through the deployment using interactive prompts.", + help="Specify this flag to allow SAM CLI to guide you through the deployment using guided prompts.", ) @metadata_override_option @notification_arns_override_option @@ -155,7 +156,7 @@ def cli( use_json, tags, metadata, - interactive, + guided, confirm_changeset, ): @@ -176,7 +177,7 @@ def cli( use_json, tags, metadata, - interactive, + guided, confirm_changeset, ctx.region, ctx.profile, @@ -199,7 +200,7 @@ def do_cli( use_json, tags, metadata, - interactive, + guided, confirm_changeset, region, profile, @@ -207,12 +208,14 @@ def do_cli( from samcli.commands.package.package_context import PackageContext from samcli.commands.deploy.deploy_context import DeployContext - # set capabilities and changeset decision to None, before interactive gets input from the user + # set capabilities and changeset decision to None, before guided gets input from the user changeset_decision = None _capabilities = None _parameter_overrides = None - if interactive: + if guided: + + read_config_showcase(template_file=template_file) _parameter_override_keys = get_template_parameters(template_file=template_file) @@ -232,7 +235,7 @@ def do_cli( parameter_overrides=_parameter_overrides, ) - # We print deploy args only on interactive. + # We print deploy args only on guided. # Should we print this always? print_deploy_args( stack_name=stack_name, @@ -267,8 +270,8 @@ def do_cli( force_upload=force_upload, s3_prefix=s3_prefix, kms_key_id=kms_key_id, - parameter_overrides=_parameter_overrides if interactive else parameter_overrides, - capabilities=_capabilities if interactive else capabilities, + parameter_overrides=_parameter_overrides if guided else parameter_overrides, + capabilities=_capabilities if guided else capabilities, no_execute_changeset=no_execute_changeset, role_arn=role_arn, notification_arns=notification_arns, @@ -276,7 +279,7 @@ def do_cli( tags=tags, region=region, profile=profile, - confirm_changeset=changeset_decision if interactive else confirm_changeset, + confirm_changeset=changeset_decision if guided else confirm_changeset, ) as deploy_context: deploy_context.run() @@ -290,18 +293,17 @@ def guided_deploy( input_parameter_overrides = {} color = Colored() - tick = color.yellow("✓") click.echo( color.yellow("\n\tSetting default arguments for 'sam deploy'\n\t=========================================") ) - stack_name = click.prompt(f"\t{tick} Stack Name", default=stack_name, type=click.STRING) - region = click.prompt(f"\t{tick} AWS Region", default=default_region, type=click.STRING) + stack_name = click.prompt(f"\tStack name", default=stack_name, type=click.STRING) + region = click.prompt(f"\tAWS region", default=default_region, type=click.STRING) if parameter_override_keys: for parameter_key, parameter_properties in parameter_override_keys.items(): input_parameter_overrides[parameter_key] = click.prompt( - f"\t{tick} Parameter {parameter_key}", + f"\tParameter {parameter_key}", default=parameter_overrides.get( parameter_key, parameter_properties.get("Default", "No default specified") ), @@ -309,22 +311,20 @@ def guided_deploy( ) click.secho("\t#Shows you 
resources changes to be deployed and require a 'Y' to initiate deploy") - confirm_changeset = click.confirm(f"\t{tick} Confirm changes before deploy", default=confirm_changeset) + confirm_changeset = click.confirm(f"\tConfirm changes before deploy", default=confirm_changeset) click.secho("\t#SAM needs permission to be able to create roles to connect to the resources in your template") - capabilities_confirm = click.confirm(f"\t{tick} Allow SAM CLI IAM role creation", default=True) + capabilities_confirm = click.confirm(f"\tAllow SAM CLI IAM role creation", default=True) if not capabilities_confirm: input_capabilities = click.prompt( - f"\t{tick} Capabilities", - default=default_capabilities, - type=FuncParamType(func=_space_separated_list_func_type), + f"\t Capabilities", default=default_capabilities, type=FuncParamType(func=_space_separated_list_func_type) ) - save_to_config = click.confirm(f"\t{tick} Save arguments to samconfig.toml", default=True) + save_to_config = click.confirm(f"\tSave arguments to samconfig.toml", default=True) - click.echo(color.yellow("\n\tConfiguring Deployment S3 Bucket\n\t================================")) + click.echo(color.yellow("\n\tConfiguring deployment s3 bucket\n\t================================")) s3_bucket = manage_stack(profile=profile, region=region) - click.echo(f"\t{tick} Using Deployment Bucket: {s3_bucket}") + click.echo(f"\tUsing deployment bucket: {s3_bucket}") click.echo("\tYou may specify a different default deployment bucket in samconfig.toml") return ( @@ -345,27 +345,38 @@ def print_deploy_args(stack_name, s3_bucket, region, capabilities, parameter_ove capabilities_string = json.dumps(capabilities) click.secho("\n\tDeploying with following values\n\t===============================", fg="yellow") - click.echo(f"\tStack Name : {stack_name}") + click.echo(f"\tStack name : {stack_name}") click.echo(f"\tRegion : {region}") - click.echo(f"\tDeployment S3 Bucket : {s3_bucket}") - click.echo(f"\tParameter Overrides : {param_overrides_string}") + click.echo(f"\tConfirm changeset : {confirm_changeset}") + click.echo(f"\tDeployment s3 bucket : {s3_bucket}") click.echo(f"\tCapabilities : {capabilities_string}") - click.echo(f"\tConfirm Changeset : {confirm_changeset}") + click.echo(f"\tParameter overrides : {param_overrides_string}") + + click.secho("\nInitiating deployment\n=====================", fg="yellow") - click.secho("\n\tInitiating Deployment\n\t=====================", fg="yellow") + +def read_config_showcase(template_file): + _, samconfig = get_config_ctx(template_file) + + status = "Found" if samconfig.exists() else "Not found" + msg = ( + "Syntax invalid in samconfig.toml; save values " + "through sam deploy --guided to overwrite file with a valid set of values." 
+ ) + config_sanity = samconfig.sanity_check() + click.secho("\nConfiguring SAM deploy\n======================", fg="yellow") + click.echo(f"\n\tLooking for samconfig.toml : {status}") + if samconfig.exists(): + click.echo("\tReading default arguments : {}".format("Success" if config_sanity else "Failure")) + + if not config_sanity and samconfig.exists(): + raise GuidedDeployFailedError(msg) def save_config(template_file, parameter_overrides, **kwargs): - color = Colored() - tick = color.yellow("✓") section = CONFIG_SECTION - ctx = click.get_current_context() - - samconfig_dir = getattr(ctx, "samconfig_dir", None) - samconfig = SamConfig( - config_dir=samconfig_dir if samconfig_dir else SamConfig.config_dir(template_file_path=template_file) - ) + ctx, samconfig = get_config_ctx(template_file) cmd_names = get_cmd_names(ctx.info_name, ctx) @@ -381,7 +392,17 @@ def save_config(template_file, parameter_overrides, **kwargs): samconfig.flush() - click.echo(f"\n\t{tick} Saved arguments to config file") + click.echo(f"\n\tSaved arguments to config file") click.echo("\tRunning 'sam deploy' for future deployments will use the parameters saved above.") click.echo("\tThe above parameters can be changed by modifying samconfig.toml") click.echo("\tLearn more about samconfig.toml syntax http://url") + + +def get_config_ctx(template_file): + ctx = click.get_current_context() + + samconfig_dir = getattr(ctx, "samconfig_dir", None) + samconfig = SamConfig( + config_dir=samconfig_dir if samconfig_dir else SamConfig.config_dir(template_file_path=template_file) + ) + return ctx, samconfig diff --git a/samcli/commands/deploy/deploy_context.py b/samcli/commands/deploy/deploy_context.py index c95a4f0969..58a5270387 100644 --- a/samcli/commands/deploy/deploy_context.py +++ b/samcli/commands/deploy/deploy_context.py @@ -35,7 +35,8 @@ class DeployContext: MSG_EXECUTE_SUCCESS = "\nSuccessfully created/updated stack - {stack_name}\n" - MSG_CONFIRM_CHANGESET = "Do you want to deploy this changeset?" + MSG_CONFIRM_CHANGESET = "Confirm deploying?" 
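# Illustrative sketch (standalone, simplified) of what the samconfig sanity check used by
# read_config_showcase() amounts to: attempt a TOML parse and report a boolean instead of
# raising. The helper name and the OSError handling here are assumptions, not the exact code.
from pathlib import Path

import tomlkit
from tomlkit.exceptions import TOMLKitError

def toml_is_parsable(path):
    try:
        tomlkit.loads(Path(path).read_text())
        return True
    except (OSError, TOMLKitError):
        return False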
+ MSG_CONFIRM_CHANGESET_HEADER = "\nPreviewing CloudFormation changeset before deployment" def __init__( self, @@ -155,9 +156,9 @@ def deploy( return if confirm_changeset: - color = Colored() - tick = color.yellow("✓") - if not click.confirm(f"{tick} {self.MSG_CONFIRM_CHANGESET}", default=False): + click.secho(self.MSG_CONFIRM_CHANGESET_HEADER, fg="yellow") + click.secho("=" * len(self.MSG_CONFIRM_CHANGESET_HEADER), fg="yellow") + if not click.confirm(f"{self.MSG_CONFIRM_CHANGESET}", default=False): return self.deployer.execute_changeset(result["Id"], stack_name) diff --git a/samcli/commands/deploy/exceptions.py b/samcli/commands/deploy/exceptions.py index 7ee0f6e4f5..191a353c52 100644 --- a/samcli/commands/deploy/exceptions.py +++ b/samcli/commands/deploy/exceptions.py @@ -29,6 +29,12 @@ def __init__(self, stack_name, msg): super(DeployFailedError, self).__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) +class GuidedDeployFailedError(UserException): + def __init__(self, msg): + self.msg = msg + super(GuidedDeployFailedError, self).__init__(message=msg) + + class DeployStackOutPutFailedError(UserException): def __init__(self, stack_name, msg): self.stack_name = stack_name diff --git a/samcli/lib/config/samconfig.py b/samcli/lib/config/samconfig.py index bcad3a34de..4ca390c9e7 100644 --- a/samcli/lib/config/samconfig.py +++ b/samcli/lib/config/samconfig.py @@ -121,6 +121,17 @@ def flush(self): """ self._write() + def sanity_check(self): + """ + Sanity check the contents of samconfig + """ + try: + self._read() + except tomlkit.exceptions.TOMLKitError: + return False + else: + return True + def exists(self): return self.filepath.exists() diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index 7275e3ac7b..491719b7af 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -45,14 +45,14 @@ } ) -DESCRIBE_STACK_EVENTS_TABLE_HEADER_NAME = "**CloudFormation events from changeset**" +DESCRIBE_STACK_EVENTS_TABLE_HEADER_NAME = "CloudFormation events from changeset" DESCRIBE_CHANGESET_FORMAT_STRING = "{Operation:<{0}} {LogicalResourceId:<{1}} {ResourceType:<{2}}" DESCRIBE_CHANGESET_DEFAULT_ARGS = OrderedDict( {"Operation": "Operation", "LogicalResourceId": "LogicalResourceId", "ResourceType": "ResourceType"} ) -DESCRIBE_CHANGESET_TABLE_HEADER_NAME = "**CloudFormation stack changeset**" +DESCRIBE_CHANGESET_TABLE_HEADER_NAME = "CloudFormation stack changeset" OUTPUTS_FORMAT_STRING = "{OutputKey:<{0}} {OutputValue:<{1}} {Description:<{2}}" OUTPUTS_DEFAULTS_ARGS = OrderedDict( @@ -360,8 +360,10 @@ def _check_stack_complete(self, status): return "COMPLETE" in status and "CLEANUP" not in status def wait_for_execute(self, stack_name, changeset_type): - - sys.stdout.write("\nWaiting for stack create/update to complete\n") + sys.stdout.write( + "\n{} - Waiting for stack create/update " + "to complete\n".format(datetime.now().strftime("%Y-%m-%d %H:%M:%S")) + ) sys.stdout.flush() self.describe_stack_events(stack_name, self.get_last_event_time(stack_name)) diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index 7eb04ec956..7d0a9193bb 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -26,7 +26,7 @@ def setUp(self): self.profile = None self.use_json = True self.metadata = {} - self.interactive = False + self.guided = False self.confirm_changeset = False @patch("samcli.commands.package.command.click") @@ -56,7 +56,7 @@ def 
test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_con profile=self.profile, use_json=self.use_json, metadata=self.metadata, - interactive=self.interactive, + guided=self.guided, confirm_changeset=self.confirm_changeset, ) @@ -89,8 +89,10 @@ def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_con @patch("samcli.commands.deploy.command.save_config") @patch("samcli.commands.deploy.command.manage_stack") @patch("samcli.commands.deploy.command.get_template_parameters") - def test_all_args_interactive( + @patch("samcli.commands.deploy.command.get_config_ctx") + def test_all_args_guided( self, + mock_get_config_ctx, mock_get_template_parameters, mock_managed_stack, mock_save_config, @@ -101,10 +103,13 @@ def test_all_args_interactive( ): context_mock = Mock() + mock_sam_config = MagicMock() + mock_sam_config.exists = MagicMock(return_value=True) + mock_get_config_ctx.return_value = (None, mock_sam_config) mock_get_template_parameters.return_value = {"Myparameter": {"Type": "String"}} mock_deploy_context.return_value.__enter__.return_value = context_mock mock_deploy_click.prompt = MagicMock( - side_effect=["sam-app", "us-east-1", "InteractiveParameter", ("CAPABILITY_IAM",)] + side_effect=["sam-app", "us-east-1", "guidedParameter", ("CAPABILITY_IAM",)] ) mock_deploy_click.confirm = MagicMock(side_effect=[True, False, True]) @@ -129,7 +134,7 @@ def test_all_args_interactive( profile=self.profile, use_json=self.use_json, metadata=self.metadata, - interactive=True, + guided=True, confirm_changeset=True, ) @@ -140,7 +145,7 @@ def test_all_args_interactive( force_upload=self.force_upload, s3_prefix=self.s3_prefix, kms_key_id=self.kms_key_id, - parameter_overrides={"Myparameter": "InteractiveParameter"}, + parameter_overrides={"Myparameter": "guidedParameter"}, capabilities=self.capabilities, no_execute_changeset=self.no_execute_changeset, role_arn=self.role_arn, @@ -161,7 +166,7 @@ def test_all_args_interactive( region="us-east-1", s3_bucket="managed-s3-bucket", stack_name="sam-app", - parameter_overrides={"Myparameter": "InteractiveParameter"}, + parameter_overrides={"Myparameter": "guidedParameter"}, ) mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") self.assertEqual(context_mock.run.call_count, 1) @@ -174,7 +179,7 @@ def test_all_args_interactive( @patch("samcli.commands.deploy.command.get_template_parameters") @patch("samcli.commands.deploy.command.SamConfig") @patch("samcli.commands.deploy.command.get_cmd_names") - def test_all_args_interactive_no_params_save_config( + def test_all_args_guided_no_params_save_config( self, mock_get_cmd_names, mock_sam_config, @@ -213,7 +218,7 @@ def test_all_args_interactive_no_params_save_config( profile=self.profile, use_json=self.use_json, metadata=self.metadata, - interactive=True, + guided=True, confirm_changeset=True, ) @@ -247,8 +252,10 @@ def test_all_args_interactive_no_params_save_config( @patch("samcli.commands.deploy.command.save_config") @patch("samcli.commands.deploy.command.manage_stack") @patch("samcli.commands.deploy.command.get_template_parameters") - def test_all_args_interactive_no_params_no_save_config( + @patch("samcli.commands.deploy.command.get_config_ctx") + def test_all_args_guided_no_params_no_save_config( self, + mock_get_config_ctx, mock_get_template_parameters, mock_managed_stack, mock_save_config, @@ -259,6 +266,9 @@ def test_all_args_interactive_no_params_no_save_config( ): context_mock = Mock() + mock_sam_config = MagicMock() + mock_sam_config.exists = 
MagicMock(return_value=True) + mock_get_config_ctx.return_value = (None, mock_sam_config) mock_get_template_parameters.return_value = {} mock_deploy_context.return_value.__enter__.return_value = context_mock mock_deploy_click.prompt = MagicMock(side_effect=["sam-app", "us-east-1", ("CAPABILITY_IAM",)]) @@ -284,7 +294,7 @@ def test_all_args_interactive_no_params_no_save_config( profile=self.profile, use_json=self.use_json, metadata=self.metadata, - interactive=True, + guided=True, confirm_changeset=True, ) diff --git a/tests/unit/lib/deploy/test_deployer.py b/tests/unit/lib/deploy/test_deployer.py index cc9628f28b..69ee317e25 100644 --- a/tests/unit/lib/deploy/test_deployer.py +++ b/tests/unit/lib/deploy/test_deployer.py @@ -71,15 +71,12 @@ def test_deployer_has_stack_exception_non_exsistent(self): self.assertEqual(self.deployer.has_stack("test"), False) def test_deployer_has_stack_exception(self): - self.deployer._client.describe_stacks = MagicMock( - side_effect=Exception() - ) + self.deployer._client.describe_stacks = MagicMock(side_effect=Exception()) with self.assertRaises(Exception): self.deployer.has_stack("test") def test_deployer_has_stack_exception_botocore(self): - self.deployer._client.describe_stacks = MagicMock( - side_effect=BotoCoreError()) + self.deployer._client.describe_stacks = MagicMock(side_effect=BotoCoreError()) with self.assertRaises(DeployFailedError): self.deployer.has_stack("test") From 23d60be626da1d5d4976cc2d29fec480171bcf85 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Tue, 19 Nov 2019 22:33:25 -0800 Subject: [PATCH 22/45] fix: stack outputs on deploy (#1556) --- samcli/lib/deploy/deployer.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index 491719b7af..491ebc3f9f 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -54,10 +54,8 @@ DESCRIBE_CHANGESET_TABLE_HEADER_NAME = "CloudFormation stack changeset" -OUTPUTS_FORMAT_STRING = "{OutputKey:<{0}} {OutputValue:<{1}} {Description:<{2}}" -OUTPUTS_DEFAULTS_ARGS = OrderedDict( - {"OutputKey": "OutputKey", "OutputValue": "OutputValue", "Description": "Description"} -) +OUTPUTS_FORMAT_STRING = "{OutputKey-Description:<{0}} {OutputValue:<{1}}" +OUTPUTS_DEFAULTS_ARGS = OrderedDict({"OutputKey-Description": "OutputKey-Description", "OutputValue": "OutputValue"}) class Deployer: @@ -406,7 +404,7 @@ def create_and_wait_for_changeset( def _stack_outputs(self, stack_outputs, **kwargs): for output in stack_outputs: pprint_columns( - columns=[output["OutputKey"], output["OutputValue"], output.get("Description", "-")], + columns=[" - ".join([output["OutputKey"], output.get("Description", "")]), output["OutputValue"]], width=kwargs["width"], margin=kwargs["margin"], format_string=OUTPUTS_FORMAT_STRING, From dc1d10c25a802672182d62256e71f0ac3cfc35d5 Mon Sep 17 00:00:00 2001 From: Alex Wood Date: Tue, 19 Nov 2019 22:56:25 -0800 Subject: [PATCH 23/45] Guided Deploy UX Improvements (#1554) * UX Improvements * Reformatting with Black * Reformatting and Test Upgrade --- samcli/commands/deploy/command.py | 26 +++++++++++++++--------- samcli/commands/deploy/deploy_context.py | 8 ++++++-- samcli/lib/bootstrap/bootstrap.py | 9 ++++---- tests/unit/commands/init/test_cli.py | 2 ++ 4 files changed, 29 insertions(+), 16 deletions(-) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 4da0f09758..957ad22547 100644 --- 
a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -293,17 +293,19 @@ def guided_deploy( input_parameter_overrides = {} color = Colored() + start_bold = "\033[1m" + end_bold = "\033[0m" click.echo( color.yellow("\n\tSetting default arguments for 'sam deploy'\n\t=========================================") ) - stack_name = click.prompt(f"\tStack name", default=stack_name, type=click.STRING) - region = click.prompt(f"\tAWS region", default=default_region, type=click.STRING) + stack_name = click.prompt(f"\t{start_bold}Stack Name{end_bold}", default=stack_name, type=click.STRING) + region = click.prompt(f"\t{start_bold}AWS Region{end_bold}", default=default_region, type=click.STRING) if parameter_override_keys: for parameter_key, parameter_properties in parameter_override_keys.items(): input_parameter_overrides[parameter_key] = click.prompt( - f"\tParameter {parameter_key}", + f"\t{start_bold}Parameter {parameter_key}{end_bold}", default=parameter_overrides.get( parameter_key, parameter_properties.get("Default", "No default specified") ), @@ -311,21 +313,25 @@ def guided_deploy( ) click.secho("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy") - confirm_changeset = click.confirm(f"\tConfirm changes before deploy", default=confirm_changeset) + confirm_changeset = click.confirm( + f"\t{start_bold}Confirm changes before deploy{end_bold}", default=confirm_changeset + ) click.secho("\t#SAM needs permission to be able to create roles to connect to the resources in your template") - capabilities_confirm = click.confirm(f"\tAllow SAM CLI IAM role creation", default=True) + capabilities_confirm = click.confirm(f"\t{start_bold}Allow SAM CLI IAM role creation{end_bold}", default=True) if not capabilities_confirm: input_capabilities = click.prompt( - f"\t Capabilities", default=default_capabilities, type=FuncParamType(func=_space_separated_list_func_type) + f"\t{start_bold}Capabilities{end_bold}", + default=default_capabilities, + type=FuncParamType(func=_space_separated_list_func_type), ) - save_to_config = click.confirm(f"\tSave arguments to samconfig.toml", default=True) + save_to_config = click.confirm(f"\t{start_bold}Save arguments to samconfig.toml{end_bold}", default=True) - click.echo(color.yellow("\n\tConfiguring deployment s3 bucket\n\t================================")) + click.echo(color.yellow("\n\tS3 bucket for deployments\n\t=========================")) s3_bucket = manage_stack(profile=profile, region=region) - click.echo(f"\tUsing deployment bucket: {s3_bucket}") - click.echo("\tYou may specify a different default deployment bucket in samconfig.toml") + click.echo(f"\tS3 bucket: {s3_bucket}") + click.echo("\tA different default S3 bucket can be set in /.samconfig.toml") return ( stack_name, diff --git a/samcli/commands/deploy/deploy_context.py b/samcli/commands/deploy/deploy_context.py index 58a5270387..e2aa23a058 100644 --- a/samcli/commands/deploy/deploy_context.py +++ b/samcli/commands/deploy/deploy_context.py @@ -33,7 +33,7 @@ class DeployContext: MSG_NO_EXECUTE_CHANGESET = "\nChangeset created successfully. \n" - MSG_EXECUTE_SUCCESS = "\nSuccessfully created/updated stack - {stack_name}\n" + MSG_EXECUTE_SUCCESS = "\nSuccessfully created/updated stack - {stack_name} in {region}\n" MSG_CONFIRM_CHANGESET = "Confirm deploying?" 
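# Illustrative aside: the raw escape codes used for the bold prompt labels above are the
# same bytes click would produce itself, e.g.:
import click

assert "\033[1m" + "Stack Name" + "\033[0m" == click.style("Stack Name", bold=True)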
MSG_CONFIRM_CHANGESET_HEADER = "\nPreviewing CloudFormation changeset before deployment" @@ -111,6 +111,8 @@ def run(self): self.deployer = Deployer(cloudformation_client) + region = s3_client._client_config.region_name # pylint: disable=W0212 + return self.deploy( self.stack_name, template_str, @@ -121,6 +123,7 @@ def run(self): self.notification_arns, self.s3_uploader, [{"Key": key, "Value": value} for key, value in self.tags.items()] if self.tags else [], + region, self.fail_on_empty_changeset, self.confirm_changeset, ) @@ -136,6 +139,7 @@ def deploy( notification_arns, s3_uploader, tags, + region, fail_on_empty_changeset=True, confirm_changeset=False, ): @@ -163,7 +167,7 @@ def deploy( self.deployer.execute_changeset(result["Id"], stack_name) self.deployer.wait_for_execute(stack_name, changeset_type) - click.echo(self.MSG_EXECUTE_SUCCESS.format(stack_name=stack_name)) + click.echo(self.MSG_EXECUTE_SUCCESS.format(stack_name=stack_name, region=region)) except deploy_exceptions.ChangeEmptyError as ex: if fail_on_empty_changeset: diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py index e093c0fe37..758258b63b 100644 --- a/samcli/lib/bootstrap/bootstrap.py +++ b/samcli/lib/bootstrap/bootstrap.py @@ -31,9 +31,9 @@ def _create_or_get_stack(cloudformation_client): ds_resp = cloudformation_client.describe_stacks(StackName=SAM_CLI_STACK_NAME) stacks = ds_resp["Stacks"] stack = stacks[0] - LOG.info("\tFound managed SAM CLI stack.") + LOG.info("\tLooking for SAM CLI managed stack: Found!") except ClientError: - LOG.info("\tManaged SAM CLI stack not found, creating.") + LOG.info("\tLooking for SAM CLI managed stack: Not found.") stack = _create_stack(cloudformation_client) # exceptions are not captured from subcommands # Sanity check for non-none stack? Sanity check for tag? 
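# Illustrative sketch (assumed helper name) of the "find the managed stack or fall back to
# creating it" lookup above: describe_stacks() raises a ClientError for a missing stack,
# which is treated as "not found" rather than as a failure.
import boto3
from botocore.exceptions import ClientError

def find_managed_stack(stack_name, region=None):
    cfn = boto3.client("cloudformation", region_name=region)
    try:
        return cfn.describe_stacks(StackName=stack_name)["Stacks"][0]
    except ClientError:
        return None  # caller proceeds to create the stack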
tags = stack["Tags"] @@ -69,6 +69,7 @@ def _create_or_get_stack(cloudformation_client): def _create_stack(cloudformation_client): + LOG.info("\tCreating SAM CLI managed stack...") change_set_name = "InitialCreation" change_set_resp = cloudformation_client.create_change_set( StackName=SAM_CLI_STACK_NAME, @@ -78,7 +79,7 @@ def _create_stack(cloudformation_client): ChangeSetName=change_set_name, # this must be unique for the stack, but we only create so that's fine ) stack_id = change_set_resp["StackId"] - LOG.info("\tWaiting for managed stack change set to create.") + LOG.info("\tWaiting for managed stack change set to be created.") change_waiter = cloudformation_client.get_waiter("change_set_create_complete") change_waiter.wait( ChangeSetName=change_set_name, StackName=SAM_CLI_STACK_NAME, WaiterConfig={"Delay": 15, "MaxAttempts": 60} @@ -87,9 +88,9 @@ def _create_stack(cloudformation_client): LOG.info("\tWaiting for managed stack to be created.") stack_waiter = cloudformation_client.get_waiter("stack_create_complete") stack_waiter.wait(StackName=stack_id, WaiterConfig={"Delay": 15, "MaxAttempts": 60}) - LOG.info("\tManaged SAM CLI stack creation complete.") ds_resp = cloudformation_client.describe_stacks(StackName=SAM_CLI_STACK_NAME) stacks = ds_resp["Stacks"] + LOG.info("\tSuccessfully created SAM CLI managed stack!") return stacks[0] diff --git a/tests/unit/commands/init/test_cli.py b/tests/unit/commands/init/test_cli.py index 3c7a9b1274..90088f19b8 100644 --- a/tests/unit/commands/init/test_cli.py +++ b/tests/unit/commands/init/test_cli.py @@ -133,12 +133,14 @@ def test_init_cli_interactive_multiple_dep_mgrs(self, generate_project_patch, sd # 2: gradle as the dependency manager # test-project: response to name # N: Don't clone/update the source repo + # 1: first app template user_input = """ 1 5 2 test-project N +1 """ runner = CliRunner() result = runner.invoke(init_cmd, input=user_input) From 2e419810cf58dbe70dba4c681abe59039bd006d4 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Tue, 19 Nov 2019 23:14:21 -0800 Subject: [PATCH 24/45] fix: always showcase deploy args (#1557) * fix: always showcase deploy args * fix: set arguments on package during deploy guided --- samcli/commands/deploy/command.py | 41 +++++++++++----------- tests/unit/commands/deploy/test_command.py | 1 - 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 957ad22547..b7a594009c 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -212,6 +212,9 @@ def do_cli( changeset_decision = None _capabilities = None _parameter_overrides = None + guided_stack_name = None + guided_s3_bucket = None + guided_region = None if guided: @@ -219,38 +222,36 @@ def do_cli( _parameter_override_keys = get_template_parameters(template_file=template_file) - stack_name, s3_bucket, region, profile, changeset_decision, _capabilities, _parameter_overrides, save_to_config = guided_deploy( + guided_stack_name, guided_s3_bucket, guided_region, guided_profile, changeset_decision, _capabilities, _parameter_overrides, save_to_config = guided_deploy( stack_name, s3_bucket, region, profile, confirm_changeset, _parameter_override_keys, parameter_overrides ) if save_to_config: save_config( template_file, - stack_name=stack_name, - s3_bucket=s3_bucket, - region=region, - profile=profile, + stack_name=guided_stack_name, + s3_bucket=guided_s3_bucket, + region=guided_region, + 
profile=guided_profile, confirm_changeset=changeset_decision, capabilities=_capabilities, parameter_overrides=_parameter_overrides, ) - # We print deploy args only on guided. - # Should we print this always? - print_deploy_args( - stack_name=stack_name, - s3_bucket=s3_bucket, - region=region, - capabilities=_capabilities, - parameter_overrides=_parameter_overrides, - confirm_changeset=changeset_decision, - ) + print_deploy_args( + stack_name=guided_stack_name if guided else stack_name, + s3_bucket=guided_s3_bucket if guided else s3_bucket, + region=guided_region if guided else region, + capabilities=_capabilities if guided else capabilities, + parameter_overrides=_parameter_overrides if guided else parameter_overrides, + confirm_changeset=changeset_decision if guided else confirm_changeset, + ) with tempfile.NamedTemporaryFile() as output_template_file: with PackageContext( template_file=template_file, - s3_bucket=s3_bucket, + s3_bucket=guided_s3_bucket if guided else s3_bucket, s3_prefix=s3_prefix, output_template_file=output_template_file.name, kms_key_id=kms_key_id, @@ -258,15 +259,15 @@ def do_cli( force_upload=force_upload, metadata=metadata, on_deploy=True, - region=region, + region=guided_region if guided else region, profile=profile, ) as package_context: package_context.run() with DeployContext( template_file=output_template_file.name, - stack_name=stack_name, - s3_bucket=s3_bucket, + stack_name=guided_stack_name if guided else stack_name, + s3_bucket=guided_s3_bucket if guided else s3_bucket, force_upload=force_upload, s3_prefix=s3_prefix, kms_key_id=kms_key_id, @@ -277,7 +278,7 @@ def do_cli( notification_arns=notification_arns, fail_on_empty_changeset=fail_on_empty_changeset, tags=tags, - region=region, + region=guided_region if guided else region, profile=profile, confirm_changeset=changeset_decision if guided else confirm_changeset, ) as deploy_context: diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index 7d0a9193bb..39162d72fc 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -2,7 +2,6 @@ from unittest.mock import patch, Mock, ANY, MagicMock from samcli.commands.deploy.command import do_cli -from samcli.lib.config.samconfig import SamConfig class TestDeployliCommand(TestCase): From 1d1e9d606011d63b9b896547c39fae503dc81671 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Wed, 20 Nov 2019 12:23:52 -0800 Subject: [PATCH 25/45] fix: parameter override prompts during guided deploy (#1558) * fix: parameter override prompts during guided deploy * tests: unit tests for not saving secure params during guided deploy --- samcli/commands/deploy/command.py | 49 ++++++--- samcli/lib/bootstrap/bootstrap.py | 2 +- tests/unit/commands/deploy/test_command.py | 109 ++++++++++++++++++++- 3 files changed, 141 insertions(+), 19 deletions(-) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index b7a594009c..33eb24204c 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -271,7 +271,7 @@ def do_cli( force_upload=force_upload, s3_prefix=s3_prefix, kms_key_id=kms_key_id, - parameter_overrides=_parameter_overrides if guided else parameter_overrides, + parameter_overrides=sanitize_parameter_overrides(_parameter_overrides) if guided else parameter_overrides, capabilities=_capabilities if guided else capabilities, no_execute_changeset=no_execute_changeset, 
role_arn=role_arn, @@ -305,13 +305,21 @@ def guided_deploy( region = click.prompt(f"\t{start_bold}AWS Region{end_bold}", default=default_region, type=click.STRING) if parameter_override_keys: for parameter_key, parameter_properties in parameter_override_keys.items(): - input_parameter_overrides[parameter_key] = click.prompt( - f"\t{start_bold}Parameter {parameter_key}{end_bold}", - default=parameter_overrides.get( - parameter_key, parameter_properties.get("Default", "No default specified") - ), - type=click.STRING, - ) + no_echo = parameter_properties.get("NoEcho", False) + if no_echo: + parameter = click.prompt( + f"\t{start_bold}Parameter {parameter_key}{end_bold}", type=click.STRING, hide_input=True + ) + input_parameter_overrides[parameter_key] = {"Value": parameter, "Hidden": True} + else: + parameter = click.prompt( + f"\t{start_bold}Parameter {parameter_key}{end_bold}", + default=parameter_overrides.get( + parameter_key, parameter_properties.get("Default", "No default specified") + ), + type=click.STRING, + ) + input_parameter_overrides[parameter_key] = {"Value": parameter, "Hidden": False} click.secho("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy") confirm_changeset = click.confirm( @@ -332,7 +340,7 @@ def guided_deploy( click.echo(color.yellow("\n\tS3 bucket for deployments\n\t=========================")) s3_bucket = manage_stack(profile=profile, region=region) click.echo(f"\tS3 bucket: {s3_bucket}") - click.echo("\tA different default S3 bucket can be set in /.samconfig.toml") + click.echo("\tA different default S3 bucket can be set in samconfig.toml") return ( stack_name, @@ -348,7 +356,11 @@ def guided_deploy( def print_deploy_args(stack_name, s3_bucket, region, capabilities, parameter_overrides, confirm_changeset): - param_overrides_string = parameter_overrides + _parameters = parameter_overrides.copy() + for key, value in _parameters.items(): + if isinstance(value, dict): + _parameters[key] = value.get("Value", value) if not value.get("Hidden") else "*" * len(value.get("Value")) + capabilities_string = json.dumps(capabilities) click.secho("\n\tDeploying with following values\n\t===============================", fg="yellow") @@ -357,7 +369,7 @@ def print_deploy_args(stack_name, s3_bucket, region, capabilities, parameter_ove click.echo(f"\tConfirm changeset : {confirm_changeset}") click.echo(f"\tDeployment s3 bucket : {s3_bucket}") click.echo(f"\tCapabilities : {capabilities_string}") - click.echo(f"\tParameter overrides : {param_overrides_string}") + click.echo(f"\tParameter overrides : {_parameters}") click.secho("\nInitiating deployment\n=====================", fg="yellow") @@ -394,8 +406,15 @@ def save_config(template_file, parameter_overrides, **kwargs): samconfig.put(cmd_names, section, key, value) if parameter_overrides: - parameter_overrides_value = " ".join([f"{key}={value}" for key, value in parameter_overrides.items()]) - samconfig.put(cmd_names, section, "parameter_overrides", parameter_overrides_value) + _params = [] + for key, value in parameter_overrides.items(): + if isinstance(value, dict): + if not value.get("Hidden"): + _params.append(f"{key}={value.get('Value')}") + else: + _params.append(f"{key}={value}") + if _params: + samconfig.put(cmd_names, section, "parameter_overrides", " ".join(_params)) samconfig.flush() @@ -413,3 +432,7 @@ def get_config_ctx(template_file): config_dir=samconfig_dir if samconfig_dir else SamConfig.config_dir(template_file_path=template_file) ) return ctx, samconfig + + +def 
sanitize_parameter_overrides(parameter_overrides): + return {key: value.get("Value") if isinstance(value, dict) else value for key, value in parameter_overrides.items()} diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py index 758258b63b..a39489e82f 100644 --- a/samcli/lib/bootstrap/bootstrap.py +++ b/samcli/lib/bootstrap/bootstrap.py @@ -96,7 +96,7 @@ def _create_stack(cloudformation_client): def _get_stack_template(): gc = GlobalConfig() - info = {"version": __version__, "installationId": gc.installation_id} + info = {"version": __version__, "installationId": gc.installation_id if gc.installation_id else "unknown"} template = """ AWSTemplateFormatVersion : '2010-09-09' diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index 39162d72fc..3a17a4c7cd 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -1,7 +1,8 @@ from unittest import TestCase -from unittest.mock import patch, Mock, ANY, MagicMock +from unittest.mock import patch, Mock, ANY, MagicMock, call from samcli.commands.deploy.command import do_cli +from tests.unit.cli.test_cli_config_file import MockContext class TestDeployliCommand(TestCase): @@ -105,10 +106,13 @@ def test_all_args_guided( mock_sam_config = MagicMock() mock_sam_config.exists = MagicMock(return_value=True) mock_get_config_ctx.return_value = (None, mock_sam_config) - mock_get_template_parameters.return_value = {"Myparameter": {"Type": "String"}} + mock_get_template_parameters.return_value = { + "Myparameter": {"Type": "String"}, + "MyNoEchoParameter": {"Type": "String", "NoEcho": True}, + } mock_deploy_context.return_value.__enter__.return_value = context_mock mock_deploy_click.prompt = MagicMock( - side_effect=["sam-app", "us-east-1", "guidedParameter", ("CAPABILITY_IAM",)] + side_effect=["sam-app", "us-east-1", "guidedParameter", "secure", ("CAPABILITY_IAM",)] ) mock_deploy_click.confirm = MagicMock(side_effect=[True, False, True]) @@ -144,7 +148,7 @@ def test_all_args_guided( force_upload=self.force_upload, s3_prefix=self.s3_prefix, kms_key_id=self.kms_key_id, - parameter_overrides={"Myparameter": "guidedParameter"}, + parameter_overrides={"Myparameter": "guidedParameter", "MyNoEchoParameter": "secure"}, capabilities=self.capabilities, no_execute_changeset=self.no_execute_changeset, role_arn=self.role_arn, @@ -165,11 +169,106 @@ def test_all_args_guided( region="us-east-1", s3_bucket="managed-s3-bucket", stack_name="sam-app", - parameter_overrides={"Myparameter": "guidedParameter"}, + parameter_overrides={ + "Myparameter": {"Value": "guidedParameter", "Hidden": False}, + "MyNoEchoParameter": {"Value": "secure", "Hidden": True}, + }, ) mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") self.assertEqual(context_mock.run.call_count, 1) + @patch("samcli.commands.package.command.click") + @patch("samcli.commands.package.package_context.PackageContext") + @patch("samcli.commands.deploy.command.click") + @patch("samcli.commands.deploy.deploy_context.DeployContext") + @patch("samcli.commands.deploy.command.manage_stack") + @patch("samcli.commands.deploy.command.get_template_parameters") + @patch("samcli.commands.deploy.command.get_config_ctx") + def test_all_args_guided_no_save_echo_param_to_config( + self, + mock_get_config_ctx, + mock_get_template_parameters, + mock_managed_stack, + mock_deploy_context, + mock_deploy_click, + mock_package_context, + mock_package_click, + ): + + context_mock = Mock() + 
mock_sam_config = MagicMock() + mock_sam_config.exists = MagicMock(return_value=True) + mock_get_config_ctx.return_value = (MockContext(info_name="deploy", parent=None), mock_sam_config) + mock_get_template_parameters.return_value = { + "Myparameter": {"Type": "String"}, + "MyNoEchoParameter": {"Type": "String", "NoEcho": True}, + } + mock_deploy_context.return_value.__enter__.return_value = context_mock + mock_deploy_click.prompt = MagicMock( + side_effect=["sam-app", "us-east-1", "guidedParameter", "secure", ("CAPABILITY_IAM",)] + ) + mock_deploy_click.confirm = MagicMock(side_effect=[True, False, True]) + + mock_managed_stack.return_value = "managed-s3-bucket" + + do_cli( + template_file=self.template_file, + stack_name=self.stack_name, + s3_bucket=None, + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region=self.region, + profile=self.profile, + use_json=self.use_json, + metadata=self.metadata, + guided=True, + confirm_changeset=True, + ) + + mock_deploy_context.assert_called_with( + template_file=ANY, + stack_name="sam-app", + s3_bucket="managed-s3-bucket", + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides={"Myparameter": "guidedParameter", "MyNoEchoParameter": "secure"}, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region="us-east-1", + profile=self.profile, + confirm_changeset=True, + ) + + context_mock.run.assert_called_with() + mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") + self.assertEqual(context_mock.run.call_count, 1) + + self.assertEqual(mock_sam_config.put.call_count, 6) + self.assertEqual( + mock_sam_config.put.call_args_list, + [ + call(["deploy"], "parameters", "stack_name", "sam-app"), + call(["deploy"], "parameters", "s3_bucket", "managed-s3-bucket"), + call(["deploy"], "parameters", "region", "us-east-1"), + call(["deploy"], "parameters", "confirm_changeset", True), + call(["deploy"], "parameters", "capabilities", "CAPABILITY_IAM"), + call(["deploy"], "parameters", "parameter_overrides", "Myparameter=guidedParameter"), + ], + ) + @patch("samcli.commands.package.command.click") @patch("samcli.commands.package.package_context.PackageContext") @patch("samcli.commands.deploy.command.click") From 72f740e8ecc89b7c97e64f823060238c55fd7d2e Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Wed, 20 Nov 2019 12:30:21 -0800 Subject: [PATCH 26/45] fix: only create config file on write (#1559) --- samcli/lib/config/samconfig.py | 12 +++++++----- tests/unit/lib/config/test_samconfig.py | 6 +++++- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/samcli/lib/config/samconfig.py b/samcli/lib/config/samconfig.py index 4ca390c9e7..f2fb8d7dce 100644 --- a/samcli/lib/config/samconfig.py +++ b/samcli/lib/config/samconfig.py @@ -36,8 +36,6 @@ def __init__(self, config_dir, filename=None): could automatically support auto-resolving multiple config files within same directory. 
""" self.filepath = Path(config_dir, filename or DEFAULT_CONFIG_FILE_NAME) - if not self.filepath.exists(): - open(self.filepath, "a+").close() def get_all(self, cmd_names, section, env=DEFAULT_ENV): """ @@ -153,15 +151,19 @@ def _read(self): if self.document: return self.document - txt = self.filepath.read_text() - self.document = tomlkit.loads(txt) + try: + txt = self.filepath.read_text() + self.document = tomlkit.loads(txt) + except OSError: + self.document = tomlkit.document() return self.document def _write(self): if not self.document: return - + if not self.exists(): + open(self.filepath, "a+").close() self.filepath.write_text(tomlkit.dumps(self.document)) @staticmethod diff --git a/tests/unit/lib/config/test_samconfig.py b/tests/unit/lib/config/test_samconfig.py index c39e8f494d..54575c1198 100644 --- a/tests/unit/lib/config/test_samconfig.py +++ b/tests/unit/lib/config/test_samconfig.py @@ -10,7 +10,6 @@ class TestSamConfig(TestCase): def setUp(self): self.config_dir = os.getcwd() self.samconfig = SamConfig(self.config_dir) - open(self.samconfig.path(), "w").close() def tearDown(self): if self.samconfig.exists(): @@ -19,6 +18,8 @@ def tearDown(self): def _setup_config(self): self.samconfig.put(cmd_names=["local", "start", "api"], section="parameters", key="port", value=5401) self.samconfig.flush() + self.assertTrue(self.samconfig.exists()) + self.assertTrue(self.samconfig.sanity_check()) def test_init(self): self.assertEqual(self.samconfig.filepath, Path(self.config_dir, DEFAULT_CONFIG_FILE_NAME)) @@ -32,3 +33,6 @@ def test_check_config_get(self): def test_check_config_exists(self): self._setup_config() self.assertTrue(self.samconfig.exists()) + + def test_check_sanity(self): + self.assertTrue(self.samconfig.sanity_check()) From 642f29d792432058122e1b2d96e82c9c226fbabf Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Wed, 20 Nov 2019 13:54:04 -0800 Subject: [PATCH 27/45] fix: usability fixes on deploy error with no s3 bucket (#1561) --- samcli/commands/deploy/deploy_context.py | 2 +- samcli/commands/package/exceptions.py | 2 +- tests/unit/lib/package/test_s3_uploader.py | 3 ++- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/samcli/commands/deploy/deploy_context.py b/samcli/commands/deploy/deploy_context.py index e2aa23a058..c482c98757 100644 --- a/samcli/commands/deploy/deploy_context.py +++ b/samcli/commands/deploy/deploy_context.py @@ -35,7 +35,7 @@ class DeployContext: MSG_EXECUTE_SUCCESS = "\nSuccessfully created/updated stack - {stack_name} in {region}\n" - MSG_CONFIRM_CHANGESET = "Confirm deploying?" + MSG_CONFIRM_CHANGESET = "Deploy this changeset?" 
     MSG_CONFIRM_CHANGESET_HEADER = "\nPreviewing CloudFormation changeset before deployment"
 
     def __init__(
diff --git a/samcli/commands/package/exceptions.py b/samcli/commands/package/exceptions.py
index 7775949fcd..51d3811915 100644
--- a/samcli/commands/package/exceptions.py
+++ b/samcli/commands/package/exceptions.py
@@ -87,6 +87,6 @@ class BucketNotSpecifiedError(UserException):
     def __init__(self, **kwargs):
         self.kwargs = kwargs
 
-        message_fmt = "\nS3 Bucket not specified, use --s3-bucket to specify a bucket name"
+        message_fmt = "\nS3 Bucket not specified, use --s3-bucket to specify a bucket name or run sam deploy --guided"
 
         super(BucketNotSpecifiedError, self).__init__(message=message_fmt.format(**self.kwargs))
diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py
index a2aef1e008..496d9db08c 100644
--- a/tests/unit/lib/package/test_s3_uploader.py
+++ b/tests/unit/lib/package/test_s3_uploader.py
@@ -154,9 +154,10 @@ def test_s3_upload_no_bucket(self):
         )
         s3_uploader.artifact_metadata = {"a": "b"}
         remote_path = Path.joinpath(Path(os.getcwd()), Path("tmp"))
-        with self.assertRaises(BucketNotSpecifiedError):
+        with self.assertRaises(BucketNotSpecifiedError) as ex:
             with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
                 s3_uploader.upload(f.name, remote_path)
+        self.assertEqual(BucketNotSpecifiedError().message, str(ex))
 
     def test_s3_upload_with_dedup(self):
         s3_uploader = S3Uploader(

From 9d64966cb6a8ba0137cf5bceb8895119fc31d9cc Mon Sep 17 00:00:00 2001
From: Alex Wood
Date: Wed, 20 Nov 2019 13:54:44 -0800
Subject: [PATCH 28/45] Add Exception Handling for Client Misconfiguration (#1560)

* Add Exception Handling for Client Misconfiguration

* Import Botocore Exceptions

* Change Client Exception Hierarchy

Remaining work is to add tests for this case.

* Add Unit Test for Bootstrap Client Failures
---
 samcli/commands/exceptions.py              |  6 ++++++
 samcli/lib/bootstrap/bootstrap.py          | 18 ++++++++++++-----
 tests/unit/lib/bootstrap/test_bootstrap.py | 23 +++++++++++++++++++---
 3 files changed, 39 insertions(+), 8 deletions(-)

diff --git a/samcli/commands/exceptions.py b/samcli/commands/exceptions.py
index 3912ab9424..159d05bea4 100644
--- a/samcli/commands/exceptions.py
+++ b/samcli/commands/exceptions.py
@@ -18,3 +18,9 @@ class CredentialsError(UserException):
     """
     Exception class when credentials that have been passed are invalid.
     """
+
+
+class RegionError(UserException):
+    """
+    Exception class when no valid region is passed to a client.
+ """ diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py index a39489e82f..9620465c38 100644 --- a/samcli/lib/bootstrap/bootstrap.py +++ b/samcli/lib/bootstrap/bootstrap.py @@ -7,11 +7,11 @@ import boto3 from botocore.config import Config -from botocore.exceptions import ClientError +from botocore.exceptions import ClientError, NoRegionError, NoCredentialsError from samcli import __version__ from samcli.cli.global_config import GlobalConfig -from samcli.commands.exceptions import UserException +from samcli.commands.exceptions import UserException, CredentialsError, RegionError LOG = logging.getLogger(__name__) @@ -19,9 +19,17 @@ def manage_stack(profile, region): - session = boto3.Session(profile_name=profile if profile else None) - cloudformation_client = session.client("cloudformation", config=Config(region_name=region if region else None)) - + try: + session = boto3.Session(profile_name=profile if profile else None) + cloudformation_client = session.client("cloudformation", config=Config(region_name=region if region else None)) + except NoCredentialsError: + raise CredentialsError( + "Error Setting Up Managed Stack Client: Unable to resolve credentials for the AWS SDK for Python client. Please see their documentation for options to pass in credentials: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html" + ) + except NoRegionError: + raise RegionError( + "Error Setting Up Managed Stack Client: Unable to resolve a region. Please provide a region via the --region parameter or by the AWS_REGION environment variable." + ) return _create_or_get_stack(cloudformation_client) diff --git a/tests/unit/lib/bootstrap/test_bootstrap.py b/tests/unit/lib/bootstrap/test_bootstrap.py index fd548b1b66..ccc073abdf 100644 --- a/tests/unit/lib/bootstrap/test_bootstrap.py +++ b/tests/unit/lib/bootstrap/test_bootstrap.py @@ -1,12 +1,13 @@ from unittest import TestCase +from unittest.mock import patch, Mock import botocore.session -from botocore.exceptions import ClientError +from botocore.exceptions import ClientError, NoCredentialsError, NoRegionError from botocore.stub import Stubber -from samcli.commands.exceptions import UserException -from samcli.lib.bootstrap.bootstrap import _create_or_get_stack, _get_stack_template, SAM_CLI_STACK_NAME +from samcli.commands.exceptions import UserException, CredentialsError, RegionError +from samcli.lib.bootstrap.bootstrap import manage_stack, _create_or_get_stack, _get_stack_template, SAM_CLI_STACK_NAME class TestBootstrapManagedStack(TestCase): @@ -14,6 +15,22 @@ def _stubbed_cf_client(self): cf = botocore.session.get_session().create_client("cloudformation") return [cf, Stubber(cf)] + @patch("boto3.Session") + def test_client_missing_credentials(self, boto_mock): + session_mock = Mock() + session_mock.client.side_effect = NoCredentialsError() + boto_mock.return_value = session_mock + with self.assertRaises(CredentialsError): + manage_stack("testprofile", "fake-region") + + @patch("boto3.Session") + def test_client_missing_region(self, boto_mock): + session_mock = Mock() + session_mock.client.side_effect = NoRegionError() + boto_mock.return_value = session_mock + with self.assertRaises(RegionError): + manage_stack("testprofile", "fake-region") + def test_new_stack(self): stub_cf, stubber = self._stubbed_cf_client() # first describe_stacks call will fail From b58ceaa0f424c120f2eda15feb36a4d741521b65 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: 
Wed, 20 Nov 2019 17:18:39 -0800 Subject: [PATCH 29/45] fix: helpful error message when deploy happens across regions (#1562) - deployment s3 bucket is specific to the region it was created in. cfn/lambda requires code to be packaged within the same region as where the stack is to be deployed. When such a case is encountered prompt to use `sam deploy --guided` --- samcli/commands/deploy/exceptions.py | 9 +++++++ samcli/lib/deploy/deployer.py | 11 +++++++- tests/unit/lib/deploy/test_deployer.py | 37 +++++++++++++++++++++++++- 3 files changed, 55 insertions(+), 2 deletions(-) diff --git a/samcli/commands/deploy/exceptions.py b/samcli/commands/deploy/exceptions.py index 191a353c52..04d155e1bc 100644 --- a/samcli/commands/deploy/exceptions.py +++ b/samcli/commands/deploy/exceptions.py @@ -47,6 +47,15 @@ def __init__(self, stack_name, msg): ) +class DeployBucketInDifferentRegionError(UserException): + def __init__(self, msg): + self.msg = msg + + message_fmt = "{msg} : deployment s3 bucket is in a different region, try sam deploy --guided" + + super(DeployBucketInDifferentRegionError, self).__init__(message=message_fmt.format(msg=self.msg)) + + class DeployBucketRequiredError(UserException): def __init__(self): diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index 491ebc3f9f..c05a38697c 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -25,7 +25,12 @@ import botocore from samcli.lib.deploy.utils import DeployColor -from samcli.commands.deploy.exceptions import DeployFailedError, ChangeSetError, DeployStackOutPutFailedError +from samcli.commands.deploy.exceptions import ( + DeployFailedError, + ChangeSetError, + DeployStackOutPutFailedError, + DeployBucketInDifferentRegionError, +) from samcli.commands._utils.table_print import pprint_column_names, pprint_columns from samcli.commands.deploy import exceptions as deploy_exceptions from samcli.lib.package.artifact_exporter import mktempfile, parse_s3_url @@ -176,6 +181,10 @@ def create_changeset( try: resp = self._client.create_change_set(**kwargs) return resp, changeset_type + except botocore.exceptions.ClientError as ex: + if "The bucket you are attempting to access must be addressed using the specified endpoint" in str(ex): + raise DeployBucketInDifferentRegionError(f"Failed to create/update stack {stack_name}") + except Exception as ex: LOG.debug("Unable to create changeset", exc_info=ex) raise ChangeSetError(stack_name=stack_name, msg=str(ex)) diff --git a/tests/unit/lib/deploy/test_deployer.py b/tests/unit/lib/deploy/test_deployer.py index 69ee317e25..8749f1bb01 100644 --- a/tests/unit/lib/deploy/test_deployer.py +++ b/tests/unit/lib/deploy/test_deployer.py @@ -6,7 +6,12 @@ from botocore.exceptions import ClientError, WaiterError, BotoCoreError -from samcli.commands.deploy.exceptions import DeployFailedError, ChangeSetError, DeployStackOutPutFailedError +from samcli.commands.deploy.exceptions import ( + DeployFailedError, + ChangeSetError, + DeployStackOutPutFailedError, + DeployBucketInDifferentRegionError, +) from samcli.lib.deploy.deployer import Deployer from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.utils.time import utc_to_timestamp, to_datetime @@ -158,6 +163,36 @@ def test_create_changeset_exception(self): tags={"unit": "true"}, ) + def test_create_changeset_ClientErrorException(self): + error_message = ( + "An error occurred (ValidationError) when calling the CreateChangeSet " + "operation: S3 error: The bucket you are attempting to access must be " + 
"addressed using the specified endpoint. " + "Please send all future requests to this " + "endpoint.\nFor more information " + "check http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html" + ) + self.deployer.has_stack = MagicMock(return_value=False) + self.deployer._client.create_change_set = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": error_message}}, operation_name="create_changeset" + ) + ) + with self.assertRaises(DeployBucketInDifferentRegionError): + self.deployer.create_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + def test_describe_changeset_with_changes(self): response = [ { From 6a9ba71d518dbf0c7c157f396b78f1e5a4840559 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Thu, 21 Nov 2019 11:08:43 -0800 Subject: [PATCH 30/45] tests: integration tests for `sam deploy` (#1565) - integration tests for `sam deploy` and `sam deploy --guided` - fix for regression tests - `stack-name` is required - additional error cases for when a s3 bucket is needed to be looked up during `sam deploy` - move parameter prompts logic to its own function --- samcli/commands/deploy/command.py | 49 +-- samcli/commands/deploy/deploy_context.py | 3 +- samcli/lib/package/s3_uploader.py | 4 + tests/integration/deploy/deploy_integ_base.py | 6 + .../integration/deploy/test_deploy_command.py | 299 +++++++++++++++++- .../deploy/test_deploy_regression.py | 6 +- 6 files changed, 339 insertions(+), 28 deletions(-) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 33eb24204c..3ce09efa68 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -50,8 +50,7 @@ @template_click_option(include_build=True) @click.option( "--stack-name", - required=False, - default="sam-app", + required=True, help="The name of the AWS CloudFormation stack you're deploying to. " "If you specify an existing stack, the command updates the stack. 
" "If you specify a new stack, the command creates it.", @@ -288,10 +287,10 @@ def do_cli( def guided_deploy( stack_name, s3_bucket, region, profile, confirm_changeset, parameter_override_keys, parameter_overrides ): + default_stack_name = stack_name or "sam-app" default_region = region or "us-east-1" default_capabilities = ("CAPABILITY_IAM",) input_capabilities = None - input_parameter_overrides = {} color = Colored() start_bold = "\033[1m" @@ -301,25 +300,9 @@ def guided_deploy( color.yellow("\n\tSetting default arguments for 'sam deploy'\n\t=========================================") ) - stack_name = click.prompt(f"\t{start_bold}Stack Name{end_bold}", default=stack_name, type=click.STRING) + stack_name = click.prompt(f"\t{start_bold}Stack Name{end_bold}", default=default_stack_name, type=click.STRING) region = click.prompt(f"\t{start_bold}AWS Region{end_bold}", default=default_region, type=click.STRING) - if parameter_override_keys: - for parameter_key, parameter_properties in parameter_override_keys.items(): - no_echo = parameter_properties.get("NoEcho", False) - if no_echo: - parameter = click.prompt( - f"\t{start_bold}Parameter {parameter_key}{end_bold}", type=click.STRING, hide_input=True - ) - input_parameter_overrides[parameter_key] = {"Value": parameter, "Hidden": True} - else: - parameter = click.prompt( - f"\t{start_bold}Parameter {parameter_key}{end_bold}", - default=parameter_overrides.get( - parameter_key, parameter_properties.get("Default", "No default specified") - ), - type=click.STRING, - ) - input_parameter_overrides[parameter_key] = {"Value": parameter, "Hidden": False} + input_parameter_overrides = prompt_parameters(parameter_override_keys, start_bold, end_bold) click.secho("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy") confirm_changeset = click.confirm( @@ -331,7 +314,7 @@ def guided_deploy( if not capabilities_confirm: input_capabilities = click.prompt( f"\t{start_bold}Capabilities{end_bold}", - default=default_capabilities, + default=default_capabilities[0], type=FuncParamType(func=_space_separated_list_func_type), ) @@ -354,6 +337,28 @@ def guided_deploy( ) +def prompt_parameters(parameter_override_keys, start_bold, end_bold): + _prompted_param_overrides = {} + if parameter_override_keys: + for parameter_key, parameter_properties in parameter_override_keys.items(): + no_echo = parameter_properties.get("NoEcho", False) + if no_echo: + parameter = click.prompt( + f"\t{start_bold}Parameter {parameter_key}{end_bold}", type=click.STRING, hide_input=True + ) + _prompted_param_overrides[parameter_key] = {"Value": parameter, "Hidden": True} + else: + parameter = click.prompt( + f"\t{start_bold}Parameter {parameter_key}{end_bold}", + default=_prompted_param_overrides.get( + parameter_key, parameter_properties.get("Default", "No default specified") + ), + type=click.STRING, + ) + _prompted_param_overrides[parameter_key] = {"Value": parameter, "Hidden": False} + return _prompted_param_overrides + + def print_deploy_args(stack_name, s3_bucket, region, capabilities, parameter_overrides, confirm_changeset): _parameters = parameter_overrides.copy() diff --git a/samcli/commands/deploy/deploy_context.py b/samcli/commands/deploy/deploy_context.py index c482c98757..9c246f45cc 100644 --- a/samcli/commands/deploy/deploy_context.py +++ b/samcli/commands/deploy/deploy_context.py @@ -104,6 +104,7 @@ def run(self): session = boto3.Session(profile_name=self.profile if self.profile else None) cloudformation_client = session.client("cloudformation", 
region_name=self.region if self.region else None) + s3_client = None if self.s3_bucket: s3_client = session.client("s3", region_name=self.region if self.region else None) @@ -111,7 +112,7 @@ def run(self): self.deployer = Deployer(cloudformation_client) - region = s3_client._client_config.region_name # pylint: disable=W0212 + region = s3_client._client_config.region_name if s3_client else self.region # pylint: disable=W0212 return self.deploy( self.stack_name, diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index d89710c0df..efd7397555 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -91,6 +91,8 @@ def upload(self, file_name, remote_path): additional_args["Metadata"] = self.artifact_metadata print_progress_callback = ProgressPercentage(file_name, remote_path) + if not self.bucket_name: + raise BucketNotSpecifiedError() future = self.transfer_manager.upload( file_name, self.bucket_name, remote_path, additional_args, [print_progress_callback] ) @@ -144,6 +146,8 @@ def file_exists(self, remote_path): return False def make_url(self, obj_path): + if not self.bucket_name: + raise BucketNotSpecifiedError() return "s3://{0}/{1}".format(self.bucket_name, obj_path) def file_checksum(self, file_name): diff --git a/tests/integration/deploy/deploy_integ_base.py b/tests/integration/deploy/deploy_integ_base.py index 1e68f8878f..00165f4adf 100644 --- a/tests/integration/deploy/deploy_integ_base.py +++ b/tests/integration/deploy/deploy_integ_base.py @@ -37,6 +37,7 @@ def get_deploy_command_list( force_upload=False, notification_arns=None, fail_on_empty_changeset=False, + confirm_changeset=False, no_execute_changeset=False, parameter_overrides=None, role_arn=None, @@ -44,9 +45,12 @@ def get_deploy_command_list( tags=None, profile=None, region=None, + guided=False, ): command_list = [self.base_command(), "deploy"] + if guided: + command_list = command_list + ["--guided"] if s3_bucket: command_list = command_list + ["--s3-bucket", str(s3_bucket)] if capabilities: @@ -73,6 +77,8 @@ def get_deploy_command_list( command_list = command_list + ["--force-upload"] if fail_on_empty_changeset: command_list = command_list + ["--fail-on-empty-changeset"] + if confirm_changeset: + command_list = command_list + ["--confirm-changeset"] if tags: command_list = command_list + ["--tags", str(tags)] if region: diff --git a/tests/integration/deploy/test_deploy_command.py b/tests/integration/deploy/test_deploy_command.py index a2bfb79f68..f36872aed7 100644 --- a/tests/integration/deploy/test_deploy_command.py +++ b/tests/integration/deploy/test_deploy_command.py @@ -7,6 +7,8 @@ import boto3 from parameterized import parameterized +from samcli.lib.config.samconfig import DEFAULT_CONFIG_FILE_NAME +from samcli.lib.bootstrap.bootstrap import SAM_CLI_STACK_NAME from tests.integration.deploy.deploy_integ_base import DeployIntegBase from tests.integration.package.package_integ_base import PackageIntegBase from tests.testing_utils import RUNNING_ON_CI, RUNNING_TEST_FOR_MASTER_ON_CI @@ -30,7 +32,7 @@ def tearDown(self): super(TestDeploy, self).tearDown() @parameterized.expand(["aws-serverless-function.yaml"]) - def test_deploy_all_args(self, template_file): + def test_package_and_deploy_no_s3_bucket_all_args(self, template_file): template_path = self.test_data_path.joinpath(template_file) with tempfile.NamedTemporaryFile(delete=False) as output_template_file: # Package necessary artifacts. 
@@ -71,7 +73,6 @@ def test_deploy_all_args(self, template_file): stack_name=stack_name, capabilities="CAPABILITY_IAM", s3_prefix="integ_deploy", - s3_bucket=self.s3_bucket.name, force_upload=True, notification_arns=self.sns_arn, parameter_overrides="Parameter=Clarity", @@ -82,3 +83,297 @@ def test_deploy_all_args(self, template_file): deploy_process = Popen(deploy_command_list_execute, stdout=PIPE) deploy_process.wait() self.assertEqual(deploy_process.returncode, 0) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_no_package_and_deploy_with_s3_bucket_all_args(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Package and Deploy in one go without confirming change set. + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + s3_bucket=self.s3_bucket.name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=False, + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE) + deploy_process_execute.wait() + self.assertEqual(deploy_process_execute.returncode, 0) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_no_package_and_deploy_with_s3_bucket_all_args_confirm_changeset(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Package and Deploy in one go without confirming change set. + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + s3_bucket=self.s3_bucket.name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=True, + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE, stdin=PIPE) + deploy_process_execute.communicate("Y".encode()) + self.assertEqual(deploy_process_execute.returncode, 0) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_without_s3_bucket(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + + # Package and Deploy in one go without confirming change set. 
+ deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=False, + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) + deploy_process_execute.wait() + # Error asking for s3 bucket + self.assertEqual(deploy_process_execute.returncode, 1) + stderr = b"".join(deploy_process_execute.stderr.readlines()).strip() + self.assertIn( + bytes( + f"S3 Bucket not specified, use --s3-bucket to specify a bucket name or run sam deploy --guided", + encoding="utf-8", + ), + stderr, + ) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_without_stack_name(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + # Package and Deploy in one go without confirming change set. + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=False, + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) + deploy_process_execute.wait() + # Error no stack name present + self.assertEqual(deploy_process_execute.returncode, 1) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_without_capabilities(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + + # Package and Deploy in one go without confirming change set. + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + s3_prefix="integ_deploy", + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=False, + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) + deploy_process_execute.wait() + # Error capabilities not specified + self.assertEqual(deploy_process_execute.returncode, 1) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_without_template_file(self, template_file): + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + + # Package and Deploy in one go without confirming change set. 
+ deploy_command_list = self.get_deploy_command_list( + stack_name=stack_name, + s3_prefix="integ_deploy", + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=False, + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) + deploy_process_execute.wait() + # Error template file not specified + self.assertEqual(deploy_process_execute.returncode, 1) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_with_s3_bucket_switch_region(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + + # Package and Deploy in one go without confirming change set. + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + s3_bucket=self.bucket_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=False, + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE) + deploy_process_execute.wait() + # Deploy should succeed + self.assertEqual(deploy_process_execute.returncode, 0) + + # Try to deploy to another region. + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + s3_bucket=self.bucket_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=False, + region="eu-west-2", + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) + deploy_process_execute.wait() + # Deploy should fail, asking for s3 bucket + self.assertEqual(deploy_process_execute.returncode, 1) + stderr = b"".join(deploy_process_execute.stderr.readlines()).strip() + self.assertIn( + bytes( + f"Error: Failed to create/update stack {stack_name} : " + f"deployment s3 bucket is in a different region, try sam deploy --guided", + encoding="utf-8", + ), + stderr, + ) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_guided(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Package and Deploy in one go without confirming change set. 
+ deploy_command_list = self.get_deploy_command_list(template_file=template_path, guided=True) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE, stdin=PIPE) + deploy_process_execute.communicate("{}\n\n\n\n\n\n".format(stack_name).encode()) + + # Deploy should succeed with a managed stack + self.assertEqual(deploy_process_execute.returncode, 0) + self.stack_names.append(SAM_CLI_STACK_NAME) + # Remove samconfig.toml + os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_guided_set_parameter(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Package and Deploy in one go without confirming change set. + deploy_command_list = self.get_deploy_command_list(template_file=template_path, guided=True) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE, stdin=PIPE) + deploy_process_execute.communicate("{}\n\nSuppliedParameter\n\n\n\n".format(stack_name).encode()) + + # Deploy should succeed with a managed stack + self.assertEqual(deploy_process_execute.returncode, 0) + self.stack_names.append(SAM_CLI_STACK_NAME) + # Remove samconfig.toml + os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_guided_set_capabilities(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Package and Deploy in one go without confirming change set. + deploy_command_list = self.get_deploy_command_list(template_file=template_path, guided=True) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE, stdin=PIPE) + deploy_process_execute.communicate( + "{}\n\nSuppliedParameter\n\nn\nCAPABILITY_IAM CAPABILITY_NAMED_IAM\n\n".format(stack_name).encode() + ) + + # Deploy should succeed with a managed stack + self.assertEqual(deploy_process_execute.returncode, 0) + self.stack_names.append(SAM_CLI_STACK_NAME) + # Remove samconfig.toml + os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_guided_set_confirm_changeset(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Package and Deploy in one go without confirming change set. 
+ deploy_command_list = self.get_deploy_command_list(template_file=template_path, guided=True) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE, stdin=PIPE) + deploy_process_execute.communicate("{}\n\nSuppliedParameter\nY\n\n\nY\n".format(stack_name).encode()) + + # Deploy should succeed with a managed stack + self.assertEqual(deploy_process_execute.returncode, 0) + self.stack_names.append(SAM_CLI_STACK_NAME) + # Remove samconfig.toml + os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) diff --git a/tests/regression/deploy/test_deploy_regression.py b/tests/regression/deploy/test_deploy_regression.py index 8280189b01..9a6fac4134 100644 --- a/tests/regression/deploy/test_deploy_regression.py +++ b/tests/regression/deploy/test_deploy_regression.py @@ -109,7 +109,7 @@ def test_deploy_with_no_capabilities(self, template_file): "tags": "integ=true clarity=yes", } - self.deploy_regression_check(arguments, sam_return_code=2, aws_return_code=255) + self.deploy_regression_check(arguments, sam_return_code=1, aws_return_code=255) def test_deploy_with_no_template_file(self): sam_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] @@ -128,8 +128,8 @@ def test_deploy_with_no_template_file(self): "kms_key_id": self.kms_key, "tags": "integ=true clarity=yes", } - - self.deploy_regression_check(arguments, sam_return_code=2, aws_return_code=2) + # if no template file is specified, sam cli looks for a template.yaml in the current working directory. + self.deploy_regression_check(arguments, sam_return_code=1, aws_return_code=2) @parameterized.expand(["aws-serverless-function.yaml"]) def test_deploy_with_no_changes(self, template_file): From 43096683723f71da307f62870d09c4bd82d47238 Mon Sep 17 00:00:00 2001 From: Alex Wood Date: Thu, 21 Nov 2019 11:28:29 -0800 Subject: [PATCH 31/45] Reduce guided output (#1563) * Reduce Deployment Output on Buckets * Change Indentation * Change LOG to click echo * Change Import Order for Linter --- samcli/commands/deploy/command.py | 5 ++--- samcli/lib/bootstrap/bootstrap.py | 14 ++++++-------- 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 3ce09efa68..d2f3a8fcc6 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -320,10 +320,9 @@ def guided_deploy( save_to_config = click.confirm(f"\t{start_bold}Save arguments to samconfig.toml{end_bold}", default=True) - click.echo(color.yellow("\n\tS3 bucket for deployments\n\t=========================")) s3_bucket = manage_stack(profile=profile, region=region) - click.echo(f"\tS3 bucket: {s3_bucket}") - click.echo("\tA different default S3 bucket can be set in samconfig.toml") + click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}") + click.echo("\t\tA different default S3 bucket can be set in samconfig.toml") return ( stack_name, diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py index 9620465c38..55e63891b8 100644 --- a/samcli/lib/bootstrap/bootstrap.py +++ b/samcli/lib/bootstrap/bootstrap.py @@ -3,9 +3,10 @@ """ import json -import logging import boto3 +import click + from botocore.config import Config from botocore.exceptions import ClientError, NoRegionError, NoCredentialsError @@ -14,7 +15,6 @@ from samcli.commands.exceptions import UserException, CredentialsError, RegionError -LOG = logging.getLogger(__name__) SAM_CLI_STACK_NAME = "aws-sam-cli-managed-stack" @@ -39,9 +39,9 @@ def _create_or_get_stack(cloudformation_client): 
ds_resp = cloudformation_client.describe_stacks(StackName=SAM_CLI_STACK_NAME) stacks = ds_resp["Stacks"] stack = stacks[0] - LOG.info("\tLooking for SAM CLI managed stack: Found!") + click.echo("\n\tLooking for resources needed for deployment: Found!") except ClientError: - LOG.info("\tLooking for SAM CLI managed stack: Not found.") + click.echo("\n\tLooking for resources needed for deployment: Not found.") stack = _create_stack(cloudformation_client) # exceptions are not captured from subcommands # Sanity check for non-none stack? Sanity check for tag? tags = stack["Tags"] @@ -77,7 +77,7 @@ def _create_or_get_stack(cloudformation_client): def _create_stack(cloudformation_client): - LOG.info("\tCreating SAM CLI managed stack...") + click.echo("\tCreating the required resources...") change_set_name = "InitialCreation" change_set_resp = cloudformation_client.create_change_set( StackName=SAM_CLI_STACK_NAME, @@ -87,18 +87,16 @@ def _create_stack(cloudformation_client): ChangeSetName=change_set_name, # this must be unique for the stack, but we only create so that's fine ) stack_id = change_set_resp["StackId"] - LOG.info("\tWaiting for managed stack change set to be created.") change_waiter = cloudformation_client.get_waiter("change_set_create_complete") change_waiter.wait( ChangeSetName=change_set_name, StackName=SAM_CLI_STACK_NAME, WaiterConfig={"Delay": 15, "MaxAttempts": 60} ) cloudformation_client.execute_change_set(ChangeSetName=change_set_name, StackName=SAM_CLI_STACK_NAME) - LOG.info("\tWaiting for managed stack to be created.") stack_waiter = cloudformation_client.get_waiter("stack_create_complete") stack_waiter.wait(StackName=stack_id, WaiterConfig={"Delay": 15, "MaxAttempts": 60}) ds_resp = cloudformation_client.describe_stacks(StackName=SAM_CLI_STACK_NAME) stacks = ds_resp["Stacks"] - LOG.info("\tSuccessfully created SAM CLI managed stack!") + click.echo("\tSuccessfully created!") return stacks[0] From 22fe0cec0a889d2b7a7c3e87e95769830ad98bca Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Thu, 21 Nov 2019 21:02:03 -0800 Subject: [PATCH 32/45] bugfixes: `stack-name` is required on deploy (#1573) * bugfixes: `stack-name` is required on deploy - providing non standard capabilities gives an appropriate error message - add s3 prefix to be the stack name during guided deploy * fix: help text on parameter overrides * fix: add `serverlessrepo` access to managed stack * fix: address comments * fix: usability fixes * fix: parameter defaults are casted to string * fix: unit tests on bootstrap --- samcli/commands/_utils/options.py | 29 ++++++++-- samcli/commands/deploy/command.py | 35 ++++++------ samcli/lib/bootstrap/bootstrap.py | 23 +++++++- samcli/lib/deploy/deployer.py | 5 +- samcli/lib/package/s3_uploader.py | 2 + .../integration/deploy/test_deploy_command.py | 2 +- tests/unit/cli/test_cli_config_file.py | 4 +- tests/unit/commands/_utils/test_options.py | 54 ++++++++++++++++++- tests/unit/commands/deploy/test_command.py | 12 +++-- tests/unit/lib/bootstrap/test_bootstrap.py | 4 +- tests/unit/lib/deploy/test_deployer.py | 20 +++++++ 11 files changed, 159 insertions(+), 31 deletions(-) diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 20b46cf1c8..72973ad43e 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -13,6 +13,7 @@ _TEMPLATE_OPTION_DEFAULT_VALUE = "template.[yaml|yml]" +DEFAULT_STACK_NAME = "sam-app" LOG = logging.getLogger(__name__) @@ -56,6 +57,28 
@@ def get_or_default_template_file_name(ctx, param, provided_value, include_build) return result +def guided_deploy_stack_name(ctx, param, provided_value): + """ + Provide a default value for stack name if invoked with a guided deploy. + :param ctx: Click Context + :param param: Param name + :param provided_value: Value provided by Click, it would be the value provided by the user. + :return: Actual value to be used in the CLI + """ + + guided = ctx.params.get("guided", False) or ctx.params.get("g", False) + + if not guided and not provided_value: + raise click.BadOptionUsage( + option_name=param.name, + ctx=ctx, + message="Missing option '--stack-name', 'sam deploy –guided' can " + "be used to provide and save needed parameters for future deploys.", + ) + + return provided_value if provided_value else DEFAULT_STACK_NAME + + def template_common_option(f): """ Common ClI option for template @@ -127,9 +150,9 @@ def parameter_override_click_option(): cls=OptionNargs, type=CfnParameterOverridesType(), default={}, - help="Optional. A string that contains CloudFormation parameter overrides encoded as key=value " - "pairs. Use the same format as the AWS CLI, e.g. 'ParameterKey=KeyPairName," - "ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro'", + help="Optional. A string that contains AWS CloudFormation parameter overrides encoded as key=value pairs." + "For example, 'ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType," + "ParameterValue=t1.micro' or KeyPairName=MyKey InstanceType=t1.micro", ) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index d2f3a8fcc6..a0264db328 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -18,6 +18,7 @@ template_click_option, metadata_override_option, _space_separated_list_func_type, + guided_deploy_stack_name, ) from samcli.commands._utils.template import get_template_parameters from samcli.commands.deploy.exceptions import GuidedDeployFailedError @@ -47,10 +48,19 @@ help=HELP_TEXT, ) @configuration_option(provider=TomlProvider(section=CONFIG_SECTION)) +@click.option( + "--guided", + "-g", + required=False, + is_flag=True, + is_eager=True, + help="Specify this flag to allow SAM CLI to guide you through the deployment using guided prompts.", +) @template_click_option(include_build=True) @click.option( "--stack-name", - required=True, + required=False, + callback=guided_deploy_stack_name, help="The name of the AWS CloudFormation stack you're deploying to. " "If you specify an existing stack, the command updates the stack. " "If you specify a new stack, the command creates it.", @@ -121,14 +131,6 @@ help="Indicates whether to use JSON as the format for " "the output AWS CloudFormation template. 
YAML is used by default.", ) -@click.option( - "--guided", - "-g", - required=False, - is_flag=True, - is_eager=True, - help="Specify this flag to allow SAM CLI to guide you through the deployment using guided prompts.", -) @metadata_override_option @notification_arns_override_option @tags_override_option @@ -213,6 +215,7 @@ def do_cli( _parameter_overrides = None guided_stack_name = None guided_s3_bucket = None + guided_s3_prefix = None guided_region = None if guided: @@ -221,7 +224,7 @@ def do_cli( _parameter_override_keys = get_template_parameters(template_file=template_file) - guided_stack_name, guided_s3_bucket, guided_region, guided_profile, changeset_decision, _capabilities, _parameter_overrides, save_to_config = guided_deploy( + guided_stack_name, guided_s3_bucket, guided_s3_prefix, guided_region, guided_profile, changeset_decision, _capabilities, _parameter_overrides, save_to_config = guided_deploy( stack_name, s3_bucket, region, profile, confirm_changeset, _parameter_override_keys, parameter_overrides ) @@ -230,6 +233,7 @@ def do_cli( template_file, stack_name=guided_stack_name, s3_bucket=guided_s3_bucket, + s3_prefix=guided_s3_prefix, region=guided_region, profile=guided_profile, confirm_changeset=changeset_decision, @@ -251,7 +255,7 @@ def do_cli( with PackageContext( template_file=template_file, s3_bucket=guided_s3_bucket if guided else s3_bucket, - s3_prefix=s3_prefix, + s3_prefix=guided_s3_prefix if guided else s3_prefix, output_template_file=output_template_file.name, kms_key_id=kms_key_id, use_json=use_json, @@ -268,7 +272,7 @@ def do_cli( stack_name=guided_stack_name if guided else stack_name, s3_bucket=guided_s3_bucket if guided else s3_bucket, force_upload=force_upload, - s3_prefix=s3_prefix, + s3_prefix=guided_s3_prefix if guided else s3_prefix, kms_key_id=kms_key_id, parameter_overrides=sanitize_parameter_overrides(_parameter_overrides) if guided else parameter_overrides, capabilities=_capabilities if guided else capabilities, @@ -301,6 +305,7 @@ def guided_deploy( ) stack_name = click.prompt(f"\t{start_bold}Stack Name{end_bold}", default=default_stack_name, type=click.STRING) + s3_prefix = stack_name region = click.prompt(f"\t{start_bold}AWS Region{end_bold}", default=default_region, type=click.STRING) input_parameter_overrides = prompt_parameters(parameter_override_keys, start_bold, end_bold) @@ -327,6 +332,7 @@ def guided_deploy( return ( stack_name, s3_bucket, + s3_prefix, region, profile, confirm_changeset, @@ -347,11 +353,10 @@ def prompt_parameters(parameter_override_keys, start_bold, end_bold): ) _prompted_param_overrides[parameter_key] = {"Value": parameter, "Hidden": True} else: + # Make sure the default is casted to a string. 
parameter = click.prompt( f"\t{start_bold}Parameter {parameter_key}{end_bold}", - default=_prompted_param_overrides.get( - parameter_key, parameter_properties.get("Default", "No default specified") - ), + default=_prompted_param_overrides.get(parameter_key, str(parameter_properties.get("Default", ""))), type=click.STRING, ) _prompted_param_overrides[parameter_key] = {"Value": parameter, "Hidden": False} diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py index 55e63891b8..900e8317b6 100644 --- a/samcli/lib/bootstrap/bootstrap.py +++ b/samcli/lib/bootstrap/bootstrap.py @@ -15,7 +15,7 @@ from samcli.commands.exceptions import UserException, CredentialsError, RegionError -SAM_CLI_STACK_NAME = "aws-sam-cli-managed-stack" +SAM_CLI_STACK_NAME = "aws-sam-cli-managed-default" def manage_stack(profile, region): @@ -120,6 +120,27 @@ def _get_stack_template(): - Key: ManagedStackSource Value: AwsSamCli + SamCliSourceBucketBucketPolicy: + Type: AWS::S3::BucketPolicy + Properties: + Bucket: !Ref SamCliSourceBucket + PolicyDocument: + Statement: + - + Action: + - "s3:GetObject" + Effect: "Allow" + Resource: + Fn::Join: + - "" + - + - "arn:aws:s3:::" + - + !Ref SamCliSourceBucket + - "/*" + Principal: + Service: serverlessrepo.amazonaws.com + Outputs: SourceBucket: Value: !Ref SamCliSourceBucket diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index c05a38697c..170bcc8778 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -129,7 +129,6 @@ def create_changeset( :param tags: Array of tags passed to CloudFormation :return: """ - if not self.has_stack(stack_name): changeset_type = "CREATE" # When creating a new stack, UsePreviousValue=True is invalid. @@ -178,12 +177,16 @@ def create_changeset( kwargs["RoleARN"] = role_arn if notification_arns is not None: kwargs["NotificationARNs"] = notification_arns + return self._create_change_set(stack_name=stack_name, changeset_type=changeset_type, **kwargs) + + def _create_change_set(self, stack_name, changeset_type, **kwargs): try: resp = self._client.create_change_set(**kwargs) return resp, changeset_type except botocore.exceptions.ClientError as ex: if "The bucket you are attempting to access must be addressed using the specified endpoint" in str(ex): raise DeployBucketInDifferentRegionError(f"Failed to create/update stack {stack_name}") + raise ChangeSetError(stack_name=stack_name, msg=str(ex)) except Exception as ex: LOG.debug("Unable to create changeset", exc_info=ex) diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index efd7397555..3e9e167ced 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -205,3 +205,5 @@ def on_progress(self, bytes_transferred, **kwargs): "\rUploading to %s %s / %s (%.2f%%)" % (self._remote_path, self._seen_so_far, self._size, percentage) ) sys.stderr.flush() + if int(percentage) == 100: + sys.stderr.write("\n") diff --git a/tests/integration/deploy/test_deploy_command.py b/tests/integration/deploy/test_deploy_command.py index f36872aed7..19d3bdd342 100644 --- a/tests/integration/deploy/test_deploy_command.py +++ b/tests/integration/deploy/test_deploy_command.py @@ -193,7 +193,7 @@ def test_deploy_without_stack_name(self, template_file): deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) deploy_process_execute.wait() # Error no stack name present - self.assertEqual(deploy_process_execute.returncode, 1) + 
self.assertEqual(deploy_process_execute.returncode, 2) @parameterized.expand(["aws-serverless-function.yaml"]) def test_deploy_without_capabilities(self, template_file): diff --git a/tests/unit/cli/test_cli_config_file.py b/tests/unit/cli/test_cli_config_file.py index 0aed94ee74..490e4029b0 100644 --- a/tests/unit/cli/test_cli_config_file.py +++ b/tests/unit/cli/test_cli_config_file.py @@ -10,9 +10,11 @@ class MockContext: - def __init__(self, info_name, parent): + def __init__(self, info_name, parent, params=None, command=None): self.info_name = info_name self.parent = parent + self.params = params + self.command = command class TestTomlProvider(TestCase): diff --git a/tests/unit/commands/_utils/test_options.py b/tests/unit/commands/_utils/test_options.py index b2e82d6618..e7ebb65c41 100644 --- a/tests/unit/commands/_utils/test_options.py +++ b/tests/unit/commands/_utils/test_options.py @@ -5,8 +5,16 @@ import os from unittest import TestCase -from unittest.mock import patch -from samcli.commands._utils.options import get_or_default_template_file_name, _TEMPLATE_OPTION_DEFAULT_VALUE +from unittest.mock import patch, MagicMock + +import click + +from samcli.commands._utils.options import ( + get_or_default_template_file_name, + _TEMPLATE_OPTION_DEFAULT_VALUE, + guided_deploy_stack_name, +) +from tests.unit.cli.test_cli_config_file import MockContext class Mock: @@ -71,3 +79,45 @@ def test_verify_ctx(self, os_mock): self.assertEqual(result, "a/b/c/absPath") self.assertEqual(ctx.samconfig_dir, "a/b/c") os_mock.path.abspath.assert_called_with(expected) + + +class TestGuidedDeployStackName(TestCase): + def test_must_return_provided_value_guided(self): + stack_name = "provided-stack" + mock_params = MagicMock() + mock_params.get = MagicMock(return_value=True) + result = guided_deploy_stack_name( + ctx=MockContext(info_name="test", parent=None, params=mock_params), + param=MagicMock(), + provided_value=stack_name, + ) + self.assertEqual(result, stack_name) + + def test_must_return_default_value_guided(self): + stack_name = None + mock_params = MagicMock() + mock_params.get = MagicMock(return_value=True) + result = guided_deploy_stack_name( + ctx=MockContext(info_name="test", parent=None, params=mock_params), + param=MagicMock(), + provided_value=stack_name, + ) + self.assertEqual(result, "sam-app") + + def test_must_return_provided_value_non_guided(self): + stack_name = "provided-stack" + mock_params = MagicMock() + mock_params.get = MagicMock(return_value=False) + result = guided_deploy_stack_name(ctx=MagicMock(), param=MagicMock(), provided_value=stack_name) + self.assertEqual(result, "provided-stack") + + def test_exception_missing_parameter_no_value_non_guided(self): + stack_name = None + mock_params = MagicMock() + mock_params.get = MagicMock(return_value=False) + with self.assertRaises(click.BadOptionUsage): + guided_deploy_stack_name( + ctx=MockContext(info_name="test", parent=None, params=mock_params), + param=MagicMock(), + provided_value=stack_name, + ) diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index 3a17a4c7cd..5f7331494d 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -146,7 +146,7 @@ def test_all_args_guided( stack_name="sam-app", s3_bucket="managed-s3-bucket", force_upload=self.force_upload, - s3_prefix=self.s3_prefix, + s3_prefix="sam-app", kms_key_id=self.kms_key_id, parameter_overrides={"Myparameter": "guidedParameter", "MyNoEchoParameter": "secure"}, 
capabilities=self.capabilities, @@ -169,6 +169,7 @@ def test_all_args_guided( region="us-east-1", s3_bucket="managed-s3-bucket", stack_name="sam-app", + s3_prefix="sam-app", parameter_overrides={ "Myparameter": {"Value": "guidedParameter", "Hidden": False}, "MyNoEchoParameter": {"Value": "secure", "Hidden": True}, @@ -238,7 +239,7 @@ def test_all_args_guided_no_save_echo_param_to_config( stack_name="sam-app", s3_bucket="managed-s3-bucket", force_upload=self.force_upload, - s3_prefix=self.s3_prefix, + s3_prefix="sam-app", kms_key_id=self.kms_key_id, parameter_overrides={"Myparameter": "guidedParameter", "MyNoEchoParameter": "secure"}, capabilities=self.capabilities, @@ -256,12 +257,13 @@ def test_all_args_guided_no_save_echo_param_to_config( mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") self.assertEqual(context_mock.run.call_count, 1) - self.assertEqual(mock_sam_config.put.call_count, 6) + self.assertEqual(mock_sam_config.put.call_count, 7) self.assertEqual( mock_sam_config.put.call_args_list, [ call(["deploy"], "parameters", "stack_name", "sam-app"), call(["deploy"], "parameters", "s3_bucket", "managed-s3-bucket"), + call(["deploy"], "parameters", "s3_prefix", "sam-app"), call(["deploy"], "parameters", "region", "us-east-1"), call(["deploy"], "parameters", "confirm_changeset", True), call(["deploy"], "parameters", "capabilities", "CAPABILITY_IAM"), @@ -325,7 +327,7 @@ def test_all_args_guided_no_params_save_config( stack_name="sam-app", s3_bucket="managed-s3-bucket", force_upload=self.force_upload, - s3_prefix=self.s3_prefix, + s3_prefix="sam-app", kms_key_id=self.kms_key_id, parameter_overrides=self.parameter_overrides, capabilities=self.capabilities, @@ -401,7 +403,7 @@ def test_all_args_guided_no_params_no_save_config( stack_name="sam-app", s3_bucket="managed-s3-bucket", force_upload=self.force_upload, - s3_prefix=self.s3_prefix, + s3_prefix="sam-app", kms_key_id=self.kms_key_id, parameter_overrides=self.parameter_overrides, capabilities=self.capabilities, diff --git a/tests/unit/lib/bootstrap/test_bootstrap.py b/tests/unit/lib/bootstrap/test_bootstrap.py index ccc073abdf..f1653af61a 100644 --- a/tests/unit/lib/bootstrap/test_bootstrap.py +++ b/tests/unit/lib/bootstrap/test_bootstrap.py @@ -44,7 +44,7 @@ def test_new_stack(self): "ChangeSetType": "CREATE", "ChangeSetName": "InitialCreation", } - ccs_resp = {"Id": "id", "StackId": "aws-sam-cli-managed-stack"} + ccs_resp = {"Id": "id", "StackId": "aws-sam-cli-managed-default"} stubber.add_response("create_change_set", ccs_resp, ccs_params) # describe change set creation status for waiter dcs_params = {"ChangeSetName": "InitialCreation", "StackName": SAM_CLI_STACK_NAME} @@ -189,7 +189,7 @@ def test_change_set_execution_fails(self): "ChangeSetType": "CREATE", "ChangeSetName": "InitialCreation", } - ccs_resp = {"Id": "id", "StackId": "aws-sam-cli-managed-stack"} + ccs_resp = {"Id": "id", "StackId": "aws-sam-cli-managed-default"} stubber.add_response("create_change_set", ccs_resp, ccs_params) # describe change set creation status for waiter dcs_params = {"ChangeSetName": "InitialCreation", "StackName": SAM_CLI_STACK_NAME} diff --git a/tests/unit/lib/deploy/test_deployer.py b/tests/unit/lib/deploy/test_deployer.py index 8749f1bb01..1eb1567cec 100644 --- a/tests/unit/lib/deploy/test_deployer.py +++ b/tests/unit/lib/deploy/test_deployer.py @@ -193,6 +193,26 @@ def test_create_changeset_ClientErrorException(self): tags={"unit": "true"}, ) + def test_create_changeset_ClientErrorException_generic(self): + 
self.deployer.has_stack = MagicMock(return_value=False) + self.deployer._client.create_change_set = MagicMock( + side_effect=ClientError(error_response={"Error": {"Message": "Message"}}, operation_name="create_changeset") + ) + with self.assertRaises(ChangeSetError): + self.deployer.create_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + def test_describe_changeset_with_changes(self): response = [ { From f2272d99985b9162e6972fbd987b3ce9549651c6 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Fri, 22 Nov 2019 11:31:00 -0800 Subject: [PATCH 33/45] feat: s3 bucket versioning - managed stack (#1579) --- samcli/lib/bootstrap/bootstrap.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py index 900e8317b6..0461012287 100644 --- a/samcli/lib/bootstrap/bootstrap.py +++ b/samcli/lib/bootstrap/bootstrap.py @@ -116,6 +116,8 @@ def _get_stack_template(): SamCliSourceBucket: Type: AWS::S3::Bucket Properties: + VersioningConfiguration: + Status: Enabled Tags: - Key: ManagedStackSource Value: AwsSamCli From 4be81588045b9c154f564df7f4f76cbc32bbc79c Mon Sep 17 00:00:00 2001 From: Sanath Kumar Ramesh Date: Fri, 22 Nov 2019 15:34:54 -0800 Subject: [PATCH 34/45] Tests for samconfig use with all commands (#1575) * test: Verify samconfig is accessible to all CLI commands * fix cases where comma vs space needs to be delimiter * adding few more unit tests * adding unit tests for all commands * fix linter * Adding tests for overriding args thru config, CLI args, and envvars * Fixing a minor UX issue when sam template is invalid * fixing mock imports --- samcli/cli/cli_config_file.py | 16 +- samcli/cli/types.py | 27 +- samcli/commands/build/command.py | 195 +++-- samcli/commands/deploy/command.py | 10 +- samcli/commands/init/__init__.py | 2 +- samcli/lib/config/samconfig.py | 2 + tests/testing_utils.py | 1 + tests/unit/cli/test_types.py | 8 +- .../config => commands/samconfig}/__init__.py | 0 .../unit/commands/samconfig/test_samconfig.py | 687 ++++++++++++++++++ tests/unit/lib/samconfig/__init__.py | 0 .../{config => samconfig}/test_samconfig.py | 0 12 files changed, 850 insertions(+), 98 deletions(-) rename tests/unit/{lib/config => commands/samconfig}/__init__.py (100%) create mode 100644 tests/unit/commands/samconfig/test_samconfig.py create mode 100644 tests/unit/lib/samconfig/__init__.py rename tests/unit/lib/{config => samconfig}/test_samconfig.py (100%) diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py index 7191af3443..032717fbfe 100644 --- a/samcli/cli/cli_config_file.py +++ b/samcli/cli/cli_config_file.py @@ -54,14 +54,16 @@ def __call__(self, config_dir, config_env, cmd_names): # NOTE(TheSriram): change from tomlkit table type to normal dictionary, # so that click defaults work out of the box. 
resolved_config = {k: v for k, v in samconfig.get_all(cmd_names, self.section, env=config_env).items()} + LOG.debug("Configuration values read from the file: %s", resolved_config) - except KeyError: + except KeyError as ex: LOG.debug( - "Error reading configuration file at %s with config_env=%s, command=%s, section=%s", + "Error reading configuration file at %s with config_env=%s, command=%s, section=%s %s", samconfig.path(), config_env, cmd_names, self.section, + str(ex), ) except Exception as ex: LOG.debug("Error reading configuration file: %s %s", samconfig.path(), str(ex)) @@ -123,6 +125,16 @@ def configuration_option(*param_decls, **attrs): """ Adds configuration file support to a click application. + NOTE: This decorator should be added to the top of parameter chain, right below click.command, before + any options are declared. + + Example: + >>> @click.command("hello") + @configuration_option(provider=TomlProvider(section="parameters")) + @click.option('--name', type=click.String) + def hello(name): + print("Hello " + name) + This will create an option of type `STRING` expecting the config_env in the configuration file, by default this config_env is `default`. When specified, the requisite portion of the configuration file is considered as the diff --git a/samcli/cli/types.py b/samcli/cli/types.py index 31cc181302..7d43e353e7 100644 --- a/samcli/cli/types.py +++ b/samcli/cli/types.py @@ -9,6 +9,17 @@ import click +def _value_regex(delim): + return f'(\\"(?:\\\\.|[^\\"\\\\]+)*\\"|(?:\\\\.|[^{delim}\\"\\\\]+)+)' + + +KEY_REGEX = '([A-Za-z0-9\\"]+)' +# Use this regex when you have space as delimiter Ex: "KeyName1=string KeyName2=string" +VALUE_REGEX_SPACE_DELIM = _value_regex(" ") +# Use this regex when you have comma as delimiter Ex: "KeyName1=string,KeyName2=string" +VALUE_REGEX_COMMA_DELIM = _value_regex(",") + + class CfnParameterOverridesType(click.ParamType): """ Custom Click options type to accept values for CloudFormation template parameters. You can pass values for @@ -25,11 +36,8 @@ class CfnParameterOverridesType(click.ParamType): # If Both ParameterKey pattern and KeyPairName=MyKey should not be present # while adding parameter overrides, if they are, it # can result in unpredicatable behavior. - KEY_REGEX = '([A-Za-z0-9\\"]+)' - VALUE_REGEX = '(\\"(?:\\\\.|[^\\"\\\\]+)*\\"|(?:\\\\.|[^ \\"\\\\]+)+))' - - _pattern_1 = r"(?:ParameterKey={key},ParameterValue={value}".format(key=KEY_REGEX, value=VALUE_REGEX) - _pattern_2 = r"(?:(?: ){key}={value}".format(key=KEY_REGEX, value=VALUE_REGEX) + _pattern_1 = r"(?:ParameterKey={key},ParameterValue={value})".format(key=KEY_REGEX, value=VALUE_REGEX_SPACE_DELIM) + _pattern_2 = r"(?:(?: ){key}={value})".format(key=KEY_REGEX, value=VALUE_REGEX_SPACE_DELIM) ordered_pattern_match = [_pattern_1, _pattern_2] @@ -114,7 +122,7 @@ class CfnMetadataType(click.ParamType): _EXAMPLE = 'KeyName1=string,KeyName2=string or {"string":"string"}' - _pattern = r"([A-Za-z0-9\"]+)=([A-Za-z0-9\"]+)" + _pattern = r"(?:{key}={value})".format(key=KEY_REGEX, value=VALUE_REGEX_COMMA_DELIM) # NOTE(TheSriram): name needs to be added to click.ParamType requires it. name = "" @@ -160,7 +168,7 @@ class CfnTags(click.ParamType): _EXAMPLE = "KeyName1=string KeyName2=string" - _pattern = r"([A-Za-z0-9\"]+)=([A-Za-z0-9\"]+)" + _pattern = r"{key}={value}".format(key=KEY_REGEX, value=VALUE_REGEX_SPACE_DELIM) # NOTE(TheSriram): name needs to be added to click.ParamType requires it. 
name = "" @@ -172,11 +180,10 @@ def convert(self, value, param, ctx): if value == ("",): return result - # if value comes in a via configuration file, we should still convert it. - # value = (value, ) if not isinstance(value, tuple) else value + # if value comes in a via configuration file, it will be a string. So we should still convert it. + value = (value,) if not isinstance(value, tuple) else value for val in value: - groups = re.findall(self._pattern, val) if not groups: diff --git a/samcli/commands/build/command.py b/samcli/commands/build/command.py index c6ff97914f..0c12273479 100644 --- a/samcli/commands/build/command.py +++ b/samcli/commands/build/command.py @@ -6,8 +6,11 @@ import logging import click -from samcli.commands._utils.options import template_option_without_build, docker_common_options, \ - parameter_override_option +from samcli.commands._utils.options import ( + template_option_without_build, + docker_common_options, + parameter_override_option, +) from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options from samcli.lib.telemetry.metrics import track_command from samcli.cli.cli_config_file import configuration_option, TomlProvider @@ -54,64 +57,90 @@ """ -@configuration_option(provider=TomlProvider(section="parameters")) @click.command("build", help=HELP_TEXT, short_help="Build your Lambda function code") -@click.option('--build-dir', '-b', - default=DEFAULT_BUILD_DIR, - type=click.Path(file_okay=False, dir_okay=True, writable=True), # Must be a directory - help="Path to a folder where the built artifacts will be stored. This directory will be first removed before starting a build.") -@click.option("--base-dir", "-s", - default=None, - type=click.Path(dir_okay=True, file_okay=False), # Must be a directory - help="Resolve relative paths to function's source code with respect to this folder. Use this if " - "SAM template and your source code are not in same enclosing folder. By default, relative paths " - "are resolved with respect to the SAM template's location") -@click.option("--use-container", "-u", - is_flag=True, - help="If your functions depend on packages that have natively compiled dependencies, use this flag " - "to build your function inside an AWS Lambda-like Docker container") -@click.option("--manifest", "-m", - default=None, - type=click.Path(), - help="Path to a custom dependency manifest (ex: package.json) to use instead of the default one") +@configuration_option(provider=TomlProvider(section="parameters")) +@click.option( + "--build-dir", + "-b", + default=DEFAULT_BUILD_DIR, + type=click.Path(file_okay=False, dir_okay=True, writable=True), # Must be a directory + help="Path to a folder where the built artifacts will be stored. This directory will be first removed before starting a build.", +) +@click.option( + "--base-dir", + "-s", + default=None, + type=click.Path(dir_okay=True, file_okay=False), # Must be a directory + help="Resolve relative paths to function's source code with respect to this folder. Use this if " + "SAM template and your source code are not in same enclosing folder. 
By default, relative paths " + "are resolved with respect to the SAM template's location", +) +@click.option( + "--use-container", + "-u", + is_flag=True, + help="If your functions depend on packages that have natively compiled dependencies, use this flag " + "to build your function inside an AWS Lambda-like Docker container", +) +@click.option( + "--manifest", + "-m", + default=None, + type=click.Path(), + help="Path to a custom dependency manifest (ex: package.json) to use instead of the default one", +) @template_option_without_build @parameter_override_option @docker_common_options @cli_framework_options @aws_creds_options -@click.argument('function_identifier', required=False) +@click.argument("function_identifier", required=False) @pass_context @track_command -def cli(ctx, +def cli( + ctx, + function_identifier, + template_file, + base_dir, + build_dir, + use_container, + manifest, + docker_network, + skip_pull_image, + parameter_overrides, +): + # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing + + mode = _get_mode_value_from_envvar("SAM_BUILD_MODE", choices=["debug"]) + + do_cli( function_identifier, template_file, base_dir, build_dir, + True, use_container, manifest, docker_network, skip_pull_image, parameter_overrides, - ): - # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing - - mode = _get_mode_value_from_envvar("SAM_BUILD_MODE", choices=["debug"]) - - do_cli(function_identifier, template_file, base_dir, build_dir, True, use_container, manifest, docker_network, - skip_pull_image, parameter_overrides, mode) # pragma: no cover - - -def do_cli(function_identifier, # pylint: disable=too-many-locals, too-many-statements - template, - base_dir, - build_dir, - clean, - use_container, - manifest_path, - docker_network, - skip_pull_image, - parameter_overrides, - mode): + mode, + ) # pragma: no cover + + +def do_cli( # pylint: disable=too-many-locals, too-many-statements + function_identifier, + template, + base_dir, + build_dir, + clean, + use_container, + manifest_path, + docker_network, + skip_pull_image, + parameter_overrides, + mode, +): """ Implementation of the ``cli`` method """ @@ -119,8 +148,12 @@ def do_cli(function_identifier, # pylint: disable=too-many-locals, too-many-sta from samcli.commands.exceptions import UserException from samcli.commands.build.build_context import BuildContext - from samcli.lib.build.app_builder import ApplicationBuilder, BuildError, UnsupportedBuilderLibraryVersionError, \ - ContainerBuildNotSupported + from samcli.lib.build.app_builder import ( + ApplicationBuilder, + BuildError, + UnsupportedBuilderLibraryVersionError, + ContainerBuildNotSupported, + ) from samcli.lib.build.workflow_config import UnsupportedRuntimeException from samcli.local.lambdafn.exceptions import FunctionNotFound from samcli.commands._utils.template import move_template @@ -130,36 +163,36 @@ def do_cli(function_identifier, # pylint: disable=too-many-locals, too-many-sta if use_container: LOG.info("Starting Build inside a container") - with BuildContext(function_identifier, - template, - base_dir, - build_dir, - clean=clean, - manifest_path=manifest_path, - use_container=use_container, - parameter_overrides=parameter_overrides, - docker_network=docker_network, - skip_pull_image=skip_pull_image, - mode=mode) as ctx: + with BuildContext( + function_identifier, + template, + base_dir, + build_dir, + clean=clean, + manifest_path=manifest_path, + use_container=use_container, + 
parameter_overrides=parameter_overrides, + docker_network=docker_network, + skip_pull_image=skip_pull_image, + mode=mode, + ) as ctx: try: - builder = ApplicationBuilder(ctx.functions_to_build, - ctx.build_dir, - ctx.base_dir, - manifest_path_override=ctx.manifest_path_override, - container_manager=ctx.container_manager, - mode=ctx.mode) + builder = ApplicationBuilder( + ctx.functions_to_build, + ctx.build_dir, + ctx.base_dir, + manifest_path_override=ctx.manifest_path_override, + container_manager=ctx.container_manager, + mode=ctx.mode, + ) except FunctionNotFound as ex: raise UserException(str(ex)) try: artifacts = builder.build() - modified_template = builder.update_template(ctx.template_dict, - ctx.original_template_path, - artifacts) + modified_template = builder.update_template(ctx.template_dict, ctx.original_template_path, artifacts) - move_template(ctx.original_template_path, - ctx.output_template_path, - modified_template) + move_template(ctx.original_template_path, ctx.output_template_path, modified_template) click.secho("\nBuild Succeeded", fg="green") @@ -174,14 +207,20 @@ def do_cli(function_identifier, # pylint: disable=too-many-locals, too-many-sta build_dir_in_success_message = ctx.build_dir output_template_path_in_success_message = ctx.output_template_path - msg = gen_success_msg(build_dir_in_success_message, - output_template_path_in_success_message, - os.path.abspath(ctx.build_dir) == os.path.abspath(DEFAULT_BUILD_DIR)) + msg = gen_success_msg( + build_dir_in_success_message, + output_template_path_in_success_message, + os.path.abspath(ctx.build_dir) == os.path.abspath(DEFAULT_BUILD_DIR), + ) click.secho(msg, fg="yellow") - except (UnsupportedRuntimeException, BuildError, UnsupportedBuilderLibraryVersionError, - ContainerBuildNotSupported) as ex: + except ( + UnsupportedRuntimeException, + BuildError, + UnsupportedBuilderLibraryVersionError, + ContainerBuildNotSupported, + ) as ex: click.secho("\nBuild Failed", fg="red") raise UserException(str(ex)) @@ -203,10 +242,9 @@ def gen_success_msg(artifacts_dir, output_template_path, is_default_build_dir): ========================= [*] Invoke Function: {invokecmd} [*] Deploy: {deploycmd} - """.format(invokecmd=invoke_cmd, - deploycmd=deploy_cmd, - artifacts_dir=artifacts_dir, - template=output_template_path) + """.format( + invokecmd=invoke_cmd, deploycmd=deploy_cmd, artifacts_dir=artifacts_dir, template=output_template_path + ) return msg @@ -218,7 +256,6 @@ def _get_mode_value_from_envvar(name, choices): return None if mode not in choices: - raise click.UsageError("Invalid value for 'mode': invalid choice: {}. (choose from {})" - .format(mode, choices)) + raise click.UsageError("Invalid value for 'mode': invalid choice: {}. 
(choose from {})".format(mode, choices)) return mode diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index a0264db328..70fe3c0af6 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -3,6 +3,7 @@ """ import json import tempfile +import logging import click from click.types import FuncParamType @@ -39,6 +40,7 @@ """ CONFIG_SECTION = "parameters" +LOG = logging.getLogger(__name__) @click.command( @@ -220,9 +222,13 @@ def do_cli( if guided: - read_config_showcase(template_file=template_file) + try: + _parameter_override_keys = get_template_parameters(template_file=template_file) + except ValueError as ex: + LOG.debug("Failed to parse SAM template", exc_info=ex) + raise GuidedDeployFailedError(str(ex)) - _parameter_override_keys = get_template_parameters(template_file=template_file) + read_config_showcase(template_file=template_file) guided_stack_name, guided_s3_bucket, guided_s3_prefix, guided_region, guided_profile, changeset_decision, _capabilities, _parameter_overrides, save_to_config = guided_deploy( stack_name, s3_bucket, region, profile, confirm_changeset, _parameter_override_keys, parameter_overrides diff --git a/samcli/commands/init/__init__.py b/samcli/commands/init/__init__.py index d738abf1a2..2cd8889281 100644 --- a/samcli/commands/init/__init__.py +++ b/samcli/commands/init/__init__.py @@ -56,13 +56,13 @@ """ -@configuration_option(provider=TomlProvider(section="parameters")) @click.command( "init", help=HELP_TEXT, short_help="Init an AWS SAM application.", context_settings=dict(help_option_names=["-h", "--help"]), ) +@configuration_option(provider=TomlProvider(section="parameters")) @click.option( "--no-interactive", is_flag=True, diff --git a/samcli/lib/config/samconfig.py b/samcli/lib/config/samconfig.py index f2fb8d7dce..bc47a44ae4 100644 --- a/samcli/lib/config/samconfig.py +++ b/samcli/lib/config/samconfig.py @@ -21,6 +21,7 @@ class SamConfig: """ document = None + VERSION = "0.1" def __init__(self, config_dir, filename=None): """ @@ -164,6 +165,7 @@ def _write(self): return if not self.exists(): open(self.filepath, "a+").close() + self.filepath.write_text(tomlkit.dumps(self.document)) @staticmethod diff --git a/tests/testing_utils.py b/tests/testing_utils.py index 94df3a9418..41011b761a 100644 --- a/tests/testing_utils.py +++ b/tests/testing_utils.py @@ -3,6 +3,7 @@ import tempfile import shutil + IS_WINDOWS = platform.system().lower() == "windows" RUNNING_ON_CI = os.environ.get("APPVEYOR", False) RUNNING_TEST_FOR_MASTER_ON_CI = os.environ.get("APPVEYOR_REPO_BRANCH", "master") != "master" diff --git a/tests/unit/cli/test_types.py b/tests/unit/cli/test_types.py index 54f57af616..f1c5b12297 100644 --- a/tests/unit/cli/test_types.py +++ b/tests/unit/cli/test_types.py @@ -94,9 +94,9 @@ def setUp(self): # Non-string ("{1:1}"), # Wrong notation - ("a==b"), + # ("a==b"), # Wrong multi-key notation - ("a==b,c==d"), + # ("a==b,c==d"), ] ) def test_must_fail_on_invalid_format(self, input): @@ -128,9 +128,9 @@ def setUp(self): # Just a string ("some string"), # Wrong notation - ("a==b"), + # ("a==b"), # Wrong multi-key notation - ("a==b,c==d"), + # ("a==b,c==d"), ] ) def test_must_fail_on_invalid_format(self, input): diff --git a/tests/unit/lib/config/__init__.py b/tests/unit/commands/samconfig/__init__.py similarity index 100% rename from tests/unit/lib/config/__init__.py rename to tests/unit/commands/samconfig/__init__.py diff --git a/tests/unit/commands/samconfig/test_samconfig.py 
b/tests/unit/commands/samconfig/test_samconfig.py new file mode 100644 index 0000000000..e34f8231f2 --- /dev/null +++ b/tests/unit/commands/samconfig/test_samconfig.py @@ -0,0 +1,687 @@ +""" +Tests whether SAM Config is being read by all CLI commands +""" + +import json +import os +import shutil +import tempfile +from pathlib import Path +from contextlib import contextmanager +from samcli.lib.config.samconfig import SamConfig, DEFAULT_ENV + +from click.testing import CliRunner + +from unittest import TestCase +from unittest.mock import patch, ANY +import logging + +LOG = logging.getLogger() +logging.basicConfig() + + +class TestSamConfigForAllCommands(TestCase): + def setUp(self): + self._old_cwd = os.getcwd() + + self.scratch_dir = tempfile.mkdtemp() + Path(self.scratch_dir, "envvar.json").write_text("{}") + + os.chdir(self.scratch_dir) + + def tearDown(self): + os.chdir(self._old_cwd) + shutil.rmtree(self.scratch_dir) + self.scratch_dir = None + + @patch("samcli.commands.init.do_cli") + def test_init(self, do_cli_mock): + config_values = { + "no_interactive": True, + "location": "github.com", + "runtime": "nodejs10.x", + "dependency_manager": "maven", + "output_dir": "myoutput", + "name": "myname", + "app_template": "apptemplate", + "no_input": True, + "extra_context": '{"key": "value", "key2": "value2"}', + } + + with samconfig_parameters(["init"], self.scratch_dir, **config_values) as config_path: + from samcli.commands.init import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + ANY, + True, + "github.com", + "nodejs10.x", + "maven", + "myoutput", + "myname", + "apptemplate", + True, + '{"key": "value", "key2": "value2"}', + ) + + @patch("samcli.commands.validate.validate.do_cli") + def test_validate(self, do_cli_mock): + config_values = {"template_file": "mytemplate.yaml"} + + with samconfig_parameters(["validate"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.validate.validate import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with(ANY, str(Path(os.getcwd(), "mytemplate.yaml"))) + + @patch("samcli.commands.build.command.do_cli") + def test_build(self, do_cli_mock): + config_values = { + "function_identifier": "foo", + "template_file": "mytemplate.yaml", + "base_dir": "basedir", + "build_dir": "builddir", + "use_container": True, + "manifest": "requirements.txt", + "docker_network": "mynetwork", + "skip_pull_image": True, + "parameter_overrides": "ParameterKey=Key,ParameterValue=Value ParameterKey=Key2,ParameterValue=Value2", + } + + with samconfig_parameters(["build"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.build.command import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + "foo", + str(Path(os.getcwd(), 
"mytemplate.yaml")), + "basedir", + "builddir", + True, + True, + "requirements.txt", + "mynetwork", + True, + {"Key": "Value", "Key2": "Value2"}, + None, + ) + + @patch("samcli.commands.local.invoke.cli.do_cli") + def test_local_invoke(self, do_cli_mock): + config_values = { + "function_identifier": "foo", + "template_file": "mytemplate.yaml", + "event": "event", + "no_event": False, + "env_vars": "envvar.json", + "debug_port": [1, 2, 3], + "debug_args": "args", + "debugger_path": "mypath", + "docker_volume_basedir": "basedir", + "docker_network": "mynetwork", + "log_file": "logfile", + "layer_cache_basedir": "basedir", + "skip_pull_image": True, + "force_image_build": True, + "parameter_overrides": "ParameterKey=Key,ParameterValue=Value ParameterKey=Key2,ParameterValue=Value2", + } + + # NOTE: Because we don't load the full Click BaseCommand here, this is mounted as top-level command + with samconfig_parameters(["invoke"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.local.invoke.cli import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + ANY, + "foo", + str(Path(os.getcwd(), "mytemplate.yaml")), + "event", + False, + "envvar.json", + (1, 2, 3), + "args", + "mypath", + "basedir", + "mynetwork", + "logfile", + "basedir", + True, + True, + {"Key": "Value", "Key2": "Value2"}, + ) + + @patch("samcli.commands.local.start_api.cli.do_cli") + def test_local_start_api(self, do_cli_mock): + + config_values = { + "template_file": "mytemplate.yaml", + "host": "127.0.0.1", + "port": 12345, + "static_dir": "static_dir", + "env_vars": "envvar.json", + "debug_port": [1, 2, 3], + "debug_args": "args", + "debugger_path": "mypath", + "docker_volume_basedir": "basedir", + "docker_network": "mynetwork", + "log_file": "logfile", + "layer_cache_basedir": "basedir", + "skip_pull_image": True, + "force_image_build": True, + "parameter_overrides": "ParameterKey=Key,ParameterValue=Value ParameterKey=Key2,ParameterValue=Value2", + } + + # NOTE: Because we don't load the full Click BaseCommand here, this is mounted as top-level command + with samconfig_parameters(["start-api"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.local.start_api.cli import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + ANY, + "127.0.0.1", + 12345, + "static_dir", + str(Path(os.getcwd(), "mytemplate.yaml")), + "envvar.json", + (1, 2, 3), + "args", + "mypath", + "basedir", + "mynetwork", + "logfile", + "basedir", + True, + True, + {"Key": "Value", "Key2": "Value2"}, + ) + + @patch("samcli.commands.local.start_lambda.cli.do_cli") + def test_local_start_lambda(self, do_cli_mock): + + config_values = { + "template_file": "mytemplate.yaml", + "host": "127.0.0.1", + "port": 12345, + "env_vars": "envvar.json", + "debug_port": [1, 2, 3], + "debug_args": "args", + "debugger_path": "mypath", + "docker_volume_basedir": "basedir", + "docker_network": "mynetwork", + "log_file": "logfile", + "layer_cache_basedir": "basedir", + "skip_pull_image": True, + 
"force_image_build": True, + "parameter_overrides": "ParameterKey=Key,ParameterValue=Value", + } + + # NOTE: Because we don't load the full Click BaseCommand here, this is mounted as top-level command + with samconfig_parameters(["start-lambda"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.local.start_lambda.cli import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + ANY, + "127.0.0.1", + 12345, + str(Path(os.getcwd(), "mytemplate.yaml")), + "envvar.json", + (1, 2, 3), + "args", + "mypath", + "basedir", + "mynetwork", + "logfile", + "basedir", + True, + True, + {"Key": "Value"}, + ) + + @patch("samcli.commands.package.command.do_cli") + def test_package(self, do_cli_mock): + + config_values = { + "template_file": "mytemplate.yaml", + "s3_bucket": "mybucket", + "force_upload": True, + "s3_prefix": "myprefix", + "kms_key_id": "mykms", + "use_json": True, + "metadata": '{"m1": "value1", "m2": "value2"}', + "region": "myregion", + "output_template_file": "output.yaml", + } + + with samconfig_parameters(["package"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.package.command import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + str(Path(os.getcwd(), "mytemplate.yaml")), + "mybucket", + "myprefix", + "mykms", + "output.yaml", + True, + True, + {"m1": "value1", "m2": "value2"}, + "myregion", + None, + ) + + @patch("samcli.commands.deploy.command.do_cli") + def test_deploy(self, do_cli_mock): + + config_values = { + "template_file": "mytemplate.yaml", + "stack_name": "mystack", + "s3_bucket": "mybucket", + "force_upload": True, + "s3_prefix": "myprefix", + "kms_key_id": "mykms", + "parameter_overrides": "ParameterKey=Key,ParameterValue=Value", + "capabilities": "cap1 cap2", + "no_execute_changeset": True, + "role_arn": "arn", + "notification_arns": "notify1 notify2", + "fail_on_empty_changeset": True, + "use_json": True, + "tags": 'a=tag1 b="tag with spaces"', + "metadata": '{"m1": "value1", "m2": "value2"}', + "guided": True, + "confirm_changeset": True, + "region": "myregion", + } + + with samconfig_parameters(["deploy"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.deploy.command import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + str(Path(os.getcwd(), "mytemplate.yaml")), + "mystack", + "mybucket", + True, + "myprefix", + "mykms", + {"Key": "Value"}, + ["cap1", "cap2"], + True, + "arn", + ["notify1", "notify2"], + True, + True, + {"a": "tag1", "b": '"tag with spaces"'}, + {"m1": "value1", "m2": "value2"}, + True, + True, + "myregion", + None, + ) + + @patch("samcli.commands.logs.command.do_cli") + def test_logs(self, do_cli_mock): + config_values = { + "name": "myfunction", + "stack_name": 
"mystack", + "filter": "myfilter", + "tail": True, + "start_time": "starttime", + "end_time": "endtime", + } + + with samconfig_parameters(["logs"], self.scratch_dir, **config_values) as config_path: + from samcli.commands.logs.command import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with("myfunction", "mystack", "myfilter", True, "starttime", "endtime") + + @patch("samcli.commands.publish.command.do_cli") + def test_publish(self, do_cli_mock): + config_values = {"template_file": "mytemplate.yaml", "semantic_version": "0.1.1"} + + with samconfig_parameters(["publish"], self.scratch_dir, **config_values) as config_path: + from samcli.commands.publish.command import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with(ANY, str(Path(os.getcwd(), "mytemplate.yaml")), "0.1.1") + + def test_info_must_not_read_from_config(self): + config_values = {"a": "b"} + + with samconfig_parameters([], self.scratch_dir, **config_values) as config_path: + from samcli.cli.main import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, ["--info"]) + + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + info_result = json.loads(result.output) + self.assertTrue("version" in info_result) + + +class TestSamConfigWithOverrides(TestCase): + def setUp(self): + self._old_cwd = os.getcwd() + + self.scratch_dir = tempfile.mkdtemp() + Path(self.scratch_dir, "otherenvvar.json").write_text("{}") + + os.chdir(self.scratch_dir) + + def tearDown(self): + os.chdir(self._old_cwd) + shutil.rmtree(self.scratch_dir) + self.scratch_dir = None + + @patch("samcli.commands.local.start_lambda.cli.do_cli") + def test_override_with_cli_params(self, do_cli_mock): + + config_values = { + "template_file": "mytemplate.yaml", + "host": "127.0.0.1", + "port": 12345, + "env_vars": "envvar.json", + "debug_port": [1, 2, 3], + "debug_args": "args", + "debugger_path": "mypath", + "docker_volume_basedir": "basedir", + "docker_network": "mynetwork", + "log_file": "logfile", + "layer_cache_basedir": "basedir", + "skip_pull_image": True, + "force_image_build": True, + "parameter_overrides": "ParameterKey=Key,ParameterValue=Value", + } + + # NOTE: Because we don't load the full Click BaseCommand here, this is mounted as top-level command + with samconfig_parameters(["start-lambda"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.local.start_lambda.cli import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke( + cli, + [ + "--template-file", + "othertemplate.yaml", + "--host", + "otherhost", + "--port", + 9999, + "--env-vars", + "otherenvvar.json", + "--debug-port", + 9, + "--debug-port", + 8, + "--debug-port", + 7, + "--debug-args", + "otherargs", + "--debugger-path", + "otherpath", + "--docker-volume-basedir", + "otherbasedir", + "--docker-network", + "othernetwork", + "--log-file", + "otherlogfile", + 
"--layer-cache-basedir", + "otherbasedir", + "--skip-pull-image", + "--force-image-build", + "--parameter-overrides", + "A=123 C=D E=F12! G=H", + ], + ) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + ANY, + "otherhost", + 9999, + str(Path(os.getcwd(), "othertemplate.yaml")), + "otherenvvar.json", + (9, 8, 7), + "otherargs", + "otherpath", + "otherbasedir", + "othernetwork", + "otherlogfile", + "otherbasedir", + True, + True, + {"A": "123", "C": "D", "E": "F12!", "G": "H"}, + ) + + @patch("samcli.commands.local.start_lambda.cli.do_cli") + def test_override_with_cli_params_and_envvars(self, do_cli_mock): + + config_values = { + "template_file": "mytemplate.yaml", + "host": "127.0.0.1", + "port": 12345, + "env_vars": "envvar.json", + "debug_port": [1, 2, 3], + "debug_args": "args", + "debugger_path": "mypath", + "docker_volume_basedir": "basedir", + "docker_network": "mynetwork", + "log_file": "logfile", + "layer_cache_basedir": "basedir", + "skip_pull_image": True, + "force_image_build": False, + } + + # NOTE: Because we don't load the full Click BaseCommand here, this is mounted as top-level command + with samconfig_parameters(["start-lambda"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.local.start_lambda.cli import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke( + cli, + env={ + "SAM_TEMPLATE_FILE": "envtemplate.yaml", + "SAM_SKIP_PULL_IMAGE": "False", + "SAM_FORCE_IMAGE_BUILD": "False", + "SAM_DOCKER_NETWORK": "envnetwork", + # Debug port is exclusively provided through envvars and not thru CLI args + "SAM_DEBUG_PORT": "13579", + "DEBUGGER_ARGS": "envargs", + "SAM_DOCKER_VOLUME_BASEDIR": "envbasedir", + "SAM_LAYER_CACHE_BASEDIR": "envlayercache", + }, + args=[ + "--host", + "otherhost", + "--port", + 9999, + "--env-vars", + "otherenvvar.json", + "--debugger-path", + "otherpath", + "--log-file", + "otherlogfile", + # this is a case where cli args takes precedence over both + # config file and envvar + "--force-image-build", + # Parameter overrides is exclusively provided through CLI args and not config + "--parameter-overrides", + "A=123 C=D E=F12! G=H", + ], + ) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + ANY, + "otherhost", + 9999, + str(Path(os.getcwd(), "envtemplate.yaml")), + "otherenvvar.json", + (13579,), + "envargs", + "otherpath", + "envbasedir", + "envnetwork", + "otherlogfile", + "envlayercache", + False, + True, + {"A": "123", "C": "D", "E": "F12!", "G": "H"}, + ) + + +@contextmanager +def samconfig_parameters(cmd_names, config_dir=None, env=None, **kwargs): + """ + ContextManager to write a new SAM Config and remove the file after the contextmanager exists + + Parameters + ---------- + cmd_names : list(str) + Name of the full commnad split as a list: ["generate-event", "s3", "put"] + + config_dir : str + Path where the SAM config file should be written to. Defaults to os.getcwd() + + env : str + Optional name of the config environment. This is currently unused + + kwargs : dict + Parameter names and values to be written to the file. 
+ + Returns + ------- + Path to the config file + """ + + env = env or DEFAULT_ENV + section = "parameters" + samconfig = SamConfig(config_dir=config_dir) + + try: + for k, v in kwargs.items(): + samconfig.put(cmd_names, section, k, v, env=env) + + samconfig.flush() + yield samconfig.path() + finally: + Path(samconfig.path()).unlink() diff --git a/tests/unit/lib/samconfig/__init__.py b/tests/unit/lib/samconfig/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/config/test_samconfig.py b/tests/unit/lib/samconfig/test_samconfig.py similarity index 100% rename from tests/unit/lib/config/test_samconfig.py rename to tests/unit/lib/samconfig/test_samconfig.py From 26ad24e7a2c0eed6a8fec15ded483ed3b116996d Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Fri, 22 Nov 2019 16:11:25 -0800 Subject: [PATCH 35/45] fix: add version to `samconfig.toml` file (#1581) * fix: add version to `samconfig.toml` file - support version key, any float is okay. - if a config file is present and the version key is missing, we do not process it. - if a config file is missing, thats fine. this check does not get in the way. - validation logic to determine if a SAM CLI version is compatible can be written later. * bugfix: do not continously read everytime a samconfig.put is called --- samcli/cli/cli_config_file.py | 9 +++ samcli/commands/exceptions.py | 6 ++ samcli/lib/config/exceptions.py | 7 ++ samcli/lib/config/samconfig.py | 38 +++++++---- samcli/lib/config/version.py | 6 ++ tests/unit/cli/test_cli_config_file.py | 20 +++++- tests/unit/commands/deploy/test_command.py | 77 ++++++++++++++++++++++ tests/unit/lib/samconfig/test_samconfig.py | 30 +++++++++ 8 files changed, 179 insertions(+), 14 deletions(-) create mode 100644 samcli/lib/config/exceptions.py create mode 100644 samcli/lib/config/version.py diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py index 032717fbfe..db346472ee 100644 --- a/samcli/cli/cli_config_file.py +++ b/samcli/cli/cli_config_file.py @@ -10,6 +10,9 @@ import logging import click + +from samcli.commands.exceptions import ConfigException +from samcli.lib.config.exceptions import SamConfigVersionException from samcli.cli.context import get_cmd_names from samcli.lib.config.samconfig import SamConfig, DEFAULT_ENV @@ -53,6 +56,7 @@ def __call__(self, config_dir, config_env, cmd_names): # NOTE(TheSriram): change from tomlkit table type to normal dictionary, # so that click defaults work out of the box. + samconfig.sanity_check() resolved_config = {k: v for k, v in samconfig.get_all(cmd_names, self.section, env=config_env).items()} LOG.debug("Configuration values read from the file: %s", resolved_config) @@ -65,6 +69,11 @@ def __call__(self, config_dir, config_env, cmd_names): self.section, str(ex), ) + + except SamConfigVersionException as ex: + LOG.debug("%s %s", samconfig.path(), str(ex)) + raise ConfigException(f"Syntax invalid in samconfig.toml: {str(ex)}") + except Exception as ex: LOG.debug("Error reading configuration file: %s %s", samconfig.path(), str(ex)) diff --git a/samcli/commands/exceptions.py b/samcli/commands/exceptions.py index 159d05bea4..932bccf480 100644 --- a/samcli/commands/exceptions.py +++ b/samcli/commands/exceptions.py @@ -5,6 +5,12 @@ import click +class ConfigException(click.ClickException): + """ + Exception class when configuration file fails checks. 
+ """ + + class UserException(click.ClickException): """ Base class for all exceptions that need to be surfaced to the user. Typically, we will display the exception diff --git a/samcli/lib/config/exceptions.py b/samcli/lib/config/exceptions.py new file mode 100644 index 0000000000..50297ce722 --- /dev/null +++ b/samcli/lib/config/exceptions.py @@ -0,0 +1,7 @@ +""" +Exceptions to be used by samconfig.py +""" + + +class SamConfigVersionException(Exception): + pass diff --git a/samcli/lib/config/samconfig.py b/samcli/lib/config/samconfig.py index bc47a44ae4..e8f6eaff7c 100644 --- a/samcli/lib/config/samconfig.py +++ b/samcli/lib/config/samconfig.py @@ -9,6 +9,9 @@ import tomlkit +from samcli.lib.config.version import SAM_CONFIG_VERSION, VERSION_KEY +from samcli.lib.config.exceptions import SamConfigVersionException + LOG = logging.getLogger(__name__) DEFAULT_CONFIG_FILE_NAME = "samconfig.toml" @@ -21,7 +24,6 @@ class SamConfig: """ document = None - VERSION = "0.1" def __init__(self, config_dir, filename=None): """ @@ -101,8 +103,8 @@ def put(self, cmd_names, section, key, value, env=DEFAULT_ENV): If the data is invalid """ - self._read() if not self.document: + self._read() # Empty document prepare the initial structure. self.document.update({env: {self._to_key(cmd_names): {section: {key: value}}}}) # Only update appropriate key value pairs within a section @@ -149,15 +151,16 @@ def config_dir(template_file_path=None): return os.getcwd() def _read(self): - if self.document: - return self.document - - try: - txt = self.filepath.read_text() - self.document = tomlkit.loads(txt) - except OSError: - self.document = tomlkit.document() - + if not self.document: + try: + txt = self.filepath.read_text() + self.document = tomlkit.loads(txt) + self._version_sanity_check(self._version()) + except OSError: + self.document = tomlkit.document() + + if self.document.body: + self._version_sanity_check(self._version()) return self.document def _write(self): @@ -166,8 +169,21 @@ def _write(self): if not self.exists(): open(self.filepath, "a+").close() + current_version = self._version() if self._version() else SAM_CONFIG_VERSION + try: + self.document.add(VERSION_KEY, current_version) + except tomlkit.exceptions.KeyAlreadyPresent: + # NOTE(TheSriram): Do not attempt to re-write an existing version + pass self.filepath.write_text(tomlkit.dumps(self.document)) + def _version(self): + return self.document.get(VERSION_KEY, None) + + def _version_sanity_check(self, version): + if not isinstance(version, float): + raise SamConfigVersionException(f"'{VERSION_KEY}' key is not present or is in unrecognized format. ") + @staticmethod def _to_key(cmd_names): # construct a parsed name that is of the format: a_b_c_d diff --git a/samcli/lib/config/version.py b/samcli/lib/config/version.py new file mode 100644 index 0000000000..bd5a7f330f --- /dev/null +++ b/samcli/lib/config/version.py @@ -0,0 +1,6 @@ +""" +Constants and helper functions for samconfig.toml's versioning. 
+""" + +SAM_CONFIG_VERSION = 0.1 +VERSION_KEY = "version" diff --git a/tests/unit/cli/test_cli_config_file.py b/tests/unit/cli/test_cli_config_file.py index 490e4029b0..63caae55a9 100644 --- a/tests/unit/cli/test_cli_config_file.py +++ b/tests/unit/cli/test_cli_config_file.py @@ -2,9 +2,9 @@ from pathlib import Path from unittest import TestCase -from unittest.mock import MagicMock, patch - +from unittest.mock import MagicMock +from samcli.commands.exceptions import ConfigException from samcli.cli.cli_config_file import TomlProvider, configuration_option, configuration_callback, get_ctx_defaults from samcli.lib.config.samconfig import SamConfig @@ -27,11 +27,25 @@ def setUp(self): def test_toml_valid_with_section(self): config_dir = tempfile.gettempdir() configpath = Path(config_dir, "samconfig.toml") - configpath.write_text("[config_env.topic.parameters]\nword='clarity'\n") + configpath.write_text("version=0.1\n[config_env.topic.parameters]\nword='clarity'\n") self.assertEqual( TomlProvider(section=self.parameters)(config_dir, self.config_env, [self.cmd_name]), {"word": "clarity"} ) + def test_toml_valid_with_no_version(self): + config_dir = tempfile.gettempdir() + configpath = Path(config_dir, "samconfig.toml") + configpath.write_text("[config_env.topic.parameters]\nword='clarity'\n") + with self.assertRaises(ConfigException): + TomlProvider(section=self.parameters)(config_dir, self.config_env, [self.cmd_name]) + + def test_toml_valid_with_invalid_version(self): + config_dir = tempfile.gettempdir() + configpath = Path(config_dir, "samconfig.toml") + configpath.write_text("version='abc'\n[config_env.topic.parameters]\nword='clarity'\n") + with self.assertRaises(ConfigException): + TomlProvider(section=self.parameters)(config_dir, self.config_env, [self.cmd_name]) + def test_toml_invalid_empty_dict(self): config_dir = tempfile.gettempdir() configpath = Path(config_dir, "samconfig.toml") diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index 5f7331494d..4ea77dc5d5 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -421,3 +421,80 @@ def test_all_args_guided_no_params_no_save_config( self.assertEqual(mock_save_config.call_count, 0) mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") self.assertEqual(context_mock.run.call_count, 1) + + @patch("samcli.commands.package.command.click") + @patch("samcli.commands.package.package_context.PackageContext") + @patch("samcli.commands.deploy.command.click") + @patch("samcli.commands.deploy.deploy_context.DeployContext") + @patch("samcli.commands.deploy.command.save_config") + @patch("samcli.commands.deploy.command.manage_stack") + @patch("samcli.commands.deploy.command.get_template_parameters") + @patch("samcli.commands.deploy.command.get_config_ctx") + def test_all_args_guided_no_params_no_save_config( + self, + mock_get_config_ctx, + mock_get_template_parameters, + mock_managed_stack, + mock_save_config, + mock_deploy_context, + mock_deploy_click, + mock_package_context, + mock_package_click, + ): + + context_mock = Mock() + mock_sam_config = MagicMock() + mock_sam_config.exists = MagicMock(return_value=True) + mock_get_config_ctx.return_value = (None, mock_sam_config) + mock_get_template_parameters.return_value = {} + mock_deploy_context.return_value.__enter__.return_value = context_mock + mock_deploy_click.prompt = MagicMock(side_effect=["sam-app", "us-east-1", ("CAPABILITY_IAM",)]) + mock_deploy_click.confirm = 
MagicMock(side_effect=[True, False, False]) + + mock_managed_stack.return_value = "managed-s3-bucket" + + do_cli( + template_file=self.template_file, + stack_name=self.stack_name, + s3_bucket=None, + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region=self.region, + profile=self.profile, + use_json=self.use_json, + metadata=self.metadata, + guided=True, + confirm_changeset=True, + ) + + mock_deploy_context.assert_called_with( + template_file=ANY, + stack_name="sam-app", + s3_bucket="managed-s3-bucket", + force_upload=self.force_upload, + s3_prefix="sam-app", + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region="us-east-1", + profile=self.profile, + confirm_changeset=True, + ) + + context_mock.run.assert_called_with() + self.assertEqual(mock_save_config.call_count, 0) + mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") + self.assertEqual(context_mock.run.call_count, 1) diff --git a/tests/unit/lib/samconfig/test_samconfig.py b/tests/unit/lib/samconfig/test_samconfig.py index 54575c1198..d6277db9ed 100644 --- a/tests/unit/lib/samconfig/test_samconfig.py +++ b/tests/unit/lib/samconfig/test_samconfig.py @@ -3,6 +3,8 @@ from unittest import TestCase +from samcli.lib.config.exceptions import SamConfigVersionException +from samcli.lib.config.version import VERSION_KEY, SAM_CONFIG_VERSION from samcli.lib.config.samconfig import SamConfig, DEFAULT_CONFIG_FILE_NAME @@ -20,6 +22,7 @@ def _setup_config(self): self.samconfig.flush() self.assertTrue(self.samconfig.exists()) self.assertTrue(self.samconfig.sanity_check()) + self.assertEqual(SAM_CONFIG_VERSION, self.samconfig.document.get(VERSION_KEY)) def test_init(self): self.assertEqual(self.samconfig.filepath, Path(self.config_dir, DEFAULT_CONFIG_FILE_NAME)) @@ -36,3 +39,30 @@ def test_check_config_exists(self): def test_check_sanity(self): self.assertTrue(self.samconfig.sanity_check()) + + def test_check_version_non_supported_type(self): + self._setup_config() + self.samconfig.document.remove(VERSION_KEY) + self.samconfig.document.add(VERSION_KEY, "aadeff") + with self.assertRaises(SamConfigVersionException): + self.samconfig.sanity_check() + + def test_check_version_no_version_exists(self): + self._setup_config() + self.samconfig.document.remove(VERSION_KEY) + with self.assertRaises(SamConfigVersionException): + self.samconfig.sanity_check() + + def test_check_version_float(self): + self._setup_config() + self.samconfig.document.remove(VERSION_KEY) + self.samconfig.document.add(VERSION_KEY, 0.2) + self.samconfig.sanity_check() + + def test_write_config_file_non_standard_version(self): + self._setup_config() + self.samconfig.document.remove(VERSION_KEY) + self.samconfig.document.add(VERSION_KEY, 0.2) + self.samconfig.put(cmd_names=["local", "start", "api"], section="parameters", key="skip_pull_image", value=True) + self.samconfig.sanity_check() + self.assertEqual(self.samconfig.document.get(VERSION_KEY), 0.2) From 
b7dfd3fead1e623acf73fa023f5dd17f759cdea5 Mon Sep 17 00:00:00 2001 From: Alex Wood Date: Fri, 22 Nov 2019 17:08:58 -0800 Subject: [PATCH 36/45] feat: Init README Deploy Updates (#1580) --- .../{{cookiecutter.project_name}}/README.md | 43 +++++---------- .../{{cookiecutter.project_name}}/README.md | 54 ++++--------------- .../{{cookiecutter.project_name}}/README.md | 43 +++++---------- .../{{cookiecutter.project_name}}/README.md | 43 +++++---------- .../{{cookiecutter.project_name}}/README.md | 45 +++++----------- .../{{cookiecutter.project_name}}/README.md | 43 +++++---------- .../{{cookiecutter.project_name}}/README.md | 43 +++++---------- 7 files changed, 82 insertions(+), 232 deletions(-) diff --git a/samcli/local/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/README.md b/samcli/local/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/README.md index 17d9cf30ec..63c74c5d4e 100644 --- a/samcli/local/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/README.md +++ b/samcli/local/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/README.md @@ -23,44 +23,26 @@ The Serverless Application Model Command Line Interface (SAM CLI) is an extensio To use the SAM CLI, you need the following tools. -* AWS CLI - [Install the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) and [configure it with your AWS credentials]. * SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) * .NET Core - [Install .NET Core](https://www.microsoft.com/net/download) * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) -The SAM CLI uses an Amazon S3 bucket to store your application's deployment artifacts. If you don't have a bucket suitable for this purpose, create one. Replace `BUCKET_NAME` in the commands in this section with a unique bucket name. +To build and deploy your application for the first time, run the following in your shell: ```bash -{{ cookiecutter.project_name }}$ aws s3 mb s3://BUCKET_NAME +sam build +sam deploy --guided ``` -To prepare the application for deployment, use the `sam package` command. +The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts: -```bash -{{ cookiecutter.project_name }}$ sam package \ - --output-template-file packaged.yaml \ - --s3-bucket BUCKET_NAME -``` - -The SAM CLI creates deployment packages, uploads them to the S3 bucket, and creates a new version of the template that refers to the artifacts in the bucket. +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. +* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. 
To deploy an AWS CloudFormation stack which creates or modified IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command. +* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application. -To deploy the application, use the `sam deploy` command. - -```bash -{{ cookiecutter.project_name }}$ sam deploy \ - --template-file packaged.yaml \ - --stack-name {{ cookiecutter.project_name }} \ - --capabilities CAPABILITY_IAM -``` - -After deployment is complete you can run the following command to retrieve the API Gateway Endpoint URL: - -```bash -{{ cookiecutter.project_name }}$ aws cloudformation describe-stacks \ - --stack-name {{ cookiecutter.project_name }} \ - --query 'Stacks[].Outputs[?OutputKey==`HelloWorldApi`]' \ - --output table -``` +You can find your API Gateway Endpoint URL in the output values displayed after deployment. ## Use the SAM CLI to build and test locally @@ -123,11 +105,10 @@ Tests are defined in the `test` folder in this project. ## Cleanup -To delete the sample application and the bucket that you created, use the AWS CLI. +To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following: ```bash -{{ cookiecutter.project_name }}$ aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} -{{ cookiecutter.project_name }}$ aws s3 rb s3://BUCKET_NAME +aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} ``` ## Resources diff --git a/samcli/local/init/templates/cookiecutter-aws-sam-hello-golang/{{cookiecutter.project_name}}/README.md b/samcli/local/init/templates/cookiecutter-aws-sam-hello-golang/{{cookiecutter.project_name}}/README.md index 2d783abbc2..8645d7d669 100644 --- a/samcli/local/init/templates/cookiecutter-aws-sam-hello-golang/{{cookiecutter.project_name}}/README.md +++ b/samcli/local/init/templates/cookiecutter-aws-sam-hello-golang/{{cookiecutter.project_name}}/README.md @@ -77,38 +77,21 @@ AWS Lambda Python runtime requires a flat folder with all dependencies including ... ``` -First and foremost, we need a `S3 bucket` where we can upload our Lambda functions packaged as ZIP before we deploy anything - If you don't have a S3 bucket to store code artifacts then this is a good time to create one: +To deploy your application for the first time, run the following in your shell: ```bash -aws s3 mb s3://BUCKET_NAME +sam deploy --guided ``` -Next, run the following command to package our Lambda function to S3: +The command will package and deploy your application to AWS, with a series of prompts: -```bash -sam package \ - --output-template-file packaged.yaml \ - --s3-bucket REPLACE_THIS_WITH_YOUR_S3_BUCKET_NAME -``` - -Next, the following command will create a Cloudformation Stack and deploy your SAM resources. 
- -```bash -sam deploy \ - --template-file packaged.yaml \ - --stack-name {{ cookiecutter.project_name.lower().replace(' ', '-') }} \ - --capabilities CAPABILITY_IAM -``` - -> **See [Serverless Application Model (SAM) HOWTO Guide](https://github.com/awslabs/serverless-application-model/blob/master/HOWTO.md) for more details in how to get started.** +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. +* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modified IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command. +* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application. -After deployment is complete you can run the following command to retrieve the API Gateway Endpoint URL: - -```bash -aws cloudformation describe-stacks \ - --stack-name {{ cookiecutter.project_name.lower().replace(' ', '-') }} \ - --query 'Stacks[].Outputs' -``` +You can find your API Gateway Endpoint URL in the output values displayed after deployment. ### Testing @@ -153,25 +136,6 @@ If it's already installed, run the following command to ensure it's the latest v ```shell choco upgrade golang ``` -## AWS CLI commands - -AWS CLI commands to package, deploy and describe outputs defined within the cloudformation stack: - -```bash -sam package \ - --template-file template.yaml \ - --output-template-file packaged.yaml \ - --s3-bucket REPLACE_THIS_WITH_YOUR_S3_BUCKET_NAME - -sam deploy \ - --template-file packaged.yaml \ - --stack-name {{ cookiecutter.project_name.lower().replace(' ', '-') }} \ - --capabilities CAPABILITY_IAM \ - --parameter-overrides MyParameterSample=MySampleValue - -aws cloudformation describe-stacks \ - --stack-name {{ cookiecutter.project_name.lower().replace(' ', '-') }} --query 'Stacks[].Outputs' -``` ## Bringing to the next level diff --git a/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-gradle/{{cookiecutter.project_name}}/README.md b/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-gradle/{{cookiecutter.project_name}}/README.md index a1729ec9d0..f1b9712296 100644 --- a/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-gradle/{{cookiecutter.project_name}}/README.md +++ b/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-gradle/{{cookiecutter.project_name}}/README.md @@ -23,44 +23,26 @@ The Serverless Application Model Command Line Interface (SAM CLI) is an extensio To use the SAM CLI, you need the following tools. 
-* AWS CLI - [Install the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) and [configure it with your AWS credentials]. * SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) * Java8 - [Install the Java SE Development Kit 8](http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html) * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) -The SAM CLI uses an Amazon S3 bucket to store your application's deployment artifacts. If you don't have a bucket suitable for this purpose, create one. Replace `BUCKET_NAME` in the commands in this section with a unique bucket name. +To build and deploy your application for the first time, run the following in your shell: ```bash -{{ cookiecutter.project_name }}$ aws s3 mb s3://BUCKET_NAME +sam build +sam deploy --guided ``` -To prepare the application for deployment, use the `sam package` command. +The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts: -```bash -{{ cookiecutter.project_name }}$ sam package \ - --output-template-file packaged.yaml \ - --s3-bucket BUCKET_NAME -``` - -The SAM CLI creates deployment packages, uploads them to the S3 bucket, and creates a new version of the template that refers to the artifacts in the bucket. +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. +* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modified IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command. +* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application. -To deploy the application, use the `sam deploy` command. - -```bash -{{ cookiecutter.project_name }}$ sam deploy \ - --template-file packaged.yaml \ - --stack-name {{ cookiecutter.project_name }} \ - --capabilities CAPABILITY_IAM -``` - -After deployment is complete you can run the following command to retrieve the API Gateway Endpoint URL: - -```bash -{{ cookiecutter.project_name }}$ aws cloudformation describe-stacks \ - --stack-name {{ cookiecutter.project_name }} \ - --query 'Stacks[].Outputs[?OutputKey==`HelloWorldApi`]' \ - --output table -``` +You can find your API Gateway Endpoint URL in the output values displayed after deployment. 
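If you would rather look the endpoint up programmatically than read it off the deploy output, a minimal boto3 sketch could be used; it assumes the stack exposes the `HelloWorldApi` output used by these templates, and `my-stack-name` is a placeholder for the stack name chosen during `sam deploy --guided`:

```python
# Minimal sketch: read the HelloWorldApi output of a deployed stack with boto3.
# "my-stack-name" is a placeholder for the stack name chosen during `sam deploy --guided`.
import boto3


def get_api_endpoint(stack_name):
    cloudformation = boto3.client("cloudformation")
    stack = cloudformation.describe_stacks(StackName=stack_name)["Stacks"][0]
    outputs = {o["OutputKey"]: o["OutputValue"] for o in stack.get("Outputs", [])}
    return outputs.get("HelloWorldApi")


print(get_api_endpoint("my-stack-name"))
```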
## Use the SAM CLI to build and test locally @@ -124,11 +106,10 @@ HelloWorldFunction$ gradle test ## Cleanup -To delete the sample application and the bucket that you created, use the AWS CLI. +To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following: ```bash -{{ cookiecutter.project_name }}$ aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} -{{ cookiecutter.project_name }}$ aws s3 rb s3://BUCKET_NAME +aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} ``` ## Resources diff --git a/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-maven/{{cookiecutter.project_name}}/README.md b/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-maven/{{cookiecutter.project_name}}/README.md index 5b46ee17f5..34c8f025f2 100644 --- a/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-maven/{{cookiecutter.project_name}}/README.md +++ b/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-maven/{{cookiecutter.project_name}}/README.md @@ -23,45 +23,27 @@ The Serverless Application Model Command Line Interface (SAM CLI) is an extensio To use the SAM CLI, you need the following tools. -* AWS CLI - [Install the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) and [configure it with your AWS credentials]. * SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) * Java8 - [Install the Java SE Development Kit 8](http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html) * Maven - [Install Maven](https://maven.apache.org/install.html) * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) -The SAM CLI uses an Amazon S3 bucket to store your application's deployment artifacts. If you don't have a bucket suitable for this purpose, create one. Replace `BUCKET_NAME` in the commands in this section with a unique bucket name. +To build and deploy your application for the first time, run the following in your shell: ```bash -{{ cookiecutter.project_name }}$ aws s3 mb s3://BUCKET_NAME +sam build +sam deploy --guided ``` -To prepare the application for deployment, use the `sam package` command. +The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts: -```bash -{{ cookiecutter.project_name }}$ sam package \ - --output-template-file packaged.yaml \ - --s3-bucket BUCKET_NAME -``` - -The SAM CLI creates deployment packages, uploads them to the S3 bucket, and creates a new version of the template that refers to the artifacts in the bucket. +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. +* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. 
To deploy an AWS CloudFormation stack which creates or modified IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command. +* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application. -To deploy the application, use the `sam deploy` command. - -```bash -{{ cookiecutter.project_name }}$ sam deploy \ - --template-file packaged.yaml \ - --stack-name {{ cookiecutter.project_name }} \ - --capabilities CAPABILITY_IAM -``` - -After deployment is complete you can run the following command to retrieve the API Gateway Endpoint URL: - -```bash -{{ cookiecutter.project_name }}$ aws cloudformation describe-stacks \ - --stack-name {{ cookiecutter.project_name }} \ - --query 'Stacks[].Outputs[?OutputKey==`HelloWorldApi`]' \ - --output table -``` +You can find your API Gateway Endpoint URL in the output values displayed after deployment. ## Use the SAM CLI to build and test locally @@ -125,11 +107,10 @@ HelloWorldFunction$ mvn test ## Cleanup -To delete the sample application and the bucket that you created, use the AWS CLI. +To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following: ```bash -{{ cookiecutter.project_name }}$ aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} -{{ cookiecutter.project_name }}$ aws s3 rb s3://BUCKET_NAME +aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} ``` ## Resources diff --git a/samcli/local/init/templates/cookiecutter-aws-sam-hello-nodejs/{{cookiecutter.project_name}}/README.md b/samcli/local/init/templates/cookiecutter-aws-sam-hello-nodejs/{{cookiecutter.project_name}}/README.md index baab85559a..aa68a714ed 100644 --- a/samcli/local/init/templates/cookiecutter-aws-sam-hello-nodejs/{{cookiecutter.project_name}}/README.md +++ b/samcli/local/init/templates/cookiecutter-aws-sam-hello-nodejs/{{cookiecutter.project_name}}/README.md @@ -23,44 +23,26 @@ The Serverless Application Model Command Line Interface (SAM CLI) is an extensio To use the SAM CLI, you need the following tools. -* AWS CLI - [Install the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) and [configure it with your AWS credentials]. * SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) * Node.js - [Install Node.js 10](https://nodejs.org/en/), including the NPM package management tool. * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) -The SAM CLI uses an Amazon S3 bucket to store your application's deployment artifacts. If you don't have a bucket suitable for this purpose, create one. Replace `BUCKET_NAME` in the commands in this section with a unique bucket name. +To build and deploy your application for the first time, run the following in your shell: ```bash -{{ cookiecutter.project_name }}$ aws s3 mb s3://BUCKET_NAME +sam build +sam deploy --guided ``` -To prepare the application for deployment, use the `sam package` command. +The first command will build the source of your application. 
The second command will package and deploy your application to AWS, with a series of prompts: -```bash -{{ cookiecutter.project_name }}$ sam package \ - --output-template-file packaged.yaml \ - --s3-bucket BUCKET_NAME -``` - -The SAM CLI creates deployment packages, uploads them to the S3 bucket, and creates a new version of the template that refers to the artifacts in the bucket. +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. +* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modified IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command. +* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application. -To deploy the application, use the `sam deploy` command. - -```bash -{{ cookiecutter.project_name }}$ sam deploy \ - --template-file packaged.yaml \ - --stack-name {{ cookiecutter.project_name }} \ - --capabilities CAPABILITY_IAM -``` - -After deployment is complete you can run the following command to retrieve the API Gateway Endpoint URL: - -```bash -{{ cookiecutter.project_name }}$ aws cloudformation describe-stacks \ - --stack-name {{ cookiecutter.project_name }} \ - --query 'Stacks[].Outputs[?OutputKey==`HelloWorldApi`]' \ - --output table -``` +You can find your API Gateway Endpoint URL in the output values displayed after deployment. ## Use the SAM CLI to build and test locally @@ -125,15 +107,14 @@ hello-world$ npm run test ## Cleanup -To delete the sample application and the bucket that you created, use the AWS CLI. +To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following: ```bash -{{ cookiecutter.project_name }}$ aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} -{{ cookiecutter.project_name }}$ aws s3 rb s3://BUCKET_NAME +aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} ``` ## Resources See the [AWS SAM developer guide](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/what-is-sam.html) for an introduction to SAM specification, the SAM CLI, and serverless application concepts. 
-Next, you can use AWS Serverless Application Repository to deploy ready to use Apps that go beyond hello world samples and learn how authors developed their applications: [AWS Serverless Application Repository main page](https://aws.amazon.com/serverless/serverlessrepo/) \ No newline at end of file +Next, you can use AWS Serverless Application Repository to deploy ready to use Apps that go beyond hello world samples and learn how authors developed their applications: [AWS Serverless Application Repository main page](https://aws.amazon.com/serverless/serverlessrepo/) diff --git a/samcli/local/init/templates/cookiecutter-aws-sam-hello-python/{{cookiecutter.project_name}}/README.md b/samcli/local/init/templates/cookiecutter-aws-sam-hello-python/{{cookiecutter.project_name}}/README.md index a12489cb39..0fe6ea9a37 100644 --- a/samcli/local/init/templates/cookiecutter-aws-sam-hello-python/{{cookiecutter.project_name}}/README.md +++ b/samcli/local/init/templates/cookiecutter-aws-sam-hello-python/{{cookiecutter.project_name}}/README.md @@ -23,7 +23,6 @@ The Serverless Application Model Command Line Interface (SAM CLI) is an extensio To use the SAM CLI, you need the following tools. -* AWS CLI - [Install the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) and [configure it with your AWS credentials]. * SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) {%- if cookiecutter.runtime == 'python2.7' %} * [Python 2.7 installed](https://www.python.org/downloads/) @@ -32,39 +31,22 @@ To use the SAM CLI, you need the following tools. {%- endif %} * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) -The SAM CLI uses an Amazon S3 bucket to store your application's deployment artifacts. If you don't have a bucket suitable for this purpose, create one. Replace `BUCKET_NAME` in the commands in this section with a unique bucket name. +To build and deploy your application for the first time, run the following in your shell: ```bash -{{ cookiecutter.project_name }}$ aws s3 mb s3://BUCKET_NAME +sam build +sam deploy --guided ``` -To prepare the application for deployment, use the `sam package` command. +The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts: -```bash -{{ cookiecutter.project_name }}$ sam package \ - --output-template-file packaged.yaml \ - --s3-bucket BUCKET_NAME -``` - -The SAM CLI creates deployment packages, uploads them to the S3 bucket, and creates a new version of the template that refers to the artifacts in the bucket. +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. +* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. 
To deploy an AWS CloudFormation stack which creates or modified IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command. +* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application. -To deploy the application, use the `sam deploy` command. - -```bash -{{ cookiecutter.project_name }}$ sam deploy \ - --template-file packaged.yaml \ - --stack-name {{ cookiecutter.project_name }} \ - --capabilities CAPABILITY_IAM -``` - -After deployment is complete you can run the following command to retrieve the API Gateway Endpoint URL: - -```bash -{{ cookiecutter.project_name }}$ aws cloudformation describe-stacks \ - --stack-name {{ cookiecutter.project_name }} \ - --query 'Stacks[].Outputs[?OutputKey==`HelloWorldApi`]' \ - --output table -``` +You can find your API Gateway Endpoint URL in the output values displayed after deployment. ## Use the SAM CLI to build and test locally @@ -128,11 +110,10 @@ Tests are defined in the `tests` folder in this project. Use PIP to install the ## Cleanup -To delete the sample application and the bucket that you created, use the AWS CLI. +To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following: ```bash -{{ cookiecutter.project_name }}$ aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} -{{ cookiecutter.project_name }}$ aws s3 rb s3://BUCKET_NAME +aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} ``` ## Resources diff --git a/samcli/local/init/templates/cookiecutter-aws-sam-hello-ruby/{{cookiecutter.project_name}}/README.md b/samcli/local/init/templates/cookiecutter-aws-sam-hello-ruby/{{cookiecutter.project_name}}/README.md index 8c426579f1..0ffe88b084 100644 --- a/samcli/local/init/templates/cookiecutter-aws-sam-hello-ruby/{{cookiecutter.project_name}}/README.md +++ b/samcli/local/init/templates/cookiecutter-aws-sam-hello-ruby/{{cookiecutter.project_name}}/README.md @@ -23,44 +23,26 @@ The Serverless Application Model Command Line Interface (SAM CLI) is an extensio To use the SAM CLI, you need the following tools. -* AWS CLI - [Install the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) and [configure it with your AWS credentials]. * SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) * Ruby - [Install Ruby 2.5](https://www.ruby-lang.org/en/documentation/installation/) * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) -The SAM CLI uses an Amazon S3 bucket to store your application's deployment artifacts. If you don't have a bucket suitable for this purpose, create one. Replace `BUCKET_NAME` in the commands in this section with a unique bucket name. +To build and deploy your application for the first time, run the following in your shell: ```bash -{{ cookiecutter.project_name }}$ aws s3 mb s3://BUCKET_NAME +sam build +sam deploy --guided ``` -To prepare the application for deployment, use the `sam package` command. +The first command will build the source of your application. 
The second command will package and deploy your application to AWS, with a series of prompts: -```bash -{{ cookiecutter.project_name }}$ sam package \ - --output-template-file packaged.yaml \ - --s3-bucket BUCKET_NAME -``` - -The SAM CLI creates deployment packages, uploads them to the S3 bucket, and creates a new version of the template that refers to the artifacts in the bucket. +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. +* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modified IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command. +* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application. -To deploy the application, use the `sam deploy` command. - -```bash -{{ cookiecutter.project_name }}$ sam deploy \ - --template-file packaged.yaml \ - --stack-name {{ cookiecutter.project_name }} \ - --capabilities CAPABILITY_IAM -``` - -After deployment is complete you can run the following command to retrieve the API Gateway Endpoint URL: - -```bash -{{ cookiecutter.project_name }}$ aws cloudformation describe-stacks \ - --stack-name {{ cookiecutter.project_name }} \ - --query 'Stacks[].Outputs[?OutputKey==`HelloWorldApi`]' \ - --output table -``` +You can find your API Gateway Endpoint URL in the output values displayed after deployment. ## Use the SAM CLI to build and test locally @@ -123,11 +105,10 @@ Tests are defined in the `tests` folder in this project. ## Cleanup -To delete the sample application and the bucket that you created, use the AWS CLI. +To delete the sample application that you created, use the AWS CLI. 
Assuming you used your project name for the stack name, you can run the following: ```bash -{{ cookiecutter.project_name }}$ aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} -{{ cookiecutter.project_name }}$ aws s3 rb s3://BUCKET_NAME +aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} ``` ## Resources From 92a6a00e3c027965aedd00064db961cd34fbbf8f Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Fri, 22 Nov 2019 17:10:53 -0800 Subject: [PATCH 37/45] chore: release v0.33.0 (#1582) --- samcli/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/__init__.py b/samcli/__init__.py index a74a1acfe7..718184ec46 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "0.32.0" +__version__ = "0.33.0" From 65dd7326cfbf3188c66550102322a53c6476235e Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Fri, 22 Nov 2019 21:50:06 -0800 Subject: [PATCH 38/45] fix: move the sleep before applying bucket policy (#1584) --- tests/integration/publish/publish_app_integ_base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/integration/publish/publish_app_integ_base.py b/tests/integration/publish/publish_app_integ_base.py index 1b430a2375..7d975aa8c5 100644 --- a/tests/integration/publish/publish_app_integ_base.py +++ b/tests/integration/publish/publish_app_integ_base.py @@ -26,6 +26,9 @@ def setUpClass(cls): cls.s3_bucket = s3.Bucket(cls.bucket_name) cls.s3_bucket.create() + # Given 3 seconds for all the bucket creation to complete + time.sleep(3) + # Grant serverlessrepo read access to the bucket bucket_policy_template = cls.test_data_path.joinpath("s3_bucket_policy.json").read_text(encoding="utf-8") bucket_policy = bucket_policy_template.replace(cls.bucket_name_placeholder, cls.bucket_name) @@ -43,9 +46,6 @@ def setUpClass(cls): code_body = cls.test_data_path.joinpath("main.py").read_text(encoding="utf-8") cls.s3_bucket.put_object(Key="main.py", Body=code_body) - # Given 3 seconds for all the bucket creation to complete - time.sleep(3) - @classmethod def tearDownClass(cls): cls.s3_bucket.delete_objects( From 0b9d5bd252dac449f9048375cd6a8d271df83f7d Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Fri, 22 Nov 2019 22:40:31 -0800 Subject: [PATCH 39/45] fix: managed stack (#1585) * fix: managed stack - always catch ClientError and BotoCoreError - on windows, create temporary files with delete=False, otherwise it results in a PermissionDeniedError * fix: dont mask inbuilt `file` --- samcli/commands/bootstrap/exceptions.py | 11 +++ samcli/commands/deploy/command.py | 4 +- samcli/lib/bootstrap/bootstrap.py | 80 ++++++++++++---------- samcli/lib/utils/temp_file_utils.py | 26 +++++++ tests/unit/lib/bootstrap/test_bootstrap.py | 5 +- tests/unit/lib/utils/test_file_utils.py | 20 ++++++ 6 files changed, 106 insertions(+), 40 deletions(-) create mode 100644 samcli/commands/bootstrap/exceptions.py create mode 100644 samcli/lib/utils/temp_file_utils.py create mode 100644 tests/unit/lib/utils/test_file_utils.py diff --git a/samcli/commands/bootstrap/exceptions.py b/samcli/commands/bootstrap/exceptions.py new file mode 100644 index 0000000000..1b4d0ed458 --- /dev/null +++ b/samcli/commands/bootstrap/exceptions.py @@ -0,0 +1,11 @@ +""" +Exceptions that are raised by sam bootstrap +""" +from 
samcli.commands.exceptions import UserException + + +class ManagedStackError(UserException): + def __init__(self, ex): + self.ex = ex + message_fmt = f"\nFailed to create managed resources: {ex}" + super(ManagedStackError, self).__init__(message=message_fmt.format(ex=self.ex)) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 70fe3c0af6..9930a3c265 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -2,12 +2,12 @@ CLI command for "deploy" command """ import json -import tempfile import logging import click from click.types import FuncParamType +from samcli.lib.utils import temp_file_utils from samcli.cli.cli_config_file import configuration_option, TomlProvider from samcli.cli.context import get_cmd_names from samcli.cli.main import pass_context, common_options, aws_creds_options @@ -256,7 +256,7 @@ def do_cli( confirm_changeset=changeset_decision if guided else confirm_changeset, ) - with tempfile.NamedTemporaryFile() as output_template_file: + with temp_file_utils.tempfile_platform_independent() as output_template_file: with PackageContext( template_file=template_file, diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py index 0461012287..5457a564b3 100644 --- a/samcli/lib/bootstrap/bootstrap.py +++ b/samcli/lib/bootstrap/bootstrap.py @@ -3,19 +3,23 @@ """ import json +import logging + import boto3 import click from botocore.config import Config -from botocore.exceptions import ClientError, NoRegionError, NoCredentialsError +from botocore.exceptions import ClientError, BotoCoreError, NoRegionError, NoCredentialsError +from samcli.commands.bootstrap.exceptions import ManagedStackError from samcli import __version__ from samcli.cli.global_config import GlobalConfig from samcli.commands.exceptions import UserException, CredentialsError, RegionError SAM_CLI_STACK_NAME = "aws-sam-cli-managed-default" +LOG = logging.getLogger(__name__) def manage_stack(profile, region): @@ -34,46 +38,50 @@ def manage_stack(profile, region): def _create_or_get_stack(cloudformation_client): - stack = None - try: - ds_resp = cloudformation_client.describe_stacks(StackName=SAM_CLI_STACK_NAME) - stacks = ds_resp["Stacks"] - stack = stacks[0] - click.echo("\n\tLooking for resources needed for deployment: Found!") - except ClientError: - click.echo("\n\tLooking for resources needed for deployment: Not found.") - stack = _create_stack(cloudformation_client) # exceptions are not captured from subcommands - # Sanity check for non-none stack? Sanity check for tag? - tags = stack["Tags"] try: - sam_cli_tag = next(t for t in tags if t["Key"] == "ManagedStackSource") - if not sam_cli_tag["Value"] == "AwsSamCli": + stack = None + try: + ds_resp = cloudformation_client.describe_stacks(StackName=SAM_CLI_STACK_NAME) + stacks = ds_resp["Stacks"] + stack = stacks[0] + click.echo("\n\tLooking for resources needed for deployment: Found!") + except ClientError: + click.echo("\n\tLooking for resources needed for deployment: Not found.") + stack = _create_stack(cloudformation_client) # exceptions are not captured from subcommands + # Sanity check for non-none stack? Sanity check for tag? + tags = stack["Tags"] + try: + sam_cli_tag = next(t for t in tags if t["Key"] == "ManagedStackSource") + if not sam_cli_tag["Value"] == "AwsSamCli": + msg = ( + "Stack " + + SAM_CLI_STACK_NAME + + " ManagedStackSource tag shows " + + sam_cli_tag["Value"] + + " which does not match the AWS SAM CLI generated tag value of AwsSamCli. 
" + "Failing as the stack was likely not created by the AWS SAM CLI." + ) + raise UserException(msg) + except StopIteration: msg = ( - "Stack " - + SAM_CLI_STACK_NAME - + " ManagedStackSource tag shows " - + sam_cli_tag["Value"] - + " which does not match the AWS SAM CLI generated tag value of AwsSamCli. " + "Stack " + SAM_CLI_STACK_NAME + " exists, but the ManagedStackSource tag is missing. " "Failing as the stack was likely not created by the AWS SAM CLI." ) raise UserException(msg) - except StopIteration: - msg = ( - "Stack " + SAM_CLI_STACK_NAME + " exists, but the ManagedStackSource tag is missing. " - "Failing as the stack was likely not created by the AWS SAM CLI." - ) - raise UserException(msg) - outputs = stack["Outputs"] - try: - bucket_name = next(o for o in outputs if o["OutputKey"] == "SourceBucket")["OutputValue"] - except StopIteration: - msg = ( - "Stack " + SAM_CLI_STACK_NAME + " exists, but is missing the managed source bucket key. " - "Failing as this stack was likely not created by the AWS SAM CLI." - ) - raise UserException(msg) - # This bucket name is what we would write to a config file - return bucket_name + outputs = stack["Outputs"] + try: + bucket_name = next(o for o in outputs if o["OutputKey"] == "SourceBucket")["OutputValue"] + except StopIteration: + msg = ( + "Stack " + SAM_CLI_STACK_NAME + " exists, but is missing the managed source bucket key. " + "Failing as this stack was likely not created by the AWS SAM CLI." + ) + raise UserException(msg) + # This bucket name is what we would write to a config file + return bucket_name + except (ClientError, BotoCoreError) as ex: + LOG.debug("Failed to create managed resources", exc_info=ex) + raise ManagedStackError(str(ex)) def _create_stack(cloudformation_client): diff --git a/samcli/lib/utils/temp_file_utils.py b/samcli/lib/utils/temp_file_utils.py new file mode 100644 index 0000000000..20f094b024 --- /dev/null +++ b/samcli/lib/utils/temp_file_utils.py @@ -0,0 +1,26 @@ +""" +Helper functions for temporary files +""" +import os +import contextlib +import tempfile + + +def remove(path): + if path: + try: + os.remove(path) + except OSError: + pass + + +@contextlib.contextmanager +def tempfile_platform_independent(): + # NOTE(TheSriram): Setting delete=False is specific to windows. 
+ # https://docs.python.org/3/library/tempfile.html#tempfile.NamedTemporaryFile + _tempfile = tempfile.NamedTemporaryFile(delete=False) + try: + yield _tempfile + finally: + _tempfile.close() + remove(_tempfile.name) diff --git a/tests/unit/lib/bootstrap/test_bootstrap.py b/tests/unit/lib/bootstrap/test_bootstrap.py index f1653af61a..9c17c198f0 100644 --- a/tests/unit/lib/bootstrap/test_bootstrap.py +++ b/tests/unit/lib/bootstrap/test_bootstrap.py @@ -6,6 +6,7 @@ from botocore.exceptions import ClientError, NoCredentialsError, NoRegionError from botocore.stub import Stubber +from samcli.commands.bootstrap.exceptions import ManagedStackError from samcli.commands.exceptions import UserException, CredentialsError, RegionError from samcli.lib.bootstrap.bootstrap import manage_stack, _create_or_get_stack, _get_stack_template, SAM_CLI_STACK_NAME @@ -171,7 +172,7 @@ def test_change_set_creation_fails(self): } stubber.add_client_error("create_change_set", service_error_code="ClientError", expected_params=ccs_params) stubber.activate() - with self.assertRaises(ClientError): + with self.assertRaises(ManagedStackError): _create_or_get_stack(stub_cf) stubber.assert_no_pending_responses() stubber.deactivate() @@ -201,7 +202,7 @@ def test_change_set_execution_fails(self): "execute_change_set", service_error_code="InsufficientCapabilities", expected_params=ecs_params ) stubber.activate() - with self.assertRaises(ClientError): + with self.assertRaises(ManagedStackError): _create_or_get_stack(stub_cf) stubber.assert_no_pending_responses() stubber.deactivate() diff --git a/tests/unit/lib/utils/test_file_utils.py b/tests/unit/lib/utils/test_file_utils.py new file mode 100644 index 0000000000..8a23f7bc8c --- /dev/null +++ b/tests/unit/lib/utils/test_file_utils.py @@ -0,0 +1,20 @@ +import os +import tempfile +from unittest import TestCase + +from samcli.lib.utils.temp_file_utils import remove, tempfile_platform_independent + + +class TestFile(TestCase): + def test_file_remove(self): + _file = tempfile.NamedTemporaryFile(delete=False) + remove(_file.name) + self.assertFalse(os.path.exists(_file.name)) + # No Exception thrown + remove(os.path.join(os.getcwd(), "random")) + + def test_temp_file(self): + _path = None + with tempfile_platform_independent() as _tempf: + _path = _tempf.name + self.assertFalse(os.path.exists(_path)) From f7b7e554ff828cb60e12070c8ae7b6a8a3ba73c2 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Fri, 22 Nov 2019 23:00:29 -0800 Subject: [PATCH 40/45] tests: close tempfile before attempting to delete (#1586) --- tests/unit/lib/utils/test_file_utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/unit/lib/utils/test_file_utils.py b/tests/unit/lib/utils/test_file_utils.py index 8a23f7bc8c..c26eb3c7de 100644 --- a/tests/unit/lib/utils/test_file_utils.py +++ b/tests/unit/lib/utils/test_file_utils.py @@ -8,6 +8,7 @@ class TestFile(TestCase): def test_file_remove(self): _file = tempfile.NamedTemporaryFile(delete=False) + _file.close() remove(_file.name) self.assertFalse(os.path.exists(_file.name)) # No Exception thrown From ee13133561302397a207a4f7d5863cfc0a51fa8d Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Sat, 23 Nov 2019 07:47:12 -0800 Subject: [PATCH 41/45] integ tests: throttling with cloudformation (#1587) - add sleeps in between deploy integration and regression tests. 
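The change above relies on fixed sleeps between tests. Purely as an illustrative alternative, and not what this patch does, throttled CloudFormation calls can also be absorbed by botocore's built-in retry configuration; the attempt count below is an assumed value:

```python
# Illustrative sketch only: let botocore retry throttled CloudFormation calls
# with backoff instead of (or in addition to) sleeping between tests.
import boto3
from botocore.config import Config

cf_client = boto3.client(
    "cloudformation",
    config=Config(retries={"max_attempts": 10}),  # attempt count is illustrative
)
```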
--- tests/integration/deploy/test_deploy_command.py | 3 +++ tests/regression/deploy/test_deploy_regression.py | 4 +++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/integration/deploy/test_deploy_command.py b/tests/integration/deploy/test_deploy_command.py index 19d3bdd342..64a27a17a7 100644 --- a/tests/integration/deploy/test_deploy_command.py +++ b/tests/integration/deploy/test_deploy_command.py @@ -1,6 +1,7 @@ import os import tempfile import uuid +import time from subprocess import Popen, PIPE from unittest import skipIf @@ -16,6 +17,7 @@ # Deploy tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. # This is to restrict package tests to run outside of CI/CD and when the branch is not master. SKIP_DEPLOY_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI +CFN_SLEEP = 3 @skipIf(SKIP_DEPLOY_TESTS, "Skip deploy tests in CI/CD only") @@ -24,6 +26,7 @@ def setUp(self): self.cf_client = boto3.client("cloudformation") self.sns_arn = os.environ.get("AWS_SNS") self.stack_names = [] + time.sleep(CFN_SLEEP) super(TestDeploy, self).setUp() def tearDown(self): diff --git a/tests/regression/deploy/test_deploy_regression.py b/tests/regression/deploy/test_deploy_regression.py index 9a6fac4134..728b816ac0 100644 --- a/tests/regression/deploy/test_deploy_regression.py +++ b/tests/regression/deploy/test_deploy_regression.py @@ -1,6 +1,7 @@ import os import tempfile import uuid +import time from subprocess import Popen, PIPE from unittest import skipIf @@ -14,7 +15,7 @@ # Package Regression tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. # This is to restrict package tests to run outside of CI/CD and when the branch is not master. 
SKIP_DEPLOY_REGRESSION_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI - +CFN_SLEEP = 3 # Only testing return codes to be equivalent @@ -25,6 +26,7 @@ def setUp(self): self.kms_key = os.environ.get("AWS_KMS_KEY") self.stack_names = [] self.cf_client = boto3.client("cloudformation") + time.sleep(CFN_SLEEP) super(TestDeployRegression, self).setUp() def tearDown(self): From 33c9b7f1ac52fd2d4aa9c87d93a502fa71da3563 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Sat, 23 Nov 2019 10:02:08 -0800 Subject: [PATCH 42/45] fix: samconfig.toml links in `sam deploy --guided` (#1588) --- samcli/commands/bootstrap/exceptions.py | 2 +- samcli/commands/deploy/command.py | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/samcli/commands/bootstrap/exceptions.py b/samcli/commands/bootstrap/exceptions.py index 1b4d0ed458..d3d7fa88bc 100644 --- a/samcli/commands/bootstrap/exceptions.py +++ b/samcli/commands/bootstrap/exceptions.py @@ -7,5 +7,5 @@ class ManagedStackError(UserException): def __init__(self, ex): self.ex = ex - message_fmt = f"\nFailed to create managed resources: {ex}" + message_fmt = f"Failed to create managed resources: {ex}" super(ManagedStackError, self).__init__(message=message_fmt.format(ex=self.ex)) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 9930a3c265..23b37b24a5 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -436,7 +436,11 @@ def save_config(template_file, parameter_overrides, **kwargs): click.echo(f"\n\tSaved arguments to config file") click.echo("\tRunning 'sam deploy' for future deployments will use the parameters saved above.") click.echo("\tThe above parameters can be changed by modifying samconfig.toml") - click.echo("\tLearn more about samconfig.toml syntax http://url") + click.echo( + "\tLearn more about samconfig.toml syntax at " + "\n\thttps://docs.aws.amazon.com/serverless-application-model/latest/" + "developerguide/serverless-sam-cli-config.html" + ) def get_config_ctx(template_file): From 043160210e5e930c54cdc580f25def67ac981192 Mon Sep 17 00:00:00 2001 From: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Date: Sat, 23 Nov 2019 13:46:12 -0600 Subject: [PATCH 43/45] refactor: Use communicate with a timeout for build and invoke integ tests (#1571) --- appveyor.yml | 26 +- tests/integration/buildcmd/test_build_cmd.py | 89 ++++-- .../local/invoke/test_integrations_cli.py | 275 ++++++++++++------ 3 files changed, 263 insertions(+), 127 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index 5bd7850166..3d77f8d7ee 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -61,18 +61,8 @@ for: - "pylint --rcfile .pylintrc samcli" # There are some functional tests that are currently broken due to not being updated with changed code or still running with node4.3 runtimes # We need to update those but this allows us to at least runs the ones we currently have working - - "pytest tests/functional/commands/validate tests/functional/commands/cli/test_global_config.py" + - "pytest -n 4 tests/functional/commands/validate tests/functional/commands/cli/test_global_config.py" - # Runs only in Linux - - sh: "pytest -vv tests/integration" - - sh: "/tmp/black --check setup.py tests samcli scripts" - - sh: "python scripts/check-isolated-needs-update.py" - - # Smoke tests run in parallel - it runs on both Linux & Windows - # Presence of the RUN_SMOKE envvar will run the smoke tests - # Note: temporarily removing as with 
current dependencies we require syslog on windows - # which is not present on stdlib. - # - ps: "If ($env:RUN_SMOKE) {pytest -n 4 -vv tests/smoke}" - matrix: only: @@ -128,11 +118,11 @@ for: - "pylint --rcfile .pylintrc samcli" # There are some functional tests that are currently broken due to not being updated with changed code or still running with node4.3 runtimes # We need to update those but this allows us to at least runs the ones we currently have working - - "pytest tests/functional/commands/validate tests/functional/commands/cli/test_global_config.py" + - "pytest -n 4 tests/functional/commands/validate tests/functional/commands/cli/test_global_config.py" # Runs only in Linux - sh: "pytest -vv tests/integration" - - sh: "pytest -vv -n 4 tests/regression" + - sh: "pytest -vv tests/regression" - sh: "/tmp/black --check setup.py tests samcli scripts" # Set JAVA_HOME to java11 @@ -142,13 +132,3 @@ for: # Smoke tests run in parallel - it runs on both Linux & Windows # Presence of the RUN_SMOKE envvar will run the smoke tests - ps: "If ($env:RUN_SMOKE) {pytest -n 4 -vv tests/smoke}" - - - - matrix: - only: - - ONLY_SMOKE: 1 - - test_script: - # Smoke tests run in parallel - - sh: "venv/Scripts/activate" - - sh: "pytest -n 4 -vv tests/smoke" diff --git a/tests/integration/buildcmd/test_build_cmd.py b/tests/integration/buildcmd/test_build_cmd.py index 4f44590ab7..13e37aebb5 100644 --- a/tests/integration/buildcmd/test_build_cmd.py +++ b/tests/integration/buildcmd/test_build_cmd.py @@ -1,10 +1,11 @@ import sys import os -import subprocess import logging from unittest import skipIf from pathlib import Path from parameterized import parameterized +from subprocess import Popen, PIPE, TimeoutExpired + import pytest from .build_integ_base import BuildIntegBase @@ -13,6 +14,8 @@ LOG = logging.getLogger(__name__) +TIMEOUT = 300 + @skipIf( ((IS_WINDOWS and RUNNING_ON_CI) and not CI_OVERRIDE), @@ -31,6 +34,7 @@ class TestBuildCommand_PythonFunctions(BuildIntegBase): FUNCTION_LOGICAL_ID = "Function" + @pytest.mark.flaky(reruns=3) @parameterized.expand( [ ("python2.7", False), @@ -48,8 +52,12 @@ def test_with_default_requirements(self, runtime, use_container): cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides) LOG.info("Running Command: {}", cmdlist) - process = subprocess.Popen(cmdlist, cwd=self.working_dir) - process.wait() + process = Popen(cmdlist, cwd=self.working_dir) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self._verify_built_artifact( self.default_build_dir, self.FUNCTION_LOGICAL_ID, self.EXPECTED_FILES_PROJECT_MANIFEST @@ -98,15 +106,20 @@ def _get_python_version(self): "Skip build tests on windows when running in CI unless overridden", ) class TestBuildCommand_ErrorCases(BuildIntegBase): + @pytest.mark.flaky(reruns=3) def test_unsupported_runtime(self): overrides = {"Runtime": "unsupportedpython", "CodeUri": "Python"} cmdlist = self.get_command_list(parameter_overrides=overrides) LOG.info("Running Command: {}", cmdlist) - process = subprocess.Popen(cmdlist, cwd=self.working_dir, stdout=subprocess.PIPE) - process.wait() - - process_stdout = b"".join(process.stdout.readlines()).strip().decode("utf-8") + process = Popen(cmdlist, cwd=self.working_dir, stdout=PIPE) + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip().decode("utf-8") self.assertEqual(1, process.returncode) self.assertIn("Build Failed", 
process_stdout) @@ -124,6 +137,7 @@ class TestBuildCommand_NodeFunctions(BuildIntegBase): FUNCTION_LOGICAL_ID = "Function" + @pytest.mark.flaky(reruns=3) @parameterized.expand( [ ("nodejs6.10", False), @@ -141,8 +155,12 @@ def test_with_default_package_json(self, runtime, use_container): cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides) LOG.info("Running Command: {}", cmdlist) - process = subprocess.Popen(cmdlist, cwd=self.working_dir) - process.wait() + process = Popen(cmdlist, cwd=self.working_dir) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self._verify_built_artifact( self.default_build_dir, @@ -198,11 +216,11 @@ class TestBuildCommand_RubyFunctions(BuildIntegBase): FUNCTION_LOGICAL_ID = "Function" @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") @parameterized.expand([("ruby2.5")]) def test_building_ruby_in_container(self, runtime): self._test_with_default_gemfile(runtime, "use_container") + @pytest.mark.flaky(reruns=3) @parameterized.expand([("ruby2.5")]) def test_building_ruby_in_process(self, runtime): self._test_with_default_gemfile(runtime, False) @@ -212,8 +230,12 @@ def _test_with_default_gemfile(self, runtime, use_container): cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides) LOG.info("Running Command: {}".format(cmdlist)) - process = subprocess.Popen(cmdlist, cwd=self.working_dir) - process.wait() + process = Popen(cmdlist, cwd=self.working_dir) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self._verify_built_artifact( self.default_build_dir, @@ -283,7 +305,6 @@ class TestBuildCommand_Java(BuildIntegBase): UNIX_LINE_ENDING = b"\n" @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") @parameterized.expand( [ ("java8", USING_GRADLE_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE), @@ -301,6 +322,7 @@ class TestBuildCommand_Java(BuildIntegBase): def test_building_java_in_container(self, runtime, code_path, expected_files): self._test_with_building_java(runtime, code_path, expected_files, "use_container") + @pytest.mark.flaky(reruns=3) @parameterized.expand( [ ("java8", USING_GRADLE_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE), @@ -313,6 +335,7 @@ def test_building_java_in_container(self, runtime, code_path, expected_files): def test_building_java8_in_process(self, runtime, code_path, expected_files): self._test_with_building_java(runtime, code_path, expected_files, False) + @pytest.mark.flaky(reruns=3) @parameterized.expand( [ ("java11", USING_GRADLE_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE), @@ -333,8 +356,12 @@ def _test_with_building_java(self, runtime, code_path, expected_files, use_conta self._change_to_unix_line_ending(os.path.join(self.test_data_path, self.USING_GRADLEW_PATH, "gradlew")) LOG.info("Running Command: {}".format(cmdlist)) - process = subprocess.Popen(cmdlist, cwd=self.working_dir) - process.wait() + process = Popen(cmdlist, cwd=self.working_dir) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self._verify_built_artifact( self.default_build_dir, self.FUNCTION_LOGICAL_ID, expected_files, self.EXPECTED_DEPENDENCIES @@ -408,6 +435,7 @@ class TestBuildCommand_Dotnet_cli_package(BuildIntegBase): "HelloWorld.dll", } + @pytest.mark.flaky(reruns=3) @parameterized.expand( [ ("dotnetcore2.0", "Dotnetcore2.0", None), @@ -431,8 +459,12 @@ def test_with_dotnetcore(self, runtime, 
code_uri, mode): if mode: newenv["SAM_BUILD_MODE"] = mode - process = subprocess.Popen(cmdlist, cwd=self.working_dir, env=newenv) - process.wait() + process = Popen(cmdlist, cwd=self.working_dir, env=newenv) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self._verify_built_artifact( self.default_build_dir, self.FUNCTION_LOGICAL_ID, self.EXPECTED_FILES_PROJECT_MANIFEST @@ -455,6 +487,7 @@ def test_with_dotnetcore(self, runtime, code_uri, mode): self.verify_docker_container_cleanedup(runtime) + @pytest.mark.flaky(reruns=3) @parameterized.expand([("dotnetcore2.0", "Dotnetcore2.0"), ("dotnetcore2.1", "Dotnetcore2.1")]) def test_must_fail_with_container(self, runtime, code_uri): use_container = True @@ -466,8 +499,12 @@ def test_must_fail_with_container(self, runtime, code_uri): cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides) LOG.info("Running Command: {}".format(cmdlist)) - process = subprocess.Popen(cmdlist, cwd=self.working_dir) - process.wait() + process = Popen(cmdlist, cwd=self.working_dir) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise # Must error out, because container builds are not supported self.assertEqual(process.returncode, 1) @@ -507,16 +544,18 @@ class TestBuildCommand_SingleFunctionBuilds(BuildIntegBase): "requirements.txt", } + @pytest.mark.flaky(reruns=3) def test_function_not_found(self): overrides = {"Runtime": "python3.7", "CodeUri": "Python", "Handler": "main.handler"} cmdlist = self.get_command_list(parameter_overrides=overrides, function_identifier="FunctionNotInTemplate") - process = subprocess.Popen(cmdlist, cwd=self.working_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout, stderr = process.communicate() + process = Popen(cmdlist, cwd=self.working_dir, stderr=PIPE) + _, stderr = process.communicate(timeout=TIMEOUT) self.assertEqual(process.returncode, 1) self.assertIn("FunctionNotInTemplate not found", str(stderr.decode("utf8"))) + @pytest.mark.flaky(reruns=3) @parameterized.expand( [ ("python3.7", False, "FunctionOne"), @@ -532,8 +571,12 @@ def test_build_single_function(self, runtime, use_container, function_identifier ) LOG.info("Running Command: {}", cmdlist) - process = subprocess.Popen(cmdlist, cwd=self.working_dir) - process.wait() + process = Popen(cmdlist, cwd=self.working_dir) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self._verify_built_artifact(self.default_build_dir, function_identifier, self.EXPECTED_FILES_PROJECT_MANIFEST) diff --git a/tests/integration/local/invoke/test_integrations_cli.py b/tests/integration/local/invoke/test_integrations_cli.py index f9c6d66587..d0ebb4a9b2 100644 --- a/tests/integration/local/invoke/test_integrations_cli.py +++ b/tests/integration/local/invoke/test_integrations_cli.py @@ -6,7 +6,7 @@ from unittest import skipIf from nose_parameterized import parameterized -from subprocess import Popen, PIPE +from subprocess import Popen, PIPE, TimeoutExpired from timeit import default_timer as timer import pytest import docker @@ -21,59 +21,75 @@ from pathlib import Path +TIMEOUT = 300 + class TestSamPython36HelloWorldIntegration(InvokeIntegBase): template = Path("template.yml") @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_returncode_is_zero(self): command_list = self.get_command_list( "HelloWorldServerlessFunction", template_path=self.template_path, event_path=self.event_path 
) process = Popen(command_list, stdout=PIPE) - return_code = process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_function_with_metadata(self): command_list = self.get_command_list("FunctionWithMetadata", template_path=self.template_path, no_event=True) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '"Hello World in a different dir"') @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_returns_execpted_results(self): command_list = self.get_command_list( "HelloWorldServerlessFunction", template_path=self.template_path, event_path=self.event_path ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '"Hello world"') @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_of_lambda_function(self): command_list = self.get_command_list( "HelloWorldLambdaFunction", template_path=self.template_path, event_path=self.event_path ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '"Hello world"') @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") @parameterized.expand( [("TimeoutFunction"), ("TimeoutFunctionWithParameter"), ("TimeoutFunctionWithStringParameter")] ) @@ -84,18 +100,23 @@ def test_invoke_with_timeout_set(self, function_name): start = timer() process = Popen(command_list, stdout=PIPE) - return_code = process.wait() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + end = timer() wall_clock_cli_duration = end - start - process_stdout = b"".join(process.stdout.readlines()).strip() + process_stdout = stdout.strip() # validate the time of the cli (timeout is set to 5s) self.assertGreater(wall_clock_cli_duration, 5) self.assertLess(wall_clock_cli_duration, 20) - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) self.assertEqual( process_stdout.decode("utf-8"), "", @@ -103,7 +124,6 @@ def test_invoke_with_timeout_set(self, function_name): ) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_with_env_vars(self): command_list = self.get_command_list( "EchoCustomEnvVarFunction", @@ -113,53 +133,66 @@ def test_invoke_with_env_vars(self): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() 
self.assertEqual(process_stdout.decode("utf-8"), '"MyVar"') @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_when_function_writes_stdout(self): command_list = self.get_command_list( "WriteToStdoutFunction", template_path=self.template_path, event_path=self.event_path ) process = Popen(command_list, stdout=PIPE, stderr=PIPE) - process.wait() + try: + stdout, stderr = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stdout = b"".join(process.stdout.readlines()).strip() - process_stderr = b"".join(process.stderr.readlines()).strip() + process_stdout = stdout.strip() + process_stderr = stderr.strip() self.assertIn("Docker Lambda is writing to stdout", process_stderr.decode("utf-8")) self.assertIn("wrote to stdout", process_stdout.decode("utf-8")) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_when_function_writes_stderr(self): command_list = self.get_command_list( "WriteToStderrFunction", template_path=self.template_path, event_path=self.event_path ) process = Popen(command_list, stderr=PIPE) - process.wait() + try: + _, stderr = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stderr = b"".join(process.stderr.readlines()).strip() + process_stderr = stderr.strip() self.assertIn("Docker Lambda is writing to stderr", process_stderr.decode("utf-8")) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_returns_expected_result_when_no_event_given(self): command_list = self.get_command_list("EchoEventFunction", template_path=self.template_path) process = Popen(command_list, stdout=PIPE) - return_code = process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) self.assertEqual("{}", process_stdout.decode("utf-8")) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_with_env_using_parameters(self): command_list = self.get_command_list( "EchoEnvWithParameters", @@ -169,8 +202,13 @@ def test_invoke_with_env_using_parameters(self): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() environ = json.loads(process_stdout.decode("utf-8")) self.assertEqual(environ["Region"], "us-east-1") @@ -187,7 +225,6 @@ def test_invoke_with_env_using_parameters(self): self.assertEqual(environ["MyRuntimeVersion"], "v0") @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_with_env_using_parameters_with_custom_region(self): custom_region = "my-custom-region" @@ -196,14 +233,18 @@ def test_invoke_with_env_using_parameters_with_custom_region(self): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() environ = json.loads(process_stdout.decode("utf-8")) self.assertEqual(environ["Region"], custom_region) @pytest.mark.flaky(reruns=3) - 
@pytest.mark.timeout(timeout=300, method="thread") def test_invoke_with_env_with_aws_creds(self): custom_region = "my-custom-region" key = "key" @@ -222,8 +263,13 @@ def test_invoke_with_env_with_aws_creds(self): env["AWS_SESSION_TOKEN"] = session process = Popen(command_list, stdout=PIPE, env=env) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() environ = json.loads(process_stdout.decode("utf-8")) self.assertEqual(environ["AWS_DEFAULT_REGION"], custom_region) @@ -233,7 +279,6 @@ def test_invoke_with_env_with_aws_creds(self): self.assertEqual(environ["AWS_SESSION_TOKEN"], session) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_with_docker_network_of_host(self): command_list = self.get_command_list( "HelloWorldServerlessFunction", @@ -243,12 +288,15 @@ def test_invoke_with_docker_network_of_host(self): ) process = Popen(command_list, stdout=PIPE) - return_code = process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") @skipIf(IS_WINDOWS, "The test hangs on Windows due to trying to attach to a non-existing network") def test_invoke_with_docker_network_of_host_in_env_var(self): command_list = self.get_command_list( @@ -259,13 +307,17 @@ def test_invoke_with_docker_network_of_host_in_env_var(self): env["SAM_DOCKER_NETWORK"] = "non-existing-network" process = Popen(command_list, stderr=PIPE, env=env) - process.wait() - process_stderr = b"".join(process.stderr.readlines()).strip() + try: + _, stderr = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stderr = stderr.strip() self.assertIn('Not Found ("network non-existing-network not found")', process_stderr.decode("utf-8")) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_sam_template_file_env_var_set(self): command_list = self.get_command_list("HelloWorldFunctionInNonDefaultTemplate", event_path=self.event_path) @@ -274,13 +326,18 @@ def test_sam_template_file_env_var_set(self): env["SAM_TEMPLATE_FILE"] = str(self.test_data_path.joinpath("invoke", "sam-template.yaml")) process = Popen(command_list, stdout=PIPE, env=env) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '"Hello world"') @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") + @pytest.mark.timeout(timeout=TIMEOUT, method="thread") def test_skip_pull_image_in_env_var(self): docker.from_env().api.pull("lambci/lambda:python3.6") @@ -292,8 +349,13 @@ def test_skip_pull_image_in_env_var(self): env["SAM_SKIP_PULL_IMAGE"] = "True" process = Popen(command_list, stderr=PIPE, env=env) - process.wait() - process_stderr = b"".join(process.stderr.readlines()).strip() + try: + _, stderr = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stderr = stderr.strip() self.assertIn("Requested to skip pulling images", process_stderr.decode("utf-8")) @@ -307,7 +369,6 @@ def 
tearDown(self): shutil.rmtree(self.config_dir, ignore_errors=True) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_existing_env_variables_precedence_over_profiles(self): profile = "default" custom_config = self._create_config_file(profile) @@ -331,8 +392,13 @@ def test_existing_env_variables_precedence_over_profiles(self): env["AWS_SHARED_CREDENTIALS_FILE"] = custom_cred process = Popen(command_list, stdout=PIPE, env=env) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() environ = json.loads(process_stdout.decode("utf-8")) # Environment variables we explicitly set take priority over profiles. @@ -343,7 +409,6 @@ def test_existing_env_variables_precedence_over_profiles(self): self.assertEqual(environ["AWS_SESSION_TOKEN"], "priority_secret_token") @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_default_profile_with_custom_configs(self): profile = "default" custom_config = self._create_config_file(profile) @@ -365,8 +430,13 @@ def test_default_profile_with_custom_configs(self): env["AWS_SHARED_CREDENTIALS_FILE"] = custom_cred process = Popen(command_list, stdout=PIPE, env=env) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() environ = json.loads(process_stdout.decode("utf-8")) self.assertEqual(environ["AWS_DEFAULT_REGION"], "us-west-1") @@ -376,7 +446,6 @@ def test_default_profile_with_custom_configs(self): self.assertEqual(environ["AWS_SESSION_TOKEN"], "sessiontoken") @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_custom_profile_with_custom_configs(self): custom_config = self._create_config_file("custom") custom_cred = self._create_cred_file("custom") @@ -397,8 +466,13 @@ def test_custom_profile_with_custom_configs(self): env["AWS_SHARED_CREDENTIALS_FILE"] = custom_cred process = Popen(command_list, stdout=PIPE, env=env) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() environ = json.loads(process_stdout.decode("utf-8")) self.assertEqual(environ["AWS_DEFAULT_REGION"], "us-west-1") @@ -408,7 +482,6 @@ def test_custom_profile_with_custom_configs(self): self.assertEqual(environ["AWS_SESSION_TOKEN"], "sessiontoken") @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_custom_profile_through_envrionment_variables(self): # When using a custom profile in a custom location, you need both the config # and credential file otherwise we fail to find a region or the profile (depending @@ -434,8 +507,12 @@ def test_custom_profile_through_envrionment_variables(self): env["AWS_PROFILE"] = "custom" process = Popen(command_list, stdout=PIPE, env=env) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() environ = json.loads(process_stdout.decode("utf-8")) self.assertEqual(environ["AWS_DEFAULT_REGION"], "us-west-1") @@ -523,9 +600,13 @@ def 
test_reference_of_layer_version(self, function_logical_id): ) process = Popen(command_list, stdout=PIPE) - process.wait() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stdout = b"".join(process.stdout.readlines()).strip() + process_stdout = stdout.strip() expected_output = '"This is a Layer Ping from simple_python"' @@ -543,9 +624,13 @@ def test_download_one_layer(self, function_logical_id): ) process = Popen(command_list, stdout=PIPE) - process.wait() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stdout = b"".join(process.stdout.readlines()[-1:]).strip() + process_stdout = stdout.split(os.linesep)[-1:].strip() expected_output = '"Layer1"' self.assertEqual(process_stdout.decode("utf-8"), expected_output) @@ -565,9 +650,13 @@ def test_publish_changed_download_layer(self, function_logical_id): ) process = Popen(command_list, stdout=PIPE) - process.wait() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stdout = b"".join(process.stdout.readlines()[-1:]).strip() + process_stdout = stdout.split(os.linesep).strip() expected_output = '"Layer1"' self.assertEqual(process_stdout.decode("utf-8"), expected_output) @@ -586,9 +675,13 @@ def test_publish_changed_download_layer(self, function_logical_id): ) process = Popen(command_list, stdout=PIPE) - process.wait() + try: + stdout, _ = process.communicate() + except TimeoutExpired: + process.kill() + raise - process_stdout = b"".join(process.stdout.readlines()[-1:]).strip() + process_stdout = stdout.split(os.linesep).strip() expected_output = '"Changed_Layer_1"' self.assertEqual(process_stdout.decode("utf-8"), expected_output) @@ -606,11 +699,15 @@ def test_download_two_layers(self, function_logical_id): ) process = Popen(command_list, stdout=PIPE) - process.wait() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - stdout = process.stdout.readlines() + stdout = stdout - process_stdout = b"".join(stdout[-1:]).strip() + process_stdout = stdout.split(os.linesep).strip() expected_output = '"Layer2"' self.assertEqual(process_stdout.decode("utf-8"), expected_output) @@ -627,7 +724,11 @@ def test_caching_two_layers(self): ) process = Popen(command_list, stdout=PIPE) - process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self.assertEqual(2, len(os.listdir(str(self.layer_cache)))) @@ -645,7 +746,11 @@ def test_caching_two_layers_with_layer_cache_env_set(self): env["SAM_LAYER_CACHE_BASEDIR"] = str(self.layer_cache) process = Popen(command_list, stdout=PIPE, env=env) - process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self.assertEqual(2, len(os.listdir(str(self.layer_cache)))) @@ -680,9 +785,13 @@ def test_layer_does_not_exist(self): ) process = Popen(command_list, stderr=PIPE) - process.wait() + try: + _, stderr = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stderr = b"".join(process.stderr.readlines()).strip() + process_stderr = stderr.strip() error_output = process_stderr.decode("utf-8") expected_error_output = "{} was not found.".format(non_existent_layer_arn) @@ -699,9 +808,13 @@ def test_account_does_not_exist_for_layer(self): ) process = Popen(command_list, stderr=PIPE) - process.wait() + try: + _, stderr = 
process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stderr = b"".join(process.stderr.readlines()).strip() + process_stderr = stderr.strip() error_output = process_stderr.decode("utf-8") expected_error_output = ( From f2e66d84872d0f42ca3f49db8997f471af2075f3 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Sat, 23 Nov 2019 13:14:37 -0800 Subject: [PATCH 44/45] fix: move tests to a `static` bucket solution (#1589) * fix: move tests to a `static` bucket solution - s3 bucket is created is appropriate permissions and exposed via environment variable. - serverlessrepo.amazonaws.com has access to the static s3 bucket. * integ-tests: create s3 bucket if BYOB bucket doesnt exist --- .../integration/package/package_integ_base.py | 18 +++++++---- .../publish/publish_app_integ_base.py | 32 +++++++++++-------- .../integration/publish/test_command_integ.py | 2 ++ .../testdata/publish/s3_bucket_policy.json | 2 +- .../deploy/regression_deploy_base.py | 7 ---- .../package/regression_package_base.py | 22 +++++++------ 6 files changed, 46 insertions(+), 37 deletions(-) diff --git a/tests/integration/package/package_integ_base.py b/tests/integration/package/package_integ_base.py index b39115eb30..88e966419a 100644 --- a/tests/integration/package/package_integ_base.py +++ b/tests/integration/package/package_integ_base.py @@ -7,23 +7,26 @@ import boto3 +S3_SLEEP = 3 + class PackageIntegBase(TestCase): @classmethod def setUpClass(cls): cls.region_name = os.environ.get("AWS_DEFAULT_REGION") - cls.bucket_name = str(uuid.uuid4()) + cls.pre_created_bucket = os.environ.get("AWS_S3", False) + cls.bucket_name = cls.pre_created_bucket if cls.pre_created_bucket else str(uuid.uuid4()) cls.test_data_path = Path(__file__).resolve().parents[1].joinpath("testdata", "package") - # Create S3 bucket + # Intialize S3 client s3 = boto3.resource("s3") # Use a pre-created KMS Key cls.kms_key = os.environ.get("AWS_KMS_KEY") + # Use a pre-created S3 Bucket if present else create a new one cls.s3_bucket = s3.Bucket(cls.bucket_name) - cls.s3_bucket.create() - - # Given 3 seconds for all the bucket creation to complete - time.sleep(3) + if not cls.pre_created_bucket: + cls.s3_bucket.create() + time.sleep(S3_SLEEP) def setUp(self): super(PackageIntegBase, self).setUp() @@ -34,7 +37,8 @@ def tearDown(self): @classmethod def tearDownClass(cls): cls.s3_bucket.objects.all().delete() - cls.s3_bucket.delete() + if not cls.pre_created_bucket: + cls.s3_bucket.delete() def base_command(self): command = "sam" diff --git a/tests/integration/publish/publish_app_integ_base.py b/tests/integration/publish/publish_app_integ_base.py index 7d975aa8c5..cf0a1e2d12 100644 --- a/tests/integration/publish/publish_app_integ_base.py +++ b/tests/integration/publish/publish_app_integ_base.py @@ -2,37 +2,42 @@ import json import uuid import shutil -import tempfile import time +import tempfile from unittest import TestCase import boto3 from pathlib import Path +S3_SLEEP = 3 + class PublishAppIntegBase(TestCase): @classmethod def setUpClass(cls): cls.region_name = os.environ.get("AWS_DEFAULT_REGION") - cls.bucket_name = str(uuid.uuid4()) + cls.pre_created_bucket = os.environ.get("AWS_S3", False) + cls.bucket_name = cls.pre_created_bucket if cls.pre_created_bucket else str(uuid.uuid4()) cls.bucket_name_placeholder = "" cls.application_name_placeholder = "" cls.temp_dir = Path(tempfile.mkdtemp()) cls.test_data_path = 
Path(__file__).resolve().parents[1].joinpath("testdata", "publish") cls.sar_client = boto3.client("serverlessrepo", region_name=cls.region_name) - # Create S3 bucket + # Intialize S3 client s3 = boto3.resource("s3") + # Use a pre-created S3 Bucket if present else create a new one cls.s3_bucket = s3.Bucket(cls.bucket_name) - cls.s3_bucket.create() - - # Given 3 seconds for all the bucket creation to complete - time.sleep(3) - - # Grant serverlessrepo read access to the bucket - bucket_policy_template = cls.test_data_path.joinpath("s3_bucket_policy.json").read_text(encoding="utf-8") - bucket_policy = bucket_policy_template.replace(cls.bucket_name_placeholder, cls.bucket_name) - cls.s3_bucket.Policy().put(Policy=bucket_policy) + if not cls.pre_created_bucket: + cls.s3_bucket.create() + # Wait for bucket to be created. + time.sleep(S3_SLEEP) + # Grant serverlessrepo read access to the bucket + bucket_policy_template = cls.test_data_path.joinpath("s3_bucket_policy.json").read_text(encoding="utf-8") + bucket_policy = bucket_policy_template.replace(cls.bucket_name_placeholder, cls.bucket_name) + cls.s3_bucket.Policy().put(Policy=bucket_policy) + # Wait for bucket policy to be applied. + time.sleep(S3_SLEEP) # Upload test files to S3 root_path = Path(__file__).resolve().parents[3] @@ -53,7 +58,8 @@ def tearDownClass(cls): "Objects": [{"Key": "LICENSE"}, {"Key": "README.md"}, {"Key": "README_UPDATE.md"}, {"Key": "main.py"}] } ) - cls.s3_bucket.delete() + if not cls.pre_created_bucket: + cls.s3_bucket.delete() @classmethod def replace_template_placeholder(cls, placeholder, replace_text): diff --git a/tests/integration/publish/test_command_integ.py b/tests/integration/publish/test_command_integ.py index 0b509b9d73..deb49407c5 100644 --- a/tests/integration/publish/test_command_integ.py +++ b/tests/integration/publish/test_command_integ.py @@ -87,6 +87,8 @@ class TestPublishNewApp(PublishAppIntegBase): def setUp(self): super(TestPublishNewApp, self).setUp() self.application_id = None + # Sleep for a little bit to make server happy + time.sleep(2) def tearDown(self): super(TestPublishNewApp, self).tearDown() diff --git a/tests/integration/testdata/publish/s3_bucket_policy.json b/tests/integration/testdata/publish/s3_bucket_policy.json index 1eb9115c3c..abf3a0cc64 100644 --- a/tests/integration/testdata/publish/s3_bucket_policy.json +++ b/tests/integration/testdata/publish/s3_bucket_policy.json @@ -10,4 +10,4 @@ "Resource": "arn:aws:s3:::/*" } ] -} +} \ No newline at end of file diff --git a/tests/regression/deploy/regression_deploy_base.py b/tests/regression/deploy/regression_deploy_base.py index c564128243..dd8acbc2da 100644 --- a/tests/regression/deploy/regression_deploy_base.py +++ b/tests/regression/deploy/regression_deploy_base.py @@ -1,14 +1,7 @@ import os -import uuid -import json -import tempfile -import time -from pathlib import Path from subprocess import Popen, PIPE from unittest import TestCase -import boto3 - class DeployRegressionBase(TestCase): @classmethod diff --git a/tests/regression/package/regression_package_base.py b/tests/regression/package/regression_package_base.py index 7f80dbb1ba..6827c190a9 100644 --- a/tests/regression/package/regression_package_base.py +++ b/tests/regression/package/regression_package_base.py @@ -1,34 +1,38 @@ import os -import uuid import json -import tempfile import time +import tempfile +import uuid from pathlib import Path from subprocess import Popen, PIPE from unittest import TestCase import boto3 +S3_SLEEP = 3 + class PackageRegressionBase(TestCase): 
@classmethod def setUpClass(cls): cls.region_name = os.environ.get("AWS_DEFAULT_REGION") - cls.bucket_name = str(uuid.uuid4()) + cls.pre_created_bucket = os.environ.get("AWS_S3", False) + cls.bucket_name = cls.pre_created_bucket if cls.pre_created_bucket else str(uuid.uuid4()) cls.test_data_path = Path(__file__).resolve().parents[2].joinpath("integration", "testdata", "package") - # Create S3 bucket + # Intialize S3 client s3 = boto3.resource("s3") + # Use a pre-created S3 Bucket if present else create a new one cls.s3_bucket = s3.Bucket(cls.bucket_name) - cls.s3_bucket.create() - - # Given 3 seconds for all the bucket creation to complete - time.sleep(3) + if not cls.pre_created_bucket: + cls.s3_bucket.create() + time.sleep(S3_SLEEP) @classmethod def tearDownClass(cls): cls.s3_bucket.objects.all().delete() - cls.s3_bucket.delete() + if not cls.pre_created_bucket: + cls.s3_bucket.delete() def base_command(self, base): command = [base] From aec0809040af5884fe389df30452ea72c9d28e74 Mon Sep 17 00:00:00 2001 From: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Date: Sat, 23 Nov 2019 18:15:08 -0600 Subject: [PATCH 45/45] refactor: move all process.wait to process.communicate (#1590) * refactor: move all process.wait to process.communicate * Fix layer integ tests * Strip before spliting string --- .../integration/deploy/test_deploy_command.py | 69 ++++++++++++++---- tests/integration/init/test_init_command.py | 44 ++++++++--- .../local/generate_event/test_cli_integ.py | 4 +- .../invoke/runtimes/test_with_runtime_zips.py | 28 ++++--- .../local/invoke/test_integrations_cli.py | 16 ++-- .../package/test_package_command.py | 73 ++++++++++++++----- .../integration/publish/test_command_integ.py | 51 +++++++++---- .../telemetry/test_installed_metric.py | 20 ++--- tests/integration/telemetry/test_prompt.py | 8 +- .../telemetry/test_telemetry_contract.py | 29 ++++---- .../deploy/regression_deploy_base.py | 16 +++- .../deploy/test_deploy_regression.py | 9 ++- .../package/regression_package_base.py | 15 +++- .../package/test_package_regression.py | 3 - 14 files changed, 268 insertions(+), 117 deletions(-) diff --git a/tests/integration/deploy/test_deploy_command.py b/tests/integration/deploy/test_deploy_command.py index 64a27a17a7..bd2b92970e 100644 --- a/tests/integration/deploy/test_deploy_command.py +++ b/tests/integration/deploy/test_deploy_command.py @@ -2,7 +2,7 @@ import tempfile import uuid import time -from subprocess import Popen, PIPE +from subprocess import Popen, PIPE, TimeoutExpired from unittest import skipIf import boto3 @@ -18,6 +18,7 @@ # This is to restrict package tests to run outside of CI/CD and when the branch is not master. 
SKIP_DEPLOY_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI CFN_SLEEP = 3 +TIMEOUT = 300 @skipIf(SKIP_DEPLOY_TESTS, "Skip deploy tests in CI/CD only") @@ -44,7 +45,11 @@ def test_package_and_deploy_no_s3_bucket_all_args(self, template_file): ) package_process = Popen(package_command_list, stdout=PIPE) - package_process.wait() + try: + package_process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + package_process.kill() + raise self.assertEqual(package_process.returncode, 0) @@ -67,7 +72,11 @@ def test_package_and_deploy_no_s3_bucket_all_args(self, template_file): ) deploy_process_no_execute = Popen(deploy_command_list_no_execute, stdout=PIPE) - deploy_process_no_execute.wait() + try: + deploy_process_no_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_no_execute.kill() + raise self.assertEqual(deploy_process_no_execute.returncode, 0) # Deploy the given stack with the changeset. @@ -84,7 +93,11 @@ def test_package_and_deploy_no_s3_bucket_all_args(self, template_file): ) deploy_process = Popen(deploy_command_list_execute, stdout=PIPE) - deploy_process.wait() + try: + deploy_process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process.kill() + raise self.assertEqual(deploy_process.returncode, 0) @parameterized.expand(["aws-serverless-function.yaml"]) @@ -111,7 +124,11 @@ def test_no_package_and_deploy_with_s3_bucket_all_args(self, template_file): ) deploy_process_execute = Popen(deploy_command_list, stdout=PIPE) - deploy_process_execute.wait() + try: + deploy_process_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_execute.kill() + raise self.assertEqual(deploy_process_execute.returncode, 0) @parameterized.expand(["aws-serverless-function.yaml"]) @@ -138,7 +155,7 @@ def test_no_package_and_deploy_with_s3_bucket_all_args_confirm_changeset(self, t ) deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE, stdin=PIPE) - deploy_process_execute.communicate("Y".encode()) + deploy_process_execute.communicate("Y".encode(), timeout=TIMEOUT) self.assertEqual(deploy_process_execute.returncode, 0) @parameterized.expand(["aws-serverless-function.yaml"]) @@ -163,10 +180,14 @@ def test_deploy_without_s3_bucket(self, template_file): ) deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) - deploy_process_execute.wait() + try: + _, stderr = deploy_process_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_execute.kill() + raise # Error asking for s3 bucket self.assertEqual(deploy_process_execute.returncode, 1) - stderr = b"".join(deploy_process_execute.stderr.readlines()).strip() + stderr = stderr.strip() self.assertIn( bytes( f"S3 Bucket not specified, use --s3-bucket to specify a bucket name or run sam deploy --guided", @@ -194,7 +215,11 @@ def test_deploy_without_stack_name(self, template_file): ) deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) - deploy_process_execute.wait() + try: + deploy_process_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_execute.kill() + raise # Error no stack name present self.assertEqual(deploy_process_execute.returncode, 2) @@ -219,7 +244,11 @@ def test_deploy_without_capabilities(self, template_file): ) deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) - deploy_process_execute.wait() + try: + deploy_process_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_execute.kill() + raise # Error 
capabilities not specified self.assertEqual(deploy_process_execute.returncode, 1) @@ -241,7 +270,11 @@ def test_deploy_without_template_file(self, template_file): ) deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) - deploy_process_execute.wait() + try: + deploy_process_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_execute.kill() + raise # Error template file not specified self.assertEqual(deploy_process_execute.returncode, 1) @@ -268,7 +301,11 @@ def test_deploy_with_s3_bucket_switch_region(self, template_file): ) deploy_process_execute = Popen(deploy_command_list, stdout=PIPE) - deploy_process_execute.wait() + try: + deploy_process_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_execute.kill() + raise # Deploy should succeed self.assertEqual(deploy_process_execute.returncode, 0) @@ -290,10 +327,14 @@ def test_deploy_with_s3_bucket_switch_region(self, template_file): ) deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) - deploy_process_execute.wait() + try: + _, stderr = deploy_process_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_execute.kill() + raise # Deploy should fail, asking for s3 bucket self.assertEqual(deploy_process_execute.returncode, 1) - stderr = b"".join(deploy_process_execute.stderr.readlines()).strip() + stderr = stderr.strip() self.assertIn( bytes( f"Error: Failed to create/update stack {stack_name} : " diff --git a/tests/integration/init/test_init_command.py b/tests/integration/init/test_init_command.py index bad04e227f..cb1499ab83 100644 --- a/tests/integration/init/test_init_command.py +++ b/tests/integration/init/test_init_command.py @@ -1,8 +1,10 @@ from unittest import TestCase -from subprocess import Popen +from subprocess import Popen, TimeoutExpired import os import tempfile +TIMEOUT = 300 + class TestBasicInitCommand(TestCase): def test_init_command_passes_and_dir_created(self): @@ -24,9 +26,13 @@ def test_init_command_passes_and_dir_created(self): temp, ] ) - return_code = process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) self.assertTrue(os.path.isdir(temp + "/sam-app")) def test_init_new_app_template(self): @@ -48,9 +54,13 @@ def test_init_new_app_template(self): temp, ] ) - return_code = process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) self.assertTrue(os.path.isdir(temp + "/qs-scratch")) def test_init_command_java_maven(self): @@ -72,9 +82,13 @@ def test_init_command_java_maven(self): temp, ] ) - return_code = process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) self.assertTrue(os.path.isdir(temp + "/sam-app-maven")) def test_init_command_java_gradle(self): @@ -96,9 +110,13 @@ def test_init_command_java_gradle(self): temp, ] ) - return_code = process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) self.assertTrue(os.path.isdir(temp + "/sam-app-gradle")) def test_init_command_with_extra_context_parameter(self): @@ -122,9 +140,13 @@ def 
test_init_command_with_extra_context_parameter(self): temp, ] ) - return_code = process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) self.assertTrue(os.path.isdir(temp + "/sam-app-maven")) @staticmethod diff --git a/tests/integration/local/generate_event/test_cli_integ.py b/tests/integration/local/generate_event/test_cli_integ.py index a5f33885b1..f5b463ad57 100644 --- a/tests/integration/local/generate_event/test_cli_integ.py +++ b/tests/integration/local/generate_event/test_cli_integ.py @@ -6,8 +6,8 @@ class Test_EventGeneration_Integ(TestCase): def test_generate_event_substitution(self): process = Popen([Test_EventGeneration_Integ._get_command(), "local", "generate-event", "s3", "put"]) - return_code = process.wait() - self.assertEqual(return_code, 0) + process.communicate() + self.assertEqual(process.returncode, 0) @staticmethod def _get_command(): diff --git a/tests/integration/local/invoke/runtimes/test_with_runtime_zips.py b/tests/integration/local/invoke/runtimes/test_with_runtime_zips.py index 3b6177e1ad..88581995fa 100644 --- a/tests/integration/local/invoke/runtimes/test_with_runtime_zips.py +++ b/tests/integration/local/invoke/runtimes/test_with_runtime_zips.py @@ -3,13 +3,15 @@ import os import tempfile -from subprocess import Popen, PIPE +from subprocess import Popen, PIPE, TimeoutExpired from nose_parameterized import parameterized, param import pytest from tests.integration.local.invoke.invoke_integ_base import InvokeIntegBase from pathlib import Path +TIMEOUT = 300 + class TestWithDifferentLambdaRuntimeZips(InvokeIntegBase): template = Path("runtimes", "template.yaml") @@ -35,10 +37,14 @@ def test_runtime_zip(self, function_name): ) process = Popen(command_list, stdout=PIPE) - return_code = process.wait() - - self.assertEqual(return_code, 0) - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + self.assertEqual(process.returncode, 0) + process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '"Hello World"') @pytest.mark.timeout(timeout=300, method="thread") @@ -50,8 +56,12 @@ def test_custom_provided_runtime(self): command_list = command_list + ["--skip-pull-image"] process = Popen(command_list, stdout=PIPE) - return_code = process.wait() - - self.assertEqual(return_code, 0) - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + self.assertEqual(process.returncode, 0) + process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '{"body":"hello 曰有冥 world 🐿","statusCode":200,"headers":{}}') diff --git a/tests/integration/local/invoke/test_integrations_cli.py b/tests/integration/local/invoke/test_integrations_cli.py index d0ebb4a9b2..9a5279a3e7 100644 --- a/tests/integration/local/invoke/test_integrations_cli.py +++ b/tests/integration/local/invoke/test_integrations_cli.py @@ -630,10 +630,10 @@ def test_download_one_layer(self, function_logical_id): process.kill() raise - process_stdout = stdout.split(os.linesep)[-1:].strip() + process_stdout = stdout.decode("utf-8").strip().split(os.linesep)[-1] expected_output = '"Layer1"' - self.assertEqual(process_stdout.decode("utf-8"), expected_output) + self.assertEqual(process_stdout, expected_output) 
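The layer-test hunks above drop the per-line reads of process.stdout in favour of a single communicate() call, which means the test now has to pick the last line out of the captured output itself; the fix decodes and strips before splitting so a trailing newline cannot leave an empty final element. A minimal sketch of that pattern, outside the patch, using a hypothetical helper name (last_stdout_line) and a placeholder command list:

    import os
    from subprocess import Popen, PIPE, TimeoutExpired

    TIMEOUT = 300  # same ceiling the patch gives communicate()

    def last_stdout_line(command_list):
        # Launch the command and capture stdout in one shot.
        process = Popen(command_list, stdout=PIPE)
        try:
            stdout, _ = process.communicate(timeout=TIMEOUT)
        except TimeoutExpired:
            # Kill a hung child and let the timeout surface as a test failure.
            process.kill()
            raise
        # Decode and strip first, then split, so the last element is the real
        # final line rather than an empty string after a trailing newline.
        return stdout.decode("utf-8").strip().split(os.linesep)[-1]

This is the "Strip before spliting string" note from the patch summary expressed as a standalone helper; the tests themselves keep the logic inline.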
@parameterized.expand([("ChangedLayerVersionServerlessFunction"), ("ChangedLayerVersionLambdaFunction")]) def test_publish_changed_download_layer(self, function_logical_id): @@ -656,10 +656,10 @@ def test_publish_changed_download_layer(self, function_logical_id): process.kill() raise - process_stdout = stdout.split(os.linesep).strip() + process_stdout = stdout.decode("utf-8").strip().split(os.linesep)[-1] expected_output = '"Layer1"' - self.assertEqual(process_stdout.decode("utf-8"), expected_output) + self.assertEqual(process_stdout, expected_output) self.layer_utils.upsert_layer( layer_name=layer_name, ref_layer_name="ChangedLayerArn", layer_zip="changedlayer1.zip" @@ -681,10 +681,10 @@ def test_publish_changed_download_layer(self, function_logical_id): process.kill() raise - process_stdout = stdout.split(os.linesep).strip() + process_stdout = stdout.decode("utf-8").strip().split(os.linesep)[-1] expected_output = '"Changed_Layer_1"' - self.assertEqual(process_stdout.decode("utf-8"), expected_output) + self.assertEqual(process_stdout, expected_output) @parameterized.expand([("TwoLayerVersionServerlessFunction"), ("TwoLayerVersionLambdaFunction")]) def test_download_two_layers(self, function_logical_id): @@ -707,10 +707,10 @@ def test_download_two_layers(self, function_logical_id): stdout = stdout - process_stdout = stdout.split(os.linesep).strip() + process_stdout = stdout.decode("utf-8").strip().split(os.linesep)[-1] expected_output = '"Layer2"' - self.assertEqual(process_stdout.decode("utf-8"), expected_output) + self.assertEqual(process_stdout, expected_output) def test_caching_two_layers(self): diff --git a/tests/integration/package/test_package_command.py b/tests/integration/package/test_package_command.py index 3d3bb60dfc..c9ddce1ea8 100644 --- a/tests/integration/package/test_package_command.py +++ b/tests/integration/package/test_package_command.py @@ -1,4 +1,4 @@ -from subprocess import Popen, PIPE +from subprocess import Popen, PIPE, TimeoutExpired import tempfile from unittest import skipIf @@ -10,6 +10,7 @@ # Package tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. # This is to restrict package tests to run outside of CI/CD and when the branch is not master. 
SKIP_PACKAGE_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI +TIMEOUT = 300 @skipIf(SKIP_PACKAGE_TESTS, "Skip package tests in CI/CD only") @@ -26,8 +27,12 @@ def test_package_template_flag(self, template_file): command_list = self.get_command_list(s3_bucket=self.s3_bucket.name, template=template_path) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn("{bucket_name}".format(bucket_name=self.s3_bucket.name), process_stdout.decode("utf-8")) @@ -54,8 +59,12 @@ def test_package_barebones(self, template_file): command_list = self.get_command_list(s3_bucket=self.s3_bucket.name, template_file=template_path) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn("{bucket_name}".format(bucket_name=self.s3_bucket.name), process_stdout.decode("utf-8")) @@ -63,7 +72,11 @@ def test_package_without_required_args(self): command_list = self.get_command_list() process = Popen(command_list, stdout=PIPE) - process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self.assertNotEqual(process.returncode, 0) @parameterized.expand( @@ -92,8 +105,12 @@ def test_package_with_prefix(self, template_file): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn("{bucket_name}".format(bucket_name=self.s3_bucket.name), process_stdout.decode("utf-8")) @@ -131,8 +148,12 @@ def test_package_with_output_template_file(self, template_file): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn( bytes( @@ -177,8 +198,12 @@ def test_package_with_json(self, template_file): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn( bytes( @@ -225,8 +250,12 @@ def test_package_with_force_upload(self, template_file): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn( bytes( @@ -271,8 +300,12 @@ def test_package_with_kms_key(self, template_file): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn( bytes( @@ -317,8 +350,12 @@ def test_package_with_metadata(self, template_file): ) process = 
Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn( bytes( diff --git a/tests/integration/publish/test_command_integ.py b/tests/integration/publish/test_command_integ.py index deb49407c5..b96e106b89 100644 --- a/tests/integration/publish/test_command_integ.py +++ b/tests/integration/publish/test_command_integ.py @@ -1,7 +1,7 @@ import re import time import json -from subprocess import Popen, PIPE +from subprocess import Popen, PIPE, TimeoutExpired from unittest import skipIf @@ -12,6 +12,7 @@ # Publish tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. # This is to restrict publish tests to run outside of CI/CD and when the branch is not master. SKIP_PUBLISH_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI +TIMEOUT = 300 @skipIf(SKIP_PUBLISH_TESTS, "Skip publish tests in CI/CD only") @@ -38,8 +39,12 @@ def test_update_application(self): command_list = self.get_command_list(template_path=template_path, region=self.region_name) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() expected_msg = 'The following metadata of application "{}" has been updated:'.format(self.application_id) self.assertIn(expected_msg, process_stdout.decode("utf-8")) @@ -53,8 +58,12 @@ def test_create_application_version(self): command_list = self.get_command_list(template_path=template_path, region=self.region_name) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() expected_msg = 'The following metadata of application "{}" has been updated:'.format(self.application_id) self.assertIn(expected_msg, process_stdout.decode("utf-8")) @@ -70,8 +79,12 @@ def test_create_application_version_with_semantic_version_option(self): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() expected_msg = 'The following metadata of application "{}" has been updated:'.format(self.application_id) self.assertIn(expected_msg, process_stdout.decode("utf-8")) @@ -101,8 +114,12 @@ def test_create_application(self): command_list = self.get_command_list(template_path=template_path, region=self.region_name) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() expected_msg = "Created new application with the following metadata:" self.assertIn(expected_msg, process_stdout.decode("utf-8")) @@ -121,8 +138,12 @@ def test_publish_not_packaged_template(self): command_list = self.get_command_list(template_path=template_path, region=self.region_name) process = Popen(command_list, stderr=PIPE) - process.wait() - process_stderr = 
b"".join(process.stderr.readlines()).strip() + try: + _, stderr = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stderr = stderr.strip() expected_msg = "Please make sure that you have uploaded application artifacts to S3" self.assertIn(expected_msg, process_stderr.decode("utf-8")) @@ -132,8 +153,12 @@ def test_create_application_infer_region_from_env(self): command_list = self.get_command_list(template_path=template_path) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() expected_msg = "Created new application with the following metadata:" self.assertIn(expected_msg, process_stdout.decode("utf-8")) diff --git a/tests/integration/telemetry/test_installed_metric.py b/tests/integration/telemetry/test_installed_metric.py index fa4d3b679b..c9b7434f16 100644 --- a/tests/integration/telemetry/test_installed_metric.py +++ b/tests/integration/telemetry/test_installed_metric.py @@ -16,10 +16,9 @@ def test_send_installed_metric_on_first_run(self): # Start the CLI process = self.run_cmd() - (_, stderrdata) = process.communicate() + _, stderrdata = process.communicate() - retcode = process.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + self.assertEqual(process.returncode, 0, "Command should successfully complete") # Make sure the prompt was printed. Otherwise this test is not valid self.assertIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) @@ -66,10 +65,9 @@ def test_must_not_send_installed_metric_when_prompt_is_disabled(self): # Start the CLI process = self.run_cmd() - (stdoutdata, stderrdata) = process.communicate() + stdoutdata, stderrdata = process.communicate() - retcode = process.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + self.assertEqual(process.returncode, 0, "Command should successfully complete") self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stdoutdata.decode()) self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) @@ -88,9 +86,8 @@ def test_must_not_send_installed_metric_on_second_run(self): # First Run process1 = self.run_cmd() - (_, stderrdata) = process1.communicate() - retcode = process1.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + _, stderrdata = process1.communicate() + self.assertEqual(process1.returncode, 0, "Command should successfully complete") self.assertIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) self.assertEqual( 1, len(filter_installed_metric_requests(server.get_all_requests())), "'installed' metric should be sent" @@ -98,9 +95,8 @@ def test_must_not_send_installed_metric_on_second_run(self): # Second Run process2 = self.run_cmd() - (stdoutdata, stderrdata) = process2.communicate() - retcode = process2.poll() - self.assertEqual(retcode, 0) + stdoutdata, stderrdata = process2.communicate() + self.assertEqual(process2.returncode, 0) self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stdoutdata.decode()) self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) self.assertEqual( diff --git a/tests/integration/telemetry/test_prompt.py b/tests/integration/telemetry/test_prompt.py index 876dd326cf..270c813fc5 100644 --- a/tests/integration/telemetry/test_prompt.py +++ b/tests/integration/telemetry/test_prompt.py @@ -10,7 +10,7 @@ def test_must_prompt_if_config_is_not_set(self): self.unset_config() 
process = self.run_cmd() - (stdoutdata, stderrdata) = process.communicate() + _, stderrdata = process.communicate() # Telemetry prompt should be printed to the terminal self.assertIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) @@ -25,7 +25,7 @@ def test_must_not_prompt_if_config_is_set(self, telemetry_enabled, msg): self.set_config(telemetry_enabled=telemetry_enabled) process = self.run_cmd() - (stdoutdata, stderrdata) = process.communicate() + stdoutdata, stderrdata = process.communicate() self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stdoutdata.decode()) self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) @@ -38,11 +38,11 @@ def test_prompt_must_not_display_on_second_run(self): # First Run process = self.run_cmd() - (stdoutdata, stderrdata) = process.communicate() + _, stderrdata = process.communicate() self.assertIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) # Second Run process = self.run_cmd() - (stdoutdata, stderrdata) = process.communicate() + stdoutdata, stderrdata = process.communicate() self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stdoutdata.decode()) self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) diff --git a/tests/integration/telemetry/test_telemetry_contract.py b/tests/integration/telemetry/test_telemetry_contract.py index 0d7f40afb8..08b3585b99 100644 --- a/tests/integration/telemetry/test_telemetry_contract.py +++ b/tests/integration/telemetry/test_telemetry_contract.py @@ -16,17 +16,17 @@ def test_must_not_send_metrics_if_disabled_using_envvar(self): with TelemetryServer() as server: # Start the CLI, but opt-out of Telemetry using env var process = self.run_cmd(optout_envvar_value="0") - (_, stderrdata) = process.communicate() - retcode = process.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + process.communicate() + + self.assertEqual(process.returncode, 0, "Command should successfully complete") all_requests = server.get_all_requests() self.assertEqual(0, len(all_requests), "No metrics should be sent") # Now run again without the Env Var Opt out process = self.run_cmd() - (_, stderrdata) = process.communicate() - retcode = process.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + process.communicate() + + self.assertEqual(process.returncode, 0, "Command should successfully complete") all_requests = server.get_all_requests() self.assertEqual(1, len(all_requests), "Command run metric should be sent") @@ -40,17 +40,17 @@ def test_must_send_metrics_if_enabled_via_envvar(self): with TelemetryServer() as server: # Run without any envvar.Should not publish metrics process = self.run_cmd() - (_, stderrdata) = process.communicate() - retcode = process.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + process.communicate() + + self.assertEqual(process.returncode, 0, "Command should successfully complete") all_requests = server.get_all_requests() self.assertEqual(0, len(all_requests), "No metric should be sent") # Opt-in via env var process = self.run_cmd(optout_envvar_value="1") - (_, stderrdata) = process.communicate() - retcode = process.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + process.communicate() + + self.assertEqual(process.returncode, 0, "Command should successfully complete") all_requests = server.get_all_requests() self.assertEqual(1, len(all_requests), "Command run metric must be sent") @@ -66,7 +66,6 @@ def test_must_not_crash_when_offline(self): # Start the CLI process = self.run_cmd() - (_, stderrdata) = 
process.communicate() + process.communicate() - retcode = process.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + self.assertEqual(process.returncode, 0, "Command should successfully complete") diff --git a/tests/regression/deploy/regression_deploy_base.py b/tests/regression/deploy/regression_deploy_base.py index dd8acbc2da..9c482d7a3c 100644 --- a/tests/regression/deploy/regression_deploy_base.py +++ b/tests/regression/deploy/regression_deploy_base.py @@ -1,7 +1,9 @@ import os -from subprocess import Popen, PIPE +from subprocess import Popen, PIPE, TimeoutExpired from unittest import TestCase +TIMEOUT = 300 + class DeployRegressionBase(TestCase): @classmethod @@ -90,10 +92,18 @@ def deploy_regression_check(self, args, sam_return_code=0, aws_return_code=0, co aws_command_list = self.get_deploy_command_list(base="aws", stack_name=aws_stack_name, **args) process = Popen(aws_command_list, stdout=PIPE) - process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self.assertEqual(process.returncode, aws_return_code) sam_command_list = self.get_deploy_command_list(stack_name=sam_stack_name, **args) process = Popen(sam_command_list, stdout=PIPE) - process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self.assertEqual(process.returncode, sam_return_code) diff --git a/tests/regression/deploy/test_deploy_regression.py b/tests/regression/deploy/test_deploy_regression.py index 728b816ac0..f585ec450b 100644 --- a/tests/regression/deploy/test_deploy_regression.py +++ b/tests/regression/deploy/test_deploy_regression.py @@ -2,7 +2,7 @@ import tempfile import uuid import time -from subprocess import Popen, PIPE +from subprocess import Popen, PIPE, TimeoutExpired from unittest import skipIf import boto3 @@ -16,6 +16,7 @@ # This is to restrict package tests to run outside of CI/CD and when the branch is not master. 
SKIP_DEPLOY_REGRESSION_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI CFN_SLEEP = 3 +TIMEOUT = 300 # Only testing return codes to be equivalent @@ -42,7 +43,11 @@ def prepare_package(self, template_file): ) package_process = Popen(package_command_list, stdout=PIPE) - package_process.wait() + try: + stdout, _ = package_process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + package_process.kill() + raise self.assertEqual(package_process.returncode, 0) return output_template_file.name diff --git a/tests/regression/package/regression_package_base.py b/tests/regression/package/regression_package_base.py index 6827c190a9..b39e4fcbb9 100644 --- a/tests/regression/package/regression_package_base.py +++ b/tests/regression/package/regression_package_base.py @@ -4,12 +4,13 @@ import tempfile import uuid from pathlib import Path -from subprocess import Popen, PIPE +from subprocess import Popen, PIPE, TimeoutExpired from unittest import TestCase import boto3 S3_SLEEP = 3 +TIMEOUT = 300 class PackageRegressionBase(TestCase): @@ -85,7 +86,11 @@ def regression_check(self, args): with tempfile.NamedTemporaryFile(delete=False) as output_template_file_sam: sam_command_list = self.get_command_list(output_template_file=output_template_file_sam.name, **args) process = Popen(sam_command_list, stdout=PIPE) - process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self.assertEqual(process.returncode, 0) output_sam = output_template_file_sam.read() @@ -94,7 +99,11 @@ def regression_check(self, args): base="aws", output_template_file=output_template_file_aws.name, **args ) process = Popen(aws_command_list, stdout=PIPE) - process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self.assertEqual(process.returncode, 0) output_aws = output_template_file_aws.read() diff --git a/tests/regression/package/test_package_regression.py b/tests/regression/package/test_package_regression.py index ac55205f09..88bb144af1 100644 --- a/tests/regression/package/test_package_regression.py +++ b/tests/regression/package/test_package_regression.py @@ -1,6 +1,3 @@ -from subprocess import Popen, PIPE -import tempfile - from unittest import skipIf from parameterized import parameterized
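Taken together, the final patch in this series converges every integration and regression test on one subprocess pattern: start the CLI with Popen, wait with communicate(timeout=TIMEOUT) instead of wait(), kill and re-raise on TimeoutExpired, then assert on process.returncode and on the bytes communicate() returned. A minimal standalone sketch of that pattern, assuming a hypothetical run_cli helper (the 300-second TIMEOUT mirrors the per-module constant the patch adds; everything else is illustrative, not part of the diff):

    from subprocess import Popen, PIPE, TimeoutExpired

    TIMEOUT = 300  # seconds; matches the per-module TIMEOUT constant in the patch

    def run_cli(command_list, env=None):
        # Capture both streams so assertions can inspect either one.
        process = Popen(command_list, stdout=PIPE, stderr=PIPE, env=env)
        try:
            # communicate() waits for exit *and* drains the pipes, so the child
            # can never block on a full stdout/stderr buffer the way it could
            # with process.wait() followed by readlines().
            stdout, stderr = process.communicate(timeout=TIMEOUT)
        except TimeoutExpired:
            # Never leave a wedged child behind; kill it and let the test fail.
            process.kill()
            raise
        return process.returncode, stdout, stderr

A test would then assert along the lines of returncode == 0 and "expected text" in stdout.decode("utf-8"), which is the shape the hunks above rewrite each test into.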