diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d0e22184b7..422cd3a0a9 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -153,7 +153,7 @@ jobs: ruby-version: "3.2" - uses: actions/setup-node@v4 with: - node-version: 18 + node-version: 20 - uses: actions/setup-java@v3 with: distribution: 'corretto' diff --git a/requirements/base.txt b/requirements/base.txt index 06a87b7a6c..790da82013 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -11,7 +11,7 @@ aws-sam-translator==1.79.0 docker~=6.1.0 dateparser~=1.1 requests~=2.31.0 -aws_lambda_builders==1.40.0 +aws_lambda_builders==1.41.0 tomlkit==0.12.2 watchdog==3.0.0 rich~=13.6.0 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index 3efc5df14a..ed3acf8832 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -20,9 +20,9 @@ attrs==23.1.0 \ # jsonschema # referencing # sarif-om -aws-lambda-builders==1.40.0 \ - --hash=sha256:660b1d284320172beca39971b2932549d1ae642aa94500bcf70b612771957480 \ - --hash=sha256:a48f083f750d62d5a5cf0bac1fe224682b5ed83dd821d794802c791ec22e077b +aws-lambda-builders==1.41.0 \ + --hash=sha256:b26280609269696d4c7ebe6ea208de5d02a8e4f14711ceac265510e28de65258 \ + --hash=sha256:d48d0d9b704319cdb6bb19918a72bf6c06fd3a7012848f6bcc4b906d5686db17 # via aws-sam-cli (setup.py) aws-sam-translator==1.79.0 \ --hash=sha256:6f0c6bbcebcc98c4f98dc238352a06b0befe7fb72535cca75712fffff4873875 \ diff --git a/requirements/reproducible-mac.txt b/requirements/reproducible-mac.txt index d397097902..7e00908439 100644 --- a/requirements/reproducible-mac.txt +++ b/requirements/reproducible-mac.txt @@ -20,9 +20,9 @@ attrs==23.1.0 \ # jsonschema # referencing # sarif-om -aws-lambda-builders==1.40.0 \ - --hash=sha256:660b1d284320172beca39971b2932549d1ae642aa94500bcf70b612771957480 \ - --hash=sha256:a48f083f750d62d5a5cf0bac1fe224682b5ed83dd821d794802c791ec22e077b +aws-lambda-builders==1.41.0 \ + --hash=sha256:b26280609269696d4c7ebe6ea208de5d02a8e4f14711ceac265510e28de65258 \ + --hash=sha256:d48d0d9b704319cdb6bb19918a72bf6c06fd3a7012848f6bcc4b906d5686db17 # via aws-sam-cli (setup.py) aws-sam-translator==1.79.0 \ --hash=sha256:6f0c6bbcebcc98c4f98dc238352a06b0befe7fb72535cca75712fffff4873875 \ diff --git a/requirements/reproducible-win.txt b/requirements/reproducible-win.txt index b13fa8d7ce..105c771941 100644 --- a/requirements/reproducible-win.txt +++ b/requirements/reproducible-win.txt @@ -20,9 +20,9 @@ attrs==23.1.0 \ # jsonschema # referencing # sarif-om -aws-lambda-builders==1.40.0 \ - --hash=sha256:660b1d284320172beca39971b2932549d1ae642aa94500bcf70b612771957480 \ - --hash=sha256:a48f083f750d62d5a5cf0bac1fe224682b5ed83dd821d794802c791ec22e077b +aws-lambda-builders==1.41.0 \ + --hash=sha256:b26280609269696d4c7ebe6ea208de5d02a8e4f14711ceac265510e28de65258 \ + --hash=sha256:d48d0d9b704319cdb6bb19918a72bf6c06fd3a7012848f6bcc4b906d5686db17 # via aws-sam-cli (setup.py) aws-sam-translator==1.79.0 \ --hash=sha256:6f0c6bbcebcc98c4f98dc238352a06b0befe7fb72535cca75712fffff4873875 \ diff --git a/samcli/__init__.py b/samcli/__init__.py index 8752da24bb..2a0e5ce3be 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.101.0" +__version__ = "1.102.0" diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 737c8ecdd2..f8f92146bb 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -587,13 +587,22 
@@ def remote_invoke_parameter_click_option(): type=RemoteInvokeBotoApiParameterType(), callback=remote_invoke_boto_parameter_callback, required=False, - help="Additional parameters that can be passed to invoke the resource.\n" - "The following additional parameters can be used to invoke a lambda resource and get a buffered response: " - "InvocationType='Event'|'RequestResponse'|'DryRun', LogType='None'|'Tail', " - "ClientContext='base64-encoded string' Qualifier='string'. " - "The following additional parameters can be used to invoke a lambda resource with response streaming: " - "InvocationType='RequestResponse'|'DryRun', LogType='None'|'Tail', " - "ClientContext='base64-encoded string', Qualifier='string'.", + help="Additional parameters that can be passed" + " to invoke the resource.\n\n" + "Lambda Function (Buffered stream): The following additional parameters can be used to invoke a lambda resource" + " and get a buffered response: InvocationType='Event'|'RequestResponse'|'DryRun', LogType='None'|'Tail', " + "ClientContext='base64-encoded string' Qualifier='string'.\n\n" + "Lambda Function (Response stream): The following additional parameters can be used to invoke a lambda resource" + " with response streaming: InvocationType='RequestResponse'|'DryRun', LogType='None'|'Tail', " + "ClientContext='base64-encoded string', Qualifier='string'.\n\n" + "Step Functions: The following additional parameters can be used to start a state machine execution: " + "name='string', traceHeader='string'\n\n" + "SQS Queue: The following additional parameters can be used to send a message to an SQS queue: " + "DelaySeconds=integer, MessageAttributes='json string', MessageSystemAttributes='json string'," + " MessageDeduplicationId='string', MessageGroupId='string'\n\n" + "Kinesis Data Stream: The following additional parameters can be used to put a record" + " in the kinesis data stream: PartitionKey='string', ExplicitHashKey='string'," + " SequenceNumberForOrdering='string', StreamARN='string' ", ) diff --git a/samcli/commands/build/command.py b/samcli/commands/build/command.py index 0aa5457b7e..23784295ba 100644 --- a/samcli/commands/build/command.py +++ b/samcli/commands/build/command.py @@ -53,7 +53,7 @@ Supported Runtimes ------------------ 1. Python 3.7, 3.8, 3.9, 3.10, 3.11 using PIP\n - 2. Nodejs 18.x, 16.x, 14.x, 12.x using NPM\n + 2. Nodejs 20.x, 18.x, 16.x, 14.x, 12.x using NPM\n 3. Ruby 2.7, 3.2 using Bundler\n 4. Java 8, Java 11, Java 17 using Gradle and Maven\n 5. Dotnet6 using Dotnet CLI (without --use-container)\n diff --git a/samcli/commands/remote/invoke/cli.py b/samcli/commands/remote/invoke/cli.py index 0c258527b4..c1c08b3fe0 100644 --- a/samcli/commands/remote/invoke/cli.py +++ b/samcli/commands/remote/invoke/cli.py @@ -18,6 +18,7 @@ from samcli.lib.remote_invoke.remote_invoke_executors import RemoteInvokeOutputFormat from samcli.lib.telemetry.event import EventTracker from samcli.lib.telemetry.metric import track_command +from samcli.lib.utils.resources import AWS_LAMBDA_FUNCTION from samcli.lib.utils.version_checker import check_newer_version LOG = logging.getLogger(__name__) @@ -30,7 +31,13 @@ DESCRIPTION = """ Invoke or send an event to resources in the cloud. An event body can be passed using either -e (--event) or --event-file parameter. - Returned response will be written to stdout. Lambda logs will be written to stderr. 
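A minimal sketch (not part of the diff), assuming placeholder resource names and ARNs, of how the --parameter values documented in the expanded help text above map onto the corresponding boto3 calls:

import json

import boto3

session = boto3.Session()

# Lambda Function: InvocationType, LogType, ClientContext, Qualifier (buffered or streaming).
session.client("lambda").invoke(
    FunctionName="HelloWorldFunction",  # placeholder; the CLI resolves this from resource_id/stack
    Payload=json.dumps({"message": "hello!"}),  # value of --event / --event-file
    InvocationType="RequestResponse",
    LogType="Tail",
)

# Step Functions: name, traceHeader.
session.client("stepfunctions").start_execution(
    stateMachineArn="arn:aws:states:us-east-1:123456789012:stateMachine:MySFN",  # placeholder
    input=json.dumps({"message": "hello!"}),
    name="mock-execution-name",
)

# SQS Queue: DelaySeconds, MessageAttributes, MessageSystemAttributes, MessageGroupId, MessageDeduplicationId.
session.client("sqs").send_message(
    QueueUrl="https://sqs.us-east-1.amazonaws.com/123456789012/QueueName",  # placeholder
    MessageBody="hello-world",
    DelaySeconds=10,
)

# Kinesis Data Stream: PartitionKey, ExplicitHashKey, SequenceNumberForOrdering, StreamARN.
session.client("kinesis").put_record(
    StreamName="MyKinesisStream",  # placeholder
    Data=b'{"message": "hello!"}',
    PartitionKey="mock-partition-key",
)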
+ + This command can be used to invoke a Lambda Function and get the output payload, start a State Machine execution + and wait for the output of the final step, send a message to SQS Queue, or put a data record to + Kinesis Data Streams. + + Returned response will be written to stdout. Lambda logs and Step Function execution + errors will be written to stderr. """ @@ -50,7 +57,8 @@ "--event", "-e", help="The event that will be sent to the resource. The target parameter will depend on the resource type. " - "For instance: 'Payload' for Lambda which can be passed as a JSON string", + "For instance: 'Payload' for Lambda which can be passed as a JSON string, 'Input' for Step Functions, " + "'MessageBody' for SQS, and 'Data' for Kinesis data streams.", ) @click.option( "--event-file", @@ -152,11 +160,18 @@ def do_cli( stack_name=stack_name, resource_id=resource_id, ) as remote_invoke_context: - if test_event_name: + if ( + test_event_name + and remote_invoke_context.resource_summary + and remote_invoke_context.resource_summary.resource_type == AWS_LAMBDA_FUNCTION + ): lambda_test_event = remote_invoke_context.get_lambda_shared_test_event_provider() LOG.debug("Retrieving remote event %s", test_event_name) event = lambda_test_event.get_event(test_event_name, remote_invoke_context.resource_summary) LOG.debug("Remote event contents: %s", event) + elif test_event_name: + LOG.info("Note: remote event is only supported for AWS Lambda Function resource.") + test_event_name = "" event_type = RemoteInvokeEventType.get_event_type( event=event, diff --git a/samcli/commands/remote/invoke/core/command.py b/samcli/commands/remote/invoke/core/command.py index 37ec181d86..6c6912e937 100644 --- a/samcli/commands/remote/invoke/core/command.py +++ b/samcli/commands/remote/invoke/core/command.py @@ -20,116 +20,299 @@ class CustomFormatterContext(Context): @staticmethod def format_examples(ctx: Context, formatter: RemoteInvokeCommandHelpTextFormatter): with formatter.indented_section(name="Examples", extra_indents=1): - with formatter.indented_section(name="Invoke default lambda function with empty event", extra_indents=1): - formatter.write_rd( - [ - RowDefinition( - text="\n", - ), - RowDefinition( - name=style(f"${ctx.command_path} --stack-name hello-world"), - extra_row_modifiers=[ShowcaseRowModifier()], - ), - ] - ) - with formatter.indented_section( - name="Invoke default lambda function with event passed as text input", extra_indents=1 - ): - formatter.write_rd( - [ - RowDefinition( - text="\n", - ), - RowDefinition( - name=style( - f"${ctx.command_path} --stack-name hello-world -e '{json.dumps({'message':'hello!'})}'" - ), - extra_row_modifiers=[ShowcaseRowModifier()], - ), - ] - ) - with formatter.indented_section(name="Invoke named lambda function with an event file", extra_indents=1): - formatter.write_rd( - [ - RowDefinition( - text="\n", - ), - RowDefinition( - name=style( - f"${ctx.command_path} --stack-name " - f"hello-world HelloWorldFunction --event-file event.json" - ), - extra_row_modifiers=[ShowcaseRowModifier()], - ), - ] - ) - with formatter.indented_section(name="Invoke lambda function with event as stdin input", extra_indents=1): - formatter.write_rd( - [ - RowDefinition( - text="\n", - ), - RowDefinition( - name=style( - f"$ echo '{json.dumps({'message':'hello!'})}' | " - f"{ctx.command_path} HelloWorldFunction --event-file -" - ), - extra_row_modifiers=[ShowcaseRowModifier()], - ), - ] - ) - with formatter.indented_section( - name="Invoke lambda function using lambda ARN and get the full 
AWS API response", extra_indents=1 - ): - formatter.write_rd( - [ - RowDefinition( - text="\n", - ), - RowDefinition( - name=style( - f"${ctx.command_path} arn:aws:lambda:us-west-2:123456789012:function:my-function -e <>" - f" --output json" - ), - extra_row_modifiers=[ShowcaseRowModifier()], - ), - ] - ) - with formatter.indented_section( - name="Asynchronously invoke lambda function with additional boto parameters", extra_indents=1 - ): - formatter.write_rd( - [ - RowDefinition( - text="\n", - ), - RowDefinition( - name=style( - f"${ctx.command_path} HelloWorldFunction -e <> " - f"--parameter InvocationType=Event --parameter Qualifier=MyQualifier" - ), - extra_row_modifiers=[ShowcaseRowModifier()], - ), - ] - ) - with formatter.indented_section( - name="Dry invoke a lambda function to validate parameter values and user/role permissions", - extra_indents=1, - ): - formatter.write_rd( - [ - RowDefinition( - text="\n", - ), - RowDefinition( - name=style( - f"${ctx.command_path} HelloWorldFunction -e <> --output json " - f"--parameter InvocationType=DryRun" - ), - extra_row_modifiers=[ShowcaseRowModifier()], - ), - ] - ) + with formatter.indented_section(name="Lambda Functions", extra_indents=1): + with formatter.indented_section( + name="Invoke default lambda function with empty event", extra_indents=1 + ): + formatter.write_rd( + [ + RowDefinition( + name=style(f"${ctx.command_path} --stack-name hello-world"), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section( + name="Invoke default lambda function with event passed as text input", extra_indents=1 + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path} --stack-name hello-world -e" + f" '{json.dumps({'message':'hello!'})}'" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section( + name="Invoke named lambda function with an event file", extra_indents=1 + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path} --stack-name " + f"hello-world HelloWorldFunction --event-file event.json" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section(name="Invoke function with event as stdin input", extra_indents=1): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"$ echo '{json.dumps({'message':'hello!'})}' | " + f"{ctx.command_path} HelloWorldFunction --event-file -" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section( + name="Invoke function using lambda ARN and get the full AWS API response", extra_indents=1 + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path} arn:aws:lambda:us-west-2:123456789012:function:my-function" + f" -e <> --output json" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section( + name="Asynchronously invoke function with additional boto parameters", extra_indents=1 + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path} HelloWorldFunction -e <> " + f"--parameter InvocationType=Event --parameter Qualifier=MyQualifier" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section( + name="Dry invoke a function to validate parameter values and user/role permissions", + extra_indents=1, + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path} HelloWorldFunction -e <> --output json " + f"--parameter 
InvocationType=DryRun" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section(name="Step Functions", extra_indents=1): + with formatter.indented_section( + name="Start execution with event passed as text input", + extra_indents=1, + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path} --stack-name mock-stack StockTradingStateMachine" + f" -e '{json.dumps({'message':'hello!'})}'" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section( + name="Start execution using its physical-id or ARN with an execution name parameter", + extra_indents=1, + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path} arn:aws:states:us-east-1:123456789012:stateMachine:MySFN" + f" -e <> --parameter name=mock-execution-name" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section( + name="Start execution with an event file and get the full AWS API response", + extra_indents=1, + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path} --stack-name mock-stack StockTradingStateMachine" + f" --event-file event.json --output json" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section( + name="Start execution with event as stdin input and pass the X-ray trace header to the execution", + extra_indents=1, + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"$ echo '{json.dumps({'message':'hello!'})}' | " + f"${ctx.command_path} --stack-name mock-stack StockTradingStateMachine" + f" --parameter traceHeader=<>" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section(name="SQS Queue", extra_indents=1): + with formatter.indented_section( + name="Send a message with the MessageBody passed as event", + extra_indents=1, + ): + formatter.write_rd( + [ + RowDefinition( + name=style(f"${ctx.command_path} --stack-name mock-stack MySQSQueue -e hello-world"), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section( + name="Send a message using its physical-id and pass event using --event-file", + extra_indents=1, + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path} https://sqs.us-east-1.amazonaws.com/12345678910/QueueName" + f" --event-file event.json" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section( + name="Send a message using its ARN and delay the specified message", + extra_indents=1, + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path} arn:aws:sqs:region:account_id:queue_name -e hello-world" + f" --parameter DelaySeconds=10" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section( + name="Send a message along with message attributes and get the full AWS API response", + extra_indents=1, + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path} --stack-name mock-stack MySQSQueue -e hello-world" + f" --output json --parameter MessageAttributes=" + f"'{json.dumps({'City': {'DataType': 'String', 'StringValue': 'City'}})}'" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + + with formatter.indented_section( + name="Send a message to a FIFO SQS Queue", + extra_indents=1, + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path} --stack-name mock-stack 
MySQSQueue -e hello-world" + f" --parameter MessageGroupId=mock-message-group" + f" --parameter MessageDeduplicationId=mock-dedup-id" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section(name="Kinesis Data Stream", extra_indents=1): + with formatter.indented_section( + name="Put a record using the data provided as event", + extra_indents=1, + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path} --stack-name mock-stack MyKinesisStream -e" + f" '{json.dumps({'message':'hello!'})}'" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section( + name="Put a record using its physical-id and pass event using --event-file", + extra_indents=1, + ): + formatter.write_rd( + [ + RowDefinition( + name=style(f"${ctx.command_path} MyKinesisStreamName" f" --event-file event.json"), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section( + name="Put a record using its ARN and override the key hash", + extra_indents=1, + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path}" + f" arn:aws:kinesis:us-east-2:123456789012:stream/mystream" + f" --event-file event.json --parameter ExplicitHashKey=<>" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section( + name="Put a record with a sequence number for ordering with a PartitionKey", + extra_indents=1, + ): + formatter.write_rd( + [ + RowDefinition( + name=style( + f"${ctx.command_path} MyKinesisStreamName" + f" --event hello-world --parameter SequenceNumberForOrdering=<>" + f" --parameter PartitionKey=<>" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) @staticmethod def format_acronyms(formatter: RemoteInvokeCommandHelpTextFormatter): diff --git a/samcli/commands/remote/remote_invoke_context.py b/samcli/commands/remote/remote_invoke_context.py index 44b952b492..bf98b6317a 100644 --- a/samcli/commands/remote/remote_invoke_context.py +++ b/samcli/commands/remote/remote_invoke_context.py @@ -35,13 +35,24 @@ get_resource_summary, get_resource_summary_from_physical_id, ) -from samcli.lib.utils.resources import AWS_KINESIS_STREAM, AWS_LAMBDA_FUNCTION, AWS_SQS_QUEUE +from samcli.lib.utils.resources import ( + AWS_KINESIS_STREAM, + AWS_LAMBDA_FUNCTION, + AWS_SQS_QUEUE, + AWS_STEPFUNCTIONS_STATEMACHINE, +) from samcli.lib.utils.stream_writer import StreamWriter LOG = logging.getLogger(__name__) -SUPPORTED_SERVICES = {"lambda": AWS_LAMBDA_FUNCTION} +SUPPORTED_SERVICES = { + "lambda": AWS_LAMBDA_FUNCTION, + "states": AWS_STEPFUNCTIONS_STATEMACHINE, + "sqs": AWS_SQS_QUEUE, + "kinesis": AWS_KINESIS_STREAM, +} +RESOURCES_PRIORITY_ORDER = [AWS_LAMBDA_FUNCTION, AWS_STEPFUNCTIONS_STATEMACHINE, AWS_SQS_QUEUE, AWS_KINESIS_STREAM] class RemoteInvokeContext: @@ -163,15 +174,15 @@ def _get_single_resource_from_stack(self) -> CloudFormationResourceSummary: and returns its information if stack has only one resource from that type (including nested stacks) """ LOG.debug( - "Trying to get single resource with %s type in % stack since no resource id is provided", - SUPPORTED_SERVICES.values(), + "Trying to get single resource with %s type in %s stack since no resource id is provided", + RESOURCES_PRIORITY_ORDER, self._stack_name, ) resource_summaries = get_resource_summaries( self._boto_resource_provider, self._boto_client_provider, cast(str, self._stack_name), - set(SUPPORTED_SERVICES.values()), + set(RESOURCES_PRIORITY_ORDER), ) if 
len(resource_summaries) == 1: @@ -180,10 +191,21 @@ def _get_single_resource_from_stack(self) -> CloudFormationResourceSummary: return resource_summary if len(resource_summaries) > 1: - raise AmbiguousResourceForRemoteInvoke( - f"{self._stack_name} contains more than one resource that could be used with remote invoke, " - f"please provide resource_id argument to resolve ambiguity." - ) + # Check for single occurrence of resources in priority order. + for resource_type in RESOURCES_PRIORITY_ORDER: + resource_type_count = 0 + single_resource_summary = None + for logical_id, resource_summary in resource_summaries.items(): + if resource_summary.resource_type == resource_type: + resource_type_count += 1 + single_resource_summary = resource_summary + if resource_type_count == 1 and single_resource_summary: + return single_resource_summary + elif resource_type_count > 1: + raise AmbiguousResourceForRemoteInvoke( + f"{self._stack_name} contains more than one resource that could be used with remote invoke," + f" please provide resource_id argument to resolve ambiguity." + ) # fail if no resource summary found with given types raise NoResourceFoundForRemoteInvoke( @@ -211,12 +233,10 @@ def _get_from_physical_resource_id(self) -> CloudFormationResourceSummary: if SUPPORTED_SERVICES.get(service_from_arn) == AWS_SQS_QUEUE: # SQS queue_url is used for calling boto3 API calls - # Note (hnnasit): Add unit test after AWS_SQS_QUEUE is added to SUPPORTED_SERVICES sqs_client = self._boto_client_provider("sqs") resource_id = get_queue_url_from_arn(sqs_client, resource_arn.resource_id) if SUPPORTED_SERVICES.get(service_from_arn) == AWS_KINESIS_STREAM: - # Note (hnnasit): Add unit test after AWS_KINESIS_STREAM is added to SUPPORTED_SERVICES # StreamName extracted from arn is used as resource_id. 
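The priority-ordered lookup above boils down to: return the single resource of the highest-priority supported type, and fail only when that type is ambiguous. A condensed sketch of that rule, using a stand-in summary type (the real code works with CloudFormationResourceSummary):

from typing import Dict, NamedTuple


class ResourceSummary(NamedTuple):
    # stand-in for CloudFormationResourceSummary
    logical_id: str
    resource_type: str


# mirrors RESOURCES_PRIORITY_ORDER above
PRIORITY_ORDER = [
    "AWS::Lambda::Function",
    "AWS::StepFunctions::StateMachine",
    "AWS::SQS::Queue",
    "AWS::Kinesis::Stream",
]


def pick_single_resource(summaries: Dict[str, ResourceSummary]) -> ResourceSummary:
    """Return the only resource of the highest-priority type, or raise if that type is ambiguous."""
    for resource_type in PRIORITY_ORDER:
        matches = [summary for summary in summaries.values() if summary.resource_type == resource_type]
        if len(matches) == 1:
            return matches[0]
        if len(matches) > 1:
            raise ValueError(
                "stack contains more than one resource that could be used with remote invoke,"
                " please provide resource_id argument to resolve ambiguity."
            )
    raise LookupError("no resource found with a supported resource type")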
resource_id = resource_arn.resource_id diff --git a/samcli/lib/build/app_builder.py b/samcli/lib/build/app_builder.py index 056af7b53d..0e49b7b285 100644 --- a/samcli/lib/build/app_builder.py +++ b/samcli/lib/build/app_builder.py @@ -38,6 +38,7 @@ from samcli.lib.providers.provider import ResourcesToBuildCollector, get_full_path, Stack from samcli.lib.utils.colors import Colored, Colors from samcli.lib.utils import osutils +from samcli.lib.utils.lambda_builders import patch_runtime from samcli.lib.utils.packagetype import IMAGE, ZIP from samcli.lib.utils.stream_writer import StreamWriter from samcli.local.docker.exceptions import ContainerNotStartableException @@ -859,7 +860,7 @@ def _build_function_in_process( application_framework=config.application_framework, ) - runtime = runtime.replace(".al2", "") + runtime = patch_runtime(runtime) try: builder.build( diff --git a/samcli/lib/build/workflow_config.py b/samcli/lib/build/workflow_config.py index 29ba64bd06..b100621ce2 100644 --- a/samcli/lib/build/workflow_config.py +++ b/samcli/lib/build/workflow_config.py @@ -97,6 +97,7 @@ def get_layer_subfolder(build_workflow: str) -> str: "nodejs14.x": "nodejs", "nodejs16.x": "nodejs", "nodejs18.x": "nodejs", + "nodejs20.x": "nodejs", "ruby2.7": "ruby/lib", "ruby3.2": "ruby/lib", "java8": "java", @@ -161,6 +162,7 @@ def get_workflow_config( "nodejs14.x": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "nodejs16.x": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "nodejs18.x": BasicWorkflowSelector(NODEJS_NPM_CONFIG), + "nodejs20.x": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "ruby2.7": BasicWorkflowSelector(RUBY_BUNDLER_CONFIG), "ruby3.2": BasicWorkflowSelector(RUBY_BUNDLER_CONFIG), "dotnet6": BasicWorkflowSelector(DOTNET_CLIPACKAGE_CONFIG), diff --git a/samcli/lib/remote_invoke/kinesis_invoke_executors.py b/samcli/lib/remote_invoke/kinesis_invoke_executors.py index d26c51850f..ad020bd1ef 100644 --- a/samcli/lib/remote_invoke/kinesis_invoke_executors.py +++ b/samcli/lib/remote_invoke/kinesis_invoke_executors.py @@ -109,7 +109,7 @@ def _execute_action(self, payload: str) -> RemoteInvokeIterableResponseType: LOG.debug( "Calling kinesis_client.put_record with StreamName:%s, Data:%s", self.request_parameters[STREAM_NAME], - payload, + self.request_parameters[DATA], ) try: put_record_response = cast(dict, self._kinesis_client.put_record(**self.request_parameters)) diff --git a/samcli/lib/remote_invoke/remote_invoke_executor_factory.py b/samcli/lib/remote_invoke/remote_invoke_executor_factory.py index e739a639ff..5d0daaa395 100644 --- a/samcli/lib/remote_invoke/remote_invoke_executor_factory.py +++ b/samcli/lib/remote_invoke/remote_invoke_executor_factory.py @@ -27,7 +27,12 @@ StepFunctionsStartExecutionExecutor, ) from samcli.lib.utils.cloudformation import CloudFormationResourceSummary -from samcli.lib.utils.resources import AWS_LAMBDA_FUNCTION +from samcli.lib.utils.resources import ( + AWS_KINESIS_STREAM, + AWS_LAMBDA_FUNCTION, + AWS_SQS_QUEUE, + AWS_STEPFUNCTIONS_STATEMACHINE, +) LOG = logging.getLogger(__name__) @@ -276,4 +281,9 @@ def _create_kinesis_boto_executor( ], RemoteInvokeExecutor, ], - ] = {AWS_LAMBDA_FUNCTION: _create_lambda_boto_executor} + ] = { + AWS_LAMBDA_FUNCTION: _create_lambda_boto_executor, + AWS_STEPFUNCTIONS_STATEMACHINE: _create_stepfunctions_boto_executor, + AWS_SQS_QUEUE: _create_sqs_boto_executor, + AWS_KINESIS_STREAM: _create_kinesis_boto_executor, + } diff --git a/samcli/lib/remote_invoke/sqs_invoke_executors.py b/samcli/lib/remote_invoke/sqs_invoke_executors.py index 
89e0aec80e..b4454158fa 100644 --- a/samcli/lib/remote_invoke/sqs_invoke_executors.py +++ b/samcli/lib/remote_invoke/sqs_invoke_executors.py @@ -121,7 +121,7 @@ def _execute_action(self, payload: str) -> RemoteInvokeIterableResponseType: LOG.debug( "Calling sqs_client.send_message with QueueUrl:%s, MessageBody:%s", self.request_parameters[QUEUE_URL], - payload, + self.request_parameters[MESSAGE_BODY], ) try: send_message_response = cast(dict, self._sqs_client.send_message(**self.request_parameters)) diff --git a/samcli/lib/remote_invoke/stepfunctions_invoke_executors.py b/samcli/lib/remote_invoke/stepfunctions_invoke_executors.py index df8c9b2b4e..f4d6a384c8 100644 --- a/samcli/lib/remote_invoke/stepfunctions_invoke_executors.py +++ b/samcli/lib/remote_invoke/stepfunctions_invoke_executors.py @@ -61,9 +61,9 @@ def validate_action_parameters(self, parameters: dict) -> None: Boto parameters provided as input """ for parameter_key, parameter_value in parameters.items(): - if parameter_key == "stateMachineArn": + if parameter_key == STATE_MACHINE_ARN: LOG.warning("stateMachineArn is defined using the value provided for resource_id argument.") - elif parameter_key == "input": + elif parameter_key == INPUT: LOG.warning("input is defined using the value provided for either --event or --event-file options.") else: self.request_parameters[parameter_key] = parameter_value diff --git a/samcli/lib/utils/architecture.py b/samcli/lib/utils/architecture.py index 33345db49b..8cddfc9e3d 100644 --- a/samcli/lib/utils/architecture.py +++ b/samcli/lib/utils/architecture.py @@ -18,6 +18,7 @@ "nodejs14.x": [ARM64, X86_64], "nodejs16.x": [ARM64, X86_64], "nodejs18.x": [ARM64, X86_64], + "nodejs20.x": [ARM64, X86_64], "python3.7": [X86_64], "python3.8": [ARM64, X86_64], "python3.9": [ARM64, X86_64], diff --git a/samcli/lib/utils/lambda_builders.py b/samcli/lib/utils/lambda_builders.py new file mode 100644 index 0000000000..39509de859 --- /dev/null +++ b/samcli/lib/utils/lambda_builders.py @@ -0,0 +1,10 @@ +""" +Lambda Builders-specific utils +""" + + +def patch_runtime(runtime: str) -> str: + # NOTE: provided runtimes (provided, provided.al2, etc) are all recognized as "provided" in Lambda Builders + if runtime.startswith("provided"): + runtime = "provided" + return runtime.replace(".al2", "") diff --git a/samcli/local/common/runtime_template.py b/samcli/local/common/runtime_template.py index c41a56862f..b7ba6219d1 100644 --- a/samcli/local/common/runtime_template.py +++ b/samcli/local/common/runtime_template.py @@ -32,7 +32,7 @@ ], "nodejs": [ { - "runtimes": ["nodejs18.x", "nodejs16.x", "nodejs14.x", "nodejs12.x"], + "runtimes": ["nodejs20.x", "nodejs18.x", "nodejs16.x", "nodejs14.x", "nodejs12.x"], "dependency_manager": "npm", "init_location": os.path.join(_templates, "cookiecutter-aws-sam-hello-nodejs"), "build": True, @@ -107,6 +107,7 @@ def get_local_lambda_images_location(mapping, runtime): "java8.al2", "java8", # nodejs runtimes in descending order + "nodejs20.x", "nodejs18.x", "nodejs16.x", "nodejs14.x", @@ -136,6 +137,7 @@ def get_local_lambda_images_location(mapping, runtime): "java11": "amazon/java11-base", "java8.al2": "amazon/java8.al2-base", "java8": "amazon/java8-base", + "nodejs20.x": "amazon/nodejs20.x-base", "nodejs18.x": "amazon/nodejs18.x-base", "nodejs16.x": "amazon/nodejs16.x-base", "nodejs14.x": "amazon/nodejs14.x-base", diff --git a/samcli/local/docker/lambda_build_container.py b/samcli/local/docker/lambda_build_container.py index c2c20e54ad..8a18e45ebd 100644 ---
a/samcli/local/docker/lambda_build_container.py +++ b/samcli/local/docker/lambda_build_container.py @@ -10,6 +10,7 @@ from uuid import uuid4 from samcli.commands._utils.experimental import get_enabled_experimental_flags +from samcli.lib.utils.lambda_builders import patch_runtime from samcli.local.docker.container import Container LOG = logging.getLogger(__name__) @@ -144,7 +145,7 @@ def _make_request( is_building_layer, build_in_source, ): - runtime = runtime.replace(".al2", "") + runtime = patch_runtime(runtime) return json.dumps( { diff --git a/samcli/local/docker/lambda_debug_settings.py b/samcli/local/docker/lambda_debug_settings.py index a5e378dea1..6b8c9bea65 100644 --- a/samcli/local/docker/lambda_debug_settings.py +++ b/samcli/local/docker/lambda_debug_settings.py @@ -163,6 +163,20 @@ def get_debug_settings(debug_port, debug_args_list, _container_env_vars, runtime **_container_env_vars, }, ), + Runtime.nodejs20x.value: lambda: DebugSettings( + entry + + ["/var/lang/bin/node"] + + debug_args_list + + ["--no-lazy", "--expose-gc"] + + ["/var/runtime/index.mjs"], + container_env_vars={ + "NODE_PATH": "/opt/nodejs/node_modules:/opt/nodejs/node20/node_modules:/var/runtime/node_modules:" + "/var/runtime:/var/task", + "NODE_OPTIONS": f"--inspect-brk=0.0.0.0:{str(debug_port)} --max-http-header-size 81920", + "AWS_EXECUTION_ENV": "AWS_Lambda_nodejs20.x", + **_container_env_vars, + }, + ), Runtime.python37.value: lambda: DebugSettings( entry + ["/var/lang/bin/python3.7"] + debug_args_list + ["/var/runtime/bootstrap"], container_env_vars=_container_env_vars, diff --git a/samcli/local/docker/lambda_image.py b/samcli/local/docker/lambda_image.py index cd0c93affd..2cee356a5c 100644 --- a/samcli/local/docker/lambda_image.py +++ b/samcli/local/docker/lambda_image.py @@ -36,6 +36,7 @@ class Runtime(Enum): nodejs14x = "nodejs14.x" nodejs16x = "nodejs16.x" nodejs18x = "nodejs18.x" + nodejs20x = "nodejs20.x" python37 = "python3.7" python38 = "python3.8" python39 = "python3.9" diff --git a/samcli/runtime_config.json b/samcli/runtime_config.json index ec0ffa7db5..340d51f5f0 100644 --- a/samcli/runtime_config.json +++ b/samcli/runtime_config.json @@ -1,3 +1,3 @@ { - "app_template_repo_commit": "c8e165581d9bccbe9cd74ed60e2e82abba3f7723" + "app_template_repo_commit": "36ba1b7994f98a52bfdc81cca58705814a9a2578" } diff --git a/schema/samcli.json b/schema/samcli.json index e1acd5bfad..47eeb6a3ce 100644 --- a/schema/samcli.json +++ b/schema/samcli.json @@ -2104,7 +2104,7 @@ "properties": { "parameters": { "title": "Parameters for the remote invoke command", - "description": "Available parameters for the remote invoke command:\n* stack_name:\nName of the stack to get the resource information from\n* event:\nThe event that will be sent to the resource. The target parameter will depend on the resource type. For instance: 'Payload' for Lambda which can be passed as a JSON string\n* event_file:\nThe file that contains the event that will be sent to the resource.\n* test_event_name:\nName of the remote test event to send to the resource\n* output:\nOutput the results from the command in a given output format. The text format prints a readable AWS API response. 
The json format prints the full AWS API response.\n* parameter:\nAdditional parameters that can be passed to invoke the resource.\nThe following additional parameters can be used to invoke a lambda resource and get a buffered response: InvocationType='Event'|'RequestResponse'|'DryRun', LogType='None'|'Tail', ClientContext='base64-encoded string' Qualifier='string'. The following additional parameters can be used to invoke a lambda resource with response streaming: InvocationType='RequestResponse'|'DryRun', LogType='None'|'Tail', ClientContext='base64-encoded string', Qualifier='string'.\n* beta_features:\nEnable/Disable beta features.\n* debug:\nTurn on debug logging to print debug message generated by AWS SAM CLI and display timestamps.\n* profile:\nSelect a specific profile from your credential file to get AWS credentials.\n* region:\nSet the AWS Region of the service. (e.g. us-east-1)\n* save_params:\nSave the parameters provided via the command line to the configuration file.", + "description": "Available parameters for the remote invoke command:\n* stack_name:\nName of the stack to get the resource information from\n* event:\nThe event that will be sent to the resource. The target parameter will depend on the resource type. For instance: 'Payload' for Lambda which can be passed as a JSON string, 'Input' for Step Functions, 'MessageBody' for SQS, and 'Data' for Kinesis data streams.\n* event_file:\nThe file that contains the event that will be sent to the resource.\n* test_event_name:\nName of the remote test event to send to the resource\n* output:\nOutput the results from the command in a given output format. The text format prints a readable AWS API response. The json format prints the full AWS API response.\n* parameter:\nAdditional parameters that can be passed to invoke the resource.\n\nLambda Function(Buffered stream): The following additional parameters can be used to invoke a lambda resource and get a buffered response: InvocationType='Event'|'RequestResponse'|'DryRun', LogType='None'|'Tail', ClientContext='base64-encoded string' Qualifier='string'.\n\nLambda Function(Response stream): The following additional parameters can be used to invoke a lambda resource with response streaming: InvocationType='RequestResponse'|'DryRun', LogType='None'|'Tail', ClientContext='base64-encoded string', Qualifier='string'.\n\nStep Functions: The following additional parameters can be used to start a state machine execution: name='string', traceHeader='string'\n\nSQS Queue: The following additional parameters can be used to send a message to an SQS queue: DelaySeconds=integer, MessageAttributes='json string', MessageSystemAttributes='json string', MessageDeduplicationId='string', MessageGroupId='string'\n\nKinesis Data Stream: The following additional parameters can be used to put a record in the kinesis data stream: PartitionKey='string', ExplicitHashKey='string', SequenceNumberForOrdering='string', StreamARN='string'\n* beta_features:\nEnable/Disable beta features.\n* debug:\nTurn on debug logging to print debug message generated by AWS SAM CLI and display timestamps.\n* profile:\nSelect a specific profile from your credential file to get AWS credentials.\n* region:\nSet the AWS Region of the service. (e.g. us-east-1)\n* save_params:\nSave the parameters provided via the command line to the configuration file.", "type": "object", "properties": { "stack_name": { @@ -2115,7 +2115,7 @@ "event": { "title": "event", "type": "string", - "description": "The event that will be sent to the resource. 
The target parameter will depend on the resource type. For instance: 'Payload' for Lambda which can be passed as a JSON string" + "description": "The event that will be sent to the resource. The target parameter will depend on the resource type. For instance: 'Payload' for Lambda which can be passed as a JSON string, 'Input' for Step Functions, 'MessageBody' for SQS, and 'Data' for Kinesis data streams." }, "event_file": { "title": "event_file", @@ -2140,7 +2140,7 @@ "parameter": { "title": "parameter", "type": "array", - "description": "Additional parameters that can be passed to invoke the resource.\nThe following additional parameters can be used to invoke a lambda resource and get a buffered response: InvocationType='Event'|'RequestResponse'|'DryRun', LogType='None'|'Tail', ClientContext='base64-encoded string' Qualifier='string'. The following additional parameters can be used to invoke a lambda resource with response streaming: InvocationType='RequestResponse'|'DryRun', LogType='None'|'Tail', ClientContext='base64-encoded string', Qualifier='string'.", + "description": "Additional parameters that can be passed to invoke the resource.\n\nLambda Function(Buffered stream): The following additional parameters can be used to invoke a lambda resource and get a buffered response: InvocationType='Event'|'RequestResponse'|'DryRun', LogType='None'|'Tail', ClientContext='base64-encoded string' Qualifier='string'.\n\nLambda Function(Response stream): The following additional parameters can be used to invoke a lambda resource with response streaming: InvocationType='RequestResponse'|'DryRun', LogType='None'|'Tail', ClientContext='base64-encoded string', Qualifier='string'.\n\nStep Functions: The following additional parameters can be used to start a state machine execution: name='string', traceHeader='string'\n\nSQS Queue: The following additional parameters can be used to send a message to an SQS queue: DelaySeconds=integer, MessageAttributes='json string', MessageSystemAttributes='json string', MessageDeduplicationId='string', MessageGroupId='string'\n\nKinesis Data Stream: The following additional parameters can be used to put a record in the kinesis data stream: PartitionKey='string', ExplicitHashKey='string', SequenceNumberForOrdering='string', StreamARN='string'", "items": { "type": "string" } diff --git a/tests/integration/buildcmd/test_build_cmd.py b/tests/integration/buildcmd/test_build_cmd.py index 1b9648af24..40cbdb1666 100644 --- a/tests/integration/buildcmd/test_build_cmd.py +++ b/tests/integration/buildcmd/test_build_cmd.py @@ -632,10 +632,12 @@ class TestBuildCommand_NodeFunctions(BuildIntegNodeBase): ("nodejs14.x", False), ("nodejs16.x", False), ("nodejs18.x", False), + ("nodejs20.x", False), ("nodejs12.x", "use_container"), ("nodejs14.x", "use_container"), ("nodejs16.x", "use_container"), ("nodejs18.x", "use_container"), + ("nodejs20.x", "use_container"), ] ) def test_building_default_package_json(self, runtime, use_container): @@ -654,6 +656,7 @@ class TestBuildCommand_NodeFunctions_With_External_Manifest(BuildIntegNodeBase): ("nodejs14.x",), ("nodejs16.x",), ("nodejs18.x",), + ("nodejs20.x",), ] ) def test_building_default_package_json(self, runtime): @@ -730,8 +733,10 @@ class TestBuildCommand_EsbuildFunctionProperties(BuildIntegEsbuildBase): [ ("nodejs16.x", "../Esbuild/TypeScript", "app.lambdaHandler", "x86_64"), ("nodejs18.x", "../Esbuild/TypeScript", "app.lambdaHandler", "x86_64"), + ("nodejs20.x", "../Esbuild/TypeScript", "app.lambdaHandler", "x86_64"), ("nodejs16.x", 
"../Esbuild/TypeScript", "nested/function/app.lambdaHandler", "x86_64"), ("nodejs18.x", "../Esbuild/TypeScript", "nested/function/app.lambdaHandler", "x86_64"), + ("nodejs20.x", "../Esbuild/TypeScript", "nested/function/app.lambdaHandler", "x86_64"), ] ) def test_environment_generates_sourcemap(self, runtime, code_uri, handler, architecture): @@ -753,6 +758,7 @@ class TestBuildCommand_NodeFunctions_With_Specified_Architecture(BuildIntegNodeB ("nodejs14.x", False, "x86_64"), ("nodejs16.x", False, "x86_64"), ("nodejs18.x", False, "x86_64"), + ("nodejs20.x", False, "x86_64"), ("nodejs12.x", "use_container", "x86_64"), ("nodejs14.x", "use_container", "x86_64"), ("nodejs16.x", "use_container", "x86_64"), diff --git a/tests/integration/buildcmd/test_build_cmd_arm64.py b/tests/integration/buildcmd/test_build_cmd_arm64.py index dc4bcd7723..e0e080e284 100644 --- a/tests/integration/buildcmd/test_build_cmd_arm64.py +++ b/tests/integration/buildcmd/test_build_cmd_arm64.py @@ -80,6 +80,13 @@ class TestBuildCommand_EsbuildFunctions_With_External_Manifest_arm64(BuildIntegE "main.lambdaHandler", False, ), + ( + "nodejs20.x", + "Esbuild/Node_without_manifest", + {"main.js", "main.js.map"}, + "main.lambdaHandler", + False, + ), ( "nodejs16.x", "Esbuild/TypeScript_without_manifest", @@ -94,6 +101,13 @@ class TestBuildCommand_EsbuildFunctions_With_External_Manifest_arm64(BuildIntegE "app.lambdaHandler", False, ), + ( + "nodejs20.x", + "Esbuild/TypeScript_without_manifest", + {"app.js", "app.js.map"}, + "app.lambdaHandler", + False, + ), ] ) def test_building_default_package_json(self, runtime, code_uri, expected_files, handler, use_container): @@ -109,10 +123,12 @@ class TestBuildCommand_NodeFunctions_With_Specified_Architecture_arm64(BuildInte ("nodejs14.x", False), ("nodejs16.x", False), ("nodejs18.x", False), + ("nodejs20.x", False), ("nodejs12.x", "use_container"), ("nodejs14.x", "use_container"), ("nodejs16.x", "use_container"), ("nodejs18.x", "use_container"), + ("nodejs20.x", "use_container"), ] ) def test_building_default_package_json(self, runtime, use_container): diff --git a/tests/integration/init/test_init_command.py b/tests/integration/init/test_init_command.py index 303c39bc44..0e6a40737f 100644 --- a/tests/integration/init/test_init_command.py +++ b/tests/integration/init/test_init_command.py @@ -838,7 +838,7 @@ def test_interactive_init(self): # 1: AWS Quick Start Templates # 1: Hello World Example # N: Use the most popular runtime and package type? (Python and zip) [y/N] - # 12: nodejs16.x + # 13: nodejs18.x # 1: Zip # 1: Hello World Example # N: Would you like to enable X-Ray tracing on the function(s) in your application? 
[y/N] @@ -847,7 +847,7 @@ def test_interactive_init(self): 1 1 N -12 +13 1 1 N @@ -863,7 +863,7 @@ def test_interactive_init(self): self.assertTrue(expected_output_folder.exists) self.assertTrue(expected_output_folder.is_dir()) self.assertTrue(Path(expected_output_folder, "hello-world").is_dir()) - self.assertTrue(Path(expected_output_folder, "hello-world", "app.js").is_file()) + self.assertTrue(Path(expected_output_folder, "hello-world", "app.mjs").is_file()) def test_interactive_init_default_runtime(self): user_input = """ diff --git a/tests/integration/local/invoke/test_integrations_cli.py b/tests/integration/local/invoke/test_integrations_cli.py index eae82f7063..c1a114b5d4 100644 --- a/tests/integration/local/invoke/test_integrations_cli.py +++ b/tests/integration/local/invoke/test_integrations_cli.py @@ -1205,15 +1205,16 @@ def test_function_exception(self): function_to_invoke="RaiseExceptionFunction", template_path=self.template_path ) - expected_stack_trace = ( - f"[ERROR] Exception: Lambda is raising an exception{os.linesep}" - f"Traceback (most recent call last):{os.linesep}" - f'\xa0\xa0File "/var/task/main.py", line 51, in raise_exception{os.linesep}' - f'\xa0\xa0\xa0\xa0raise Exception("Lambda is raising an exception")' - ) + stack_trace_lines = [ + "[ERROR] Exception: Lambda is raising an exception", + "Traceback (most recent call last):", + '\xa0\xa0File "/var/task/main.py", line 51, in raise_exception', + '\xa0\xa0\xa0\xa0raise Exception("Lambda is raising an exception")', + ] result = run_command(command_list) stderr = result.stderr.decode("utf-8").strip() self.assertEqual(result.process.returncode, 0) - self.assertIn(expected_stack_trace, stderr) + for line in stack_trace_lines: + self.assertIn(line, stderr) diff --git a/tests/integration/remote/invoke/test_remote_invoke.py b/tests/integration/remote/invoke/test_remote_invoke.py index d9af6fb4e3..e2942fea30 100644 --- a/tests/integration/remote/invoke/test_remote_invoke.py +++ b/tests/integration/remote/invoke/test_remote_invoke.py @@ -13,6 +13,8 @@ from pathlib import Path import pytest +SQS_WAIT_TIME_SECONDS = 20 + @pytest.mark.xdist_group(name="sam_remote_invoke_single_lambda_resource") class TestSingleLambdaInvoke(RemoteInvokeIntegBase): @@ -119,7 +121,6 @@ def test_invoke_response_json_output_format(self): self.assertEqual(remote_invoke_result_stdout["StatusCode"], 200) -@skip("Skip remote invoke Step function integration tests") @pytest.mark.xdist_group(name="sam_remote_invoke_sfn_resource_priority") class TestSFNPriorityInvoke(RemoteInvokeIntegBase): template = Path("template-step-function-priority.yaml") @@ -188,7 +189,6 @@ def test_invoke_boto_parameters(self): self.assertEqual([], get_xrays_response["Traces"]) -@skip("Skip remote invoke SQS integration tests") @pytest.mark.xdist_group(name="sam_remote_invoke_sqs_resource_priority") class TestSQSPriorityInvoke(RemoteInvokeIntegBase): template = Path("template-sqs-priority.yaml") @@ -211,12 +211,13 @@ def test_invoke_empty_event_provided(self): self.assertIn("MessageId", remote_invoke_result_stdout) received_message_response = self.sqs_client.receive_message( - QueueUrl=self.sqs_queue_url, MaxNumberOfMessages=1 + QueueUrl=self.sqs_queue_url, MaxNumberOfMessages=1, WaitTimeSeconds=SQS_WAIT_TIME_SECONDS ).get("Messages") self.assertEqual(len(received_message_response), 1) received_message = received_message_response[0] self.assertEqual(expected_message_body, received_message.get("Body")) self.assertEqual(received_message["MessageId"], 
remote_invoke_result_stdout["MessageId"]) + self.sqs_client.delete_message(QueueUrl=self.sqs_queue_url, ReceiptHandle=received_message["ReceiptHandle"]) @parameterized.expand([('{"foo": "bar"}'), ("Hello World"), ("Reminder")]) def test_invoke_with_event_provided(self, event): @@ -232,12 +233,13 @@ def test_invoke_with_event_provided(self, event): self.assertIn("MessageId", remote_invoke_result_stdout) received_message_response = self.sqs_client.receive_message( - QueueUrl=self.sqs_queue_url, MaxNumberOfMessages=1 + QueueUrl=self.sqs_queue_url, MaxNumberOfMessages=1, WaitTimeSeconds=SQS_WAIT_TIME_SECONDS ).get("Messages") self.assertEqual(len(received_message_response), 1) received_message = received_message_response[0] self.assertEqual(event, received_message.get("Body")) self.assertEqual(received_message["MessageId"], remote_invoke_result_stdout["MessageId"]) + self.sqs_client.delete_message(QueueUrl=self.sqs_queue_url, ReceiptHandle=received_message["ReceiptHandle"]) def test_invoke_with_event_file_provided(self): event_file_path = str(self.events_folder_path.joinpath("default_event.json")) @@ -250,7 +252,7 @@ def test_invoke_with_event_file_provided(self): self.assertIn("MessageId", remote_invoke_result_stdout) received_message_response = self.sqs_client.receive_message( - QueueUrl=self.sqs_queue_url, MaxNumberOfMessages=1 + QueueUrl=self.sqs_queue_url, MaxNumberOfMessages=1, WaitTimeSeconds=SQS_WAIT_TIME_SECONDS ).get("Messages") self.assertEqual(len(received_message_response), 1) received_message = received_message_response[0] @@ -258,6 +260,7 @@ def test_invoke_with_event_file_provided(self): with open(event_file_path, "r") as f: expected_message = f.read() self.assertEqual(expected_message, received_message.get("Body")) + self.sqs_client.delete_message(QueueUrl=self.sqs_queue_url, ReceiptHandle=received_message["ReceiptHandle"]) def test_invoke_with_physical_id_provided_as_resource_id(self): event = '{"foo": "bar"}' @@ -273,12 +276,13 @@ def test_invoke_with_physical_id_provided_as_resource_id(self): self.assertIn("MessageId", remote_invoke_result_stdout) received_message_response = self.sqs_client.receive_message( - QueueUrl=self.sqs_queue_url, MaxNumberOfMessages=1 + QueueUrl=self.sqs_queue_url, MaxNumberOfMessages=1, WaitTimeSeconds=SQS_WAIT_TIME_SECONDS ).get("Messages") self.assertEqual(len(received_message_response), 1) received_message = received_message_response[0] self.assertEqual(event, received_message.get("Body")) self.assertEqual(received_message["MessageId"], remote_invoke_result_stdout["MessageId"]) + self.sqs_client.delete_message(QueueUrl=self.sqs_queue_url, ReceiptHandle=received_message["ReceiptHandle"]) def test_invoke_boto_parameters(self): given_message = "Hello World" @@ -310,7 +314,10 @@ def test_invoke_boto_parameters(self): time.sleep(1) # Required as DelaySeconds is set to 1 and message cannot be received before this. 
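A minimal sketch (not part of the diff) of the receive-and-delete pattern the SQS tests above rely on: long-poll with WaitTimeSeconds so the just-sent message has time to become visible, then delete it so it cannot leak into the next test; the queue URL is whatever the stack created.

import boto3

SQS_WAIT_TIME_SECONDS = 20  # long-polling window; SQS allows 0-20 seconds


def receive_and_delete(queue_url: str) -> dict:
    """Long-poll for the message sent by a test, then delete it so later tests start clean."""
    sqs_client = boto3.client("sqs")
    messages = sqs_client.receive_message(
        QueueUrl=queue_url,
        MaxNumberOfMessages=1,
        WaitTimeSeconds=SQS_WAIT_TIME_SECONDS,
    ).get("Messages", [])
    if len(messages) != 1:
        raise AssertionError("expected exactly one message on the queue")
    message = messages[0]
    sqs_client.delete_message(QueueUrl=queue_url, ReceiptHandle=message["ReceiptHandle"])
    return message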
received_message_response = self.sqs_client.receive_message( - QueueUrl=self.sqs_queue_url, MaxNumberOfMessages=1, MessageAttributeNames=["All"] + QueueUrl=self.sqs_queue_url, + MaxNumberOfMessages=1, + MessageAttributeNames=["All"], + WaitTimeSeconds=SQS_WAIT_TIME_SECONDS, ).get("Messages") self.assertEqual(len(received_message_response), 1) @@ -318,9 +325,9 @@ def test_invoke_boto_parameters(self): self.assertEqual(received_message["MessageId"], remote_invoke_result_stdout["MessageId"]) self.assertEqual(received_message.get("Body"), given_message) self.assertEqual(received_message.get("MessageAttributes"), message_attributes) + self.sqs_client.delete_message(QueueUrl=self.sqs_queue_url, ReceiptHandle=received_message["ReceiptHandle"]) -@skip("Skip remote invoke Kinesis integration tests") @pytest.mark.xdist_group(name="sam_remote_invoke_kinesis_resource_priority") class TestKinesisPriorityInvoke(RemoteInvokeIntegBase): template = Path("template-kinesis-priority.yaml") @@ -480,9 +487,6 @@ def test_invoke_empty_event_provided(self): ] ) def test_invoke_with_only_event_provided(self, resource_id, event, expected_response): - if self.stack_resource_summaries[resource_id].resource_type not in self.supported_resources: - pytest.skip("Skip remote invoke Step function integration tests as resource is not supported") - command_list = self.get_command_list( stack_name=self.stack_name, resource_id=resource_id, @@ -567,8 +571,6 @@ def test_lambda_invoke_client_context_boto_parameter(self): def test_sfn_invoke_with_resource_id_provided_as_arn(self): resource_id = "StockPriceGuideStateMachine" - if self.stack_resource_summaries[resource_id].resource_type not in self.supported_resources: - pytest.skip("Skip remote invoke Step function integration tests as resource is not supported") expected_response = {"balance": 320} state_machine_arn = self.stack_resource_summaries[resource_id].physical_resource_id @@ -584,8 +586,6 @@ def test_sfn_invoke_with_resource_id_provided_as_arn(self): def test_sfn_invoke_boto_parameters(self): resource_id = "StockPriceGuideStateMachine" - if self.stack_resource_summaries[resource_id].resource_type not in self.supported_resources: - pytest.skip("Skip remote invoke Step function integration tests as resource is not supported") expected_response = {"balance": 320} name = "custom-execution-name" command_list = self.get_command_list( @@ -603,8 +603,6 @@ def test_sfn_invoke_boto_parameters(self): def test_sfn_invoke_execution_fails(self): resource_id = "StateMachineExecutionFails" - if self.stack_resource_summaries[resource_id].resource_type not in self.supported_resources: - pytest.skip("Skip remote invoke Step function integration tests as resource is not supported") expected_response = "The execution failed due to the error: MockError and cause: Mock Invalid response." 
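The failure test above expects the execution's error and cause to be surfaced. As a hedged sketch (not necessarily how the CLI's Step Functions executor is implemented), one way to start an execution and wait for its final output or failure details with boto3:

import json
import time

import boto3


def run_state_machine(state_machine_arn: str, event: dict) -> str:
    """Start an execution, wait for it to finish, and return its output or raise on failure."""
    sfn_client = boto3.client("stepfunctions")
    execution_arn = sfn_client.start_execution(
        stateMachineArn=state_machine_arn,  # placeholder ARN supplied by the caller
        input=json.dumps(event),
    )["executionArn"]
    while True:
        description = sfn_client.describe_execution(executionArn=execution_arn)
        if description["status"] != "RUNNING":
            break
        time.sleep(1)
    if description["status"] == "SUCCEEDED":
        return description["output"]
    # Pull the error and cause of a failed execution from its history, as asserted above.
    history = sfn_client.get_execution_history(executionArn=execution_arn, reverseOrder=True)
    for history_event in history["events"]:
        failure = history_event.get("executionFailedEventDetails")
        if failure:
            raise RuntimeError(
                f"The execution failed due to the error: {failure['error']}"
                f" and cause: {failure['cause']}"
            )
    raise RuntimeError(f"Execution ended with status {description['status']}")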
command_list = self.get_command_list( stack_name=self.stack_name, @@ -620,8 +618,6 @@ def test_sfn_invoke_execution_fails(self): def test_sqs_invoke_with_resource_id_and_stack_name(self): resource_id = "MySQSQueue" - if self.stack_resource_summaries[resource_id].resource_type not in self.supported_resources: - pytest.skip("Skip remote invoke SQS integration tests as resource is not supported") given_message = "Hello world" sqs_queue_url = self.stack_resource_summaries[resource_id].physical_resource_id @@ -637,18 +633,17 @@ def test_sqs_invoke_with_resource_id_and_stack_name(self): self.assertIn("MD5OfMessageBody", remote_invoke_result_stdout) self.assertIn("MessageId", remote_invoke_result_stdout) - received_message_response = self.sqs_client.receive_message(QueueUrl=sqs_queue_url, MaxNumberOfMessages=1).get( - "Messages" - ) + received_message_response = self.sqs_client.receive_message( + QueueUrl=sqs_queue_url, MaxNumberOfMessages=1, WaitTimeSeconds=SQS_WAIT_TIME_SECONDS + ).get("Messages") self.assertEqual(len(received_message_response), 1) received_message = received_message_response[0] self.assertEqual(given_message, received_message.get("Body")) self.assertEqual(received_message["MessageId"], remote_invoke_result_stdout["MessageId"]) + self.sqs_client.delete_message(QueueUrl=sqs_queue_url, ReceiptHandle=received_message["ReceiptHandle"]) def test_sqs_invoke_with_resource_id_provided_as_arn(self): resource_logical_id = "MySQSQueue" - if self.stack_resource_summaries[resource_logical_id].resource_type not in self.supported_resources: - pytest.skip("Skip remote invoke SQS integration tests as resource is not supported") output = self.cfn_client.describe_stacks(StackName=self.stack_name) sqs_queue_arn = None @@ -657,9 +652,6 @@ def test_sqs_invoke_with_resource_id_provided_as_arn(self): if detail["OutputKey"] == "MySQSQueueArn": sqs_queue_arn = detail["OutputValue"] - if self.stack_resource_summaries[resource_logical_id].resource_type not in self.supported_resources: - pytest.skip("Skip remote invoke SQS integration tests as resource is not supported") - given_message = "Hello world" command_list = self.get_command_list( @@ -673,19 +665,18 @@ def test_sqs_invoke_with_resource_id_provided_as_arn(self): self.assertIn("MD5OfMessageBody", remote_invoke_result_stdout) self.assertIn("MessageId", remote_invoke_result_stdout) - received_message_response = self.sqs_client.receive_message(QueueUrl=sqs_queue_url, MaxNumberOfMessages=1).get( - "Messages" - ) + received_message_response = self.sqs_client.receive_message( + QueueUrl=sqs_queue_url, MaxNumberOfMessages=1, WaitTimeSeconds=SQS_WAIT_TIME_SECONDS + ).get("Messages") self.assertEqual(len(received_message_response), 1) received_message = received_message_response[0] self.assertEqual(given_message, received_message.get("Body")) self.assertEqual(received_message["MessageId"], remote_invoke_result_stdout["MessageId"]) + self.sqs_client.delete_message(QueueUrl=sqs_queue_url, ReceiptHandle=received_message["ReceiptHandle"]) def test_sqs_invoke_boto_parameters_fifo_queue(self): given_message = "Hello World" resource_logical_id = "MyFIFOSQSQueue" - if self.stack_resource_summaries[resource_logical_id].resource_type not in self.supported_resources: - pytest.skip("Skip remote invoke SQS integration tests as resource is not supported") sqs_queue_url = self.stack_resource_summaries[resource_logical_id].physical_resource_id command_list = self.get_command_list( @@ -707,9 +698,9 @@ def test_sqs_invoke_boto_parameters_fifo_queue(self): 
self.assertIn("MessageId", remote_invoke_result_stdout) self.assertIn("ResponseMetadata", remote_invoke_result_stdout) - received_message_response = self.sqs_client.receive_message(QueueUrl=sqs_queue_url, MaxNumberOfMessages=1).get( - "Messages" - ) + received_message_response = self.sqs_client.receive_message( + QueueUrl=sqs_queue_url, MaxNumberOfMessages=1, WaitTimeSeconds=SQS_WAIT_TIME_SECONDS + ).get("Messages") self.assertEqual(len(received_message_response), 1) received_message = received_message_response[0] @@ -723,11 +714,10 @@ def test_sqs_invoke_boto_parameters_fifo_queue(self): remote_invoke_result_stdout = json.loads(remote_invoke_result.stdout.strip().decode()) # Message id will be the same as it got deduped and a new message was not created self.assertEqual(received_message["MessageId"], remote_invoke_result_stdout["MessageId"]) + self.sqs_client.delete_message(QueueUrl=sqs_queue_url, ReceiptHandle=received_message["ReceiptHandle"]) def test_kinesis_invoke_with_resource_id_and_stack_name(self): resource_logical_id = "KinesisStream" - if self.stack_resource_summaries[resource_logical_id].resource_type not in self.supported_resources: - pytest.skip("Skip remote invoke Kinesis integration tests as resource is not supported") event = '{"foo": "bar"}' stream_name = self.stack_resource_summaries[resource_logical_id].physical_resource_id @@ -753,8 +743,6 @@ def test_kinesis_invoke_with_resource_id_and_stack_name(self): def test_kinesis_invoke_with_resource_id_provided_as_arn(self): resource_logical_id = "KinesisStream" - if self.stack_resource_summaries[resource_logical_id].resource_type not in self.supported_resources: - pytest.skip("Skip remote invoke Kinesis integration tests as resource is not supported") stream_name = self.stack_resource_summaries[resource_logical_id].physical_resource_id output = self.cfn_client.describe_stacks(StackName=self.stack_name) @@ -787,8 +775,6 @@ def test_kinesis_invoke_with_boto_parameters(self): # ExplicitHashKey can be used to specify the shard to put the record to if the hashkey provided is # between the start and end range of that shard's hash key range. 
resource_logical_id = "KinesisStream" - if self.stack_resource_summaries[resource_logical_id].resource_type not in self.supported_resources: - pytest.skip("Skip remote invoke Kinesis integration tests as resource is not supported") stream_name = self.stack_resource_summaries[resource_logical_id].physical_resource_id describe_stream_response = self.kinesis_client.describe_stream(StreamName=stream_name) @@ -843,8 +829,6 @@ def setUpClass(cls): ] ) def test_invoke_empty_event_provided(self, resource_id, expected_response): - if self.stack_resource_summaries[resource_id].resource_type not in self.supported_resources: - pytest.skip("Skip remote invoke Step function integration tests as resource is not supported") command_list = self.get_command_list(stack_name=self.stack_name, resource_id=resource_id) remote_invoke_result = run_command(command_list) @@ -859,8 +843,6 @@ def test_invoke_empty_event_provided(self, resource_id, expected_response): ] ) def test_invoke_with_event_provided(self, resource_id, event, expected_response): - if self.stack_resource_summaries[resource_id].resource_type not in self.supported_resources: - pytest.skip("Skip remote invoke Step function integration tests as resource is not supported") command_list = self.get_command_list( stack_name=self.stack_name, resource_id=resource_id, diff --git a/tests/integration/validate/test_validate_command.py b/tests/integration/validate/test_validate_command.py index cc25a87535..01bf663abb 100644 --- a/tests/integration/validate/test_validate_command.py +++ b/tests/integration/validate/test_validate_command.py @@ -140,6 +140,7 @@ def test_lint_supported_runtimes(self): "nodejs14.x", "nodejs16.x", "nodejs18.x", + "nodejs20.x", "provided", "provided.al2", "provided.al2023", diff --git a/tests/unit/commands/init/test_cli.py b/tests/unit/commands/init/test_cli.py index 15c892a114..776bce490b 100644 --- a/tests/unit/commands/init/test_cli.py +++ b/tests/unit/commands/init/test_cli.py @@ -147,7 +147,7 @@ def test_init_cli_node(self, generate_project_patch, git_repo_clone_mock): location=self.location, pt_explicit=self.pt_explicit, package_type=self.package_type, - runtime="nodejs18.x", + runtime="nodejs20.x", architecture=X86_64, base_image=self.base_image, dependency_manager="npm", @@ -165,12 +165,12 @@ def test_init_cli_node(self, generate_project_patch, git_repo_clone_mock): # need to change the location validation check ANY, ZIP, - "nodejs18.x", + "nodejs20.x", "npm", self.output_dir, self.name, True, - {"runtime": "nodejs18.x", "project_name": "testing project", "architectures": {"value": ["x86_64"]}}, + {"runtime": "nodejs20.x", "project_name": "testing project", "architectures": {"value": ["x86_64"]}}, False, False, ) diff --git a/tests/unit/commands/remote/invoke/core/test_command.py b/tests/unit/commands/remote/invoke/core/test_command.py index aa5604156d..874eb28d18 100644 --- a/tests/unit/commands/remote/invoke/core/test_command.py +++ b/tests/unit/commands/remote/invoke/core/test_command.py @@ -36,43 +36,109 @@ def test_get_options_remote_invoke_command_text(self, mock_get_params): expected_output = { "Description": [(cmd.description + cmd.description_addendum, "")], "Examples": [], + "Lambda Functions": [], "Invoke default lambda function with empty event": [ - ("", ""), ("$sam remote invoke --stack-name hello-world\x1b[0m", ""), ], "Invoke default lambda function with event passed as text input": [ - ("", ""), ('$sam remote invoke --stack-name hello-world -e \'{"message": "hello!"}\'\x1b[0m', ""), ], "Invoke named lambda 
function with an event file": [ - ("", ""), ("$sam remote invoke --stack-name hello-world HelloWorldFunction --event-file event.json\x1b[0m", ""), ], - "Invoke lambda function with event as stdin input": [ - ("", ""), + "Invoke function with event as stdin input": [ ('$ echo \'{"message": "hello!"}\' | sam remote invoke HelloWorldFunction --event-file -\x1b[0m', ""), ], - "Invoke lambda function using lambda ARN and get the full AWS API response": [ - ("", ""), + "Invoke function using lambda ARN and get the full AWS API response": [ ( "$sam remote invoke arn:aws:lambda:us-west-2:123456789012:function:my-function -e <> --output json\x1b[0m", "", ), ], - "Asynchronously invoke lambda function with additional boto parameters": [ - ("", ""), + "Asynchronously invoke function with additional boto parameters": [ ( "$sam remote invoke HelloWorldFunction -e <> --parameter InvocationType=Event --parameter Qualifier=MyQualifier\x1b[0m", "", ), ], - "Dry invoke a lambda function to validate parameter values and user/role permissions": [ - ("", ""), + "Dry invoke a function to validate parameter values and user/role permissions": [ ( "$sam remote invoke HelloWorldFunction -e <> --output json --parameter InvocationType=DryRun\x1b[0m", "", ), ], + "Step Functions": [], + "Start execution with event passed as text input": [ + ( + '$sam remote invoke --stack-name mock-stack StockTradingStateMachine -e \'{"message": "hello!"}\'\x1b[0m', + "", + ), + ], + "Start execution using its physical-id or ARN with an execution name parameter": [ + ( + "$sam remote invoke arn:aws:states:us-east-1:123456789012:stateMachine:MySFN -e <> --parameter name=mock-execution-name\x1b[0m", + "", + ), + ], + "Start execution with an event file and get the full AWS API response": [ + ( + "$sam remote invoke --stack-name mock-stack StockTradingStateMachine --event-file event.json --output json\x1b[0m", + "", + ), + ], + "Start execution with event as stdin input and pass the X-ray trace header to the execution": [ + ( + '$ echo \'{"message": "hello!"}\' | $sam remote invoke --stack-name mock-stack StockTradingStateMachine --parameter traceHeader=<>\x1b[0m', + "", + ), + ], + "SQS Queue": [], + "Send a message with the MessageBody passed as event": [ + ("$sam remote invoke --stack-name mock-stack MySQSQueue -e hello-world\x1b[0m", ""), + ], + "Send a message using its physical-id and pass event using --event-file": [ + ( + "$sam remote invoke https://sqs.us-east-1.amazonaws.com/12345678910/QueueName --event-file event.json\x1b[0m", + "", + ), + ], + "Send a message using its ARN and delay the specified message": [ + ( + "$sam remote invoke arn:aws:sqs:region:account_id:queue_name -e hello-world --parameter DelaySeconds=10\x1b[0m", + "", + ), + ], + "Send a message along with message attributes and get the full AWS API response": [ + ( + '$sam remote invoke --stack-name mock-stack MySQSQueue -e hello-world --output json --parameter MessageAttributes=\'{"City": {"DataType": "String", "StringValue": "City"}}\'\x1b[0m', + "", + ), + ], + "Send a message to a FIFO SQS Queue": [ + ( + "$sam remote invoke --stack-name mock-stack MySQSQueue -e hello-world --parameter MessageGroupId=mock-message-group --parameter MessageDeduplicationId=mock-dedup-id\x1b[0m", + "", + ), + ], + "Kinesis Data Stream": [], + "Put a record using the data provided as event": [ + ('$sam remote invoke --stack-name mock-stack MyKinesisStream -e \'{"message": "hello!"}\'\x1b[0m', ""), + ], + "Put a record using its physical-id and pass event using --event-file": [ + 
("$sam remote invoke MyKinesisStreamName --event-file event.json\x1b[0m", ""), + ], + "Put a record using its ARN and override the key hash": [ + ( + "$sam remote invoke arn:aws:kinesis:us-east-2:123456789012:stream/mystream --event-file event.json --parameter ExplicitHashKey=<>\x1b[0m", + "", + ), + ], + "Put a record with a sequence number for ordering with a PartitionKey": [ + ( + "$sam remote invoke MyKinesisStreamName --event hello-world --parameter SequenceNumberForOrdering=<> --parameter PartitionKey=<>\x1b[0m", + "", + ), + ], "Acronyms": [("ARN", "")], "Infrastructure Options": [("", ""), ("--stack-name", ""), ("", "")], "Input Event Options": [("", ""), ("--event", ""), ("", "")], diff --git a/tests/unit/commands/remote/invoke/test_cli.py b/tests/unit/commands/remote/invoke/test_cli.py index bb3f1b04dc..e30b6a3392 100644 --- a/tests/unit/commands/remote/invoke/test_cli.py +++ b/tests/unit/commands/remote/invoke/test_cli.py @@ -9,6 +9,7 @@ NoRegionError, ) from samcli.commands.remote.invoke.cli import do_cli +from samcli.lib.utils.resources import AWS_LAMBDA_FUNCTION, AWS_STEPFUNCTIONS_STATEMACHINE from samcli.lib.remote_invoke.remote_invoke_executors import RemoteInvokeOutputFormat from samcli.lib.remote_invoke.exceptions import ( ErrorBotoApiCallException, @@ -148,6 +149,7 @@ def test_remote_invoke_with_shared_test_event_command( test_event_mock = Mock() test_event_mock.get_event.return_value = "stuff" fn_resource = Mock() + fn_resource.resource_type = AWS_LAMBDA_FUNCTION context_mock.resource_summary = fn_resource context_mock.get_lambda_shared_test_event_provider.return_value = test_event_mock mock_remote_invoke_context.return_value.__enter__.return_value = context_mock @@ -189,6 +191,70 @@ def mock_tracker(name, value): # when track_event is called, append an equivale # Assert metric was emitted self.assertIn(["RemoteInvokeEventType", "remote_event"], tracked_events) + @patch("samcli.lib.remote_invoke.remote_invoke_executors.RemoteInvokeExecutionInfo") + @patch("samcli.lib.utils.boto_utils.get_boto_client_provider_with_config") + @patch("samcli.lib.utils.boto_utils.get_boto_resource_provider_with_config") + @patch("samcli.commands.remote.remote_invoke_context.RemoteInvokeContext") + @patch("samcli.lib.telemetry.event.EventTracker.track_event") + def test_remote_invoke_with_shared_test_and_event_for_non_supported_resource_event_command( + self, + mock_track_event, + mock_remote_invoke_context, + patched_get_boto_resource_provider_with_config, + patched_get_boto_client_provider_with_config, + patched_remote_invoke_execution_info, + ): + given_client_provider = Mock() + patched_get_boto_client_provider_with_config.return_value = given_client_provider + + given_resource_provider = Mock() + patched_get_boto_resource_provider_with_config.return_value = given_resource_provider + + given_remote_invoke_execution_info = Mock() + patched_remote_invoke_execution_info.return_value = given_remote_invoke_execution_info + + context_mock = Mock() + fn_resource = Mock() + fn_resource.resource_type = AWS_STEPFUNCTIONS_STATEMACHINE + context_mock.resource_summary = fn_resource + mock_remote_invoke_context.return_value.__enter__.return_value = context_mock + + given_remote_invoke_result = Mock() + given_remote_invoke_result.is_succeeded.return_value = True + given_remote_invoke_result.log_output = "log_output" + context_mock.run.return_value = given_remote_invoke_result + + tracked_events = [] + + def mock_tracker(name, value): # when track_event is called, append an equivalent event to our list 
+ tracked_events.append([name, value]) + + mock_track_event.side_effect = mock_tracker + + do_cli( + stack_name=self.stack_name, + resource_id=self.resource_id, + event="Hello world", + event_file=None, + parameter={}, + output=RemoteInvokeOutputFormat.TEXT, + test_event_name="event1", + region=self.region, + profile=self.profile, + config_file=self.config_file, + config_env=self.config_env, + ) + + patched_remote_invoke_execution_info.assert_called_with( + payload="Hello world", + payload_file=None, + parameters={}, + output_format=RemoteInvokeOutputFormat.TEXT, + ) + context_mock.run.assert_called_with(remote_invoke_input=given_remote_invoke_execution_info) + # Assert metric was emitted + self.assertIn(["RemoteInvokeEventType", "text"], tracked_events) + @parameterized.expand( [ (InvalideBotoResponseException,), diff --git a/tests/unit/commands/remote/test_remote_invoke_context.py b/tests/unit/commands/remote/test_remote_invoke_context.py index efae5b68b8..0e14adf814 100644 --- a/tests/unit/commands/remote/test_remote_invoke_context.py +++ b/tests/unit/commands/remote/test_remote_invoke_context.py @@ -1,5 +1,6 @@ from unittest import TestCase from unittest.mock import Mock, patch +from parameterized import parameterized from uuid import uuid4 from samcli.commands.remote.exceptions import ( @@ -10,7 +11,11 @@ ResourceNotSupportedForRemoteInvoke, InvalidStackNameProvidedForRemoteInvoke, ) -from samcli.commands.remote.remote_invoke_context import RemoteInvokeContext, SUPPORTED_SERVICES +from samcli.commands.remote.remote_invoke_context import ( + RemoteInvokeContext, + SUPPORTED_SERVICES, + RESOURCES_PRIORITY_ORDER, +) from samcli.lib.utils.cloudformation import CloudFormationResourceSummary @@ -49,10 +54,49 @@ def test_only_stack_name_with_no_resource_should_fail(self, patched_resource_sum with self._get_remote_invoke_context(): pass + def test_supported_services_and_priority_order_services_are_same(self): + self.assertEqual(set(SUPPORTED_SERVICES.values()), set(RESOURCES_PRIORITY_ORDER)) + + @parameterized.expand( + [ + ( + { + "resource1": Mock(resource_type=SUPPORTED_SERVICES["lambda"]), + "resource2": Mock(resource_type=SUPPORTED_SERVICES["lambda"]), + "resource3": Mock(resource_type=SUPPORTED_SERVICES["states"]), + "resource4": Mock(resource_type=SUPPORTED_SERVICES["sqs"]), + "resource5": Mock(resource_type=SUPPORTED_SERVICES["kinesis"]), + }, + ), + ( + { + "resource1": Mock(resource_type=SUPPORTED_SERVICES["states"]), + "resource2": Mock(resource_type=SUPPORTED_SERVICES["states"]), + "resource3": Mock(resource_type=SUPPORTED_SERVICES["sqs"]), + "resource4": Mock(resource_type=SUPPORTED_SERVICES["kinesis"]), + }, + ), + ( + { + "resource1": Mock(resource_type=SUPPORTED_SERVICES["sqs"]), + "resource2": Mock(resource_type=SUPPORTED_SERVICES["sqs"]), + "resource3": Mock(resource_type=SUPPORTED_SERVICES["kinesis"]), + }, + ), + ( + { + "resource1": Mock(resource_type=SUPPORTED_SERVICES["kinesis"]), + "resource2": Mock(resource_type=SUPPORTED_SERVICES["kinesis"]), + }, + ), + ] + ) @patch("samcli.commands.remote.remote_invoke_context.get_resource_summaries") - def test_only_stack_name_with_multiple_resource_should_fail(self, patched_resource_summaries): + def test_only_stack_name_with_multiple_same_resource_type_should_fail( + self, mock_resource_summaries, patched_resource_summaries + ): self.resource_id = None - patched_resource_summaries.return_value = {"resource1": Mock(), "resource2": Mock()} + patched_resource_summaries.return_value = mock_resource_summaries with 
self.assertRaises(AmbiguousResourceForRemoteInvoke): with self._get_remote_invoke_context(): pass @@ -60,11 +104,55 @@ def test_only_stack_name_with_multiple_resource_should_fail(self, patched_resour @patch("samcli.commands.remote.remote_invoke_context.get_resource_summaries") def test_only_stack_name_with_single_resource_should_be_valid(self, patched_resource_summaries): self.resource_id = None - resource_summary = Mock(logical_resource_id=self.resource_id) + resource_summary = Mock(logical_resource_id="mock-resource-id") patched_resource_summaries.return_value = {self.resource_id: resource_summary} with self._get_remote_invoke_context() as remote_invoke_context: self.assertEqual(remote_invoke_context._resource_summary, resource_summary) + @parameterized.expand( + [ + ( + { + "resource2": Mock(resource_type=SUPPORTED_SERVICES["states"], logical_resource_id="resource2"), + "resource1": Mock(resource_type=SUPPORTED_SERVICES["sqs"], logical_resource_id="resource1"), + "resource3": Mock(resource_type=SUPPORTED_SERVICES["lambda"], logical_resource_id="resource3"), + "resource4": Mock(resource_type=SUPPORTED_SERVICES["kinesis"], logical_resource_id="resource4"), + }, + "resource3", + ), + ( + { + "resource1": Mock(resource_type=SUPPORTED_SERVICES["states"], logical_resource_id="resource1"), + "resource2": Mock(resource_type=SUPPORTED_SERVICES["sqs"], logical_resource_id="resource2"), + "resource3": Mock(resource_type=SUPPORTED_SERVICES["kinesis"], logical_resource_id="resource3"), + "resource4": Mock(resource_type=SUPPORTED_SERVICES["sqs"], logical_resource_id="resource4"), + }, + "resource1", + ), + ( + { + "resource1": Mock(resource_type=SUPPORTED_SERVICES["sqs"], logical_resource_id="resource1"), + "resource2": Mock(resource_type=SUPPORTED_SERVICES["kinesis"], logical_resource_id="resource2"), + }, + "resource1", + ), + ( + { + "resource1": Mock(resource_type=SUPPORTED_SERVICES["kinesis"], logical_resource_id="resource1"), + }, + "resource1", + ), + ] + ) + @patch("samcli.commands.remote.remote_invoke_context.get_resource_summaries") + def test_only_stack_name_service_priority_invoke( + self, mock_resource_summaries, expected_logical_id, patched_resource_summaries + ): + self.resource_id = None + patched_resource_summaries.return_value = mock_resource_summaries + with self._get_remote_invoke_context() as remote_invoke_context: + self.assertEqual(remote_invoke_context._resource_summary.logical_resource_id, expected_logical_id) + def test_only_resource_id_unsupported_service_arn_should_fail(self): self.stack_name = None self.resource_id = "arn:aws:unsupported-service:region:account:resource_type:resource_id" @@ -72,9 +160,14 @@ def test_only_resource_id_unsupported_service_arn_should_fail(self): with self._get_remote_invoke_context(): pass - def test_only_resource_id_supported_service_arn_should_be_valid(self): + @parameterized.expand( + [ + ("lambda"), + ("states"), + ] + ) + def test_only_resource_id_supported_service_arn_should_be_valid(self, service): self.stack_name = None - service = "lambda" self.resource_id = f"arn:aws:{service}:region:account:resource_type:{self.resource_id}" with self._get_remote_invoke_context() as remote_invoke_context: self.assertEqual( @@ -84,6 +177,34 @@ def test_only_resource_id_supported_service_arn_should_be_valid(self): ), ) + @patch("samcli.commands.remote.remote_invoke_context.get_queue_url_from_arn") + def test_only_resource_id_supported_service_sqs_arn_should_be_valid(self, patched_get_queue_url_from_arn): + self.stack_name = None + service = "sqs" + 
mock_queue_url = "https://sqs.us-east-1.amazonaws.com/12345678910/{self.resource_id}" + patched_get_queue_url_from_arn.return_value = mock_queue_url + self.resource_id = f"arn:aws:{service}:region:account:resource_type:{self.resource_id}" + with self._get_remote_invoke_context() as remote_invoke_context: + self.assertEqual( + remote_invoke_context._resource_summary, + CloudFormationResourceSummary( + SUPPORTED_SERVICES.get("%s" % service), "%s" % mock_queue_url, "%s" % mock_queue_url + ), + ) + + def test_only_resource_id_supported_service_kinesis_arn_should_be_valid(self): + self.stack_name = None + service = "kinesis" + mock_stream_name = self.resource_id + self.resource_id = f"arn:aws:{service}:region:account:resource_type:{self.resource_id}" + with self._get_remote_invoke_context() as remote_invoke_context: + self.assertEqual( + remote_invoke_context._resource_summary, + CloudFormationResourceSummary( + SUPPORTED_SERVICES.get("%s" % service), "%s" % mock_stream_name, "%s" % mock_stream_name + ), + ) + @patch("samcli.commands.remote.remote_invoke_context.get_resource_summary_from_physical_id") def test_only_resource_id_as_invalid_physical_id_should_fail(self, patched_resource_summary_from_physical_id): self.stack_name = None diff --git a/tests/unit/lib/build_module/test_app_builder.py b/tests/unit/lib/build_module/test_app_builder.py index 800c2d7fe2..bc47c50f06 100644 --- a/tests/unit/lib/build_module/test_app_builder.py +++ b/tests/unit/lib/build_module/test_app_builder.py @@ -2502,16 +2502,23 @@ def tearDown(self): EventTracker.clear_trackers() @parameterized.expand([([],), (["ExpFlag1", "ExpFlag2"],)]) + @patch("samcli.lib.build.app_builder.patch_runtime") @patch("samcli.lib.telemetry.event.EventType.get_accepted_values") @patch("samcli.lib.build.app_builder.LambdaBuilder") @patch("samcli.lib.build.app_builder.get_enabled_experimental_flags") def test_must_use_lambda_builder( - self, experimental_flags, experimental_flags_mock, lambda_builder_mock, event_mock + self, + experimental_flags, + experimental_flags_mock, + lambda_builder_mock, + event_mock, + patch_runtime_mock, ): experimental_flags_mock.return_value = experimental_flags config_mock = Mock() builder_instance_mock = lambda_builder_mock.return_value = Mock() event_mock.return_value = ["runtime"] + patch_runtime_mock.return_value = "runtime" result = self.builder._build_function_in_process( config_mock, @@ -2553,6 +2560,8 @@ def test_must_use_lambda_builder( build_in_source=False, ) + patch_runtime_mock.assert_called_with("runtime") + @patch("samcli.lib.build.app_builder.LambdaBuilder") def test_must_raise_on_error(self, lambda_builder_mock): config_mock = Mock() diff --git a/tests/unit/lib/build_module/test_workflow_config.py b/tests/unit/lib/build_module/test_workflow_config.py index e51ff0ba50..8aa5279764 100644 --- a/tests/unit/lib/build_module/test_workflow_config.py +++ b/tests/unit/lib/build_module/test_workflow_config.py @@ -28,7 +28,7 @@ def test_must_work_for_python(self, runtime): self.assertIn(Event("BuildWorkflowUsed", "python-pip"), EventTracker.get_tracked_events()) self.assertFalse(result.must_mount_with_write_in_container) - @parameterized.expand([("nodejs12.x",), ("nodejs14.x",), ("nodejs16.x",), ("nodejs18.x",)]) + @parameterized.expand([("nodejs12.x",), ("nodejs14.x",), ("nodejs16.x",), ("nodejs18.x",), ("nodejs20.x",)]) def test_must_work_for_nodejs(self, runtime): result = get_workflow_config(runtime, self.code_dir, self.project_dir) self.assertEqual(result.language, "nodejs") diff --git 
a/tests/unit/lib/remote_invoke/test_remote_invoke_executor_factory.py b/tests/unit/lib/remote_invoke/test_remote_invoke_executor_factory.py index 9d0843643a..6ba09f2653 100644 --- a/tests/unit/lib/remote_invoke/test_remote_invoke_executor_factory.py +++ b/tests/unit/lib/remote_invoke/test_remote_invoke_executor_factory.py @@ -4,7 +4,13 @@ from parameterized import parameterized -from samcli.lib.remote_invoke.remote_invoke_executor_factory import RemoteInvokeExecutorFactory, AWS_LAMBDA_FUNCTION +from samcli.lib.remote_invoke.remote_invoke_executor_factory import ( + RemoteInvokeExecutorFactory, + AWS_LAMBDA_FUNCTION, + AWS_SQS_QUEUE, + AWS_KINESIS_STREAM, + AWS_STEPFUNCTIONS_STATEMACHINE, +) from samcli.lib.remote_invoke.remote_invoke_executors import RemoteInvokeOutputFormat @@ -15,8 +21,8 @@ def setUp(self) -> None: def test_supported_resource_executors(self): supported_executors = self.remote_invoke_executor_factory.REMOTE_INVOKE_EXECUTOR_MAPPING - self.assertEqual(1, len(supported_executors)) - expected_executors = {AWS_LAMBDA_FUNCTION} + self.assertEqual(4, len(supported_executors)) + expected_executors = {AWS_LAMBDA_FUNCTION, AWS_SQS_QUEUE, AWS_KINESIS_STREAM, AWS_STEPFUNCTIONS_STATEMACHINE} self.assertEqual(expected_executors, set(supported_executors.keys())) @patch( diff --git a/tests/unit/lib/utils/test_lambda_builders.py b/tests/unit/lib/utils/test_lambda_builders.py new file mode 100644 index 0000000000..d0fc170ea3 --- /dev/null +++ b/tests/unit/lib/utils/test_lambda_builders.py @@ -0,0 +1,22 @@ +from unittest import TestCase +from unittest.mock import Mock + +from parameterized import parameterized + +from samcli.lib.utils import lambda_builders + + +class TestPatchRuntime(TestCase): + @parameterized.expand( + [ + ("nodejs14.x", "nodejs14.x"), + ("java8.al2", "java8"), + ("dotnet6", "dotnet6"), + ("provided", "provided"), + ("provided.al2", "provided"), + ("provided.al2023", "provided"), + ] + ) + def test_patch_runtime(self, runtime, expect): + actual = lambda_builders.patch_runtime(runtime) + self.assertEqual(actual, expect) diff --git a/tests/unit/local/docker/test_lambda_build_container.py b/tests/unit/local/docker/test_lambda_build_container.py index c72e808f13..19f4aefe99 100644 --- a/tests/unit/local/docker/test_lambda_build_container.py +++ b/tests/unit/local/docker/test_lambda_build_container.py @@ -73,9 +73,13 @@ def test_must_init_class(self, get_container_dirs_mock, get_entrypoint_mock, get class TestLambdaBuildContainer_make_request(TestCase): @parameterized.expand(itertools.product([True, False], [[], ["exp1", "exp2"]])) + @patch("samcli.local.docker.lambda_build_container.patch_runtime") @patch("samcli.local.docker.lambda_build_container.get_enabled_experimental_flags") - def test_must_make_request_object_string(self, is_building_layer, experimental_flags, patched_experimental_flags): + def test_must_make_request_object_string( + self, is_building_layer, experimental_flags, patched_experimental_flags, patch_runtime_mock + ): patched_experimental_flags.return_value = experimental_flags + patch_runtime_mock.return_value = "runtime" container_dirs = { "base_dir": "base_dir", @@ -133,6 +137,8 @@ def test_must_make_request_object_string(self, is_building_layer, experimental_f }, ) + patch_runtime_mock.assert_called_with("runtime") + class TestLambdaBuildContainer_get_container_dirs(TestCase): def test_must_return_dirs(self): diff --git a/tests/unit/local/docker/test_lambda_container.py b/tests/unit/local/docker/test_lambda_container.py index bffa8bc1cc..8f3e43d31e 
100644 --- a/tests/unit/local/docker/test_lambda_container.py +++ b/tests/unit/local/docker/test_lambda_container.py @@ -19,6 +19,7 @@ Runtime.nodejs14x.value, Runtime.nodejs16x.value, Runtime.nodejs18x.value, + Runtime.nodejs20x.value, Runtime.python37.value, Runtime.python38.value, Runtime.python39.value, diff --git a/tests/unit/local/docker/test_lambda_debug_settings.py b/tests/unit/local/docker/test_lambda_debug_settings.py index a72a9722a9..e1fff17664 100644 --- a/tests/unit/local/docker/test_lambda_debug_settings.py +++ b/tests/unit/local/docker/test_lambda_debug_settings.py @@ -16,6 +16,7 @@ Runtime.nodejs14x, Runtime.nodejs16x, Runtime.nodejs18x, + Runtime.nodejs20x, Runtime.python37, Runtime.python38, Runtime.python39,