From 54134c56acbc0249433191cb9b2d38af46561cc5 Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Wed, 30 Nov 2022 13:49:37 -0800 Subject: [PATCH 01/26] fix: implicit api warnings (#4452) --- samcli/lib/providers/provider.py | 11 ++++++ samcli/lib/providers/sam_api_provider.py | 2 +- .../commands/local/lib/test_api_provider.py | 38 +++++++++++++++---- 3 files changed, 43 insertions(+), 8 deletions(-) diff --git a/samcli/lib/providers/provider.py b/samcli/lib/providers/provider.py index 8c4cea2a91..8e12772b75 100644 --- a/samcli/lib/providers/provider.py +++ b/samcli/lib/providers/provider.py @@ -525,6 +525,7 @@ def __init__( self.template_dict = template_dict self.metadata = metadata self._resources: Optional[Dict] = None + self._raw_resources: Optional[Dict] = None @property def stack_id(self) -> str: @@ -561,6 +562,16 @@ def resources(self) -> Dict: self._resources = cast(Dict, processed_template_dict.get("Resources", {})) return self._resources + @property + def raw_resources(self) -> Dict: + """ + Return the resources dictionary without running SAM Transform + """ + if self._raw_resources is not None: + return self._raw_resources + self._raw_resources = cast(Dict, self.template_dict.get("Resources", {})) + return self._raw_resources + def get_output_template_path(self, build_root: str) -> str: """ Return the path of the template yaml file output by "sam build." 
diff --git a/samcli/lib/providers/sam_api_provider.py b/samcli/lib/providers/sam_api_provider.py index 5493baedb2..23e08eabd1 100644 --- a/samcli/lib/providers/sam_api_provider.py +++ b/samcli/lib/providers/sam_api_provider.py @@ -332,7 +332,7 @@ def _get_route_stack_depth(route: Route) -> int: @staticmethod def check_implicit_api_resource_ids(stacks: List[Stack]) -> None: for stack in stacks: - for logical_id in stack.resources: + for logical_id in stack.raw_resources: if logical_id in ( SamApiProvider.IMPLICIT_API_RESOURCE_ID, SamApiProvider.IMPLICIT_HTTP_API_RESOURCE_ID, diff --git a/tests/unit/commands/local/lib/test_api_provider.py b/tests/unit/commands/local/lib/test_api_provider.py index f671f77bb7..246c03f673 100644 --- a/tests/unit/commands/local/lib/test_api_provider.py +++ b/tests/unit/commands/local/lib/test_api_provider.py @@ -5,7 +5,7 @@ from parameterized import parameterized -from samcli.lib.providers.provider import Api +from samcli.lib.providers.provider import Api, Stack from samcli.lib.providers.api_provider import ApiProvider from samcli.lib.providers.sam_api_provider import SamApiProvider from samcli.lib.providers.cfn_api_provider import CfnApiProvider @@ -242,21 +242,45 @@ def test_apis_in_child_stack_overridden_by_apis_in_parents_within_implicit_or_ex class TestApiProvider_check_implicit_api_resource_ids(TestCase): + @patch("samcli.lib.providers.sam_base_provider.SamBaseProvider.get_template") @patch("samcli.lib.providers.sam_api_provider.LOG.warning") - def test_check_implicit_api_resource_ids_false(self, warning_mock): - SamApiProvider.check_implicit_api_resource_ids([Mock(resources={"Api1": {"Properties": Mock()}})]) + def test_check_implicit_api_resource_ids_false(self, warning_mock, get_template_mock): + SamApiProvider.check_implicit_api_resource_ids( + [Stack("", "stack", "location", None, {"Resources": {"Api1": {"Properties": Mock()}}})] + ) warning_mock.assert_not_called() + get_template_mock.assert_not_called() + 
@patch("samcli.lib.providers.sam_base_provider.SamBaseProvider.get_template") @patch("samcli.lib.providers.sam_api_provider.LOG.warning") - def test_check_implicit_api_resource_ids_rest_api(self, warning_mock): + def test_check_implicit_api_resource_ids_rest_api(self, warning_mock, get_template_mock): SamApiProvider.check_implicit_api_resource_ids( - [Mock(resources={"Api1": {"Properties": Mock()}, "ServerlessRestApi": {"Properties": Mock()}})] + [ + Stack( + "", + "stack", + "location", + None, + {"Resources": {"Api1": {"Properties": Mock()}, "ServerlessRestApi": {"Properties": Mock()}}}, + ) + ] ) warning_mock.assert_called_once() + get_template_mock.assert_not_called() + @patch("samcli.lib.providers.sam_base_provider.SamBaseProvider.get_template") @patch("samcli.lib.providers.sam_api_provider.LOG.warning") - def test_check_implicit_api_resource_ids_http_api(self, warning_mock): + def test_check_implicit_api_resource_ids_http_api(self, warning_mock, get_template_mock): SamApiProvider.check_implicit_api_resource_ids( - [Mock(resources={"Api1": {"Properties": Mock()}, "ServerlessHttpApi": {"Properties": Mock()}})] + [ + Stack( + "", + "stack", + "location", + None, + {"Resources": {"Api1": {"Properties": Mock()}, "ServerlessHttpApi": {"Properties": Mock()}}}, + ) + ] ) warning_mock.assert_called_once() + get_template_mock.assert_not_called() From d0f99f2f0160e53f04f08949ac3f779eb6f61f79 Mon Sep 17 00:00:00 2001 From: Harleenijjar <116675218+Harleenijjar@users.noreply.github.com> Date: Fri, 2 Dec 2022 14:55:04 -0500 Subject: [PATCH 02/26] Added Next Command Suggestions (#4441) * Added Next Command Suggestions * Update cli.py * Update cli.py * Update cli.py * Update cli.py * Update cli.py * Update cli.py * Update cli.py * Update cli.py * Update cli.py * Update cli.py * Black reformatting * Added common template to print next command suggestions * Made next commands template with requested PR changes * Fixed command and description order * Changed command suggestion 
appearance to match existing appearance, added real test suggestions to assert against in unit test * Improved readability and made command suggestions mimic existing appearance * simplified logic implementation * fixed description * made typing stronger * Added new line at the end Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> --- samcli/commands/_utils/options.py | 17 +++++++++++++++++ samcli/commands/init/interactive_init_flow.py | 18 ++++++++++-------- samcli/commands/local/start_api/cli.py | 9 +++++++++ samcli/commands/local/start_lambda/cli.py | 9 +++++++++ samcli/commands/logs/command.py | 18 +++++++++++------- tests/unit/commands/_utils/test_options.py | 19 +++++++++++++++++++ 6 files changed, 75 insertions(+), 15 deletions(-) diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 2c091d60b7..cd7ea9c59d 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -5,6 +5,7 @@ import os import logging from functools import partial +from typing import List, Tuple import click from click.types import FuncParamType @@ -788,3 +789,19 @@ def _space_separated_list_func_type(value): _space_separated_list_func_type.__name__ = "LIST" + + +def generate_next_command_recommendation(command_tuples: List[Tuple[str, str]]) -> str: + """ + Generates a message containing some suggested commands to run next. 
+ + :type command_tuples: list[tuple] + :param command_tuples: list of tuples containing the command with their respective description + """ + template = """ +Commands you can use next +========================= +{} +""" + command_list_txt = "\n".join(f"[*] {description}: {command}" for description, command in command_tuples) + return template.format(command_list_txt) diff --git a/samcli/commands/init/interactive_init_flow.py b/samcli/commands/init/interactive_init_flow.py index e55ba72158..36ffcefe92 100644 --- a/samcli/commands/init/interactive_init_flow.py +++ b/samcli/commands/init/interactive_init_flow.py @@ -31,6 +31,7 @@ from samcli.commands.init.init_templates import InitTemplates, InvalidInitTemplateError from samcli.lib.utils.osutils import remove from samcli.lib.utils.packagetype import IMAGE, ZIP +from samcli.commands._utils.options import generate_next_command_recommendation LOG = logging.getLogger(__name__) @@ -238,14 +239,15 @@ def _generate_from_use_case( ) click.echo(summary_msg) - next_commands_msg = f""" - Commands you can use next - ========================= - [*] Create pipeline: cd {name} && sam pipeline init --bootstrap - [*] Validate SAM template: cd {name} && sam validate - [*] Test Function in the Cloud: cd {name} && sam sync --stack-name {{stack-name}} --watch - """ - click.secho(next_commands_msg, fg="yellow") + command_suggestions = generate_next_command_recommendation( + [ + ("Create pipeline", f"cd {name} && sam pipeline init --bootstrap"), + ("Validate SAM template", f"cd {name} && sam validate"), + ("Test Function in the Cloud", f"cd {name} && sam sync --stack-name {{stack-name}} --watch"), + ] + ) + click.secho(command_suggestions, fg="yellow") + do_generate( location, package_type, diff --git a/samcli/commands/local/start_api/cli.py b/samcli/commands/local/start_api/cli.py index 44475ea203..dac78f57a0 100644 --- a/samcli/commands/local/start_api/cli.py +++ b/samcli/commands/local/start_api/cli.py @@ -18,6 +18,7 @@ from 
samcli.lib.utils.version_checker import check_newer_version from samcli.local.docker.exceptions import ContainerNotStartableException from samcli.commands._utils.option_value_processor import process_image_options +from samcli.commands._utils.options import generate_next_command_recommendation LOG = logging.getLogger(__name__) @@ -191,6 +192,14 @@ def do_cli( # pylint: disable=R0914 service = LocalApiService(lambda_invoke_context=invoke_context, port=port, host=host, static_dir=static_dir) service.start() + command_suggestions = generate_next_command_recommendation( + [ + ("Validate SAM template", "sam validate"), + ("Test Function in the Cloud", "sam sync --stack-name {{stack-name}} --watch"), + ("Deploy", "sam deploy --guided"), + ] + ) + click.secho(command_suggestions, fg="yellow") except NoApisDefined as ex: raise UserException( diff --git a/samcli/commands/local/start_lambda/cli.py b/samcli/commands/local/start_lambda/cli.py index fbc665b9a6..0f3f8197c3 100644 --- a/samcli/commands/local/start_lambda/cli.py +++ b/samcli/commands/local/start_lambda/cli.py @@ -20,6 +20,7 @@ from samcli.lib.utils.version_checker import check_newer_version from samcli.local.docker.exceptions import ContainerNotStartableException from samcli.commands._utils.option_value_processor import process_image_options +from samcli.commands._utils.options import generate_next_command_recommendation LOG = logging.getLogger(__name__) @@ -218,6 +219,14 @@ def do_cli( # pylint: disable=R0914 service = LocalLambdaService(lambda_invoke_context=invoke_context, port=port, host=host) service.start() + command_suggestions = generate_next_command_recommendation( + [ + ("Validate SAM template", "sam validate"), + ("Test Function in the Cloud", "sam sync --stack-name {{stack-name}} --watch"), + ("Deploy", "sam deploy --guided"), + ] + ) + click.secho(command_suggestions, fg="yellow") except ( InvalidSamDocumentException, diff --git a/samcli/commands/logs/command.py b/samcli/commands/logs/command.py index 
991fd0eb86..82347eb3a6 100644 --- a/samcli/commands/logs/command.py +++ b/samcli/commands/logs/command.py @@ -16,6 +16,7 @@ from samcli.lib.telemetry.metric import track_command from samcli.commands._utils.command_exception_handler import command_exception_handler from samcli.lib.utils.version_checker import check_newer_version +from samcli.commands._utils.options import generate_next_command_recommendation LOG = logging.getLogger(__name__) @@ -184,10 +185,13 @@ def do_cli( puller.load_time_period(sanitized_start_time, sanitized_end_time, filter_pattern) if tailing: - next_commands_msg = f""" - Commands you can use next - ========================= - [*] Tail Logs from All Support Resources and X-Ray: sam logs --stack-name {stack_name} --tail --include-traces - [*] Tail X-Ray Information: sam traces --tail - """ - click.secho(next_commands_msg, fg="yellow") + command_suggestions = generate_next_command_recommendation( + [ + ( + "Tail Logs from All Support Resources and X-Ray", + f"sam logs --stack-name {stack_name} --tail --include-traces", + ), + ("Tail X-Ray Information", "sam traces --tail"), + ] + ) + click.secho(command_suggestions, fg="yellow") diff --git a/tests/unit/commands/_utils/test_options.py b/tests/unit/commands/_utils/test_options.py index d05771b74a..dddd06bcd3 100644 --- a/tests/unit/commands/_utils/test_options.py +++ b/tests/unit/commands/_utils/test_options.py @@ -22,6 +22,7 @@ image_repositories_callback, _space_separated_list_func_type, skip_prepare_infra_callback, + generate_next_command_recommendation, ) from samcli.commands._utils.parameterized_option import parameterized_option from samcli.commands.package.exceptions import PackageResolveS3AndS3SetError, PackageResolveS3AndS3NotSetError @@ -527,3 +528,21 @@ def test_skip_without_hook_package(self): skip_prepare_infra_callback(ctx_mock, param_mock, True) self.assertEqual(str(ex.exception), "Missing option --hook-name") + + +class TestNextCommandSuggestions(TestCase): + def 
test_generate_next_command_recommendation(self): + listOfTuples = [ + ("Validate SAM template", "sam validate"), + ("Test Function in the Cloud", "sam sync --stack-name {{stack-name}} --watch"), + ("Deploy", "sam deploy --guided"), + ] + output = generate_next_command_recommendation(listOfTuples) + expectedOutput = """ +Commands you can use next +========================= +[*] Validate SAM template: sam validate +[*] Test Function in the Cloud: sam sync --stack-name {{stack-name}} --watch +[*] Deploy: sam deploy --guided +""" + self.assertEqual(output, expectedOutput) From 7a6fee612994f27ae257ade335be273f05b235db Mon Sep 17 00:00:00 2001 From: Xia Zhao <78883180+xazhao@users.noreply.github.com> Date: Mon, 5 Dec 2022 16:47:12 -0800 Subject: [PATCH 03/26] Add error messages to conflict options (#4350) * Add error messages to conflict options * make black * Use Mutex options * Remove extra pylint rule and revise help text * add more test cases and update help text Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> --- samcli/commands/pipeline/bootstrap/cli.py | 31 ++-- .../commands/pipeline/bootstrap/test_cli.py | 139 ++++++++++-------- 2 files changed, 98 insertions(+), 72 deletions(-) diff --git a/samcli/commands/pipeline/bootstrap/cli.py b/samcli/commands/pipeline/bootstrap/cli.py index b80aa44127..9f97464b3d 100644 --- a/samcli/commands/pipeline/bootstrap/cli.py +++ b/samcli/commands/pipeline/bootstrap/cli.py @@ -9,6 +9,7 @@ from samcli.cli.cli_config_file import configuration_option, TomlProvider from samcli.cli.main import pass_context, common_options, aws_creds_options, print_cmdline_args +from samcli.commands._utils.click_mutex import ClickMutex from samcli.commands.pipeline.bootstrap.pipeline_oidc_provider import PipelineOidcProvider from samcli.lib.config.samconfig import SamConfig @@ -114,12 +115,16 @@ "If there is no organization enter the 
Username of the repository owner instead " "Only used if using GitHub Actions OIDC for user permissions", required=False, + cls=ClickMutex, + incompatible_params=["bitbucket_repo_uuid", "gitlab_group", "gitlab_project"], ) @click.option( "--github-repo", help="The name of the GitHub Repository that deployments will occur from. " "Only used if using GitHub Actions OIDC for permissions", required=False, + cls=ClickMutex, + incompatible_params=["bitbucket_repo_uuid", "gitlab_group", "gitlab_project"], ) @click.option( "--deployment-branch", @@ -130,25 +135,37 @@ @click.option( "--oidc-provider", help="The name of the CI/CD system that will be used for OIDC permissions " - "we currently only support GitLab, GitHub, and Bitbucket", + "Currently supported CI/CD systems are : GitLab, GitHub and Bitbucket", type=click.Choice([GITHUB_ACTIONS, GITLAB, BITBUCKET]), required=False, + cls=ClickMutex, + required_param_lists=[ + ["gitlab_group", "gitlab_project"], + ["github_org", "github_repo"], + ["bitbucket_repo_uuid"], + ], ) @click.option( "--gitlab-group", help="The GitLab group that the repository belongs to. Only used if using GitLab OIDC for permissions", required=False, + cls=ClickMutex, + incompatible_params=["bitbucket_repo_uuid", "github_org", "github_repo"], ) @click.option( "--gitlab-project", help="The GitLab project name. Only used if using GitLab OIDC for permissions", required=False, + cls=ClickMutex, + incompatible_params=["bitbucket_repo_uuid", "github_org", "github_repo"], ) @click.option( "--bitbucket-repo-uuid", help="The UUID of the Bitbucket repository. Only used if using Bitbucket OIDC for permissions. 
" "Found at https://bitbucket.org///admin/addon/admin/pipelines/openid-connect", required=False, + cls=ClickMutex, + incompatible_params=["gitlab_group", "gitlab_project", "github_org", "github_repo"], ) @click.option( "--cicd-provider", @@ -265,15 +282,15 @@ def do_cli( Stage, ) + config_parameters = _load_config_values() if not pipeline_user_arn and not permissions_provider == OPEN_ID_CONNECT: - pipeline_user_arn = _load_saved_pipeline_user_arn() + pipeline_user_arn = config_parameters.get("pipeline_user") enable_oidc_option = False if not cicd_provider or cicd_provider in OIDC_SUPPORTED_PROVIDER: enable_oidc_option = True oidc_provider = cicd_provider - config_parameters = _load_config_values() oidc_config = OidcConfig( oidc_client_id=oidc_client_id, oidc_provider=oidc_provider, oidc_provider_url=oidc_provider_url ) @@ -448,14 +465,6 @@ def _get_pipeline_oidc_provider( raise click.UsageError("Missing required parameter '--oidc-provider'") -def _load_saved_pipeline_user_arn() -> Optional[str]: - samconfig: SamConfig = SamConfig(config_dir=PIPELINE_CONFIG_DIR, filename=PIPELINE_CONFIG_FILENAME) - if not samconfig.exists(): - return None - config: Dict[str, str] = samconfig.get_all(cmd_names=_get_bootstrap_command_names(), section="parameters") - return config.get("pipeline_user") - - def _load_config_values() -> Dict[str, str]: samconfig: SamConfig = SamConfig(config_dir=PIPELINE_CONFIG_DIR, filename=PIPELINE_CONFIG_FILENAME) if not samconfig.exists(): diff --git a/tests/unit/commands/pipeline/bootstrap/test_cli.py b/tests/unit/commands/pipeline/bootstrap/test_cli.py index 7c043d098e..6aebc97eae 100644 --- a/tests/unit/commands/pipeline/bootstrap/test_cli.py +++ b/tests/unit/commands/pipeline/bootstrap/test_cli.py @@ -1,18 +1,18 @@ from unittest import TestCase from unittest.mock import patch, Mock +from parameterized import parameterized import click from click.testing import CliRunner from samcli.commands.pipeline.bootstrap.cli import ( - 
_load_saved_pipeline_user_arn, _load_config_values, PIPELINE_CONFIG_FILENAME, PIPELINE_CONFIG_DIR, ) from samcli.commands.pipeline.bootstrap.cli import cli as bootstrap_cmd from samcli.commands.pipeline.bootstrap.cli import do_cli as bootstrap_cli -from samcli.commands.pipeline.bootstrap.guided_context import GITHUB_ACTIONS, GITLAB +from samcli.commands.pipeline.bootstrap.guided_context import BITBUCKET, GITHUB_ACTIONS, GITLAB from samcli.commands.pipeline.bootstrap.oidc_config import ( GitHubOidcConfig, OidcConfig, @@ -69,11 +69,11 @@ def setUp(self) -> None: "oidc_provider_url": ANY_OIDC_PROVIDER_URL, "oidc_client_id": ANY_OIDC_CLIENT_ID, "oidc_provider": GITHUB_ACTIONS, - "github_org": ANY_GITHUB_ORG, - "github_repo": ANY_GITHUB_REPO, - "gitlab_project": ANY_GITLAB_PROJECT, - "gitlab_group": ANY_GITLAB_GROUP, - "bitbucket_repo_uuid": ANY_BITBUCKET_REPO_UUID, + "github_org": None, + "github_repo": None, + "gitlab_project": None, + "gitlab_group": None, + "bitbucket_repo_uuid": None, "deployment_branch": ANY_DEPLOYMENT_BRANCH, "cicd_provider": ANY_CICD_PROVIDER, } @@ -142,19 +142,15 @@ def test_bootstrap_command_with_different_arguments_combination(self, do_cli_moc self.assertEqual(kwargs["artifacts_bucket_arn"], "bucketARN") @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") - @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") @patch("samcli.lib.pipeline.bootstrap.stage.Stage") @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") - def test_bootstrapping_normal_interactive_flow( - self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock - ): + def test_bootstrapping_normal_interactive_flow(self, guided_context_mock, environment_mock, get_command_names_mock): # setup gc_instance = Mock() gc_instance.permissions_provider = "iam" guided_context_mock.return_value = gc_instance environment_instance = Mock() environment_mock.return_value = 
environment_instance - load_saved_pipeline_user_arn_mock.return_value = ANY_PIPELINE_USER_ARN self.cli_context["interactive"] = True self.cli_context["pipeline_user_arn"] = None get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES @@ -163,7 +159,6 @@ def test_bootstrapping_normal_interactive_flow( bootstrap_cli(**self.cli_context) # verify - load_saved_pipeline_user_arn_mock.assert_called_once() gc_instance.run.assert_called_once() environment_instance.bootstrap.assert_called_once_with(confirm_changeset=True) environment_instance.print_resources_summary.assert_called_once() @@ -214,6 +209,40 @@ def test_bootstrapping_oidc_non_interactive_fails_if_missing_github_parameters(s environment_instance.print_resources_summary.assert_not_called() environment_instance.save_config_safe.assert_not_called() + @parameterized.expand( + [ + ("any_github_org", None, "any_gitlab_group", None, None), + (None, "any_github_repo", None, "any_gitlab_project", None), + (None, "any_github_repo", None, None, "bitbucket_repo_uuid"), + (None, None, None, "any_gitlab_group", "bitbucket_repo_uuid"), + ] + ) + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + def test_bootstrapping_oidc_fails_conflict_parameters( + self, github_org, github_repo, gitlab_group, gitlab_project, bitbucket_repo_uuid, environment_mock + ): + # setup + environment_instance = Mock() + environment_mock.return_value = environment_instance + self.cli_context["interactive"] = False + self.cli_context["permissions_provider"] = "oidc" + self.cli_context["oidc_provider"] = GITHUB_ACTIONS + self.cli_context["github_org"] = github_org + self.cli_context["github_repo"] = github_repo + self.cli_context["gitlab_group"] = gitlab_group + self.cli_context["gitlab_project"] = gitlab_project + self.cli_context["bitbucket_repo_uuid"] = bitbucket_repo_uuid + self.cli_context["deployment_branch"] = None + + # trigger + with self.assertRaises(click.UsageError): + bootstrap_cli(**self.cli_context) + + # verify + 
environment_instance.bootstrap.assert_not_called() + environment_instance.print_resources_summary.assert_not_called() + environment_instance.save_config_safe.assert_not_called() + @patch("samcli.commands.pipeline.bootstrap.pipeline_oidc_provider") @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") @patch("samcli.lib.pipeline.bootstrap.stage.Stage") @@ -231,11 +260,10 @@ def test_bootstrapping_oidc_interactive_flow( guided_context_mock.return_value = gc_instance environment_instance = Mock() environment_mock.return_value = environment_instance - """ oidc_config_instance = Mock() - oidc_config_instance.oidc_provider = "github-actions" - oidc_config_mock.return_value = oidc_config_instance""" self.cli_context["interactive"] = True self.cli_context["permissions_provider"] = "oidc" + self.cli_context["github_org"] = ANY_GITHUB_ORG + self.cli_context["github_repo"] = ANY_GITHUB_REPO get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES # trigger @@ -270,7 +298,9 @@ def test_bootstrapping_oidc_interactive_flow_gitlab( environment_mock.return_value = environment_instance self.cli_context["interactive"] = True self.cli_context["permissions_provider"] = "oidc" - self.cli_context["oidc_provider"] = "gitlab" + self.cli_context["oidc_provider"] = GITLAB + self.cli_context["gitlab_group"] = ANY_GITLAB_GROUP + self.cli_context["gitlab_project"] = ANY_GITLAB_PROJECT get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES # trigger @@ -305,7 +335,8 @@ def test_bootstrapping_oidc_interactive_flow_bitbucket( environment_mock.return_value = environment_instance self.cli_context["interactive"] = True self.cli_context["permissions_provider"] = "oidc" - self.cli_context["oidc_provider"] = "bitbucket-pipelines" + self.cli_context["oidc_provider"] = BITBUCKET + self.cli_context["bitbucket_repo_uuid"] = ANY_BITBUCKET_REPO_UUID get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES # trigger @@ -322,25 +353,15 @@ def 
test_bootstrapping_oidc_interactive_flow_bitbucket( ) @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") - @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") - @patch("samcli.lib.pipeline.bootstrap.stage.Stage") - @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") - def test_bootstrap_will_not_try_loading_pipeline_user_if_already_provided( - self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock - ): - bootstrap_cli(**self.cli_context) - load_saved_pipeline_user_arn_mock.assert_not_called() - - @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") - @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli._load_config_values") @patch("samcli.lib.pipeline.bootstrap.stage.Stage") @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") def test_bootstrap_will_try_loading_pipeline_user_if_not_provided( - self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + self, guided_context_mock, environment_mock, load_config_values_mock, get_command_names_mock ): self.cli_context["pipeline_user_arn"] = None bootstrap_cli(**self.cli_context) - load_saved_pipeline_user_arn_mock.assert_called_once() + load_config_values_mock.assert_called_once() @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") @patch("samcli.commands.pipeline.bootstrap.cli._load_config_values") @@ -354,11 +375,11 @@ def test_bootstrap_will_try_loading_oidc_values_if_not_provided( load_saved_oidc_values_arn_mock.assert_called_once() @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") - @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli._load_config_values") 
@patch("samcli.lib.pipeline.bootstrap.stage.Stage") @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") def test_stage_configuration_name_is_required_to_be_provided_in_case_of_non_interactive_mode( - self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + self, guided_context_mock, environment_mock, load_config_values_mock, get_command_names_mock ): self.cli_context["interactive"] = False self.cli_context["stage_configuration_name"] = None @@ -366,22 +387,22 @@ def test_stage_configuration_name_is_required_to_be_provided_in_case_of_non_inte bootstrap_cli(**self.cli_context) @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") - @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli._load_config_values") @patch("samcli.lib.pipeline.bootstrap.stage.Stage") @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") def test_stage_configuration_name_is_not_required_to_be_provided_in_case_of_interactive_mode( - self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + self, guided_context_mock, environment_mock, load_config_values_mock, get_command_names_mock ): self.cli_context["interactive"] = True self.cli_context["stage_configuration_name"] = None bootstrap_cli(**self.cli_context) # No exception is thrown @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") - @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli._load_config_values") @patch("samcli.lib.pipeline.bootstrap.stage.Stage") @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") def test_guided_context_will_be_enabled_or_disabled_based_on_the_interactive_mode( - self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock 
+ self, guided_context_mock, environment_mock, load_config_values_mock, get_command_names_mock ): gc_instance = Mock() guided_context_mock.return_value = gc_instance @@ -393,11 +414,11 @@ def test_guided_context_will_be_enabled_or_disabled_based_on_the_interactive_mod gc_instance.run.assert_called_once() @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") - @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli._load_config_values") @patch("samcli.lib.pipeline.bootstrap.stage.Stage") @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") def test_bootstrapping_will_confirm_before_creating_the_resources_unless_the_user_choose_not_to( - self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + self, guided_context_mock, environment_mock, load_config_values_mock, get_command_names_mock ): environment_instance = Mock() environment_mock.return_value = environment_instance @@ -411,9 +432,7 @@ def test_bootstrapping_will_confirm_before_creating_the_resources_unless_the_use @patch("samcli.commands.pipeline.bootstrap.cli.SamConfig") @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") - def test_load_saved_pipeline_user_arn_will_read_from_the_correct_file( - self, get_command_names_mock, sam_config_mock - ): + def test_load_config_values_will_read_from_the_correct_file(self, get_command_names_mock, sam_config_mock): # setup get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES sam_config_instance_mock = Mock() @@ -421,14 +440,14 @@ def test_load_saved_pipeline_user_arn_will_read_from_the_correct_file( sam_config_instance_mock.exists.return_value = False # trigger - _load_saved_pipeline_user_arn() + _load_config_values() # verify sam_config_mock.assert_called_once_with(config_dir=PIPELINE_CONFIG_DIR, filename=PIPELINE_CONFIG_FILENAME) 
@patch("samcli.commands.pipeline.bootstrap.cli.SamConfig") @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") - def test_load_saved_pipeline_user_arn_will_return_non_if_the_pipeline_toml_file_is_not_found( + def test_load_config_values_will_return_non_if_the_pipeline_toml_file_is_not_found( self, get_command_names_mock, sam_config_mock ): # setup @@ -438,14 +457,14 @@ def test_load_saved_pipeline_user_arn_will_return_non_if_the_pipeline_toml_file_ sam_config_instance_mock.exists.return_value = False # trigger - pipeline_user_arn = _load_saved_pipeline_user_arn() + config_values = _load_config_values() # verify - self.assertIsNone(pipeline_user_arn) + self.assertEqual(config_values, {}) @patch("samcli.commands.pipeline.bootstrap.cli.SamConfig") @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") - def test_load_saved_pipeline_user_arn_will_return_non_if_the_pipeline_toml_file_does_not_contain_pipeline_user( + def test_load_config_values_will_return_no_pipeline_user_if_the_pipeline_toml_file_does_not_contain_pipeline_user( self, get_command_names_mock, sam_config_mock ): # setup @@ -456,16 +475,14 @@ def test_load_saved_pipeline_user_arn_will_return_non_if_the_pipeline_toml_file_ sam_config_instance_mock.get_all.return_value = {"non-pipeline_user-key": "any_value"} # trigger - pipeline_user_arn = _load_saved_pipeline_user_arn() + pipeline_user_arn = _load_config_values().get("pipeline_user") # verify self.assertIsNone(pipeline_user_arn) @patch("samcli.commands.pipeline.bootstrap.cli.SamConfig") @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") - def test_load_saved_pipeline_user_arn_returns_the_pipeline_user_arn_from_the_pipeline_toml_file( - self, get_command_names_mock, sam_config_mock - ): + def test_load_config_values_works_from_the_pipeline_toml_file(self, get_command_names_mock, sam_config_mock): # setup get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES 
sam_config_instance_mock = Mock() @@ -474,10 +491,10 @@ def test_load_saved_pipeline_user_arn_returns_the_pipeline_user_arn_from_the_pip sam_config_instance_mock.get_all.return_value = {"pipeline_user": ANY_PIPELINE_USER_ARN} # trigger - pipeline_user_arn = _load_saved_pipeline_user_arn() + config_values = _load_config_values() # verify - self.assertEqual(pipeline_user_arn, ANY_PIPELINE_USER_ARN) + self.assertEqual(config_values, {"pipeline_user": ANY_PIPELINE_USER_ARN}) @patch("samcli.lib.pipeline.bootstrap.stage.Stage") @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") @@ -500,16 +517,16 @@ def test_load_saved_oidc_values_returns_values_from_file( "deployment_branch": "saved_branch", "permissions_provider": "OpenID Connect (OIDC)", } + self.cli_context["github_org"] = ANY_GITHUB_ORG + self.cli_context["github_repo"] = ANY_GITHUB_REPO github_config = GitHubOidcConfig( github_repo="saved_repo", github_org="saved_org", deployment_branch="saved_branch" ) oidc_config = OidcConfig( oidc_provider="saved_provider", oidc_client_id="saved_client_id", oidc_provider_url="saved_url" ) - gitlab_config = GitLabOidcConfig( - gitlab_group=ANY_GITLAB_GROUP, gitlab_project=ANY_GITLAB_PROJECT, deployment_branch="saved_branch" - ) - bitbucket_config = BitbucketOidcConfig(ANY_BITBUCKET_REPO_UUID) + gitlab_config = GitLabOidcConfig(gitlab_group=None, gitlab_project=None, deployment_branch="saved_branch") + bitbucket_config = BitbucketOidcConfig(None) # trigger bootstrap_cli(**self.cli_context) @@ -533,11 +550,11 @@ def test_load_saved_oidc_values_returns_values_from_file( ) @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") - @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli._load_config_values") @patch("samcli.lib.pipeline.bootstrap.stage.Stage") @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") def 
test_bootstrapping_normal_interactive_flow_with_non_user_provided_user( - self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + self, guided_context_mock, environment_mock, load_config_values_mock, get_command_names_mock ): # setup gc_instance = Mock() @@ -546,7 +563,7 @@ def test_bootstrapping_normal_interactive_flow_with_non_user_provided_user( environment_instance = Mock() environment_mock.return_value = environment_instance environment_instance.permissions_provider = "iam" - load_saved_pipeline_user_arn_mock.return_value = ANY_PIPELINE_USER_ARN + load_config_values_mock.return_value = {"pipeline_user": ANY_PIPELINE_USER_ARN} environment_instance.pipeline_user.is_user_provided = False self.cli_context["interactive"] = True self.cli_context["pipeline_user_arn"] = None @@ -556,7 +573,7 @@ def test_bootstrapping_normal_interactive_flow_with_non_user_provided_user( bootstrap_cli(**self.cli_context) # verify - load_saved_pipeline_user_arn_mock.assert_called_once() + load_config_values_mock.assert_called_once() gc_instance.run.assert_called_once() environment_instance.bootstrap.assert_called_once_with(confirm_changeset=True) environment_instance.print_resources_summary.assert_called_once() From 999227c4e77349be9d20c5c5f79d7e54277dd406 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 7 Dec 2022 11:06:47 -0500 Subject: [PATCH 04/26] chore: Add type-stubs and remove redundant ignores (#4297) * mypy don't ignore pywintypes * Add types-pywin32 * types-pywin32 in base * pylint 2.15 * bump tomlkit for pylint * keep 1 pylint: disable=import-error and revert version bump * Added missing type stubs form typeshed * tomlkit and psutil fixes * pylint import order * Move stub requirements to requirements/dev.txt Co-authored-by: Lucas <12496191+lucashuy@users.noreply.github.com> Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> --- 
.pylintrc | 3 ++- mypy.ini | 11 +---------- requirements/dev.txt | 7 ++++++- samcli/commands/local/lib/local_lambda.py | 3 +-- samcli/commands/sync/sync_context.py | 2 +- samcli/lib/build/build_graph.py | 8 ++++---- samcli/lib/generated_sample_events/events.py | 7 +++---- samcli/lib/package/artifact_exporter.py | 1 - samcli/local/docker/utils.py | 2 +- samcli/yamlhelper.py | 4 +--- tests/testing_utils.py | 6 +++--- tests/unit/local/docker/test_manager.py | 2 +- 12 files changed, 24 insertions(+), 32 deletions(-) diff --git a/.pylintrc b/.pylintrc index e391d311d6..b12e79dd69 100644 --- a/.pylintrc +++ b/.pylintrc @@ -314,7 +314,8 @@ ignored-classes=SQLObject # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E0201 when accessed. Python regular # expressions are accepted. -generated-members=REQUEST,acl_users,aq_parent,objects,DoesNotExist,md5,sha1,sha224,sha256,sha384,sha512 +# Stub files are also missed by pylint: https://github.com/PyCQA/pylint/issues/4987 +generated-members=REQUEST,acl_users,aq_parent,objects,DoesNotExist,md5,sha1,sha224,sha256,sha384,sha512,pywintypes [VARIABLES] diff --git a/mypy.ini b/mypy.ini index 3402dbcd27..5a0be0e705 100644 --- a/mypy.ini +++ b/mypy.ini @@ -19,9 +19,6 @@ ignore_errors=True # below are packages/modules that do not have stubs available # -[mypy-pywintypes] -ignore_missing_imports=True - [mypy-botocore,botocore.*] ignore_missing_imports=True @@ -46,19 +43,13 @@ ignore_missing_imports=True [mypy-jmespath] ignore_missing_imports=True -[mypy-chevron] -ignore_missing_imports=True - [mypy-parameterized] ignore_missing_imports=True -[mypy-setuptools] -ignore_missing_imports=True - [mypy-watchdog,watchdog.*] ignore_missing_imports=True # progressive add typechecks and these modules already complete the process, let's keep them clean 
[mypy-samcli.lib.iac.plugins_interfaces,samcli.commands.build,samcli.lib.build.*,samcli.commands.local.cli_common.invoke_context,samcli.commands.local.lib.local_lambda,samcli.lib.providers.*,samcli.lib.utils.git_repo.py,samcli.lib.cookiecutter.*,samcli.lib.pipeline.*,samcli.commands.pipeline.*] disallow_untyped_defs=True -disallow_incomplete_defs=True \ No newline at end of file +disallow_incomplete_defs=True diff --git a/requirements/dev.txt b/requirements/dev.txt index 937602691e..acea29bdf0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -9,6 +9,11 @@ pytest-cov==2.10.1 # here we fix its version and upgrade it manually in the future mypy==0.790 boto3-stubs[apigateway,cloudformation,ecr,iam,lambda,s3,schemas,secretsmanager,signer,stepfunctions,sts,xray]==1.21.21 +types-pywin32==304.0.0.1 +types-PyYAML==6.0.12 +types-chevron==0.14.2 +types-psutil==5.9.5.1 +types-setuptools==65.4.0.0 # Test requirements pytest==6.1.1 @@ -21,4 +26,4 @@ pytest-json-report==1.5.0 # formatter black==22.6.0 -psutil==5.9.0 \ No newline at end of file +psutil==5.9.0 diff --git a/samcli/commands/local/lib/local_lambda.py b/samcli/commands/local/lib/local_lambda.py index ef768a3d20..478c32c6a6 100644 --- a/samcli/commands/local/lib/local_lambda.py +++ b/samcli/commands/local/lib/local_lambda.py @@ -152,8 +152,7 @@ def invoke( # trying to connect to the socket for Docker it would throw ContainerResponseException but now it's this. 
LOG.info(str(e)) except OSError as os_error: - # pylint: disable=no-member - if hasattr(os_error, "winerror") and os_error.winerror == 1314: # type: ignore + if getattr(os_error, "winerror", None) == 1314: raise NoPrivilegeException( "Administrator, Windows Developer Mode, " "or SeCreateSymbolicLinkPrivilege is required to create symbolic link for files: {}, {}".format( diff --git a/samcli/commands/sync/sync_context.py b/samcli/commands/sync/sync_context.py index 6995494875..fdc3c70015 100644 --- a/samcli/commands/sync/sync_context.py +++ b/samcli/commands/sync/sync_context.py @@ -7,7 +7,7 @@ from typing import Optional, cast, Dict import tomlkit -from tomlkit.api import _TOMLDocument as TOMLDocument +from tomlkit.toml_document import TOMLDocument from tomlkit.items import Item from samcli.lib.build.build_graph import DEFAULT_DEPENDENCIES_DIR diff --git a/samcli/lib/build/build_graph.py b/samcli/lib/build/build_graph.py index 5801fcab52..f17e2b768f 100644 --- a/samcli/lib/build/build_graph.py +++ b/samcli/lib/build/build_graph.py @@ -7,12 +7,13 @@ import os import threading from abc import abstractmethod +from copy import deepcopy from pathlib import Path from typing import Sequence, Tuple, List, Any, Optional, Dict, cast, NamedTuple -from copy import deepcopy from uuid import uuid4 import tomlkit +from tomlkit.toml_document import TOMLDocument from samcli.commands._utils.experimental import is_experimental_enabled, ExperimentalFlag from samcli.lib.build.exceptions import InvalidBuildGraphException @@ -371,7 +372,6 @@ def _write_source_hash( """ Helper to write source_hash values to build.toml file """ - document = {} if not self._filepath.exists(): open(self._filepath, "a+").close() # pylint: disable=consider-using-with @@ -379,7 +379,7 @@ def _write_source_hash( # .loads() returns a TOMLDocument, # and it behaves like a standard dictionary according to https://github.com/sdispater/tomlkit. 
# in tomlkit 0.7.2, the types are broken (tomlkit#128, #130, #134) so here we convert it to Dict. - document = cast(Dict, tomlkit.loads(txt)) + document = cast(Dict[str, Dict[str, Any]], tomlkit.loads(txt)) for function_uuid, hashing_info in function_content.items(): if function_uuid in document.get(BuildGraph.FUNCTION_BUILD_DEFINITIONS, {}): @@ -397,7 +397,7 @@ def _write_source_hash( layer_build_definition[MANIFEST_HASH_FIELD] = hashing_info.manifest_hash LOG.info("Updated source_hash and manifest_hash field in build.toml for layer with UUID %s", layer_uuid) - self._filepath.write_text(tomlkit.dumps(document)) # type: ignore + self._filepath.write_text(tomlkit.dumps(cast(TOMLDocument, document))) def _read(self) -> None: """ diff --git a/samcli/lib/generated_sample_events/events.py b/samcli/lib/generated_sample_events/events.py index 2f70b6fb15..34e7c69ad3 100644 --- a/samcli/lib/generated_sample_events/events.py +++ b/samcli/lib/generated_sample_events/events.py @@ -6,7 +6,7 @@ import json import base64 import warnings -from typing import Dict, cast +from typing import Dict from urllib.parse import quote as url_quote with warnings.catch_warnings(): @@ -180,6 +180,5 @@ def generate_event(self, service_name: str, event_type: str, values_to_sub: Dict data = json.dumps(data, indent=2) - # return the substituted file - # According to chevron's code, it returns a str (A string containing the rendered template.) - return cast("str", renderer.render(data, values_to_sub)) + # return the substituted file (A string containing the rendered template.) + return renderer.render(data, values_to_sub) diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index 581854577a..b211219682 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -1,7 +1,6 @@ """ Exporting resources defined in the cloudformation template to the cloud. 
""" -# pylint: disable=no-member # Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. # diff --git a/samcli/local/docker/utils.py b/samcli/local/docker/utils.py index 3d41b41233..9a02f016c2 100644 --- a/samcli/local/docker/utils.py +++ b/samcli/local/docker/utils.py @@ -85,7 +85,7 @@ def is_docker_reachable(docker_client): if platform.system() == "Windows": import pywintypes # pylint: disable=import-error - errors += (pywintypes.error,) # pylint: disable=no-member + errors += (pywintypes.error,) try: docker_client.ping() diff --git a/samcli/yamlhelper.py b/samcli/yamlhelper.py index 722a529c11..ab0572037b 100644 --- a/samcli/yamlhelper.py +++ b/samcli/yamlhelper.py @@ -22,9 +22,7 @@ from botocore.compat import OrderedDict import yaml -# ScalarNode and SequenceNode are not declared in __all__, -# TODO: we need to double check whether they are public and stable -from yaml.resolver import ScalarNode, SequenceNode # type: ignore +from yaml.nodes import ScalarNode, SequenceNode from samtranslator.utils.py27hash_fix import Py27Dict, Py27UniStr diff --git a/tests/testing_utils.py b/tests/testing_utils.py index 83b707486f..d410c6b599 100644 --- a/tests/testing_utils.py +++ b/tests/testing_utils.py @@ -13,7 +13,7 @@ import shutil from uuid import uuid4 -import psutil # type: ignore +import psutil IS_WINDOWS = platform.system().lower() == "windows" RUNNING_ON_CI = os.environ.get("APPVEYOR", False) @@ -110,9 +110,9 @@ def kill_process(process: Popen) -> None: root_process = psutil.Process(process.pid) all_processes = root_process.children(recursive=True) all_processes.append(root_process) - for process in all_processes: + for process_to_kill in all_processes: try: - process.kill() + process_to_kill.kill() except psutil.NoSuchProcess: pass _, alive = psutil.wait_procs(all_processes, timeout=10) diff --git a/tests/unit/local/docker/test_manager.py b/tests/unit/local/docker/test_manager.py index 05c36b1998..7e567abfcf 100644 --- 
a/tests/unit/local/docker/test_manager.py +++ b/tests/unit/local/docker/test_manager.py @@ -321,7 +321,7 @@ def test_must_return_false_if_ping_raises_pywintypes_error(self): importlib.reload(manager_module) importlib.reload(docker_utils) manager = manager_module.ContainerManager(docker_client=self.docker_client_mock) - import pywintypes # pylint: disable=import-error + import pywintypes self.ping_mock.side_effect = pywintypes.error("pywintypes.error") is_reachable = manager.is_docker_reachable From c128b6ee55273b7355b3a173a418199fe9deddb9 Mon Sep 17 00:00:00 2001 From: Lucas <12496191+lucashuy@users.noreply.github.com> Date: Wed, 7 Dec 2022 10:04:34 -0800 Subject: [PATCH 05/26] chore: Terraform prepare hook refactor (#4454) * Split logic in prepare hook and tests * Include new files * Changed file name * make black * Addressed comments * Removed old instance variables for utils unit test --- .../terraform/hooks/prepare/constants.py | 14 + .../terraform/hooks/prepare/enrich.py | 680 ++++ .../terraform/hooks/prepare/hook.py | 1658 +------- .../hooks/prepare/makefile_generator.py | 262 ++ .../hooks/prepare/property_builder.py | 223 ++ .../terraform/hooks/prepare/translate.py | 542 +++ .../terraform/hooks/prepare/types.py | 19 +- samcli/hook_packages/terraform/lib/utils.py | 5 +- .../terraform/hooks/prepare/prepare_base.py | 741 ++++ .../terraform/hooks/prepare/test_enrich.py | 1239 ++++++ .../terraform/hooks/prepare/test_hook.py | 3333 +---------------- .../hooks/prepare/test_makefile_generator.py | 189 + .../hooks/prepare/test_property_builder.py | 196 + .../terraform/hooks/prepare/test_translate.py | 1037 +++++ .../hook_packages/terraform/lib/test_utils.py | 7 +- 15 files changed, 5168 insertions(+), 4977 deletions(-) create mode 100644 samcli/hook_packages/terraform/hooks/prepare/constants.py create mode 100644 samcli/hook_packages/terraform/hooks/prepare/enrich.py create mode 100644 samcli/hook_packages/terraform/hooks/prepare/makefile_generator.py create mode 
100644 samcli/hook_packages/terraform/hooks/prepare/property_builder.py create mode 100644 samcli/hook_packages/terraform/hooks/prepare/translate.py create mode 100644 tests/unit/hook_packages/terraform/hooks/prepare/prepare_base.py create mode 100644 tests/unit/hook_packages/terraform/hooks/prepare/test_enrich.py create mode 100644 tests/unit/hook_packages/terraform/hooks/prepare/test_makefile_generator.py create mode 100644 tests/unit/hook_packages/terraform/hooks/prepare/test_property_builder.py create mode 100644 tests/unit/hook_packages/terraform/hooks/prepare/test_translate.py diff --git a/samcli/hook_packages/terraform/hooks/prepare/constants.py b/samcli/hook_packages/terraform/hooks/prepare/constants.py new file mode 100644 index 0000000000..50c011a6f5 --- /dev/null +++ b/samcli/hook_packages/terraform/hooks/prepare/constants.py @@ -0,0 +1,14 @@ +""" +Constants related to the Terraform prepare hook. +""" +from samcli.lib.utils.resources import ( + AWS_LAMBDA_FUNCTION as CFN_AWS_LAMBDA_FUNCTION, + AWS_LAMBDA_LAYERVERSION as CFN_AWS_LAMBDA_LAYER_VERSION, +) + +SAM_METADATA_RESOURCE_NAME_ATTRIBUTE = "resource_name" + +CFN_CODE_PROPERTIES = { + CFN_AWS_LAMBDA_FUNCTION: "Code", + CFN_AWS_LAMBDA_LAYER_VERSION: "Content", +} diff --git a/samcli/hook_packages/terraform/hooks/prepare/enrich.py b/samcli/hook_packages/terraform/hooks/prepare/enrich.py new file mode 100644 index 0000000000..7d1fa2295c --- /dev/null +++ b/samcli/hook_packages/terraform/hooks/prepare/enrich.py @@ -0,0 +1,680 @@ +""" +Terraform resource enrichment + +This module populates the values required for each of the Lambda resources +""" +import logging +import json +import os +import re +from typing import Dict, List, Tuple +from json.decoder import JSONDecodeError +from subprocess import run, CalledProcessError +from samcli.hook_packages.terraform.lib.utils import ( + get_sam_metadata_planned_resource_value_attribute, + _calculate_configuration_attribute_value_hash, + build_cfn_logical_id, +) 
+from samcli.hook_packages.terraform.hooks.prepare.types import SamMetadataResource +from samcli.hook_packages.terraform.hooks.prepare.makefile_generator import ( + generate_makefile_rule_for_lambda_resource, + generate_makefile, +) +from samcli.hook_packages.terraform.hooks.prepare.constants import ( + CFN_CODE_PROPERTIES, + SAM_METADATA_RESOURCE_NAME_ATTRIBUTE, +) +from samcli.hook_packages.terraform.hooks.prepare.resource_linking import _resolve_resource_attribute +from samcli.hook_packages.terraform.hooks.prepare.exceptions import InvalidSamMetadataPropertiesException +from samcli.lib.utils.resources import ( + AWS_LAMBDA_FUNCTION as CFN_AWS_LAMBDA_FUNCTION, + AWS_LAMBDA_LAYERVERSION as CFN_AWS_LAMBDA_LAYER_VERSION, +) +from samcli.lib.utils.packagetype import ZIP, IMAGE +from samcli.lib.hook.exceptions import PrepareHookException + +SAM_METADATA_DOCKER_TAG_ATTRIBUTE = "docker_tag" +SAM_METADATA_DOCKER_BUILD_ARGS_ATTRIBUTE = "docker_build_args" +SAM_METADATA_DOCKER_FILE_ATTRIBUTE = "docker_file" +SAM_METADATA_RESOURCE_TYPE_ATTRIBUTE = "resource_type" + +# check for python 3, 3.7 or above +# regex: search for 'Python', whitespace, '3.', digits 7-9 or 2+ digits, any digit or '.' 0+ times +PYTHON_VERSION_REGEX = re.compile(r"Python\s*3.([7-9]|\d{2,})[\d.]*") + +LOG = logging.getLogger(__name__) + + +def enrich_resources_and_generate_makefile( + sam_metadata_resources: List[SamMetadataResource], + cfn_resources: Dict[str, Dict], + output_directory_path: str, + terraform_application_dir: str, + lambda_resources_to_code_map: Dict, +) -> None: + """ + Use the sam metadata resources to enrich the mapped resources and to create a Makefile with a rule for + each lambda resource to be built. + + Parameters + ---------- + sam_metadata_resources: List[SamMetadataResource] + The list of sam metadata resources defined in the terraform project. 
+ cfn_resources: dict + CloudFormation resources + output_directory_path: str + the output directory path to write the generated metadata and makefile + terraform_application_dir: str + the terraform project root directory + lambda_resources_to_code_map: Dict + The map between lambda resources code path, and lambda resources logical ids + """ + + python_command_name = _get_python_command_name() + + resources_types_enrichment_functions = { + "ZIP_LAMBDA_FUNCTION": _enrich_zip_lambda_function, + "IMAGE_LAMBDA_FUNCTION": _enrich_image_lambda_function, + "LAMBDA_LAYER": _enrich_lambda_layer, + } + + makefile_rules = [] + for sam_metadata_resource in sam_metadata_resources: + # enrich resource + resource_type = get_sam_metadata_planned_resource_value_attribute( + sam_metadata_resource.resource, SAM_METADATA_RESOURCE_TYPE_ATTRIBUTE + ) + sam_metadata_resource_address = sam_metadata_resource.resource.get("address") + enrichment_function = resources_types_enrichment_functions.get(resource_type) + if enrichment_function is None: + raise InvalidSamMetadataPropertiesException( + f"The resource type {resource_type} found in the sam metadata resource " + f"{sam_metadata_resource_address} is not a correct resource type. 
The resource type should be one " + f"of these values {resources_types_enrichment_functions.keys()}" + ) + + lambda_resources = _get_relevant_cfn_resource( + sam_metadata_resource, cfn_resources, lambda_resources_to_code_map + ) + for cfn_resource, logical_id in lambda_resources: + enrichment_function( + sam_metadata_resource.resource, + cfn_resource, + logical_id, + terraform_application_dir, + output_directory_path, + ) + + # get makefile rule for resource + makefile_rule = generate_makefile_rule_for_lambda_resource( + sam_metadata_resource, logical_id, terraform_application_dir, python_command_name, output_directory_path + ) + makefile_rules.append(makefile_rule) + + # generate makefile + LOG.debug("Generate Makefile in %s", output_directory_path) + generate_makefile(makefile_rules, output_directory_path) + + +def _enrich_zip_lambda_function( + sam_metadata_resource: Dict, + cfn_lambda_function: Dict, + cfn_lambda_function_logical_id: str, + terraform_application_dir: str, + output_directory_path: str, +): + """ + Use the sam metadata resources to enrich the zip lambda function. + + Parameters + ---------- + sam_metadata_resource: Dict + The sam metadata resource properties + cfn_lambda_function: dict + CloudFormation lambda function to be enriched + cfn_lambda_function_logical_id: str + the cloudFormation lambda function to be enriched logical id. + output_directory_path: str + the output directory path to write the generated metadata and makefile + terraform_application_dir: str + the terraform project root directory + """ + sam_metadata_resource_address = sam_metadata_resource.get("address") + if not sam_metadata_resource_address: + raise PrepareHookException( + "Invalid Terraform plan output. The address property should not be null to any terraform resource." 
+ ) + + LOG.debug( + "Enrich the ZIP lambda function %s using the metadata properties defined in resource %s", + cfn_lambda_function_logical_id, + sam_metadata_resource_address, + ) + + _validate_referenced_resource_matches_sam_metadata_type( + cfn_lambda_function, sam_metadata_resource, sam_metadata_resource_address, ZIP + ) + + cfn_source_code_path = _get_source_code_path( + sam_metadata_resource, + sam_metadata_resource_address, + terraform_application_dir, + "original_source_code", + "source_code_property", + "source code", + ) + _set_zip_metadata_resources( + cfn_lambda_function, + cfn_source_code_path, + output_directory_path, + terraform_application_dir, + CFN_CODE_PROPERTIES[CFN_AWS_LAMBDA_FUNCTION], + ) + + +def _enrich_image_lambda_function( + sam_metadata_resource: Dict, + cfn_lambda_function: Dict, + cfn_lambda_function_logical_id: str, + terraform_application_dir: str, + output_directory_path: str, +): + """ + Use the sam metadata resources to enrich the image lambda function. + + Parameters + ---------- + sam_metadata_resource: Dict + The sam metadata resource properties + cfn_lambda_function: dict + CloudFormation lambda function to be enriched + cfn_lambda_function_logical_id: str + the cloudFormation lambda function to be enriched logical id. + output_directory_path: str + the output directory path to write the generated metadata and makefile + terraform_application_dir: str + the terraform project root directory + """ + sam_metadata_resource_address = sam_metadata_resource.get("address") + if not sam_metadata_resource_address: + raise PrepareHookException( + "Invalid Terraform plan output. The address property should not be null to any terraform resource." 
+ ) + cfn_resource_properties = cfn_lambda_function.get("Properties", {}) + + LOG.debug( + "Enrich the IMAGE lambda function %s using the metadata properties defined in resource %s", + cfn_lambda_function_logical_id, + sam_metadata_resource_address, + ) + + _validate_referenced_resource_matches_sam_metadata_type( + cfn_lambda_function, sam_metadata_resource, sam_metadata_resource_address, IMAGE + ) + + cfn_docker_context_path = _get_source_code_path( + sam_metadata_resource, + sam_metadata_resource_address, + terraform_application_dir, + "docker_context", + "docker_context_property_path", + "docker context", + ) + cfn_docker_file = get_sam_metadata_planned_resource_value_attribute( + sam_metadata_resource, SAM_METADATA_DOCKER_FILE_ATTRIBUTE + ) + cfn_docker_build_args_string = get_sam_metadata_planned_resource_value_attribute( + sam_metadata_resource, SAM_METADATA_DOCKER_BUILD_ARGS_ATTRIBUTE + ) + cfn_docker_build_args = None + if cfn_docker_build_args_string: + try: + LOG.debug("Parse the docker build args %s", cfn_docker_build_args_string) + cfn_docker_build_args = json.loads(cfn_docker_build_args_string) + if not isinstance(cfn_docker_build_args, dict): + raise InvalidSamMetadataPropertiesException( + f"The sam metadata resource {sam_metadata_resource_address} should contain a valid json " + f"encoded string for the lambda function docker build arguments." + ) + except JSONDecodeError as exc: + raise InvalidSamMetadataPropertiesException( + f"The sam metadata resource {sam_metadata_resource_address} should contain a valid json encoded " + f"string for the lambda function docker build arguments." 
+ ) from exc + + cfn_docker_tag = get_sam_metadata_planned_resource_value_attribute( + sam_metadata_resource, SAM_METADATA_DOCKER_TAG_ATTRIBUTE + ) + + if cfn_resource_properties.get("Code"): + cfn_resource_properties.pop("Code") + + if not cfn_lambda_function.get("Metadata", {}): + cfn_lambda_function["Metadata"] = {} + cfn_lambda_function["Metadata"]["SkipBuild"] = False + cfn_lambda_function["Metadata"]["DockerContext"] = cfn_docker_context_path + if cfn_docker_file: + cfn_lambda_function["Metadata"]["Dockerfile"] = cfn_docker_file + if cfn_docker_tag: + cfn_lambda_function["Metadata"]["DockerTag"] = cfn_docker_tag + if cfn_docker_build_args: + cfn_lambda_function["Metadata"]["DockerBuildArgs"] = cfn_docker_build_args + + +def _enrich_lambda_layer( + sam_metadata_resource: Dict, + cfn_lambda_layer: Dict, + cfn_lambda_layer_logical_id: str, + terraform_application_dir: str, + output_directory_path: str, +) -> None: + """ + Use the sam metadata resources to enrich the lambda layer. + + Parameters + ---------- + sam_metadata_resource: Dict + The sam metadata resource properties + cfn_lambda_layer: dict + CloudFormation lambda layer to be enriched + cfn_lambda_layer_logical_id: str + the cloudFormation lambda layer to be enriched logical id. + output_directory_path: str + the output directory path to write the generated metadata and makefile + terraform_application_dir: str + the terraform project root directory + """ + sam_metadata_resource_address = sam_metadata_resource.get("address") + if not sam_metadata_resource_address: + raise PrepareHookException( + "Invalid Terraform plan output. The address property should not be null to any terraform resource." 
+ ) + _validate_referenced_resource_layer_matches_metadata_type( + cfn_lambda_layer, sam_metadata_resource, sam_metadata_resource_address + ) + LOG.debug( + "Enrich the Lambda Layer Version %s using the metadata properties defined in resource %s", + cfn_lambda_layer_logical_id, + sam_metadata_resource_address, + ) + + cfn_source_code_path = _get_source_code_path( + sam_metadata_resource, + sam_metadata_resource_address, + terraform_application_dir, + "original_source_code", + "source_code_property", + "source code", + ) + + _set_zip_metadata_resources( + cfn_lambda_layer, + cfn_source_code_path, + output_directory_path, + terraform_application_dir, + CFN_CODE_PROPERTIES[CFN_AWS_LAMBDA_LAYER_VERSION], + ) + + +def _validate_referenced_resource_layer_matches_metadata_type( + cfn_resource: dict, + sam_metadata_resource: dict, + sam_metadata_resource_address: str, +) -> None: + """ + Validate if the resource that match the resource name provided in the sam metadata resource matches the resource + type provided in the metadata as well. 
+ + Parameters + ---------- + cfn_resource: dict + The CFN resource that matches the sam metadata resource name + sam_metadata_resource: Dict + The sam metadata resource properties + sam_metadata_resource_address: str + The sam metadata resource address + """ + cfn_resource_properties = cfn_resource.get("Properties", {}) + resource_type = sam_metadata_resource.get(SAM_METADATA_RESOURCE_TYPE_ATTRIBUTE) + cfn_resource_type = cfn_resource.get("Type") + LOG.debug( + "Validate if the referenced resource in sam metadata resource %s is of the expected type %s", + sam_metadata_resource_address, + resource_type, + ) + + if cfn_resource_type != CFN_AWS_LAMBDA_LAYER_VERSION or not cfn_resource_properties: + LOG.error( + "The matched resource is of type %s but the type mentioned in the sam metadata resource %s is %s", + cfn_resource_type, + sam_metadata_resource_address, + resource_type, + ) + raise InvalidSamMetadataPropertiesException( + f"The sam metadata resource {sam_metadata_resource_address} is referring to a resource that does not " + f"match the resource type {resource_type}." + ) + + +def _get_source_code_path( + sam_metadata_resource: dict, + sam_metadata_resource_address: str, + project_root_dir: str, + src_code_property_name: str, + property_path_property_name: str, + src_code_attribute_name: str, +) -> str: + """ + Validate that sam metadata resource contains the valid metadata properties + to get a lambda function or layer source code. 
+ + Parameters + ---------- + sam_metadata_resource: Dict + The sam metadata resource properties + sam_metadata_resource_address: str + The sam metadata resource address + project_root_dir: str + the terraform project root directory path + src_code_property_name: str + the sam metadata property name that contains the lambda function or layer source code or docker context path + property_path_property_name: str + the sam metadata property name that contains the property to get the source code value if it was provided + as json string + src_code_attribute_name: str + the lambda function or later source code or docker context to be used to raise the correct exception + + Returns + ------- + str + The lambda function or layer source code or docker context paths + """ + LOG.debug( + "Extract the %s from the sam metadata resource %s from property %s", + src_code_attribute_name, + sam_metadata_resource_address, + src_code_property_name, + ) + source_code = get_sam_metadata_planned_resource_value_attribute(sam_metadata_resource, src_code_property_name) + source_code_property = get_sam_metadata_planned_resource_value_attribute( + sam_metadata_resource, property_path_property_name + ) + LOG.debug( + "The found %s value is %s and property value is %s", src_code_attribute_name, source_code, source_code_property + ) + if not source_code: + raise InvalidSamMetadataPropertiesException( + f"The sam metadata resource {sam_metadata_resource_address} " + f"should contain the lambda function/lambda layer " + f"{src_code_attribute_name} in property {src_code_property_name}" + ) + if isinstance(source_code, str): + try: + LOG.debug("Try to decode the %s value in case if it is a encoded JSON string.", src_code_attribute_name) + source_code = json.loads(source_code) + LOG.debug("The decoded value of the %s value is %s", src_code_attribute_name, source_code) + except JSONDecodeError: + LOG.debug("Source code value could not be parsed as a JSON object. 
Handle it as normal string value") + + if isinstance(source_code, dict): + LOG.debug( + "Process the extracted %s as JSON object using the property %s", + src_code_attribute_name, + source_code_property, + ) + if not source_code_property: + raise InvalidSamMetadataPropertiesException( + f"The sam metadata resource {sam_metadata_resource_address} " + f"should contain the lambda function/lambda layer " + f"{src_code_attribute_name} property in property {property_path_property_name} as the " + f"{src_code_property_name} value is an object" + ) + cfn_source_code_path = source_code.get(source_code_property) + if not cfn_source_code_path: + LOG.error( + "The property %s does not exist in the extracted %s JSON object %s", + source_code_property, + src_code_attribute_name, + source_code, + ) + raise InvalidSamMetadataPropertiesException( + f"The sam metadata resource {sam_metadata_resource_address} " + f"should contain a valid lambda function/lambda layer " + f"{src_code_attribute_name} property in property {property_path_property_name} as the " + f"{src_code_property_name} value is an object" + ) + elif isinstance(source_code, list): + # SAM CLI does not process multiple paths, so we will handle only the first value in this list + LOG.debug( + "Process the extracted %s as list, and get the first value as SAM CLI does not support multiple paths", + src_code_attribute_name, + ) + if len(source_code) < 1: + raise InvalidSamMetadataPropertiesException( + f"The sam metadata resource {sam_metadata_resource_address} " + f"should contain the lambda function/lambda layer " + f"{src_code_attribute_name} in property {src_code_property_name}, and it should not be an empty list" + ) + cfn_source_code_path = source_code[0] + if not cfn_source_code_path: + raise InvalidSamMetadataPropertiesException( + f"The sam metadata resource {sam_metadata_resource_address} " + f"should contain a valid lambda/lambda layer function " + f"{src_code_attribute_name} in property {src_code_property_name}" 
+ ) + else: + cfn_source_code_path = source_code + + LOG.debug("The %s path value is %s", src_code_attribute_name, cfn_source_code_path) + + if not os.path.isabs(cfn_source_code_path): + LOG.debug( + "The %s path value is not absoulte value. Get the absolute value based on the root directory %s", + src_code_attribute_name, + project_root_dir, + ) + cfn_source_code_path = os.path.normpath(os.path.join(project_root_dir, cfn_source_code_path)) + LOG.debug("The calculated absolute path of %s is %s", src_code_attribute_name, cfn_source_code_path) + + if not isinstance(cfn_source_code_path, str) or not os.path.exists(cfn_source_code_path): + LOG.error("The path %s does not exist", cfn_source_code_path) + raise InvalidSamMetadataPropertiesException( + f"The sam metadata resource {sam_metadata_resource_address} should contain a valid string value for the " + f"lambda function/lambda layer {src_code_attribute_name} path" + ) + + return cfn_source_code_path + + +def _get_relevant_cfn_resource( + sam_metadata_resource: SamMetadataResource, + cfn_resources: Dict[str, Dict], + lambda_resources_to_code_map: Dict[str, List[Tuple[Dict, str]]], +) -> List[Tuple[Dict, str]]: + """ + use the sam metadata resource name property to determine the resource address, and transform the address to logical + id to use it to get the cfn_resource. + If the metadata resource does not contain a resource name property, so we need to use the resource built artifact + path to find tha lambda resources that use the same artifact path + + Parameters + ---------- + sam_metadata_resource: SamMetadataResource + sam metadata resource that contain extra information about some resource. 
+ cfn_resources: Dict + CloudFormation resources + lambda_resources_to_code_map: Dict + The map between lambda resources code path, and lambda resources logical ids + + Returns + ------- + List[tuple(Dict, str)] + The cfn resources that mentioned in the sam metadata resource, and the resource logical id + """ + + resources_types = { + "ZIP_LAMBDA_FUNCTION": "zip", + "IMAGE_LAMBDA_FUNCTION": "image", + "LAMBDA_LAYER": "layer", + } + + sam_metadata_resource_address = sam_metadata_resource.resource.get("address") + resource_name = get_sam_metadata_planned_resource_value_attribute( + sam_metadata_resource.resource, SAM_METADATA_RESOURCE_NAME_ATTRIBUTE + ) + resource_type = get_sam_metadata_planned_resource_value_attribute( + sam_metadata_resource.resource, SAM_METADATA_RESOURCE_TYPE_ATTRIBUTE + ) + if not resource_name: + artifact_property_name = ( + "built_output_path" if resource_type in ["ZIP_LAMBDA_FUNCTION", "LAMBDA_LAYER"] else "built_image_uri" + ) + artifact_path_value = get_sam_metadata_planned_resource_value_attribute( + sam_metadata_resource.resource, artifact_property_name + ) + if not artifact_path_value: + artifact_path_value = _resolve_resource_attribute( + sam_metadata_resource.config_resource, artifact_property_name + ) + hash_value = ( + f"{resources_types[resource_type]}_{_calculate_configuration_attribute_value_hash(artifact_path_value)}" + ) + lambda_resources = lambda_resources_to_code_map.get(hash_value, []) + if not lambda_resources: + raise InvalidSamMetadataPropertiesException( + f"sam cli expects the sam metadata resource {sam_metadata_resource_address} to contain a resource name " + f"that will be enriched using this metadata resource" + ) + return lambda_resources + # the provided resource name will be always a postfix to the module address. The customer could not set a full + # address within a module. 
+ LOG.debug( + "Check if the input resource name %s is a postfix to the current module address %s", + resource_name, + sam_metadata_resource.current_module_address, + ) + full_resource_address = ( + f"{sam_metadata_resource.current_module_address}.{resource_name}" + if sam_metadata_resource.current_module_address + else resource_name + ) + LOG.debug("check if the resource address %s has a relevant cfn resource or not", full_resource_address) + logical_id = build_cfn_logical_id(full_resource_address) + cfn_resource = cfn_resources.get(logical_id) + if cfn_resource: + LOG.debug("The CFN resource that match the input resource name %s is %s", resource_name, logical_id) + return [(cfn_resource, logical_id)] + + raise InvalidSamMetadataPropertiesException( + f"There is no resource found that match the provided resource name " f"{resource_name}" + ) + + +def _set_zip_metadata_resources( + resource: dict, + cfn_source_code_path: str, + output_directory_path: str, + terraform_application_dir: str, + code_property: str, +) -> None: + """ + Update the CloudFormation resource metadata with the enrichment properties from the TF resource + + Parameters + ---------- + resource: dict + The CFN resource that matches the sam metadata resource name + cfn_source_code_path: dict + Absolute path location of where the original source code resides. + output_directory_path: str + The directory where to find the Makefile the path to be copied into the temp dir. + terraform_application_dir: str + The working directory from which to run the Makefile. + code_property: + The property in the configuration used to denote the code e.g. 
"Code" or "Content" + """ + resource_properties = resource.get("Properties", {}) + resource_properties[code_property] = cfn_source_code_path + if not resource.get("Metadata", {}): + resource["Metadata"] = {} + resource["Metadata"]["SkipBuild"] = False + resource["Metadata"]["BuildMethod"] = "makefile" + resource["Metadata"]["ContextPath"] = output_directory_path + resource["Metadata"]["WorkingDirectory"] = terraform_application_dir + # currently we set the terraform project root directory that contains all the terraform artifacts as the project + # directory till we work on the custom hook properties, and add a property for this value. + resource["Metadata"]["ProjectRootDirectory"] = terraform_application_dir + + +def _validate_referenced_resource_matches_sam_metadata_type( + cfn_resource: dict, sam_metadata_resource: dict, sam_metadata_resource_address: str, expected_package_type: str +) -> None: + """ + Validate if the resource that match the resource name provided in the sam metadata resource matches the resource + type provided in the metadata as well. + + Parameters + ---------- + cfn_resource: dict + The CFN resource that matches the sam metadata resource name + sam_metadata_resource: Dict + The sam metadata resource properties + sam_metadata_resource_address: str + The sam metadata resource address + expected_package_type: str + The expected lambda function package type. 
+ """ + cfn_resource_properties = cfn_resource.get("Properties", {}) + resource_type = get_sam_metadata_planned_resource_value_attribute( + sam_metadata_resource, SAM_METADATA_RESOURCE_TYPE_ATTRIBUTE + ) + cfn_resource_type = cfn_resource.get("Type") + lambda_function_package_type = cfn_resource_properties.get("PackageType", ZIP) + LOG.debug( + "Validate if the referenced resource in sam metadata resource %s is of the expected type %s", + sam_metadata_resource_address, + resource_type, + ) + + if ( + cfn_resource_type != CFN_AWS_LAMBDA_FUNCTION + or not cfn_resource_properties + or lambda_function_package_type != expected_package_type + ): + LOG.error( + "The matched resource is of type %s, and package type is %s, but the type mentioned in the sam metadata " + "resource %s is %s", + cfn_resource_type, + lambda_function_package_type, + sam_metadata_resource_address, + resource_type, + ) + raise InvalidSamMetadataPropertiesException( + f"The sam metadata resource {sam_metadata_resource_address} is referring to a resource that does not " + f"match the resource type {resource_type}." + ) + + +def _get_python_command_name() -> str: + """ + Verify that python is installed and return the name of the python command + + Returns + ------- + str + The name of the python command installed + """ + command_names_to_try = ["python3", "py3", "python", "py"] + for command_name in command_names_to_try: + try: + run_result = run([command_name, "--version"], check=True, capture_output=True, text=True) + except CalledProcessError: + pass + except OSError: + pass + else: + # check python version + if not PYTHON_VERSION_REGEX.match(run_result.stdout): + continue + return command_name + raise PrepareHookException("Python not found. 
Please ensure that python 3.7 or above is installed.") diff --git a/samcli/hook_packages/terraform/hooks/prepare/hook.py b/samcli/hook_packages/terraform/hooks/prepare/hook.py index 4553be845e..f5170e144b 100644 --- a/samcli/hook_packages/terraform/hooks/prepare/hook.py +++ b/samcli/hook_packages/terraform/hooks/prepare/hook.py @@ -1,108 +1,29 @@ -# pylint: disable=too-many-lines """ Terraform prepare hook implementation + +This module contains the main prepare method """ -# pylint: disable=C0302 -# TODO: Move some of the logic out of this file and remove this disable -from dataclasses import dataclass import json import os -from json.decoder import JSONDecodeError from pathlib import Path -import re from subprocess import run, CalledProcessError -from typing import Any, Callable, Dict, List, Optional, Tuple, Union -import hashlib +from typing import Any, Dict import logging -import shutil -import uuid -from samcli.hook_packages.terraform.hooks.prepare.resource_linking import ( - _link_lambda_function_to_layer, - _get_configuration_address, - _build_module, - _resolve_resource_attribute, -) -from samcli.hook_packages.terraform.hooks.prepare.types import ( - ConstantValue, - References, - ResolvedReference, - TFModule, - TFResource, -) -from samcli.hook_packages.terraform.lib.utils import ( - build_cfn_logical_id, - _calculate_configuration_attribute_value_hash, - get_sam_metadata_planned_resource_value_attribute, -) from samcli.lib.hook.exceptions import PrepareHookException -from samcli.hook_packages.terraform.hooks.prepare.exceptions import InvalidSamMetadataPropertiesException +from samcli.hook_packages.terraform.hooks.prepare.translate import translate_to_cfn +from samcli.hook_packages.terraform.hooks.prepare.constants import CFN_CODE_PROPERTIES from samcli.lib.utils import osutils -from samcli.lib.utils.packagetype import ZIP, IMAGE -from samcli.lib.utils.path_utils import convert_path_to_unix_path -from samcli.lib.utils.resources import ( - 
AWS_LAMBDA_FUNCTION as CFN_AWS_LAMBDA_FUNCTION, - AWS_LAMBDA_LAYERVERSION as CFN_AWS_LAMBDA_LAYER_VERSION, -) from samcli.lib.utils.subprocess_utils import invoke_subprocess_with_loading_pattern, LoadingPatternError -SAM_METADATA_DOCKER_TAG_ATTRIBUTE = "docker_tag" - -SAM_METADATA_DOCKER_BUILD_ARGS_ATTRIBUTE = "docker_build_args" - -SAM_METADATA_DOCKER_FILE_ATTRIBUTE = "docker_file" - -SAM_METADATA_RESOURCE_TYPE_ATTRIBUTE = "resource_type" - -SAM_METADATA_ADDRESS_ATTRIBUTE = "address" - -SAM_METADATA_RESOURCE_NAME_ATTRIBUTE = "resource_name" - -REMOTE_DUMMY_VALUE = "<>" - LOG = logging.getLogger(__name__) -# check for python 3, 3.7 or above -# regex: search for 'Python', whitespace, '3.', digits 7-9 or 2+ digits, any digit or '.' 0+ times -PYTHON_VERSION_REGEX = re.compile(r"Python\s*3.([7-9]|\d{2,})[\d.]*") - -TF_AWS_LAMBDA_FUNCTION = "aws_lambda_function" -TF_AWS_LAMBDA_LAYER_VERSION = "aws_lambda_layer_version" -AWS_PROVIDER_NAME = "registry.terraform.io/hashicorp/aws" -NULL_RESOURCE_PROVIDER_NAME = "registry.terraform.io/hashicorp/null" -SAM_METADATA_RESOURCE_TYPE = "null_resource" -SAM_METADATA_NAME_PREFIX = "sam_metadata_" - -PropertyBuilder = Callable[[dict, TFResource], Any] -PropertyBuilderMapping = Dict[str, PropertyBuilder] - TERRAFORM_METADATA_FILE = "template.json" -TERRAFORM_BUILD_SCRIPT = "copy_terraform_built_artifacts.py" -TF_BACKEND_OVERRIDE_FILENAME = "z_samcli_backend_override" - HOOK_METADATA_KEY = "AWS::SAM::Hook" TERRAFORM_HOOK_METADATA = { "HookName": "terraform", } -CFN_CODE_PROPERTIES = { - CFN_AWS_LAMBDA_FUNCTION: "Code", - CFN_AWS_LAMBDA_LAYER_VERSION: "Content", -} - - -@dataclass -class ResourceTranslator: - cfn_name: str - property_builder_mapping: PropertyBuilderMapping - - -@dataclass -class SamMetadataResource: - current_module_address: Optional[str] - resource: Dict - config_resource: TFResource - def prepare(params: dict) -> dict: """ @@ -180,7 +101,7 @@ def prepare(params: dict) -> dict: # convert terraform to cloudformation 
LOG.info("Generating metadata file") - cfn_dict = _translate_to_cfn(tf_json, output_dir_path, terraform_application_dir) + cfn_dict = translate_to_cfn(tf_json, output_dir_path, terraform_application_dir) if cfn_dict.get("Resources"): _update_resources_paths(cfn_dict.get("Resources"), terraform_application_dir) # type: ignore @@ -246,1570 +167,3 @@ def _update_resources_paths(cfn_resources: Dict[str, Any], terraform_application original_path = resource.get("Properties", {}).get(attribute) if isinstance(original_path, str) and not os.path.isabs(original_path): resource["Properties"][attribute] = str(Path(terraform_application_dir).joinpath(original_path)) - - -def _translate_to_cfn(tf_json: dict, output_directory_path: str, terraform_application_dir: str) -> dict: - """ - Translates the json output of a terraform show into CloudFormation - - Parameters - ---------- - tf_json: dict - A terraform show json output - output_directory_path: str - the string path to write the metadata file and makefile - terraform_application_dir: str - the terraform project root directory - - Returns - ------- - dict - The CloudFormation resulting from translating tf_json - """ - # setup root_module and cfn dict - root_module = tf_json.get("planned_values", {}).get("root_module") - cfn_dict: dict = {"AWSTemplateFormatVersion": "2010-09-09", "Resources": {}} - if not root_module: - return cfn_dict - - LOG.debug("Mapping Lambda functions to their corresponding layers.") - input_vars: Dict[str, Union[ConstantValue, References]] = { - var_name: ConstantValue(value=var_value.get("value")) - for var_name, var_value in tf_json.get("variables", {}).items() - } - root_tf_module = _build_module("", tf_json.get("configuration", {}).get("root_module"), input_vars, None) - - # to map s3 object sources to respective functions later - # this dictionary will map between the hash value of the S3 Bucket attributes, and a tuple of the planned value - # source code path, and the configuration value of the 
source code path. - s3_hash_to_source: Dict[str, Tuple[str, List[Union[ConstantValue, ResolvedReference]]]] = {} - - # map code/imageuri to Lambda resources - # the key is the hash value of lambda code/imageuri - # the value is the list of pair of the resource logical id, and the lambda cfn resource dict - lambda_resources_to_code_map: Dict[str, List[Tuple[Dict, str]]] = {} - - sam_metadata_resources: List[SamMetadataResource] = [] - - lambda_layers_terraform_resources: Dict[str, Dict] = {} - lambda_funcs_conf_cfn_resources: Dict[str, List] = {} - lambda_config_funcs_conf_cfn_resources: Dict[str, TFResource] = {} - - # create and iterate over queue of modules to handle child modules - module_queue = [(root_module, root_tf_module)] - while module_queue: - modules_pair = module_queue.pop(0) - curr_module, curr_tf_module = modules_pair - curr_module_address = curr_module.get("address") - - _add_child_modules_to_queue(curr_module, curr_tf_module, module_queue) - - # iterate over resources for current module - resources = curr_module.get("resources", {}) - for resource in resources: - resource_provider = resource.get("provider_name") - resource_type = resource.get("type") - resource_values = resource.get("values") - resource_full_address = resource.get("address") - resource_name = resource.get("name") - resource_mode = resource.get("mode") - - resource_address = ( - f"data.{resource_type}.{resource_name}" - if resource_mode == "data" - else f"{resource_type}.{resource_name}" - ) - config_resource_address = _get_configuration_address(resource_address) - if config_resource_address not in curr_tf_module.resources: - raise PrepareHookException( - f"There is no configuration resource for resource address {resource_full_address} and " - f"configuration address {config_resource_address}" - ) - - config_resource = curr_tf_module.resources[config_resource_address] - - if ( - resource_provider == NULL_RESOURCE_PROVIDER_NAME - and resource_type == SAM_METADATA_RESOURCE_TYPE - and 
resource_name.startswith(SAM_METADATA_NAME_PREFIX) - ): - _add_metadata_resource_to_metadata_list( - SamMetadataResource(curr_module_address, resource, config_resource), - resource, - sam_metadata_resources, - ) - continue - - # only process supported provider - if resource_provider != AWS_PROVIDER_NAME: - continue - - # store S3 sources - if resource_type == "aws_s3_object": - s3_bucket = ( - resource_values.get("bucket") - if "bucket" in resource_values - else _resolve_resource_attribute(config_resource, "bucket") - ) - s3_key = ( - resource_values.get("key") - if "key" in resource_values - else _resolve_resource_attribute(config_resource, "key") - ) - obj_hash = _get_s3_object_hash(s3_bucket, s3_key) - code_artifact = resource_values.get("source") - config_code_artifact = ( - code_artifact if code_artifact else _resolve_resource_attribute(config_resource, "source") - ) - s3_hash_to_source[obj_hash] = (code_artifact, config_code_artifact) - - resource_translator = RESOURCE_TRANSLATOR_MAPPING.get(resource_type) - # resource type not supported - if not resource_translator: - continue - - # translate TF resource "values" to CFN properties - LOG.debug("Processing resource %s", resource_full_address) - translated_properties = _translate_properties( - resource_values, resource_translator.property_builder_mapping, config_resource - ) - translated_resource = { - "Type": resource_translator.cfn_name, - "Properties": translated_properties, - "Metadata": {"SamResourceId": resource_full_address, "SkipBuild": True}, - } - - # build CFN logical ID from resource address - logical_id = build_cfn_logical_id(resource_full_address) - - # Add resource to cfn dict - cfn_dict["Resources"][logical_id] = translated_resource - - if resource_type == TF_AWS_LAMBDA_LAYER_VERSION: - lambda_layers_terraform_resources[logical_id] = resource - planned_value_layer_code_path = translated_properties.get("Content") - _add_lambda_resource_code_path_to_code_map( - config_resource, - "layer", - 
lambda_resources_to_code_map, - logical_id, - planned_value_layer_code_path, - "filename", - translated_resource, - ) - - if resource_type == TF_AWS_LAMBDA_FUNCTION: - resolved_config_address = _get_configuration_address(resource_full_address) - matched_lambdas = lambda_funcs_conf_cfn_resources.get(resolved_config_address, []) - matched_lambdas.append(translated_resource) - lambda_funcs_conf_cfn_resources[resolved_config_address] = matched_lambdas - lambda_config_funcs_conf_cfn_resources[resolved_config_address] = config_resource - - resource_type = translated_properties.get("PackageType", ZIP) - resource_type_constants = {ZIP: ("zip", "filename"), IMAGE: ("image", "image_uri")} - planned_value_function_code_path = ( - translated_properties.get("Code") - if resource_type == ZIP - else translated_properties.get("Code", {}).get("ImageUri") - ) - func_type, tf_code_property = resource_type_constants[resource_type] - - _add_lambda_resource_code_path_to_code_map( - config_resource, - func_type, - lambda_resources_to_code_map, - logical_id, - planned_value_function_code_path, - tf_code_property, - translated_resource, - ) - - # map s3 object sources to corresponding functions - LOG.debug("Mapping S3 object sources to corresponding functions") - _map_s3_sources_to_functions(s3_hash_to_source, cfn_dict.get("Resources", {}), lambda_resources_to_code_map) - - _link_lambda_functions_to_layers( - lambda_config_funcs_conf_cfn_resources, lambda_funcs_conf_cfn_resources, lambda_layers_terraform_resources - ) - - if sam_metadata_resources: - LOG.debug("Enrich the mapped resources with the sam metadata information and generate Makefile") - _enrich_resources_and_generate_makefile( - sam_metadata_resources, - cfn_dict.get("Resources", {}), - output_directory_path, - terraform_application_dir, - lambda_resources_to_code_map, - ) - else: - LOG.debug("There is no sam metadata resources, no enrichment or Makefile is required") - - # check if there is still any dummy remote values for 
lambda resource imagesUri or S3 attributes - _check_dummy_remote_values(cfn_dict.get("Resources", {})) - - return cfn_dict - - -def _add_lambda_resource_code_path_to_code_map( - terraform_resource: TFResource, - lambda_resource_prefix: str, - lambda_resources_to_code_map: Dict, - logical_id: str, - lambda_resource_code_value: Any, - terraform_code_property_name: str, - translated_resource: Dict, -) -> None: - """ - Calculate the hash value of the lambda resource code path planned value or the configuration value and use it to - map the lambda resource logical id to the source code path. This will be used later to map the metadata resource to - the correct lambda resource. - - Parameters - ---------- - terraform_resource: TFResource - The mapped TF resource. This will be used to resolve the configuration value of the code attribute in the lambda - resource - lambda_resource_prefix: str - a string prefix to be added to the hash value to differentiate between the different lambda resources types - lambda_resources_to_code_map: dict - the map between lambda resources code path values, and the lambda resources logical ids - logical_id: str - lambda resource logical id - lambda_resource_code_value: Any - The planned value of the lambda resource code path - terraform_code_property_name: str - The lambda resource code property name - translated_resource: Dict - The CFN translated lambda resource - """ - if not lambda_resource_code_value or not isinstance(lambda_resource_code_value, str): - lambda_resource_code_value = _resolve_resource_attribute(terraform_resource, terraform_code_property_name) - if lambda_resource_code_value: - hash_value = ( - f"{lambda_resource_prefix}_{_calculate_configuration_attribute_value_hash(lambda_resource_code_value)}" - ) - functions_list = lambda_resources_to_code_map.get(hash_value, []) - functions_list.append((translated_resource, logical_id)) - lambda_resources_to_code_map[hash_value] = functions_list - - -def 
_add_metadata_resource_to_metadata_list( - sam_metadata_resource: SamMetadataResource, - sam_metadata_resource_planned_values: Dict, - sam_metadata_resources: List[SamMetadataResource], -) -> None: - """ - Prioritize the metadata resources that has resource name value to overwrite the metadata resources that does not - have resource name value. - - Parameters - ---------- - sam_metadata_resource: SamMetadataResource - The mapped metadata resource - sam_metadata_resource_planned_values: Dict - The metadata resource in planned values section - sam_metadata_resources: List[SamMetadataResource] - The list of metadata resources - """ - if get_sam_metadata_planned_resource_value_attribute( - sam_metadata_resource_planned_values, SAM_METADATA_RESOURCE_NAME_ATTRIBUTE - ): - sam_metadata_resources.append(sam_metadata_resource) - else: - sam_metadata_resources.insert(0, sam_metadata_resource) - - -def _add_child_modules_to_queue(curr_module: Dict, curr_module_configuration: TFModule, modules_queue: List) -> None: - """ - Iterate over the children modules of current module and add each module with its related child module configuration - to the modules_queue. 
- - Parameters - ---------- - curr_module: Dict - The current module in the planned values - curr_module_configuration: TFModule - The current module configuration - modules_queue: List - The list of modules - """ - child_modules = curr_module.get("child_modules") - if child_modules: - for child_module in child_modules: - config_child_module_address = ( - _get_configuration_address(child_module["address"]) if "address" in child_module else None - ) - module_name = ( - config_child_module_address[config_child_module_address.rfind(".") + 1 :] - if config_child_module_address - else None - ) - child_tf_module = curr_module_configuration.child_modules.get(module_name) if module_name else None - if child_tf_module is None: - raise PrepareHookException( - f"Module {config_child_module_address} exists in terraform planned_value, but does not exist " - "in terraform configuration" - ) - modules_queue.append((child_module, child_tf_module)) - - -def _link_lambda_functions_to_layers( - lambda_config_funcs_conf_cfn_resources: Dict[str, TFResource], - lambda_funcs_conf_cfn_resources: Dict[str, List], - lambda_layers_terraform_resources: Dict[str, Dict], -): - """ - Iterate through all of the resources and link the corresponding Lambda Layers to each Lambda Function - - Parameters - ---------- - lambda_config_funcs_conf_cfn_resources: Dict[str, TFResource] - Dictionary of configuration lambda resources - lambda_funcs_conf_cfn_resources: Dict[str, List] - Dictionary containing resolved configuration addresses matched up to the cfn Lambda functions - lambda_layers_terraform_resources: Dict[str, Dict] - Dictionary of all actual terraform layers resources (not configuration resources). 
The dictionary's key is the - calculated logical id for each resource - - Returns - ------- - dict - The CloudFormation resulting from translating tf_json - """ - for config_address, resource in lambda_config_funcs_conf_cfn_resources.items(): - if config_address in lambda_funcs_conf_cfn_resources: - LOG.debug("Linking layers for Lambda function %s", resource.full_address) - _link_lambda_function_to_layer( - resource, lambda_funcs_conf_cfn_resources[config_address], lambda_layers_terraform_resources - ) - - -def _validate_referenced_resource_matches_sam_metadata_type( - cfn_resource: dict, sam_metadata_resource: dict, sam_metadata_resource_address: str, expected_package_type: str -) -> None: - """ - Validate if the resource that match the resource name provided in the sam metadata resource matches the resource - type provided in the metadata as well. - - Parameters - ---------- - cfn_resource: dict - The CFN resource that matches the sam metadata resource name - sam_metadata_resource: Dict - The sam metadata resource properties - sam_metadata_resource_address: str - The sam metadata resource address - expected_package_type: str - The expected lambda function package type. 
- """ - cfn_resource_properties = cfn_resource.get("Properties", {}) - resource_type = get_sam_metadata_planned_resource_value_attribute( - sam_metadata_resource, SAM_METADATA_RESOURCE_TYPE_ATTRIBUTE - ) - cfn_resource_type = cfn_resource.get("Type") - lambda_function_package_type = cfn_resource_properties.get("PackageType", ZIP) - LOG.debug( - "Validate if the referenced resource in sam metadata resource %s is of the expected type %s", - sam_metadata_resource_address, - resource_type, - ) - - if ( - cfn_resource_type != CFN_AWS_LAMBDA_FUNCTION - or not cfn_resource_properties - or lambda_function_package_type != expected_package_type - ): - LOG.error( - "The matched resource is of type %s, and package type is %s, but the type mentioned in the sam metadata " - "resource %s is %s", - cfn_resource_type, - lambda_function_package_type, - sam_metadata_resource_address, - resource_type, - ) - raise InvalidSamMetadataPropertiesException( - f"The sam metadata resource {sam_metadata_resource_address} is referring to a resource that does not " - f"match the resource type {resource_type}." - ) - - -def _get_source_code_path( - sam_metadata_resource: dict, - sam_metadata_resource_address: str, - project_root_dir: str, - src_code_property_name: str, - property_path_property_name: str, - src_code_attribute_name: str, -) -> str: - """ - Validate that sam metadata resource contains the valid metadata properties - to get a lambda function or layer source code. 
- - Parameters - ---------- - sam_metadata_resource: Dict - The sam metadata resource properties - sam_metadata_resource_address: str - The sam metadata resource address - project_root_dir: str - the terraform project root directory path - src_code_property_name: str - the sam metadata property name that contains the lambda function or layer source code or docker context path - property_path_property_name: str - the sam metadata property name that contains the property to get the source code value if it was provided - as json string - src_code_attribute_name: str - the lambda function or later source code or docker context to be used to raise the correct exception - - Returns - ------- - str - The lambda function or layer source code or docker context paths - """ - LOG.debug( - "Extract the %s from the sam metadata resource %s from property %s", - src_code_attribute_name, - sam_metadata_resource_address, - src_code_property_name, - ) - source_code = get_sam_metadata_planned_resource_value_attribute(sam_metadata_resource, src_code_property_name) - source_code_property = get_sam_metadata_planned_resource_value_attribute( - sam_metadata_resource, property_path_property_name - ) - LOG.debug( - "The found %s value is %s and property value is %s", src_code_attribute_name, source_code, source_code_property - ) - if not source_code: - raise InvalidSamMetadataPropertiesException( - f"The sam metadata resource {sam_metadata_resource_address} " - f"should contain the lambda function/lambda layer " - f"{src_code_attribute_name} in property {src_code_property_name}" - ) - if isinstance(source_code, str): - try: - LOG.debug("Try to decode the %s value in case if it is a encoded JSON string.", src_code_attribute_name) - source_code = json.loads(source_code) - LOG.debug("The decoded value of the %s value is %s", src_code_attribute_name, source_code) - except JSONDecodeError: - LOG.debug("Source code value could not be parsed as a JSON object. 
Handle it as normal string value") - - if isinstance(source_code, dict): - LOG.debug( - "Process the extracted %s as JSON object using the property %s", - src_code_attribute_name, - source_code_property, - ) - if not source_code_property: - raise InvalidSamMetadataPropertiesException( - f"The sam metadata resource {sam_metadata_resource_address} " - f"should contain the lambda function/lambda layer " - f"{src_code_attribute_name} property in property {property_path_property_name} as the " - f"{src_code_property_name} value is an object" - ) - cfn_source_code_path = source_code.get(source_code_property) - if not cfn_source_code_path: - LOG.error( - "The property %s does not exist in the extracted %s JSON object %s", - source_code_property, - src_code_attribute_name, - source_code, - ) - raise InvalidSamMetadataPropertiesException( - f"The sam metadata resource {sam_metadata_resource_address} " - f"should contain a valid lambda function/lambda layer " - f"{src_code_attribute_name} property in property {property_path_property_name} as the " - f"{src_code_property_name} value is an object" - ) - elif isinstance(source_code, list): - # SAM CLI does not process multiple paths, so we will handle only the first value in this list - LOG.debug( - "Process the extracted %s as list, and get the first value as SAM CLI does not support multiple paths", - src_code_attribute_name, - ) - if len(source_code) < 1: - raise InvalidSamMetadataPropertiesException( - f"The sam metadata resource {sam_metadata_resource_address} " - f"should contain the lambda function/lambda layer " - f"{src_code_attribute_name} in property {src_code_property_name}, and it should not be an empty list" - ) - cfn_source_code_path = source_code[0] - if not cfn_source_code_path: - raise InvalidSamMetadataPropertiesException( - f"The sam metadata resource {sam_metadata_resource_address} " - f"should contain a valid lambda/lambda layer function " - f"{src_code_attribute_name} in property {src_code_property_name}" 
- ) - else: - cfn_source_code_path = source_code - - LOG.debug("The %s path value is %s", src_code_attribute_name, cfn_source_code_path) - - if not os.path.isabs(cfn_source_code_path): - LOG.debug( - "The %s path value is not absoulte value. Get the absolute value based on the root directory %s", - src_code_attribute_name, - project_root_dir, - ) - cfn_source_code_path = os.path.normpath(os.path.join(project_root_dir, cfn_source_code_path)) - LOG.debug("The calculated absolute path of %s is %s", src_code_attribute_name, cfn_source_code_path) - - if not isinstance(cfn_source_code_path, str) or not os.path.exists(cfn_source_code_path): - LOG.error("The path %s does not exist", cfn_source_code_path) - raise InvalidSamMetadataPropertiesException( - f"The sam metadata resource {sam_metadata_resource_address} should contain a valid string value for the " - f"lambda function/lambda layer {src_code_attribute_name} path" - ) - - return cfn_source_code_path - - -def _enrich_zip_lambda_function( - sam_metadata_resource: Dict, - cfn_lambda_function: Dict, - cfn_lambda_function_logical_id: str, - terraform_application_dir: str, - output_directory_path: str, -): - """ - Use the sam metadata resources to enrich the zip lambda function. - - Parameters - ---------- - sam_metadata_resource: Dict - The sam metadata resource properties - cfn_lambda_function: dict - CloudFormation lambda function to be enriched - cfn_lambda_function_logical_id: str - the cloudFormation lambda function to be enriched logical id. - output_directory_path: str - the output directory path to write the generated metadata and makefile - terraform_application_dir: str - the terraform project root directory - """ - sam_metadata_resource_address = sam_metadata_resource.get("address") - if not sam_metadata_resource_address: - raise PrepareHookException( - "Invalid Terraform plan output. The address property should not be null to any terraform resource." 
- ) - - LOG.debug( - "Enrich the ZIP lambda function %s using the metadata properties defined in resource %s", - cfn_lambda_function_logical_id, - sam_metadata_resource_address, - ) - - _validate_referenced_resource_matches_sam_metadata_type( - cfn_lambda_function, sam_metadata_resource, sam_metadata_resource_address, ZIP - ) - - cfn_source_code_path = _get_source_code_path( - sam_metadata_resource, - sam_metadata_resource_address, - terraform_application_dir, - "original_source_code", - "source_code_property", - "source code", - ) - _set_zip_metadata_resources( - cfn_lambda_function, - cfn_source_code_path, - output_directory_path, - terraform_application_dir, - CFN_CODE_PROPERTIES[CFN_AWS_LAMBDA_FUNCTION], - ) - - -def _enrich_image_lambda_function( - sam_metadata_resource: Dict, - cfn_lambda_function: Dict, - cfn_lambda_function_logical_id: str, - terraform_application_dir: str, - output_directory_path: str, -): - """ - Use the sam metadata resources to enrich the image lambda function. - - Parameters - ---------- - sam_metadata_resource: Dict - The sam metadata resource properties - cfn_lambda_function: dict - CloudFormation lambda function to be enriched - cfn_lambda_function_logical_id: str - the cloudFormation lambda function to be enriched logical id. - output_directory_path: str - the output directory path to write the generated metadata and makefile - terraform_application_dir: str - the terraform project root directory - """ - sam_metadata_resource_address = sam_metadata_resource.get("address") - if not sam_metadata_resource_address: - raise PrepareHookException( - "Invalid Terraform plan output. The address property should not be null to any terraform resource." 
- ) - cfn_resource_properties = cfn_lambda_function.get("Properties", {}) - - LOG.debug( - "Enrich the IMAGE lambda function %s using the metadata properties defined in resource %s", - cfn_lambda_function_logical_id, - sam_metadata_resource_address, - ) - - _validate_referenced_resource_matches_sam_metadata_type( - cfn_lambda_function, sam_metadata_resource, sam_metadata_resource_address, IMAGE - ) - - cfn_docker_context_path = _get_source_code_path( - sam_metadata_resource, - sam_metadata_resource_address, - terraform_application_dir, - "docker_context", - "docker_context_property_path", - "docker context", - ) - cfn_docker_file = get_sam_metadata_planned_resource_value_attribute( - sam_metadata_resource, SAM_METADATA_DOCKER_FILE_ATTRIBUTE - ) - cfn_docker_build_args_string = get_sam_metadata_planned_resource_value_attribute( - sam_metadata_resource, SAM_METADATA_DOCKER_BUILD_ARGS_ATTRIBUTE - ) - cfn_docker_build_args = None - if cfn_docker_build_args_string: - try: - LOG.debug("Parse the docker build args %s", cfn_docker_build_args_string) - cfn_docker_build_args = json.loads(cfn_docker_build_args_string) - if not isinstance(cfn_docker_build_args, dict): - raise InvalidSamMetadataPropertiesException( - f"The sam metadata resource {sam_metadata_resource_address} should contain a valid json " - f"encoded string for the lambda function docker build arguments." - ) - except JSONDecodeError as exc: - raise InvalidSamMetadataPropertiesException( - f"The sam metadata resource {sam_metadata_resource_address} should contain a valid json encoded " - f"string for the lambda function docker build arguments." 
- ) from exc - - cfn_docker_tag = get_sam_metadata_planned_resource_value_attribute( - sam_metadata_resource, SAM_METADATA_DOCKER_TAG_ATTRIBUTE - ) - - if cfn_resource_properties.get("Code"): - cfn_resource_properties.pop("Code") - - if not cfn_lambda_function.get("Metadata", {}): - cfn_lambda_function["Metadata"] = {} - cfn_lambda_function["Metadata"]["SkipBuild"] = False - cfn_lambda_function["Metadata"]["DockerContext"] = cfn_docker_context_path - if cfn_docker_file: - cfn_lambda_function["Metadata"]["Dockerfile"] = cfn_docker_file - if cfn_docker_tag: - cfn_lambda_function["Metadata"]["DockerTag"] = cfn_docker_tag - if cfn_docker_build_args: - cfn_lambda_function["Metadata"]["DockerBuildArgs"] = cfn_docker_build_args - - -def _enrich_lambda_layer( - sam_metadata_resource: Dict, - cfn_lambda_layer: Dict, - cfn_lambda_layer_logical_id: str, - terraform_application_dir: str, - output_directory_path: str, -) -> None: - """ - Use the sam metadata resources to enrich the lambda layer. - - Parameters - ---------- - sam_metadata_resource: Dict - The sam metadata resource properties - cfn_lambda_layer: dict - CloudFormation lambda layer to be enriched - cfn_lambda_layer_logical_id: str - the cloudFormation lambda layer to be enriched logical id. - output_directory_path: str - the output directory path to write the generated metadata and makefile - terraform_application_dir: str - the terraform project root directory - """ - sam_metadata_resource_address = sam_metadata_resource.get("address") - if not sam_metadata_resource_address: - raise PrepareHookException( - "Invalid Terraform plan output. The address property should not be null to any terraform resource." 
- ) - _validate_referenced_resource_layer_matches_metadata_type( - cfn_lambda_layer, sam_metadata_resource, sam_metadata_resource_address - ) - LOG.debug( - "Enrich the Lambda Layer Version %s using the metadata properties defined in resource %s", - cfn_lambda_layer_logical_id, - sam_metadata_resource_address, - ) - - cfn_source_code_path = _get_source_code_path( - sam_metadata_resource, - sam_metadata_resource_address, - terraform_application_dir, - "original_source_code", - "source_code_property", - "source code", - ) - - _set_zip_metadata_resources( - cfn_lambda_layer, - cfn_source_code_path, - output_directory_path, - terraform_application_dir, - CFN_CODE_PROPERTIES[CFN_AWS_LAMBDA_LAYER_VERSION], - ) - - -def _validate_referenced_resource_layer_matches_metadata_type( - cfn_resource: dict, - sam_metadata_resource: dict, - sam_metadata_resource_address: str, -) -> None: - """ - Validate if the resource that match the resource name provided in the sam metadata resource matches the resource - type provided in the metadata as well. 
- - Parameters - ---------- - cfn_resource: dict - The CFN resource that matches the sam metadata resource name - sam_metadata_resource: Dict - The sam metadata resource properties - sam_metadata_resource_address: str - The sam metadata resource address - """ - cfn_resource_properties = cfn_resource.get("Properties", {}) - resource_type = sam_metadata_resource.get(SAM_METADATA_RESOURCE_TYPE_ATTRIBUTE) - cfn_resource_type = cfn_resource.get("Type") - LOG.debug( - "Validate if the referenced resource in sam metadata resource %s is of the expected type %s", - sam_metadata_resource_address, - resource_type, - ) - - if cfn_resource_type != CFN_AWS_LAMBDA_LAYER_VERSION or not cfn_resource_properties: - LOG.error( - "The matched resource is of type %s but the type mentioned in the sam metadata resource %s is %s", - cfn_resource_type, - sam_metadata_resource_address, - resource_type, - ) - raise InvalidSamMetadataPropertiesException( - f"The sam metadata resource {sam_metadata_resource_address} is referring to a resource that does not " - f"match the resource type {resource_type}." - ) - - -def _set_zip_metadata_resources( - resource: dict, - cfn_source_code_path: str, - output_directory_path: str, - terraform_application_dir: str, - code_property: str, -) -> None: - """ - Update the CloudFormation resource metadata with the enrichment properties from the TF resource - - Parameters - ---------- - resource: dict - The CFN resource that matches the sam metadata resource name - cfn_source_code_path: dict - Absolute path location of where the original source code resides. - output_directory_path: str - The directory where to find the Makefile the path to be copied into the temp dir. - terraform_application_dir: str - The working directory from which to run the Makefile. - code_property: - The property in the configuration used to denote the code e.g. 
"Code" or "Content" - """ - resource_properties = resource.get("Properties", {}) - resource_properties[code_property] = cfn_source_code_path - if not resource.get("Metadata", {}): - resource["Metadata"] = {} - resource["Metadata"]["SkipBuild"] = False - resource["Metadata"]["BuildMethod"] = "makefile" - resource["Metadata"]["ContextPath"] = output_directory_path - resource["Metadata"]["WorkingDirectory"] = terraform_application_dir - # currently we set the terraform project root directory that contains all the terraform artifacts as the project - # directory till we work on the custom hook properties, and add a property for this value. - resource["Metadata"]["ProjectRootDirectory"] = terraform_application_dir - - -def _enrich_resources_and_generate_makefile( - sam_metadata_resources: List[SamMetadataResource], - cfn_resources: Dict[str, Dict], - output_directory_path: str, - terraform_application_dir: str, - lambda_resources_to_code_map: Dict, -) -> None: - """ - Use the sam metadata resources to enrich the mapped resources and to create a Makefile with a rule for - each lambda resource to be built. - - Parameters - ---------- - sam_metadata_resources: List[SamMetadataResource] - The list of sam metadata resources defined in the terraform project. 
- cfn_resources: dict - CloudFormation resources - output_directory_path: str - the output directory path to write the generated metadata and makefile - terraform_application_dir: str - the terraform project root directory - lambda_resources_to_code_map: Dict - The map between lambda resources code path, and lambda resources logical ids - """ - - python_command_name = _get_python_command_name() - - resources_types_enrichment_functions = { - "ZIP_LAMBDA_FUNCTION": _enrich_zip_lambda_function, - "IMAGE_LAMBDA_FUNCTION": _enrich_image_lambda_function, - "LAMBDA_LAYER": _enrich_lambda_layer, - } - - makefile_rules = [] - for sam_metadata_resource in sam_metadata_resources: - # enrich resource - resource_type = get_sam_metadata_planned_resource_value_attribute( - sam_metadata_resource.resource, SAM_METADATA_RESOURCE_TYPE_ATTRIBUTE - ) - sam_metadata_resource_address = sam_metadata_resource.resource.get("address") - enrichment_function = resources_types_enrichment_functions.get(resource_type) - if enrichment_function is None: - raise InvalidSamMetadataPropertiesException( - f"The resource type {resource_type} found in the sam metadata resource " - f"{sam_metadata_resource_address} is not a correct resource type. 
The resource type should be one " - f"of these values {resources_types_enrichment_functions.keys()}" - ) - - lambda_resources = _get_relevant_cfn_resource( - sam_metadata_resource, cfn_resources, lambda_resources_to_code_map - ) - for cfn_resource, logical_id in lambda_resources: - enrichment_function( - sam_metadata_resource.resource, - cfn_resource, - logical_id, - terraform_application_dir, - output_directory_path, - ) - - # get makefile rule for resource - makefile_rule = _generate_makefile_rule_for_lambda_resource( - sam_metadata_resource, logical_id, terraform_application_dir, python_command_name, output_directory_path - ) - makefile_rules.append(makefile_rule) - - # generate makefile - LOG.debug("Generate Makefile in %s", output_directory_path) - _generate_makefile(makefile_rules, output_directory_path) - - -def _generate_makefile( - makefile_rules: List[str], - output_directory_path: str, -) -> None: - """ - Generates a makefile with the given rules in the given directory - Parameters - ---------- - makefile_rules: List[str], - the list of rules to write in the Makefile - output_directory_path: str - the output directory path to write the generated makefile - """ - - # create output directory if it doesn't exist - if not os.path.exists(output_directory_path): - os.makedirs(output_directory_path, exist_ok=True) - - # create z_samcli_backend_override.tf in output directory - _generate_backend_override_file(output_directory_path) - - # copy copy_terraform_built_artifacts.py script into output directory - copy_terraform_built_artifacts_script_path = os.path.join( - Path(os.path.dirname(__file__)).parent.parent, TERRAFORM_BUILD_SCRIPT - ) - shutil.copy(copy_terraform_built_artifacts_script_path, output_directory_path) - - # create makefile - makefile_path = os.path.join(output_directory_path, "Makefile") - with open(makefile_path, "w+") as makefile: - makefile.writelines(makefile_rules) - - -def _generate_backend_override_file(output_directory_path: str): - """ 
- Generates an override tf file to use a temporary backend - - Parameters - ---------- - output_directory_path: str - the output directory path to write the generated makefile - """ - statefile_filename = f"{uuid.uuid4()}.tfstate" - override_content = "terraform {\n" ' backend "local" {\n' f' path = "./{statefile_filename}"\n' " }\n" "}\n" - override_file_path = os.path.join(output_directory_path, TF_BACKEND_OVERRIDE_FILENAME) - with open(override_file_path, "w+") as f: - f.write(override_content) - - -def _get_relevant_cfn_resource( - sam_metadata_resource: SamMetadataResource, - cfn_resources: Dict[str, Dict], - lambda_resources_to_code_map: Dict[str, List[Tuple[Dict, str]]], -) -> List[Tuple[Dict, str]]: - """ - use the sam metadata resource name property to determine the resource address, and transform the address to logical - id to use it to get the cfn_resource. - If the metadata resource does not contain a resource name property, so we need to use the resource built artifact - path to find tha lambda resources that use the same artifact path - - Parameters - ---------- - sam_metadata_resource: SamMetadataResource - sam metadata resource that contain extra information about some resource. 
- cfn_resources: Dict - CloudFormation resources - lambda_resources_to_code_map: Dict - The map between lambda resources code path, and lambda resources logical ids - - Returns - ------- - List[tuple(Dict, str)] - The cfn resources that mentioned in the sam metadata resource, and the resource logical id - """ - - resources_types = { - "ZIP_LAMBDA_FUNCTION": "zip", - "IMAGE_LAMBDA_FUNCTION": "image", - "LAMBDA_LAYER": "layer", - } - - sam_metadata_resource_address = sam_metadata_resource.resource.get("address") - resource_name = get_sam_metadata_planned_resource_value_attribute( - sam_metadata_resource.resource, SAM_METADATA_RESOURCE_NAME_ATTRIBUTE - ) - resource_type = get_sam_metadata_planned_resource_value_attribute( - sam_metadata_resource.resource, SAM_METADATA_RESOURCE_TYPE_ATTRIBUTE - ) - if not resource_name: - artifact_property_name = ( - "built_output_path" if resource_type in ["ZIP_LAMBDA_FUNCTION", "LAMBDA_LAYER"] else "built_image_uri" - ) - artifact_path_value = get_sam_metadata_planned_resource_value_attribute( - sam_metadata_resource.resource, artifact_property_name - ) - if not artifact_path_value: - artifact_path_value = _resolve_resource_attribute( - sam_metadata_resource.config_resource, artifact_property_name - ) - hash_value = ( - f"{resources_types[resource_type]}_{_calculate_configuration_attribute_value_hash(artifact_path_value)}" - ) - lambda_resources = lambda_resources_to_code_map.get(hash_value, []) - if not lambda_resources: - raise InvalidSamMetadataPropertiesException( - f"sam cli expects the sam metadata resource {sam_metadata_resource_address} to contain a resource name " - f"that will be enriched using this metadata resource" - ) - return lambda_resources - # the provided resource name will be always a postfix to the module address. The customer could not set a full - # address within a module. 
- LOG.debug( - "Check if the input resource name %s is a postfix to the current module address %s", - resource_name, - sam_metadata_resource.current_module_address, - ) - full_resource_address = ( - f"{sam_metadata_resource.current_module_address}.{resource_name}" - if sam_metadata_resource.current_module_address - else resource_name - ) - LOG.debug("check if the resource address %s has a relevant cfn resource or not", full_resource_address) - logical_id = build_cfn_logical_id(full_resource_address) - cfn_resource = cfn_resources.get(logical_id) - if cfn_resource: - LOG.debug("The CFN resource that match the input resource name %s is %s", resource_name, logical_id) - return [(cfn_resource, logical_id)] - - raise InvalidSamMetadataPropertiesException( - f"There is no resource found that match the provided resource name " f"{resource_name}" - ) - - -def _get_python_command_name() -> str: - """ - Verify that python is installed and return the name of the python command - - Returns - ------- - str - The name of the python command installed - """ - command_names_to_try = ["python3", "py3", "python", "py"] - for command_name in command_names_to_try: - try: - run_result = run([command_name, "--version"], check=True, capture_output=True, text=True) - except CalledProcessError: - pass - except OSError: - pass - else: - # check python version - if not PYTHON_VERSION_REGEX.match(run_result.stdout): - continue - return command_name - raise PrepareHookException("Python not found. Please ensure that python 3.7 or above is installed.") - - -def _generate_makefile_rule_for_lambda_resource( - sam_metadata_resource: SamMetadataResource, - logical_id: str, - terraform_application_dir: str, - python_command_name: str, - output_dir: str, -) -> str: - """ - Generates and returns a makefile rule for the lambda resource associated with the given sam metadata resource. 
- - Parameters - ---------- - sam_metadata_resource: SamMetadataResource - A sam metadata resource; the generated makefile rule will correspond to building the lambda resource - associated with this sam metadata resource - logical_id: str - Logical ID of the lambda resource - terraform_application_dir: str - the terraform project root directory - python_command_name: str - the python command name to use for running a script in the makefile rule - output_dir: str - the directory into which the Makefile is written - - Returns - ------- - str - The generated makefile rule - """ - target = _get_makefile_build_target(logical_id) - resource_address = sam_metadata_resource.resource.get("address", "") - python_command_recipe = _format_makefile_recipe( - _build_makerule_python_command( - python_command_name, output_dir, resource_address, sam_metadata_resource, terraform_application_dir - ) - ) - return f"{target}{python_command_recipe}" - - -def _build_makerule_python_command( - python_command_name: str, - output_dir: str, - resource_address: str, - sam_metadata_resource: SamMetadataResource, - terraform_application_dir: str, -) -> str: - """ - Build the Python command recipe to be used inside of the Makefile rule - - Parameters - ---------- - python_command_name: str - the python command name to use for running a script in the makefile recipe - output_dir: str - the directory into which the Makefile is written - resource_address: str - Address of a given terraform resource - sam_metadata_resource: SamMetadataResource - A sam metadata resource; the generated show command recipe will correspond to building the lambda resource - associated with this sam metadata resource - terraform_application_dir: str - the terraform project root directory - - Returns - ------- - str - Fully resolved Terraform show command - """ - show_command_template = ( - '{python_command_name} "{terraform_built_artifacts_script_path}" ' - '--expression "{jpath_string}" --directory "$(ARTIFACTS_DIR)" 
--target "{resource_address}"' - ) - jpath_string = _build_jpath_string(sam_metadata_resource, resource_address) - terraform_built_artifacts_script_path = convert_path_to_unix_path( - str(Path(output_dir, TERRAFORM_BUILD_SCRIPT).relative_to(terraform_application_dir)) - ) - return show_command_template.format( - python_command_name=python_command_name, - terraform_built_artifacts_script_path=terraform_built_artifacts_script_path, - jpath_string=jpath_string.replace('"', '\\"'), - resource_address=resource_address.replace('"', '\\"'), - ) - - -def _build_jpath_string(sam_metadata_resource: SamMetadataResource, resource_address: str) -> str: - """ - Constructs the JPath string for a given sam metadata resource from the planned_values - to the build_output_path as is created by the Terraform plan output - - Parameters - ---------- - sam_metadata_resource: SamMetadataResource - A sam metadata resource; the generated recipe jpath will correspond to building the lambda resource - associated with this sam metadata resource - - resource_address: str - Full address of a Terraform resource - - Returns - ------- - str - Full JPath string for a resource from planned_values to build_output_path - """ - jpath_string_template = ( - "|values|root_module{child_modules}|resources|" - '[?address=="{resource_address}"]|values|triggers|built_output_path' - ) - child_modules_template = "|child_modules|[?address=={module_address}]" - module_address = sam_metadata_resource.current_module_address - full_module_path = "" - parent_modules = _get_parent_modules(module_address) - for module in parent_modules: - full_module_path += child_modules_template.format(module_address=module) - jpath_string = jpath_string_template.format(child_modules=full_module_path, resource_address=resource_address) - return jpath_string - - -def _get_parent_modules(module_address: Optional[str]) -> List[str]: - """ - Convert an a full Terraform resource address to a list of module - addresses from the root module 
to the current module - - e.g. "module.level1_lambda.module.level2_lambda" as input will return - ["module.level1_lambda", "module.level1_lambda.module.level2_lambda"] - - Parameters - ---------- - module_address: str - Full address of the Terraform module - - Returns - ------- - List[str] - List of module addresses starting from the root module to the current module - """ - if not module_address: - return [] - - # Split the address on "." then combine it back with the "module" prefix for each module name - modules = module_address.split(".") - modules = [".".join(modules[i : i + 2]) for i in range(0, len(modules), 2)] - - if not modules: - # The format of the address was somehow different than we expected from the - # module..module. - return [] - - # Prefix each nested module name with the previous - previous_module = modules[0] - full_path_modules = [previous_module] - for module in modules[1:]: - module = previous_module + "." + module - previous_module = module - full_path_modules.append(module) - return full_path_modules - - -def _get_makefile_build_target(logical_id: str) -> str: - """ - Formats the Makefile rule build target string as is needed by the Makefile - - Parameters - ---------- - logical_id: str - Logical ID of the resource to use for the Makefile rule target - - Returns - ------- - str - The formatted Makefile rule build target - """ - return f"build-{logical_id}:\n" - - -def _format_makefile_recipe(rule_string: str) -> str: - """ - Formats the Makefile rule string as is needed by the Makefile - - Parameters - ---------- - rule_string: str - Makefile rule string to be formatted - - Returns - ------- - str - The formatted target rule - """ - return f"\t{rule_string}\n" - - -def _translate_properties( - tf_properties: dict, property_builder_mapping: PropertyBuilderMapping, resource: TFResource -) -> dict: - """ - Translates the properties of a terraform resource into the equivalent properties of a CloudFormation resource - - Parameters - ---------- 
- tf_properties: dict - The terraform properties to translate - property_builder_mappping: PropertyBuilderMapping - A mapping of the CloudFormation property name to a function for building that property - resource: TFResource - The terraform configuration resource that can be used to retrieve some attributes values if needed - - Returns - ------- - dict - The CloudFormation properties resulting from translating tf_properties - """ - cfn_properties = {} - for cfn_property_name, cfn_property_builder in property_builder_mapping.items(): - cfn_property_value = cfn_property_builder(tf_properties, resource) - if cfn_property_value is not None: - cfn_properties[cfn_property_name] = cfn_property_value - return cfn_properties - - -def _get_property_extractor(property_name: str) -> PropertyBuilder: - """ - Returns a PropertyBuilder function to extract the given property from a dict - - Parameters - ---------- - property_name: str - The name of the property to extract - - Returns - ------- - PropertyBuilder - function that takes in a dict and extracts the given property name from it - """ - return lambda properties, _: properties.get(property_name) - - -def _build_lambda_function_environment_property(tf_properties: dict, resource: TFResource) -> Optional[dict]: - """ - Builds the Environment property of a CloudFormation AWS Lambda Function out of the - properties of the equivalent terraform resource - - Parameters - ---------- - tf_properties: dict - Properties of the terraform AWS Lambda function resource - resource: TFResource - Configuration terraform resource - - Returns - ------- - dict - The built Environment property of a CloudFormation AWS Lambda Function resource - """ - environment = tf_properties.get("environment") - if not environment: - return None - - for env in environment: - variables = env.get("variables") - if variables: - return {"Variables": variables} - - # no variables - return None - - -def _build_code_property(tf_properties: dict, resource: TFResource) 
-> Any: - """ - Builds the Code property of a CloudFormation AWS Lambda Function out of the - properties of the equivalent terraform resource - - Parameters - ---------- - tf_properties: dict - Properties of the terraform AWS Lambda function resource - resource: TFResource - Configuration terraform resource - - Returns - ------- - dict - The built Code property of a CloudFormation AWS Lambda Function resource - """ - filename = tf_properties.get("filename") - if filename: - return filename - - code = {} - tf_cfn_prop_names = [ - ("s3_bucket", "S3Bucket"), - ("s3_key", "S3Key"), - ("image_uri", "ImageUri"), - ("s3_object_version", "S3ObjectVersion"), - ] - for tf_prop_name, cfn_prop_name in tf_cfn_prop_names: - tf_prop_value = tf_properties.get(tf_prop_name) - if tf_prop_value is not None: - code[cfn_prop_name] = tf_prop_value - - package_type = tf_properties.get("package_type", ZIP) - - # Get the S3 Bucket details from configuration in case if the customer is creating the S3 bucket in the tf project - if package_type == ZIP and ("S3Bucket" not in code or "S3Key" not in code or "S3ObjectVersion" not in code): - s3_bucket_tf_config_value = _resolve_resource_attribute(resource, "s3_bucket") - s3_key_tf_config_value = _resolve_resource_attribute(resource, "s3_key") - s3_object_version_tf_config_value = _resolve_resource_attribute(resource, "s3_object_version") - if "S3Bucket" not in code and s3_bucket_tf_config_value: - code["S3Bucket"] = REMOTE_DUMMY_VALUE - code["S3Bucket_config_value"] = s3_bucket_tf_config_value - if "S3Key" not in code and s3_key_tf_config_value: - code["S3Key"] = REMOTE_DUMMY_VALUE - code["S3Key_config_value"] = s3_key_tf_config_value - if "S3ObjectVersion" not in code and s3_object_version_tf_config_value: - code["S3ObjectVersion"] = REMOTE_DUMMY_VALUE - code["S3ObjectVersion_config_value"] = s3_object_version_tf_config_value - - # Get the Image URI details from configuration in case if the customer is creating the ecr repo in the tf project - 
if package_type == IMAGE and "ImageUri" not in code: - image_uri_tf_config_value = _resolve_resource_attribute(resource, "image_uri") - if image_uri_tf_config_value: - code["ImageUri"] = REMOTE_DUMMY_VALUE - - return code - - -def _build_lambda_function_image_config_property(tf_properties: dict, resource: TFResource) -> Optional[dict]: - """ - Builds the ImageConfig property of a CloudFormation AWS Lambda Function out of the - properties of the equivalent terraform resource - - Parameters - ---------- - tf_properties: dict - Properties of the terraform AWS Lambda function resource - resource: TFResource - Configuration terraform resource - - Returns - ------- - dict - The built ImageConfig property of a CloudFormation AWS Lambda Function resource - """ - image_config = tf_properties.get("image_config") - if not image_config: - return None - - _check_image_config_value(image_config) - image_config = image_config[0] - - cfn_image_config = {} - tf_cfn_prop_names = [ - ("command", "Command"), - ("entry_point", "EntryPoint"), - ("working_directory", "WorkingDirectory"), - ] - - for tf_prop_name, cfn_prop_name in tf_cfn_prop_names: - tf_prop_value = image_config.get(tf_prop_name) - if tf_prop_value is not None: - cfn_image_config[cfn_prop_name] = tf_prop_value - - return cfn_image_config - - -def _check_image_config_value(image_config: Any) -> bool: - """ - validate if the image_config property value is as SAM CLI expects. If it is not valid, it will raise a - PrepareHookException. - - Parameters - ---------- - image_config: Any - The aws_lambda resource's Image_config property value as read from the terraform plan output. - - Returns - ------- - bool - return True, if the image_config value as expects, and raise PrepareHookException if not as expected. 
- """ - if not isinstance(image_config, list): - raise PrepareHookException( - f"AWS SAM CLI expects that the value of image_config of aws_lambda_function resource in " - f"the terraform plan output to be of type list instead of {type(image_config)}" - ) - if len(image_config) > 1: - raise PrepareHookException( - f"AWS SAM CLI expects that there is only one item in the image_config property of " - f"aws_lambda_function resource in the terraform plan output, but there are " - f"{len(image_config)} items" - ) - return True - - -AWS_LAMBDA_FUNCTION_PROPERTY_BUILDER_MAPPING: PropertyBuilderMapping = { - "FunctionName": _get_property_extractor("function_name"), - "Architectures": _get_property_extractor("architectures"), - "Environment": _build_lambda_function_environment_property, - "Code": _build_code_property, - "Handler": _get_property_extractor("handler"), - "PackageType": _get_property_extractor("package_type"), - "Runtime": _get_property_extractor("runtime"), - "Layers": _get_property_extractor("layers"), - "Timeout": _get_property_extractor("timeout"), - "ImageConfig": _build_lambda_function_image_config_property, -} - -AWS_LAMBDA_LAYER_VERSION_PROPERTY_BUILDER_MAPPING: PropertyBuilderMapping = { - "LayerName": _get_property_extractor("layer_name"), - "CompatibleRuntimes": _get_property_extractor("compatible_runtimes"), - "CompatibleArchitectures": _get_property_extractor("compatible_architectures"), - "Content": _build_code_property, -} - -RESOURCE_TRANSLATOR_MAPPING: Dict[str, ResourceTranslator] = { - TF_AWS_LAMBDA_FUNCTION: ResourceTranslator(CFN_AWS_LAMBDA_FUNCTION, AWS_LAMBDA_FUNCTION_PROPERTY_BUILDER_MAPPING), - TF_AWS_LAMBDA_LAYER_VERSION: ResourceTranslator( - CFN_AWS_LAMBDA_LAYER_VERSION, AWS_LAMBDA_LAYER_VERSION_PROPERTY_BUILDER_MAPPING - ), -} - - -def _get_s3_object_hash( - bucket: Union[str, List[Union[ConstantValue, ResolvedReference]]], - key: Union[str, List[Union[ConstantValue, ResolvedReference]]], -) -> str: - """ - Creates a hash for an AWS 
S3 object out of the bucket and key - - Parameters - ---------- - bucket: Union[str, List[Union[ConstantValue, ResolvedReference]]] - bucket for the S3 object - key: Union[str, List[Union[ConstantValue, ResolvedReference]]] - key for the S3 object - - Returns - ------- - str - hash for the given bucket and key - """ - md5 = hashlib.md5() - md5.update(_calculate_configuration_attribute_value_hash(bucket).encode()) - md5.update(_calculate_configuration_attribute_value_hash(key).encode()) - # TODO: Hash version if it exists in addition to key and bucket - return md5.hexdigest() - - -def _map_s3_sources_to_functions( - s3_hash_to_source: Dict[str, Tuple[str, List[Union[ConstantValue, ResolvedReference]]]], - cfn_resources: Dict[str, Any], - lambda_resources_to_code_map: Dict[str, List[Tuple[Dict, str]]], -) -> None: - """ - Maps the source property of terraform AWS S3 object resources into the the Code property of - CloudFormation AWS Lambda Function resources, and append the hash value of the artifacts path to the lambda - resources code map. 
- - Parameters - ---------- - s3_hash_to_source: Dict[str, Tuple[str, List[Union[ConstantValue, ResolvedReference]]]] - Mapping of S3 object hash to S3 object source and the S3 Object configuration source value - cfn_resources: dict - CloudFormation resources - lambda_resources_to_code_map: Dict - the map between lambda resources code path values, and the lambda resources logical ids - """ - for resource_logical_id, resource in cfn_resources.items(): - resource_type = resource.get("Type") - if resource_type in CFN_CODE_PROPERTIES: - code_property = CFN_CODE_PROPERTIES[resource_type] - - code = resource.get("Properties").get(code_property) - - # mapping not possible if function doesn't have bucket and key - if isinstance(code, str): - continue - - bucket = code.get("S3Bucket_config_value") if "S3Bucket_config_value" in code else code.get("S3Bucket") - key = code.get("S3Key_config_value") if "S3Key_config_value" in code else code.get("S3Key") - - if bucket and key: - obj_hash = _get_s3_object_hash(bucket, key) - source = s3_hash_to_source.get(obj_hash) - if source: - if source[0]: - tf_address = resource.get("Metadata", {}).get("SamResourceId") - LOG.debug( - "Found S3 object resource with matching bucket and key for function %s." 
- " Setting function's Code property to the matching S3 object's source: %s", - tf_address, - source[0], - ) - resource["Properties"][code_property] = source[0] - - references = source[0] or source[1] - res_type = "zip" if resource_type == CFN_AWS_LAMBDA_FUNCTION else "layer" - if references: - hash_value = f"{res_type}_{_calculate_configuration_attribute_value_hash(references)}" - resources_list = lambda_resources_to_code_map.get(hash_value, []) - resources_list.append((resource, resource_logical_id)) - lambda_resources_to_code_map[hash_value] = resources_list - - -def _check_dummy_remote_values(cfn_resources: Dict[str, Any]) -> None: - """ - Check if there is any lambda function/layer that has a dummy remote value for its code.imageuri or - code.s3 attributes, and raise a validation error for it. - - Parameters - ---------- - cfn_resources: dict - CloudFormation resources - """ - for _, resource in cfn_resources.items(): - resource_type = resource.get("Type") - if resource_type in CFN_CODE_PROPERTIES: - code_property = CFN_CODE_PROPERTIES[resource_type] - - code = resource.get("Properties").get(code_property) - - # there is no code property, this is the expected behaviour in image package type functions - if code is None: - continue - - # its value is a path to a local source code - if isinstance(code, str): - continue - - bucket = code.get("S3Bucket") - key = code.get("S3Key") - image_uri = code.get("ImageUri") - - if (bucket and bucket == REMOTE_DUMMY_VALUE) or (key and key == REMOTE_DUMMY_VALUE): - raise PrepareHookException( - f"Lambda resource {resource.get('Metadata', {}).get('SamResourceId')} is referring to an S3 bucket " - f"that is not created yet, and there is no sam metadata resource set for it to build its code " - f"locally" - ) - - if image_uri and image_uri == REMOTE_DUMMY_VALUE: - raise PrepareHookException( - f"Lambda resource {resource.get('Metadata', {}).get('SamResourceId')} is referring to an image uri " - "that is not created yet, and there 
is no sam metadata resource set for it to build its image " - "locally." - ) diff --git a/samcli/hook_packages/terraform/hooks/prepare/makefile_generator.py b/samcli/hook_packages/terraform/hooks/prepare/makefile_generator.py new file mode 100644 index 0000000000..1df31c5cd5 --- /dev/null +++ b/samcli/hook_packages/terraform/hooks/prepare/makefile_generator.py @@ -0,0 +1,262 @@ +""" +Terraform Makefile and make rule generation + +This module generates the Makefile for the project and the rules for each of the Lambda functions found +""" +import os +from pathlib import Path +from typing import List, Optional +import logging +import shutil +import uuid + +from samcli.hook_packages.terraform.hooks.prepare.types import ( + SamMetadataResource, +) +from samcli.lib.utils.path_utils import convert_path_to_unix_path + +LOG = logging.getLogger(__name__) + +TERRAFORM_BUILD_SCRIPT = "copy_terraform_built_artifacts.py" +TF_BACKEND_OVERRIDE_FILENAME = "z_samcli_backend_override" + + +def generate_makefile_rule_for_lambda_resource( + sam_metadata_resource: SamMetadataResource, + logical_id: str, + terraform_application_dir: str, + python_command_name: str, + output_dir: str, +) -> str: + """ + Generates and returns a makefile rule for the lambda resource associated with the given sam metadata resource. 
+ + Parameters + ---------- + sam_metadata_resource: SamMetadataResource + A sam metadata resource; the generated makefile rule will correspond to building the lambda resource + associated with this sam metadata resource + logical_id: str + Logical ID of the lambda resource + terraform_application_dir: str + the terraform project root directory + python_command_name: str + the python command name to use for running a script in the makefile rule + output_dir: str + the directory into which the Makefile is written + + Returns + ------- + str + The generated makefile rule + """ + target = _get_makefile_build_target(logical_id) + resource_address = sam_metadata_resource.resource.get("address", "") + python_command_recipe = _format_makefile_recipe( + _build_makerule_python_command( + python_command_name, output_dir, resource_address, sam_metadata_resource, terraform_application_dir + ) + ) + return f"{target}{python_command_recipe}" + + +def generate_makefile( + makefile_rules: List[str], + output_directory_path: str, +) -> None: + """ + Generates a makefile with the given rules in the given directory + + Parameters + ---------- + makefile_rules: List[str] + the list of rules to write in the Makefile + output_directory_path: str + the output directory path to write the generated makefile + """ + + # create output directory if it doesn't exist + if not os.path.exists(output_directory_path): + os.makedirs(output_directory_path, exist_ok=True) + + # create z_samcli_backend_override.tf in output directory + _generate_backend_override_file(output_directory_path) + + # copy copy_terraform_built_artifacts.py script into output directory + copy_terraform_built_artifacts_script_path = os.path.join( + Path(os.path.dirname(__file__)).parent.parent, TERRAFORM_BUILD_SCRIPT + ) + shutil.copy(copy_terraform_built_artifacts_script_path, output_directory_path) + + # create makefile + makefile_path = os.path.join(output_directory_path, "Makefile") + with open(makefile_path, "w+") as 
makefile: + makefile.writelines(makefile_rules) + + +def _generate_backend_override_file(output_directory_path: str): + """ + Generates an override tf file to use a temporary backend + + Parameters + ---------- + output_directory_path: str + the output directory path to write the generated makefile + """ + statefile_filename = f"{uuid.uuid4()}.tfstate" + override_content = "terraform {\n" ' backend "local" {\n' f' path = "./{statefile_filename}"\n' " }\n" "}\n" + override_file_path = os.path.join(output_directory_path, TF_BACKEND_OVERRIDE_FILENAME) + with open(override_file_path, "w+") as f: + f.write(override_content) + + +def _build_makerule_python_command( + python_command_name: str, + output_dir: str, + resource_address: str, + sam_metadata_resource: SamMetadataResource, + terraform_application_dir: str, +) -> str: + """ + Build the Python command recipe to be used inside of the Makefile rule + + Parameters + ---------- + python_command_name: str + the python command name to use for running a script in the makefile recipe + output_dir: str + the directory into which the Makefile is written + resource_address: str + Address of a given terraform resource + sam_metadata_resource: SamMetadataResource + A sam metadata resource; the generated show command recipe will correspond to building the lambda resource + associated with this sam metadata resource + terraform_application_dir: str + the terraform project root directory + + Returns + ------- + str + Fully resolved Terraform show command + """ + show_command_template = ( + '{python_command_name} "{terraform_built_artifacts_script_path}" ' + '--expression "{jpath_string}" --directory "$(ARTIFACTS_DIR)" --target "{resource_address}"' + ) + jpath_string = _build_jpath_string(sam_metadata_resource, resource_address) + terraform_built_artifacts_script_path = convert_path_to_unix_path( + str(Path(output_dir, TERRAFORM_BUILD_SCRIPT).relative_to(terraform_application_dir)) + ) + return show_command_template.format( + 
python_command_name=python_command_name, + terraform_built_artifacts_script_path=terraform_built_artifacts_script_path, + jpath_string=jpath_string.replace('"', '\\"'), + resource_address=resource_address.replace('"', '\\"'), + ) + + +def _get_makefile_build_target(logical_id: str) -> str: + """ + Formats the Makefile rule build target string as is needed by the Makefile + + Parameters + ---------- + logical_id: str + Logical ID of the resource to use for the Makefile rule target + + Returns + ------- + str + The formatted Makefile rule build target + """ + return f"build-{logical_id}:\n" + + +def _format_makefile_recipe(rule_string: str) -> str: + """ + Formats the Makefile rule string as is needed by the Makefile + + Parameters + ---------- + rule_string: str + Makefile rule string to be formatted + + Returns + ------- + str + The formatted target rule + """ + return f"\t{rule_string}\n" + + +def _build_jpath_string(sam_metadata_resource: SamMetadataResource, resource_address: str) -> str: + """ + Constructs the JPath string for a given sam metadata resource from the planned_values + to the build_output_path as is created by the Terraform plan output + + Parameters + ---------- + sam_metadata_resource: SamMetadataResource + A sam metadata resource; the generated recipe jpath will correspond to building the lambda resource + associated with this sam metadata resource + + resource_address: str + Full address of a Terraform resource + + Returns + ------- + str + Full JPath string for a resource from planned_values to build_output_path + """ + jpath_string_template = ( + "|values|root_module{child_modules}|resources|" + '[?address=="{resource_address}"]|values|triggers|built_output_path' + ) + child_modules_template = "|child_modules|[?address=={module_address}]" + module_address = sam_metadata_resource.current_module_address + full_module_path = "" + parent_modules = _get_parent_modules(module_address) + for module in parent_modules: + full_module_path += 
child_modules_template.format(module_address=module) + jpath_string = jpath_string_template.format(child_modules=full_module_path, resource_address=resource_address) + return jpath_string + + +def _get_parent_modules(module_address: Optional[str]) -> List[str]: + """ + Convert an a full Terraform resource address to a list of module + addresses from the root module to the current module + + e.g. "module.level1_lambda.module.level2_lambda" as input will return + ["module.level1_lambda", "module.level1_lambda.module.level2_lambda"] + + Parameters + ---------- + module_address: str + Full address of the Terraform module + + Returns + ------- + List[str] + List of module addresses starting from the root module to the current module + """ + if not module_address: + return [] + + # Split the address on "." then combine it back with the "module" prefix for each module name + modules = module_address.split(".") + modules = [".".join(modules[i : i + 2]) for i in range(0, len(modules), 2)] + + if not modules: + # The format of the address was somehow different than we expected from the + # module..module. + return [] + + # Prefix each nested module name with the previous + previous_module = modules[0] + full_path_modules = [previous_module] + for module in modules[1:]: + module = previous_module + "." 
+ module + previous_module = module + full_path_modules.append(module) + return full_path_modules diff --git a/samcli/hook_packages/terraform/hooks/prepare/property_builder.py b/samcli/hook_packages/terraform/hooks/prepare/property_builder.py new file mode 100644 index 0000000000..a809930a5f --- /dev/null +++ b/samcli/hook_packages/terraform/hooks/prepare/property_builder.py @@ -0,0 +1,223 @@ +""" +Terraform prepare property builder +""" +from typing import Any, Optional, Dict +from samcli.hook_packages.terraform.hooks.prepare.types import ( + TFResource, + PropertyBuilderMapping, + PropertyBuilder, + ResourceTranslator, +) +from samcli.hook_packages.terraform.hooks.prepare.resource_linking import _resolve_resource_attribute +from samcli.lib.utils.packagetype import ZIP, IMAGE +from samcli.lib.hook.exceptions import PrepareHookException +from samcli.lib.utils.resources import ( + AWS_LAMBDA_FUNCTION as CFN_AWS_LAMBDA_FUNCTION, + AWS_LAMBDA_LAYERVERSION as CFN_AWS_LAMBDA_LAYER_VERSION, +) + +REMOTE_DUMMY_VALUE = "<>" +TF_AWS_LAMBDA_FUNCTION = "aws_lambda_function" +TF_AWS_LAMBDA_LAYER_VERSION = "aws_lambda_layer_version" + + +def _build_code_property(tf_properties: dict, resource: TFResource) -> Any: + """ + Builds the Code property of a CloudFormation AWS Lambda Function out of the + properties of the equivalent terraform resource + + Parameters + ---------- + tf_properties: dict + Properties of the terraform AWS Lambda function resource + resource: TFResource + Configuration terraform resource + + Returns + ------- + dict + The built Code property of a CloudFormation AWS Lambda Function resource + """ + filename = tf_properties.get("filename") + if filename: + return filename + + code = {} + tf_cfn_prop_names = [ + ("s3_bucket", "S3Bucket"), + ("s3_key", "S3Key"), + ("image_uri", "ImageUri"), + ("s3_object_version", "S3ObjectVersion"), + ] + for tf_prop_name, cfn_prop_name in tf_cfn_prop_names: + tf_prop_value = tf_properties.get(tf_prop_name) + if tf_prop_value 
is not None: + code[cfn_prop_name] = tf_prop_value + + package_type = tf_properties.get("package_type", ZIP) + + # Get the S3 Bucket details from configuration in case if the customer is creating the S3 bucket in the tf project + if package_type == ZIP and ("S3Bucket" not in code or "S3Key" not in code or "S3ObjectVersion" not in code): + s3_bucket_tf_config_value = _resolve_resource_attribute(resource, "s3_bucket") + s3_key_tf_config_value = _resolve_resource_attribute(resource, "s3_key") + s3_object_version_tf_config_value = _resolve_resource_attribute(resource, "s3_object_version") + if "S3Bucket" not in code and s3_bucket_tf_config_value: + code["S3Bucket"] = REMOTE_DUMMY_VALUE + code["S3Bucket_config_value"] = s3_bucket_tf_config_value + if "S3Key" not in code and s3_key_tf_config_value: + code["S3Key"] = REMOTE_DUMMY_VALUE + code["S3Key_config_value"] = s3_key_tf_config_value + if "S3ObjectVersion" not in code and s3_object_version_tf_config_value: + code["S3ObjectVersion"] = REMOTE_DUMMY_VALUE + code["S3ObjectVersion_config_value"] = s3_object_version_tf_config_value + + # Get the Image URI details from configuration in case if the customer is creating the ecr repo in the tf project + if package_type == IMAGE and "ImageUri" not in code: + image_uri_tf_config_value = _resolve_resource_attribute(resource, "image_uri") + if image_uri_tf_config_value: + code["ImageUri"] = REMOTE_DUMMY_VALUE + + return code + + +def _get_property_extractor(property_name: str) -> PropertyBuilder: + """ + Returns a PropertyBuilder function to extract the given property from a dict + + Parameters + ---------- + property_name: str + The name of the property to extract + + Returns + ------- + PropertyBuilder + function that takes in a dict and extracts the given property name from it + """ + return lambda properties, _: properties.get(property_name) + + +def _build_lambda_function_environment_property(tf_properties: dict, resource: TFResource) -> Optional[dict]: + """ + Builds the 
Environment property of a CloudFormation AWS Lambda Function out of the + properties of the equivalent terraform resource + + Parameters + ---------- + tf_properties: dict + Properties of the terraform AWS Lambda function resource + resource: TFResource + Configuration terraform resource + + Returns + ------- + dict + The built Environment property of a CloudFormation AWS Lambda Function resource + """ + environment = tf_properties.get("environment") + if not environment: + return None + + for env in environment: + variables = env.get("variables") + if variables: + return {"Variables": variables} + + # no variables + return None + + +def _build_lambda_function_image_config_property(tf_properties: dict, resource: TFResource) -> Optional[dict]: + """ + Builds the ImageConfig property of a CloudFormation AWS Lambda Function out of the + properties of the equivalent terraform resource + + Parameters + ---------- + tf_properties: dict + Properties of the terraform AWS Lambda function resource + resource: TFResource + Configuration terraform resource + + Returns + ------- + dict + The built ImageConfig property of a CloudFormation AWS Lambda Function resource + """ + image_config = tf_properties.get("image_config") + if not image_config: + return None + + _check_image_config_value(image_config) + image_config = image_config[0] + + cfn_image_config = {} + tf_cfn_prop_names = [ + ("command", "Command"), + ("entry_point", "EntryPoint"), + ("working_directory", "WorkingDirectory"), + ] + + for tf_prop_name, cfn_prop_name in tf_cfn_prop_names: + tf_prop_value = image_config.get(tf_prop_name) + if tf_prop_value is not None: + cfn_image_config[cfn_prop_name] = tf_prop_value + + return cfn_image_config + + +def _check_image_config_value(image_config: Any) -> bool: + """ + validate if the image_config property value is as SAM CLI expects. If it is not valid, it will raise a + PrepareHookException. 
+ + Parameters + ---------- + image_config: Any + The aws_lambda resource's Image_config property value as read from the terraform plan output. + + Returns + ------- + bool + return True, if the image_config value as expects, and raise PrepareHookException if not as expected. + """ + if not isinstance(image_config, list): + raise PrepareHookException( + f"AWS SAM CLI expects that the value of image_config of aws_lambda_function resource in " + f"the terraform plan output to be of type list instead of {type(image_config)}" + ) + if len(image_config) > 1: + raise PrepareHookException( + f"AWS SAM CLI expects that there is only one item in the image_config property of " + f"aws_lambda_function resource in the terraform plan output, but there are " + f"{len(image_config)} items" + ) + return True + + +AWS_LAMBDA_FUNCTION_PROPERTY_BUILDER_MAPPING: PropertyBuilderMapping = { + "FunctionName": _get_property_extractor("function_name"), + "Architectures": _get_property_extractor("architectures"), + "Environment": _build_lambda_function_environment_property, + "Code": _build_code_property, + "Handler": _get_property_extractor("handler"), + "PackageType": _get_property_extractor("package_type"), + "Runtime": _get_property_extractor("runtime"), + "Layers": _get_property_extractor("layers"), + "Timeout": _get_property_extractor("timeout"), + "ImageConfig": _build_lambda_function_image_config_property, +} + +AWS_LAMBDA_LAYER_VERSION_PROPERTY_BUILDER_MAPPING: PropertyBuilderMapping = { + "LayerName": _get_property_extractor("layer_name"), + "CompatibleRuntimes": _get_property_extractor("compatible_runtimes"), + "CompatibleArchitectures": _get_property_extractor("compatible_architectures"), + "Content": _build_code_property, +} + +RESOURCE_TRANSLATOR_MAPPING: Dict[str, ResourceTranslator] = { + TF_AWS_LAMBDA_FUNCTION: ResourceTranslator(CFN_AWS_LAMBDA_FUNCTION, AWS_LAMBDA_FUNCTION_PROPERTY_BUILDER_MAPPING), + TF_AWS_LAMBDA_LAYER_VERSION: ResourceTranslator( + 
CFN_AWS_LAMBDA_LAYER_VERSION, AWS_LAMBDA_LAYER_VERSION_PROPERTY_BUILDER_MAPPING + ), +} diff --git a/samcli/hook_packages/terraform/hooks/prepare/translate.py b/samcli/hook_packages/terraform/hooks/prepare/translate.py new file mode 100644 index 0000000000..003fe62750 --- /dev/null +++ b/samcli/hook_packages/terraform/hooks/prepare/translate.py @@ -0,0 +1,542 @@ +""" +Terraform translate to CFN implementation + +This method contains the logic required to translate the `terraform show` JSON output into a Cloudformation template +""" +import logging +import hashlib +from typing import Dict, List, Tuple, Union, Any +from samcli.hook_packages.terraform.hooks.prepare.types import ( + ConstantValue, + References, + ResolvedReference, + TFResource, + TFModule, +) +from samcli.hook_packages.terraform.lib.utils import ( + build_cfn_logical_id, + get_sam_metadata_planned_resource_value_attribute, + _calculate_configuration_attribute_value_hash, +) +from samcli.hook_packages.terraform.hooks.prepare.property_builder import ( + TF_AWS_LAMBDA_LAYER_VERSION, + TF_AWS_LAMBDA_FUNCTION, + RESOURCE_TRANSLATOR_MAPPING, + REMOTE_DUMMY_VALUE, + PropertyBuilderMapping, +) +from samcli.lib.utils.resources import ( + AWS_LAMBDA_FUNCTION as CFN_AWS_LAMBDA_FUNCTION, +) +from samcli.lib.utils.packagetype import ZIP, IMAGE +from samcli.hook_packages.terraform.hooks.prepare.enrich import enrich_resources_and_generate_makefile +from samcli.hook_packages.terraform.hooks.prepare.types import SamMetadataResource +from samcli.hook_packages.terraform.hooks.prepare.constants import ( + SAM_METADATA_RESOURCE_NAME_ATTRIBUTE, + CFN_CODE_PROPERTIES, +) +from samcli.hook_packages.terraform.hooks.prepare.resource_linking import ( + _resolve_resource_attribute, + _build_module, + _get_configuration_address, + _link_lambda_function_to_layer, +) +from samcli.lib.hook.exceptions import PrepareHookException + +SAM_METADATA_RESOURCE_TYPE = "null_resource" +SAM_METADATA_NAME_PREFIX = "sam_metadata_" + 
+AWS_PROVIDER_NAME = "registry.terraform.io/hashicorp/aws" +NULL_RESOURCE_PROVIDER_NAME = "registry.terraform.io/hashicorp/null" + +LOG = logging.getLogger(__name__) + + +def translate_to_cfn(tf_json: dict, output_directory_path: str, terraform_application_dir: str) -> dict: + """ + Translates the json output of a terraform show into CloudFormation + + Parameters + ---------- + tf_json: dict + A terraform show json output + output_directory_path: str + the string path to write the metadata file and makefile + terraform_application_dir: str + the terraform project root directory + + Returns + ------- + dict + The CloudFormation resulting from translating tf_json + """ + # setup root_module and cfn dict + root_module = tf_json.get("planned_values", {}).get("root_module") + cfn_dict: dict = {"AWSTemplateFormatVersion": "2010-09-09", "Resources": {}} + if not root_module: + return cfn_dict + + LOG.debug("Mapping Lambda functions to their corresponding layers.") + input_vars: Dict[str, Union[ConstantValue, References]] = { + var_name: ConstantValue(value=var_value.get("value")) + for var_name, var_value in tf_json.get("variables", {}).items() + } + root_tf_module = _build_module("", tf_json.get("configuration", {}).get("root_module"), input_vars, None) + + # to map s3 object sources to respective functions later + # this dictionary will map between the hash value of the S3 Bucket attributes, and a tuple of the planned value + # source code path, and the configuration value of the source code path. 
+ s3_hash_to_source: Dict[str, Tuple[str, List[Union[ConstantValue, ResolvedReference]]]] = {} + + # map code/imageuri to Lambda resources + # the key is the hash value of lambda code/imageuri + # the value is the list of pair of the resource logical id, and the lambda cfn resource dict + lambda_resources_to_code_map: Dict[str, List[Tuple[Dict, str]]] = {} + + sam_metadata_resources: List[SamMetadataResource] = [] + + lambda_layers_terraform_resources: Dict[str, Dict] = {} + lambda_funcs_conf_cfn_resources: Dict[str, List] = {} + lambda_config_funcs_conf_cfn_resources: Dict[str, TFResource] = {} + + # create and iterate over queue of modules to handle child modules + module_queue = [(root_module, root_tf_module)] + while module_queue: + modules_pair = module_queue.pop(0) + curr_module, curr_tf_module = modules_pair + curr_module_address = curr_module.get("address") + + _add_child_modules_to_queue(curr_module, curr_tf_module, module_queue) + + # iterate over resources for current module + resources = curr_module.get("resources", {}) + for resource in resources: + resource_provider = resource.get("provider_name") + resource_type = resource.get("type") + resource_values = resource.get("values") + resource_full_address = resource.get("address") + resource_name = resource.get("name") + resource_mode = resource.get("mode") + + resource_address = ( + f"data.{resource_type}.{resource_name}" + if resource_mode == "data" + else f"{resource_type}.{resource_name}" + ) + config_resource_address = _get_configuration_address(resource_address) + if config_resource_address not in curr_tf_module.resources: + raise PrepareHookException( + f"There is no configuration resource for resource address {resource_full_address} and " + f"configuration address {config_resource_address}" + ) + + config_resource = curr_tf_module.resources[config_resource_address] + + if ( + resource_provider == NULL_RESOURCE_PROVIDER_NAME + and resource_type == SAM_METADATA_RESOURCE_TYPE + and 
resource_name.startswith(SAM_METADATA_NAME_PREFIX) + ): + _add_metadata_resource_to_metadata_list( + SamMetadataResource(curr_module_address, resource, config_resource), + resource, + sam_metadata_resources, + ) + continue + + # only process supported provider + if resource_provider != AWS_PROVIDER_NAME: + continue + + # store S3 sources + if resource_type == "aws_s3_object": + s3_bucket = ( + resource_values.get("bucket") + if "bucket" in resource_values + else _resolve_resource_attribute(config_resource, "bucket") + ) + s3_key = ( + resource_values.get("key") + if "key" in resource_values + else _resolve_resource_attribute(config_resource, "key") + ) + obj_hash = _get_s3_object_hash(s3_bucket, s3_key) + code_artifact = resource_values.get("source") + config_code_artifact = ( + code_artifact if code_artifact else _resolve_resource_attribute(config_resource, "source") + ) + s3_hash_to_source[obj_hash] = (code_artifact, config_code_artifact) + + resource_translator = RESOURCE_TRANSLATOR_MAPPING.get(resource_type) + # resource type not supported + if not resource_translator: + continue + + # translate TF resource "values" to CFN properties + LOG.debug("Processing resource %s", resource_full_address) + translated_properties = _translate_properties( + resource_values, resource_translator.property_builder_mapping, config_resource + ) + translated_resource = { + "Type": resource_translator.cfn_name, + "Properties": translated_properties, + "Metadata": {"SamResourceId": resource_full_address, "SkipBuild": True}, + } + + # build CFN logical ID from resource address + logical_id = build_cfn_logical_id(resource_full_address) + + # Add resource to cfn dict + cfn_dict["Resources"][logical_id] = translated_resource + + if resource_type == TF_AWS_LAMBDA_LAYER_VERSION: + lambda_layers_terraform_resources[logical_id] = resource + planned_value_layer_code_path = translated_properties.get("Content") + _add_lambda_resource_code_path_to_code_map( + config_resource, + "layer", + 
lambda_resources_to_code_map, + logical_id, + planned_value_layer_code_path, + "filename", + translated_resource, + ) + + if resource_type == TF_AWS_LAMBDA_FUNCTION: + resolved_config_address = _get_configuration_address(resource_full_address) + matched_lambdas = lambda_funcs_conf_cfn_resources.get(resolved_config_address, []) + matched_lambdas.append(translated_resource) + lambda_funcs_conf_cfn_resources[resolved_config_address] = matched_lambdas + lambda_config_funcs_conf_cfn_resources[resolved_config_address] = config_resource + + resource_type = translated_properties.get("PackageType", ZIP) + resource_type_constants = {ZIP: ("zip", "filename"), IMAGE: ("image", "image_uri")} + planned_value_function_code_path = ( + translated_properties.get("Code") + if resource_type == ZIP + else translated_properties.get("Code", {}).get("ImageUri") + ) + func_type, tf_code_property = resource_type_constants[resource_type] + + _add_lambda_resource_code_path_to_code_map( + config_resource, + func_type, + lambda_resources_to_code_map, + logical_id, + planned_value_function_code_path, + tf_code_property, + translated_resource, + ) + + # map s3 object sources to corresponding functions + LOG.debug("Mapping S3 object sources to corresponding functions") + _map_s3_sources_to_functions(s3_hash_to_source, cfn_dict.get("Resources", {}), lambda_resources_to_code_map) + + _link_lambda_functions_to_layers( + lambda_config_funcs_conf_cfn_resources, lambda_funcs_conf_cfn_resources, lambda_layers_terraform_resources + ) + + if sam_metadata_resources: + LOG.debug("Enrich the mapped resources with the sam metadata information and generate Makefile") + enrich_resources_and_generate_makefile( + sam_metadata_resources, + cfn_dict.get("Resources", {}), + output_directory_path, + terraform_application_dir, + lambda_resources_to_code_map, + ) + else: + LOG.debug("There is no sam metadata resources, no enrichment or Makefile is required") + + # check if there is still any dummy remote values for 
lambda resource imagesUri or S3 attributes + _check_dummy_remote_values(cfn_dict.get("Resources", {})) + + return cfn_dict + + +def _add_child_modules_to_queue(curr_module: Dict, curr_module_configuration: TFModule, modules_queue: List) -> None: + """ + Iterate over the children modules of current module and add each module with its related child module configuration + to the modules_queue. + + Parameters + ---------- + curr_module: Dict + The current module in the planned values + curr_module_configuration: TFModule + The current module configuration + modules_queue: List + The list of modules + """ + child_modules = curr_module.get("child_modules") + if child_modules: + for child_module in child_modules: + config_child_module_address = ( + _get_configuration_address(child_module["address"]) if "address" in child_module else None + ) + module_name = ( + config_child_module_address[config_child_module_address.rfind(".") + 1 :] + if config_child_module_address + else None + ) + child_tf_module = curr_module_configuration.child_modules.get(module_name) if module_name else None + if child_tf_module is None: + raise PrepareHookException( + f"Module {config_child_module_address} exists in terraform planned_value, but does not exist " + "in terraform configuration" + ) + modules_queue.append((child_module, child_tf_module)) + + +def _add_metadata_resource_to_metadata_list( + sam_metadata_resource: SamMetadataResource, + sam_metadata_resource_planned_values: Dict, + sam_metadata_resources: List[SamMetadataResource], +) -> None: + """ + Prioritize the metadata resources that has resource name value to overwrite the metadata resources that does not + have resource name value. 
+ + Parameters + ---------- + sam_metadata_resource: SamMetadataResource + The mapped metadata resource + sam_metadata_resource_planned_values: Dict + The metadata resource in planned values section + sam_metadata_resources: List[SamMetadataResource] + The list of metadata resources + """ + if get_sam_metadata_planned_resource_value_attribute( + sam_metadata_resource_planned_values, SAM_METADATA_RESOURCE_NAME_ATTRIBUTE + ): + sam_metadata_resources.append(sam_metadata_resource) + else: + sam_metadata_resources.insert(0, sam_metadata_resource) + + +def _translate_properties( + tf_properties: dict, property_builder_mapping: PropertyBuilderMapping, resource: TFResource +) -> dict: + """ + Translates the properties of a terraform resource into the equivalent properties of a CloudFormation resource + + Parameters + ---------- + tf_properties: dict + The terraform properties to translate + property_builder_mappping: PropertyBuilderMapping + A mapping of the CloudFormation property name to a function for building that property + resource: TFResource + The terraform configuration resource that can be used to retrieve some attributes values if needed + + Returns + ------- + dict + The CloudFormation properties resulting from translating tf_properties + """ + cfn_properties = {} + for cfn_property_name, cfn_property_builder in property_builder_mapping.items(): + cfn_property_value = cfn_property_builder(tf_properties, resource) + if cfn_property_value is not None: + cfn_properties[cfn_property_name] = cfn_property_value + return cfn_properties + + +def _add_lambda_resource_code_path_to_code_map( + terraform_resource: TFResource, + lambda_resource_prefix: str, + lambda_resources_to_code_map: Dict, + logical_id: str, + lambda_resource_code_value: Any, + terraform_code_property_name: str, + translated_resource: Dict, +) -> None: + """ + Calculate the hash value of the lambda resource code path planned value or the configuration value and use it to + map the lambda resource 
logical id to the source code path. This will be used later to map the metadata resource to + the correct lambda resource. + + Parameters + ---------- + terraform_resource: TFResource + The mapped TF resource. This will be used to resolve the configuration value of the code attribute in the lambda + resource + lambda_resource_prefix: str + a string prefix to be added to the hash value to differentiate between the different lambda resources types + lambda_resources_to_code_map: dict + the map between lambda resources code path values, and the lambda resources logical ids + logical_id: str + lambda resource logical id + lambda_resource_code_value: Any + The planned value of the lambda resource code path + terraform_code_property_name: str + The lambda resource code property name + translated_resource: Dict + The CFN translated lambda resource + """ + if not lambda_resource_code_value or not isinstance(lambda_resource_code_value, str): + lambda_resource_code_value = _resolve_resource_attribute(terraform_resource, terraform_code_property_name) + if lambda_resource_code_value: + hash_value = ( + f"{lambda_resource_prefix}_{_calculate_configuration_attribute_value_hash(lambda_resource_code_value)}" + ) + functions_list = lambda_resources_to_code_map.get(hash_value, []) + functions_list.append((translated_resource, logical_id)) + lambda_resources_to_code_map[hash_value] = functions_list + + +def _link_lambda_functions_to_layers( + lambda_config_funcs_conf_cfn_resources: Dict[str, TFResource], + lambda_funcs_conf_cfn_resources: Dict[str, List], + lambda_layers_terraform_resources: Dict[str, Dict], +): + """ + Iterate through all of the resources and link the corresponding Lambda Layers to each Lambda Function + + Parameters + ---------- + lambda_config_funcs_conf_cfn_resources: Dict[str, TFResource] + Dictionary of configuration lambda resources + lambda_funcs_conf_cfn_resources: Dict[str, List] + Dictionary containing resolved configuration addresses matched up to the cfn 
Lambda functions + lambda_layers_terraform_resources: Dict[str, Dict] + Dictionary of all actual terraform layers resources (not configuration resources). The dictionary's key is the + calculated logical id for each resource + + Returns + ------- + dict + The CloudFormation resulting from translating tf_json + """ + for config_address, resource in lambda_config_funcs_conf_cfn_resources.items(): + if config_address in lambda_funcs_conf_cfn_resources: + LOG.debug("Linking layers for Lambda function %s", resource.full_address) + _link_lambda_function_to_layer( + resource, lambda_funcs_conf_cfn_resources[config_address], lambda_layers_terraform_resources + ) + + +def _map_s3_sources_to_functions( + s3_hash_to_source: Dict[str, Tuple[str, List[Union[ConstantValue, ResolvedReference]]]], + cfn_resources: Dict[str, Any], + lambda_resources_to_code_map: Dict[str, List[Tuple[Dict, str]]], +) -> None: + """ + Maps the source property of terraform AWS S3 object resources into the the Code property of + CloudFormation AWS Lambda Function resources, and append the hash value of the artifacts path to the lambda + resources code map. 
+ + Parameters + ---------- + s3_hash_to_source: Dict[str, Tuple[str, List[Union[ConstantValue, ResolvedReference]]]] + Mapping of S3 object hash to S3 object source and the S3 Object configuration source value + cfn_resources: dict + CloudFormation resources + lambda_resources_to_code_map: Dict + the map between lambda resources code path values, and the lambda resources logical ids + """ + for resource_logical_id, resource in cfn_resources.items(): + resource_type = resource.get("Type") + if resource_type in CFN_CODE_PROPERTIES: + code_property = CFN_CODE_PROPERTIES[resource_type] + + code = resource.get("Properties").get(code_property) + + # mapping not possible if function doesn't have bucket and key + if isinstance(code, str): + continue + + bucket = code.get("S3Bucket_config_value") if "S3Bucket_config_value" in code else code.get("S3Bucket") + key = code.get("S3Key_config_value") if "S3Key_config_value" in code else code.get("S3Key") + + if bucket and key: + obj_hash = _get_s3_object_hash(bucket, key) + source = s3_hash_to_source.get(obj_hash) + if source: + if source[0]: + tf_address = resource.get("Metadata", {}).get("SamResourceId") + LOG.debug( + "Found S3 object resource with matching bucket and key for function %s." 
+ " Setting function's Code property to the matching S3 object's source: %s", + tf_address, + source[0], + ) + resource["Properties"][code_property] = source[0] + + references = source[0] or source[1] + res_type = "zip" if resource_type == CFN_AWS_LAMBDA_FUNCTION else "layer" + if references: + hash_value = f"{res_type}_{_calculate_configuration_attribute_value_hash(references)}" + resources_list = lambda_resources_to_code_map.get(hash_value, []) + resources_list.append((resource, resource_logical_id)) + lambda_resources_to_code_map[hash_value] = resources_list + + +def _check_dummy_remote_values(cfn_resources: Dict[str, Any]) -> None: + """ + Check if there is any lambda function/layer that has a dummy remote value for its code.imageuri or + code.s3 attributes, and raise a validation error for it. + + Parameters + ---------- + cfn_resources: dict + CloudFormation resources + """ + for _, resource in cfn_resources.items(): + resource_type = resource.get("Type") + if resource_type in CFN_CODE_PROPERTIES: + code_property = CFN_CODE_PROPERTIES[resource_type] + + code = resource.get("Properties").get(code_property) + + # there is no code property, this is the expected behaviour in image package type functions + if code is None: + continue + + # its value is a path to a local source code + if isinstance(code, str): + continue + + bucket = code.get("S3Bucket") + key = code.get("S3Key") + image_uri = code.get("ImageUri") + + if (bucket and bucket == REMOTE_DUMMY_VALUE) or (key and key == REMOTE_DUMMY_VALUE): + raise PrepareHookException( + f"Lambda resource {resource.get('Metadata', {}).get('SamResourceId')} is referring to an S3 bucket " + f"that is not created yet, and there is no sam metadata resource set for it to build its code " + f"locally" + ) + + if image_uri and image_uri == REMOTE_DUMMY_VALUE: + raise PrepareHookException( + f"Lambda resource {resource.get('Metadata', {}).get('SamResourceId')} is referring to an image uri " + "that is not created yet, and there 
is no sam metadata resource set for it to build its image " + "locally." + ) + + +def _get_s3_object_hash( + bucket: Union[str, List[Union[ConstantValue, ResolvedReference]]], + key: Union[str, List[Union[ConstantValue, ResolvedReference]]], +) -> str: + """ + Creates a hash for an AWS S3 object out of the bucket and key + + Parameters + ---------- + bucket: Union[str, List[Union[ConstantValue, ResolvedReference]]] + bucket for the S3 object + key: Union[str, List[Union[ConstantValue, ResolvedReference]]] + key for the S3 object + + Returns + ------- + str + hash for the given bucket and key + """ + md5 = hashlib.md5() + md5.update(_calculate_configuration_attribute_value_hash(bucket).encode()) + md5.update(_calculate_configuration_attribute_value_hash(key).encode()) + # TODO: Hash version if it exists in addition to key and bucket + return md5.hexdigest() diff --git a/samcli/hook_packages/terraform/hooks/prepare/types.py b/samcli/hook_packages/terraform/hooks/prepare/types.py index 5a0583fdf4..608627e229 100644 --- a/samcli/hook_packages/terraform/hooks/prepare/types.py +++ b/samcli/hook_packages/terraform/hooks/prepare/types.py @@ -1,7 +1,7 @@ """ Contains the data types used in the TF prepare hook""" from copy import deepcopy from dataclasses import dataclass -from typing import Any, List, Union, Optional, Dict +from typing import Any, List, Union, Optional, Dict, Callable @dataclass @@ -60,3 +60,20 @@ def full_address(self) -> str: if self.module and self.module.full_address: return f"{self.module.full_address}.{self.address}" return self.address + + +PropertyBuilder = Callable[[dict, TFResource], Any] +PropertyBuilderMapping = Dict[str, PropertyBuilder] + + +@dataclass +class ResourceTranslator: + cfn_name: str + property_builder_mapping: PropertyBuilderMapping + + +@dataclass +class SamMetadataResource: + current_module_address: Optional[str] + resource: Dict + config_resource: TFResource diff --git a/samcli/hook_packages/terraform/lib/utils.py 
b/samcli/hook_packages/terraform/lib/utils.py index 4df060f8f8..261df9169f 100644 --- a/samcli/hook_packages/terraform/lib/utils.py +++ b/samcli/hook_packages/terraform/lib/utils.py @@ -2,8 +2,11 @@ import hashlib from typing import List, Union, Dict, Any -from samcli.hook_packages.terraform.hooks.prepare.types import ConstantValue, ResolvedReference from samcli.lib.utils.hash import str_checksum +from samcli.hook_packages.terraform.hooks.prepare.types import ( + ConstantValue, + ResolvedReference, +) # max logical id len is 255 LOGICAL_ID_HASH_LEN = 8 diff --git a/tests/unit/hook_packages/terraform/hooks/prepare/prepare_base.py b/tests/unit/hook_packages/terraform/hooks/prepare/prepare_base.py new file mode 100644 index 0000000000..20085c8f49 --- /dev/null +++ b/tests/unit/hook_packages/terraform/hooks/prepare/prepare_base.py @@ -0,0 +1,741 @@ +""" +Unit test base class for Terraform prepare hook +""" +from unittest import TestCase + +from samcli.hook_packages.terraform.hooks.prepare.translate import AWS_PROVIDER_NAME, NULL_RESOURCE_PROVIDER_NAME +from samcli.lib.utils.resources import ( + AWS_LAMBDA_FUNCTION as CFN_AWS_LAMBDA_FUNCTION, + AWS_LAMBDA_LAYERVERSION, +) + + +class PrepareHookUnitBase(TestCase): + def setUp(self) -> None: + self.output_dir = "/output/dir" + self.project_root = "/project/root" + + self.mock_logical_id_hash = "12AB34CD" + + self.s3_bucket = "mybucket" + self.s3_key = "mykey" + self.s3_object_version = "myversion" + self.s3_source = "mysource1.zip" + self.s3_bucket_2 = "mybucket2" + self.s3_key_2 = "mykey2" + self.s3_source_2 = "mysource2.zip" + + self.zip_function_name = "myfunc" + self.zip_function_name_2 = "myfunc2" + self.zip_function_name_3 = "myfunc3" + self.zip_function_name_4 = "myfunc4" + self.s3_function_name = "myfuncS3" + self.s3_function_name_2 = "myfuncS3_2" + self.image_function_name = "image_func" + self.lambda_layer_name = "lambda_layer" + + self.tf_function_common_properties: dict = { + "function_name": 
self.zip_function_name, + "architectures": ["x86_64"], + "environment": [{"variables": {"foo": "bar", "hello": "world"}}], + "handler": "index.handler", + "package_type": "Zip", + "runtime": "python3.7", + "layers": ["layer_arn1", "layer_arn2"], + "timeout": 3, + } + self.expected_cfn_function_common_properties: dict = { + "FunctionName": self.zip_function_name, + "Architectures": ["x86_64"], + "Environment": {"Variables": {"foo": "bar", "hello": "world"}}, + "Handler": "index.handler", + "PackageType": "Zip", + "Runtime": "python3.7", + "Layers": ["layer_arn1", "layer_arn2"], + "Timeout": 3, + } + + self.tf_image_package_type_function_common_properties: dict = { + "function_name": self.image_function_name, + "architectures": ["x86_64"], + "environment": [{"variables": {"foo": "bar", "hello": "world"}}], + "package_type": "Image", + "timeout": 3, + } + self.expected_cfn_image_package_type_function_common_properties: dict = { + "FunctionName": self.image_function_name, + "Architectures": ["x86_64"], + "Environment": {"Variables": {"foo": "bar", "hello": "world"}}, + "PackageType": "Image", + "Timeout": 3, + } + + self.tf_layer_common_properties: dict = { + "layer_name": self.lambda_layer_name, + "compatible_runtimes": ["nodejs14.x", "nodejs16.x"], + "compatible_architectures": ["arm64"], + } + self.expected_cfn_layer_common_properties: dict = { + "LayerName": self.lambda_layer_name, + "CompatibleRuntimes": ["nodejs14.x", "nodejs16.x"], + "CompatibleArchitectures": ["arm64"], + } + + self.tf_lambda_layer_properties_zip: dict = { + **self.tf_layer_common_properties, + "filename": "file.zip", + } + self.tf_lambda_layer_properties_s3: dict = { + **self.tf_layer_common_properties, + "s3_bucket": "bucket_name", + "s3_key": "bucket_key", + "s3_object_version": "1", + } + self.tf_lambda_layer_sam_metadata_properties: dict = { + "triggers": { + "built_output_path": "builds/func.zip", + "original_source_code": "./src/lambda_layer", + "resource_name": 
f"aws_lambda_layer_version.{self.lambda_layer_name}", + "resource_type": "LAMBDA_LAYER", + }, + } + self.expected_cfn_lambda_layer_properties_zip: dict = { + **self.expected_cfn_layer_common_properties, + "Content": "file.zip", + } + self.expected_cfn_lambda_layer_properties_s3: dict = { + **self.expected_cfn_layer_common_properties, + "Content": { + "S3Bucket": "bucket_name", + "S3Key": "bucket_key", + "S3ObjectVersion": "1", + }, + } + + self.expected_cfn_layer_resource_s3: dict = { + "Type": AWS_LAMBDA_LAYERVERSION, + "Properties": self.expected_cfn_lambda_layer_properties_s3, + "Metadata": {"SamResourceId": f"aws_lambda_layer_version.{self.lambda_layer_name}", "SkipBuild": True}, + } + + self.expected_cfn_layer_resource_zip: dict = { + "Type": AWS_LAMBDA_LAYERVERSION, + "Properties": self.expected_cfn_lambda_layer_properties_zip, + "Metadata": {"SamResourceId": f"aws_lambda_layer_version.{self.lambda_layer_name}", "SkipBuild": True}, + } + + self.tf_zip_function_properties: dict = { + **self.tf_function_common_properties, + "filename": "file.zip", + } + self.tf_zip_function_sam_metadata_properties: dict = { + "triggers": { + "built_output_path": "builds/func.zip", + "original_source_code": "./src/lambda_func", + "resource_name": f"aws_lambda_function.{self.zip_function_name}", + "resource_type": "ZIP_LAMBDA_FUNCTION", + }, + } + self.expected_cfn_zip_function_properties: dict = { + **self.expected_cfn_function_common_properties, + "Code": "file.zip", + } + + self.tf_image_package_type_function_properties: dict = { + **self.tf_image_package_type_function_common_properties, + "image_config": [ + { + "command": ["cmd1", "cmd2"], + "entry_point": ["entry1", "entry2"], + "working_directory": "/working/dir/path", + } + ], + "image_uri": "image/uri:tag", + } + self.tf_image_package_type_function_sam_metadata_properties: dict = { + "triggers": { + "resource_name": f"aws_lambda_function.{self.image_function_name}", + "docker_build_args": '{"FOO":"bar"}', + 
"docker_context": "context", + "docker_file": "Dockerfile", + "docker_tag": "2.0", + "resource_type": "IMAGE_LAMBDA_FUNCTION", + }, + } + self.expected_cfn_image_package_function_properties: dict = { + **self.expected_cfn_image_package_type_function_common_properties, + "ImageConfig": { + "Command": ["cmd1", "cmd2"], + "EntryPoint": ["entry1", "entry2"], + "WorkingDirectory": "/working/dir/path", + }, + "Code": { + "ImageUri": "image/uri:tag", + }, + } + + self.tf_s3_function_properties: dict = { + **self.tf_function_common_properties, + "function_name": self.s3_function_name, + "s3_bucket": self.s3_bucket, + "s3_key": self.s3_key, + "s3_object_version": self.s3_object_version, + } + self.expected_cfn_s3_function_properties: dict = { + **self.expected_cfn_function_common_properties, + "FunctionName": self.s3_function_name, + "Code": {"S3Bucket": self.s3_bucket, "S3Key": self.s3_key, "S3ObjectVersion": self.s3_object_version}, + } + self.expected_cfn_s3_function_properties_after_source_mapping: dict = { + **self.expected_cfn_function_common_properties, + "FunctionName": self.s3_function_name, + "Code": self.s3_source, + } + + self.expected_cfn_s3_layer_properties_after_source_mapping: dict = { + **self.expected_cfn_layer_common_properties, + "LayerName": self.lambda_layer_name, + "Content": self.s3_source, + } + + self.expected_cfn_s3_layer_resource_after_source_mapping: dict = { + **self.expected_cfn_layer_resource_s3, + "Properties": self.expected_cfn_s3_layer_properties_after_source_mapping, + } + + self.tf_s3_function_properties_2: dict = { + **self.tf_function_common_properties, + "function_name": self.s3_function_name_2, + "s3_bucket": self.s3_bucket_2, + "s3_key": self.s3_key_2, + "s3_object_version": self.s3_object_version, + } + self.expected_cfn_s3_function_properties_2: dict = { + **self.expected_cfn_function_common_properties, + "FunctionName": self.s3_function_name_2, + "Code": {"S3Bucket": self.s3_bucket_2, "S3Key": self.s3_key_2}, + } + 
self.expected_cfn_s3_function_properties_after_source_mapping_2: dict = { + **self.expected_cfn_function_common_properties, + "FunctionName": self.s3_function_name_2, + "Code": self.s3_source_2, + } + + self.tf_function_properties_with_missing_or_none: dict = { + "function_name": self.zip_function_name, + "filename": "file.zip", + "environment": None, + "layers": None, + } + self.expected_cfn_function_properties_with_missing_or_none: dict = { + "FunctionName": self.zip_function_name, + "Code": "file.zip", + } + + self.tf_zip_function_properties_2: dict = { + "function_name": self.zip_function_name_2, + "architectures": ["x86_64"], + "environment": [{"variables": {"hi": "there"}}], + "handler": "index.handler2", + "package_type": "Zip", + "runtime": "python3.8", + "layers": ["layer_arn"], + "filename": "file2.zip", + } + self.tf_zip_function_sam_metadata_properties_2: dict = { + "triggers": { + "built_output_path": "builds/func2.zip", + "original_source_code": "./src/lambda_func2", + "resource_name": f"aws_lambda_function.{self.zip_function_name_2}", + "resource_type": "ZIP_LAMBDA_FUNCTION", + }, + } + self.expected_cfn_zip_function_properties_2: dict = { + "FunctionName": self.zip_function_name_2, + "Architectures": ["x86_64"], + "Environment": {"Variables": {"hi": "there"}}, + "Handler": "index.handler2", + "PackageType": "Zip", + "Runtime": "python3.8", + "Layers": ["layer_arn"], + "Code": "file2.zip", + } + + self.tf_zip_function_properties_3: dict = { + **self.tf_zip_function_properties_2, + "function_name": self.zip_function_name_3, + } + self.tf_zip_function_sam_metadata_properties_3: dict = { + "triggers": { + "built_output_path": "builds/func3.zip", + "original_source_code": "./src/lambda_func3", + "resource_name": f"aws_lambda_function.{self.zip_function_name_3}", + "resource_type": "ZIP_LAMBDA_FUNCTION", + }, + } + self.expected_cfn_zip_function_properties_3: dict = { + **self.expected_cfn_zip_function_properties_2, + "FunctionName": 
self.zip_function_name_3, + } + self.tf_zip_function_properties_4: dict = { + **self.tf_zip_function_properties_2, + "function_name": self.zip_function_name_4, + } + self.tf_zip_function_sam_metadata_properties_4: dict = { + "triggers": { + "built_output_path": "builds/func4.zip", + "original_source_code": "./src/lambda_func4", + "resource_name": f"aws_lambda_function.{self.zip_function_name_4}", + "resource_type": "ZIP_LAMBDA_FUNCTION", + }, + } + self.expected_cfn_zip_function_properties_4: dict = { + **self.expected_cfn_zip_function_properties_2, + "FunctionName": self.zip_function_name_4, + } + + self.tf_lambda_function_resource_common_attributes: dict = { + "type": "aws_lambda_function", + "provider_name": AWS_PROVIDER_NAME, + } + + self.tf_lambda_layer_resource_common_attributes: dict = { + "type": "aws_lambda_layer_version", + "provider_name": AWS_PROVIDER_NAME, + } + + self.tf_sam_metadata_resource_common_attributes: dict = { + "type": "null_resource", + "provider_name": NULL_RESOURCE_PROVIDER_NAME, + } + + self.tf_lambda_function_resource_zip: dict = { + **self.tf_lambda_function_resource_common_attributes, + "values": self.tf_zip_function_properties, + "address": f"aws_lambda_function.{self.zip_function_name}", + "name": self.zip_function_name, + } + self.tf_lambda_function_resource_zip_sam_metadata: dict = { + **self.tf_sam_metadata_resource_common_attributes, + "values": self.tf_zip_function_sam_metadata_properties, + "address": f"null_resource.sam_metadata_{self.zip_function_name}", + "name": f"sam_metadata_{self.zip_function_name}", + } + self.expected_cfn_lambda_function_resource_zip: dict = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": self.expected_cfn_zip_function_properties, + "Metadata": {"SamResourceId": f"aws_lambda_function.{self.zip_function_name}", "SkipBuild": True}, + } + + self.tf_lambda_layer_resource_zip: dict = { + **self.tf_lambda_layer_resource_common_attributes, + "values": self.tf_lambda_layer_properties_zip, + "address": 
f"aws_lambda_function.{self.lambda_layer_name}", + "name": self.lambda_layer_name, + } + self.tf_lambda_layer_resource_zip_sam_metadata: dict = { + **self.tf_sam_metadata_resource_common_attributes, + "values": self.tf_lambda_layer_sam_metadata_properties, + "address": f"null_resource.sam_metadata_{self.lambda_layer_name}", + "name": f"sam_metadata_{self.lambda_layer_name}", + } + + self.tf_lambda_function_resource_zip_2: dict = { + **self.tf_lambda_function_resource_common_attributes, + "values": self.tf_zip_function_properties_2, + "address": f"aws_lambda_function.{self.zip_function_name_2}", + "name": self.zip_function_name_2, + } + self.tf_lambda_function_resource_zip_2_sam_metadata: dict = { + **self.tf_sam_metadata_resource_common_attributes, + "values": self.tf_zip_function_sam_metadata_properties_2, + "address": f"null_resource.sam_metadata_{self.zip_function_name_2}", + "name": f"sam_metadata_{self.zip_function_name_2}", + } + self.expected_cfn_lambda_function_resource_zip_2: dict = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": self.expected_cfn_zip_function_properties_2, + "Metadata": {"SamResourceId": f"aws_lambda_function.{self.zip_function_name_2}", "SkipBuild": True}, + } + + self.tf_lambda_function_resource_zip_3: dict = { + **self.tf_lambda_function_resource_common_attributes, + "values": self.tf_zip_function_properties_3, + "address": f"aws_lambda_function.{self.zip_function_name_3}", + "name": self.zip_function_name_3, + } + self.tf_lambda_function_resource_zip_3_sam_metadata: dict = { + **self.tf_sam_metadata_resource_common_attributes, + "values": self.tf_zip_function_sam_metadata_properties_3, + "address": f"null_resource.sam_metadata_{self.zip_function_name_3}", + "name": f"sam_metadata_{self.zip_function_name_3}", + } + self.expected_cfn_lambda_function_resource_zip_3: dict = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": self.expected_cfn_zip_function_properties_3, + "Metadata": {"SamResourceId": 
f"aws_lambda_function.{self.zip_function_name_3}", "SkipBuild": True}, + } + + self.tf_lambda_function_resource_zip_4: dict = { + **self.tf_lambda_function_resource_common_attributes, + "values": self.tf_zip_function_properties_4, + "address": f"aws_lambda_function.{self.zip_function_name_4}", + "name": self.zip_function_name_4, + } + self.tf_lambda_function_resource_zip_4_sam_metadata: dict = { + **self.tf_sam_metadata_resource_common_attributes, + "values": self.tf_zip_function_sam_metadata_properties_4, + "address": f"null_resource.sam_metadata_{self.zip_function_name_4}", + "name": f"sam_metadata_{self.zip_function_name_4}", + } + self.expected_cfn_lambda_function_resource_zip_4: dict = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": self.expected_cfn_zip_function_properties_4, + "Metadata": {"SamResourceId": f"aws_lambda_function.{self.zip_function_name_4}", "SkipBuild": True}, + } + + self.tf_image_package_type_lambda_function_resource: dict = { + **self.tf_lambda_function_resource_common_attributes, + "values": self.tf_image_package_type_function_properties, + "address": f"aws_lambda_function.{self.image_function_name}", + "name": self.image_function_name, + } + self.tf_image_package_type_lambda_function_resource_sam_metadata: dict = { + **self.tf_sam_metadata_resource_common_attributes, + "values": self.tf_image_package_type_function_sam_metadata_properties, + "address": f"null_resource.sam_metadata_{self.image_function_name}", + "name": f"sam_metadata_{self.image_function_name}", + } + self.expected_cfn_image_package_type_lambda_function_resource: dict = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": self.expected_cfn_image_package_function_properties, + "Metadata": {"SamResourceId": f"aws_lambda_function.{self.image_function_name}", "SkipBuild": True}, + } + + self.tf_lambda_function_resource_s3: dict = { + **self.tf_lambda_function_resource_common_attributes, + "values": self.tf_s3_function_properties, + "address": 
f"aws_lambda_function.{self.s3_function_name}", + "name": self.s3_function_name, + } + self.expected_cfn_lambda_function_resource_s3: dict = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": self.expected_cfn_s3_function_properties, + "Metadata": {"SamResourceId": f"aws_lambda_function.{self.s3_function_name}", "SkipBuild": True}, + } + self.expected_cfn_lambda_function_resource_s3_after_source_mapping: dict = { + **self.expected_cfn_lambda_function_resource_s3, + "Properties": self.expected_cfn_s3_function_properties_after_source_mapping, + } + + self.tf_lambda_function_resource_s3_2: dict = { + **self.tf_lambda_function_resource_common_attributes, + "values": self.tf_s3_function_properties_2, + "address": f"aws_lambda_function.{self.s3_function_name_2}", + "name": self.s3_function_name_2, + } + self.expected_cfn_lambda_function_resource_s3_2: dict = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": self.expected_cfn_s3_function_properties_2, + "Metadata": {"SamResourceId": f"aws_lambda_function.{self.s3_function_name_2}", "SkipBuild": True}, + } + self.expected_cfn_lambda_function_resource_s3_after_source_mapping_2: dict = { + **self.expected_cfn_lambda_function_resource_s3_2, + "Properties": self.expected_cfn_s3_function_properties_after_source_mapping_2, + } + + self.tf_s3_object_resource_common_attributes: dict = { + "type": "aws_s3_object", + "provider_name": AWS_PROVIDER_NAME, + } + + self.tf_s3_object_resource: dict = { + **self.tf_s3_object_resource_common_attributes, + "values": {"bucket": self.s3_bucket, "key": self.s3_key, "source": self.s3_source}, + "address": "aws_s3_object.s3_lambda_code", + "name": "s3_lambda_code", + } + + self.tf_s3_object_resource_2: dict = { + **self.tf_s3_object_resource_common_attributes, + "values": {"bucket": self.s3_bucket_2, "key": self.s3_key_2, "source": self.s3_source_2}, + "address": "aws_s3_object.s3_lambda_code_2", + "name": "s3_lambda_code_2", + } + + self.tf_json_with_root_module_only: dict = { + 
"planned_values": { + "root_module": { + "resources": [ + self.tf_lambda_function_resource_zip, + self.tf_lambda_function_resource_zip_2, + self.tf_image_package_type_lambda_function_resource, + ] + } + } + } + self.expected_cfn_with_root_module_only: dict = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + f"AwsLambdaFunctionMyfunc{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip, + f"AwsLambdaFunctionMyfunc2{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip_2, + f"AwsLambdaFunctionImageFunc{self.mock_logical_id_hash}": self.expected_cfn_image_package_type_lambda_function_resource, + }, + } + + self.tf_json_with_root_module_with_sam_metadata_resources: dict = { + "planned_values": { + "root_module": { + "resources": [ + self.tf_lambda_function_resource_zip, + self.tf_lambda_function_resource_zip_2, + self.tf_image_package_type_lambda_function_resource, + self.tf_lambda_function_resource_zip_sam_metadata, + self.tf_lambda_function_resource_zip_2_sam_metadata, + self.tf_image_package_type_lambda_function_resource_sam_metadata, + ] + } + } + } + self.tf_json_with_child_modules: dict = { + "planned_values": { + "root_module": { + "resources": [ + self.tf_lambda_function_resource_zip, + ], + "child_modules": [ + { + "resources": [ + { + **self.tf_lambda_function_resource_zip_2, + "address": f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}", + }, + ], + "child_modules": [ + { + "resources": [ + { + **self.tf_lambda_function_resource_zip_3, + "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_3}", + }, + ], + "address": "module.m1.module.m2", + }, + { + "resources": [ + { + **self.tf_lambda_function_resource_zip_4, + "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_4}", + }, + ], + "address": "module.m1.module.m3", + }, + ], + "address": "module.m1", + } + ], + } + } + } + 
self.tf_json_with_child_modules_with_sam_metadata_resource: dict = { + "planned_values": { + "root_module": { + "resources": [ + self.tf_lambda_function_resource_zip, + self.tf_lambda_function_resource_zip_sam_metadata, + ], + "child_modules": [ + { + "resources": [ + { + **self.tf_lambda_function_resource_zip_2, + "address": f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}", + }, + { + **self.tf_lambda_function_resource_zip_2_sam_metadata, + "address": f"module.mymodule1.null_resource.sam_metadata_{self.zip_function_name_2}", + }, + ], + "address": "module.mymodule1", + "child_modules": [ + { + "resources": [ + { + **self.tf_lambda_function_resource_zip_3, + "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_3}", + }, + { + **self.tf_lambda_function_resource_zip_3_sam_metadata, + "address": f"module.mymodule1.module.mymodule2.null_resource.sam_metadata_{self.zip_function_name_3}", + }, + ], + "address": "module.mymodule1.module.mymodule2", + }, + { + "resources": [ + { + **self.tf_lambda_function_resource_zip_4, + "address": f"module.mymodule1.module.mymodule3.aws_lambda_function.{self.zip_function_name_4}", + }, + { + **self.tf_lambda_function_resource_zip_4_sam_metadata, + "address": f"module.mymodule1.module.mymodule3.null_resource.sam_metadata_{self.zip_function_name_4}", + }, + ], + "address": "module.mymodule1.module.mymodule3", + }, + ], + } + ], + } + } + } + self.expected_cfn_with_child_modules: dict = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + f"AwsLambdaFunctionMyfunc{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip, + f"ModuleMymodule1AwsLambdaFunctionMyfunc2{self.mock_logical_id_hash}": { + **self.expected_cfn_lambda_function_resource_zip_2, + "Metadata": { + "SamResourceId": f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}", + "SkipBuild": True, + }, + }, + 
f"ModuleMymodule1ModuleMymodule2AwsLambdaFunctionMyfunc3{self.mock_logical_id_hash}": { + **self.expected_cfn_lambda_function_resource_zip_3, + "Metadata": { + "SamResourceId": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_3}", + "SkipBuild": True, + }, + }, + f"ModuleMymodule1ModuleMymodule2AwsLambdaFunctionMyfunc4{self.mock_logical_id_hash}": { + **self.expected_cfn_lambda_function_resource_zip_4, + "Metadata": { + "SamResourceId": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_4}", + "SkipBuild": True, + }, + }, + }, + } + + self.tf_json_with_unsupported_provider: dict = { + "planned_values": { + "root_module": { + "resources": [ + self.tf_lambda_function_resource_zip, + {**self.tf_lambda_function_resource_zip, "provider": "some.other.provider"}, + self.tf_lambda_function_resource_zip_2, + ] + } + } + } + self.expected_cfn_with_unsupported_provider: dict = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + f"AwsLambdaFunctionMyfunc{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip, + f"AwsLambdaFunctionMyfunc2{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip_2, + }, + } + + self.tf_json_with_unsupported_resource_type: dict = { + "planned_values": { + "root_module": { + "resources": [ + self.tf_lambda_function_resource_zip, + {**self.tf_lambda_function_resource_zip, "type": "aws_iam_role"}, + self.tf_lambda_function_resource_zip_2, + ] + } + } + } + self.expected_cfn_with_unsupported_resource_type: dict = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + f"AwsLambdaFunctionMyfunc{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip, + f"AwsLambdaFunctionMyfunc2{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip_2, + }, + } + + self.tf_json_with_child_modules_and_s3_source_mapping: dict = { + "planned_values": { + "root_module": { + "resources": [ + 
self.tf_lambda_function_resource_zip, + self.tf_lambda_function_resource_s3, + self.tf_s3_object_resource, + ], + "child_modules": [ + { + "resources": [ + { + **self.tf_lambda_function_resource_zip_2, + "address": f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}", + }, + { + **self.tf_s3_object_resource_2, + "address": "module.mymodule1.aws_lambda_function.s3_lambda_code_2", + }, + ], + "child_modules": [ + { + "resources": [ + { + **self.tf_lambda_function_resource_s3_2, + "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.s3_function_name_2}", + }, + ], + "address": "module.m1.module.m2", + }, + { + "resources": [ + { + **self.tf_lambda_function_resource_zip_4, + "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_4}", + }, + ], + "address": "module.m1.module.m2\3", + }, + ], + "address": "module.m1", + } + ], + } + } + } + self.expected_cfn_with_child_modules_and_s3_source_mapping: dict = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + f"AwsLambdaFunctionMyfunc{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip, + f"AwsLambdaFunctionMyfuncS3{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_s3_after_source_mapping, + f"ModuleMymodule1AwsLambdaFunctionMyfunc2{self.mock_logical_id_hash}": { + **self.expected_cfn_lambda_function_resource_zip_2, + "Metadata": { + "SamResourceId": f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}", + "SkipBuild": True, + }, + }, + f"ModuleMymodule1ModuleMymodule2AwsLambdaFunctionMyfuncS32{self.mock_logical_id_hash}": { + **self.expected_cfn_lambda_function_resource_s3_after_source_mapping_2, + "Metadata": { + "SamResourceId": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.s3_function_name_2}", + "SkipBuild": True, + }, + }, + f"ModuleMymodule1ModuleMymodule2AwsLambdaFunctionMyfunc4{self.mock_logical_id_hash}": { + 
**self.expected_cfn_lambda_function_resource_zip_4, + "Metadata": { + "SamResourceId": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_4}", + "SkipBuild": True, + }, + }, + }, + } + + self.prepare_params: dict = { + "IACProjectPath": "iac/project/path", + "OutputDirPath": "output/dir/path", + "Debug": False, + "Profile": None, + "Region": None, + "SkipPrepareInfra": False, + } diff --git a/tests/unit/hook_packages/terraform/hooks/prepare/test_enrich.py b/tests/unit/hook_packages/terraform/hooks/prepare/test_enrich.py new file mode 100644 index 0000000000..6cecd411a0 --- /dev/null +++ b/tests/unit/hook_packages/terraform/hooks/prepare/test_enrich.py @@ -0,0 +1,1239 @@ +"""Test Terraform prepare enrichment""" +from unittest.mock import Mock, call, patch +from parameterized import parameterized +from subprocess import CalledProcessError + +from tests.unit.hook_packages.terraform.hooks.prepare.prepare_base import PrepareHookUnitBase +from samcli.hook_packages.terraform.hooks.prepare.types import ( + SamMetadataResource, +) +from samcli.hook_packages.terraform.hooks.prepare.enrich import ( + enrich_resources_and_generate_makefile, + _enrich_zip_lambda_function, + _enrich_image_lambda_function, + _enrich_lambda_layer, + _validate_referenced_resource_layer_matches_metadata_type, + _get_source_code_path, + _get_relevant_cfn_resource, + _validate_referenced_resource_matches_sam_metadata_type, + _get_python_command_name, +) +from samcli.hook_packages.terraform.hooks.prepare.types import TFResource +from samcli.lib.hook.exceptions import PrepareHookException +from samcli.lib.utils.resources import ( + AWS_LAMBDA_FUNCTION as CFN_AWS_LAMBDA_FUNCTION, + AWS_LAMBDA_LAYERVERSION, +) +from samcli.hook_packages.terraform.hooks.prepare.exceptions import InvalidSamMetadataPropertiesException +from samcli.hook_packages.terraform.hooks.prepare.translate import NULL_RESOURCE_PROVIDER_NAME + + +class TestPrepareHookMakefile(PrepareHookUnitBase): + def 
setUp(self): + super().setUp() + + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_python_command_name") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.generate_makefile") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.generate_makefile_rule_for_lambda_resource") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_relevant_cfn_resource") + @patch( + "samcli.hook_packages.terraform.hooks.prepare.enrich._validate_referenced_resource_matches_sam_metadata_type" + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_source_code_path") + def test_enrich_resources_and_generate_makefile_zip_functions( + self, + mock_get_lambda_function_source_code_path, + mock_validate_referenced_resource_matches_sam_metadata_type, + mock_get_relevant_cfn_resource, + mock_generate_makefile_rule_for_lambda_resource, + mock_generate_makefile, + mock_get_python_command_name, + ): + mock_get_python_command_name.return_value = "python" + + mock_get_lambda_function_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] + zip_function_1 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_function_common_properties, + "Code": "file.zip", + }, + "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, + } + zip_function_2 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_function_common_properties, + "Code": "file2.zip", + }, + "Metadata": {"SamResourceId": f"aws_lambda_function.func2", "SkipBuild": True}, + } + cfn_resources = { + "logical_id1": zip_function_1, + "logical_id2": zip_function_2, + } + mock_get_relevant_cfn_resource.side_effect = [ + [(zip_function_1, "logical_id1")], + [(zip_function_2, "logical_id2")], + ] + sam_metadata_resources = [ + SamMetadataResource( + current_module_address=None, + resource=self.tf_lambda_function_resource_zip_sam_metadata, + config_resource=TFResource("", "", None, {}), + ), + 
SamMetadataResource( + current_module_address=None, + resource=self.tf_lambda_function_resource_zip_2_sam_metadata, + config_resource=TFResource("", "", None, {}), + ), + ] + + expected_zip_function_1 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_function_common_properties, + "Code": "src/code/path1", + }, + "Metadata": { + "SamResourceId": "aws_lambda_function.func1", + "SkipBuild": False, + "BuildMethod": "makefile", + "ContextPath": "/output/dir", + "WorkingDirectory": "/terraform/project/root", + "ProjectRootDirectory": "/terraform/project/root", + }, + } + expected_zip_function_2 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_function_common_properties, + "Code": "src/code/path2", + }, + "Metadata": { + "SamResourceId": "aws_lambda_function.func2", + "SkipBuild": False, + "BuildMethod": "makefile", + "ContextPath": "/output/dir", + "WorkingDirectory": "/terraform/project/root", + "ProjectRootDirectory": "/terraform/project/root", + }, + } + + expected_cfn_resources = { + "logical_id1": expected_zip_function_1, + "logical_id2": expected_zip_function_2, + } + + makefile_rules = [Mock() for _ in sam_metadata_resources] + mock_generate_makefile_rule_for_lambda_resource.side_effect = makefile_rules + + enrich_resources_and_generate_makefile( + sam_metadata_resources, cfn_resources, "/output/dir", "/terraform/project/root", {} + ) + self.assertEqual(cfn_resources, expected_cfn_resources) + + mock_generate_makefile_rule_for_lambda_resource.assert_has_calls( + [ + call( + sam_metadata_resources[i], + list(expected_cfn_resources.keys())[i], + "/terraform/project/root", + "python", + "/output/dir", + ) + for i in range(len(sam_metadata_resources)) + ] + ) + + mock_generate_makefile.assert_called_once_with(makefile_rules, "/output/dir") + + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_python_command_name") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.generate_makefile") 
+ @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.generate_makefile_rule_for_lambda_resource") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_relevant_cfn_resource") + @patch( + "samcli.hook_packages.terraform.hooks.prepare.enrich._validate_referenced_resource_layer_matches_metadata_type" + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_source_code_path") + def test_enrich_resources_and_generate_makefile_layers( + self, + mock_get_lambda_layer_source_code_path, + mock_validate_referenced_resource_layer_matches_sam_metadata_type, + mock_get_relevant_cfn_resource, + mock_generate_makefile_rule_for_lambda_resource, + mock_generate_makefile, + mock_get_python_command_name, + ): + mock_get_python_command_name.return_value = "python" + mock_get_lambda_layer_source_code_path.side_effect = ["src/code/path1"] + lambda_layer = { + "Type": AWS_LAMBDA_LAYERVERSION, + "Properties": { + **self.expected_cfn_layer_common_properties, + "Content": "file.zip", + }, + "Metadata": {"SamResourceId": f"aws_lambda_layer_version.{self.lambda_layer_name}", "SkipBuild": True}, + } + cfn_resources = { + "logical_id1": lambda_layer, + } + mock_get_relevant_cfn_resource.side_effect = [ + [(lambda_layer, "logical_id1")], + ] + sam_metadata_resources = [ + SamMetadataResource( + current_module_address=None, + resource=self.tf_lambda_layer_resource_zip_sam_metadata, + config_resource=TFResource("", "", None, {}), + ), + ] + + expected_layer = { + "Type": AWS_LAMBDA_LAYERVERSION, + "Properties": { + **self.expected_cfn_layer_common_properties, + "Content": "src/code/path1", + }, + "Metadata": { + "SamResourceId": f"aws_lambda_layer_version.{self.lambda_layer_name}", + "SkipBuild": False, + "BuildMethod": "makefile", + "ContextPath": "/output/dir", + "WorkingDirectory": "/terraform/project/root", + "ProjectRootDirectory": "/terraform/project/root", + }, + } + + expected_cfn_resources = { + "logical_id1": expected_layer, + } + + makefile_rules = 
[Mock() for _ in sam_metadata_resources] + mock_generate_makefile_rule_for_lambda_resource.side_effect = makefile_rules + + enrich_resources_and_generate_makefile( + sam_metadata_resources, cfn_resources, "/output/dir", "/terraform/project/root", {} + ) + self.assertEqual(cfn_resources, expected_cfn_resources) + + mock_generate_makefile_rule_for_lambda_resource.assert_has_calls( + [ + call( + sam_metadata_resources[i], + list(expected_cfn_resources.keys())[i], + "/terraform/project/root", + "python", + "/output/dir", + ) + for i in range(len(sam_metadata_resources)) + ] + ) + + mock_generate_makefile.assert_called_once_with(makefile_rules, "/output/dir") + + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_python_command_name") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.generate_makefile") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.generate_makefile_rule_for_lambda_resource") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_relevant_cfn_resource") + @patch( + "samcli.hook_packages.terraform.hooks.prepare.enrich._validate_referenced_resource_matches_sam_metadata_type" + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_source_code_path") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._enrich_image_lambda_function") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._enrich_zip_lambda_function") + def test_enrich_resources_and_generate_makefile_mock_enrich_zip_functions( + self, + mock_enrich_zip_lambda_function, + mock_enrich_image_lambda_function, + mock_get_lambda_function_source_code_path, + mock_validate_referenced_resource_matches_sam_metadata_type, + mock_get_relevant_cfn_resource, + mock_generate_makefile_rule_for_lambda_resource, + mock_generate_makefile, + mock_get_python_command_name, + ): + mock_get_python_command_name.return_value = "python" + + mock_get_lambda_function_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] + 
zip_function_1 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_function_common_properties, + "Code": "file.zip", + }, + "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, + } + zip_function_2 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_function_common_properties, + "Code": "file2.zip", + }, + "Metadata": {"SamResourceId": f"aws_lambda_function.func2", "SkipBuild": True}, + } + cfn_resources = { + "logical_id1": zip_function_1, + "logical_id2": zip_function_2, + } + mock_get_relevant_cfn_resource.side_effect = [ + [(zip_function_1, "logical_id1")], + [(zip_function_2, "logical_id2")], + ] + sam_metadata_resources = [ + SamMetadataResource( + current_module_address=None, + resource=self.tf_lambda_function_resource_zip_sam_metadata, + config_resource=TFResource("", "", None, {}), + ), + SamMetadataResource( + current_module_address=None, + resource=self.tf_lambda_function_resource_zip_2_sam_metadata, + config_resource=TFResource("", "", None, {}), + ), + ] + + makefile_rules = [Mock() for _ in sam_metadata_resources] + mock_generate_makefile_rule_for_lambda_resource.side_effect = makefile_rules + + enrich_resources_and_generate_makefile( + sam_metadata_resources, cfn_resources, "/output/dir", "/terraform/project/root", {} + ) + mock_enrich_zip_lambda_function.assert_has_calls( + [ + call( + self.tf_lambda_function_resource_zip_sam_metadata, + zip_function_1, + "logical_id1", + "/terraform/project/root", + "/output/dir", + ), + call( + self.tf_lambda_function_resource_zip_2_sam_metadata, + zip_function_2, + "logical_id2", + "/terraform/project/root", + "/output/dir", + ), + ] + ) + mock_enrich_image_lambda_function.assert_not_called() + + mock_generate_makefile_rule_for_lambda_resource.assert_has_calls( + [ + call( + sam_metadata_resources[i], + list(cfn_resources.keys())[i], + "/terraform/project/root", + "python", + "/output/dir", + ) + for i in 
range(len(sam_metadata_resources)) + ] + ) + + mock_generate_makefile.assert_called_once_with(makefile_rules, "/output/dir") + + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_relevant_cfn_resource") + @patch( + "samcli.hook_packages.terraform.hooks.prepare.enrich._validate_referenced_resource_matches_sam_metadata_type" + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_source_code_path") + def test_enrich_mapped_resource_zip_function( + self, + mock_get_lambda_function_source_code_path, + mock_validate_referenced_resource_matches_sam_metadata_type, + mock_get_relevant_cfn_resource, + ): + mock_get_lambda_function_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] + zip_function_1 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_function_common_properties, + "Code": "file.zip", + }, + "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, + } + mock_get_relevant_cfn_resource.side_effect = [ + (zip_function_1, "logical_id1"), + ] + + expected_zip_function_1 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_function_common_properties, + "Code": "src/code/path1", + }, + "Metadata": { + "SamResourceId": "aws_lambda_function.func1", + "SkipBuild": False, + "BuildMethod": "makefile", + "ContextPath": "/output/dir", + "WorkingDirectory": "/terraform/project/root", + "ProjectRootDirectory": "/terraform/project/root", + }, + } + + _enrich_zip_lambda_function( + self.tf_lambda_function_resource_zip_sam_metadata, + zip_function_1, + "logical_id1", + "/terraform/project/root", + "/output/dir", + ) + self.assertEqual(zip_function_1, expected_zip_function_1) + + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_relevant_cfn_resource") + @patch( + "samcli.hook_packages.terraform.hooks.prepare.enrich._validate_referenced_resource_matches_sam_metadata_type" + ) + 
@patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_source_code_path") + def test_enrich_mapped_resource_zip_layer( + self, + mock_get_lambda_layer_source_code_path, + mock_validate_referenced_resource_matches_sam_metadata_type, + mock_get_relevant_cfn_resource, + ): + mock_get_lambda_layer_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] + lambda_layer_1 = { + "Type": AWS_LAMBDA_LAYERVERSION, + "Properties": { + **self.expected_cfn_layer_common_properties, + "Content": "file.zip", + }, + "Metadata": {"SamResourceId": f"aws_lambda_layer_version.lambda_layer", "SkipBuild": True}, + } + mock_get_relevant_cfn_resource.side_effect = [ + (lambda_layer_1, "logical_id1"), + ] + + expected_lambda_layer_1 = { + "Type": AWS_LAMBDA_LAYERVERSION, + "Properties": { + **self.expected_cfn_layer_common_properties, + "Content": "src/code/path1", + }, + "Metadata": { + "SamResourceId": "aws_lambda_layer_version.lambda_layer", + "SkipBuild": False, + "BuildMethod": "makefile", + "ContextPath": "/output/dir", + "WorkingDirectory": "/terraform/project/root", + "ProjectRootDirectory": "/terraform/project/root", + }, + } + + _enrich_lambda_layer( + self.tf_lambda_layer_resource_zip_sam_metadata, + lambda_layer_1, + "logical_id1", + "/terraform/project/root", + "/output/dir", + ) + self.assertEqual(lambda_layer_1, expected_lambda_layer_1) + + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_python_command_name") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.generate_makefile") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.generate_makefile_rule_for_lambda_resource") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_relevant_cfn_resource") + @patch( + "samcli.hook_packages.terraform.hooks.prepare.enrich._validate_referenced_resource_matches_sam_metadata_type" + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_source_code_path") + def 
test_enrich_resources_and_generate_makefile_image_functions( + self, + mock_get_lambda_function_source_code_path, + mock_validate_referenced_resource_matches_sam_metadata_type, + mock_get_relevant_cfn_resource, + mock_generate_makefile_rule_for_lambda_resource, + mock_generate_makefile, + mock_get_python_command_name, + ): + mock_get_python_command_name.return_value = "python" + + mock_get_lambda_function_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] + image_function_1 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_image_package_type_function_common_properties, + "ImageConfig": { + "Command": ["cmd1", "cmd2"], + "EntryPoint": ["entry1", "entry2"], + "WorkingDirectory": "/working/dir/path", + }, + "Code": { + "ImageUri": "image/uri:tag", + }, + }, + "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, + } + + cfn_resources = { + "logical_id1": image_function_1, + } + mock_get_relevant_cfn_resource.side_effect = [ + [(image_function_1, "logical_id1")], + ] + sam_metadata_resources = [ + SamMetadataResource( + current_module_address=None, + resource=self.tf_image_package_type_lambda_function_resource_sam_metadata, + config_resource=TFResource("", "", None, {}), + ), + ] + + expected_image_function_1 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_image_package_type_function_common_properties, + "ImageConfig": { + "Command": ["cmd1", "cmd2"], + "EntryPoint": ["entry1", "entry2"], + "WorkingDirectory": "/working/dir/path", + }, + }, + "Metadata": { + "SamResourceId": "aws_lambda_function.func1", + "SkipBuild": False, + "DockerContext": "src/code/path1", + "Dockerfile": "Dockerfile", + "DockerTag": "2.0", + "DockerBuildArgs": {"FOO": "bar"}, + }, + } + + expected_cfn_resources = { + "logical_id1": expected_image_function_1, + } + + makefile_rules = [Mock() for _ in sam_metadata_resources] + mock_generate_makefile_rule_for_lambda_resource.side_effect = 
makefile_rules + + enrich_resources_and_generate_makefile( + sam_metadata_resources, cfn_resources, "/output/dir", "/terraform/project/root", {} + ) + self.assertEqual(cfn_resources, expected_cfn_resources) + + mock_generate_makefile_rule_for_lambda_resource.assert_has_calls( + [ + call( + sam_metadata_resources[i], + list(cfn_resources.keys())[i], + "/terraform/project/root", + "python", + "/output/dir", + ) + for i in range(len(sam_metadata_resources)) + ] + ) + + mock_generate_makefile.assert_called_once_with(makefile_rules, "/output/dir") + + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_relevant_cfn_resource") + @patch( + "samcli.hook_packages.terraform.hooks.prepare.enrich._validate_referenced_resource_matches_sam_metadata_type" + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_source_code_path") + def test_enrich_mapped_resource_image_function( + self, + mock_get_lambda_function_source_code_path, + mock_validate_referenced_resource_matches_sam_metadata_type, + mock_get_relevant_cfn_resource, + ): + mock_get_lambda_function_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] + image_function_1 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_image_package_type_function_common_properties, + "ImageConfig": { + "Command": ["cmd1", "cmd2"], + "EntryPoint": ["entry1", "entry2"], + "WorkingDirectory": "/working/dir/path", + }, + "Code": { + "ImageUri": "image/uri:tag", + }, + }, + "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, + } + + mock_get_relevant_cfn_resource.side_effect = [ + (image_function_1, "logical_id1"), + ] + + expected_image_function_1 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_image_package_type_function_common_properties, + "ImageConfig": { + "Command": ["cmd1", "cmd2"], + "EntryPoint": ["entry1", "entry2"], + "WorkingDirectory": "/working/dir/path", + }, + }, + "Metadata": { + "SamResourceId": 
"aws_lambda_function.func1", + "SkipBuild": False, + "DockerContext": "src/code/path1", + "Dockerfile": "Dockerfile", + "DockerTag": "2.0", + "DockerBuildArgs": {"FOO": "bar"}, + }, + } + + _enrich_image_lambda_function( + self.tf_image_package_type_lambda_function_resource_sam_metadata, + image_function_1, + "logical_id1", + "/terraform/project/root", + "/output/dir", + ) + self.assertEqual(image_function_1, expected_image_function_1) + + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_python_command_name") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.generate_makefile") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.generate_makefile_rule_for_lambda_resource") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_relevant_cfn_resource") + @patch( + "samcli.hook_packages.terraform.hooks.prepare.enrich._validate_referenced_resource_matches_sam_metadata_type" + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_source_code_path") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._enrich_image_lambda_function") + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._enrich_zip_lambda_function") + def test_enrich_resources_and_generate_makefile_mock_enrich_image_functions( + self, + mock_enrich_zip_lambda_function, + mock_enrich_image_lambda_function, + mock_get_lambda_function_source_code_path, + mock_validate_referenced_resource_matches_sam_metadata_type, + mock_get_relevant_cfn_resource, + mock_generate_makefile_rule_for_lambda_resource, + mock_generate_makefile, + mock_get_python_command_name, + ): + mock_get_python_command_name.return_value = "python" + + mock_get_lambda_function_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] + image_function_1 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_image_package_type_function_common_properties, + "ImageConfig": { + "Command": ["cmd1", "cmd2"], + "EntryPoint": ["entry1", 
"entry2"], + "WorkingDirectory": "/working/dir/path", + }, + "Code": { + "ImageUri": "image/uri:tag", + }, + }, + "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, + } + + cfn_resources = { + "logical_id1": image_function_1, + } + mock_get_relevant_cfn_resource.side_effect = [ + [(image_function_1, "logical_id1")], + ] + sam_metadata_resources = [ + SamMetadataResource( + current_module_address=None, + resource=self.tf_image_package_type_lambda_function_resource_sam_metadata, + config_resource=TFResource("", "", None, {}), + ), + ] + + makefile_rules = [Mock() for _ in sam_metadata_resources] + mock_generate_makefile_rule_for_lambda_resource.side_effect = makefile_rules + + enrich_resources_and_generate_makefile( + sam_metadata_resources, cfn_resources, "/output/dir", "/terraform/project/root", {} + ) + mock_enrich_image_lambda_function.assert_called_once_with( + self.tf_image_package_type_lambda_function_resource_sam_metadata, + image_function_1, + "logical_id1", + "/terraform/project/root", + "/output/dir", + ) + mock_enrich_zip_lambda_function.assert_not_called() + + mock_generate_makefile_rule_for_lambda_resource.assert_has_calls( + [ + call( + sam_metadata_resources[i], + list(cfn_resources.keys())[i], + "/terraform/project/root", + "python", + "/output/dir", + ) + for i in range(len(sam_metadata_resources)) + ] + ) + + mock_generate_makefile.assert_called_once_with(makefile_rules, "/output/dir") + + @parameterized.expand( + [ + ("ABCDEFG",), + ('"ABCDEFG"',), + ] + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_relevant_cfn_resource") + @patch( + "samcli.hook_packages.terraform.hooks.prepare.enrich._validate_referenced_resource_matches_sam_metadata_type" + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_source_code_path") + def test_enrich_mapped_resource_image_function_invalid_docker_args( + self, + docker_args_value, + mock_get_lambda_function_source_code_path, + 
mock_validate_referenced_resource_matches_sam_metadata_type, + mock_get_relevant_cfn_resource, + ): + mock_get_lambda_function_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] + image_function_1 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_image_package_type_function_common_properties, + "ImageConfig": { + "Command": ["cmd1", "cmd2"], + "EntryPoint": ["entry1", "entry2"], + "WorkingDirectory": "/working/dir/path", + }, + "Code": { + "ImageUri": "image/uri:tag", + }, + }, + "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, + } + + mock_get_relevant_cfn_resource.side_effect = [ + (image_function_1, "logical_id1"), + ] + sam_metadata_resource = { + **self.tf_sam_metadata_resource_common_attributes, + "values": { + "triggers": { + "resource_name": f"aws_lambda_function.{self.image_function_name}", + "docker_build_args": docker_args_value, + "docker_context": "context", + "docker_file": "Dockerfile", + "docker_tag": "2.0", + "resource_type": "IMAGE_LAMBDA_FUNCTION", + }, + }, + "address": f"null_resource.sam_metadata_{self.image_function_name}", + "name": f"sam_metadata_{self.image_function_name}", + } + + with self.assertRaises( + InvalidSamMetadataPropertiesException, + msg="The sam metadata resource null_resource.sam_metadata_func1 should contain a valid json encoded " + "string for the lambda function docker build arguments.", + ): + _enrich_image_lambda_function( + sam_metadata_resource, image_function_1, "logical_id1", "/terraform/project/root", "/output/dir" + ) + + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._get_python_command_name") + def test_enrich_resources_and_generate_makefile_invalid_source_type( + self, + mock_get_python_command_name, + ): + image_function_1 = { + "Type": CFN_AWS_LAMBDA_FUNCTION, + "Properties": { + **self.expected_cfn_image_package_type_function_common_properties, + "ImageConfig": { + "Command": ["cmd1", "cmd2"], + "EntryPoint": 
["entry1", "entry2"], + "WorkingDirectory": "/working/dir/path", + }, + "Code": { + "ImageUri": "image/uri:tag", + }, + }, + "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, + } + + cfn_resources = { + "logical_id1": image_function_1, + } + sam_metadata_resources = [ + SamMetadataResource( + current_module_address=None, + resource={ + **self.tf_sam_metadata_resource_common_attributes, + "values": { + "triggers": { + "resource_name": f"aws_lambda_function.{self.image_function_name}", + "docker_build_args": '{"FOO":"bar"}', + "docker_context": "context", + "docker_file": "Dockerfile", + "docker_tag": "2.0", + "resource_type": "Invalid_resource_type", + }, + }, + "address": f"null_resource.sam_metadata_func1", + "name": f"sam_metadata_func1", + }, + config_resource=TFResource("", "", None, {}), + ), + ] + with self.assertRaises( + InvalidSamMetadataPropertiesException, + msg="The resource type Invalid_resource_type found in the sam metadata resource " + "null_resource.sam_metadata_func1 is not a correct resource type. 
The resource type should be one of " + "these values [ZIP_LAMBDA_FUNCTION, IMAGE_LAMBDA_FUNCTION]", + ): + enrich_resources_and_generate_makefile( + sam_metadata_resources, cfn_resources, "/output/dir", "/terraform/project/root", {} + ) + + def test_validate_referenced_layer_resource_matches_sam_metadata_type_valid_types(self): + cfn_resource = self.expected_cfn_layer_resource_zip + sam_metadata_attributes = self.tf_lambda_layer_resource_zip_sam_metadata.get("values").get("triggers") + try: + _validate_referenced_resource_layer_matches_metadata_type( + cfn_resource, sam_metadata_attributes, "resource_address" + ) + except InvalidSamMetadataPropertiesException: + self.fail("The testing sam metadata resource type should be valid.") + + @parameterized.expand( + [ + ( + "expected_cfn_lambda_function_resource_zip", + "tf_lambda_layer_resource_zip_sam_metadata", + ), + ( + "expected_cfn_image_package_type_lambda_function_resource", + "tf_lambda_layer_resource_zip_sam_metadata", + ), + ] + ) + def test_validate_referenced_resource_layer_matches_sam_metadata_type_invalid_types( + self, cfn_resource_name, sam_metadata_attributes_name + ): + cfn_resource = self.__getattribute__(cfn_resource_name) + sam_metadata_attributes = self.__getattribute__(sam_metadata_attributes_name).get("values").get("triggers") + with self.assertRaises( + InvalidSamMetadataPropertiesException, + msg=f"The sam metadata resource resource_address is referring to a resource that does not " + f"match the resource type AWS::Lambda::LayerVersion.", + ): + _validate_referenced_resource_layer_matches_metadata_type( + cfn_resource, sam_metadata_attributes, "resource_address" + ) + + @parameterized.expand( + [ + ("/src/code/path", None, "/src/code/path", True), + ("src/code/path", None, "src/code/path", False), + ('"/src/code/path"', None, "/src/code/path", True), + ('"src/code/path"', None, "src/code/path", False), + ('{"path":"/src/code/path"}', "path", "/src/code/path", True), + 
('{"path":"src/code/path"}', "path", "src/code/path", False), + ({"path": "/src/code/path"}, "path", "/src/code/path", True), + ({"path": "src/code/path"}, "path", "src/code/path", False), + ('["/src/code/path"]', "None", "/src/code/path", True), + ('["src/code/path"]', "None", "src/code/path", False), + (["/src/code/path"], "None", "/src/code/path", True), + (["src/code/path"], "None", "src/code/path", False), + ('["/src/code/path", "/src/code/path2"]', "None", "/src/code/path", True), + ('["src/code/path", "src/code/path2"]', "None", "src/code/path", False), + (["/src/code/path", "/src/code/path2"], "None", "/src/code/path", True), + (["src/code/path", "/src/code/path2"], "None", "src/code/path", False), + ] + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.os") + def test_get_lambda_function_source_code_path_valid_metadata_resource( + self, original_source_code, source_code_property, expected_path, is_abs, mock_os + ): + mock_path = Mock() + mock_os.path = mock_path + mock_isabs = Mock() + mock_isabs.return_value = is_abs + mock_path.isabs = mock_isabs + + mock_exists = Mock() + mock_exists.return_value = True + mock_path.exists = mock_exists + + if not is_abs: + mock_normpath = Mock() + mock_normpath.return_value = f"/project/root/dir/{expected_path}" + expected_path = f"/project/root/dir/{expected_path}" + mock_path.normpath = mock_normpath + mock_join = Mock() + mock_join.return_value = expected_path + mock_path.join = mock_join + sam_metadata_attributes = { + **self.tf_zip_function_sam_metadata_properties, + "original_source_code": original_source_code, + } + if source_code_property: + sam_metadata_attributes = { + **sam_metadata_attributes, + "source_code_property": source_code_property, + } + sam_resource = {"values": {"triggers": sam_metadata_attributes}} + path = _get_source_code_path( + sam_resource, + "resource_address", + "/project/root/dir", + "original_source_code", + "source_code_property", + "source code", + ) + 
self.assertEqual(path, expected_path) + + @parameterized.expand( + [ + ( + "/src/code/path", + None, + False, + "The sam metadata resource resource_address should contain a valid lambda function source code path", + ), + ( + None, + None, + True, + "The sam metadata resource resource_address should contain the lambda function source code in " + "property original_source_code", + ), + ( + '{"path":"/src/code/path"}', + None, + True, + "The sam metadata resource resource_address should contain the lambda function source code property in " + "property source_code_property as the original_source_code value is an object", + ), + ( + {"path": "/src/code/path"}, + None, + True, + "The sam metadata resource resource_address should contain the lambda function source code property " + "in property source_code_property as the original_source_code value is an object", + ), + ( + '{"path":"/src/code/path"}', + "path1", + True, + "The sam metadata resource resource_address should contain a valid lambda function source code " + "property in property source_code_property as the original_source_code value is an object", + ), + ( + {"path": "/src/code/path"}, + "path1", + True, + "The sam metadata resource resource_address should contain a valid lambda function source code " + "property in property source_code_property as the original_source_code value is an object", + ), + ( + "[]", + None, + True, + "The sam metadata resource resource_address should contain the lambda function source code in " + "property original_source_code, and it should not be an empty list", + ), + ( + [], + None, + True, + "The sam metadata resource resource_address should contain the lambda function source code in " + "property original_source_code, and it should not be an empty list", + ), + ( + "[null]", + None, + True, + "The sam metadata resource resource_address should contain a valid lambda function source code in " + "property original_source_code", + ), + ( + [None], + None, + True, + "The sam 
metadata resource resource_address should contain a valid lambda function source code in " + "property original_source_code", + ), + ] + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.os") + def test_get_lambda_function_source_code_path_invalid_metadata_resources( + self, original_source_code, source_code_property, does_exist, exception_message, mock_os + ): + mock_path = Mock() + mock_os.path = mock_path + mock_isabs = Mock() + mock_isabs.return_value = True + mock_path.isabs = mock_isabs + + mock_exists = Mock() + mock_exists.return_value = does_exist + mock_path.exists = mock_exists + + sam_metadata_attributes = { + **self.tf_zip_function_sam_metadata_properties, + "original_source_code": original_source_code, + } + if source_code_property: + sam_metadata_attributes = { + **sam_metadata_attributes, + "source_code_property": source_code_property, + } + with self.assertRaises(InvalidSamMetadataPropertiesException, msg=exception_message): + _get_source_code_path( + sam_metadata_attributes, + "resource_address", + "/project/root/dir", + "original_source_code", + "source_code_property", + "source code", + ) + + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.build_cfn_logical_id") + def test_get_relevant_cfn_resource(self, mock_build_cfn_logical_id): + sam_metadata_resource = SamMetadataResource( + current_module_address="module.mymodule1", + resource={ + **self.tf_lambda_function_resource_zip_2_sam_metadata, + "address": f"module.mymodule1.null_resource.sam_metadata_{self.zip_function_name_2}", + }, + config_resource=TFResource("", "", None, {}), + ) + cfn_resources = { + "ABCDEFG": self.expected_cfn_lambda_function_resource_zip_2, + "logical_id_3": self.expected_cfn_lambda_function_resource_zip_3, + } + mock_build_cfn_logical_id.side_effect = ["ABCDEFG"] + resources_list = _get_relevant_cfn_resource(sam_metadata_resource, cfn_resources, {}) + self.assertEqual(len(resources_list), 1) + relevant_resource, return_logical_id = 
resources_list[0] + + mock_build_cfn_logical_id.assert_called_once_with( + f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}" + ) + self.assertEqual(relevant_resource, self.expected_cfn_lambda_function_resource_zip_2) + self.assertEqual(return_logical_id, "ABCDEFG") + + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich._calculate_configuration_attribute_value_hash") + def test_get_relevant_cfn_resource_for_metadata_does_not_contain_resource_name( + self, mock_calculate_configuration_attribute_value_hash + ): + sam_metadata_resource = SamMetadataResource( + current_module_address="module.mymodule1", + resource={ + "type": "null_resource", + "provider_name": NULL_RESOURCE_PROVIDER_NAME, + "values": { + "triggers": { + "built_output_path": "builds/func2.zip", + "original_source_code": "./src/lambda_func2", + "resource_type": "ZIP_LAMBDA_FUNCTION", + } + }, + "name": f"sam_metadata_{self.zip_function_name_2}", + "address": f"module.mymodule1.null_resource.sam_metadata_{self.zip_function_name_2}", + }, + config_resource=TFResource("", "", None, {}), + ) + cfn_resources = { + "ABCDEFG": self.expected_cfn_lambda_function_resource_zip_2, + "logical_id_3": self.expected_cfn_lambda_function_resource_zip_3, + } + mock_calculate_configuration_attribute_value_hash.side_effect = ["code_hash"] + lambda_resources_code_map = {"zip_code_hash": [(self.expected_cfn_lambda_function_resource_zip_2, "ABCDEFG")]} + resources_list = _get_relevant_cfn_resource(sam_metadata_resource, cfn_resources, lambda_resources_code_map) + self.assertEqual(len(resources_list), 1) + relevant_resource, return_logical_id = resources_list[0] + + self.assertEqual(relevant_resource, self.expected_cfn_lambda_function_resource_zip_2) + self.assertEqual(return_logical_id, "ABCDEFG") + mock_calculate_configuration_attribute_value_hash.assert_has_calls([call("builds/func2.zip")]) + + @parameterized.expand( + [ + ( + None, + "module.mymodule1", + ["ABCDEFG"], + "AWS SAM CLI expects the sam 
metadata resource null_resource.sam_metadata_func2 to contain a resource name " + "that will be enriched using this metadata resource", + ), + ( + "resource_name_value", + None, + ["Not_valid"], + "There is no resource found that match the provided resource name null_resource.sam_metadata_func2", + ), + ( + "resource_name_value", + "module.mymodule1", + ["Not_valid", "Not_valid"], + "There is no resource found that match the provided resource name null_resource.sam_metadata_func2", + ), + ] + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.build_cfn_logical_id") + def test_get_relevant_cfn_resource_exceptions( + self, resource_name, module_name, build_logical_id_output, exception_message, mock_build_cfn_logical_id + ): + sam_metadata_resource = SamMetadataResource( + current_module_address=module_name, + resource={ + **self.tf_sam_metadata_resource_common_attributes, + "values": { + "triggers": { + "built_output_path": "builds/func2.zip", + "original_source_code": "./src/lambda_func2", + "resource_name": resource_name, + "resource_type": "ZIP_LAMBDA_FUNCTION", + }, + }, + "address": "null_resource.sam_metadata_func2", + "name": "sam_metadata_func2", + }, + config_resource=TFResource("", "", None, {}), + ) + cfn_resources = { + "ABCDEFG": self.expected_cfn_lambda_function_resource_zip_2, + "logical_id_3": self.expected_cfn_lambda_function_resource_zip_3, + } + mock_build_cfn_logical_id.side_effect = build_logical_id_output + with self.assertRaises(InvalidSamMetadataPropertiesException, msg=exception_message): + _get_relevant_cfn_resource(sam_metadata_resource, cfn_resources, {}) + + @parameterized.expand( + [ + ("expected_cfn_lambda_function_resource_zip", "tf_lambda_function_resource_zip_sam_metadata", "Zip"), + ( + "expected_cfn_image_package_type_lambda_function_resource", + "tf_image_package_type_lambda_function_resource_sam_metadata", + "Image", + ), + ] + ) + def test_validate_referenced_resource_matches_sam_metadata_type_valid_types( + self, 
cfn_resource_name, sam_metadata_attributes_name, expected_package_type + ): + cfn_resource = self.__getattribute__(cfn_resource_name) + sam_metadata_attributes = self.__getattribute__(sam_metadata_attributes_name).get("values").get("triggers") + try: + _validate_referenced_resource_matches_sam_metadata_type( + cfn_resource, sam_metadata_attributes, "resource_address", expected_package_type + ) + except InvalidSamMetadataPropertiesException: + self.fail("The testing sam metadata resource type should be valid.") + + @parameterized.expand( + [ + ( + "expected_cfn_lambda_function_resource_zip", + "tf_image_package_type_lambda_function_resource_sam_metadata", + "Image", + "IMAGE_LAMBDA_FUNCTION", + ), + ( + "expected_cfn_image_package_type_lambda_function_resource", + "tf_lambda_function_resource_zip_sam_metadata", + "Zip", + "ZIP_LAMBDA_FUNCTION", + ), + ] + ) + def test_validate_referenced_resource_matches_sam_metadata_type_invalid_types( + self, cfn_resource_name, sam_metadata_attributes_name, expected_package_type, metadata_source_type + ): + cfn_resource = self.__getattribute__(cfn_resource_name) + sam_metadata_attributes = self.__getattribute__(sam_metadata_attributes_name).get("values").get("triggers") + with self.assertRaises( + InvalidSamMetadataPropertiesException, + msg=f"The sam metadata resource resource_address is referring to a resource that does not " + f"match the resource type {metadata_source_type}.", + ): + _validate_referenced_resource_matches_sam_metadata_type( + cfn_resource, sam_metadata_attributes, "resource_address", expected_package_type + ) + + @parameterized.expand( + [ + ([CalledProcessError(-2, "python3 --version"), Mock(stdout="Python 3.8.10")], "py3"), + ([Mock(stdout="Python 3.7.12"), CalledProcessError(-2, "py3 --version")], "python3"), + ([Mock(stdout="Python 3.7")], "python3"), + ([Mock(stdout="Python 3.7.0")], "python3"), + ([Mock(stdout="Python 3.7.12")], "python3"), + ([Mock(stdout="Python 3.8")], "python3"), + 
([Mock(stdout="Python 3.8.0")], "python3"), + ([Mock(stdout="Python 3.8.12")], "python3"), + ([Mock(stdout="Python 3.9")], "python3"), + ([Mock(stdout="Python 3.9.0")], "python3"), + ([Mock(stdout="Python 3.9.12")], "python3"), + ([Mock(stdout="Python 3.10")], "python3"), + ([Mock(stdout="Python 3.10.0")], "python3"), + ([Mock(stdout="Python 3.10.12")], "python3"), + ( + [ + Mock(stdout="Python 3.6.10"), + Mock(stdout="Python 3.0.10"), + Mock(stdout="Python 2.7.10"), + Mock(stdout="Python 3.7.12"), + ], + "py", + ), + ] + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.run") + def test_get_python_command_name(self, mock_run_side_effect, expected_python_command, mock_subprocess_run): + mock_subprocess_run.side_effect = mock_run_side_effect + + python_command = _get_python_command_name() + self.assertEqual(python_command, expected_python_command) + + @parameterized.expand( + [ + ( + [ + CalledProcessError(-2, "python3 --version"), + CalledProcessError(-2, "py3 --version"), + CalledProcessError(-2, "python --version"), + CalledProcessError(-2, "py --version"), + ], + ), + ( + [ + Mock(stdout="Python 3"), + Mock(stdout="Python 3.0"), + Mock(stdout="Python 3.0.10"), + Mock(stdout="Python 3.6"), + ], + ), + ( + [ + Mock(stdout="Python 3.6.10"), + Mock(stdout="Python 2"), + Mock(stdout="Python 2.7"), + Mock(stdout="Python 2.7.10"), + ], + ), + ( + [ + Mock(stdout="Python 4"), + Mock(stdout="Python 4.7"), + Mock(stdout="Python 4.7.10"), + Mock(stdout="Python 4.7.10"), + ], + ), + ] + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.enrich.run") + def test_get_python_command_name_python_not_found(self, mock_run_side_effect, mock_subprocess_run): + mock_subprocess_run.side_effect = mock_run_side_effect + + expected_error_msg = "Python not found. Please ensure that python 3.7 or above is installed." 
+ with self.assertRaises(PrepareHookException, msg=expected_error_msg): + _get_python_command_name() diff --git a/tests/unit/hook_packages/terraform/hooks/prepare/test_hook.py b/tests/unit/hook_packages/terraform/hooks/prepare/test_hook.py index dad3df2853..60e95b6ac6 100644 --- a/tests/unit/hook_packages/terraform/hooks/prepare/test_hook.py +++ b/tests/unit/hook_packages/terraform/hooks/prepare/test_hook.py @@ -1,2685 +1,18 @@ """Test Terraform prepare hook""" -from pathlib import Path -from subprocess import CalledProcessError, PIPE -from unittest import TestCase -from unittest.mock import Mock, call, patch, MagicMock, ANY -import copy +from subprocess import CalledProcessError +from unittest.mock import Mock, call, patch, MagicMock from parameterized import parameterized -from samcli.hook_packages.terraform.hooks.prepare.hook import ( - AWS_LAMBDA_FUNCTION_PROPERTY_BUILDER_MAPPING, - AWS_PROVIDER_NAME, - prepare, - _get_s3_object_hash, - _get_property_extractor, - _build_lambda_function_environment_property, - _build_code_property, - _translate_properties, - _translate_to_cfn, - _map_s3_sources_to_functions, - _update_resources_paths, - _build_lambda_function_image_config_property, - _check_image_config_value, - NULL_RESOURCE_PROVIDER_NAME, - SamMetadataResource, - _validate_referenced_resource_matches_sam_metadata_type, - _get_source_code_path, - _get_relevant_cfn_resource, - _enrich_lambda_layer, - _enrich_resources_and_generate_makefile, - _enrich_zip_lambda_function, - _enrich_image_lambda_function, - _generate_makefile, - _get_python_command_name, - _generate_makefile_rule_for_lambda_resource, - _get_makefile_build_target, - _get_parent_modules, - _build_jpath_string, - _validate_referenced_resource_layer_matches_metadata_type, - _format_makefile_recipe, - _build_makerule_python_command, - _link_lambda_functions_to_layers, - _add_child_modules_to_queue, - _check_dummy_remote_values, - REMOTE_DUMMY_VALUE, - _add_metadata_resource_to_metadata_list, -) -from 
samcli.hook_packages.terraform.hooks.prepare.types import TFModule, TFResource, ConstantValue, ResolvedReference -from samcli.lib.hook.exceptions import PrepareHookException -from samcli.hook_packages.terraform.hooks.prepare.exceptions import InvalidSamMetadataPropertiesException -from samcli.lib.utils.resources import ( - AWS_LAMBDA_FUNCTION as CFN_AWS_LAMBDA_FUNCTION, - AWS_LAMBDA_LAYERVERSION, - AWS_LAMBDA_FUNCTION, -) -from samcli.lib.utils.subprocess_utils import LoadingPatternError - - -class TestPrepareHook(TestCase): - def setUp(self) -> None: - self.output_dir = "/output/dir" - self.project_root = "/project/root" - - self.mock_logical_id_hash = "12AB34CD" - - self.s3_bucket = "mybucket" - self.s3_key = "mykey" - self.s3_object_version = "myversion" - self.s3_source = "mysource1.zip" - self.s3_bucket_2 = "mybucket2" - self.s3_key_2 = "mykey2" - self.s3_source_2 = "mysource2.zip" - - self.zip_function_name = "myfunc" - self.zip_function_name_2 = "myfunc2" - self.zip_function_name_3 = "myfunc3" - self.zip_function_name_4 = "myfunc4" - self.s3_function_name = "myfuncS3" - self.s3_function_name_2 = "myfuncS3_2" - self.image_function_name = "image_func" - self.lambda_layer_name = "lambda_layer" - - self.tf_function_common_properties: dict = { - "function_name": self.zip_function_name, - "architectures": ["x86_64"], - "environment": [{"variables": {"foo": "bar", "hello": "world"}}], - "handler": "index.handler", - "package_type": "Zip", - "runtime": "python3.7", - "layers": ["layer_arn1", "layer_arn2"], - "timeout": 3, - } - self.expected_cfn_function_common_properties: dict = { - "FunctionName": self.zip_function_name, - "Architectures": ["x86_64"], - "Environment": {"Variables": {"foo": "bar", "hello": "world"}}, - "Handler": "index.handler", - "PackageType": "Zip", - "Runtime": "python3.7", - "Layers": ["layer_arn1", "layer_arn2"], - "Timeout": 3, - } - - self.tf_image_package_type_function_common_properties: dict = { - "function_name": 
self.image_function_name, - "architectures": ["x86_64"], - "environment": [{"variables": {"foo": "bar", "hello": "world"}}], - "package_type": "Image", - "timeout": 3, - } - self.expected_cfn_image_package_type_function_common_properties: dict = { - "FunctionName": self.image_function_name, - "Architectures": ["x86_64"], - "Environment": {"Variables": {"foo": "bar", "hello": "world"}}, - "PackageType": "Image", - "Timeout": 3, - } - - self.tf_layer_common_properties: dict = { - "layer_name": self.lambda_layer_name, - "compatible_runtimes": ["nodejs14.x", "nodejs16.x"], - "compatible_architectures": ["arm64"], - } - self.expected_cfn_layer_common_properties: dict = { - "LayerName": self.lambda_layer_name, - "CompatibleRuntimes": ["nodejs14.x", "nodejs16.x"], - "CompatibleArchitectures": ["arm64"], - } - - self.tf_lambda_layer_properties_zip: dict = { - **self.tf_layer_common_properties, - "filename": "file.zip", - } - self.tf_lambda_layer_properties_s3: dict = { - **self.tf_layer_common_properties, - "s3_bucket": "bucket_name", - "s3_key": "bucket_key", - "s3_object_version": "1", - } - self.tf_lambda_layer_sam_metadata_properties: dict = { - "triggers": { - "built_output_path": "builds/func.zip", - "original_source_code": "./src/lambda_layer", - "resource_name": f"aws_lambda_layer_version.{self.lambda_layer_name}", - "resource_type": "LAMBDA_LAYER", - }, - } - self.expected_cfn_lambda_layer_properties_zip: dict = { - **self.expected_cfn_layer_common_properties, - "Content": "file.zip", - } - self.expected_cfn_lambda_layer_properties_s3: dict = { - **self.expected_cfn_layer_common_properties, - "Content": { - "S3Bucket": "bucket_name", - "S3Key": "bucket_key", - "S3ObjectVersion": "1", - }, - } - - self.expected_cfn_layer_resource_s3: dict = { - "Type": AWS_LAMBDA_LAYERVERSION, - "Properties": self.expected_cfn_lambda_layer_properties_s3, - "Metadata": {"SamResourceId": f"aws_lambda_layer_version.{self.lambda_layer_name}", "SkipBuild": True}, - } - - 
self.expected_cfn_layer_resource_zip: dict = { - "Type": AWS_LAMBDA_LAYERVERSION, - "Properties": self.expected_cfn_lambda_layer_properties_zip, - "Metadata": {"SamResourceId": f"aws_lambda_layer_version.{self.lambda_layer_name}", "SkipBuild": True}, - } - - self.tf_zip_function_properties: dict = { - **self.tf_function_common_properties, - "filename": "file.zip", - } - self.tf_zip_function_sam_metadata_properties: dict = { - "triggers": { - "built_output_path": "builds/func.zip", - "original_source_code": "./src/lambda_func", - "resource_name": f"aws_lambda_function.{self.zip_function_name}", - "resource_type": "ZIP_LAMBDA_FUNCTION", - }, - } - self.expected_cfn_zip_function_properties: dict = { - **self.expected_cfn_function_common_properties, - "Code": "file.zip", - } - - self.tf_image_package_type_function_properties: dict = { - **self.tf_image_package_type_function_common_properties, - "image_config": [ - { - "command": ["cmd1", "cmd2"], - "entry_point": ["entry1", "entry2"], - "working_directory": "/working/dir/path", - } - ], - "image_uri": "image/uri:tag", - } - self.tf_image_package_type_function_sam_metadata_properties: dict = { - "triggers": { - "resource_name": f"aws_lambda_function.{self.image_function_name}", - "docker_build_args": '{"FOO":"bar"}', - "docker_context": "context", - "docker_file": "Dockerfile", - "docker_tag": "2.0", - "resource_type": "IMAGE_LAMBDA_FUNCTION", - }, - } - self.expected_cfn_image_package_function_properties: dict = { - **self.expected_cfn_image_package_type_function_common_properties, - "ImageConfig": { - "Command": ["cmd1", "cmd2"], - "EntryPoint": ["entry1", "entry2"], - "WorkingDirectory": "/working/dir/path", - }, - "Code": { - "ImageUri": "image/uri:tag", - }, - } - - self.tf_s3_function_properties: dict = { - **self.tf_function_common_properties, - "function_name": self.s3_function_name, - "s3_bucket": self.s3_bucket, - "s3_key": self.s3_key, - "s3_object_version": self.s3_object_version, - } - 
self.expected_cfn_s3_function_properties: dict = { - **self.expected_cfn_function_common_properties, - "FunctionName": self.s3_function_name, - "Code": {"S3Bucket": self.s3_bucket, "S3Key": self.s3_key, "S3ObjectVersion": self.s3_object_version}, - } - self.expected_cfn_s3_function_properties_after_source_mapping: dict = { - **self.expected_cfn_function_common_properties, - "FunctionName": self.s3_function_name, - "Code": self.s3_source, - } - - self.expected_cfn_s3_layer_properties_after_source_mapping: dict = { - **self.expected_cfn_layer_common_properties, - "LayerName": self.lambda_layer_name, - "Content": self.s3_source, - } - - self.expected_cfn_s3_layer_resource_after_source_mapping: dict = { - **self.expected_cfn_layer_resource_s3, - "Properties": self.expected_cfn_s3_layer_properties_after_source_mapping, - } - - self.tf_s3_function_properties_2: dict = { - **self.tf_function_common_properties, - "function_name": self.s3_function_name_2, - "s3_bucket": self.s3_bucket_2, - "s3_key": self.s3_key_2, - "s3_object_version": self.s3_object_version, - } - self.expected_cfn_s3_function_properties_2: dict = { - **self.expected_cfn_function_common_properties, - "FunctionName": self.s3_function_name_2, - "Code": {"S3Bucket": self.s3_bucket_2, "S3Key": self.s3_key_2}, - } - self.expected_cfn_s3_function_properties_after_source_mapping_2: dict = { - **self.expected_cfn_function_common_properties, - "FunctionName": self.s3_function_name_2, - "Code": self.s3_source_2, - } - - self.tf_function_properties_with_missing_or_none: dict = { - "function_name": self.zip_function_name, - "filename": "file.zip", - "environment": None, - "layers": None, - } - self.expected_cfn_function_properties_with_missing_or_none: dict = { - "FunctionName": self.zip_function_name, - "Code": "file.zip", - } - - self.tf_zip_function_properties_2: dict = { - "function_name": self.zip_function_name_2, - "architectures": ["x86_64"], - "environment": [{"variables": {"hi": "there"}}], - "handler": 
"index.handler2", - "package_type": "Zip", - "runtime": "python3.8", - "layers": ["layer_arn"], - "filename": "file2.zip", - } - self.tf_zip_function_sam_metadata_properties_2: dict = { - "triggers": { - "built_output_path": "builds/func2.zip", - "original_source_code": "./src/lambda_func2", - "resource_name": f"aws_lambda_function.{self.zip_function_name_2}", - "resource_type": "ZIP_LAMBDA_FUNCTION", - }, - } - self.expected_cfn_zip_function_properties_2: dict = { - "FunctionName": self.zip_function_name_2, - "Architectures": ["x86_64"], - "Environment": {"Variables": {"hi": "there"}}, - "Handler": "index.handler2", - "PackageType": "Zip", - "Runtime": "python3.8", - "Layers": ["layer_arn"], - "Code": "file2.zip", - } - - self.tf_zip_function_properties_3: dict = { - **self.tf_zip_function_properties_2, - "function_name": self.zip_function_name_3, - } - self.tf_zip_function_sam_metadata_properties_3: dict = { - "triggers": { - "built_output_path": "builds/func3.zip", - "original_source_code": "./src/lambda_func3", - "resource_name": f"aws_lambda_function.{self.zip_function_name_3}", - "resource_type": "ZIP_LAMBDA_FUNCTION", - }, - } - self.expected_cfn_zip_function_properties_3: dict = { - **self.expected_cfn_zip_function_properties_2, - "FunctionName": self.zip_function_name_3, - } - self.tf_zip_function_properties_4: dict = { - **self.tf_zip_function_properties_2, - "function_name": self.zip_function_name_4, - } - self.tf_zip_function_sam_metadata_properties_4: dict = { - "triggers": { - "built_output_path": "builds/func4.zip", - "original_source_code": "./src/lambda_func4", - "resource_name": f"aws_lambda_function.{self.zip_function_name_4}", - "resource_type": "ZIP_LAMBDA_FUNCTION", - }, - } - self.expected_cfn_zip_function_properties_4: dict = { - **self.expected_cfn_zip_function_properties_2, - "FunctionName": self.zip_function_name_4, - } - - self.tf_lambda_function_resource_common_attributes: dict = { - "type": "aws_lambda_function", - "provider_name": 
AWS_PROVIDER_NAME, - } - - self.tf_lambda_layer_resource_common_attributes: dict = { - "type": "aws_lambda_layer_version", - "provider_name": AWS_PROVIDER_NAME, - } - - self.tf_sam_metadata_resource_common_attributes: dict = { - "type": "null_resource", - "provider_name": NULL_RESOURCE_PROVIDER_NAME, - } - - self.tf_lambda_function_resource_zip: dict = { - **self.tf_lambda_function_resource_common_attributes, - "values": self.tf_zip_function_properties, - "address": f"aws_lambda_function.{self.zip_function_name}", - "name": self.zip_function_name, - } - self.tf_lambda_function_resource_zip_sam_metadata: dict = { - **self.tf_sam_metadata_resource_common_attributes, - "values": self.tf_zip_function_sam_metadata_properties, - "address": f"null_resource.sam_metadata_{self.zip_function_name}", - "name": f"sam_metadata_{self.zip_function_name}", - } - self.expected_cfn_lambda_function_resource_zip: dict = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": self.expected_cfn_zip_function_properties, - "Metadata": {"SamResourceId": f"aws_lambda_function.{self.zip_function_name}", "SkipBuild": True}, - } - - self.tf_lambda_layer_resource_zip: dict = { - **self.tf_lambda_layer_resource_common_attributes, - "values": self.tf_lambda_layer_properties_zip, - "address": f"aws_lambda_function.{self.lambda_layer_name}", - "name": self.lambda_layer_name, - } - self.tf_lambda_layer_resource_zip_sam_metadata: dict = { - **self.tf_sam_metadata_resource_common_attributes, - "values": self.tf_lambda_layer_sam_metadata_properties, - "address": f"null_resource.sam_metadata_{self.lambda_layer_name}", - "name": f"sam_metadata_{self.lambda_layer_name}", - } - - self.tf_lambda_function_resource_zip_2: dict = { - **self.tf_lambda_function_resource_common_attributes, - "values": self.tf_zip_function_properties_2, - "address": f"aws_lambda_function.{self.zip_function_name_2}", - "name": self.zip_function_name_2, - } - self.tf_lambda_function_resource_zip_2_sam_metadata: dict = { - 
**self.tf_sam_metadata_resource_common_attributes, - "values": self.tf_zip_function_sam_metadata_properties_2, - "address": f"null_resource.sam_metadata_{self.zip_function_name_2}", - "name": f"sam_metadata_{self.zip_function_name_2}", - } - self.expected_cfn_lambda_function_resource_zip_2: dict = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": self.expected_cfn_zip_function_properties_2, - "Metadata": {"SamResourceId": f"aws_lambda_function.{self.zip_function_name_2}", "SkipBuild": True}, - } - - self.tf_lambda_function_resource_zip_3: dict = { - **self.tf_lambda_function_resource_common_attributes, - "values": self.tf_zip_function_properties_3, - "address": f"aws_lambda_function.{self.zip_function_name_3}", - "name": self.zip_function_name_3, - } - self.tf_lambda_function_resource_zip_3_sam_metadata: dict = { - **self.tf_sam_metadata_resource_common_attributes, - "values": self.tf_zip_function_sam_metadata_properties_3, - "address": f"null_resource.sam_metadata_{self.zip_function_name_3}", - "name": f"sam_metadata_{self.zip_function_name_3}", - } - self.expected_cfn_lambda_function_resource_zip_3: dict = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": self.expected_cfn_zip_function_properties_3, - "Metadata": {"SamResourceId": f"aws_lambda_function.{self.zip_function_name_3}", "SkipBuild": True}, - } - - self.tf_lambda_function_resource_zip_4: dict = { - **self.tf_lambda_function_resource_common_attributes, - "values": self.tf_zip_function_properties_4, - "address": f"aws_lambda_function.{self.zip_function_name_4}", - "name": self.zip_function_name_4, - } - self.tf_lambda_function_resource_zip_4_sam_metadata: dict = { - **self.tf_sam_metadata_resource_common_attributes, - "values": self.tf_zip_function_sam_metadata_properties_4, - "address": f"null_resource.sam_metadata_{self.zip_function_name_4}", - "name": f"sam_metadata_{self.zip_function_name_4}", - } - self.expected_cfn_lambda_function_resource_zip_4: dict = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - 
"Properties": self.expected_cfn_zip_function_properties_4, - "Metadata": {"SamResourceId": f"aws_lambda_function.{self.zip_function_name_4}", "SkipBuild": True}, - } - - self.tf_image_package_type_lambda_function_resource: dict = { - **self.tf_lambda_function_resource_common_attributes, - "values": self.tf_image_package_type_function_properties, - "address": f"aws_lambda_function.{self.image_function_name}", - "name": self.image_function_name, - } - self.tf_image_package_type_lambda_function_resource_sam_metadata: dict = { - **self.tf_sam_metadata_resource_common_attributes, - "values": self.tf_image_package_type_function_sam_metadata_properties, - "address": f"null_resource.sam_metadata_{self.image_function_name}", - "name": f"sam_metadata_{self.image_function_name}", - } - self.expected_cfn_image_package_type_lambda_function_resource: dict = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": self.expected_cfn_image_package_function_properties, - "Metadata": {"SamResourceId": f"aws_lambda_function.{self.image_function_name}", "SkipBuild": True}, - } - - self.tf_lambda_function_resource_s3: dict = { - **self.tf_lambda_function_resource_common_attributes, - "values": self.tf_s3_function_properties, - "address": f"aws_lambda_function.{self.s3_function_name}", - "name": self.s3_function_name, - } - self.expected_cfn_lambda_function_resource_s3: dict = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": self.expected_cfn_s3_function_properties, - "Metadata": {"SamResourceId": f"aws_lambda_function.{self.s3_function_name}", "SkipBuild": True}, - } - self.expected_cfn_lambda_function_resource_s3_after_source_mapping: dict = { - **self.expected_cfn_lambda_function_resource_s3, - "Properties": self.expected_cfn_s3_function_properties_after_source_mapping, - } - - self.tf_lambda_function_resource_s3_2: dict = { - **self.tf_lambda_function_resource_common_attributes, - "values": self.tf_s3_function_properties_2, - "address": 
f"aws_lambda_function.{self.s3_function_name_2}", - "name": self.s3_function_name_2, - } - self.expected_cfn_lambda_function_resource_s3_2: dict = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": self.expected_cfn_s3_function_properties_2, - "Metadata": {"SamResourceId": f"aws_lambda_function.{self.s3_function_name_2}", "SkipBuild": True}, - } - self.expected_cfn_lambda_function_resource_s3_after_source_mapping_2: dict = { - **self.expected_cfn_lambda_function_resource_s3_2, - "Properties": self.expected_cfn_s3_function_properties_after_source_mapping_2, - } - - self.tf_s3_object_resource_common_attributes: dict = { - "type": "aws_s3_object", - "provider_name": AWS_PROVIDER_NAME, - } - - self.tf_s3_object_resource: dict = { - **self.tf_s3_object_resource_common_attributes, - "values": {"bucket": self.s3_bucket, "key": self.s3_key, "source": self.s3_source}, - "address": "aws_s3_object.s3_lambda_code", - "name": "s3_lambda_code", - } - - self.tf_s3_object_resource_2: dict = { - **self.tf_s3_object_resource_common_attributes, - "values": {"bucket": self.s3_bucket_2, "key": self.s3_key_2, "source": self.s3_source_2}, - "address": "aws_s3_object.s3_lambda_code_2", - "name": "s3_lambda_code_2", - } - - self.tf_json_with_root_module_only: dict = { - "planned_values": { - "root_module": { - "resources": [ - self.tf_lambda_function_resource_zip, - self.tf_lambda_function_resource_zip_2, - self.tf_image_package_type_lambda_function_resource, - ] - } - } - } - self.expected_cfn_with_root_module_only: dict = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - f"AwsLambdaFunctionMyfunc{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip, - f"AwsLambdaFunctionMyfunc2{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip_2, - f"AwsLambdaFunctionImageFunc{self.mock_logical_id_hash}": self.expected_cfn_image_package_type_lambda_function_resource, - }, - } - - self.tf_json_with_root_module_with_sam_metadata_resources: 
dict = { - "planned_values": { - "root_module": { - "resources": [ - self.tf_lambda_function_resource_zip, - self.tf_lambda_function_resource_zip_2, - self.tf_image_package_type_lambda_function_resource, - self.tf_lambda_function_resource_zip_sam_metadata, - self.tf_lambda_function_resource_zip_2_sam_metadata, - self.tf_image_package_type_lambda_function_resource_sam_metadata, - ] - } - } - } - self.tf_json_with_child_modules: dict = { - "planned_values": { - "root_module": { - "resources": [ - self.tf_lambda_function_resource_zip, - ], - "child_modules": [ - { - "resources": [ - { - **self.tf_lambda_function_resource_zip_2, - "address": f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}", - }, - ], - "child_modules": [ - { - "resources": [ - { - **self.tf_lambda_function_resource_zip_3, - "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_3}", - }, - ], - "address": "module.m1.module.m2", - }, - { - "resources": [ - { - **self.tf_lambda_function_resource_zip_4, - "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_4}", - }, - ], - "address": "module.m1.module.m3", - }, - ], - "address": "module.m1", - } - ], - } - } - } - self.tf_json_with_child_modules_with_sam_metadata_resource: dict = { - "planned_values": { - "root_module": { - "resources": [ - self.tf_lambda_function_resource_zip, - self.tf_lambda_function_resource_zip_sam_metadata, - ], - "child_modules": [ - { - "resources": [ - { - **self.tf_lambda_function_resource_zip_2, - "address": f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}", - }, - { - **self.tf_lambda_function_resource_zip_2_sam_metadata, - "address": f"module.mymodule1.null_resource.sam_metadata_{self.zip_function_name_2}", - }, - ], - "address": "module.mymodule1", - "child_modules": [ - { - "resources": [ - { - **self.tf_lambda_function_resource_zip_3, - "address": 
f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_3}", - }, - { - **self.tf_lambda_function_resource_zip_3_sam_metadata, - "address": f"module.mymodule1.module.mymodule2.null_resource.sam_metadata_{self.zip_function_name_3}", - }, - ], - "address": "module.mymodule1.module.mymodule2", - }, - { - "resources": [ - { - **self.tf_lambda_function_resource_zip_4, - "address": f"module.mymodule1.module.mymodule3.aws_lambda_function.{self.zip_function_name_4}", - }, - { - **self.tf_lambda_function_resource_zip_4_sam_metadata, - "address": f"module.mymodule1.module.mymodule3.null_resource.sam_metadata_{self.zip_function_name_4}", - }, - ], - "address": "module.mymodule1.module.mymodule3", - }, - ], - } - ], - } - } - } - self.expected_cfn_with_child_modules: dict = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - f"AwsLambdaFunctionMyfunc{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip, - f"ModuleMymodule1AwsLambdaFunctionMyfunc2{self.mock_logical_id_hash}": { - **self.expected_cfn_lambda_function_resource_zip_2, - "Metadata": { - "SamResourceId": f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}", - "SkipBuild": True, - }, - }, - f"ModuleMymodule1ModuleMymodule2AwsLambdaFunctionMyfunc3{self.mock_logical_id_hash}": { - **self.expected_cfn_lambda_function_resource_zip_3, - "Metadata": { - "SamResourceId": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_3}", - "SkipBuild": True, - }, - }, - f"ModuleMymodule1ModuleMymodule2AwsLambdaFunctionMyfunc4{self.mock_logical_id_hash}": { - **self.expected_cfn_lambda_function_resource_zip_4, - "Metadata": { - "SamResourceId": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_4}", - "SkipBuild": True, - }, - }, - }, - } - - self.tf_json_with_unsupported_provider: dict = { - "planned_values": { - "root_module": { - "resources": [ - self.tf_lambda_function_resource_zip, - 
{**self.tf_lambda_function_resource_zip, "provider": "some.other.provider"}, - self.tf_lambda_function_resource_zip_2, - ] - } - } - } - self.expected_cfn_with_unsupported_provider: dict = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - f"AwsLambdaFunctionMyfunc{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip, - f"AwsLambdaFunctionMyfunc2{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip_2, - }, - } - - self.tf_json_with_unsupported_resource_type: dict = { - "planned_values": { - "root_module": { - "resources": [ - self.tf_lambda_function_resource_zip, - {**self.tf_lambda_function_resource_zip, "type": "aws_iam_role"}, - self.tf_lambda_function_resource_zip_2, - ] - } - } - } - self.expected_cfn_with_unsupported_resource_type: dict = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - f"AwsLambdaFunctionMyfunc{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip, - f"AwsLambdaFunctionMyfunc2{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip_2, - }, - } - - self.tf_json_with_child_modules_and_s3_source_mapping: dict = { - "planned_values": { - "root_module": { - "resources": [ - self.tf_lambda_function_resource_zip, - self.tf_lambda_function_resource_s3, - self.tf_s3_object_resource, - ], - "child_modules": [ - { - "resources": [ - { - **self.tf_lambda_function_resource_zip_2, - "address": f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}", - }, - { - **self.tf_s3_object_resource_2, - "address": "module.mymodule1.aws_lambda_function.s3_lambda_code_2", - }, - ], - "child_modules": [ - { - "resources": [ - { - **self.tf_lambda_function_resource_s3_2, - "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.s3_function_name_2}", - }, - ], - "address": "module.m1.module.m2", - }, - { - "resources": [ - { - **self.tf_lambda_function_resource_zip_4, - "address": 
f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_4}", - }, - ], - "address": "module.m1.module.m2\3", - }, - ], - "address": "module.m1", - } - ], - } - } - } - self.expected_cfn_with_child_modules_and_s3_source_mapping: dict = { - "AWSTemplateFormatVersion": "2010-09-09", - "Resources": { - f"AwsLambdaFunctionMyfunc{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_zip, - f"AwsLambdaFunctionMyfuncS3{self.mock_logical_id_hash}": self.expected_cfn_lambda_function_resource_s3_after_source_mapping, - f"ModuleMymodule1AwsLambdaFunctionMyfunc2{self.mock_logical_id_hash}": { - **self.expected_cfn_lambda_function_resource_zip_2, - "Metadata": { - "SamResourceId": f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}", - "SkipBuild": True, - }, - }, - f"ModuleMymodule1ModuleMymodule2AwsLambdaFunctionMyfuncS32{self.mock_logical_id_hash}": { - **self.expected_cfn_lambda_function_resource_s3_after_source_mapping_2, - "Metadata": { - "SamResourceId": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.s3_function_name_2}", - "SkipBuild": True, - }, - }, - f"ModuleMymodule1ModuleMymodule2AwsLambdaFunctionMyfunc4{self.mock_logical_id_hash}": { - **self.expected_cfn_lambda_function_resource_zip_4, - "Metadata": { - "SamResourceId": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_4}", - "SkipBuild": True, - }, - }, - }, - } - - self.prepare_params: dict = { - "IACProjectPath": "iac/project/path", - "OutputDirPath": "output/dir/path", - "Debug": False, - "Profile": None, - "Region": None, - "SkipPrepareInfra": False, - } - - def test_get_s3_object_hash(self): - self.assertEqual( - _get_s3_object_hash(self.s3_bucket, self.s3_key), _get_s3_object_hash(self.s3_bucket, self.s3_key) - ) - self.assertEqual( - _get_s3_object_hash( - [ConstantValue("A"), ResolvedReference("aws_lambda_function.arn", "module.m1")], self.s3_key - ), - _get_s3_object_hash( - 
[ResolvedReference("aws_lambda_function.arn", "module.m1"), ConstantValue("A")], self.s3_key - ), - ) - self.assertEqual( - _get_s3_object_hash( - self.s3_bucket, [ConstantValue("A"), ResolvedReference("aws_lambda_function.arn", "module.m1")] - ), - _get_s3_object_hash( - self.s3_bucket, [ResolvedReference("aws_lambda_function.arn", "module.m1"), ConstantValue("A")] - ), - ) - self.assertEqual( - _get_s3_object_hash( - [ConstantValue("B"), ResolvedReference("aws_s3_bucket.id", "module.m2")], - [ConstantValue("A"), ResolvedReference("aws_lambda_function.arn", "module.m1")], - ), - _get_s3_object_hash( - [ResolvedReference("aws_s3_bucket.id", "module.m2"), ConstantValue("B")], - [ResolvedReference("aws_lambda_function.arn", "module.m1"), ConstantValue("A")], - ), - ) - self.assertNotEqual( - _get_s3_object_hash( - [ConstantValue("B"), ConstantValue("C"), ResolvedReference("aws_s3_bucket.id", "module.m2")], - [ConstantValue("A"), ResolvedReference("aws_lambda_function.arn", "module.m1")], - ), - _get_s3_object_hash( - [ResolvedReference("aws_s3_bucket.id", "module.m2"), ConstantValue("B")], - [ResolvedReference("aws_lambda_function.arn", "module.m1"), ConstantValue("A")], - ), - ) - self.assertNotEqual( - _get_s3_object_hash([ConstantValue("B"), ResolvedReference("aws_s3_bucket.id", "module.m2")], self.s3_key), - _get_s3_object_hash( - [ResolvedReference("aws_s3_bucket.id", "module.m2"), ConstantValue("B")], self.s3_key_2 - ), - ) - self.assertNotEqual( - _get_s3_object_hash( - self.s3_bucket, [ConstantValue("A"), ResolvedReference("aws_lambda_function.arn", "module.m1")] - ), - _get_s3_object_hash( - self.s3_bucket_2, [ResolvedReference("aws_lambda_function.arn", "module.m1"), ConstantValue("A")] - ), - ) - self.assertNotEqual( - _get_s3_object_hash(self.s3_bucket, self.s3_key), _get_s3_object_hash(self.s3_bucket_2, self.s3_key_2) - ) - self.assertNotEqual( - _get_s3_object_hash(self.s3_bucket, self.s3_key), _get_s3_object_hash(self.s3_bucket_2, self.s3_key) - ) - 
self.assertNotEqual( - _get_s3_object_hash(self.s3_bucket, self.s3_key), _get_s3_object_hash(self.s3_bucket, self.s3_key_2) - ) - - @parameterized.expand(["function_name", "handler"]) - def test_get_property_extractor(self, tf_property_name): - property_extractor = _get_property_extractor(tf_property_name) - self.assertEqual( - property_extractor(self.tf_zip_function_properties, None), self.tf_zip_function_properties[tf_property_name] - ) - - def test_build_lambda_function_environment_property(self): - expected_cfn_property = self.expected_cfn_zip_function_properties["Environment"] - translated_cfn_property = _build_lambda_function_environment_property(self.tf_zip_function_properties, None) - self.assertEqual(translated_cfn_property, expected_cfn_property) - - def test_build_lambda_function_environment_property_no_variables(self): - tf_properties = {"function_name": self.zip_function_name} - self.assertIsNone(_build_lambda_function_environment_property(tf_properties, None)) - - tf_properties = {"environment": [], "function_name": self.zip_function_name} - self.assertIsNone(_build_lambda_function_environment_property(tf_properties, None)) - - def test_build_lambda_function_code_property_zip(self): - resource_mock = Mock() - expected_cfn_property = self.expected_cfn_zip_function_properties["Code"] - translated_cfn_property = _build_code_property(self.tf_zip_function_properties, resource_mock) - resource_mock.assert_not_called() - self.assertEqual(translated_cfn_property, expected_cfn_property) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._resolve_resource_attribute") - def test_build_lambda_function_code_property_s3_with_null_bucket_only_in_planned_values( - self, - mock_resolve_resource_attribute, - ): - resource_mock = Mock() - reference_mock = Mock() - mock_resolve_resource_attribute.return_value = reference_mock - tf_s3_function_properties = { - **self.tf_function_common_properties, - "s3_key": "bucket_key", - "s3_object_version": "1", - } - 
expected_cfn_property = { - "S3Bucket": REMOTE_DUMMY_VALUE, - "S3Bucket_config_value": reference_mock, - "S3Key": "bucket_key", - "S3ObjectVersion": "1", - } - translated_cfn_property = _build_code_property(tf_s3_function_properties, resource_mock) - self.assertEqual(translated_cfn_property, expected_cfn_property) - mock_resolve_resource_attribute.assert_has_calls( - [call(resource_mock, "s3_bucket"), call(resource_mock, "s3_key"), call(resource_mock, "s3_object_version")] - ) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._resolve_resource_attribute") - def test_build_lambda_function_code_property_with_null_imageuri_only_in_planned_values( - self, - mock_resolve_resource_attribute, - ): - resource_mock = Mock() - reference_mock = Mock() - mock_resolve_resource_attribute.return_value = reference_mock - tf_image_function_properties = { - **self.tf_image_package_type_function_common_properties, - "image_config": [ - { - "command": ["cmd1", "cmd2"], - "entry_point": ["entry1", "entry2"], - "working_directory": "/working/dir/path", - } - ], - } - expected_cfn_property = { - "ImageUri": REMOTE_DUMMY_VALUE, - } - translated_cfn_property = _build_code_property(tf_image_function_properties, resource_mock) - self.assertEqual(translated_cfn_property, expected_cfn_property) - mock_resolve_resource_attribute.assert_has_calls([call(resource_mock, "image_uri")]) - - def test_build_lambda_function_code_property_s3(self): - resource_mock = Mock() - expected_cfn_property = self.expected_cfn_s3_function_properties["Code"] - translated_cfn_property = _build_code_property(self.tf_s3_function_properties, resource_mock) - resource_mock.assert_not_called() - self.assertEqual(translated_cfn_property, expected_cfn_property) - - def test_build_lambda_function_code_property_image(self): - expected_cfn_property = self.expected_cfn_image_package_function_properties["Code"] - resource_mock = Mock() - translated_cfn_property = 
_build_code_property(self.tf_image_package_type_function_properties, resource_mock) - resource_mock.assert_not_called() - self.assertEqual(translated_cfn_property, expected_cfn_property) - - def test_build_lambda_function_image_config_property(self): - expected_cfn_property = self.expected_cfn_image_package_function_properties["ImageConfig"] - translated_cfn_property = _build_lambda_function_image_config_property( - self.tf_image_package_type_function_properties, None - ) - self.assertEqual(translated_cfn_property, expected_cfn_property) - - def test_build_lambda_function_image_config_property_no_image_config(self): - tf_properties = {**self.tf_image_package_type_function_properties} - del tf_properties["image_config"] - translated_cfn_property = _build_lambda_function_image_config_property(tf_properties, None) - self.assertEqual(translated_cfn_property, None) - - def test_build_lambda_function_image_config_property_empty_image_config_list(self): - tf_properties = {**self.tf_image_package_type_function_properties} - tf_properties["image_config"] = [] - translated_cfn_property = _build_lambda_function_image_config_property(tf_properties, None) - self.assertEqual(translated_cfn_property, None) - - def test_build_lambda_layer_code_property_zip(self): - resource_mock = Mock() - expected_cfn_property = self.expected_cfn_lambda_layer_properties_zip["Content"] - translated_cfn_property = _build_code_property(self.tf_lambda_layer_properties_zip, resource_mock) - resource_mock.assert_not_called() - self.assertEqual(translated_cfn_property, expected_cfn_property) - - def test_build_lambda_layer_code_property_s3(self): - resource_mock = Mock() - expected_cfn_property = self.expected_cfn_lambda_layer_properties_s3["Content"] - translated_cfn_property = _build_code_property(self.tf_lambda_layer_properties_s3, resource_mock) - resource_mock.assert_not_called() - self.assertEqual(translated_cfn_property, expected_cfn_property) - - @parameterized.expand( - [("command", "Command"), 
("entry_point", "EntryPoint"), ("working_directory", "WorkingDirectory")] - ) - def test_build_lambda_function_image_config_property_not_all_properties_exist( - self, missing_tf_property, missing_cfn_property - ): - expected_cfn_property = {**self.expected_cfn_image_package_function_properties["ImageConfig"]} - del expected_cfn_property[missing_cfn_property] - tf_properties = {**self.tf_image_package_type_function_properties} - del tf_properties["image_config"][0][missing_tf_property] - translated_cfn_property = _build_lambda_function_image_config_property(tf_properties, None) - self.assertEqual(translated_cfn_property, expected_cfn_property) - - def test_translate_properties_function(self): - translated_cfn_properties = _translate_properties( - self.tf_zip_function_properties, AWS_LAMBDA_FUNCTION_PROPERTY_BUILDER_MAPPING, Mock() - ) - self.assertEqual(translated_cfn_properties, self.expected_cfn_zip_function_properties) - - def test_translate_properties_function_with_missing_or_none_properties(self): - translated_cfn_properties = _translate_properties( - self.tf_function_properties_with_missing_or_none, AWS_LAMBDA_FUNCTION_PROPERTY_BUILDER_MAPPING, Mock() - ) - self.assertEqual(translated_cfn_properties, self.expected_cfn_function_properties_with_missing_or_none) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._calculate_configuration_attribute_value_hash") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_s3_object_hash") - def test_map_s3_sources_to_functions( - self, mock_get_s3_object_hash, mock_calculate_configuration_attribute_value_hash - ): - mock_get_s3_object_hash.side_effect = ["hash1", "hash2"] - mock_calculate_configuration_attribute_value_hash.side_effect = ["code_hash1", "code_hash2"] - - s3_hash_to_source = {"hash1": (self.s3_source, None), "hash2": (self.s3_source_2, None)} - cfn_resources = { - "s3Function1": copy.deepcopy(self.expected_cfn_lambda_function_resource_s3), - "s3Function2": 
copy.deepcopy(self.expected_cfn_lambda_function_resource_s3_2), - "nonS3Function": self.expected_cfn_lambda_function_resource_zip, - } - - expected_cfn_resources_after_mapping_s3_sources = { - "s3Function1": self.expected_cfn_lambda_function_resource_s3_after_source_mapping, - "s3Function2": { - **self.expected_cfn_lambda_function_resource_s3_2, - "Properties": { - **self.expected_cfn_lambda_function_resource_s3_2["Properties"], - "Code": self.s3_source_2, - }, - }, - "nonS3Function": self.expected_cfn_lambda_function_resource_zip, # should be unchanged - } - functions_code_map = {} - expected_functions_code_map = { - "zip_code_hash1": [(self.expected_cfn_lambda_function_resource_s3_after_source_mapping, "s3Function1")], - "zip_code_hash2": [ - ( - { - **self.expected_cfn_lambda_function_resource_s3_2, - "Properties": { - **self.expected_cfn_lambda_function_resource_s3_2["Properties"], - "Code": self.s3_source_2, - }, - }, - "s3Function2", - ) - ], - } - _map_s3_sources_to_functions(s3_hash_to_source, cfn_resources, functions_code_map) - - s3Function1CodeBeforeMapping = self.expected_cfn_lambda_function_resource_s3["Properties"]["Code"] - s3Function2CodeBeforeMapping = self.expected_cfn_lambda_function_resource_s3_2["Properties"]["Code"] - mock_get_s3_object_hash.assert_has_calls( - [ - call(s3Function1CodeBeforeMapping["S3Bucket"], s3Function1CodeBeforeMapping["S3Key"]), - call(s3Function2CodeBeforeMapping["S3Bucket"], s3Function2CodeBeforeMapping["S3Key"]), - ] - ) - mock_calculate_configuration_attribute_value_hash.assert_has_calls( - [call(self.s3_source), call(self.s3_source_2)] - ) - self.assertEqual(cfn_resources, expected_cfn_resources_after_mapping_s3_sources) - self.assertEqual(functions_code_map, expected_functions_code_map) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._calculate_configuration_attribute_value_hash") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_s3_object_hash") - def test_map_s3_sources_to_layers(self, 
mock_get_s3_object_hash, mock_calculate_configuration_attribute_value_hash): - mock_get_s3_object_hash.side_effect = ["hash1"] - mock_calculate_configuration_attribute_value_hash.side_effect = ["code_hash1"] - - s3_hash_to_source = {"hash1": (self.s3_source, None)} - cfn_resources = { - "s3Layer": copy.deepcopy(self.expected_cfn_layer_resource_s3), - "nonS3Layer": self.expected_cfn_layer_resource_zip, - } - - expected_cfn_resources_after_mapping_s3_sources = { - "s3Layer": self.expected_cfn_s3_layer_resource_after_source_mapping, - "nonS3Layer": self.expected_cfn_layer_resource_zip, # should be unchanged - } - layers_code_map = {} - expected_layers_code_map = { - "layer_code_hash1": [(self.expected_cfn_s3_layer_resource_after_source_mapping, "s3Layer")], - } - _map_s3_sources_to_functions(s3_hash_to_source, cfn_resources, layers_code_map) - - s3LayerCodeBeforeMapping = self.expected_cfn_layer_resource_s3["Properties"]["Content"] - mock_get_s3_object_hash.assert_has_calls( - [ - call(s3LayerCodeBeforeMapping["S3Bucket"], s3LayerCodeBeforeMapping["S3Key"]), - ] - ) - mock_calculate_configuration_attribute_value_hash.assert_has_calls([call(self.s3_source)]) - self.assertEqual(layers_code_map, expected_layers_code_map) - self.assertEqual(cfn_resources, expected_cfn_resources_after_mapping_s3_sources) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._calculate_configuration_attribute_value_hash") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_s3_object_hash") - def test_map_s3_sources_to_functions_that_does_not_contain_constant_value_filename( - self, mock_get_s3_object_hash, mock_calculate_configuration_attribute_value_hash - ): - mock_get_s3_object_hash.side_effect = ["hash1"] - mock_calculate_configuration_attribute_value_hash.side_effect = ["code_hash1"] - mock_reference = Mock() - s3_hash_to_source = {"hash1": (None, mock_reference)} - cfn_resources = { - "s3Function1": copy.deepcopy(self.expected_cfn_lambda_function_resource_s3), - 
"nonS3Function": self.expected_cfn_lambda_function_resource_zip, - } - - expected_cfn_resources_after_mapping_s3_sources = { - "s3Function1": copy.deepcopy(self.expected_cfn_lambda_function_resource_s3), - "nonS3Function": self.expected_cfn_lambda_function_resource_zip, # should be unchanged - } - functions_code_map = {} - expected_functions_code_map = { - "zip_code_hash1": [(copy.deepcopy(self.expected_cfn_lambda_function_resource_s3), "s3Function1")], - } - _map_s3_sources_to_functions(s3_hash_to_source, cfn_resources, functions_code_map) - - s3Function1CodeBeforeMapping = self.expected_cfn_lambda_function_resource_s3["Properties"]["Code"] - mock_get_s3_object_hash.assert_has_calls( - [ - call(s3Function1CodeBeforeMapping["S3Bucket"], s3Function1CodeBeforeMapping["S3Key"]), - ] - ) - mock_calculate_configuration_attribute_value_hash.assert_has_calls([call(mock_reference)]) - self.assertEqual(cfn_resources, expected_cfn_resources_after_mapping_s3_sources) - self.assertEqual(functions_code_map, expected_functions_code_map) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._build_module") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_configuration_address") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._link_lambda_functions_to_layers") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._enrich_resources_and_generate_makefile") - def test_translate_to_cfn_empty( - self, - mock_enrich_resources_and_generate_makefile, - mock_link_lambda_functions_to_layers, - mock_get_configuration_address, - mock_build_module, - ): - expected_empty_cfn_dict = {"AWSTemplateFormatVersion": "2010-09-09", "Resources": {}} - - tf_json_empty = {} - tf_json_empty_planned_values = {"planned_values": {}} - tf_json_empty_root_module = {"planned_values": {"root_module": {}}} - tf_json_no_child_modules_and_no_resources = {"planned_values": {"root_module": {"resources": []}}} - - tf_jsons = [ - tf_json_empty, - tf_json_empty_planned_values, - 
tf_json_empty_root_module, - tf_json_no_child_modules_and_no_resources, - ] - - for tf_json in tf_jsons: - translated_cfn_dict = _translate_to_cfn(tf_json, self.output_dir, self.project_root) - self.assertEqual(translated_cfn_dict, expected_empty_cfn_dict) - mock_enrich_resources_and_generate_makefile.assert_not_called() - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._check_dummy_remote_values") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._build_module") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_configuration_address") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._link_lambda_functions_to_layers") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._enrich_resources_and_generate_makefile") - @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") - def test_translate_to_cfn_with_root_module_only( - self, - checksum_mock, - mock_enrich_resources_and_generate_makefile, - mock_link_lambda_functions_to_layers, - mock_get_configuration_address, - mock_build_module, - mock_check_dummy_remote_values, - ): - root_module = MagicMock() - root_module.get.return_value = "module.m1" - resources_mock = MagicMock() - root_module.resources = resources_mock - child_modules = MagicMock() - child_modules.__getitem__.return_value = Mock() - child_modules.__contains__.return_value = True - child_modules.get.return_value = root_module - root_module.child_modules = child_modules - config_resource = Mock() - resources_mock.__getitem__.return_value = config_resource - resources_mock.__contains__.return_value = True - mock_build_module.return_value = root_module - checksum_mock.return_value = self.mock_logical_id_hash - translated_cfn_dict = _translate_to_cfn(self.tf_json_with_root_module_only, self.output_dir, self.project_root) - self.assertEqual(translated_cfn_dict, self.expected_cfn_with_root_module_only) - mock_enrich_resources_and_generate_makefile.assert_not_called() - lambda_functions = dict( - 
filter( - lambda resource: resource[1].get("Type") == "AWS::Lambda::Function", - translated_cfn_dict.get("Resources").items(), - ) - ) - expected_arguments_in_call = [ - {mock_get_configuration_address(): config_resource}, - {mock_get_configuration_address(): [val for _, val in lambda_functions.items()]}, - {}, - ] - mock_link_lambda_functions_to_layers.assert_called_once_with(*expected_arguments_in_call) - mock_get_configuration_address.assert_called() - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._resolve_resource_attribute") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._check_dummy_remote_values") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._build_module") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_configuration_address") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._link_lambda_functions_to_layers") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._enrich_resources_and_generate_makefile") - @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") - def test_translate_to_cfn_with_s3_object_which_linked_to_uncreated_bucket( - self, - checksum_mock, - mock_enrich_resources_and_generate_makefile, - mock_link_lambda_functions_to_layers, - mock_get_configuration_address, - mock_build_module, - mock_check_dummy_remote_values, - mock_resolve_resource_attribute, - ): - root_module = MagicMock() - root_module.get.return_value = "module.m1" - resources_mock = MagicMock() - root_module.resources = resources_mock - child_modules = MagicMock() - child_modules.__getitem__.return_value = Mock() - child_modules.__contains__.return_value = True - child_modules.get.return_value = root_module - root_module.child_modules = child_modules - resource_mock = Mock() - resources_mock.__getitem__.return_value = resource_mock - resources_mock.__contains__.return_value = True - mock_build_module.return_value = root_module - checksum_mock.return_value = self.mock_logical_id_hash - - 
tf_json_with_root_module_contains_s3_object: dict = { - "planned_values": { - "root_module": { - "resources": [ - { - "type": "aws_s3_object", - "provider_name": AWS_PROVIDER_NAME, - "values": {"source": self.s3_source}, - "address": "aws_lambda_function.code_object", - "name": "code_object", - } - ] - } - } - } - - _translate_to_cfn(tf_json_with_root_module_contains_s3_object, self.output_dir, self.project_root) - mock_resolve_resource_attribute.assert_has_calls([call(resource_mock, "bucket"), call(resource_mock, "key")]) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._check_dummy_remote_values") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._build_module") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_configuration_address") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._link_lambda_functions_to_layers") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._enrich_resources_and_generate_makefile") - @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") - def test_translate_to_cfn_with_child_modules( - self, - checksum_mock, - mock_enrich_resources_and_generate_makefile, - mock_link_lambda_functions_to_layers, - mock_get_configuration_address, - mock_build_module, - mock_check_dummy_remote_values, - ): - root_module = MagicMock() - root_module.get.return_value = "module.m1" - resources_mock = MagicMock() - root_module.resources = resources_mock - child_modules = MagicMock() - child_modules.__getitem__.return_value = Mock() - child_modules.__contains__.return_value = True - child_modules.get.return_value = root_module - root_module.child_modules = child_modules - conf_resource = Mock() - resources_mock.__getitem__.return_value = conf_resource - resources_mock.__contains__.return_value = True - mock_build_module.return_value = root_module - checksum_mock.return_value = self.mock_logical_id_hash - translated_cfn_dict = _translate_to_cfn(self.tf_json_with_child_modules, self.output_dir, 
self.project_root) - self.assertEqual(translated_cfn_dict, self.expected_cfn_with_child_modules) - mock_enrich_resources_and_generate_makefile.assert_not_called() - lambda_functions = dict( - filter( - lambda resource: resource[1].get("Type") == "AWS::Lambda::Function", - translated_cfn_dict.get("Resources").items(), - ) - ) - expected_arguments_in_call = [ - {mock_get_configuration_address(): conf_resource}, - {mock_get_configuration_address(): [val for _, val in lambda_functions.items()]}, - {}, - ] - mock_link_lambda_functions_to_layers.assert_called_once_with(*expected_arguments_in_call) - mock_get_configuration_address.assert_called() - mock_check_dummy_remote_values.assert_called_once_with(translated_cfn_dict.get("Resources")) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook.build_cfn_logical_id") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._add_lambda_resource_code_path_to_code_map") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._check_dummy_remote_values") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._build_module") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_configuration_address") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._link_lambda_functions_to_layers") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._enrich_resources_and_generate_makefile") - @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") - def test_translate_to_cfn_with_root_module_with_sam_metadata_resource( - self, - checksum_mock, - mock_enrich_resources_and_generate_makefile, - mock_link_lambda_functions_to_layers, - mock_get_configuration_address, - mock_build_module, - mock_check_dummy_remote_values, - mock_add_lambda_resource_code_path_to_code_map, - mock_build_cfn_logical_id, - ): - root_module = MagicMock() - root_module.get.return_value = "module.m1" - resources_mock = MagicMock() - root_module.resources = resources_mock - child_modules = MagicMock() - 
child_modules.__getitem__.return_value = Mock() - child_modules.__contains__.return_value = True - child_modules.get.return_value = root_module - root_module.child_modules = child_modules - resource_mock = Mock() - resources_mock.__getitem__.return_value = resource_mock - resources_mock.__contains__.return_value = True - mock_build_module.return_value = root_module - checksum_mock.return_value = self.mock_logical_id_hash - mock_build_cfn_logical_id.side_effect = ["logical_id1", "logical_id2", "logical_id3"] - translated_cfn_dict = _translate_to_cfn( - self.tf_json_with_root_module_with_sam_metadata_resources, self.output_dir, self.project_root - ) - - expected_arguments_in_call = ( - [ - SamMetadataResource( - current_module_address=None, - resource=self.tf_lambda_function_resource_zip_sam_metadata, - config_resource=resource_mock, - ), - SamMetadataResource( - current_module_address=None, - resource=self.tf_lambda_function_resource_zip_2_sam_metadata, - config_resource=resource_mock, - ), - SamMetadataResource( - current_module_address=None, - resource=self.tf_image_package_type_lambda_function_resource_sam_metadata, - config_resource=resource_mock, - ), - ], - translated_cfn_dict["Resources"], - self.output_dir, - self.project_root, - {}, - ) - - mock_enrich_resources_and_generate_makefile.assert_called_once_with(*expected_arguments_in_call) - mock_add_lambda_resource_code_path_to_code_map.assert_has_calls( - [ - call( - resource_mock, - "zip", - {}, - "logical_id1", - "file.zip", - "filename", - translated_cfn_dict["Resources"]["logical_id1"], - ), - call( - resource_mock, - "zip", - {}, - "logical_id2", - "file2.zip", - "filename", - translated_cfn_dict["Resources"]["logical_id2"], - ), - call( - resource_mock, - "image", - {}, - "logical_id3", - "image/uri:tag", - "image_uri", - translated_cfn_dict["Resources"]["logical_id3"], - ), - ] - ) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._add_lambda_resource_code_path_to_code_map") - 
@patch("samcli.hook_packages.terraform.hooks.prepare.hook._check_dummy_remote_values") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._build_module") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_configuration_address") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._link_lambda_functions_to_layers") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._enrich_resources_and_generate_makefile") - @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") - def test_translate_to_cfn_with_child_modules_with_sam_metadata_resource( - self, - checksum_mock, - mock_enrich_resources_and_generate_makefile, - mock_link_lambda_functions_to_layers, - mock_get_configuration_address, - mock_build_module, - mock_check_dummy_remote_values, - mock_add_lambda_resource_code_path_to_code_map, - ): - root_module = MagicMock() - root_module.get.return_value = "module.m1" - resources_mock = MagicMock() - root_module.resources = resources_mock - child_modules = MagicMock() - child_modules.__getitem__.return_value = Mock() - child_modules.__contains__.return_value = True - child_modules.get.return_value = root_module - root_module.child_modules = child_modules - resource_mock = Mock() - resources_mock.__getitem__.return_value = resource_mock - resources_mock.__contains__.return_value = True - mock_build_module.return_value = root_module - checksum_mock.return_value = self.mock_logical_id_hash - translated_cfn_dict = _translate_to_cfn( - self.tf_json_with_child_modules_with_sam_metadata_resource, self.output_dir, self.project_root - ) - - expected_arguments_in_call = ( - [ - SamMetadataResource( - current_module_address=None, - resource=self.tf_lambda_function_resource_zip_sam_metadata, - config_resource=resource_mock, - ), - SamMetadataResource( - current_module_address="module.mymodule1", - resource={ - **self.tf_lambda_function_resource_zip_2_sam_metadata, - "address": 
f"module.mymodule1.null_resource.sam_metadata_{self.zip_function_name_2}", - }, - config_resource=resource_mock, - ), - SamMetadataResource( - current_module_address="module.mymodule1.module.mymodule2", - resource={ - **self.tf_lambda_function_resource_zip_3_sam_metadata, - "address": f"module.mymodule1.module.mymodule2.null_resource.sam_metadata_{self.zip_function_name_3}", - }, - config_resource=resource_mock, - ), - SamMetadataResource( - current_module_address="module.mymodule1.module.mymodule3", - resource={ - **self.tf_lambda_function_resource_zip_4_sam_metadata, - "address": f"module.mymodule1.module.mymodule3.null_resource.sam_metadata_{self.zip_function_name_4}", - }, - config_resource=resource_mock, - ), - ], - translated_cfn_dict["Resources"], - self.output_dir, - self.project_root, - {}, - ) - - mock_enrich_resources_and_generate_makefile.assert_called_once_with(*expected_arguments_in_call) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._check_dummy_remote_values") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._build_module") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_configuration_address") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._link_lambda_functions_to_layers") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._enrich_resources_and_generate_makefile") - @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") - def test_translate_to_cfn_with_unsupported_provider( - self, - checksum_mock, - mock_enrich_resources_and_generate_makefile, - mock_link_lambda_functions_to_layers, - mock_get_configuration_address, - mock_build_module, - mock_check_dummy_remote_values, - ): - root_module = MagicMock() - root_module.get.return_value = "module.m1" - resources_mock = MagicMock() - root_module.resources = resources_mock - child_modules = MagicMock() - child_modules.__getitem__.return_value = Mock() - child_modules.__contains__.return_value = True - 
child_modules.get.return_value = root_module - root_module.child_modules = child_modules - resources_mock.__getitem__.return_value = Mock() - resources_mock.__contains__.return_value = True - mock_build_module.return_value = root_module - checksum_mock.return_value = self.mock_logical_id_hash - translated_cfn_dict = _translate_to_cfn( - self.tf_json_with_unsupported_provider, self.output_dir, self.project_root - ) - self.assertEqual(translated_cfn_dict, self.expected_cfn_with_unsupported_provider) - mock_enrich_resources_and_generate_makefile.assert_not_called() - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._check_dummy_remote_values") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._build_module") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_configuration_address") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._link_lambda_functions_to_layers") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._enrich_resources_and_generate_makefile") - @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") - def test_translate_to_cfn_with_unsupported_resource_type( - self, - checksum_mock, - mock_enrich_resources_and_generate_makefile, - mock_link_lambda_functions_to_layers, - mock_get_configuration_address, - mock_build_module, - mock_check_dummy_remote_values, - ): - root_module = MagicMock() - root_module.get.return_value = "module.m1" - resources_mock = MagicMock() - root_module.resources = resources_mock - child_modules = MagicMock() - child_modules.__getitem__.return_value = Mock() - child_modules.__contains__.return_value = True - child_modules.get.return_value = root_module - root_module.child_modules = child_modules - resources_mock.__getitem__.return_value = Mock() - resources_mock.__contains__.return_value = True - mock_build_module.return_value = root_module - checksum_mock.return_value = self.mock_logical_id_hash - translated_cfn_dict = _translate_to_cfn( - 
self.tf_json_with_unsupported_resource_type, self.output_dir, self.project_root - ) - self.assertEqual(translated_cfn_dict, self.expected_cfn_with_unsupported_resource_type) - mock_enrich_resources_and_generate_makefile.assert_not_called() - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._add_lambda_resource_code_path_to_code_map") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._check_dummy_remote_values") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._build_module") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_configuration_address") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._link_lambda_functions_to_layers") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._enrich_resources_and_generate_makefile") - @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") - def test_translate_to_cfn_with_mapping_s3_source_to_function( - self, - checksum_mock, - mock_enrich_resources_and_generate_makefile, - mock_link_lambda_functions_to_layers, - mock_get_configuration_address, - mock_build_module, - mock_check_dummy_remote_values, - mock_add_lambda_resource_code_path_to_code_map, - ): - root_module = MagicMock() - root_module.get.return_value = "module.m1" - resources_mock = MagicMock() - root_module.resources = resources_mock - child_modules = MagicMock() - child_modules.__getitem__.return_value = Mock() - child_modules.__contains__.return_value = True - child_modules.get.return_value = root_module - root_module.child_modules = child_modules - resources_mock.__getitem__.return_value = Mock() - resources_mock.__contains__.return_value = True - mock_build_module.return_value = root_module - checksum_mock.return_value = self.mock_logical_id_hash - translated_cfn_dict = _translate_to_cfn( - self.tf_json_with_child_modules_and_s3_source_mapping, self.output_dir, self.project_root - ) - self.assertEqual(translated_cfn_dict, self.expected_cfn_with_child_modules_and_s3_source_mapping) - 
mock_enrich_resources_and_generate_makefile.assert_not_called() - - @parameterized.expand( - [ - ("expected_cfn_lambda_function_resource_zip", "tf_lambda_function_resource_zip_sam_metadata", "Zip"), - ( - "expected_cfn_image_package_type_lambda_function_resource", - "tf_image_package_type_lambda_function_resource_sam_metadata", - "Image", - ), - ] - ) - def test_validate_referenced_resource_matches_sam_metadata_type_valid_types( - self, cfn_resource_name, sam_metadata_attributes_name, expected_package_type - ): - cfn_resource = self.__getattribute__(cfn_resource_name) - sam_metadata_attributes = self.__getattribute__(sam_metadata_attributes_name).get("values").get("triggers") - try: - _validate_referenced_resource_matches_sam_metadata_type( - cfn_resource, sam_metadata_attributes, "resource_address", expected_package_type - ) - except InvalidSamMetadataPropertiesException: - self.fail("The testing sam metadata resource type should be valid.") - - @parameterized.expand( - [ - ( - "expected_cfn_lambda_function_resource_zip", - "tf_image_package_type_lambda_function_resource_sam_metadata", - "Image", - "IMAGE_LAMBDA_FUNCTION", - ), - ( - "expected_cfn_image_package_type_lambda_function_resource", - "tf_lambda_function_resource_zip_sam_metadata", - "Zip", - "ZIP_LAMBDA_FUNCTION", - ), - ] - ) - def test_validate_referenced_resource_matches_sam_metadata_type_invalid_types( - self, cfn_resource_name, sam_metadata_attributes_name, expected_package_type, metadata_source_type - ): - cfn_resource = self.__getattribute__(cfn_resource_name) - sam_metadata_attributes = self.__getattribute__(sam_metadata_attributes_name).get("values").get("triggers") - with self.assertRaises( - InvalidSamMetadataPropertiesException, - msg=f"The sam metadata resource resource_address is referring to a resource that does not " - f"match the resource type {metadata_source_type}.", - ): - _validate_referenced_resource_matches_sam_metadata_type( - cfn_resource, sam_metadata_attributes, 
"resource_address", expected_package_type - ) - - def test_validate_referenced_layer_resource_matches_sam_metadata_type_valid_types(self): - cfn_resource = self.expected_cfn_layer_resource_zip - sam_metadata_attributes = self.tf_lambda_layer_resource_zip_sam_metadata.get("values").get("triggers") - try: - _validate_referenced_resource_layer_matches_metadata_type( - cfn_resource, sam_metadata_attributes, "resource_address" - ) - except InvalidSamMetadataPropertiesException: - self.fail("The testing sam metadata resource type should be valid.") - - @parameterized.expand( - [ - ( - "expected_cfn_lambda_function_resource_zip", - "tf_lambda_layer_resource_zip_sam_metadata", - ), - ( - "expected_cfn_image_package_type_lambda_function_resource", - "tf_lambda_layer_resource_zip_sam_metadata", - ), - ] - ) - def test_validate_referenced_resource_layer_matches_sam_metadata_type_invalid_types( - self, cfn_resource_name, sam_metadata_attributes_name - ): - cfn_resource = self.__getattribute__(cfn_resource_name) - sam_metadata_attributes = self.__getattribute__(sam_metadata_attributes_name).get("values").get("triggers") - with self.assertRaises( - InvalidSamMetadataPropertiesException, - msg=f"The sam metadata resource resource_address is referring to a resource that does not " - f"match the resource type AWS::Lambda::LayerVersion.", - ): - _validate_referenced_resource_layer_matches_metadata_type( - cfn_resource, sam_metadata_attributes, "resource_address" - ) - - @parameterized.expand( - [ - ("/src/code/path", None, "/src/code/path", True), - ("src/code/path", None, "src/code/path", False), - ('"/src/code/path"', None, "/src/code/path", True), - ('"src/code/path"', None, "src/code/path", False), - ('{"path":"/src/code/path"}', "path", "/src/code/path", True), - ('{"path":"src/code/path"}', "path", "src/code/path", False), - ({"path": "/src/code/path"}, "path", "/src/code/path", True), - ({"path": "src/code/path"}, "path", "src/code/path", False), - ('["/src/code/path"]', 
"None", "/src/code/path", True), - ('["src/code/path"]', "None", "src/code/path", False), - (["/src/code/path"], "None", "/src/code/path", True), - (["src/code/path"], "None", "src/code/path", False), - ('["/src/code/path", "/src/code/path2"]', "None", "/src/code/path", True), - ('["src/code/path", "src/code/path2"]', "None", "src/code/path", False), - (["/src/code/path", "/src/code/path2"], "None", "/src/code/path", True), - (["src/code/path", "/src/code/path2"], "None", "src/code/path", False), - ] - ) - @patch("samcli.hook_packages.terraform.hooks.prepare.hook.os") - def test_get_lambda_function_source_code_path_valid_metadata_resource( - self, original_source_code, source_code_property, expected_path, is_abs, mock_os - ): - mock_path = Mock() - mock_os.path = mock_path - mock_isabs = Mock() - mock_isabs.return_value = is_abs - mock_path.isabs = mock_isabs - - mock_exists = Mock() - mock_exists.return_value = True - mock_path.exists = mock_exists - - if not is_abs: - mock_normpath = Mock() - mock_normpath.return_value = f"/project/root/dir/{expected_path}" - expected_path = f"/project/root/dir/{expected_path}" - mock_path.normpath = mock_normpath - mock_join = Mock() - mock_join.return_value = expected_path - mock_path.join = mock_join - sam_metadata_attributes = { - **self.tf_zip_function_sam_metadata_properties, - "original_source_code": original_source_code, - } - if source_code_property: - sam_metadata_attributes = { - **sam_metadata_attributes, - "source_code_property": source_code_property, - } - sam_resource = {"values": {"triggers": sam_metadata_attributes}} - path = _get_source_code_path( - sam_resource, - "resource_address", - "/project/root/dir", - "original_source_code", - "source_code_property", - "source code", - ) - self.assertEqual(path, expected_path) - - @parameterized.expand( - [ - ( - "/src/code/path", - None, - False, - "The sam metadata resource resource_address should contain a valid lambda function source code path", - ), - ( - None, - 
None, - True, - "The sam metadata resource resource_address should contain the lambda function source code in " - "property original_source_code", - ), - ( - '{"path":"/src/code/path"}', - None, - True, - "The sam metadata resource resource_address should contain the lambda function source code property in " - "property source_code_property as the original_source_code value is an object", - ), - ( - {"path": "/src/code/path"}, - None, - True, - "The sam metadata resource resource_address should contain the lambda function source code property " - "in property source_code_property as the original_source_code value is an object", - ), - ( - '{"path":"/src/code/path"}', - "path1", - True, - "The sam metadata resource resource_address should contain a valid lambda function source code " - "property in property source_code_property as the original_source_code value is an object", - ), - ( - {"path": "/src/code/path"}, - "path1", - True, - "The sam metadata resource resource_address should contain a valid lambda function source code " - "property in property source_code_property as the original_source_code value is an object", - ), - ( - "[]", - None, - True, - "The sam metadata resource resource_address should contain the lambda function source code in " - "property original_source_code, and it should not be an empty list", - ), - ( - [], - None, - True, - "The sam metadata resource resource_address should contain the lambda function source code in " - "property original_source_code, and it should not be an empty list", - ), - ( - "[null]", - None, - True, - "The sam metadata resource resource_address should contain a valid lambda function source code in " - "property original_source_code", - ), - ( - [None], - None, - True, - "The sam metadata resource resource_address should contain a valid lambda function source code in " - "property original_source_code", - ), - ] - ) - @patch("samcli.hook_packages.terraform.hooks.prepare.hook.os") - def 
test_get_lambda_function_source_code_path_invalid_metadata_resources( - self, original_source_code, source_code_property, does_exist, exception_message, mock_os - ): - mock_path = Mock() - mock_os.path = mock_path - mock_isabs = Mock() - mock_isabs.return_value = True - mock_path.isabs = mock_isabs - - mock_exists = Mock() - mock_exists.return_value = does_exist - mock_path.exists = mock_exists - - sam_metadata_attributes = { - **self.tf_zip_function_sam_metadata_properties, - "original_source_code": original_source_code, - } - if source_code_property: - sam_metadata_attributes = { - **sam_metadata_attributes, - "source_code_property": source_code_property, - } - with self.assertRaises(InvalidSamMetadataPropertiesException, msg=exception_message): - _get_source_code_path( - sam_metadata_attributes, - "resource_address", - "/project/root/dir", - "original_source_code", - "source_code_property", - "source code", - ) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook.build_cfn_logical_id") - def test_get_relevant_cfn_resource(self, mock_build_cfn_logical_id): - sam_metadata_resource = SamMetadataResource( - current_module_address="module.mymodule1", - resource={ - **self.tf_lambda_function_resource_zip_2_sam_metadata, - "address": f"module.mymodule1.null_resource.sam_metadata_{self.zip_function_name_2}", - }, - config_resource=TFResource("", "", None, {}), - ) - cfn_resources = { - "ABCDEFG": self.expected_cfn_lambda_function_resource_zip_2, - "logical_id_3": self.expected_cfn_lambda_function_resource_zip_3, - } - mock_build_cfn_logical_id.side_effect = ["ABCDEFG"] - resources_list = _get_relevant_cfn_resource(sam_metadata_resource, cfn_resources, {}) - self.assertEqual(len(resources_list), 1) - relevant_resource, return_logical_id = resources_list[0] - - mock_build_cfn_logical_id.assert_called_once_with( - f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}" - ) - self.assertEqual(relevant_resource, 
self.expected_cfn_lambda_function_resource_zip_2) - self.assertEqual(return_logical_id, "ABCDEFG") - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._calculate_configuration_attribute_value_hash") - def test_get_relevant_cfn_resource_for_metadata_does_not_contain_resource_name( - self, mock_calculate_configuration_attribute_value_hash - ): - sam_metadata_resource = SamMetadataResource( - current_module_address="module.mymodule1", - resource={ - "type": "null_resource", - "provider_name": NULL_RESOURCE_PROVIDER_NAME, - "values": { - "triggers": { - "built_output_path": "builds/func2.zip", - "original_source_code": "./src/lambda_func2", - "resource_type": "ZIP_LAMBDA_FUNCTION", - } - }, - "name": f"sam_metadata_{self.zip_function_name_2}", - "address": f"module.mymodule1.null_resource.sam_metadata_{self.zip_function_name_2}", - }, - config_resource=TFResource("", "", None, {}), - ) - cfn_resources = { - "ABCDEFG": self.expected_cfn_lambda_function_resource_zip_2, - "logical_id_3": self.expected_cfn_lambda_function_resource_zip_3, - } - mock_calculate_configuration_attribute_value_hash.side_effect = ["code_hash"] - lambda_resources_code_map = {"zip_code_hash": [(self.expected_cfn_lambda_function_resource_zip_2, "ABCDEFG")]} - resources_list = _get_relevant_cfn_resource(sam_metadata_resource, cfn_resources, lambda_resources_code_map) - self.assertEqual(len(resources_list), 1) - relevant_resource, return_logical_id = resources_list[0] - - self.assertEqual(relevant_resource, self.expected_cfn_lambda_function_resource_zip_2) - self.assertEqual(return_logical_id, "ABCDEFG") - mock_calculate_configuration_attribute_value_hash.assert_has_calls([call("builds/func2.zip")]) - - @parameterized.expand( - [ - ( - None, - "module.mymodule1", - ["ABCDEFG"], - "AWS SAM CLI expects the sam metadata resource null_resource.sam_metadata_func2 to contain a resource name " - "that will be enriched using this metadata resource", - ), - ( - "resource_name_value", - None, - 
["Not_valid"], - "There is no resource found that match the provided resource name null_resource.sam_metadata_func2", - ), - ( - "resource_name_value", - "module.mymodule1", - ["Not_valid", "Not_valid"], - "There is no resource found that match the provided resource name null_resource.sam_metadata_func2", - ), - ] - ) - @patch("samcli.hook_packages.terraform.hooks.prepare.hook.build_cfn_logical_id") - def test_get_relevant_cfn_resource_exceptions( - self, resource_name, module_name, build_logical_id_output, exception_message, mock_build_cfn_logical_id - ): - sam_metadata_resource = SamMetadataResource( - current_module_address=module_name, - resource={ - **self.tf_sam_metadata_resource_common_attributes, - "values": { - "triggers": { - "built_output_path": "builds/func2.zip", - "original_source_code": "./src/lambda_func2", - "resource_name": resource_name, - "resource_type": "ZIP_LAMBDA_FUNCTION", - }, - }, - "address": "null_resource.sam_metadata_func2", - "name": "sam_metadata_func2", - }, - config_resource=TFResource("", "", None, {}), - ) - cfn_resources = { - "ABCDEFG": self.expected_cfn_lambda_function_resource_zip_2, - "logical_id_3": self.expected_cfn_lambda_function_resource_zip_3, - } - mock_build_cfn_logical_id.side_effect = build_logical_id_output - with self.assertRaises(InvalidSamMetadataPropertiesException, msg=exception_message): - _get_relevant_cfn_resource(sam_metadata_resource, cfn_resources, {}) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_python_command_name") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._generate_makefile") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._generate_makefile_rule_for_lambda_resource") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_relevant_cfn_resource") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._validate_referenced_resource_matches_sam_metadata_type") - 
@patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_source_code_path") - def test_enrich_resources_and_generate_makefile_zip_functions( - self, - mock_get_lambda_function_source_code_path, - mock_validate_referenced_resource_matches_sam_metadata_type, - mock_get_relevant_cfn_resource, - mock_generate_makefile_rule_for_lambda_resource, - mock_generate_makefile, - mock_get_python_command_name, - ): - mock_get_python_command_name.return_value = "python" - - mock_get_lambda_function_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] - zip_function_1 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_function_common_properties, - "Code": "file.zip", - }, - "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, - } - zip_function_2 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_function_common_properties, - "Code": "file2.zip", - }, - "Metadata": {"SamResourceId": f"aws_lambda_function.func2", "SkipBuild": True}, - } - cfn_resources = { - "logical_id1": zip_function_1, - "logical_id2": zip_function_2, - } - mock_get_relevant_cfn_resource.side_effect = [ - [(zip_function_1, "logical_id1")], - [(zip_function_2, "logical_id2")], - ] - sam_metadata_resources = [ - SamMetadataResource( - current_module_address=None, - resource=self.tf_lambda_function_resource_zip_sam_metadata, - config_resource=TFResource("", "", None, {}), - ), - SamMetadataResource( - current_module_address=None, - resource=self.tf_lambda_function_resource_zip_2_sam_metadata, - config_resource=TFResource("", "", None, {}), - ), - ] - - expected_zip_function_1 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_function_common_properties, - "Code": "src/code/path1", - }, - "Metadata": { - "SamResourceId": "aws_lambda_function.func1", - "SkipBuild": False, - "BuildMethod": "makefile", - "ContextPath": "/output/dir", - "WorkingDirectory": "/terraform/project/root", - 
"ProjectRootDirectory": "/terraform/project/root", - }, - } - expected_zip_function_2 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_function_common_properties, - "Code": "src/code/path2", - }, - "Metadata": { - "SamResourceId": "aws_lambda_function.func2", - "SkipBuild": False, - "BuildMethod": "makefile", - "ContextPath": "/output/dir", - "WorkingDirectory": "/terraform/project/root", - "ProjectRootDirectory": "/terraform/project/root", - }, - } - - expected_cfn_resources = { - "logical_id1": expected_zip_function_1, - "logical_id2": expected_zip_function_2, - } - - makefile_rules = [Mock() for _ in sam_metadata_resources] - mock_generate_makefile_rule_for_lambda_resource.side_effect = makefile_rules - - _enrich_resources_and_generate_makefile( - sam_metadata_resources, cfn_resources, "/output/dir", "/terraform/project/root", {} - ) - self.assertEqual(cfn_resources, expected_cfn_resources) - - mock_generate_makefile_rule_for_lambda_resource.assert_has_calls( - [ - call( - sam_metadata_resources[i], - list(expected_cfn_resources.keys())[i], - "/terraform/project/root", - "python", - "/output/dir", - ) - for i in range(len(sam_metadata_resources)) - ] - ) - - mock_generate_makefile.assert_called_once_with(makefile_rules, "/output/dir") - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_python_command_name") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._generate_makefile") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._generate_makefile_rule_for_lambda_resource") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_relevant_cfn_resource") - @patch( - "samcli.hook_packages.terraform.hooks.prepare.hook._validate_referenced_resource_layer_matches_metadata_type" - ) - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_source_code_path") - def test_enrich_resources_and_generate_makefile_layers( - self, - mock_get_lambda_layer_source_code_path, - 
mock_validate_referenced_resource_layer_matches_sam_metadata_type, - mock_get_relevant_cfn_resource, - mock_generate_makefile_rule_for_lambda_resource, - mock_generate_makefile, - mock_get_python_command_name, - ): - mock_get_python_command_name.return_value = "python" - mock_get_lambda_layer_source_code_path.side_effect = ["src/code/path1"] - lambda_layer = { - "Type": AWS_LAMBDA_LAYERVERSION, - "Properties": { - **self.expected_cfn_layer_common_properties, - "Content": "file.zip", - }, - "Metadata": {"SamResourceId": f"aws_lambda_layer_version.{self.lambda_layer_name}", "SkipBuild": True}, - } - cfn_resources = { - "logical_id1": lambda_layer, - } - mock_get_relevant_cfn_resource.side_effect = [ - [(lambda_layer, "logical_id1")], - ] - sam_metadata_resources = [ - SamMetadataResource( - current_module_address=None, - resource=self.tf_lambda_layer_resource_zip_sam_metadata, - config_resource=TFResource("", "", None, {}), - ), - ] - - expected_layer = { - "Type": AWS_LAMBDA_LAYERVERSION, - "Properties": { - **self.expected_cfn_layer_common_properties, - "Content": "src/code/path1", - }, - "Metadata": { - "SamResourceId": f"aws_lambda_layer_version.{self.lambda_layer_name}", - "SkipBuild": False, - "BuildMethod": "makefile", - "ContextPath": "/output/dir", - "WorkingDirectory": "/terraform/project/root", - "ProjectRootDirectory": "/terraform/project/root", - }, - } - - expected_cfn_resources = { - "logical_id1": expected_layer, - } - - makefile_rules = [Mock() for _ in sam_metadata_resources] - mock_generate_makefile_rule_for_lambda_resource.side_effect = makefile_rules - - _enrich_resources_and_generate_makefile( - sam_metadata_resources, cfn_resources, "/output/dir", "/terraform/project/root", {} - ) - self.assertEqual(cfn_resources, expected_cfn_resources) - - mock_generate_makefile_rule_for_lambda_resource.assert_has_calls( - [ - call( - sam_metadata_resources[i], - list(expected_cfn_resources.keys())[i], - "/terraform/project/root", - "python", - "/output/dir", 
- ) - for i in range(len(sam_metadata_resources)) - ] - ) - - mock_generate_makefile.assert_called_once_with(makefile_rules, "/output/dir") - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_python_command_name") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._generate_makefile") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._generate_makefile_rule_for_lambda_resource") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_relevant_cfn_resource") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._validate_referenced_resource_matches_sam_metadata_type") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_source_code_path") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._enrich_image_lambda_function") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._enrich_zip_lambda_function") - def test_enrich_resources_and_generate_makefile_mock_enrich_zip_functions( - self, - mock_enrich_zip_lambda_function, - mock_enrich_image_lambda_function, - mock_get_lambda_function_source_code_path, - mock_validate_referenced_resource_matches_sam_metadata_type, - mock_get_relevant_cfn_resource, - mock_generate_makefile_rule_for_lambda_resource, - mock_generate_makefile, - mock_get_python_command_name, - ): - mock_get_python_command_name.return_value = "python" - - mock_get_lambda_function_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] - zip_function_1 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_function_common_properties, - "Code": "file.zip", - }, - "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, - } - zip_function_2 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_function_common_properties, - "Code": "file2.zip", - }, - "Metadata": {"SamResourceId": f"aws_lambda_function.func2", "SkipBuild": True}, - } - cfn_resources = { - "logical_id1": zip_function_1, - "logical_id2": 
zip_function_2, - } - mock_get_relevant_cfn_resource.side_effect = [ - [(zip_function_1, "logical_id1")], - [(zip_function_2, "logical_id2")], - ] - sam_metadata_resources = [ - SamMetadataResource( - current_module_address=None, - resource=self.tf_lambda_function_resource_zip_sam_metadata, - config_resource=TFResource("", "", None, {}), - ), - SamMetadataResource( - current_module_address=None, - resource=self.tf_lambda_function_resource_zip_2_sam_metadata, - config_resource=TFResource("", "", None, {}), - ), - ] - - makefile_rules = [Mock() for _ in sam_metadata_resources] - mock_generate_makefile_rule_for_lambda_resource.side_effect = makefile_rules - - _enrich_resources_and_generate_makefile( - sam_metadata_resources, cfn_resources, "/output/dir", "/terraform/project/root", {} - ) - mock_enrich_zip_lambda_function.assert_has_calls( - [ - call( - self.tf_lambda_function_resource_zip_sam_metadata, - zip_function_1, - "logical_id1", - "/terraform/project/root", - "/output/dir", - ), - call( - self.tf_lambda_function_resource_zip_2_sam_metadata, - zip_function_2, - "logical_id2", - "/terraform/project/root", - "/output/dir", - ), - ] - ) - mock_enrich_image_lambda_function.assert_not_called() - - mock_generate_makefile_rule_for_lambda_resource.assert_has_calls( - [ - call( - sam_metadata_resources[i], - list(cfn_resources.keys())[i], - "/terraform/project/root", - "python", - "/output/dir", - ) - for i in range(len(sam_metadata_resources)) - ] - ) - - mock_generate_makefile.assert_called_once_with(makefile_rules, "/output/dir") - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_relevant_cfn_resource") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._validate_referenced_resource_matches_sam_metadata_type") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_source_code_path") - def test_enrich_mapped_resource_zip_function( - self, - mock_get_lambda_function_source_code_path, - 
mock_validate_referenced_resource_matches_sam_metadata_type, - mock_get_relevant_cfn_resource, - ): - mock_get_lambda_function_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] - zip_function_1 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_function_common_properties, - "Code": "file.zip", - }, - "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, - } - mock_get_relevant_cfn_resource.side_effect = [ - (zip_function_1, "logical_id1"), - ] - - expected_zip_function_1 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_function_common_properties, - "Code": "src/code/path1", - }, - "Metadata": { - "SamResourceId": "aws_lambda_function.func1", - "SkipBuild": False, - "BuildMethod": "makefile", - "ContextPath": "/output/dir", - "WorkingDirectory": "/terraform/project/root", - "ProjectRootDirectory": "/terraform/project/root", - }, - } - - _enrich_zip_lambda_function( - self.tf_lambda_function_resource_zip_sam_metadata, - zip_function_1, - "logical_id1", - "/terraform/project/root", - "/output/dir", - ) - self.assertEqual(zip_function_1, expected_zip_function_1) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_relevant_cfn_resource") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._validate_referenced_resource_matches_sam_metadata_type") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_source_code_path") - def test_enrich_mapped_resource_zip_layer( - self, - mock_get_lambda_layer_source_code_path, - mock_validate_referenced_resource_matches_sam_metadata_type, - mock_get_relevant_cfn_resource, - ): - mock_get_lambda_layer_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] - lambda_layer_1 = { - "Type": AWS_LAMBDA_LAYERVERSION, - "Properties": { - **self.expected_cfn_layer_common_properties, - "Content": "file.zip", - }, - "Metadata": {"SamResourceId": f"aws_lambda_layer_version.lambda_layer", "SkipBuild": 
True}, - } - mock_get_relevant_cfn_resource.side_effect = [ - (lambda_layer_1, "logical_id1"), - ] - - expected_lambda_layer_1 = { - "Type": AWS_LAMBDA_LAYERVERSION, - "Properties": { - **self.expected_cfn_layer_common_properties, - "Content": "src/code/path1", - }, - "Metadata": { - "SamResourceId": "aws_lambda_layer_version.lambda_layer", - "SkipBuild": False, - "BuildMethod": "makefile", - "ContextPath": "/output/dir", - "WorkingDirectory": "/terraform/project/root", - "ProjectRootDirectory": "/terraform/project/root", - }, - } - - _enrich_lambda_layer( - self.tf_lambda_layer_resource_zip_sam_metadata, - lambda_layer_1, - "logical_id1", - "/terraform/project/root", - "/output/dir", - ) - self.assertEqual(lambda_layer_1, expected_lambda_layer_1) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_python_command_name") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._generate_makefile") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._generate_makefile_rule_for_lambda_resource") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_relevant_cfn_resource") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._validate_referenced_resource_matches_sam_metadata_type") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_source_code_path") - def test_enrich_resources_and_generate_makefile_image_functions( - self, - mock_get_lambda_function_source_code_path, - mock_validate_referenced_resource_matches_sam_metadata_type, - mock_get_relevant_cfn_resource, - mock_generate_makefile_rule_for_lambda_resource, - mock_generate_makefile, - mock_get_python_command_name, - ): - mock_get_python_command_name.return_value = "python" - - mock_get_lambda_function_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] - image_function_1 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_image_package_type_function_common_properties, - "ImageConfig": { - "Command": ["cmd1", 
"cmd2"], - "EntryPoint": ["entry1", "entry2"], - "WorkingDirectory": "/working/dir/path", - }, - "Code": { - "ImageUri": "image/uri:tag", - }, - }, - "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, - } - - cfn_resources = { - "logical_id1": image_function_1, - } - mock_get_relevant_cfn_resource.side_effect = [ - [(image_function_1, "logical_id1")], - ] - sam_metadata_resources = [ - SamMetadataResource( - current_module_address=None, - resource=self.tf_image_package_type_lambda_function_resource_sam_metadata, - config_resource=TFResource("", "", None, {}), - ), - ] - - expected_image_function_1 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_image_package_type_function_common_properties, - "ImageConfig": { - "Command": ["cmd1", "cmd2"], - "EntryPoint": ["entry1", "entry2"], - "WorkingDirectory": "/working/dir/path", - }, - }, - "Metadata": { - "SamResourceId": "aws_lambda_function.func1", - "SkipBuild": False, - "DockerContext": "src/code/path1", - "Dockerfile": "Dockerfile", - "DockerTag": "2.0", - "DockerBuildArgs": {"FOO": "bar"}, - }, - } - - expected_cfn_resources = { - "logical_id1": expected_image_function_1, - } - - makefile_rules = [Mock() for _ in sam_metadata_resources] - mock_generate_makefile_rule_for_lambda_resource.side_effect = makefile_rules - - _enrich_resources_and_generate_makefile( - sam_metadata_resources, cfn_resources, "/output/dir", "/terraform/project/root", {} - ) - self.assertEqual(cfn_resources, expected_cfn_resources) - - mock_generate_makefile_rule_for_lambda_resource.assert_has_calls( - [ - call( - sam_metadata_resources[i], - list(cfn_resources.keys())[i], - "/terraform/project/root", - "python", - "/output/dir", - ) - for i in range(len(sam_metadata_resources)) - ] - ) - - mock_generate_makefile.assert_called_once_with(makefile_rules, "/output/dir") - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_relevant_cfn_resource") - 
@patch("samcli.hook_packages.terraform.hooks.prepare.hook._validate_referenced_resource_matches_sam_metadata_type") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_source_code_path") - def test_enrich_mapped_resource_image_function( - self, - mock_get_lambda_function_source_code_path, - mock_validate_referenced_resource_matches_sam_metadata_type, - mock_get_relevant_cfn_resource, - ): - mock_get_lambda_function_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] - image_function_1 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_image_package_type_function_common_properties, - "ImageConfig": { - "Command": ["cmd1", "cmd2"], - "EntryPoint": ["entry1", "entry2"], - "WorkingDirectory": "/working/dir/path", - }, - "Code": { - "ImageUri": "image/uri:tag", - }, - }, - "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, - } - - mock_get_relevant_cfn_resource.side_effect = [ - (image_function_1, "logical_id1"), - ] - - expected_image_function_1 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_image_package_type_function_common_properties, - "ImageConfig": { - "Command": ["cmd1", "cmd2"], - "EntryPoint": ["entry1", "entry2"], - "WorkingDirectory": "/working/dir/path", - }, - }, - "Metadata": { - "SamResourceId": "aws_lambda_function.func1", - "SkipBuild": False, - "DockerContext": "src/code/path1", - "Dockerfile": "Dockerfile", - "DockerTag": "2.0", - "DockerBuildArgs": {"FOO": "bar"}, - }, - } - - _enrich_image_lambda_function( - self.tf_image_package_type_lambda_function_resource_sam_metadata, - image_function_1, - "logical_id1", - "/terraform/project/root", - "/output/dir", - ) - self.assertEqual(image_function_1, expected_image_function_1) - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_python_command_name") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._generate_makefile") - 
@patch("samcli.hook_packages.terraform.hooks.prepare.hook._generate_makefile_rule_for_lambda_resource") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_relevant_cfn_resource") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._validate_referenced_resource_matches_sam_metadata_type") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_source_code_path") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._enrich_image_lambda_function") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._enrich_zip_lambda_function") - def test_enrich_resources_and_generate_makefile_mock_enrich_image_functions( - self, - mock_enrich_zip_lambda_function, - mock_enrich_image_lambda_function, - mock_get_lambda_function_source_code_path, - mock_validate_referenced_resource_matches_sam_metadata_type, - mock_get_relevant_cfn_resource, - mock_generate_makefile_rule_for_lambda_resource, - mock_generate_makefile, - mock_get_python_command_name, - ): - mock_get_python_command_name.return_value = "python" - - mock_get_lambda_function_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] - image_function_1 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_image_package_type_function_common_properties, - "ImageConfig": { - "Command": ["cmd1", "cmd2"], - "EntryPoint": ["entry1", "entry2"], - "WorkingDirectory": "/working/dir/path", - }, - "Code": { - "ImageUri": "image/uri:tag", - }, - }, - "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, - } - - cfn_resources = { - "logical_id1": image_function_1, - } - mock_get_relevant_cfn_resource.side_effect = [ - [(image_function_1, "logical_id1")], - ] - sam_metadata_resources = [ - SamMetadataResource( - current_module_address=None, - resource=self.tf_image_package_type_lambda_function_resource_sam_metadata, - config_resource=TFResource("", "", None, {}), - ), - ] - - makefile_rules = [Mock() for _ in sam_metadata_resources] - 
mock_generate_makefile_rule_for_lambda_resource.side_effect = makefile_rules - - _enrich_resources_and_generate_makefile( - sam_metadata_resources, cfn_resources, "/output/dir", "/terraform/project/root", {} - ) - mock_enrich_image_lambda_function.assert_called_once_with( - self.tf_image_package_type_lambda_function_resource_sam_metadata, - image_function_1, - "logical_id1", - "/terraform/project/root", - "/output/dir", - ) - mock_enrich_zip_lambda_function.assert_not_called() - - mock_generate_makefile_rule_for_lambda_resource.assert_has_calls( - [ - call( - sam_metadata_resources[i], - list(cfn_resources.keys())[i], - "/terraform/project/root", - "python", - "/output/dir", - ) - for i in range(len(sam_metadata_resources)) - ] - ) - - mock_generate_makefile.assert_called_once_with(makefile_rules, "/output/dir") - - @parameterized.expand( - [ - ("ABCDEFG",), - ('"ABCDEFG"',), - ] - ) - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_relevant_cfn_resource") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._validate_referenced_resource_matches_sam_metadata_type") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_source_code_path") - def test_enrich_mapped_resource_image_function_invalid_docker_args( - self, - docker_args_value, - mock_get_lambda_function_source_code_path, - mock_validate_referenced_resource_matches_sam_metadata_type, - mock_get_relevant_cfn_resource, - ): - mock_get_lambda_function_source_code_path.side_effect = ["src/code/path1", "src/code/path2"] - image_function_1 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_image_package_type_function_common_properties, - "ImageConfig": { - "Command": ["cmd1", "cmd2"], - "EntryPoint": ["entry1", "entry2"], - "WorkingDirectory": "/working/dir/path", - }, - "Code": { - "ImageUri": "image/uri:tag", - }, - }, - "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, - } - - mock_get_relevant_cfn_resource.side_effect = [ 
- (image_function_1, "logical_id1"), - ] - sam_metadata_resource = { - **self.tf_sam_metadata_resource_common_attributes, - "values": { - "triggers": { - "resource_name": f"aws_lambda_function.{self.image_function_name}", - "docker_build_args": docker_args_value, - "docker_context": "context", - "docker_file": "Dockerfile", - "docker_tag": "2.0", - "resource_type": "IMAGE_LAMBDA_FUNCTION", - }, - }, - "address": f"null_resource.sam_metadata_{self.image_function_name}", - "name": f"sam_metadata_{self.image_function_name}", - } +from tests.unit.hook_packages.terraform.hooks.prepare.prepare_base import PrepareHookUnitBase - with self.assertRaises( - InvalidSamMetadataPropertiesException, - msg="The sam metadata resource null_resource.sam_metadata_func1 should contain a valid json encoded " - "string for the lambda function docker build arguments.", - ): - _enrich_image_lambda_function( - sam_metadata_resource, image_function_1, "logical_id1", "/terraform/project/root", "/output/dir" - ) +from samcli.hook_packages.terraform.hooks.prepare.hook import prepare, _update_resources_paths +from samcli.lib.hook.exceptions import PrepareHookException +from samcli.lib.utils.subprocess_utils import LoadingPatternError - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_python_command_name") - def test_enrich_resources_and_generate_makefile_invalid_source_type( - self, - mock_get_python_command_name, - ): - image_function_1 = { - "Type": CFN_AWS_LAMBDA_FUNCTION, - "Properties": { - **self.expected_cfn_image_package_type_function_common_properties, - "ImageConfig": { - "Command": ["cmd1", "cmd2"], - "EntryPoint": ["entry1", "entry2"], - "WorkingDirectory": "/working/dir/path", - }, - "Code": { - "ImageUri": "image/uri:tag", - }, - }, - "Metadata": {"SamResourceId": f"aws_lambda_function.func1", "SkipBuild": True}, - } - cfn_resources = { - "logical_id1": image_function_1, - } - sam_metadata_resources = [ - SamMetadataResource( - current_module_address=None, - 
resource={ - **self.tf_sam_metadata_resource_common_attributes, - "values": { - "triggers": { - "resource_name": f"aws_lambda_function.{self.image_function_name}", - "docker_build_args": '{"FOO":"bar"}', - "docker_context": "context", - "docker_file": "Dockerfile", - "docker_tag": "2.0", - "resource_type": "Invalid_resource_type", - }, - }, - "address": f"null_resource.sam_metadata_func1", - "name": f"sam_metadata_func1", - }, - config_resource=TFResource("", "", None, {}), - ), - ] - with self.assertRaises( - InvalidSamMetadataPropertiesException, - msg="The resource type Invalid_resource_type found in the sam metadata resource " - "null_resource.sam_metadata_func1 is not a correct resource type. The resource type should be one of " - "these values [ZIP_LAMBDA_FUNCTION, IMAGE_LAMBDA_FUNCTION]", - ): - _enrich_resources_and_generate_makefile( - sam_metadata_resources, cfn_resources, "/output/dir", "/terraform/project/root", {} - ) +class TestPrepareHook(PrepareHookUnitBase): + def setUp(self): + super().setUp() @parameterized.expand( [ @@ -2690,7 +23,7 @@ def test_enrich_resources_and_generate_makefile_invalid_source_type( ) @patch("samcli.hook_packages.terraform.hooks.prepare.hook.invoke_subprocess_with_loading_pattern") @patch("samcli.hook_packages.terraform.hooks.prepare.hook._update_resources_paths") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._translate_to_cfn") + @patch("samcli.hook_packages.terraform.hooks.prepare.hook.translate_to_cfn") @patch("builtins.open") @patch("samcli.hook_packages.terraform.hooks.prepare.hook.osutils.tempfile_platform_independent") @patch("samcli.hook_packages.terraform.hooks.prepare.hook.os") @@ -2770,7 +103,7 @@ def test_prepare( @patch("samcli.hook_packages.terraform.hooks.prepare.hook.invoke_subprocess_with_loading_pattern") @patch("samcli.hook_packages.terraform.hooks.prepare.hook._update_resources_paths") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._translate_to_cfn") + 
@patch("samcli.hook_packages.terraform.hooks.prepare.hook.translate_to_cfn") @patch("builtins.open") @patch("samcli.hook_packages.terraform.hooks.prepare.hook.osutils.tempfile_platform_independent") @patch("samcli.hook_packages.terraform.hooks.prepare.hook.os") @@ -2876,7 +209,7 @@ def test_prepare_with_loader_error(self, mock_subprocess_run, mock_subprocess_lo prepare(self.prepare_params) @patch("samcli.hook_packages.terraform.hooks.prepare.hook.invoke_subprocess_with_loading_pattern") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._translate_to_cfn") + @patch("samcli.hook_packages.terraform.hooks.prepare.hook.translate_to_cfn") @patch("samcli.hook_packages.terraform.hooks.prepare.hook.osutils.tempfile_platform_independent") @patch("samcli.hook_packages.terraform.hooks.prepare.hook.os") @patch("samcli.hook_packages.terraform.hooks.prepare.hook.json") @@ -3005,599 +338,6 @@ def side_effect_func(value): _update_resources_paths(resources, terraform_application_root) self.assertDictEqual(resources, expected_resources) - def test_check_image_config_value_valid(self): - image_config = [ - { - "command": ["cmd1", "cmd2"], - "entry_point": ["entry1", "entry2"], - "working_directory": "/working/dir/path", - } - ] - res = _check_image_config_value(image_config) - self.assertTrue(res) - - def test_check_image_config_value_invalid_type(self): - image_config = { - "command": ["cmd1", "cmd2"], - "entry_point": ["entry1", "entry2"], - "working_directory": "/working/dir/path", - } - expected_message = f"AWS SAM CLI expects that the value of image_config of aws_lambda_function resource in " - f"the terraform plan output to be of type list instead of {type(image_config)}" - with self.assertRaises(PrepareHookException, msg=expected_message): - _check_image_config_value(image_config) - - def test_check_image_config_value_invalid_length(self): - image_config = [ - { - "command": ["cmd1", "cmd2"], - "entry_point": ["entry1", "entry2"], - "working_directory": 
"/working/dir/path", - }, - { - "command": ["cmd1", "cmd2"], - "entry_point": ["entry1", "entry2"], - "working_directory": "/working/dir/path", - }, - ] - expected_message = f"AWS SAM CLI expects that there is only one item in the image_config property of " - f"aws_lambda_function resource in the terraform plan output, but there are {len(image_config)} items" - with self.assertRaises(PrepareHookException, msg=expected_message): - _check_image_config_value(image_config) - - @parameterized.expand([(True,), (False,)]) - @patch("builtins.open") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook.shutil") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook.os") - def test_generate_makefile( - self, - output_dir_exists, - mock_os, - mock_shutil, - mock_open, - ): - mock_os.path.exists.return_value = output_dir_exists - - mock_copy_tf_backend_override_file_path = Mock() - mock_copy_terraform_built_artifacts_script_path = Mock() - mock_makefile_path = Mock() - mock_os.path.dirname.return_value = "" - mock_os.path.join.side_effect = [ - mock_copy_tf_backend_override_file_path, - mock_copy_terraform_built_artifacts_script_path, - mock_makefile_path, - ] - - mock_makefile = Mock() - mock_open.return_value.__enter__.return_value = mock_makefile - - mock_makefile_rules = Mock() - mock_output_directory_path = Mock() - - _generate_makefile(mock_makefile_rules, mock_output_directory_path) - - if output_dir_exists: - mock_os.makedirs.assert_not_called() - else: - mock_os.makedirs.assert_called_once_with(mock_output_directory_path, exist_ok=True) - - mock_shutil.copy.assert_called_once_with( - mock_copy_terraform_built_artifacts_script_path, mock_output_directory_path - ) - - mock_makefile.writelines.assert_called_once_with(mock_makefile_rules) - - @parameterized.expand( - [ - ([CalledProcessError(-2, "python3 --version"), Mock(stdout="Python 3.8.10")], "py3"), - ([Mock(stdout="Python 3.7.12"), CalledProcessError(-2, "py3 --version")], "python3"), - 
([Mock(stdout="Python 3.7")], "python3"), - ([Mock(stdout="Python 3.7.0")], "python3"), - ([Mock(stdout="Python 3.7.12")], "python3"), - ([Mock(stdout="Python 3.8")], "python3"), - ([Mock(stdout="Python 3.8.0")], "python3"), - ([Mock(stdout="Python 3.8.12")], "python3"), - ([Mock(stdout="Python 3.9")], "python3"), - ([Mock(stdout="Python 3.9.0")], "python3"), - ([Mock(stdout="Python 3.9.12")], "python3"), - ([Mock(stdout="Python 3.10")], "python3"), - ([Mock(stdout="Python 3.10.0")], "python3"), - ([Mock(stdout="Python 3.10.12")], "python3"), - ( - [ - Mock(stdout="Python 3.6.10"), - Mock(stdout="Python 3.0.10"), - Mock(stdout="Python 2.7.10"), - Mock(stdout="Python 3.7.12"), - ], - "py", - ), - ] - ) - @patch("samcli.hook_packages.terraform.hooks.prepare.hook.run") - def test_get_python_command_name(self, mock_run_side_effect, expected_python_command, mock_subprocess_run): - mock_subprocess_run.side_effect = mock_run_side_effect - - python_command = _get_python_command_name() - self.assertEqual(python_command, expected_python_command) - - @parameterized.expand( - [ - ( - [ - CalledProcessError(-2, "python3 --version"), - CalledProcessError(-2, "py3 --version"), - CalledProcessError(-2, "python --version"), - CalledProcessError(-2, "py --version"), - ], - ), - ( - [ - Mock(stdout="Python 3"), - Mock(stdout="Python 3.0"), - Mock(stdout="Python 3.0.10"), - Mock(stdout="Python 3.6"), - ], - ), - ( - [ - Mock(stdout="Python 3.6.10"), - Mock(stdout="Python 2"), - Mock(stdout="Python 2.7"), - Mock(stdout="Python 2.7.10"), - ], - ), - ( - [ - Mock(stdout="Python 4"), - Mock(stdout="Python 4.7"), - Mock(stdout="Python 4.7.10"), - Mock(stdout="Python 4.7.10"), - ], - ), - ] - ) - @patch("samcli.hook_packages.terraform.hooks.prepare.hook.run") - def test_get_python_command_name_python_not_found(self, mock_run_side_effect, mock_subprocess_run): - mock_subprocess_run.side_effect = mock_run_side_effect - - expected_error_msg = "Python not found. 
Please ensure that python 3.7 or above is installed." - with self.assertRaises(PrepareHookException, msg=expected_error_msg): - _get_python_command_name() - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_makefile_build_target") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._format_makefile_recipe") - def test_generate_makefile_rule_for_lambda_resource(self, format_recipe_mock, get_build_target_mock): - format_recipe_mock.side_effect = [ - "\tpython3 .aws-sam/iacs_metadata/copy_terraform_built_artifacts.py --expression " - '"|values|root_module|resources|[?address=="null_resource.sam_metadata_aws_lambda_function"]' - '|values|triggers|built_output_path" --directory "$(ARTIFACTS_DIR)" ' - '--target "null_resource.sam_metadata_aws_lambda_function"\n', - ] - get_build_target_mock.return_value = "build-function_logical_id:\n" - sam_metadata_resource = SamMetadataResource( - current_module_address=None, - resource={"address": "null_resource.sam_metadata_aws_lambda_function"}, - config_resource=TFResource("", "", None, {}), - ) - makefile_rule = _generate_makefile_rule_for_lambda_resource( - python_command_name="python", - output_dir="/some/dir/path/.aws-sam/output", - sam_metadata_resource=sam_metadata_resource, - terraform_application_dir="/some/dir/path", - logical_id="function_logical_id", - ) - expected_makefile_rule = ( - "build-function_logical_id:\n" - "\tpython3 .aws-sam/iacs_metadata/copy_terraform_built_artifacts.py " - '--expression "|values|root_module|resources|[?address=="null_resource.sam_metadata_aws_lambda_function"]' - '|values|triggers|built_output_path" --directory "$(ARTIFACTS_DIR)" ' - '--target "null_resource.sam_metadata_aws_lambda_function"\n' - ) - self.assertEqual(makefile_rule, expected_makefile_rule) - - @parameterized.expand( - [ - "null_resource.sam_metadata_aws_lambda_function", - "null_resource.sam_metadata_aws_lambda_function[2]", - 'null_resource.sam_metadata_aws_lambda_layer_version_layers["layer3"]', - 
] - ) - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._build_jpath_string") - def test_build_makerule_python_command(self, resource, jpath_string_mock): - jpath_string_mock.return_value = ( - "|values|root_module|resources|" f'[?address=="{resource}"]' "|values|triggers|built_output_path" - ) - sam_metadata_resource = SamMetadataResource( - current_module_address=None, resource={}, config_resource=TFResource("", "", None, {}) - ) - show_command = _build_makerule_python_command( - python_command_name="python", - output_dir="/some/dir/path/.aws-sam/output", - resource_address=resource, - sam_metadata_resource=sam_metadata_resource, - terraform_application_dir="/some/dir/path", - ) - script_path = ".aws-sam/output/copy_terraform_built_artifacts.py" - escaped_resource = resource.replace('"', '\\"') - expected_show_command = ( - f'python "{script_path}" ' - '--expression "|values|root_module|resources|' - f'[?address==\\"{escaped_resource}\\"]' - '|values|triggers|built_output_path" --directory "$(ARTIFACTS_DIR)" ' - f'--target "{escaped_resource}"' - ) - self.assertEqual(show_command, expected_show_command) - - @parameterized.expand( - [ - ( - None, - '|values|root_module|resources|[?address=="null_resource' - '.sam_metadata_aws_lambda_function"]|values|triggers|built_output_path', - ), - ( - "module.level1_lambda", - "|values|root_module|child_modules|[?address==module.level1_lambda]|resources|" - '[?address=="null_resource.sam_metadata_aws_lambda_function"]|values|triggers|built_output_path', - ), - ( - "module.level1_lambda.module.level2_lambda", - "|values|root_module|child_modules|[?address==module.level1_lambda]|child_modules|" - "[?address==module.level1_lambda.module.level2_lambda]|resources|[?address==" - '"null_resource.sam_metadata_aws_lambda_function"]|values|triggers|built_output_path', - ), - ] - ) - def test_build_jpath_string(self, module_address, expected_jpath): - sam_metadata_resource = SamMetadataResource( - 
current_module_address=module_address, resource={}, config_resource=TFResource("", "", None, {}) - ) - self.assertEqual( - _build_jpath_string(sam_metadata_resource, "null_resource.sam_metadata_aws_lambda_function"), expected_jpath - ) - - @parameterized.expand( - [ - (None, []), - ( - "module.level1_lambda", - ["module.level1_lambda"], - ), - ( - "module.level1_lambda.module.level2_lambda", - ["module.level1_lambda", "module.level1_lambda.module.level2_lambda"], - ), - ( - "module.level1_lambda.module.level2_lambda.module.level3_lambda", - [ - "module.level1_lambda", - "module.level1_lambda.module.level2_lambda", - "module.level1_lambda.module.level2_lambda.module.level3_lambda", - ], - ), - ] - ) - def test_get_parent_modules(self, module_address, expected_list): - self.assertEqual(_get_parent_modules(module_address), expected_list) - - def test_get_makefile_build_target(self): - output_string = _get_makefile_build_target("function_logical_id") - self.assertRegex(output_string, r"^build-function_logical_id:(\n|\r\n)$") - - def test__format_makefile_recipe(self): - output_string = _format_makefile_recipe("terraform show -json | python3") - self.assertRegex(output_string, r"^\tterraform show -json \| python3(\n|\r\n)$") - - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._link_lambda_function_to_layer") - @patch("samcli.hook_packages.terraform.hooks.prepare.hook._get_configuration_address") - def test_link_lambda_functions_to_layers(self, mock_get_configuration_address, mock_link_lambda_function_to_layer): - lambda_funcs_config_resources = { - "aws_lambda_function.remote_lambda_code": [ - { - "Type": "AWS::Lambda::Function", - "Properties": { - "FunctionName": "s3_remote_lambda_function", - "Code": {"S3Bucket": "lambda_code_bucket", "S3Key": "remote_lambda_code_key"}, - "Handler": "app.lambda_handler", - "PackageType": "Zip", - "Runtime": "python3.8", - "Timeout": 3, - }, - "Metadata": {"SamResourceId": "aws_lambda_function.remote_lambda_code", 
"SkipBuild": True}, - } - ], - "aws_lambda_function.root_lambda": [ - { - "Type": "AWS::Lambda::Function", - "Properties": { - "FunctionName": "root_lambda", - "Code": "HelloWorldFunction.zip", - "Handler": "app.lambda_handler", - "PackageType": "Zip", - "Runtime": "python3.8", - "Timeout": 3, - }, - "Metadata": {"SamResourceId": "aws_lambda_function.root_lambda", "SkipBuild": True}, - } - ], - } - terraform_layers_resources = { - "AwsLambdaLayerVersionLambdaLayer556B22D0": { - "address": "aws_lambda_layer_version.lambda_layer", - "mode": "managed", - "type": "aws_lambda_layer_version", - "name": "lambda_layer", - "provider_name": "registry.terraform.io/hashicorp/aws", - "schema_version": 0, - "values": { - "compatible_architectures": ["arm64"], - "compatible_runtimes": ["nodejs14.x", "nodejs16.x"], - "description": None, - "filename": None, - "layer_name": "lambda_layer_name", - "license_info": None, - "s3_bucket": "layer_code_bucket", - "s3_key": "s3_lambda_layer_code_key", - "s3_object_version": "1", - "skip_destroy": False, - }, - "sensitive_values": {"compatible_architectures": [False], "compatible_runtimes": [False, False]}, - } - } - resources = { - "aws_lambda_function.remote_lambda_code": TFResource( - "aws_lambda_function.remote_lambda_code", "", None, {} - ), - "aws_lambda_function.root_lambda": TFResource("aws_lambda_function.root_lambda", "", None, {}), - } - _link_lambda_functions_to_layers(resources, lambda_funcs_config_resources, terraform_layers_resources) - mock_link_lambda_function_to_layer.assert_has_calls( - [ - call( - resources["aws_lambda_function.remote_lambda_code"], - lambda_funcs_config_resources.get("aws_lambda_function.remote_lambda_code"), - terraform_layers_resources, - ), - call( - resources["aws_lambda_function.root_lambda"], - lambda_funcs_config_resources.get("aws_lambda_function.root_lambda"), - terraform_layers_resources, - ), - ] - ) - - def test_add_child_modules_to_queue(self): - m20_planned_value_module = { - "resources": [ 
- { - **self.tf_lambda_function_resource_zip_3, - "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_3}", - }, - ], - "address": "module.m1.module.m2[0]", - } - m21_planned_value_module = { - "resources": [ - { - **self.tf_lambda_function_resource_zip_4, - "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_4}", - }, - ], - "address": "module.m1.module.m2[1]", - } - m1_planned_value_module = { - "resources": [ - { - **self.tf_lambda_function_resource_zip_2, - "address": f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}", - }, - ], - "child_modules": [ - m20_planned_value_module, - m21_planned_value_module, - ], - "address": "module.m1", - } - curr_module = { - "resources": [ - self.tf_lambda_function_resource_zip, - ], - "child_modules": [m1_planned_value_module], - } - m2_config_module = TFModule( - "module.m1.module.m2", - None, - {}, - { - f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_3}": Mock(), - }, - {}, - {}, - ) - m1_config_module = TFModule( - "module.m1", - None, - {}, - { - f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}": Mock(), - }, - {"m2": m2_config_module}, - {}, - ) - m2_config_module.parent_module = m1_config_module - curr_config_module = TFModule( - None, - None, - {}, - { - f"aws_lambda_function.{self.zip_function_name}": Mock(), - }, - {"m1": m1_config_module}, - {}, - ) - m1_config_module.parent_module = curr_config_module - modules_queue = [] - _add_child_modules_to_queue(curr_module, curr_config_module, modules_queue) - self.assertEqual(modules_queue, [(m1_planned_value_module, m1_config_module)]) - modules_queue = [] - _add_child_modules_to_queue(m1_planned_value_module, m1_config_module, modules_queue) - self.assertEqual( - modules_queue, [(m20_planned_value_module, m2_config_module), (m21_planned_value_module, m2_config_module)] - ) - - def 
test_add_child_modules_to_queue_invalid_config(self): - m20_planned_value_module = { - "resources": [ - { - **self.tf_lambda_function_resource_zip_3, - "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_3}", - }, - ], - "address": "module.m1.module.m2[0]", - } - m21_planned_value_module = { - "resources": [ - { - **self.tf_lambda_function_resource_zip_4, - "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_4}", - }, - ], - "address": "module.m1.module.m2[1]", - } - m1_planned_value_module = { - "resources": [ - { - **self.tf_lambda_function_resource_zip_2, - "address": f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}", - }, - ], - "child_modules": [ - m20_planned_value_module, - m21_planned_value_module, - ], - "address": "module.m1", - } - m2_config_module = TFModule( - "module.m1.module.m2", - None, - {}, - { - f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_3}": Mock(), - }, - {}, - {}, - ) - m1_config_module = TFModule( - "module.m1", - None, - {}, - { - f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}": Mock(), - }, - {"m3": m2_config_module}, - {}, - ) - m2_config_module.parent_module = m1_config_module - modules_queue = [] - with self.assertRaises( - PrepareHookException, - msg=f"Module module.m1.module.m2[0] exists in terraform planned_value, but does not exist in " - "terraform configuration", - ): - _add_child_modules_to_queue(m1_planned_value_module, m1_config_module, modules_queue) - - def test_check_dummy_remote_values_no_exception(self): - no_exception = True - try: - _check_dummy_remote_values( - { - "func1": { - "Properties": { - "Code": { - "S3bucket": "bucket1", - "S3Key": "key1", - "S3ObjectVersion": "version", - } - } - }, - "func2": { - "Properties": { - "Code": { - "ImageUri": "uri", - } - } - }, - } - ) - except PrepareHookException as e: - no_exception = False - self.assertTrue(no_exception) - - 
def test_check_dummy_remote_values_s3_bucket_remote_issue(self): - no_exception = True - with self.assertRaises( - PrepareHookException, - msg=f"Lambda resource resource1 is referring to an S3 bucket that is not created yet" - f", and there is no sam metadata resource set for it to build its code locally", - ): - _check_dummy_remote_values( - { - "func1": { - "Type": AWS_LAMBDA_FUNCTION, - "Properties": { - "Code": { - "S3Bucket": REMOTE_DUMMY_VALUE, - "S3Key": "key1", - "S3ObjectVersion": "version", - } - }, - "Metadata": {"SamResourceId": "resource1"}, - }, - "func2": { - "Type": AWS_LAMBDA_FUNCTION, - "Properties": { - "Code": { - "ImageUri": "uri", - } - }, - }, - } - ) - - def test_check_dummy_remote_values_for_image_uri(self): - no_exception = True - - with self.assertRaises( - PrepareHookException, - msg=f"Lambda resource resource1 is referring to an image uri " - "that is not created yet, and there is no sam metadata resource set for it to build its image " - "locally.", - ): - _check_dummy_remote_values( - { - "func1": { - "Type": AWS_LAMBDA_FUNCTION, - "Properties": { - "Code": { - "S3Bucket": REMOTE_DUMMY_VALUE, - "S3Key": "key1", - "S3ObjectVersion": "version", - } - }, - "Metadata": {"SamResourceId": "resource1"}, - }, - "func2": { - "Type": AWS_LAMBDA_FUNCTION, - "Properties": { - "Code": { - "ImageUri": "uri", - } - }, - }, - } - ) - @patch("samcli.hook_packages.terraform.hooks.prepare.hook.os") @patch("samcli.hook_packages.terraform.hooks.prepare.hook.run") def test_skip_prepare_infra_with_metadata_file(self, run_mock, os_mock): @@ -3610,52 +350,3 @@ def test_skip_prepare_infra_with_metadata_file(self, run_mock, os_mock): prepare(self.prepare_params) run_mock.assert_not_called() - - def test_add_metadata_resource_to_metadata_list(self): - metadata_resource_mock1 = Mock() - metadata_resource_mock2 = Mock() - new_metadata_resource_mock = Mock() - planned_Value_resource = { - **self.tf_sam_metadata_resource_common_attributes, - "values": { - 
"triggers": { - "built_output_path": "builds/func2.zip", - "original_source_code": "./src/lambda_func2", - "resource_name": "aws_lambda_function.func1", - "resource_type": "ZIP_LAMBDA_FUNCTION", - }, - }, - "address": "null_resource.sam_metadata_func2", - "name": "sam_metadata_func2", - } - metadata_resources_list = [metadata_resource_mock1, metadata_resource_mock2] - _add_metadata_resource_to_metadata_list( - new_metadata_resource_mock, planned_Value_resource, metadata_resources_list - ) - self.assertEqual( - metadata_resources_list, [metadata_resource_mock1, metadata_resource_mock2, new_metadata_resource_mock] - ) - - def test_add_metadata_resource_without_resource_name_to_metadata_list(self): - metadata_resource_mock1 = Mock() - metadata_resource_mock2 = Mock() - new_metadata_resource_mock = Mock() - planned_Value_resource = { - **self.tf_sam_metadata_resource_common_attributes, - "values": { - "triggers": { - "built_output_path": "builds/func2.zip", - "original_source_code": "./src/lambda_func2", - "resource_type": "ZIP_LAMBDA_FUNCTION", - }, - }, - "address": "null_resource.sam_metadata_func2", - "name": "sam_metadata_func2", - } - metadata_resources_list = [metadata_resource_mock1, metadata_resource_mock2] - _add_metadata_resource_to_metadata_list( - new_metadata_resource_mock, planned_Value_resource, metadata_resources_list - ) - self.assertEqual( - metadata_resources_list, [new_metadata_resource_mock, metadata_resource_mock1, metadata_resource_mock2] - ) diff --git a/tests/unit/hook_packages/terraform/hooks/prepare/test_makefile_generator.py b/tests/unit/hook_packages/terraform/hooks/prepare/test_makefile_generator.py new file mode 100644 index 0000000000..bd3f0da6be --- /dev/null +++ b/tests/unit/hook_packages/terraform/hooks/prepare/test_makefile_generator.py @@ -0,0 +1,189 @@ +"""Test Terraform prepare Makefile""" +from unittest.mock import patch, Mock +from parameterized import parameterized + +from 
tests.unit.hook_packages.terraform.hooks.prepare.prepare_base import PrepareHookUnitBase +from samcli.hook_packages.terraform.hooks.prepare.types import ( + SamMetadataResource, +) +from samcli.hook_packages.terraform.hooks.prepare.makefile_generator import ( + generate_makefile_rule_for_lambda_resource, + generate_makefile, + _get_makefile_build_target, + _get_parent_modules, + _build_jpath_string, + _format_makefile_recipe, + _build_makerule_python_command, +) +from samcli.hook_packages.terraform.hooks.prepare.types import TFResource + + +class TestPrepareMakefile(PrepareHookUnitBase): + def setUp(self): + super().setUp() + + @patch("samcli.hook_packages.terraform.hooks.prepare.makefile_generator._get_makefile_build_target") + @patch("samcli.hook_packages.terraform.hooks.prepare.makefile_generator._format_makefile_recipe") + def test_generate_makefile_rule_for_lambda_resource(self, format_recipe_mock, get_build_target_mock): + format_recipe_mock.side_effect = [ + "\tpython3 .aws-sam/iacs_metadata/copy_terraform_built_artifacts.py --expression " + '"|values|root_module|resources|[?address=="null_resource.sam_metadata_aws_lambda_function"]' + '|values|triggers|built_output_path" --directory "$(ARTIFACTS_DIR)" ' + '--target "null_resource.sam_metadata_aws_lambda_function"\n', + ] + get_build_target_mock.return_value = "build-function_logical_id:\n" + sam_metadata_resource = SamMetadataResource( + current_module_address=None, + resource={"address": "null_resource.sam_metadata_aws_lambda_function"}, + config_resource=TFResource("", "", None, {}), + ) + makefile_rule = generate_makefile_rule_for_lambda_resource( + python_command_name="python", + output_dir="/some/dir/path/.aws-sam/output", + sam_metadata_resource=sam_metadata_resource, + terraform_application_dir="/some/dir/path", + logical_id="function_logical_id", + ) + expected_makefile_rule = ( + "build-function_logical_id:\n" + "\tpython3 .aws-sam/iacs_metadata/copy_terraform_built_artifacts.py " + '--expression 
"|values|root_module|resources|[?address=="null_resource.sam_metadata_aws_lambda_function"]' + '|values|triggers|built_output_path" --directory "$(ARTIFACTS_DIR)" ' + '--target "null_resource.sam_metadata_aws_lambda_function"\n' + ) + self.assertEqual(makefile_rule, expected_makefile_rule) + + @parameterized.expand( + [ + "null_resource.sam_metadata_aws_lambda_function", + "null_resource.sam_metadata_aws_lambda_function[2]", + 'null_resource.sam_metadata_aws_lambda_layer_version_layers["layer3"]', + ] + ) + @patch("samcli.hook_packages.terraform.hooks.prepare.makefile_generator._build_jpath_string") + def test_build_makerule_python_command(self, resource, jpath_string_mock): + jpath_string_mock.return_value = ( + "|values|root_module|resources|" f'[?address=="{resource}"]' "|values|triggers|built_output_path" + ) + sam_metadata_resource = SamMetadataResource( + current_module_address=None, resource={}, config_resource=TFResource("", "", None, {}) + ) + show_command = _build_makerule_python_command( + python_command_name="python", + output_dir="/some/dir/path/.aws-sam/output", + resource_address=resource, + sam_metadata_resource=sam_metadata_resource, + terraform_application_dir="/some/dir/path", + ) + script_path = ".aws-sam/output/copy_terraform_built_artifacts.py" + escaped_resource = resource.replace('"', '\\"') + expected_show_command = ( + f'python "{script_path}" ' + '--expression "|values|root_module|resources|' + f'[?address==\\"{escaped_resource}\\"]' + '|values|triggers|built_output_path" --directory "$(ARTIFACTS_DIR)" ' + f'--target "{escaped_resource}"' + ) + self.assertEqual(show_command, expected_show_command) + + def test_get_makefile_build_target(self): + output_string = _get_makefile_build_target("function_logical_id") + self.assertRegex(output_string, r"^build-function_logical_id:(\n|\r\n)$") + + def test__format_makefile_recipe(self): + output_string = _format_makefile_recipe("terraform show -json | python3") + self.assertRegex(output_string, 
r"^\tterraform show -json \| python3(\n|\r\n)$") + + @parameterized.expand( + [ + ( + None, + '|values|root_module|resources|[?address=="null_resource' + '.sam_metadata_aws_lambda_function"]|values|triggers|built_output_path', + ), + ( + "module.level1_lambda", + "|values|root_module|child_modules|[?address==module.level1_lambda]|resources|" + '[?address=="null_resource.sam_metadata_aws_lambda_function"]|values|triggers|built_output_path', + ), + ( + "module.level1_lambda.module.level2_lambda", + "|values|root_module|child_modules|[?address==module.level1_lambda]|child_modules|" + "[?address==module.level1_lambda.module.level2_lambda]|resources|[?address==" + '"null_resource.sam_metadata_aws_lambda_function"]|values|triggers|built_output_path', + ), + ] + ) + def test_build_jpath_string(self, module_address, expected_jpath): + sam_metadata_resource = SamMetadataResource( + current_module_address=module_address, resource={}, config_resource=TFResource("", "", None, {}) + ) + self.assertEqual( + _build_jpath_string(sam_metadata_resource, "null_resource.sam_metadata_aws_lambda_function"), expected_jpath + ) + + @parameterized.expand( + [ + (None, []), + ( + "module.level1_lambda", + ["module.level1_lambda"], + ), + ( + "module.level1_lambda.module.level2_lambda", + ["module.level1_lambda", "module.level1_lambda.module.level2_lambda"], + ), + ( + "module.level1_lambda.module.level2_lambda.module.level3_lambda", + [ + "module.level1_lambda", + "module.level1_lambda.module.level2_lambda", + "module.level1_lambda.module.level2_lambda.module.level3_lambda", + ], + ), + ] + ) + def test_get_parent_modules(self, module_address, expected_list): + self.assertEqual(_get_parent_modules(module_address), expected_list) + + @parameterized.expand([(True,), (False,)]) + @patch("builtins.open") + @patch("samcli.hook_packages.terraform.hooks.prepare.makefile_generator.shutil") + @patch("samcli.hook_packages.terraform.hooks.prepare.makefile_generator.os") + def test_generate_makefile( + 
self, + output_dir_exists, + mock_os, + mock_shutil, + mock_open, + ): + mock_os.path.exists.return_value = output_dir_exists + + mock_copy_tf_backend_override_file_path = Mock() + mock_copy_terraform_built_artifacts_script_path = Mock() + mock_makefile_path = Mock() + mock_os.path.dirname.return_value = "" + mock_os.path.join.side_effect = [ + mock_copy_tf_backend_override_file_path, + mock_copy_terraform_built_artifacts_script_path, + mock_makefile_path, + ] + + mock_makefile = Mock() + mock_open.return_value.__enter__.return_value = mock_makefile + + mock_makefile_rules = Mock() + mock_output_directory_path = Mock() + + generate_makefile(mock_makefile_rules, mock_output_directory_path) + + if output_dir_exists: + mock_os.makedirs.assert_not_called() + else: + mock_os.makedirs.assert_called_once_with(mock_output_directory_path, exist_ok=True) + + mock_shutil.copy.assert_called_once_with( + mock_copy_terraform_built_artifacts_script_path, mock_output_directory_path + ) + + mock_makefile.writelines.assert_called_once_with(mock_makefile_rules) diff --git a/tests/unit/hook_packages/terraform/hooks/prepare/test_property_builder.py b/tests/unit/hook_packages/terraform/hooks/prepare/test_property_builder.py new file mode 100644 index 0000000000..1e4b1feede --- /dev/null +++ b/tests/unit/hook_packages/terraform/hooks/prepare/test_property_builder.py @@ -0,0 +1,196 @@ +"""Test Terraform property builder""" +from unittest.mock import patch, Mock, call +from parameterized import parameterized + +from samcli.hook_packages.terraform.hooks.prepare.property_builder import ( + _build_code_property, + REMOTE_DUMMY_VALUE, + _get_property_extractor, + _build_lambda_function_environment_property, + _build_lambda_function_image_config_property, + _check_image_config_value, +) + +from samcli.lib.hook.exceptions import PrepareHookException +from tests.unit.hook_packages.terraform.hooks.prepare.prepare_base import PrepareHookUnitBase + + +class 
TestTerraformPropBuilder(PrepareHookUnitBase): + def setUp(self): + super().setUp() + + def test_build_lambda_function_code_property_zip(self): + resource_mock = Mock() + expected_cfn_property = self.expected_cfn_zip_function_properties["Code"] + translated_cfn_property = _build_code_property(self.tf_zip_function_properties, resource_mock) + resource_mock.assert_not_called() + self.assertEqual(translated_cfn_property, expected_cfn_property) + + @patch("samcli.hook_packages.terraform.hooks.prepare.property_builder._resolve_resource_attribute") + def test_build_lambda_function_code_property_s3_with_null_bucket_only_in_planned_values( + self, + mock_resolve_resource_attribute, + ): + resource_mock = Mock() + reference_mock = Mock() + mock_resolve_resource_attribute.return_value = reference_mock + tf_s3_function_properties = { + **self.tf_function_common_properties, + "s3_key": "bucket_key", + "s3_object_version": "1", + } + expected_cfn_property = { + "S3Bucket": REMOTE_DUMMY_VALUE, + "S3Bucket_config_value": reference_mock, + "S3Key": "bucket_key", + "S3ObjectVersion": "1", + } + translated_cfn_property = _build_code_property(tf_s3_function_properties, resource_mock) + self.assertEqual(translated_cfn_property, expected_cfn_property) + mock_resolve_resource_attribute.assert_has_calls( + [call(resource_mock, "s3_bucket"), call(resource_mock, "s3_key"), call(resource_mock, "s3_object_version")] + ) + + @patch("samcli.hook_packages.terraform.hooks.prepare.property_builder._resolve_resource_attribute") + def test_build_lambda_function_code_property_with_null_imageuri_only_in_planned_values( + self, + mock_resolve_resource_attribute, + ): + resource_mock = Mock() + reference_mock = Mock() + mock_resolve_resource_attribute.return_value = reference_mock + tf_image_function_properties = { + **self.tf_image_package_type_function_common_properties, + "image_config": [ + { + "command": ["cmd1", "cmd2"], + "entry_point": ["entry1", "entry2"], + "working_directory": 
"/working/dir/path", + } + ], + } + expected_cfn_property = { + "ImageUri": REMOTE_DUMMY_VALUE, + } + translated_cfn_property = _build_code_property(tf_image_function_properties, resource_mock) + self.assertEqual(translated_cfn_property, expected_cfn_property) + mock_resolve_resource_attribute.assert_has_calls([call(resource_mock, "image_uri")]) + + def test_build_lambda_function_code_property_s3(self): + resource_mock = Mock() + expected_cfn_property = self.expected_cfn_s3_function_properties["Code"] + translated_cfn_property = _build_code_property(self.tf_s3_function_properties, resource_mock) + resource_mock.assert_not_called() + self.assertEqual(translated_cfn_property, expected_cfn_property) + + def test_build_lambda_function_code_property_image(self): + expected_cfn_property = self.expected_cfn_image_package_function_properties["Code"] + resource_mock = Mock() + translated_cfn_property = _build_code_property(self.tf_image_package_type_function_properties, resource_mock) + resource_mock.assert_not_called() + self.assertEqual(translated_cfn_property, expected_cfn_property) + + def test_build_lambda_layer_code_property_zip(self): + resource_mock = Mock() + expected_cfn_property = self.expected_cfn_lambda_layer_properties_zip["Content"] + translated_cfn_property = _build_code_property(self.tf_lambda_layer_properties_zip, resource_mock) + resource_mock.assert_not_called() + self.assertEqual(translated_cfn_property, expected_cfn_property) + + def test_build_lambda_layer_code_property_s3(self): + resource_mock = Mock() + expected_cfn_property = self.expected_cfn_lambda_layer_properties_s3["Content"] + translated_cfn_property = _build_code_property(self.tf_lambda_layer_properties_s3, resource_mock) + resource_mock.assert_not_called() + self.assertEqual(translated_cfn_property, expected_cfn_property) + + @parameterized.expand(["function_name", "handler"]) + def test_get_property_extractor(self, tf_property_name): + property_extractor = 
_get_property_extractor(tf_property_name) + self.assertEqual( + property_extractor(self.tf_zip_function_properties, None), self.tf_zip_function_properties[tf_property_name] + ) + + def test_build_lambda_function_environment_property(self): + expected_cfn_property = self.expected_cfn_zip_function_properties["Environment"] + translated_cfn_property = _build_lambda_function_environment_property(self.tf_zip_function_properties, None) + self.assertEqual(translated_cfn_property, expected_cfn_property) + + def test_build_lambda_function_environment_property_no_variables(self): + tf_properties = {"function_name": self.zip_function_name} + self.assertIsNone(_build_lambda_function_environment_property(tf_properties, None)) + + tf_properties = {"environment": [], "function_name": self.zip_function_name} + self.assertIsNone(_build_lambda_function_environment_property(tf_properties, None)) + + def test_build_lambda_function_image_config_property(self): + expected_cfn_property = self.expected_cfn_image_package_function_properties["ImageConfig"] + translated_cfn_property = _build_lambda_function_image_config_property( + self.tf_image_package_type_function_properties, None + ) + self.assertEqual(translated_cfn_property, expected_cfn_property) + + def test_build_lambda_function_image_config_property_no_image_config(self): + tf_properties = {**self.tf_image_package_type_function_properties} + del tf_properties["image_config"] + translated_cfn_property = _build_lambda_function_image_config_property(tf_properties, None) + self.assertEqual(translated_cfn_property, None) + + def test_build_lambda_function_image_config_property_empty_image_config_list(self): + tf_properties = {**self.tf_image_package_type_function_properties} + tf_properties["image_config"] = [] + translated_cfn_property = _build_lambda_function_image_config_property(tf_properties, None) + self.assertEqual(translated_cfn_property, None) + + @parameterized.expand( + [("command", "Command"), ("entry_point", "EntryPoint"), 
("working_directory", "WorkingDirectory")]
+    )
+    def test_build_lambda_function_image_config_property_not_all_properties_exist(
+        self, missing_tf_property, missing_cfn_property
+    ):
+        expected_cfn_property = {**self.expected_cfn_image_package_function_properties["ImageConfig"]}
+        del expected_cfn_property[missing_cfn_property]
+        tf_properties = {**self.tf_image_package_type_function_properties}
+        del tf_properties["image_config"][0][missing_tf_property]
+        translated_cfn_property = _build_lambda_function_image_config_property(tf_properties, None)
+        self.assertEqual(translated_cfn_property, expected_cfn_property)
+
+    def test_check_image_config_value_valid(self):
+        image_config = [
+            {
+                "command": ["cmd1", "cmd2"],
+                "entry_point": ["entry1", "entry2"],
+                "working_directory": "/working/dir/path",
+            }
+        ]
+        res = _check_image_config_value(image_config)
+        self.assertTrue(res)
+
+    def test_check_image_config_value_invalid_type(self):
+        image_config = {
+            "command": ["cmd1", "cmd2"],
+            "entry_point": ["entry1", "entry2"],
+            "working_directory": "/working/dir/path",
+        }
+        expected_message = (f"AWS SAM CLI expects that the value of image_config of aws_lambda_function resource in "
+        f"the terraform plan output to be of type list instead of {type(image_config)}")
+        with self.assertRaises(PrepareHookException, msg=expected_message):
+            _check_image_config_value(image_config)
+
+    def test_check_image_config_value_invalid_length(self):
+        image_config = [
+            {
+                "command": ["cmd1", "cmd2"],
+                "entry_point": ["entry1", "entry2"],
+                "working_directory": "/working/dir/path",
+            },
+            {
+                "command": ["cmd1", "cmd2"],
+                "entry_point": ["entry1", "entry2"],
+                "working_directory": "/working/dir/path",
+            },
+        ]
+        expected_message = (f"AWS SAM CLI expects that there is only one item in the image_config property of "
+        f"aws_lambda_function resource in the terraform plan output, but there are {len(image_config)} items")
+        with self.assertRaises(PrepareHookException, msg=expected_message):
+            
_check_image_config_value(image_config) diff --git a/tests/unit/hook_packages/terraform/hooks/prepare/test_translate.py b/tests/unit/hook_packages/terraform/hooks/prepare/test_translate.py new file mode 100644 index 0000000000..c6253603d1 --- /dev/null +++ b/tests/unit/hook_packages/terraform/hooks/prepare/test_translate.py @@ -0,0 +1,1037 @@ +"""Test Terraform prepare translate""" +import copy +from unittest.mock import Mock, call, patch, MagicMock + +from tests.unit.hook_packages.terraform.hooks.prepare.prepare_base import PrepareHookUnitBase +from samcli.hook_packages.terraform.hooks.prepare.property_builder import ( + AWS_LAMBDA_FUNCTION_PROPERTY_BUILDER_MAPPING, + REMOTE_DUMMY_VALUE, +) +from samcli.hook_packages.terraform.hooks.prepare.types import ( + SamMetadataResource, +) +from samcli.hook_packages.terraform.hooks.prepare.translate import ( + translate_to_cfn, + _add_child_modules_to_queue, + _add_metadata_resource_to_metadata_list, + _translate_properties, + _link_lambda_functions_to_layers, + _map_s3_sources_to_functions, + _check_dummy_remote_values, + _get_s3_object_hash, +) +from samcli.hook_packages.terraform.hooks.prepare.translate import AWS_PROVIDER_NAME +from samcli.hook_packages.terraform.hooks.prepare.types import TFModule, TFResource, ConstantValue, ResolvedReference +from samcli.lib.hook.exceptions import PrepareHookException +from samcli.lib.utils.resources import AWS_LAMBDA_FUNCTION + + +class TestPrepareHookTranslate(PrepareHookUnitBase): + def setUp(self): + super().setUp() + + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._build_module") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._get_configuration_address") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._link_lambda_functions_to_layers") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate.enrich_resources_and_generate_makefile") + def test_translate_to_cfn_empty( + self, + mock_enrich_resources_and_generate_makefile, 
+ mock_link_lambda_functions_to_layers, + mock_get_configuration_address, + mock_build_module, + ): + expected_empty_cfn_dict = {"AWSTemplateFormatVersion": "2010-09-09", "Resources": {}} + + tf_json_empty = {} + tf_json_empty_planned_values = {"planned_values": {}} + tf_json_empty_root_module = {"planned_values": {"root_module": {}}} + tf_json_no_child_modules_and_no_resources = {"planned_values": {"root_module": {"resources": []}}} + + tf_jsons = [ + tf_json_empty, + tf_json_empty_planned_values, + tf_json_empty_root_module, + tf_json_no_child_modules_and_no_resources, + ] + + for tf_json in tf_jsons: + translated_cfn_dict = translate_to_cfn(tf_json, self.output_dir, self.project_root) + self.assertEqual(translated_cfn_dict, expected_empty_cfn_dict) + mock_enrich_resources_and_generate_makefile.assert_not_called() + + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._check_dummy_remote_values") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._build_module") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._get_configuration_address") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._link_lambda_functions_to_layers") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate.enrich_resources_and_generate_makefile") + @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") + def test_translate_to_cfn_with_root_module_only( + self, + checksum_mock, + mock_enrich_resources_and_generate_makefile, + mock_link_lambda_functions_to_layers, + mock_get_configuration_address, + mock_build_module, + mock_check_dummy_remote_values, + ): + root_module = MagicMock() + root_module.get.return_value = "module.m1" + resources_mock = MagicMock() + root_module.resources = resources_mock + child_modules = MagicMock() + child_modules.__getitem__.return_value = Mock() + child_modules.__contains__.return_value = True + child_modules.get.return_value = root_module + root_module.child_modules = child_modules + 
config_resource = Mock() + resources_mock.__getitem__.return_value = config_resource + resources_mock.__contains__.return_value = True + mock_build_module.return_value = root_module + checksum_mock.return_value = self.mock_logical_id_hash + translated_cfn_dict = translate_to_cfn(self.tf_json_with_root_module_only, self.output_dir, self.project_root) + self.assertEqual(translated_cfn_dict, self.expected_cfn_with_root_module_only) + mock_enrich_resources_and_generate_makefile.assert_not_called() + lambda_functions = dict( + filter( + lambda resource: resource[1].get("Type") == "AWS::Lambda::Function", + translated_cfn_dict.get("Resources").items(), + ) + ) + expected_arguments_in_call = [ + {mock_get_configuration_address(): config_resource}, + {mock_get_configuration_address(): [val for _, val in lambda_functions.items()]}, + {}, + ] + mock_link_lambda_functions_to_layers.assert_called_once_with(*expected_arguments_in_call) + mock_get_configuration_address.assert_called() + + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._resolve_resource_attribute") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._check_dummy_remote_values") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._build_module") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._get_configuration_address") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._link_lambda_functions_to_layers") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate.enrich_resources_and_generate_makefile") + @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") + def test_translate_to_cfn_with_s3_object_which_linked_to_uncreated_bucket( + self, + checksum_mock, + mock_enrich_resources_and_generate_makefile, + mock_link_lambda_functions_to_layers, + mock_get_configuration_address, + mock_build_module, + mock_check_dummy_remote_values, + mock_resolve_resource_attribute, + ): + root_module = MagicMock() + 
root_module.get.return_value = "module.m1" + resources_mock = MagicMock() + root_module.resources = resources_mock + child_modules = MagicMock() + child_modules.__getitem__.return_value = Mock() + child_modules.__contains__.return_value = True + child_modules.get.return_value = root_module + root_module.child_modules = child_modules + resource_mock = Mock() + resources_mock.__getitem__.return_value = resource_mock + resources_mock.__contains__.return_value = True + mock_build_module.return_value = root_module + checksum_mock.return_value = self.mock_logical_id_hash + + tf_json_with_root_module_contains_s3_object: dict = { + "planned_values": { + "root_module": { + "resources": [ + { + "type": "aws_s3_object", + "provider_name": AWS_PROVIDER_NAME, + "values": {"source": self.s3_source}, + "address": "aws_lambda_function.code_object", + "name": "code_object", + } + ] + } + } + } + + translate_to_cfn(tf_json_with_root_module_contains_s3_object, self.output_dir, self.project_root) + mock_resolve_resource_attribute.assert_has_calls([call(resource_mock, "bucket"), call(resource_mock, "key")]) + + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._check_dummy_remote_values") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._build_module") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._get_configuration_address") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._link_lambda_functions_to_layers") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate.enrich_resources_and_generate_makefile") + @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") + def test_translate_to_cfn_with_child_modules( + self, + checksum_mock, + mock_enrich_resources_and_generate_makefile, + mock_link_lambda_functions_to_layers, + mock_get_configuration_address, + mock_build_module, + mock_check_dummy_remote_values, + ): + root_module = MagicMock() + root_module.get.return_value = "module.m1" + resources_mock = 
MagicMock() + root_module.resources = resources_mock + child_modules = MagicMock() + child_modules.__getitem__.return_value = Mock() + child_modules.__contains__.return_value = True + child_modules.get.return_value = root_module + root_module.child_modules = child_modules + conf_resource = Mock() + resources_mock.__getitem__.return_value = conf_resource + resources_mock.__contains__.return_value = True + mock_build_module.return_value = root_module + checksum_mock.return_value = self.mock_logical_id_hash + translated_cfn_dict = translate_to_cfn(self.tf_json_with_child_modules, self.output_dir, self.project_root) + self.assertEqual(translated_cfn_dict, self.expected_cfn_with_child_modules) + mock_enrich_resources_and_generate_makefile.assert_not_called() + lambda_functions = dict( + filter( + lambda resource: resource[1].get("Type") == "AWS::Lambda::Function", + translated_cfn_dict.get("Resources").items(), + ) + ) + expected_arguments_in_call = [ + {mock_get_configuration_address(): conf_resource}, + {mock_get_configuration_address(): [val for _, val in lambda_functions.items()]}, + {}, + ] + mock_link_lambda_functions_to_layers.assert_called_once_with(*expected_arguments_in_call) + mock_get_configuration_address.assert_called() + mock_check_dummy_remote_values.assert_called_once_with(translated_cfn_dict.get("Resources")) + + @patch("samcli.hook_packages.terraform.hooks.prepare.translate.build_cfn_logical_id") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._add_lambda_resource_code_path_to_code_map") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._check_dummy_remote_values") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._build_module") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._get_configuration_address") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._link_lambda_functions_to_layers") + 
@patch("samcli.hook_packages.terraform.hooks.prepare.translate.enrich_resources_and_generate_makefile") + @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") + def test_translate_to_cfn_with_root_module_with_sam_metadata_resource( + self, + checksum_mock, + mock_enrich_resources_and_generate_makefile, + mock_link_lambda_functions_to_layers, + mock_get_configuration_address, + mock_build_module, + mock_check_dummy_remote_values, + mock_add_lambda_resource_code_path_to_code_map, + mock_build_cfn_logical_id, + ): + root_module = MagicMock() + root_module.get.return_value = "module.m1" + resources_mock = MagicMock() + root_module.resources = resources_mock + child_modules = MagicMock() + child_modules.__getitem__.return_value = Mock() + child_modules.__contains__.return_value = True + child_modules.get.return_value = root_module + root_module.child_modules = child_modules + resource_mock = Mock() + resources_mock.__getitem__.return_value = resource_mock + resources_mock.__contains__.return_value = True + mock_build_module.return_value = root_module + checksum_mock.return_value = self.mock_logical_id_hash + mock_build_cfn_logical_id.side_effect = ["logical_id1", "logical_id2", "logical_id3"] + translated_cfn_dict = translate_to_cfn( + self.tf_json_with_root_module_with_sam_metadata_resources, self.output_dir, self.project_root + ) + + expected_arguments_in_call = ( + [ + SamMetadataResource( + current_module_address=None, + resource=self.tf_lambda_function_resource_zip_sam_metadata, + config_resource=resource_mock, + ), + SamMetadataResource( + current_module_address=None, + resource=self.tf_lambda_function_resource_zip_2_sam_metadata, + config_resource=resource_mock, + ), + SamMetadataResource( + current_module_address=None, + resource=self.tf_image_package_type_lambda_function_resource_sam_metadata, + config_resource=resource_mock, + ), + ], + translated_cfn_dict["Resources"], + self.output_dir, + self.project_root, + {}, + ) + + 
mock_enrich_resources_and_generate_makefile.assert_called_once_with(*expected_arguments_in_call) + mock_add_lambda_resource_code_path_to_code_map.assert_has_calls( + [ + call( + resource_mock, + "zip", + {}, + "logical_id1", + "file.zip", + "filename", + translated_cfn_dict["Resources"]["logical_id1"], + ), + call( + resource_mock, + "zip", + {}, + "logical_id2", + "file2.zip", + "filename", + translated_cfn_dict["Resources"]["logical_id2"], + ), + call( + resource_mock, + "image", + {}, + "logical_id3", + "image/uri:tag", + "image_uri", + translated_cfn_dict["Resources"]["logical_id3"], + ), + ] + ) + + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._add_lambda_resource_code_path_to_code_map") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._check_dummy_remote_values") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._build_module") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._get_configuration_address") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._link_lambda_functions_to_layers") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate.enrich_resources_and_generate_makefile") + @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") + def test_translate_to_cfn_with_child_modules_with_sam_metadata_resource( + self, + checksum_mock, + mock_enrich_resources_and_generate_makefile, + mock_link_lambda_functions_to_layers, + mock_get_configuration_address, + mock_build_module, + mock_check_dummy_remote_values, + mock_add_lambda_resource_code_path_to_code_map, + ): + root_module = MagicMock() + root_module.get.return_value = "module.m1" + resources_mock = MagicMock() + root_module.resources = resources_mock + child_modules = MagicMock() + child_modules.__getitem__.return_value = Mock() + child_modules.__contains__.return_value = True + child_modules.get.return_value = root_module + root_module.child_modules = child_modules + resource_mock = Mock() + 
resources_mock.__getitem__.return_value = resource_mock + resources_mock.__contains__.return_value = True + mock_build_module.return_value = root_module + checksum_mock.return_value = self.mock_logical_id_hash + translated_cfn_dict = translate_to_cfn( + self.tf_json_with_child_modules_with_sam_metadata_resource, self.output_dir, self.project_root + ) + + expected_arguments_in_call = ( + [ + SamMetadataResource( + current_module_address=None, + resource=self.tf_lambda_function_resource_zip_sam_metadata, + config_resource=resource_mock, + ), + SamMetadataResource( + current_module_address="module.mymodule1", + resource={ + **self.tf_lambda_function_resource_zip_2_sam_metadata, + "address": f"module.mymodule1.null_resource.sam_metadata_{self.zip_function_name_2}", + }, + config_resource=resource_mock, + ), + SamMetadataResource( + current_module_address="module.mymodule1.module.mymodule2", + resource={ + **self.tf_lambda_function_resource_zip_3_sam_metadata, + "address": f"module.mymodule1.module.mymodule2.null_resource.sam_metadata_{self.zip_function_name_3}", + }, + config_resource=resource_mock, + ), + SamMetadataResource( + current_module_address="module.mymodule1.module.mymodule3", + resource={ + **self.tf_lambda_function_resource_zip_4_sam_metadata, + "address": f"module.mymodule1.module.mymodule3.null_resource.sam_metadata_{self.zip_function_name_4}", + }, + config_resource=resource_mock, + ), + ], + translated_cfn_dict["Resources"], + self.output_dir, + self.project_root, + {}, + ) + + mock_enrich_resources_and_generate_makefile.assert_called_once_with(*expected_arguments_in_call) + + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._check_dummy_remote_values") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._build_module") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._get_configuration_address") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._link_lambda_functions_to_layers") + 
@patch("samcli.hook_packages.terraform.hooks.prepare.translate.enrich_resources_and_generate_makefile") + @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") + def test_translate_to_cfn_with_unsupported_provider( + self, + checksum_mock, + mock_enrich_resources_and_generate_makefile, + mock_link_lambda_functions_to_layers, + mock_get_configuration_address, + mock_build_module, + mock_check_dummy_remote_values, + ): + root_module = MagicMock() + root_module.get.return_value = "module.m1" + resources_mock = MagicMock() + root_module.resources = resources_mock + child_modules = MagicMock() + child_modules.__getitem__.return_value = Mock() + child_modules.__contains__.return_value = True + child_modules.get.return_value = root_module + root_module.child_modules = child_modules + resources_mock.__getitem__.return_value = Mock() + resources_mock.__contains__.return_value = True + mock_build_module.return_value = root_module + checksum_mock.return_value = self.mock_logical_id_hash + translated_cfn_dict = translate_to_cfn( + self.tf_json_with_unsupported_provider, self.output_dir, self.project_root + ) + self.assertEqual(translated_cfn_dict, self.expected_cfn_with_unsupported_provider) + mock_enrich_resources_and_generate_makefile.assert_not_called() + + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._check_dummy_remote_values") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._build_module") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._get_configuration_address") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._link_lambda_functions_to_layers") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate.enrich_resources_and_generate_makefile") + @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") + def test_translate_to_cfn_with_unsupported_resource_type( + self, + checksum_mock, + mock_enrich_resources_and_generate_makefile, + mock_link_lambda_functions_to_layers, + 
mock_get_configuration_address, + mock_build_module, + mock_check_dummy_remote_values, + ): + root_module = MagicMock() + root_module.get.return_value = "module.m1" + resources_mock = MagicMock() + root_module.resources = resources_mock + child_modules = MagicMock() + child_modules.__getitem__.return_value = Mock() + child_modules.__contains__.return_value = True + child_modules.get.return_value = root_module + root_module.child_modules = child_modules + resources_mock.__getitem__.return_value = Mock() + resources_mock.__contains__.return_value = True + mock_build_module.return_value = root_module + checksum_mock.return_value = self.mock_logical_id_hash + translated_cfn_dict = translate_to_cfn( + self.tf_json_with_unsupported_resource_type, self.output_dir, self.project_root + ) + self.assertEqual(translated_cfn_dict, self.expected_cfn_with_unsupported_resource_type) + mock_enrich_resources_and_generate_makefile.assert_not_called() + + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._add_lambda_resource_code_path_to_code_map") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._check_dummy_remote_values") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._build_module") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._get_configuration_address") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._link_lambda_functions_to_layers") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate.enrich_resources_and_generate_makefile") + @patch("samcli.hook_packages.terraform.lib.utils.str_checksum") + def test_translate_to_cfn_with_mapping_s3_source_to_function( + self, + checksum_mock, + mock_enrich_resources_and_generate_makefile, + mock_link_lambda_functions_to_layers, + mock_get_configuration_address, + mock_build_module, + mock_check_dummy_remote_values, + mock_add_lambda_resource_code_path_to_code_map, + ): + root_module = MagicMock() + root_module.get.return_value = "module.m1" + 
resources_mock = MagicMock() + root_module.resources = resources_mock + child_modules = MagicMock() + child_modules.__getitem__.return_value = Mock() + child_modules.__contains__.return_value = True + child_modules.get.return_value = root_module + root_module.child_modules = child_modules + resources_mock.__getitem__.return_value = Mock() + resources_mock.__contains__.return_value = True + mock_build_module.return_value = root_module + checksum_mock.return_value = self.mock_logical_id_hash + translated_cfn_dict = translate_to_cfn( + self.tf_json_with_child_modules_and_s3_source_mapping, self.output_dir, self.project_root + ) + self.assertEqual(translated_cfn_dict, self.expected_cfn_with_child_modules_and_s3_source_mapping) + mock_enrich_resources_and_generate_makefile.assert_not_called() + + def test_add_child_modules_to_queue(self): + m20_planned_value_module = { + "resources": [ + { + **self.tf_lambda_function_resource_zip_3, + "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_3}", + }, + ], + "address": "module.m1.module.m2[0]", + } + m21_planned_value_module = { + "resources": [ + { + **self.tf_lambda_function_resource_zip_4, + "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_4}", + }, + ], + "address": "module.m1.module.m2[1]", + } + m1_planned_value_module = { + "resources": [ + { + **self.tf_lambda_function_resource_zip_2, + "address": f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}", + }, + ], + "child_modules": [ + m20_planned_value_module, + m21_planned_value_module, + ], + "address": "module.m1", + } + curr_module = { + "resources": [ + self.tf_lambda_function_resource_zip, + ], + "child_modules": [m1_planned_value_module], + } + m2_config_module = TFModule( + "module.m1.module.m2", + None, + {}, + { + f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_3}": Mock(), + }, + {}, + {}, + ) + m1_config_module = TFModule( + 
"module.m1", + None, + {}, + { + f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}": Mock(), + }, + {"m2": m2_config_module}, + {}, + ) + m2_config_module.parent_module = m1_config_module + curr_config_module = TFModule( + None, + None, + {}, + { + f"aws_lambda_function.{self.zip_function_name}": Mock(), + }, + {"m1": m1_config_module}, + {}, + ) + m1_config_module.parent_module = curr_config_module + modules_queue = [] + _add_child_modules_to_queue(curr_module, curr_config_module, modules_queue) + self.assertEqual(modules_queue, [(m1_planned_value_module, m1_config_module)]) + modules_queue = [] + _add_child_modules_to_queue(m1_planned_value_module, m1_config_module, modules_queue) + self.assertEqual( + modules_queue, [(m20_planned_value_module, m2_config_module), (m21_planned_value_module, m2_config_module)] + ) + + def test_add_child_modules_to_queue_invalid_config(self): + m20_planned_value_module = { + "resources": [ + { + **self.tf_lambda_function_resource_zip_3, + "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_3}", + }, + ], + "address": "module.m1.module.m2[0]", + } + m21_planned_value_module = { + "resources": [ + { + **self.tf_lambda_function_resource_zip_4, + "address": f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_4}", + }, + ], + "address": "module.m1.module.m2[1]", + } + m1_planned_value_module = { + "resources": [ + { + **self.tf_lambda_function_resource_zip_2, + "address": f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}", + }, + ], + "child_modules": [ + m20_planned_value_module, + m21_planned_value_module, + ], + "address": "module.m1", + } + m2_config_module = TFModule( + "module.m1.module.m2", + None, + {}, + { + f"module.mymodule1.module.mymodule2.aws_lambda_function.{self.zip_function_name_3}": Mock(), + }, + {}, + {}, + ) + m1_config_module = TFModule( + "module.m1", + None, + {}, + { + 
f"module.mymodule1.aws_lambda_function.{self.zip_function_name_2}": Mock(), + }, + {"m3": m2_config_module}, + {}, + ) + m2_config_module.parent_module = m1_config_module + modules_queue = [] + with self.assertRaises( + PrepareHookException, + msg=f"Module module.m1.module.m2[0] exists in terraform planned_value, but does not exist in " + "terraform configuration", + ): + _add_child_modules_to_queue(m1_planned_value_module, m1_config_module, modules_queue) + + def test_add_metadata_resource_to_metadata_list(self): + metadata_resource_mock1 = Mock() + metadata_resource_mock2 = Mock() + new_metadata_resource_mock = Mock() + planned_Value_resource = { + **self.tf_sam_metadata_resource_common_attributes, + "values": { + "triggers": { + "built_output_path": "builds/func2.zip", + "original_source_code": "./src/lambda_func2", + "resource_name": "aws_lambda_function.func1", + "resource_type": "ZIP_LAMBDA_FUNCTION", + }, + }, + "address": "null_resource.sam_metadata_func2", + "name": "sam_metadata_func2", + } + metadata_resources_list = [metadata_resource_mock1, metadata_resource_mock2] + _add_metadata_resource_to_metadata_list( + new_metadata_resource_mock, planned_Value_resource, metadata_resources_list + ) + self.assertEqual( + metadata_resources_list, [metadata_resource_mock1, metadata_resource_mock2, new_metadata_resource_mock] + ) + + def test_add_metadata_resource_without_resource_name_to_metadata_list(self): + metadata_resource_mock1 = Mock() + metadata_resource_mock2 = Mock() + new_metadata_resource_mock = Mock() + planned_Value_resource = { + **self.tf_sam_metadata_resource_common_attributes, + "values": { + "triggers": { + "built_output_path": "builds/func2.zip", + "original_source_code": "./src/lambda_func2", + "resource_type": "ZIP_LAMBDA_FUNCTION", + }, + }, + "address": "null_resource.sam_metadata_func2", + "name": "sam_metadata_func2", + } + metadata_resources_list = [metadata_resource_mock1, metadata_resource_mock2] + 
_add_metadata_resource_to_metadata_list( + new_metadata_resource_mock, planned_Value_resource, metadata_resources_list + ) + self.assertEqual( + metadata_resources_list, [new_metadata_resource_mock, metadata_resource_mock1, metadata_resource_mock2] + ) + + def test_translate_properties_function(self): + translated_cfn_properties = _translate_properties( + self.tf_zip_function_properties, AWS_LAMBDA_FUNCTION_PROPERTY_BUILDER_MAPPING, Mock() + ) + self.assertEqual(translated_cfn_properties, self.expected_cfn_zip_function_properties) + + def test_translate_properties_function_with_missing_or_none_properties(self): + translated_cfn_properties = _translate_properties( + self.tf_function_properties_with_missing_or_none, AWS_LAMBDA_FUNCTION_PROPERTY_BUILDER_MAPPING, Mock() + ) + self.assertEqual(translated_cfn_properties, self.expected_cfn_function_properties_with_missing_or_none) + + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._link_lambda_function_to_layer") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._get_configuration_address") + def test_link_lambda_functions_to_layers(self, mock_get_configuration_address, mock_link_lambda_function_to_layer): + lambda_funcs_config_resources = { + "aws_lambda_function.remote_lambda_code": [ + { + "Type": "AWS::Lambda::Function", + "Properties": { + "FunctionName": "s3_remote_lambda_function", + "Code": {"S3Bucket": "lambda_code_bucket", "S3Key": "remote_lambda_code_key"}, + "Handler": "app.lambda_handler", + "PackageType": "Zip", + "Runtime": "python3.8", + "Timeout": 3, + }, + "Metadata": {"SamResourceId": "aws_lambda_function.remote_lambda_code", "SkipBuild": True}, + } + ], + "aws_lambda_function.root_lambda": [ + { + "Type": "AWS::Lambda::Function", + "Properties": { + "FunctionName": "root_lambda", + "Code": "HelloWorldFunction.zip", + "Handler": "app.lambda_handler", + "PackageType": "Zip", + "Runtime": "python3.8", + "Timeout": 3, + }, + "Metadata": {"SamResourceId": 
"aws_lambda_function.root_lambda", "SkipBuild": True}, + } + ], + } + terraform_layers_resources = { + "AwsLambdaLayerVersionLambdaLayer556B22D0": { + "address": "aws_lambda_layer_version.lambda_layer", + "mode": "managed", + "type": "aws_lambda_layer_version", + "name": "lambda_layer", + "provider_name": "registry.terraform.io/hashicorp/aws", + "schema_version": 0, + "values": { + "compatible_architectures": ["arm64"], + "compatible_runtimes": ["nodejs14.x", "nodejs16.x"], + "description": None, + "filename": None, + "layer_name": "lambda_layer_name", + "license_info": None, + "s3_bucket": "layer_code_bucket", + "s3_key": "s3_lambda_layer_code_key", + "s3_object_version": "1", + "skip_destroy": False, + }, + "sensitive_values": {"compatible_architectures": [False], "compatible_runtimes": [False, False]}, + } + } + resources = { + "aws_lambda_function.remote_lambda_code": TFResource( + "aws_lambda_function.remote_lambda_code", "", None, {} + ), + "aws_lambda_function.root_lambda": TFResource("aws_lambda_function.root_lambda", "", None, {}), + } + _link_lambda_functions_to_layers(resources, lambda_funcs_config_resources, terraform_layers_resources) + mock_link_lambda_function_to_layer.assert_has_calls( + [ + call( + resources["aws_lambda_function.remote_lambda_code"], + lambda_funcs_config_resources.get("aws_lambda_function.remote_lambda_code"), + terraform_layers_resources, + ), + call( + resources["aws_lambda_function.root_lambda"], + lambda_funcs_config_resources.get("aws_lambda_function.root_lambda"), + terraform_layers_resources, + ), + ] + ) + + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._calculate_configuration_attribute_value_hash") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._get_s3_object_hash") + def test_map_s3_sources_to_functions( + self, mock_get_s3_object_hash, mock_calculate_configuration_attribute_value_hash + ): + mock_get_s3_object_hash.side_effect = ["hash1", "hash2"] + 
mock_calculate_configuration_attribute_value_hash.side_effect = ["code_hash1", "code_hash2"] + + s3_hash_to_source = {"hash1": (self.s3_source, None), "hash2": (self.s3_source_2, None)} + cfn_resources = { + "s3Function1": copy.deepcopy(self.expected_cfn_lambda_function_resource_s3), + "s3Function2": copy.deepcopy(self.expected_cfn_lambda_function_resource_s3_2), + "nonS3Function": self.expected_cfn_lambda_function_resource_zip, + } + + expected_cfn_resources_after_mapping_s3_sources = { + "s3Function1": self.expected_cfn_lambda_function_resource_s3_after_source_mapping, + "s3Function2": { + **self.expected_cfn_lambda_function_resource_s3_2, + "Properties": { + **self.expected_cfn_lambda_function_resource_s3_2["Properties"], + "Code": self.s3_source_2, + }, + }, + "nonS3Function": self.expected_cfn_lambda_function_resource_zip, # should be unchanged + } + functions_code_map = {} + expected_functions_code_map = { + "zip_code_hash1": [(self.expected_cfn_lambda_function_resource_s3_after_source_mapping, "s3Function1")], + "zip_code_hash2": [ + ( + { + **self.expected_cfn_lambda_function_resource_s3_2, + "Properties": { + **self.expected_cfn_lambda_function_resource_s3_2["Properties"], + "Code": self.s3_source_2, + }, + }, + "s3Function2", + ) + ], + } + _map_s3_sources_to_functions(s3_hash_to_source, cfn_resources, functions_code_map) + + s3Function1CodeBeforeMapping = self.expected_cfn_lambda_function_resource_s3["Properties"]["Code"] + s3Function2CodeBeforeMapping = self.expected_cfn_lambda_function_resource_s3_2["Properties"]["Code"] + mock_get_s3_object_hash.assert_has_calls( + [ + call(s3Function1CodeBeforeMapping["S3Bucket"], s3Function1CodeBeforeMapping["S3Key"]), + call(s3Function2CodeBeforeMapping["S3Bucket"], s3Function2CodeBeforeMapping["S3Key"]), + ] + ) + mock_calculate_configuration_attribute_value_hash.assert_has_calls( + [call(self.s3_source), call(self.s3_source_2)] + ) + self.assertEqual(cfn_resources, expected_cfn_resources_after_mapping_s3_sources) 
+ self.assertEqual(functions_code_map, expected_functions_code_map) + + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._calculate_configuration_attribute_value_hash") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._get_s3_object_hash") + def test_map_s3_sources_to_layers(self, mock_get_s3_object_hash, mock_calculate_configuration_attribute_value_hash): + mock_get_s3_object_hash.side_effect = ["hash1"] + mock_calculate_configuration_attribute_value_hash.side_effect = ["code_hash1"] + + s3_hash_to_source = {"hash1": (self.s3_source, None)} + cfn_resources = { + "s3Layer": copy.deepcopy(self.expected_cfn_layer_resource_s3), + "nonS3Layer": self.expected_cfn_layer_resource_zip, + } + + expected_cfn_resources_after_mapping_s3_sources = { + "s3Layer": self.expected_cfn_s3_layer_resource_after_source_mapping, + "nonS3Layer": self.expected_cfn_layer_resource_zip, # should be unchanged + } + layers_code_map = {} + expected_layers_code_map = { + "layer_code_hash1": [(self.expected_cfn_s3_layer_resource_after_source_mapping, "s3Layer")], + } + _map_s3_sources_to_functions(s3_hash_to_source, cfn_resources, layers_code_map) + + s3LayerCodeBeforeMapping = self.expected_cfn_layer_resource_s3["Properties"]["Content"] + mock_get_s3_object_hash.assert_has_calls( + [ + call(s3LayerCodeBeforeMapping["S3Bucket"], s3LayerCodeBeforeMapping["S3Key"]), + ] + ) + mock_calculate_configuration_attribute_value_hash.assert_has_calls([call(self.s3_source)]) + self.assertEqual(layers_code_map, expected_layers_code_map) + self.assertEqual(cfn_resources, expected_cfn_resources_after_mapping_s3_sources) + + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._calculate_configuration_attribute_value_hash") + @patch("samcli.hook_packages.terraform.hooks.prepare.translate._get_s3_object_hash") + def test_map_s3_sources_to_functions_that_does_not_contain_constant_value_filename( + self, mock_get_s3_object_hash, 
mock_calculate_configuration_attribute_value_hash + ): + mock_get_s3_object_hash.side_effect = ["hash1"] + mock_calculate_configuration_attribute_value_hash.side_effect = ["code_hash1"] + mock_reference = Mock() + s3_hash_to_source = {"hash1": (None, mock_reference)} + cfn_resources = { + "s3Function1": copy.deepcopy(self.expected_cfn_lambda_function_resource_s3), + "nonS3Function": self.expected_cfn_lambda_function_resource_zip, + } + + expected_cfn_resources_after_mapping_s3_sources = { + "s3Function1": copy.deepcopy(self.expected_cfn_lambda_function_resource_s3), + "nonS3Function": self.expected_cfn_lambda_function_resource_zip, # should be unchanged + } + functions_code_map = {} + expected_functions_code_map = { + "zip_code_hash1": [(copy.deepcopy(self.expected_cfn_lambda_function_resource_s3), "s3Function1")], + } + _map_s3_sources_to_functions(s3_hash_to_source, cfn_resources, functions_code_map) + + s3Function1CodeBeforeMapping = self.expected_cfn_lambda_function_resource_s3["Properties"]["Code"] + mock_get_s3_object_hash.assert_has_calls( + [ + call(s3Function1CodeBeforeMapping["S3Bucket"], s3Function1CodeBeforeMapping["S3Key"]), + ] + ) + mock_calculate_configuration_attribute_value_hash.assert_has_calls([call(mock_reference)]) + self.assertEqual(cfn_resources, expected_cfn_resources_after_mapping_s3_sources) + self.assertEqual(functions_code_map, expected_functions_code_map) + + def test_check_dummy_remote_values_no_exception(self): + no_exception = True + try: + _check_dummy_remote_values( + { + "func1": { + "Properties": { + "Code": { + "S3bucket": "bucket1", + "S3Key": "key1", + "S3ObjectVersion": "version", + } + } + }, + "func2": { + "Properties": { + "Code": { + "ImageUri": "uri", + } + } + }, + } + ) + except PrepareHookException as e: + no_exception = False + self.assertTrue(no_exception) + + def test_check_dummy_remote_values_s3_bucket_remote_issue(self): + no_exception = True + with self.assertRaises( + PrepareHookException, + msg=f"Lambda 
resource resource1 is referring to an S3 bucket that is not created yet" + f", and there is no sam metadata resource set for it to build its code locally", + ): + _check_dummy_remote_values( + { + "func1": { + "Type": AWS_LAMBDA_FUNCTION, + "Properties": { + "Code": { + "S3Bucket": REMOTE_DUMMY_VALUE, + "S3Key": "key1", + "S3ObjectVersion": "version", + } + }, + "Metadata": {"SamResourceId": "resource1"}, + }, + "func2": { + "Type": AWS_LAMBDA_FUNCTION, + "Properties": { + "Code": { + "ImageUri": "uri", + } + }, + }, + } + ) + + def test_check_dummy_remote_values_for_image_uri(self): + no_exception = True + + with self.assertRaises( + PrepareHookException, + msg=f"Lambda resource resource1 is referring to an image uri " + "that is not created yet, and there is no sam metadata resource set for it to build its image " + "locally.", + ): + _check_dummy_remote_values( + { + "func1": { + "Type": AWS_LAMBDA_FUNCTION, + "Properties": { + "Code": { + "S3Bucket": REMOTE_DUMMY_VALUE, + "S3Key": "key1", + "S3ObjectVersion": "version", + } + }, + "Metadata": {"SamResourceId": "resource1"}, + }, + "func2": { + "Type": AWS_LAMBDA_FUNCTION, + "Properties": { + "Code": { + "ImageUri": "uri", + } + }, + }, + } + ) + + def test_get_s3_object_hash(self): + self.assertEqual( + _get_s3_object_hash(self.s3_bucket, self.s3_key), _get_s3_object_hash(self.s3_bucket, self.s3_key) + ) + self.assertEqual( + _get_s3_object_hash( + [ConstantValue("A"), ResolvedReference("aws_lambda_function.arn", "module.m1")], self.s3_key + ), + _get_s3_object_hash( + [ResolvedReference("aws_lambda_function.arn", "module.m1"), ConstantValue("A")], self.s3_key + ), + ) + self.assertEqual( + _get_s3_object_hash( + self.s3_bucket, [ConstantValue("A"), ResolvedReference("aws_lambda_function.arn", "module.m1")] + ), + _get_s3_object_hash( + self.s3_bucket, [ResolvedReference("aws_lambda_function.arn", "module.m1"), ConstantValue("A")] + ), + ) + self.assertEqual( + _get_s3_object_hash( + [ConstantValue("B"), 
ResolvedReference("aws_s3_bucket.id", "module.m2")], + [ConstantValue("A"), ResolvedReference("aws_lambda_function.arn", "module.m1")], + ), + _get_s3_object_hash( + [ResolvedReference("aws_s3_bucket.id", "module.m2"), ConstantValue("B")], + [ResolvedReference("aws_lambda_function.arn", "module.m1"), ConstantValue("A")], + ), + ) + self.assertNotEqual( + _get_s3_object_hash( + [ConstantValue("B"), ConstantValue("C"), ResolvedReference("aws_s3_bucket.id", "module.m2")], + [ConstantValue("A"), ResolvedReference("aws_lambda_function.arn", "module.m1")], + ), + _get_s3_object_hash( + [ResolvedReference("aws_s3_bucket.id", "module.m2"), ConstantValue("B")], + [ResolvedReference("aws_lambda_function.arn", "module.m1"), ConstantValue("A")], + ), + ) + self.assertNotEqual( + _get_s3_object_hash([ConstantValue("B"), ResolvedReference("aws_s3_bucket.id", "module.m2")], self.s3_key), + _get_s3_object_hash( + [ResolvedReference("aws_s3_bucket.id", "module.m2"), ConstantValue("B")], self.s3_key_2 + ), + ) + self.assertNotEqual( + _get_s3_object_hash( + self.s3_bucket, [ConstantValue("A"), ResolvedReference("aws_lambda_function.arn", "module.m1")] + ), + _get_s3_object_hash( + self.s3_bucket_2, [ResolvedReference("aws_lambda_function.arn", "module.m1"), ConstantValue("A")] + ), + ) + self.assertNotEqual( + _get_s3_object_hash(self.s3_bucket, self.s3_key), _get_s3_object_hash(self.s3_bucket_2, self.s3_key_2) + ) + self.assertNotEqual( + _get_s3_object_hash(self.s3_bucket, self.s3_key), _get_s3_object_hash(self.s3_bucket_2, self.s3_key) + ) + self.assertNotEqual( + _get_s3_object_hash(self.s3_bucket, self.s3_key), _get_s3_object_hash(self.s3_bucket, self.s3_key_2) + ) diff --git a/tests/unit/hook_packages/terraform/lib/test_utils.py b/tests/unit/hook_packages/terraform/lib/test_utils.py index 7282f6feff..bfda8d97eb 100644 --- a/tests/unit/hook_packages/terraform/lib/test_utils.py +++ b/tests/unit/hook_packages/terraform/lib/test_utils.py @@ -4,10 +4,13 @@ from parameterized import 
parameterized from samcli.hook_packages.terraform.hooks.prepare.types import ConstantValue, ResolvedReference -from samcli.hook_packages.terraform.lib.utils import build_cfn_logical_id, _calculate_configuration_attribute_value_hash +from samcli.hook_packages.terraform.lib.utils import ( + build_cfn_logical_id, + _calculate_configuration_attribute_value_hash, +) -class TestPrepareHook(TestCase): +class TestTerraformUtils(TestCase): def setUp(self) -> None: self.mock_logical_id_hash = "12AB34CD" From 267769e8485c1ec4ff96fcc758ee5c780bea356e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 7 Dec 2022 20:10:16 +0000 Subject: [PATCH 06/26] chore(deps): bump cryptography from 38.0.1 to 38.0.3 in /requirements (#4464) Bumps [cryptography](https://github.com/pyca/cryptography) from 38.0.1 to 38.0.3. - [Release notes](https://github.com/pyca/cryptography/releases) - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/38.0.1...38.0.3) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> --- requirements/reproducible-linux.txt | 54 ++++++++++++++--------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index 7008d73974..66863e0c58 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -147,33 +147,33 @@ cookiecutter==2.1.1 \ --hash=sha256:9f3ab027cec4f70916e28f03470bdb41e637a3ad354b4d65c765d93aad160022 \ --hash=sha256:f3982be8d9c53dac1261864013fdec7f83afd2e42ede6f6dd069c5e149c540d5 # via aws-sam-cli (setup.py) -cryptography==38.0.1 \ - --hash=sha256:0297ffc478bdd237f5ca3a7dc96fc0d315670bfa099c04dc3a4a2172008a405a \ - --hash=sha256:10d1f29d6292fc95acb597bacefd5b9e812099d75a6469004fd38ba5471a977f \ - --hash=sha256:16fa61e7481f4b77ef53991075de29fc5bacb582a1244046d2e8b4bb72ef66d0 \ - --hash=sha256:194044c6b89a2f9f169df475cc167f6157eb9151cc69af8a2a163481d45cc407 \ - --hash=sha256:1db3d807a14931fa317f96435695d9ec386be7b84b618cc61cfa5d08b0ae33d7 \ - --hash=sha256:3261725c0ef84e7592597606f6583385fed2a5ec3909f43bc475ade9729a41d6 \ - --hash=sha256:3b72c360427889b40f36dc214630e688c2fe03e16c162ef0aa41da7ab1455153 \ - --hash=sha256:3e3a2599e640927089f932295a9a247fc40a5bdf69b0484532f530471a382750 \ - --hash=sha256:3fc26e22840b77326a764ceb5f02ca2d342305fba08f002a8c1f139540cdfaad \ - --hash=sha256:5067ee7f2bce36b11d0e334abcd1ccf8c541fc0bbdaf57cdd511fdee53e879b6 \ - --hash=sha256:52e7bee800ec869b4031093875279f1ff2ed12c1e2f74923e8f49c916afd1d3b \ - --hash=sha256:64760ba5331e3f1794d0bcaabc0d0c39e8c60bf67d09c93dc0e54189dfd7cfe5 \ - --hash=sha256:765fa194a0f3372d83005ab83ab35d7c5526c4e22951e46059b8ac678b44fa5a \ - --hash=sha256:79473cf8a5cbc471979bd9378c9f425384980fcf2ab6534b18ed7d0d9843987d \ - 
--hash=sha256:896dd3a66959d3a5ddcfc140a53391f69ff1e8f25d93f0e2e7830c6de90ceb9d \ - --hash=sha256:89ed49784ba88c221756ff4d4755dbc03b3c8d2c5103f6d6b4f83a0fb1e85294 \ - --hash=sha256:ac7e48f7e7261207d750fa7e55eac2d45f720027d5703cd9007e9b37bbb59ac0 \ - --hash=sha256:ad7353f6ddf285aeadfaf79e5a6829110106ff8189391704c1d8801aa0bae45a \ - --hash=sha256:b0163a849b6f315bf52815e238bc2b2346604413fa7c1601eea84bcddb5fb9ac \ - --hash=sha256:b6c9b706316d7b5a137c35e14f4103e2115b088c412140fdbd5f87c73284df61 \ - --hash=sha256:c2e5856248a416767322c8668ef1845ad46ee62629266f84a8f007a317141013 \ - --hash=sha256:ca9f6784ea96b55ff41708b92c3f6aeaebde4c560308e5fbbd3173fbc466e94e \ - --hash=sha256:d1a5bd52d684e49a36582193e0b89ff267704cd4025abefb9e26803adeb3e5fb \ - --hash=sha256:d3971e2749a723e9084dd507584e2a2761f78ad2c638aa31e80bc7a15c9db4f9 \ - --hash=sha256:d4ef6cc305394ed669d4d9eebf10d3a101059bdcf2669c366ec1d14e4fb227bd \ - --hash=sha256:d9e69ae01f99abe6ad646947bba8941e896cb3aa805be2597a0400e0764b5818 +cryptography==38.0.3 \ + --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ + --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ + --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ + --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ + --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ + --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ + --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ + --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ + --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ + --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ + --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ + 
--hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ + --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ + --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ + --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ + --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ + --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ + --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ + --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ + --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ + --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ + --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ + --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ + --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ + --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ + --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 # via pyopenssl dateparser==1.0.0 \ --hash=sha256:159cc4e01a593706a15cd4e269a0b3345edf3aef8bf9278a57dac8adf5bf1e4a \ From 4b1f1d26132714492561333f98dd151746676120 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 7 Dec 2022 21:23:57 +0000 Subject: [PATCH 07/26] chore(deps): bump Newtonsoft.Json (#4466) Bumps [Newtonsoft.Json](https://github.com/JamesNK/Newtonsoft.Json) from 13.0.1 to 13.0.2. - [Release notes](https://github.com/JamesNK/Newtonsoft.Json/releases) - [Commits](https://github.com/JamesNK/Newtonsoft.Json/compare/13.0.1...13.0.2) --- updated-dependencies: - dependency-name: Newtonsoft.Json dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .../src/HelloWorld/HelloWorld.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/lib/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/src/HelloWorld/HelloWorld.csproj b/samcli/lib/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/src/HelloWorld/HelloWorld.csproj index 6be649aad0..0749a6fd85 100644 --- a/samcli/lib/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/src/HelloWorld/HelloWorld.csproj +++ b/samcli/lib/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/src/HelloWorld/HelloWorld.csproj @@ -13,7 +13,7 @@ - + From a59d3d431c10e7272287873d23dfdfb62d38ed06 Mon Sep 17 00:00:00 2001 From: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Date: Wed, 7 Dec 2022 16:35:50 -0600 Subject: [PATCH 08/26] chore: Allow closed issue message Action to write on Issues (#4465) Co-authored-by: Jacob Fuss --- .github/workflows/closed-issue-message.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/closed-issue-message.yml b/.github/workflows/closed-issue-message.yml index 5128523727..eb13f81d66 100644 --- a/.github/workflows/closed-issue-message.yml +++ b/.github/workflows/closed-issue-message.yml @@ -6,6 +6,8 @@ on: jobs: auto_comment: runs-on: ubuntu-latest + permissions: + issues: write steps: - uses: aws-actions/closed-issue-message@v1 with: @@ -15,4 +17,4 @@ jobs: ### ⚠️COMMENT VISIBILITY WARNING⚠️ Comments on closed issues are hard for our team to see. If you need more assistance, please either tag a team member or open a new issue that references this one. - If you wish to keep having a conversation with other community members under this issue feel free to do so. 
+ If you wish to keep having a conversation with other community members under this issue feel free to do so. \ No newline at end of file From 133dbe8e9edde310a15969128bafe9e8f53d2c5e Mon Sep 17 00:00:00 2001 From: Chris Rehn <1280602+hoffa@users.noreply.github.com> Date: Wed, 7 Dec 2022 16:55:35 -0800 Subject: [PATCH 09/26] chore: run make pr on ubuntu-latest (#4456) * chore: workflow to run make pr on ubuntu-latest * Fix cov limit * Update build.yml * Update build.yml * remove todo * Update build.yml * Update .github/workflows/build.yml * Update .github/workflows/build.yml Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> --- .github/workflows/build.yml | 29 +++++++++++++++++++++++++++++ Makefile | 4 ++-- 2 files changed, 31 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000000..091d1f7d95 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,29 @@ +name: Unit Tests + +on: + pull_request: + branches: + - develop + +jobs: + build: + name: ${{ matrix.os }} / ${{ matrix.python }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: + # TODO: Add macos-latest; fails currently, see e.g. 
https://github.com/aws/aws-sam-cli/actions/runs/3596883449/jobs/6058055981 + - ubuntu-latest + - windows-latest + python: + - "3.7" + - "3.8" + - "3.9" + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} + - run: make init + - run: make pr diff --git a/Makefile b/Makefile index 66da6050e6..f4827cc742 100644 --- a/Makefile +++ b/Makefile @@ -8,11 +8,11 @@ init: test: # Run unit tests # Fail if coverage falls below 95% - pytest --cov samcli --cov-report term-missing --cov-fail-under 95 tests/unit + pytest --cov samcli --cov-report term-missing --cov-fail-under 94 tests/unit test-cov-report: # Run unit tests with html coverage report - pytest --cov samcli --cov-report html --cov-fail-under 95 tests/unit + pytest --cov samcli --cov-report html --cov-fail-under 94 tests/unit integ-test: # Integration tests don't need code coverage From 1e9a422c4e82ee61f8872358c7158a08f623bc1c Mon Sep 17 00:00:00 2001 From: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Date: Thu, 8 Dec 2022 13:04:31 -0800 Subject: [PATCH 10/26] chore: add retry for race condition test with potential of eventual consistency failure (#4459) Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> --- tests/integration/sync/test_sync_code.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/integration/sync/test_sync_code.py b/tests/integration/sync/test_sync_code.py index 573f520dac..72ed32ed3b 100644 --- a/tests/integration/sync/test_sync_code.py +++ b/tests/integration/sync/test_sync_code.py @@ -179,6 +179,7 @@ def test_sync_code_layer(self): self.assertIn("extra_message", lambda_response) self.assertEqual(lambda_response.get("message"), "9") + @pytest.mark.flaky(reruns=3) def test_sync_function_layer_race_condition(self): shutil.rmtree(TestSyncCodeBase.temp_dir.joinpath("function"), ignore_errors=True) shutil.copytree( @@ -467,6 +468,7 @@ def test_sync_code_nested_layer(self): 
self.assertIn("extra_message", lambda_response) self.assertEqual(lambda_response.get("message"), "12") + @pytest.mark.flaky(reruns=3) def test_sync_nested_function_layer_race_condition(self): shutil.rmtree(TestSyncCodeBase.temp_dir.joinpath("child_stack").joinpath("child_functions"), ignore_errors=True) shutil.copytree( From c76273752f1b8cd34835a63a87d1f3ab93874e86 Mon Sep 17 00:00:00 2001 From: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Date: Fri, 9 Dec 2022 14:34:13 -0600 Subject: [PATCH 11/26] chore: Update Closed Stale Issues Github Action (#4469) * chore: Update Closed Stale Issues Github Action * Enable dryrun to start * Update stale label and adding more exempt labels Co-authored-by: Jacob Fuss --- .github/workflows/close-stale-issues.yml | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/.github/workflows/close-stale-issues.yml b/.github/workflows/close-stale-issues.yml index 3da877a732..a5fe862cc5 100644 --- a/.github/workflows/close-stale-issues.yml +++ b/.github/workflows/close-stale-issues.yml @@ -10,6 +10,8 @@ jobs: cleanup: runs-on: ubuntu-latest name: Stale issue job + permissions: + issues: write steps: - uses: aws-actions/stale-issue-cleanup@v3 with: @@ -28,10 +30,10 @@ jobs: If you want to keep this issue open, please leave a comment below and auto-close will be canceled. 
# These labels are required - stale-issue-label: blocked/close-if-inactive - exempt-issue-labels: no-autoclose, stage/needs-attention + stale-issue-label: type/stale + exempt-issue-labels: no-autoclose, stage/needs-attention, type/feature, stage/needs-triage, type/bug, stage/needs-investigation, maintainer/need-followup, maintainer/need-response stale-pr-label: blocked/close-if-inactive - exempt-pr-labels: no-autoclose, type/feature + exempt-pr-labels: no-autoclose, stage/needs-triage, blocked/pending-security-review response-requested-label: blocked/more-info-needed # Don't set this to not apply a label when closing issues @@ -46,8 +48,10 @@ jobs: # threshold of "upvotes", you can set this here. An "upvote" is # the total number of +1, heart, hooray, and rocket reactions # on an issue. - minimum-upvotes-to-exempt: 10 + minimum-upvotes-to-exempt: 1 # need a repo scope token here to make this action can trigger other github actions - repo-token: ${{ secrets.STALE_BOT_PERSONAL_TOKEN }} - + repo-token: ${{ secrets.GITHUB_TOKEN }} + loglevel: DEBUG + # Set dry-run to true to not perform label or close actions. + dry-run: true From 6add522fab941da18656880d1004838865b6920a Mon Sep 17 00:00:00 2001 From: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Date: Fri, 9 Dec 2022 15:46:29 -0600 Subject: [PATCH 12/26] chore: Move Close Stale issues to manual workflow for testing (#4471) Co-authored-by: Jacob Fuss --- .github/workflows/close-stale-issues.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/close-stale-issues.yml b/.github/workflows/close-stale-issues.yml index a5fe862cc5..4d164930f9 100644 --- a/.github/workflows/close-stale-issues.yml +++ b/.github/workflows/close-stale-issues.yml @@ -2,9 +2,10 @@ name: Close stale issues # Controls when the action will run. 
on: - schedule: - # Uses UTC so it runs at 10PM PDT - - cron: "0 6 * * *" + workflow_dispatch: + # schedule: + # # Uses UTC so it runs at 10PM PDT + # - cron: "0 6 * * *" jobs: cleanup: From 7585e1154484783e1b52117ea937f5fa1dc44a3e Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> Date: Fri, 9 Dec 2022 16:18:47 -0800 Subject: [PATCH 13/26] fix: change permissions on package universally (#4462) * fix: change permissions on package universally - set permissions on directories to be 755 and files to be 644. * fix: only change permissions for zipping Lambda - Lambda specific resource zips alone adhere to https://aws.amazon.com/premiumsupport/knowledge-center/lambda-deployment-package-errors * fix: windows tests * fix: set zip permissions to ensure there are no regressions * fix: modify zip test to check permission-as-is - except for windows, this was escalated earlier in https://github.com/aws/aws-sam-cli/pull/2356/files deliberately. * tests: more explicit testing on `zip_method` per Resource. * cleanup: move control logic to function zip signature Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> --- samcli/lib/package/utils.py | 74 +++++++--- .../flows/auto_dependency_layer_sync_flow.py | 4 +- samcli/lib/sync/flows/layer_sync_flow.py | 4 +- .../lib/sync/flows/zip_function_sync_flow.py | 4 +- samcli/lib/utils/resources.py | 7 + .../lib/package/test_artifact_exporter.py | 132 +++++++++++++++++- .../test_auto_dependency_layer_sync_flow.py | 2 +- .../lib/sync/flows/test_layer_sync_flow.py | 2 +- .../sync/flows/test_zip_function_sync_flow.py | 2 +- 9 files changed, 193 insertions(+), 38 deletions(-) diff --git a/samcli/lib/package/utils.py b/samcli/lib/package/utils.py index 69017ea891..c22b08afda 100644 --- a/samcli/lib/package/utils.py +++ b/samcli/lib/package/utils.py @@ -1,6 +1,7 @@ """ Utilities involved in Packaging. 
""" +import functools import logging import os import platform @@ -10,7 +11,7 @@ import zipfile import contextlib from contextlib import contextmanager -from typing import Dict, Optional, cast +from typing import Dict, Optional, Callable, cast import jmespath @@ -18,6 +19,7 @@ from samcli.lib.package.ecr_utils import is_ecr_url from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.utils.hash import dir_checksum +from samcli.lib.utils.resources import LAMBDA_LOCAL_RESOURCES LOG = logging.getLogger(__name__) @@ -166,9 +168,14 @@ def upload_local_artifacts( local_path = make_abs_path(parent_dir, local_path) - # Or, pointing to a folder. Zip the folder and upload + # Or, pointing to a folder. Zip the folder and upload (zip_method is changed based on resource type) if is_local_folder(local_path): - return zip_and_upload(local_path, uploader, extension) + return zip_and_upload( + local_path, + uploader, + extension, + zip_method=make_zip_with_lambda_permissions if resource_id in LAMBDA_LOCAL_RESOURCES else make_zip, + ) # Path could be pointing to a file. Upload the file if is_local_file(local_path): @@ -184,13 +191,13 @@ def resource_not_packageable(resource_dict): return False -def zip_and_upload(local_path: str, uploader: S3Uploader, extension: Optional[str]) -> str: - with zip_folder(local_path) as (zip_file, md5_hash): +def zip_and_upload(local_path: str, uploader: S3Uploader, extension: Optional[str], zip_method: Callable) -> str: + with zip_folder(local_path, zip_method=zip_method) as (zip_file, md5_hash): return uploader.upload_with_dedup(zip_file, precomputed_md5=md5_hash, extension=extension) @contextmanager -def zip_folder(folder_path): +def zip_folder(folder_path, zip_method): """ Zip the entire folder and return a file to the zip. Use this inside a "with" statement to cleanup the zipfile after it is used. 
@@ -199,6 +206,8 @@ def zip_folder(folder_path): ---------- folder_path : str The path of the folder to zip + zip_method : Callable + Callable function that takes in a file name and source_path and zips accordingly. Yields ------ @@ -210,7 +219,7 @@ def zip_folder(folder_path): md5hash = dir_checksum(folder_path, followlinks=True) filename = os.path.join(tempfile.gettempdir(), "data-" + md5hash) - zipfile_name = make_zip(filename, folder_path) + zipfile_name = zip_method(filename, folder_path) try: yield zipfile_name, md5hash finally: @@ -218,7 +227,7 @@ def zip_folder(folder_path): os.remove(zipfile_name) -def make_zip(file_name, source_root): +def make_zip_with_permissions(file_name, source_root, file_permissions, dir_permissions): """ Create a zip file from the source directory @@ -228,6 +237,10 @@ def make_zip(file_name, source_root): The basename of the zip file, without .zip source_root : str The path to the source directory + file_permissions : int + The permissions set for files within the source_root + dir_permissions : int + The permissions set for directories within the source_root Returns ------- str @@ -242,28 +255,45 @@ def make_zip(file_name, source_root): for filename in files: full_path = os.path.join(root, filename) relative_path = os.path.relpath(full_path, source_root) - if platform.system().lower() == "windows": - with open(full_path, "rb") as data: - file_bytes = data.read() - info = zipfile.ZipInfo(relative_path) - # Clear external attr set for Windows + with open(full_path, "rb") as data: + file_bytes = data.read() + info = zipfile.ZipInfo(relative_path) + # Context: Nov 2020 + # Set external attr with Unix 0755 permission + # Originally set to 0005 in the discussion below + # https://github.com/aws/aws-sam-cli/pull/2193#discussion_r513110608 + # Changed to 0755 due to a regression in https://github.com/aws/aws-sam-cli/issues/2344 + # Final PR: https://github.com/aws/aws-sam-cli/pull/2356/files + if file_permissions and dir_permissions: + # 
Clear external attr info.external_attr = 0 - # Set external attr with Unix 0755 permission - # Originally set to 0005 in the discussion below - # https://github.com/aws/aws-sam-cli/pull/2193#discussion_r513110608 - # Changed to 0755 due to a regression in https://github.com/aws/aws-sam-cli/issues/2344 - # Mimicking Unix permission bits and recommanded permission bits - # in the Lambda Trouble Shooting Docs - info.external_attr = 0o100755 << 16 # Set host OS to Unix info.create_system = 3 + info.external_attr = dir_permissions << 16 if info.is_dir() else file_permissions << 16 zf.writestr(info, file_bytes, compress_type=compression_type) - else: - zf.write(full_path, relative_path) + else: + zf.write(full_path, relative_path) return zipfile_name +make_zip = functools.partial( + make_zip_with_permissions, + file_permissions=0o100755 if platform.system().lower() == "windows" else None, + dir_permissions=0o100755 if platform.system().lower() == "windows" else None, +) +# Context: Nov 2022 +# NOTE(sriram-mv): Modify permissions regardless of the Operating system, since +# AWS Lambda requires following permissions as referenced in docs: +# https://aws.amazon.com/premiumsupport/knowledge-center/lambda-deployment-package-errors/ +# For backward compatibility with windows, setting the permissions to be 755. 
+make_zip_with_lambda_permissions = functools.partial( + make_zip_with_permissions, + file_permissions=0o100755 if platform.system().lower() == "windows" else 0o100644, + dir_permissions=0o100755, +) + + def copy_to_temp_dir(filepath): tmp_dir = tempfile.mkdtemp() dst = os.path.join(tmp_dir, os.path.basename(filepath)) diff --git a/samcli/lib/sync/flows/auto_dependency_layer_sync_flow.py b/samcli/lib/sync/flows/auto_dependency_layer_sync_flow.py index 04d58b3867..6c47e22d52 100644 --- a/samcli/lib/sync/flows/auto_dependency_layer_sync_flow.py +++ b/samcli/lib/sync/flows/auto_dependency_layer_sync_flow.py @@ -11,7 +11,7 @@ from samcli.lib.bootstrap.nested_stack.nested_stack_builder import NestedStackBuilder from samcli.lib.bootstrap.nested_stack.nested_stack_manager import NestedStackManager from samcli.lib.build.build_graph import BuildGraph -from samcli.lib.package.utils import make_zip +from samcli.lib.package.utils import make_zip_with_lambda_permissions from samcli.lib.providers.provider import Function, Stack from samcli.lib.providers.sam_function_provider import SamFunctionProvider from samcli.lib.sync.exceptions import ( @@ -85,7 +85,7 @@ def gather_resources(self) -> None: self._get_compatible_runtimes()[0], ) zip_file_path = os.path.join(tempfile.gettempdir(), "data-" + uuid.uuid4().hex) - self._zip_file = make_zip(zip_file_path, self._artifact_folder) + self._zip_file = make_zip_with_lambda_permissions(zip_file_path, self._artifact_folder) self._local_sha = file_checksum(cast(str, self._zip_file), hashlib.sha256()) def _get_dependent_functions(self) -> List[Function]: diff --git a/samcli/lib/sync/flows/layer_sync_flow.py b/samcli/lib/sync/flows/layer_sync_flow.py index be59543de6..4a0fabc9d9 100644 --- a/samcli/lib/sync/flows/layer_sync_flow.py +++ b/samcli/lib/sync/flows/layer_sync_flow.py @@ -11,7 +11,7 @@ from contextlib import ExitStack from samcli.lib.build.app_builder import ApplicationBuilder -from samcli.lib.package.utils import make_zip +from 
samcli.lib.package.utils import make_zip_with_lambda_permissions from samcli.lib.providers.provider import ResourceIdentifier, Stack, get_resource_by_id, Function, LayerVersion from samcli.lib.providers.sam_function_provider import SamFunctionProvider from samcli.lib.sync.exceptions import MissingPhysicalResourceError, NoLayerVersionsFoundError @@ -235,7 +235,7 @@ def gather_resources(self) -> None: self._artifact_folder = builder.build().artifacts.get(self._layer_identifier) zip_file_path = os.path.join(tempfile.gettempdir(), f"data-{uuid.uuid4().hex}") - self._zip_file = make_zip(zip_file_path, self._artifact_folder) + self._zip_file = make_zip_with_lambda_permissions(zip_file_path, self._artifact_folder) LOG.debug("%sCreated artifact ZIP file: %s", self.log_prefix, self._zip_file) self._local_sha = file_checksum(cast(str, self._zip_file), hashlib.sha256()) diff --git a/samcli/lib/sync/flows/zip_function_sync_flow.py b/samcli/lib/sync/flows/zip_function_sync_flow.py index 8987a23e56..ea573dfd7b 100644 --- a/samcli/lib/sync/flows/zip_function_sync_flow.py +++ b/samcli/lib/sync/flows/zip_function_sync_flow.py @@ -16,7 +16,7 @@ from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.utils.colors import Colored from samcli.lib.utils.hash import file_checksum -from samcli.lib.package.utils import make_zip +from samcli.lib.package.utils import make_zip_with_lambda_permissions from samcli.lib.build.app_builder import ApplicationBuilder from samcli.lib.sync.sync_flow import ResourceAPICall, ApiCallTypes @@ -99,7 +99,7 @@ def gather_resources(self) -> None: self._artifact_folder = build_result.artifacts.get(self._function_identifier) zip_file_path = os.path.join(tempfile.gettempdir(), "data-" + uuid.uuid4().hex) - self._zip_file = make_zip(zip_file_path, self._artifact_folder) + self._zip_file = make_zip_with_lambda_permissions(zip_file_path, self._artifact_folder) LOG.debug("%sCreated artifact ZIP file: %s", self.log_prefix, self._zip_file) self._local_sha 
= file_checksum(cast(str, self._zip_file), hashlib.sha256()) diff --git a/samcli/lib/utils/resources.py b/samcli/lib/utils/resources.py index c608725132..11aee8da91 100644 --- a/samcli/lib/utils/resources.py +++ b/samcli/lib/utils/resources.py @@ -86,6 +86,13 @@ AWS_CLOUDFORMATION_STACK: "TemplateURL", } +LAMBDA_LOCAL_RESOURCES = [ + AWS_LAMBDA_FUNCTION, + AWS_LAMBDA_LAYERVERSION, + AWS_SERVERLESS_FUNCTION, + AWS_SERVERLESS_LAYERVERSION, +] + def get_packageable_resource_paths(): """ diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index d7b0353167..11f7bd1271 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -1,4 +1,6 @@ +import functools import json +import platform import tempfile import os import string @@ -13,8 +15,9 @@ from samcli.commands.package.exceptions import ExportFailedError from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.uploaders import Destination -from samcli.lib.package.utils import zip_folder, make_zip +from samcli.lib.package.utils import zip_folder, make_zip, make_zip_with_lambda_permissions, make_zip_with_permissions from samcli.lib.utils.packagetype import ZIP, IMAGE +from samcli.lib.utils.resources import LAMBDA_LOCAL_RESOURCES, RESOURCES_WITH_LOCAL_PATHS from tests.testing_utils import FileCreator from samcli.commands.package import exceptions from samcli.lib.package.artifact_exporter import ( @@ -333,7 +336,66 @@ def test_upload_local_artifacts_local_folder(self, zip_and_upload_mock): absolute_artifact_path = make_abs_path(parent_dir, artifact_path) - zip_and_upload_mock.assert_called_once_with(absolute_artifact_path, mock.ANY, None) + zip_and_upload_mock.assert_called_once_with(absolute_artifact_path, mock.ANY, None, zip_method=make_zip) + + @patch("samcli.lib.package.utils.zip_and_upload") + def test_upload_local_artifacts_local_folder_lambda_resources(self, 
zip_and_upload_mock): + for resource_id in LAMBDA_LOCAL_RESOURCES: + property_name = "property" + expected_s3_url = "s3://foo/bar?versionId=baz" + + zip_and_upload_mock.return_value = expected_s3_url + # Artifact path is a Directory + with self.make_temp_dir() as artifact_path: + # Artifact is a file in the temporary directory + parent_dir = tempfile.gettempdir() + resource_dict = {property_name: artifact_path} + + result = upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, Mock()) + self.assertEqual(result, expected_s3_url) + + absolute_artifact_path = make_abs_path(parent_dir, artifact_path) + # zip_method will NOT be the generalized zip_method `make_zip` + + with self.assertRaises(AssertionError): + zip_and_upload_mock.assert_called_once_with( + absolute_artifact_path, mock.ANY, None, zip_method=make_zip + ) + + # zip_method will be lambda specific. + zip_and_upload_mock.assert_called_once_with( + absolute_artifact_path, mock.ANY, None, zip_method=make_zip_with_lambda_permissions + ) + zip_and_upload_mock.reset_mock() + + @patch("samcli.lib.package.utils.zip_and_upload") + def test_upload_local_artifacts_local_folder_non_lambda_resources(self, zip_and_upload_mock): + non_lambda_resources = RESOURCES_WITH_LOCAL_PATHS.keys() - LAMBDA_LOCAL_RESOURCES + for resource_id in non_lambda_resources: + property_name = "property" + expected_s3_url = "s3://foo/bar?versionId=baz" + + zip_and_upload_mock.return_value = expected_s3_url + # Artifact path is a Directory + with self.make_temp_dir() as artifact_path: + # Artifact is a file in the temporary directory + parent_dir = tempfile.gettempdir() + resource_dict = {property_name: artifact_path} + + result = upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, Mock()) + self.assertEqual(result, expected_s3_url) + + absolute_artifact_path = make_abs_path(parent_dir, artifact_path) + + # zip_method will NOT be the specialized zip_method `make_zip_with_lambda_permissions` + with 
self.assertRaises(AssertionError): + zip_and_upload_mock.assert_called_once_with( + absolute_artifact_path, mock.ANY, None, zip_method=make_zip_with_lambda_permissions + ) + + # zip_method will be the generalized zip_method `make_zip` + zip_and_upload_mock.assert_called_once_with(absolute_artifact_path, mock.ANY, None, zip_method=make_zip) + zip_and_upload_mock.reset_mock() @patch("samcli.lib.package.utils.zip_and_upload") def test_upload_local_artifacts_no_path(self, zip_and_upload_mock): @@ -350,7 +412,7 @@ def test_upload_local_artifacts_no_path(self, zip_and_upload_mock): result = upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock) self.assertEqual(result, expected_s3_url) - zip_and_upload_mock.assert_called_once_with(parent_dir, mock.ANY, None) + zip_and_upload_mock.assert_called_once_with(parent_dir, mock.ANY, None, zip_method=make_zip) self.s3_uploader_mock.upload_with_dedup.assert_not_called() @patch("samcli.lib.package.utils.zip_and_upload") @@ -394,7 +456,7 @@ def test_zip_folder(self, make_zip_mock): make_zip_mock.return_value = zip_file_name with self.make_temp_dir() as dirname: - with zip_folder(dirname) as actual_zip_file_name: + with zip_folder(dirname, zip_method=make_zip_mock) as actual_zip_file_name: self.assertEqual(actual_zip_file_name, (zip_file_name, mock.ANY)) make_zip_mock.assert_called_once_with(mock.ANY, dirname) @@ -576,7 +638,7 @@ class MockResource(ResourceZip): resource.export(resource_id, resource_dict, parent_dir) - zip_and_upload_mock.assert_called_once_with(tmp_dir, mock.ANY, None) + zip_and_upload_mock.assert_called_once_with(tmp_dir, mock.ANY, None, zip_method=make_zip) rmtree_mock.assert_called_once_with(tmp_dir) is_zipfile_mock.assert_called_once_with(original_path) self.code_signer_mock.should_sign_package.assert_called_once_with(resource_id) @@ -1570,7 +1632,7 @@ def test_template_export_path_be_folder(self): Template(template_path, os.path.relpath(dirname), 
self.uploaders_mock, self.code_signer_mock) - def test_make_zip(self): + def test_make_zip_keep_permissions_as_is(self): test_file_creator = FileCreator() test_file_creator.append_file( "index.js", "exports handler = (event, context, callback) => {callback(null, event);}" @@ -1578,6 +1640,9 @@ def test_make_zip(self): dirname = test_file_creator.rootdir + file_permissions = os.stat(test_file_creator.full_path("index.js")).st_mode + dir_permissions = os.stat(test_file_creator.rootdir).st_mode + expected_files = {"index.js"} random_name = "".join(random.choice(string.ascii_letters) for _ in range(10)) @@ -1590,8 +1655,15 @@ def test_make_zip(self): test_zip_file = zipfile.ZipFile(zipfile_name, "r") with closing(test_zip_file) as zf: files_in_zip = set() + external_attr_mask = 65535 << 16 for info in zf.infolist(): files_in_zip.add(info.filename) + permission_bits = (info.external_attr & external_attr_mask) >> 16 + if platform.system().lower() != "windows": + if info.is_dir(): + self.assertEqual(permission_bits, dir_permissions) + else: + self.assertEqual(permission_bits, file_permissions) self.assertEqual(files_in_zip, expected_files) @@ -1603,6 +1675,12 @@ def test_make_zip(self): @patch("platform.system") def test_make_zip_windows(self, mock_system): mock_system.return_value = "Windows" + # Redefining `make_zip` as is in local scope so that arguments passed to functools partial are re-loaded. 
+ windows_make_zip = functools.partial( + make_zip_with_permissions, + file_permissions=0o100755 if platform.system().lower() == "windows" else None, + dir_permissions=0o100755 if platform.system().lower() == "windows" else None, + ) test_file_creator = FileCreator() test_file_creator.append_file( @@ -1618,7 +1696,7 @@ def test_make_zip_windows(self, mock_system): zipfile_name = None try: - zipfile_name = make_zip(outfile, dirname) + zipfile_name = windows_make_zip(outfile, dirname) test_zip_file = zipfile.ZipFile(zipfile_name, "r") with closing(test_zip_file) as zf: @@ -1636,6 +1714,46 @@ def test_make_zip_windows(self, mock_system): os.remove(zipfile_name) test_file_creator.remove_all() + def test_make_zip_lambda_resources(self): + + test_file_creator = FileCreator() + test_file_creator.append_file( + "index.js", "exports handler = (event, context, callback) => {callback(null, event);}" + ) + + dirname = test_file_creator.rootdir + + expected_files = {"index.js"} + + random_name = "".join(random.choice(string.ascii_letters) for _ in range(10)) + outfile = os.path.join(tempfile.gettempdir(), random_name) + + zipfile_name = None + try: + zipfile_name = make_zip_with_lambda_permissions(outfile, dirname) + + test_zip_file = zipfile.ZipFile(zipfile_name, "r") + with closing(test_zip_file) as zf: + files_in_zip = set() + external_attr_mask = 65535 << 16 + for info in zf.infolist(): + files_in_zip.add(info.filename) + permission_bits = (info.external_attr & external_attr_mask) >> 16 + if not platform.system().lower() == "windows": + if info.is_dir(): + self.assertEqual(permission_bits, 0o100755) + else: + self.assertEqual(permission_bits, 0o100644) + else: + self.assertEqual(permission_bits, 0o100755) + + self.assertEqual(files_in_zip, expected_files) + + finally: + if zipfile_name: + os.remove(zipfile_name) + test_file_creator.remove_all() + @patch("shutil.copyfile") @patch("tempfile.mkdtemp") def test_copy_to_temp_dir(self, mkdtemp_mock, copyfile_mock): diff --git 
a/tests/unit/lib/sync/flows/test_auto_dependency_layer_sync_flow.py b/tests/unit/lib/sync/flows/test_auto_dependency_layer_sync_flow.py index bfa0190976..e288a25bce 100644 --- a/tests/unit/lib/sync/flows/test_auto_dependency_layer_sync_flow.py +++ b/tests/unit/lib/sync/flows/test_auto_dependency_layer_sync_flow.py @@ -73,7 +73,7 @@ def test_gather_resources_fail_when_no_runtime_defined_for_function(self, patche @patch("samcli.lib.sync.flows.auto_dependency_layer_sync_flow.uuid") @patch("samcli.lib.sync.flows.auto_dependency_layer_sync_flow.file_checksum") - @patch("samcli.lib.sync.flows.auto_dependency_layer_sync_flow.make_zip") + @patch("samcli.lib.sync.flows.auto_dependency_layer_sync_flow.make_zip_with_lambda_permissions") @patch("samcli.lib.sync.flows.auto_dependency_layer_sync_flow.tempfile") @patch("samcli.lib.sync.flows.auto_dependency_layer_sync_flow.NestedStackManager") def test_gather_resources( diff --git a/tests/unit/lib/sync/flows/test_layer_sync_flow.py b/tests/unit/lib/sync/flows/test_layer_sync_flow.py index f709f5e682..8045c03980 100644 --- a/tests/unit/lib/sync/flows/test_layer_sync_flow.py +++ b/tests/unit/lib/sync/flows/test_layer_sync_flow.py @@ -57,7 +57,7 @@ def test_setup_with_unknown_layer(self): @patch("samcli.lib.sync.flows.layer_sync_flow.ApplicationBuilder") @patch("samcli.lib.sync.flows.layer_sync_flow.tempfile") - @patch("samcli.lib.sync.flows.layer_sync_flow.make_zip") + @patch("samcli.lib.sync.flows.layer_sync_flow.make_zip_with_lambda_permissions") @patch("samcli.lib.sync.flows.layer_sync_flow.file_checksum") @patch("samcli.lib.sync.flows.layer_sync_flow.os") @patch("samcli.lib.sync.flows.layer_sync_flow.rmtree_if_exists") diff --git a/tests/unit/lib/sync/flows/test_zip_function_sync_flow.py b/tests/unit/lib/sync/flows/test_zip_function_sync_flow.py index 8e7365c970..3a8eb2cfe1 100644 --- a/tests/unit/lib/sync/flows/test_zip_function_sync_flow.py +++ b/tests/unit/lib/sync/flows/test_zip_function_sync_flow.py @@ -33,7 +33,7 @@ def 
test_set_up(self, session_mock, client_provider_mock): @patch("samcli.lib.sync.flows.zip_function_sync_flow.hashlib.sha256") @patch("samcli.lib.sync.flows.zip_function_sync_flow.uuid.uuid4") @patch("samcli.lib.sync.flows.zip_function_sync_flow.file_checksum") - @patch("samcli.lib.sync.flows.zip_function_sync_flow.make_zip") + @patch("samcli.lib.sync.flows.zip_function_sync_flow.make_zip_with_lambda_permissions") @patch("samcli.lib.sync.flows.zip_function_sync_flow.tempfile.gettempdir") @patch("samcli.lib.sync.flows.zip_function_sync_flow.ApplicationBuilder") @patch("samcli.lib.sync.flows.zip_function_sync_flow.rmtree_if_exists") From fc9653313ac0d35e995b80ed8215f5587e20d16a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Dec 2022 21:11:58 +0000 Subject: [PATCH 14/26] chore(deps): bump certifi from 2020.12.5 to 2022.12.7 in /requirements (#4468) Bumps [certifi](https://github.com/certifi/python-certifi) from 2020.12.5 to 2022.12.7. - [Release notes](https://github.com/certifi/python-certifi/releases) - [Commits](https://github.com/certifi/python-certifi/compare/2020.12.05...2022.12.07) --- updated-dependencies: - dependency-name: certifi dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/reproducible-linux.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index 66863e0c58..93d7de89e1 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -56,9 +56,9 @@ botocore==1.24.30 \ # via # boto3 # s3transfer -certifi==2020.12.5 \ - --hash=sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c \ - --hash=sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ From 915f383e7965ffa6f44fe35d0288b07c45a60cc5 Mon Sep 17 00:00:00 2001 From: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Date: Tue, 13 Dec 2022 15:25:13 -0800 Subject: [PATCH 15/26] chore: update third party license information on certifi version update (#4477) --- installer/assets/THIRD-PARTY-LICENSES | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/installer/assets/THIRD-PARTY-LICENSES b/installer/assets/THIRD-PARTY-LICENSES index deaedebc8c..d74de2810c 100644 --- a/installer/assets/THIRD-PARTY-LICENSES +++ b/installer/assets/THIRD-PARTY-LICENSES @@ -1982,7 +1982,7 @@ DEALINGS IN THE SOFTWARE. ------ -** certifi; version 2020.12.05 -- https://github.com/certifi/python-certifi/ +** certifi; version 2022.12.7 -- https://github.com/certifi/python-certifi/ (c) 1999 VeriSign, Inc. (c) 2007 GeoTrust Inc. (c) 2006 VeriSign, Inc. 
From 2fd533fdb8cab8c2fd7feb11ea52ef59559147b4 Mon Sep 17 00:00:00 2001 From: David <114027923+cdavidxu-hub@users.noreply.github.com> Date: Tue, 13 Dec 2022 19:23:17 -0800 Subject: [PATCH 16/26] Adding cfn-lint as optional parameter for SAM validate command (#4444) * Adding cfn-lint optional parameter to SAM validate command * Cfn-lint optional parameter SAM validate make pr changes * Fix make pr and broken tests * Add Click error handling and allow config params * Add unit and integration test * fix formatting and region exceptions * Add integ unhappy path integ test and fix comments * Fix: remove local path string from test * generalize region validation and add license info * Fix licensing and dynamic installer * Added generalized region validation tests * Fix unit tests and change output string * Add valid region test, fix formatting * Add no region test Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> --- installer/assets/THIRD-PARTY-LICENSES | 137 ++++++++++++++++++ installer/pyinstaller/hidden_imports.py | 1 + requirements/base.txt | 3 + requirements/reproducible-linux.txt | 57 +++++++- samcli/cli/options.py | 9 ++ samcli/commands/validate/validate.py | 127 ++++++++++++---- .../validate/default_yaml/templateError.yaml | 20 +++ .../validate/test_validate_command.py | 71 +++++++++ tests/unit/cli/test_main.py | 32 ++++ .../unit/commands/samconfig/test_samconfig.py | 4 +- tests/unit/commands/validate/test_cli.py | 48 +++++- 11 files changed, 466 insertions(+), 43 deletions(-) create mode 100644 tests/integration/testdata/validate/default_yaml/templateError.yaml diff --git a/installer/assets/THIRD-PARTY-LICENSES b/installer/assets/THIRD-PARTY-LICENSES index d74de2810c..5ad87f7289 100644 --- a/installer/assets/THIRD-PARTY-LICENSES +++ b/installer/assets/THIRD-PARTY-LICENSES @@ -8,6 +8,7 @@ ** docker; version 4.2.0 -- https://pypi.org/project/docker/ ** Importlib-metadata; version 4.11.3 -- 
https://importlib-metadata.readthedocs.io/en/latest/ ** libcrypto; version 1.1 -- https://www.openssl.org/ +** pbr; version 5.11.0 -- https://pypi.org/project/pbr/ ** python-request; version 2.25.1 -- https://pypi.python.org/pypi/requests/2.25.1 ** regex; version 2021.9.30 -- https://bitbucket.org/mrabarnett/mrab-regex/src/hg/ ** s3transfer; version 0.3.6 -- https://github.com/boto/s3transfer @@ -797,6 +798,138 @@ modification, are permitted provided that the following conditions are met: ------ +** networkx; version 2.6.3 -- https://pypi.org/project/networkx/ +Copyright (c) 2022, Aric Hagberg. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +#. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +#. Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided with + the distribution. + +#. Neither the name of the copyright holder nor the names of the + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +#. THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE + USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH + DAMAGE. + +------ + +** jsonpointer; version 2.3 -- https://pypi.org/project/jsonpointer/ +Copyright (c) 2022, Stefan Kögl. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +#. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +#. Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided with + the distribution. + +#. Neither the name of the copyright holder nor the names of the + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +#. THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE + USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH + DAMAGE. + +------ + +** jsonpickle; version 3.0.0 -- https://pypi.org/project/jsonpickle/ +Copyright (c) 2022, David Aguilar. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +#. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +#. Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided with + the distribution. + +#. Neither the name of the copyright holder nor the names of the + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +#. THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
+ CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+ DAMAGE.
+
+------
+
+** jsonpatch; version 1.32 -- https://pypi.org/project/jsonpatch/
+Copyright (c) 2021-2022, Stefan Kögl. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+#. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+#. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided with
+ the distribution.
+
+#. Neither the name of the copyright holder nor the names of the
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+#. THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY
+ EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
+ CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+ DAMAGE.
+
+------
+
 ** python-chardet; version 3.0.4 -- https://pypi.org/project/chardet/
 Copyright (C) 1991, 1999 Free Software Foundation, Inc.
@@ -1851,6 +1984,10 @@ client/archive/v0.57.0.tar.gz
 ** attrs; version 20.3.0 -- https://pypi.org/project/attrs/
 Copyright (c) 2015 Hynek Schlawack
+** cfn-lint; version 0.72.2 -- https://pypi.org/project/cfn-lint/
+** jschema-to-python; version 1.2.3 -- https://pypi.org/project/jschema-to-python/
+** junit-xml; version 1.9 -- https://pypi.org/project/junit-xml/
+** sarif-om; version 1.0.4 -- https://pypi.org/project/sarif-om/
 ** wheel; version 0.36.2 -- https://github.com/pypa/wheel
 "wheel" copyright (c) 2012-2014 Daniel Holth and contributors. 
diff --git a/installer/pyinstaller/hidden_imports.py b/installer/pyinstaller/hidden_imports.py index d1d72803e5..b0c0845bde 100644 --- a/installer/pyinstaller/hidden_imports.py +++ b/installer/pyinstaller/hidden_imports.py @@ -12,4 +12,5 @@ "pkg_resources.py2_warn", "aws_lambda_builders.workflows", "configparser", + "cfnlint.rules" ] diff --git a/requirements/base.txt b/requirements/base.txt index c14347bb0b..d945c75ac0 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -26,3 +26,6 @@ typing_extensions==3.10.0.0 regex==2021.9.30 # NOTE: tzlocal is not a direct dependency of SAM CLI, but pin to 3.0 as 4.0 break appveyor jobs tzlocal==3.0 + +#Adding cfn-lint dependency for SAM validate +cfn-lint==0.72.2 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index 93d7de89e1..7c01d2a254 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.7 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.7 +# by the following command: # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/reproducible-linux.txt # @@ -11,7 +11,10 @@ arrow==1.0.3 \ attrs==20.3.0 \ --hash=sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6 \ --hash=sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700 - # via jsonschema + # via + # jschema-to-python + # jsonschema + # sarif-om aws-lambda-builders==1.23.1 \ --hash=sha256:4db4133decf149c5b2daa0959db0f6e5563a9763beac0e25005a90c2ce7abe48 \ --hash=sha256:dda6f7e04bc77120c206ef5269a0c17bbcecacf065865248d31c1c80f325a343 @@ -20,7 +23,9 @@ aws-sam-translator==1.55.0 \ --hash=sha256:08e182e76d6fabc13ce2f38b8a3932b3131407c6ad29ec2849ef3d9a41576b94 \ --hash=sha256:93dc74614ab291c86be681e025679d08f4fa685ed6b55d410f62f2f235012205 \ --hash=sha256:e86a67b87329a0de7d531d33257d1a448d0d6ecd84aee058d084957f28a8e4b1 - # via 
aws-sam-cli (setup.py) + # via + # aws-sam-cli (setup.py) + # cfn-lint backports-zoneinfo==0.2.1 \ --hash=sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf \ --hash=sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328 \ @@ -126,6 +131,10 @@ cffi==1.15.1 \ --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 # via cryptography +cfn-lint==0.72.2 \ + --hash=sha256:1117a0a50899807021fb7b87ebcfb04eddda06269d9a45c7dbbba3bc7c4631f5 \ + --hash=sha256:d7ef1658687cf119adcf6ec312f28b5adc312d80293a54cdd5c140e2695a243c + # via aws-sam-cli (setup.py) chardet==3.0.4 \ --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 @@ -196,6 +205,7 @@ importlib-metadata==4.11.3 \ --hash=sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539 # via # click + # jsonpickle # jsonschema itsdangerous==2.1.2 \ --hash=sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44 \ @@ -219,10 +229,31 @@ jmespath==0.10.0 \ # aws-sam-cli (setup.py) # boto3 # botocore +jschema-to-python==1.2.3 \ + --hash=sha256:76ff14fe5d304708ccad1284e4b11f96a658949a31ee7faed9e0995279549b91 \ + --hash=sha256:8a703ca7604d42d74b2815eecf99a33359a8dccbb80806cce386d5e2dd992b05 + # via cfn-lint +jsonpatch==1.32 \ + --hash=sha256:26ac385719ac9f54df8a2f0827bb8253aa3ea8ab7b3368457bcdb8c14595a397 \ + --hash=sha256:b6ddfe6c3db30d81a96aaeceb6baf916094ffa23d7dd5fa2c13e13f8b6e600c2 + # via cfn-lint +jsonpickle==3.0.0 \ + --hash=sha256:504586e5c0fd52fd76a56f86c36f8c4d29778bdef92dc06d38ca6e2e9fc4f090 \ + --hash=sha256:7c4b13d595ff3520148ed870b9f5917023ebdc55c9ec0cb695688fdc16e90c3e + # via jschema-to-python +jsonpointer==2.3 \ + --hash=sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9 \ + 
--hash=sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a + # via jsonpatch jsonschema==3.2.0 \ --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \ --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a - # via aws-sam-translator + # via + # aws-sam-translator + # cfn-lint +junit-xml==1.9 \ + --hash=sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732 + # via cfn-lint markupsafe==2.1.1 \ --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ @@ -265,6 +296,16 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 +networkx==2.6.3 \ + --hash=sha256:80b6b89c77d1dfb64a4c7854981b60aeea6360ac02c6d4e4913319e0a313abef \ + --hash=sha256:c0946ed31d71f1b732b5aaa6da5a0388a345019af232ce2f49c766e2d6795c51 + # via cfn-lint +pbr==5.11.0 \ + --hash=sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe \ + --hash=sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a + # via + # jschema-to-python + # sarif-om pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 @@ -322,6 +363,7 @@ pyyaml==5.4.1 \ --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 # via # aws-sam-cli (setup.py) + # cfn-lint # cookiecutter # serverlessrepo regex==2021.9.30 \ @@ -380,6 +422,10 @@ s3transfer==0.5.0 \ --hash=sha256:50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c \ --hash=sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803 # via boto3 +sarif-om==1.0.4 \ + --hash=sha256:539ef47a662329b1c8502388ad92457425e95dc0aaaf995fe46f4984c4771911 \ + 
--hash=sha256:cd5f416b3083e00d402a92e449a7ff67af46f11241073eea0461802a3b5aef98 + # via cfn-lint serverlessrepo==0.1.10 \ --hash=sha256:671f48038123f121437b717ed51f253a55775590f00fbab6fbc6a01f8d05c017 \ --hash=sha256:b99c69be8ce87ccc48103fbe371ba7b148c3374c57862e59118c402522e5ed52 @@ -390,6 +436,7 @@ six==1.15.0 \ # via # docker # jsonschema + # junit-xml # python-dateutil # serverlessrepo # websocket-client diff --git a/samcli/cli/options.py b/samcli/cli/options.py index 9a255225f0..57ee82b9eb 100644 --- a/samcli/cli/options.py +++ b/samcli/cli/options.py @@ -40,6 +40,15 @@ def region_option(f): def callback(ctx, param, value): state = ctx.ensure_object(Context) + from botocore import exceptions, utils + from samcli.commands.exceptions import RegionError + + try: + utils.validate_region_name(value) + except exceptions.InvalidRegionError as ex: + raise RegionError( + message=f"Provided region: {value} doesn't match a supported format", wrapped_from=ex.__class__.__name__ + ) from ex state.region = value return value diff --git a/samcli/commands/validate/validate.py b/samcli/commands/validate/validate.py index 88cdba024e..5bb659a424 100644 --- a/samcli/commands/validate/validate.py +++ b/samcli/commands/validate/validate.py @@ -9,6 +9,7 @@ from samtranslator.translator.arn_generator import NoRegionFound +from samcli.cli.context import Context from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options, print_cmdline_args from samcli.commands._utils.cdk_support_decorators import unsupported_command_cdk from samcli.commands._utils.options import template_option_without_build @@ -22,24 +23,25 @@ @template_option_without_build @aws_creds_options @cli_framework_options +@click.option( + "--lint", + is_flag=True, + help="Run linting validation on template through cfn-lint. 
" + "For more information, see: https://github.com/aws-cloudformation/cfn-lint", +) @pass_context @track_command @check_newer_version @print_cmdline_args @unsupported_command_cdk(alternative_command="cdk doctor") -def cli( - ctx, - template_file, - config_file, - config_env, -): +def cli(ctx, template_file, config_file, config_env, lint): # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing - do_cli(ctx, template_file) # pragma: no cover + do_cli(ctx, template_file, lint) # pragma: no cover -def do_cli(ctx, template): +def do_cli(ctx, template, lint): """ Implementation of the ``cli`` method, just separated out for unit testing purposes """ @@ -50,29 +52,36 @@ def do_cli(ctx, template): from .lib.exceptions import InvalidSamDocumentException from .lib.sam_template_validator import SamTemplateValidator - sam_template = _read_sam_file(template) - - iam_client = boto3.client("iam") - validator = SamTemplateValidator( - sam_template, ManagedPolicyLoader(iam_client), profile=ctx.profile, region=ctx.region - ) - - try: - validator.is_valid() - except InvalidSamDocumentException as e: - click.secho("Template provided at '{}' was invalid SAM Template.".format(template), bg="red") - raise InvalidSamTemplateException(str(e)) from e - except NoRegionFound as no_region_found_e: - raise UserException( - "AWS Region was not found. Please configure your region through a profile or --region option", - wrapped_from=no_region_found_e.__class__.__name__, - ) from no_region_found_e - except NoCredentialsError as e: - raise UserException( - "AWS Credentials are required. 
Please configure your credentials.", wrapped_from=e.__class__.__name__ - ) from e - - click.secho("{} is a valid SAM Template".format(template), fg="green") + if lint: + _lint(ctx, template) + else: + sam_template = _read_sam_file(template) + + iam_client = boto3.client("iam") + validator = SamTemplateValidator( + sam_template, ManagedPolicyLoader(iam_client), profile=ctx.profile, region=ctx.region + ) + + try: + validator.is_valid() + except InvalidSamDocumentException as e: + click.secho("Template provided at '{}' was invalid SAM Template.".format(template), bg="red") + raise InvalidSamTemplateException(str(e)) from e + except NoRegionFound as no_region_found_e: + raise UserException( + "AWS Region was not found. Please configure your region through a profile or --region option", + wrapped_from=no_region_found_e.__class__.__name__, + ) from no_region_found_e + except NoCredentialsError as e: + raise UserException( + "AWS Credentials are required. Please configure your credentials.", wrapped_from=e.__class__.__name__ + ) from e + + click.secho( + "{} is a valid SAM Template. This is according to basic SAM Validation, " + 'for additional validation, please run "sam validate --lint"'.format(template), + fg="green", + ) def _read_sam_file(template): @@ -95,3 +104,59 @@ def _read_sam_file(template): sam_template = yaml_parse(sam_template.read()) return sam_template + + +def _lint(ctx: Context, template: str) -> None: + """ + Parses provided SAM template and maps errors from CloudFormation template back to SAM template. + + Cfn-lint loggers are added to the SAM cli logging hierarchy which at the root logger + configures with INFO level logging and a different formatting. This exposes and duplicates + some cfn-lint logs that are not typically shown to customers. Explicitly setting the level to + WARNING and propagate to be False remediates these issues. 
+ + Parameters + ----------- + ctx + Click context object + template + Path to the template file + + """ + + import cfnlint.core # type: ignore + import logging + from samcli.commands.exceptions import UserException + + cfn_lint_logger = logging.getLogger("cfnlint") + cfn_lint_logger.propagate = False + + try: + lint_args = [template] + if ctx.debug: + lint_args.append("--debug") + if ctx.region: + lint_args.append("--region") + lint_args.append(ctx.region) + + (args, filenames, formatter) = cfnlint.core.get_args_filenames(lint_args) + cfn_lint_logger.setLevel(logging.WARNING) + matches = list(cfnlint.core.get_matches(filenames, args)) + if not matches: + click.secho("{} is a valid SAM Template".format(template), fg="green") + rules = cfnlint.core.get_used_rules() + matches_output = formatter.print_matches(matches, rules, filenames) + + if matches_output: + click.secho(matches_output) + + except cfnlint.core.InvalidRegionException as e: + raise UserException( + "AWS Region was not found. 
Please configure your region through the --region option", + wrapped_from=e.__class__.__name__, + ) from e + except cfnlint.core.CfnLintExitException as lint_error: + raise UserException( + lint_error, + wrapped_from=lint_error.__class__.__name__, + ) from lint_error diff --git a/tests/integration/testdata/validate/default_yaml/templateError.yaml b/tests/integration/testdata/validate/default_yaml/templateError.yaml new file mode 100644 index 0000000000..7e5502d2b1 --- /dev/null +++ b/tests/integration/testdata/validate/default_yaml/templateError.yaml @@ -0,0 +1,20 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 + +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: HelloWorldFunction + Handler: app.lambdaHandler + Runtime: nodejs14.x + + HelloWorldFunction: + Type: AWS::Serverless::Api + Properties: + StageName: Prod + DefinitionUri: s3://sam-demo-bucket/webpage_swagger.json + EndpointConfiguration: + Type: REGIONAL + + diff --git a/tests/integration/validate/test_validate_command.py b/tests/integration/validate/test_validate_command.py index cba52002dd..064fd38660 100644 --- a/tests/integration/validate/test_validate_command.py +++ b/tests/integration/validate/test_validate_command.py @@ -28,6 +28,16 @@ class TestValidate(TestCase): @classmethod def setUpClass(cls): cls.patterns = { + TemplateFileTypes.JSON: re.compile( + r"template\.json can be transformed to a Cloudformation template." + ' Please run "sam validate --lint -t template.yaml" for additional validation(\r\n)?$' + ), + TemplateFileTypes.YAML: re.compile( + r"template\.yaml can be transformed to a Cloudformation template." 
+ ' Please run "sam validate --lint -t template.yaml" for additional validation(\r\n)?$' + ), + } + cls.lint_patterns = { TemplateFileTypes.JSON: re.compile(r"template\.json is a valid SAM Template(\r\n)?$"), TemplateFileTypes.YAML: re.compile(r"template\.yaml is a valid SAM Template(\r\n)?$"), } @@ -42,6 +52,7 @@ def command_list( profile: Optional[str] = None, region: Optional[str] = None, config_file: Optional[Path] = None, + lint: Optional[bool] = None, ) -> List[str]: command_list = [self.base_command(), "validate"] if template_file: @@ -52,6 +63,8 @@ def command_list( command_list += ["--region", region] if config_file: command_list += ["--config_file", str(config_file)] + if lint: + command_list += ["--lint"] return command_list @parameterized.expand( @@ -87,3 +100,61 @@ def test_validate_logs_warning_for_cdk_project(self): ) self.assertIn(warning_message, output) + + @parameterized.expand( + [ + ("default_yaml", TemplateFileTypes.YAML), # project with template.yaml + ("default_json", TemplateFileTypes.JSON), # project with template.json + ("multiple_files", TemplateFileTypes.YAML), # project with both template.yaml and template.json + ( + "with_build", + TemplateFileTypes.JSON, + ), # project with template.json and standard build directory .aws-sam/build/template.yaml + ] + ) + def test_lint_template(self, relative_folder: str, expected_file: TemplateFileTypes): + test_data_path = Path(__file__).resolve().parents[2] / "integration" / "testdata" / "validate" + process_dir = test_data_path / relative_folder + command_result = run_command(self.command_list(lint=True), cwd=str(process_dir)) + pattern = self.lint_patterns[expected_file] # type: ignore + output = command_result.stdout.decode("utf-8") + self.assertEqual(command_result.process.returncode, 0) + self.assertRegex(output, pattern) + + def test_lint_error_no_region(self): + test_data_path = Path(__file__).resolve().parents[2] / "integration" / "testdata" / "validate" / "default_json" + template_file = 
"template.json" + template_path = test_data_path.joinpath(template_file) + command_result = run_command(self.command_list(lint=True, region="--debug", template_file=template_path)) + output = command_result.stderr.decode("utf-8") + + error_message = f"Error: Provided region: --debug doesn't match a supported format" + + self.assertIn(error_message, output) + + def test_lint_error_invalid_region(self): + test_data_path = Path(__file__).resolve().parents[2] / "integration" / "testdata" / "validate" / "default_json" + template_file = "template.json" + template_path = test_data_path.joinpath(template_file) + command_result = run_command(self.command_list(lint=True, region="us-north-5", template_file=template_path)) + output = command_result.stderr.decode("utf-8") + + error_message = f"Error: AWS Region was not found. Please configure your region through the --region option" + + self.assertIn(error_message, output) + + def test_lint_invalid_template(self): + test_data_path = Path(__file__).resolve().parents[2] / "integration" / "testdata" / "validate" / "default_yaml" + template_file = "templateError.yaml" + template_path = test_data_path.joinpath(template_file) + command_result = run_command(self.command_list(lint=True, template_file=template_path)) + output = command_result.stdout.decode("utf-8") + + warning_message = ( + f'E0000 Duplicate found "HelloWorldFunction" (line 5)\n' + "{}/templateError.yaml:5:3\n\n" + 'E0000 Duplicate found "HelloWorldFunction" (line 12)\n' + "{}/templateError.yaml:12:3\n\n".format(test_data_path, test_data_path) + ) + + self.assertIn(warning_message, output) diff --git a/tests/unit/cli/test_main.py b/tests/unit/cli/test_main.py index 0ed9ff5992..4ea81af843 100644 --- a/tests/unit/cli/test_main.py +++ b/tests/unit/cli/test_main.py @@ -3,6 +3,7 @@ from unittest import TestCase from click.testing import CliRunner from samcli.cli.main import cli +from samcli.commands.exceptions import RegionError class TestCliBase(TestCase): @@ -26,6 +27,37 
@@ def test_cli_some_command(self): result = runner.invoke(cli, ["local", "generate-event", "s3"]) self.assertEqual(result.exit_code, 0) + def test_cli_with_non_standard_format_region(self): + mock_cfg = Mock() + with patch("samcli.cli.main.GlobalConfig", mock_cfg): + runner = CliRunner() + for command in ["validate", "deploy"]: + result = runner.invoke(cli, [command, "--region", "--non-standard-format"]) + self.assertEqual(result.exit_code, 1) + self.assertIn( + "Error: Provided region: --non-standard-format doesn't match a supported format", result.output + ) + self.assertRaises(RegionError) + + def test_cli_with_empty_region(self): + mock_cfg = Mock() + with patch("samcli.cli.main.GlobalConfig", mock_cfg): + runner = CliRunner() + for command in ["validate", "deploy"]: + result = runner.invoke(cli, [command, "--region"]) + self.assertEqual(result.exit_code, 2) + self.assertIn("Error: Option '--region' requires an argument", result.output) + + @patch("samcli.commands.validate.validate.do_cli") + def test_cli_with_valid_region(self, mock_do_cli): + mock_cfg = Mock() + with patch("samcli.cli.main.GlobalConfig", mock_cfg): + runner = CliRunner() + result = runner.invoke(cli, ["validate", "--region", "us-west-2"]) + self.assertEqual(result.exit_code, 0) + self.assertTrue(mock_do_cli.called) + self.assertEqual(mock_do_cli.call_count, 1) + def test_cli_with_debug(self): mock_cfg = Mock() with patch("samcli.cli.main.GlobalConfig", mock_cfg): diff --git a/tests/unit/commands/samconfig/test_samconfig.py b/tests/unit/commands/samconfig/test_samconfig.py index fe52c6b638..6b4f47859f 100644 --- a/tests/unit/commands/samconfig/test_samconfig.py +++ b/tests/unit/commands/samconfig/test_samconfig.py @@ -100,7 +100,7 @@ def test_validate(self, do_cli_mock): LOG.exception("Command failed", exc_info=result.exc_info) self.assertIsNone(result.exception) - do_cli_mock.assert_called_with(ANY, str(Path(os.getcwd(), "mytemplate.yaml"))) + do_cli_mock.assert_called_with(ANY, 
str(Path(os.getcwd(), "mytemplate.yaml")), False) @patch("samcli.commands.build.command.do_cli") def test_build(self, do_cli_mock): @@ -1248,7 +1248,7 @@ def test_secondary_option_name_template_validate(self, do_cli_mock): LOG.exception("Command failed", exc_info=result.exc_info) self.assertIsNone(result.exception) - do_cli_mock.assert_called_with(ANY, str(Path(os.getcwd(), "mytemplate.yaml"))) + do_cli_mock.assert_called_with(ANY, str(Path(os.getcwd(), "mytemplate.yaml")), False) @contextmanager diff --git a/tests/unit/commands/validate/test_cli.py b/tests/unit/commands/validate/test_cli.py index b9a32c2180..16d46a8e73 100644 --- a/tests/unit/commands/validate/test_cli.py +++ b/tests/unit/commands/validate/test_cli.py @@ -2,14 +2,17 @@ from unittest.mock import Mock, patch from collections import namedtuple -from botocore.exceptions import NoCredentialsError +from botocore.exceptions import NoCredentialsError, InvalidRegionError + +from cfnlint.core import CfnLintExitException, InvalidRegionException # type: ignore from samcli.commands.exceptions import UserException from samcli.commands.local.cli_common.user_exceptions import SamTemplateNotFoundException, InvalidSamTemplateException from samcli.commands.validate.lib.exceptions import InvalidSamDocumentException -from samcli.commands.validate.validate import do_cli, _read_sam_file +from samcli.commands.validate.validate import do_cli, _read_sam_file, _lint ctx_mock = namedtuple("ctx", ["profile", "region"]) +ctx_lint_mock = namedtuple("ctx", ["debug", "region"]) class TestValidateCli(TestCase): @@ -50,7 +53,7 @@ def test_template_fails_validation(self, patched_boto, read_sam_file_patch, clic template_valiadator.return_value = is_valid_mock with self.assertRaises(InvalidSamTemplateException): - do_cli(ctx=ctx_mock(profile="profile", region="region"), template=template_path) + do_cli(ctx=ctx_mock(profile="profile", region="region"), template=template_path, lint=False) 
@patch("samcli.commands.validate.lib.sam_template_validator.SamTemplateValidator") @patch("samcli.commands.validate.validate.click") @@ -65,7 +68,7 @@ def test_no_credentials_provided(self, patched_boto, read_sam_file_patch, click_ template_valiadator.return_value = is_valid_mock with self.assertRaises(UserException): - do_cli(ctx=ctx_mock(profile="profile", region="region"), template=template_path) + do_cli(ctx=ctx_mock(profile="profile", region="region"), template=template_path, lint=False) @patch("samcli.commands.validate.lib.sam_template_validator.SamTemplateValidator") @patch("samcli.commands.validate.validate.click") @@ -79,4 +82,39 @@ def test_template_passes_validation(self, patched_boto, read_sam_file_patch, cli is_valid_mock.is_valid.return_value = True template_valiadator.return_value = is_valid_mock - do_cli(ctx=ctx_mock(profile="profile", region="region"), template=template_path) + do_cli(ctx=ctx_mock(profile="profile", region="region"), template=template_path, lint=False) + + @patch("samcli.commands.validate.validate.click") + @patch("samcli.commands.validate.validate._lint") + def test_lint_template_passes(self, click_patch, lint_patch): + template_path = "path_to_template" + + lint_patch.return_value = True + + do_cli(ctx=ctx_lint_mock(debug=False, region="region"), template=template_path, lint=True) + + @patch("cfnlint.core.get_args_filenames") + @patch("cfnlint.core.get_matches") + @patch("samcli.commands.validate.validate.click") + def test_lint_invalid_region_argument_fails(self, click_patch, matches_patch, args_patch): + template_path = "path_to_template" + + args_patch.return_value = ("A", "B", "C") + + matches_patch.side_effect = InvalidRegionException + + with self.assertRaises(UserException): + _lint(ctx=ctx_lint_mock(debug=False, region="region"), template=template_path) + + @patch("cfnlint.core.get_args_filenames") + @patch("cfnlint.core.get_matches") + @patch("samcli.commands.validate.validate.click") + def 
test_lint_exception_fails(self, click_patch, matches_patch, args_patch): + template_path = "path_to_template" + + args_patch.return_value = ("A", "B", "C") + + matches_patch.side_effect = CfnLintExitException + + with self.assertRaises(UserException): + _lint(ctx=ctx_lint_mock(debug=False, region="region"), template=template_path) From ce7143c5ddb55b6bd69748ebea1b394b89525d35 Mon Sep 17 00:00:00 2001 From: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Date: Wed, 14 Dec 2022 13:40:43 -0500 Subject: [PATCH 17/26] fix: Fix validate command integration tests console output missmatch and update pyyaml version requirement (#4479) * Update expected validate output message for non-lint scenario * Update the compatible pyyaml version to be 5.4.1 or above to be compatible with cfn-lint --- requirements/base.txt | 2 +- tests/integration/validate/test_validate_command.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index d945c75ac0..086dfb2598 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ Werkzeug<2.1 #Need to add Schemas latest SDK. boto3>=1.19.5,==1.* jmespath~=0.10.0 -PyYAML~=5.3 +PyYAML>=5.4.1,==5.* cookiecutter~=2.1.1 aws-sam-translator==1.55.0 #docker minor version updates can include breaking changes. Auto update micro version only. diff --git a/tests/integration/validate/test_validate_command.py b/tests/integration/validate/test_validate_command.py index 064fd38660..160cead7fb 100644 --- a/tests/integration/validate/test_validate_command.py +++ b/tests/integration/validate/test_validate_command.py @@ -29,12 +29,12 @@ class TestValidate(TestCase): def setUpClass(cls): cls.patterns = { TemplateFileTypes.JSON: re.compile( - r"template\.json can be transformed to a Cloudformation template." - ' Please run "sam validate --lint -t template.yaml" for additional validation(\r\n)?$' + r"template\.json is a valid SAM Template. 
This is according to basic SAM Validation, " + 'for additional validation, please run "sam validate --lint"(\r\n)?$' ), TemplateFileTypes.YAML: re.compile( - r"template\.yaml can be transformed to a Cloudformation template." - ' Please run "sam validate --lint -t template.yaml" for additional validation(\r\n)?$' + r"template\.yaml is a valid SAM Template. This is according to basic SAM Validation, " + 'for additional validation, please run "sam validate --lint"(\r\n)?$' ), } cls.lint_patterns = { From 3adc94f4b91367aa01686a5db3c9e9b5fc25296f Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Wed, 14 Dec 2022 16:47:15 -0800 Subject: [PATCH 18/26] fix: use resource type instead of resource id for deciding zip method (#4485) --- samcli/lib/package/packageable_resources.py | 3 +- samcli/lib/package/utils.py | 4 +- tests/integration/sync/test_sync_adl.py | 2 +- tests/integration/sync/test_sync_watch.py | 1 + tests/testing_utils.py | 2 +- .../lib/package/test_artifact_exporter.py | 82 +++++++++++++++---- 6 files changed, 74 insertions(+), 20 deletions(-) diff --git a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index 4672c1e448..e43d98af2f 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -147,6 +147,7 @@ def do_export(self, resource_id, resource_dict, parent_dir): should_sign_package = self.code_signer.should_sign_package(resource_id) artifact_extension = "zip" if should_sign_package else None uploaded_url = upload_local_artifacts( + self.RESOURCE_TYPE, resource_id, resource_dict, self.PROPERTY_NAME, @@ -325,7 +326,7 @@ def do_export(self, resource_id, resource_dict, parent_dir): """ artifact_s3_url = upload_local_artifacts( - resource_id, resource_dict, self.PROPERTY_NAME, parent_dir, self.uploader + self.RESOURCE_TYPE, resource_id, resource_dict, self.PROPERTY_NAME, parent_dir, self.uploader ) parsed_url = 
S3Uploader.parse_s3_url( diff --git a/samcli/lib/package/utils.py b/samcli/lib/package/utils.py index c22b08afda..203c0914f2 100644 --- a/samcli/lib/package/utils.py +++ b/samcli/lib/package/utils.py @@ -121,6 +121,7 @@ def upload_local_image_artifacts(resource_id, resource_dict, property_name, pare def upload_local_artifacts( + resource_type: str, resource_id: str, resource_dict: Dict, property_name: str, @@ -140,6 +141,7 @@ def upload_local_artifacts( If path is already a path to S3 object, this method does nothing. + :param resource_type: Type of the CloudFormation resource :param resource_id: Id of the CloudFormation resource :param resource_dict: Dictionary containing resource definition :param property_name: Property name of CloudFormation resource where this @@ -174,7 +176,7 @@ def upload_local_artifacts( local_path, uploader, extension, - zip_method=make_zip_with_lambda_permissions if resource_id in LAMBDA_LOCAL_RESOURCES else make_zip, + zip_method=make_zip_with_lambda_permissions if resource_type in LAMBDA_LOCAL_RESOURCES else make_zip, ) # Path could be pointing to a file. 
Upload the file diff --git a/tests/integration/sync/test_sync_adl.py b/tests/integration/sync/test_sync_adl.py index 5910fd5d70..aeb32c645d 100644 --- a/tests/integration/sync/test_sync_adl.py +++ b/tests/integration/sync/test_sync_adl.py @@ -147,7 +147,7 @@ def test_sync_watch_code(self): ) read_until_string( self.watch_process, - "\x1b[32mFinished syncing Function Layer Reference Sync HelloWorldFunction.\x1b[0m\n", + "\x1b[32mFinished syncing Layer HelloWorldFunction", timeout=60, ) self._confirm_lambda_error(lambda_functions[0]) diff --git a/tests/integration/sync/test_sync_watch.py b/tests/integration/sync/test_sync_watch.py index 463bdcbe2f..2dce048419 100644 --- a/tests/integration/sync/test_sync_watch.py +++ b/tests/integration/sync/test_sync_watch.py @@ -119,6 +119,7 @@ def _setup_verify_infra(self): s3_prefix=self.s3_prefix, kms_key_id=self.kms_key, tags="integ=true clarity=yes foo_bar=baz", + debug=True, ) self.watch_process = start_persistent_process(sync_command_list, cwd=self.test_dir) read_until_string(self.watch_process, "Enter Y to proceed with the command, or enter N to cancel:\n") diff --git a/tests/testing_utils.py b/tests/testing_utils.py index d410c6b599..6facc5cf9c 100644 --- a/tests/testing_utils.py +++ b/tests/testing_utils.py @@ -126,7 +126,7 @@ def read_until_string(process: Popen, expected_output: str, timeout: int = 5) -> """ def _compare_output(output, _: List[str]) -> bool: - return bool(output == expected_output) + return bool(expected_output in output) try: read_until(process, _compare_output, timeout) diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index 11f7bd1271..d3135c2be9 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -164,11 +164,22 @@ def _helper_verify_export_resources( LambdaLayerVersionResource, ): upload_local_artifacts_mock.assert_called_once_with( - resource_id, resource_dict, 
test_class.PROPERTY_NAME, parent_dir, s3_uploader_mock + test_class.RESOURCE_TYPE, + resource_id, + resource_dict, + test_class.PROPERTY_NAME, + parent_dir, + s3_uploader_mock, ) else: upload_local_artifacts_mock.assert_called_once_with( - resource_id, resource_dict, test_class.PROPERTY_NAME, parent_dir, s3_uploader_mock, None + test_class.RESOURCE_TYPE, + resource_id, + resource_dict, + test_class.PROPERTY_NAME, + parent_dir, + s3_uploader_mock, + None, ) code_signer_mock.sign_package.assert_not_called() if "." in test_class.PROPERTY_NAME: @@ -272,6 +283,7 @@ def test_upload_local_artifacts_local_file(self, zip_and_upload_mock): # Verifies that we package local artifacts appropriately property_name = "property" resource_id = "resource_id" + resource_type = "resource_type" expected_s3_url = "s3://foo/bar?versionId=baz" self.s3_uploader_mock.upload_with_dedup.return_value = expected_s3_url @@ -283,7 +295,7 @@ def test_upload_local_artifacts_local_file(self, zip_and_upload_mock): resource_dict = {property_name: artifact_path} result = upload_local_artifacts( - resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock + resource_type, resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock ) self.assertEqual(result, expected_s3_url) @@ -300,6 +312,7 @@ def test_upload_local_artifacts_local_file_abs_path(self, zip_and_upload_mock): # Verifies that we package local artifacts appropriately property_name = "property" resource_id = "resource_id" + resource_type = "resource_type" expected_s3_url = "s3://foo/bar?versionId=baz" self.s3_uploader_mock.upload_with_dedup.return_value = expected_s3_url @@ -310,7 +323,7 @@ def test_upload_local_artifacts_local_file_abs_path(self, zip_and_upload_mock): resource_dict = {property_name: artifact_path} result = upload_local_artifacts( - resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock + resource_type, resource_id, resource_dict, property_name, parent_dir, 
self.s3_uploader_mock ) self.assertEqual(result, expected_s3_url) @@ -321,6 +334,7 @@ def test_upload_local_artifacts_local_file_abs_path(self, zip_and_upload_mock): def test_upload_local_artifacts_local_folder(self, zip_and_upload_mock): property_name = "property" resource_id = "resource_id" + resource_type = "resource_type" expected_s3_url = "s3://foo/bar?versionId=baz" zip_and_upload_mock.return_value = expected_s3_url @@ -331,7 +345,9 @@ def test_upload_local_artifacts_local_folder(self, zip_and_upload_mock): parent_dir = tempfile.gettempdir() resource_dict = {property_name: artifact_path} - result = upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, Mock()) + result = upload_local_artifacts( + resource_type, resource_id, resource_dict, property_name, parent_dir, Mock() + ) self.assertEqual(result, expected_s3_url) absolute_artifact_path = make_abs_path(parent_dir, artifact_path) @@ -340,8 +356,9 @@ def test_upload_local_artifacts_local_folder(self, zip_and_upload_mock): @patch("samcli.lib.package.utils.zip_and_upload") def test_upload_local_artifacts_local_folder_lambda_resources(self, zip_and_upload_mock): - for resource_id in LAMBDA_LOCAL_RESOURCES: + for resource_type in LAMBDA_LOCAL_RESOURCES: property_name = "property" + resource_id = "resource_id" expected_s3_url = "s3://foo/bar?versionId=baz" zip_and_upload_mock.return_value = expected_s3_url @@ -351,7 +368,9 @@ def test_upload_local_artifacts_local_folder_lambda_resources(self, zip_and_uplo parent_dir = tempfile.gettempdir() resource_dict = {property_name: artifact_path} - result = upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, Mock()) + result = upload_local_artifacts( + resource_type, resource_id, resource_dict, property_name, parent_dir, Mock() + ) self.assertEqual(result, expected_s3_url) absolute_artifact_path = make_abs_path(parent_dir, artifact_path) @@ -371,8 +390,9 @@ def test_upload_local_artifacts_local_folder_lambda_resources(self, 
zip_and_uplo @patch("samcli.lib.package.utils.zip_and_upload") def test_upload_local_artifacts_local_folder_non_lambda_resources(self, zip_and_upload_mock): non_lambda_resources = RESOURCES_WITH_LOCAL_PATHS.keys() - LAMBDA_LOCAL_RESOURCES - for resource_id in non_lambda_resources: + for resource_type in non_lambda_resources: property_name = "property" + resource_id = "resource_id" expected_s3_url = "s3://foo/bar?versionId=baz" zip_and_upload_mock.return_value = expected_s3_url @@ -382,7 +402,9 @@ def test_upload_local_artifacts_local_folder_non_lambda_resources(self, zip_and_ parent_dir = tempfile.gettempdir() resource_dict = {property_name: artifact_path} - result = upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, Mock()) + result = upload_local_artifacts( + resource_type, resource_id, resource_dict, property_name, parent_dir, Mock() + ) self.assertEqual(result, expected_s3_url) absolute_artifact_path = make_abs_path(parent_dir, artifact_path) @@ -401,6 +423,7 @@ def test_upload_local_artifacts_local_folder_non_lambda_resources(self, zip_and_ def test_upload_local_artifacts_no_path(self, zip_and_upload_mock): property_name = "property" resource_id = "resource_id" + resource_type = "resource_type" expected_s3_url = "s3://foo/bar?versionId=baz" zip_and_upload_mock.return_value = expected_s3_url @@ -409,7 +432,9 @@ def test_upload_local_artifacts_no_path(self, zip_and_upload_mock): resource_dict = {} parent_dir = tempfile.gettempdir() - result = upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock) + result = upload_local_artifacts( + resource_type, resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock + ) self.assertEqual(result, expected_s3_url) zip_and_upload_mock.assert_called_once_with(parent_dir, mock.ANY, None, zip_method=make_zip) @@ -419,13 +444,16 @@ def test_upload_local_artifacts_no_path(self, zip_and_upload_mock): def 
test_upload_local_artifacts_s3_url(self, zip_and_upload_mock): property_name = "property" resource_id = "resource_id" + resource_type = "resource_type" object_s3_url = "s3://foo/bar?versionId=baz" # If URL is already S3 URL, this will be returned without zip/upload resource_dict = {property_name: object_s3_url} parent_dir = tempfile.gettempdir() - result = upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock) + result = upload_local_artifacts( + resource_type, resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock + ) self.assertEqual(result, object_s3_url) zip_and_upload_mock.assert_not_called() @@ -435,17 +463,22 @@ def test_upload_local_artifacts_s3_url(self, zip_and_upload_mock): def test_upload_local_artifacts_invalid_value(self, zip_and_upload_mock): property_name = "property" resource_id = "resource_id" + resource_type = "resource_type" parent_dir = tempfile.gettempdir() with self.assertRaises(exceptions.InvalidLocalPathError): non_existent_file = "some_random_filename" resource_dict = {property_name: non_existent_file} - upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock) + upload_local_artifacts( + resource_type, resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock + ) with self.assertRaises(exceptions.InvalidLocalPathError): non_existent_file = ["invalid datatype"] resource_dict = {property_name: non_existent_file} - upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock) + upload_local_artifacts( + resource_type, resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock + ) zip_and_upload_mock.assert_not_called() self.s3_uploader_mock.upload_with_dedup.assert_not_called() @@ -481,7 +514,13 @@ class MockResource(ResourceZip): resource.export(resource_id, resource_dict, parent_dir) upload_local_artifacts_mock.assert_called_once_with( - resource_id, 
resource_dict, resource.PROPERTY_NAME, parent_dir, self.s3_uploader_mock, None + resource.RESOURCE_TYPE, + resource_id, + resource_dict, + resource.PROPERTY_NAME, + parent_dir, + self.s3_uploader_mock, + None, ) self.assertEqual(resource_dict[resource.PROPERTY_NAME], s3_url) @@ -756,7 +795,13 @@ class MockResource(ResourceZip): resource_dict = {} resource.export(resource_id, resource_dict, parent_dir) upload_local_artifacts_mock.assert_called_once_with( - resource_id, resource_dict, resource.PROPERTY_NAME, parent_dir, self.s3_uploader_mock, None + resource.RESOURCE_TYPE, + resource_id, + resource_dict, + resource.PROPERTY_NAME, + parent_dir, + self.s3_uploader_mock, + None, ) self.code_signer_mock.should_sign_package.assert_called_once_with(resource_id) self.code_signer_mock.sign_package.assert_not_called() @@ -851,7 +896,12 @@ class MockResource(ResourceWithS3UrlDict): resource.export(resource_id, resource_dict, parent_dir) upload_local_artifacts_mock.assert_called_once_with( - resource_id, resource_dict, resource.PROPERTY_NAME, parent_dir, self.s3_uploader_mock + resource.RESOURCE_TYPE, + resource_id, + resource_dict, + resource.PROPERTY_NAME, + parent_dir, + self.s3_uploader_mock, ) self.assertEqual( From b8a939d94f3184009c9e62078920954ec8d1992b Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> Date: Thu, 15 Dec 2022 11:31:17 -0800 Subject: [PATCH 19/26] fix: `pyinstaller` binaries (#4486) * fix: `pyinstaller` binaries - With the inclusion of `cfn-lint` as a dependency. There are data files, python files that need to be included for `cfn-lint`. - One of the dependencies brought in by `cfn-lint` is `pbr`, this in turn has a dependency on `jschema-to-python`. The way around is to explictly set `PBR_VERSION` as per https://docs.openstack.org/pbr/latest/user/packagers.html. 
However this solution is brittle and will leak dependency code into AWS SAM CLI codebase, the cleaner solution is to package it only for `pyinstaller`. * deps: cfnlint to be fully importable. - `cfnlint` fully importable package and removed from `hidden_imports`. - `jschema-to-python` only import data files and package metadata. --- installer/pyinstaller/hidden_imports.py | 2 +- installer/pyinstaller/hook-samcli.py | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/installer/pyinstaller/hidden_imports.py b/installer/pyinstaller/hidden_imports.py index b0c0845bde..b6b0cd4c90 100644 --- a/installer/pyinstaller/hidden_imports.py +++ b/installer/pyinstaller/hidden_imports.py @@ -12,5 +12,5 @@ "pkg_resources.py2_warn", "aws_lambda_builders.workflows", "configparser", - "cfnlint.rules" + "cfnlint" ] diff --git a/installer/pyinstaller/hook-samcli.py b/installer/pyinstaller/hook-samcli.py index 50daf90146..4314fe2fc5 100644 --- a/installer/pyinstaller/hook-samcli.py +++ b/installer/pyinstaller/hook-samcli.py @@ -4,9 +4,12 @@ hiddenimports = SAM_CLI_HIDDEN_IMPORTS datas = ( + # Collect data files, raw python files (if include_py_files=True) and package metadata directories. hooks.collect_all( "samcli", include_py_files=True, include_datas=["hook_packages/terraform/copy_terraform_built_artifacts.py"] )[0] + + hooks.collect_all("jschema_to_python", include_py_files=False)[0] + # Collect ONLY data files. 
+ hooks.collect_data_files("samcli") + hooks.collect_data_files("samtranslator") + hooks.collect_data_files("aws_lambda_builders") From 9a74f02a3e118aaee0b76e3b0869396849277856 Mon Sep 17 00:00:00 2001 From: vsukameti <108494317+vsukameti@users.noreply.github.com> Date: Thu, 15 Dec 2022 14:18:31 -0800 Subject: [PATCH 20/26] feat: app insights monitoring integration for sam init (#4369) * Add cli option to enable application insights monitoring * Modified template modifier comments * Fixed formatting and added more tests * Add cli option to enable application insights monitoring * Fixed INIT CLI test with app inisghts options * Added event tracking for application insights * Modified code based on comments * Addressed PR comments * update 3rd party licenses * Removed log info statement * Moved resource definition types to resources.py file * formatted files * roll back samdev changes * rolling back local testing changes * Updated dependencies * fixed indent Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Co-authored-by: Wing Fung Lau <4760060+hawflau@users.noreply.github.com> --- installer/assets/THIRD-PARTY-LICENSES | 4 + requirements/base.txt | 1 + requirements/reproducible-linux.txt | 65 +++-- samcli/commands/init/command.py | 10 + samcli/commands/init/init_generator.py | 2 + samcli/commands/init/interactive_init_flow.py | 56 ++++- samcli/lib/init/__init__.py | 18 ++ .../application_insights_template_modifier.py | 94 +++++++ .../cli_template_modifier.py | 31 +-- .../xray_tracing_template_modifier.py | 28 ++- samcli/lib/telemetry/event.py | 13 +- samcli/lib/utils/resources.py | 2 + .../init/test_interactive_init_flow.py | 1 + .../integration/buildcmd/build_integ_base.py | 1 - tests/integration/delete/delete_integ_base.py | 1 - .../schemas/test_init_with_schemas_command.py | 8 + tests/integration/init/test_init_command.py | 61 +++++ .../local/generate_event/test_cli_integ.py | 1 - tests/unit/commands/init/test_cli.py | 215 
+++++++++++++++- .../unit/commands/samconfig/test_samconfig.py | 1 + ..._application_insights_template_modifier.py | 205 +++++++++++++++ .../lib/init/test_cli_template_modifier.py | 235 ++++++++++++++++-- 22 files changed, 985 insertions(+), 68 deletions(-) create mode 100644 samcli/lib/init/template_modifiers/application_insights_template_modifier.py create mode 100644 tests/unit/lib/init/test_application_insights_template_modifier.py diff --git a/installer/assets/THIRD-PARTY-LICENSES b/installer/assets/THIRD-PARTY-LICENSES index 5ad87f7289..295a0992c2 100644 --- a/installer/assets/THIRD-PARTY-LICENSES +++ b/installer/assets/THIRD-PARTY-LICENSES @@ -1984,6 +1984,10 @@ client/archive/v0.57.0.tar.gz ** attrs; version 20.3.0 -- https://pypi.org/project/attrs/ Copyright (c) 2015 Hynek Schlawack +** ruamel-yaml; version 0.17.21 -- https://pypi.org/project/ruamel.yaml/ +Copyright (c) 2014-2022 Anthon van der Neut, Ruamel bvba +** ruamel-yaml-clib; version 0.2.7 -- https://pypi.org/project/ruamel.yaml.clib/ +Copyright (c) 2019-2021 Anthon van der Neut, Ruamel bvba ** cfn-lint; version 0.72.2 -- https://pypi.org/project/cfn-lint/ ** jschema-to-python; version 1.2.3 -- https://pypi.org/project/jschema-to-python/ ** junit-xml; version 1.9 -- https://pypi.org/project/junit-xml/ diff --git a/requirements/base.txt b/requirements/base.txt index 086dfb2598..fa53290cde 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,6 +6,7 @@ Werkzeug<2.1 #Need to add Schemas latest SDK. 
boto3>=1.19.5,==1.* jmespath~=0.10.0 +ruamel_yaml==0.17.21 PyYAML>=5.4.1,==5.* cookiecutter~=2.1.1 aws-sam-translator==1.55.0 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index 7c01d2a254..cad56a3f76 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -200,9 +200,9 @@ idna==2.10 \ --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 # via requests -importlib-metadata==4.11.3 \ - --hash=sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6 \ - --hash=sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539 +importlib-metadata==5.0.0 \ + --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ + --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # click # jsonpickle @@ -418,6 +418,45 @@ requests==2.25.1 \ # aws-sam-cli (setup.py) # cookiecutter # docker +ruamel-yaml==0.17.21 \ + --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ + --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af + # via aws-sam-cli (setup.py) +ruamel-yaml-clib==0.2.7 \ + --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ + --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ + --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ + --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ + --hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ + --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ + --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ + --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ + 
--hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ + --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ + --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ + --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ + --hash=sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5 \ + --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ + --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ + --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ + --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ + --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ + --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ + --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ + --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ + --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ + --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ + --hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ + --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ + --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ + --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ + --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ + --hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ + --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ + --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ + --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ + 
--hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 + # via ruamel-yaml s3transfer==0.5.0 \ --hash=sha256:50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c \ --hash=sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803 @@ -430,6 +469,12 @@ serverlessrepo==0.1.10 \ --hash=sha256:671f48038123f121437b717ed51f253a55775590f00fbab6fbc6a01f8d05c017 \ --hash=sha256:b99c69be8ce87ccc48103fbe371ba7b148c3374c57862e59118c402522e5ed52 # via aws-sam-cli (setup.py) +setuptools==54.2.0 \ + --hash=sha256:aa9c24fb83a9116b8d425e53bec24c7bfdbffc313c2159f9ed036d4a6dd32d7d \ + --hash=sha256:b726461910b9ba30f077880c228bea22121aec50b172edf39eb7ff026c054a11 + # via + # aws-lambda-builders + # jsonschema six==1.15.0 \ --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced @@ -501,15 +546,7 @@ wheel==0.36.2 \ --hash=sha256:78b5b185f0e5763c26ca1e324373aadd49182ca90e825f7853f4b2509215dc0e \ --hash=sha256:e11eefd162658ea59a60a0f6c7d493a7190ea4b9a85e335b33489d9f17e0245e # via aws-lambda-builders -zipp==3.7.0 \ - --hash=sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d \ - --hash=sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375 +zipp==3.10.0 \ + --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ + --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -setuptools==54.2.0 \ - --hash=sha256:aa9c24fb83a9116b8d425e53bec24c7bfdbffc313c2159f9ed036d4a6dd32d7d \ - --hash=sha256:b726461910b9ba30f077880c228bea22121aec50b172edf39eb7ff026c054a11 - # via - # aws-lambda-builders - # jsonschema diff --git a/samcli/commands/init/command.py b/samcli/commands/init/command.py index 5ef2cf6e5b..acd02d410c 100644 --- a/samcli/commands/init/command.py +++ 
b/samcli/commands/init/command.py @@ -240,6 +240,11 @@ def wrapped(*args, **kwargs): default=None, help="Enable AWS X-Ray tracing for your lambda functions", ) +@click.option( + "--application-insights/--no-application-insights", + default=None, + help="Enable CloudWatch Application Insights monitoring for your application", +) @common_options @non_interactive_validation @pass_context @@ -261,6 +266,7 @@ def cli( no_input, extra_context, tracing, + application_insights, config_file, config_env, ): @@ -283,6 +289,7 @@ def cli( no_input, extra_context, tracing, + application_insights, ) # pragma: no cover @@ -303,6 +310,7 @@ def do_cli( no_input, extra_context, tracing, + application_insights, ): """ Implementation of the ``cli`` method @@ -352,6 +360,7 @@ def do_cli( no_input, extra_context, tracing, + application_insights, ) else: if not (pt_explicit or runtime or dependency_manager or base_image or architecture): @@ -371,6 +380,7 @@ def do_cli( app_template, no_input, tracing, + application_insights, ) diff --git a/samcli/commands/init/init_generator.py b/samcli/commands/init/init_generator.py index 1583eb403c..8927e3fd05 100644 --- a/samcli/commands/init/init_generator.py +++ b/samcli/commands/init/init_generator.py @@ -17,6 +17,7 @@ def do_generate( no_input, extra_context, tracing, + application_insights, ): try: generate_project( @@ -29,6 +30,7 @@ def do_generate( no_input, extra_context, tracing, + application_insights, ) except InitErrorException as e: raise UserException(str(e), wrapped_from=e.__class__.__name__) from e diff --git a/samcli/commands/init/interactive_init_flow.py b/samcli/commands/init/interactive_init_flow.py index 36ffcefe92..037a4938b0 100644 --- a/samcli/commands/init/interactive_init_flow.py +++ b/samcli/commands/init/interactive_init_flow.py @@ -50,6 +50,7 @@ def do_interactive( app_template, no_input, tracing, + application_insights, ): """ Implementation of the ``cli`` method when --interactive is provided. 
@@ -76,6 +77,7 @@ def do_interactive( no_input, location_opt_choice, tracing, + application_insights, ) @@ -93,6 +95,7 @@ def generate_application( no_input, location_opt_choice, tracing, + application_insights, ): # pylint: disable=too-many-arguments """ The method holds the decision logic for generating an application @@ -125,6 +128,8 @@ def generate_application( User input for selecting how to get customer a vended serverless application tracing : bool boolen value to determine if X-Ray tracing show be activated or not + application_insights : bool + boolean value to determine if AppInsights monitoring should be enabled or not """ if location_opt_choice == "1": _generate_from_use_case( @@ -139,16 +144,27 @@ def generate_application( app_template, architecture, tracing, + application_insights, ) else: _generate_from_location( - location, package_type, runtime, dependency_manager, output_dir, name, no_input, tracing + location, + package_type, + runtime, + dependency_manager, + output_dir, + name, + no_input, + tracing, + application_insights, ) # pylint: disable=too-many-statements -def _generate_from_location(location, package_type, runtime, dependency_manager, output_dir, name, no_input, tracing): +def _generate_from_location( + location, package_type, runtime, dependency_manager, output_dir, name, no_input, tracing, application_insights +): location = click.prompt("\nTemplate location (git, mercurial, http(s), zip, path)", type=str) summary_msg = """ ----------------------- @@ -160,7 +176,18 @@ def _generate_from_location(location, package_type, runtime, dependency_manager, location=location, output_dir=output_dir ) click.echo(summary_msg) - do_generate(location, package_type, runtime, dependency_manager, output_dir, name, no_input, None, tracing) + do_generate( + location, + package_type, + runtime, + dependency_manager, + output_dir, + name, + no_input, + None, + tracing, + application_insights, + ) # pylint: disable=too-many-statements @@ -176,6 +203,7 @@ 
def _generate_from_use_case( app_template: Optional[str], architecture: Optional[str], tracing: Optional[bool], + application_insights: Optional[bool], ) -> None: templates = InitTemplates() runtime_or_base_image = runtime if runtime else base_image @@ -203,6 +231,9 @@ def _generate_from_use_case( if tracing is None: tracing = prompt_user_to_enable_tracing() + if application_insights is None: + application_insights = prompt_user_to_enable_application_insights() + app_template = template_chosen["appTemplate"] base_image = ( LAMBDA_IMAGES_RUNTIMES_MAP.get(str(runtime)) if not base_image and package_type == IMAGE else base_image @@ -258,6 +289,7 @@ def _generate_from_use_case( no_input, extra_context, tracing, + application_insights, ) # executing event_bridge logic if call is for Schema dynamic template if is_dynamic_schemas_template: @@ -372,6 +404,24 @@ def prompt_user_to_enable_tracing(): return False +def prompt_user_to_enable_application_insights(): + """ + Prompt user to choose if AppInsights monitoring should be enabled for their application and vice versa + """ + doc_link = "https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch-application-insights.html" + if click.confirm( + f"\nWould you like to enable monitoring using CloudWatch Application Insights?" + f"\nFor more info, please view {doc_link}" + ): + pricing_link = ( + "https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring" + "/appinsights-what-is.html#appinsights-pricing" + ) + click.echo(f"AppInsights monitoring may incur additional cost. 
View {pricing_link} for more details") + return True + return False + + def _get_choice_from_options(chosen, options, question, msg): if chosen: diff --git a/samcli/lib/init/__init__.py b/samcli/lib/init/__init__.py index 1c815ea55e..563690ead6 100644 --- a/samcli/lib/init/__init__.py +++ b/samcli/lib/init/__init__.py @@ -11,7 +11,12 @@ from cookiecutter.main import cookiecutter from samcli.local.common.runtime_template import RUNTIME_DEP_TEMPLATE_MAPPING, is_custom_runtime +from samcli.lib.telemetry.event import EventTracker from samcli.lib.init.template_modifiers.xray_tracing_template_modifier import XRayTracingTemplateModifier +from samcli.lib.init.template_modifiers.application_insights_template_modifier import ( + ApplicationInsightsTemplateModifier, +) +from samcli.lib.telemetry.event import EventName, UsedFeature from samcli.lib.utils.packagetype import ZIP from samcli.lib.utils import osutils from .exceptions import GenerateProjectFailedError, InvalidLocationError @@ -30,6 +35,7 @@ def generate_project( no_input=False, extra_context=None, tracing=False, + application_insights=False, ): """Generates project using cookiecutter and options given @@ -60,6 +66,8 @@ def generate_project( An optional dictionary, the extra cookiecutter context tracing: Optional[str] Enable or disable X-Ray Tracing + application_insights: Optional[str] + Enable or disable AppInsights Monitoring Raises ------ @@ -121,9 +129,19 @@ def generate_project( _apply_tracing(tracing, output_dir, name) + _enable_application_insights(application_insights, output_dir, name) + def _apply_tracing(tracing: bool, output_dir: str, name: str) -> None: if tracing: template_file_path = f"{output_dir}/{name}/template.yaml" template_modifier = XRayTracingTemplateModifier(template_file_path) template_modifier.modify_template() + + +def _enable_application_insights(application_insights: bool, output_dir: str, name: str) -> None: + if application_insights: + template_file_path = 
f"{output_dir}/{name}/template.yaml" + template_modifier = ApplicationInsightsTemplateModifier(template_file_path) + template_modifier.modify_template() + EventTracker.track_event(EventName.USED_FEATURE.value, UsedFeature.INIT_WITH_APPLICATION_INSIGHTS.value) diff --git a/samcli/lib/init/template_modifiers/application_insights_template_modifier.py b/samcli/lib/init/template_modifiers/application_insights_template_modifier.py new file mode 100644 index 0000000000..82d6d2c73c --- /dev/null +++ b/samcli/lib/init/template_modifiers/application_insights_template_modifier.py @@ -0,0 +1,94 @@ +""" +Class used to parse and update template when application-insights is enabled +""" +import logging +from typing import Any +from samcli.lib.init.template_modifiers.cli_template_modifier import TemplateModifier +from samcli.lib.utils.resources import AWS_APPLICATION_INSIGHTS, AWS_RESOURCE_GROUP + +LOG = logging.getLogger(__name__) + + +class ApplicationInsightsTemplateModifier(TemplateModifier): + import ruamel.yaml + from ruamel.yaml import YAML + from ruamel.yaml.comments import CommentedMap + + TYPE_KEY = "Type" + RESOURCES_KEY = "Resources" + PROPERTIES_KEY = "Properties" + NAME_KEY = "Name" + RESOURCE_QUERY_KEY = "ResourceQuery" + RESOURCE_GROUP_NAME_KEY = "ResourceGroupName" + AUTO_CONFIG_ENABLED_KEY = "AutoConfigurationEnabled" + DEPENDS_ON_KEY = "DependsOn" + CFN_STACK_TYPE = "CLOUDFORMATION_STACK_1_0" + RESOURCE_GROUP_REF = "ApplicationResourceGroup" + APPLICATION_INSIGHTS_REF = "ApplicationInsightsMonitoring" + AUTO_CONFIG_VALUE = "true" + RESOURCE_GROUP_NAME = {"Fn::Join": ["", ["ApplicationInsights-SAM-", {"Ref": "AWS::StackName"}]]} + + # set ignore aliases to true. This configuration avoids usage yaml aliases which is not parsed by CloudFormation. 
+ class NonAliasingRTRepresenter(ruamel.yaml.representer.RoundTripRepresenter): + def ignore_aliases(self, data): + return True + + def __init__(self, location): + self.yaml = ApplicationInsightsTemplateModifier.YAML() + self.yaml.Representer = ApplicationInsightsTemplateModifier.NonAliasingRTRepresenter + super().__init__(location) + + def _get_template(self) -> Any: + with open(self.template_location) as file: + return self.yaml.load(file) + + def _update_template_fields(self): + """ + Add new resources to SAM template + """ + self._add_app_insights_monitoring_section() + + def _add_app_insights_monitoring_section(self): + resourceGroup = { + self.TYPE_KEY: AWS_RESOURCE_GROUP, + self.PROPERTIES_KEY: { + self.NAME_KEY: self.RESOURCE_GROUP_NAME, + self.RESOURCE_QUERY_KEY: {self.TYPE_KEY: self.CFN_STACK_TYPE}, + }, + } + + appInsightsApplication = { + self.TYPE_KEY: AWS_APPLICATION_INSIGHTS, + self.PROPERTIES_KEY: { + self.RESOURCE_GROUP_NAME_KEY: self.RESOURCE_GROUP_NAME, + self.AUTO_CONFIG_ENABLED_KEY: self.AUTO_CONFIG_VALUE, + }, + self.DEPENDS_ON_KEY: self.RESOURCE_GROUP_REF, + } + + self.template[self.RESOURCES_KEY][self.RESOURCE_GROUP_REF] = ApplicationInsightsTemplateModifier.CommentedMap( + resourceGroup + ) + self.template[self.RESOURCES_KEY][ + self.APPLICATION_INSIGHTS_REF + ] = ApplicationInsightsTemplateModifier.CommentedMap(appInsightsApplication) + + def _print_sanity_check_error(self): + link = "https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch-application-insights.html" + message = ( + f"Warning: Unable to add Application Insights monitoring to the application." 
+ f"\nTo learn more about Application Insights, visit {link}" + ) + LOG.warning(message) + + def _write(self, template: list): + """ + write generated template into SAM template + + Parameters + ---------- + template : list + array with updated template data + """ + with open(self.template_location, "w") as file: + self.yaml.dump(self.template, file) diff --git a/samcli/lib/init/template_modifiers/cli_template_modifier.py b/samcli/lib/init/template_modifiers/cli_template_modifier.py index 26c020ed42..09a63366b5 100644 --- a/samcli/lib/init/template_modifiers/cli_template_modifier.py +++ b/samcli/lib/init/template_modifiers/cli_template_modifier.py @@ -24,13 +24,13 @@ def modify_template(self): and then run a sanity check on the template to know if the template matches the CFN yaml """ - self._add_new_field_to_template() + self._update_template_fields() self._write(self.template) if not self._sanity_check(): self._write(self.copy_of_original_template) @abstractmethod - def _add_new_field_to_template(self): + def _update_template_fields(self): pass def _section_position(self, section: str, position: int = 0) -> int: @@ -125,27 +125,10 @@ def _sanity_check(self) -> bool: def _print_sanity_check_error(self): pass + @abstractmethod def _write(self, template: list): - """ - write generated template into SAM template - - Parameters - ---------- - template : list - array with updated template data - """ - with open(self.template_location, "w") as file: - for line in template: - file.write(line) - - def _get_template(self) -> List[str]: - """ - Gets data the SAM templates and returns it in a array + pass - Returns - ------- - list - array with updated template data - """ - with open(self.template_location, "r") as file: - return file.readlines() + @abstractmethod + def _get_template(self): + pass diff --git a/samcli/lib/init/template_modifiers/xray_tracing_template_modifier.py b/samcli/lib/init/template_modifiers/xray_tracing_template_modifier.py index 
247f2449f5..bfa3bf54b2 100644 --- a/samcli/lib/init/template_modifiers/xray_tracing_template_modifier.py +++ b/samcli/lib/init/template_modifiers/xray_tracing_template_modifier.py @@ -2,6 +2,7 @@ Class used to parse and update template when tracing is enabled """ import logging +from typing import List from samcli.lib.init.template_modifiers.cli_template_modifier import TemplateModifier LOG = logging.getLogger(__name__) @@ -22,7 +23,19 @@ class XRayTracingTemplateModifier(TemplateModifier): "https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst\n" ) - def _add_new_field_to_template(self): + def _get_template(self) -> List[str]: + """ + Gets data the SAM templates and returns it in a array + + Returns + ------- + list + array with updated template data + """ + with open(self.template_location, "r") as file: + return file.readlines() + + def _update_template_fields(self): """ Add new field to SAM template """ @@ -91,3 +104,16 @@ def _print_sanity_check_error(self): ) message = f"Warning: Unable to add Tracing to the project. 
To learn more about Tracing visit {link}" LOG.warning(message) + + def _write(self, template: list): + """ + write generated template into SAM template + + Parameters + ---------- + template : list + array with updated template data + """ + with open(self.template_location, "w") as file: + for line in template: + file.write(line) diff --git a/samcli/lib/telemetry/event.py b/samcli/lib/telemetry/event.py index f04a0aaf26..41b96073a2 100644 --- a/samcli/lib/telemetry/event.py +++ b/samcli/lib/telemetry/event.py @@ -28,6 +28,14 @@ class EventName(Enum): BUILD_WORKFLOW_USED = "BuildWorkflowUsed" +class UsedFeature(Enum): + """Enum for the names of event values of UsedFeature""" + + ACCELERATE = "Accelerate" + CDK = "CDK" + INIT_WITH_APPLICATION_INSIGHTS = "InitWithApplicationInsights" + + class EventType: """Class for Events and the types of values they may have.""" @@ -48,10 +56,7 @@ class EventType: _WORKFLOWS = [f"{config.language}-{config.dependency_manager}" for config in ALL_CONFIGS] _event_values = { # Contains allowable values for Events - EventName.USED_FEATURE: [ - "Accelerate", - "CDK", - ], + EventName.USED_FEATURE: [event.value for event in UsedFeature], EventName.BUILD_FUNCTION_RUNTIME: INIT_RUNTIMES, EventName.SYNC_USED: [ "Start", diff --git a/samcli/lib/utils/resources.py b/samcli/lib/utils/resources.py index 11aee8da91..c091d04fba 100644 --- a/samcli/lib/utils/resources.py +++ b/samcli/lib/utils/resources.py @@ -49,6 +49,8 @@ AWS_SERVERLESS_STATEMACHINE = "AWS::Serverless::StateMachine" AWS_STEPFUNCTIONS_STATEMACHINE = "AWS::StepFunctions::StateMachine" AWS_ECR_REPOSITORY = "AWS::ECR::Repository" +AWS_APPLICATION_INSIGHTS = "AWS::ApplicationInsights::Application" +AWS_RESOURCE_GROUP = "AWS::ResourceGroups::Group" METADATA_WITH_LOCAL_PATHS = {AWS_SERVERLESSREPO_APPLICATION: ["LicenseUrl", "ReadmeUrl"]} diff --git a/tests/functional/commands/init/test_interactive_init_flow.py b/tests/functional/commands/init/test_interactive_init_flow.py index 
831dd9ee69..9a9a448a66 100644 --- a/tests/functional/commands/init/test_interactive_init_flow.py +++ b/tests/functional/commands/init/test_interactive_init_flow.py @@ -59,6 +59,7 @@ def test_unknown_runtime(self, git_repo_mock, requests_mock): app_template=None, no_input=False, tracing=False, + application_insights=False, ) output_files = list(self.output_dir.rglob("*")) self.assertEqual(len(output_files), 8) diff --git a/tests/integration/buildcmd/build_integ_base.py b/tests/integration/buildcmd/build_integ_base.py index fe416949d2..ae770cbf59 100644 --- a/tests/integration/buildcmd/build_integ_base.py +++ b/tests/integration/buildcmd/build_integ_base.py @@ -58,7 +58,6 @@ def base_command(cls): command = "sam" if os.getenv("SAM_CLI_DEV"): command = "samdev" - return command def get_command_list( diff --git a/tests/integration/delete/delete_integ_base.py b/tests/integration/delete/delete_integ_base.py index ed6677699a..d3f22decac 100644 --- a/tests/integration/delete/delete_integ_base.py +++ b/tests/integration/delete/delete_integ_base.py @@ -19,7 +19,6 @@ def base_command(self): command = "sam" if os.getenv("SAM_CLI_DEV"): command = "samdev" - return command def get_delete_command_list( diff --git a/tests/integration/init/schemas/test_init_with_schemas_command.py b/tests/integration/init/schemas/test_init_with_schemas_command.py index e7b5feef60..73fdc36e39 100644 --- a/tests/integration/init/schemas/test_init_with_schemas_command.py +++ b/tests/integration/init/schemas/test_init_with_schemas_command.py @@ -36,6 +36,7 @@ def test_init_interactive_with_event_bridge_app_aws_registry(self): 2 2 N +N eb-app-maven Y 1 @@ -75,6 +76,7 @@ def test_init_interactive_with_event_bridge_app_partner_registry(self): 2 2 N +N eb-app-maven Y 3 @@ -126,6 +128,7 @@ def test_init_interactive_with_event_bridge_app_pagination(self): 2 2 N +N eb-app-maven Y 4 @@ -166,6 +169,7 @@ def test_init_interactive_with_event_bridge_app_customer_registry(self): 2 2 N +N eb-app-maven Y 2 @@ -213,6 
+217,7 @@ def test_init_interactive_with_event_bridge_app_aws_schemas_python(self): 6 2 N +N eb-app-python38 Y 1 @@ -247,6 +252,7 @@ def test_init_interactive_with_event_bridge_app_aws_schemas_go(self): 1 2 N +N eb-app-go Y 4 @@ -283,6 +289,7 @@ def test_init_interactive_with_event_bridge_app_non_default_profile_selection(se 6 2 N +N eb-app-python38 3 N @@ -322,6 +329,7 @@ def test_init_interactive_with_event_bridge_app_non_supported_schemas_region(sel 6 2 N +N eb-app-python38 Y 1 diff --git a/tests/integration/init/test_init_command.py b/tests/integration/init/test_init_command.py index 99f0ee20c4..4653a4615c 100644 --- a/tests/integration/init/test_init_command.py +++ b/tests/integration/init/test_init_command.py @@ -371,6 +371,64 @@ def test_init_command_passes_with_disabled_tracing(self): self.assertEqual(process.returncode, 0) self.assertTrue(Path(temp, "sam-app").is_dir()) + def test_init_command_passes_with_enabled_application_insights(self): + with tempfile.TemporaryDirectory() as temp: + process = Popen( + [ + get_sam_command(), + "init", + "--runtime", + "nodejs14.x", + "--dependency-manager", + "npm", + "--app-template", + "hello-world", + "--name", + "sam-app", + "--no-interactive", + "-o", + temp, + "--application-insights", + ] + ) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + self.assertEqual(process.returncode, 0) + self.assertTrue(Path(temp, "sam-app").is_dir()) + + def test_init_command_passes_with_disabled_application_insights(self): + with tempfile.TemporaryDirectory() as temp: + process = Popen( + [ + get_sam_command(), + "init", + "--runtime", + "nodejs14.x", + "--dependency-manager", + "npm", + "--app-template", + "hello-world", + "--name", + "sam-app", + "--no-interactive", + "-o", + temp, + "--no-application-insights", + ] + ) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + self.assertEqual(process.returncode, 0) + 
self.assertTrue(Path(temp, "sam-app").is_dir()) + MISSING_REQUIRED_PARAM_MESSAGE = """Error: Missing required parameters, with --no-interactive set. Must provide one of the following required parameter combinations: @@ -726,6 +784,7 @@ def test_interactive_init(self): # 1: Zip # 1: Hello World Example # N: Would you like to enable X-Ray tracing on the function(s) in your application? [y/N] + # Y: Would you like to enable monitoring using Cloudwatch Application Insights? [y/N] user_input = """ 1 1 @@ -734,6 +793,7 @@ def test_interactive_init(self): 1 1 N +Y sam-interactive-init-app """ with tempfile.TemporaryDirectory() as temp: @@ -753,6 +813,7 @@ def test_interactive_init_default_runtime(self): 1 Y N +N sam-interactive-init-app-default-runtime """ with tempfile.TemporaryDirectory() as temp: diff --git a/tests/integration/local/generate_event/test_cli_integ.py b/tests/integration/local/generate_event/test_cli_integ.py index f5b463ad57..99a8cae4df 100644 --- a/tests/integration/local/generate_event/test_cli_integ.py +++ b/tests/integration/local/generate_event/test_cli_integ.py @@ -14,5 +14,4 @@ def _get_command(): command = "sam" if os.getenv("SAM_CLI_DEV"): command = "samdev" - return command diff --git a/tests/unit/commands/init/test_cli.py b/tests/unit/commands/init/test_cli.py index 90eb22dc0d..5a2f873cec 100644 --- a/tests/unit/commands/init/test_cli.py +++ b/tests/unit/commands/init/test_cli.py @@ -117,6 +117,7 @@ def test_init_cli(self, generate_project_patch, git_repo_clone_mock): no_input=self.no_input, extra_context=None, tracing=False, + application_insights=False, ) # THEN we should receive no errors @@ -132,6 +133,7 @@ def test_init_cli(self, generate_project_patch, git_repo_clone_mock): True, self.extra_context_as_json, False, + False, ) @patch("samcli.lib.utils.git_repo.GitRepo.clone") @@ -155,6 +157,7 @@ def test_init_cli_node(self, generate_project_patch, git_repo_clone_mock): no_input=self.no_input, extra_context=None, tracing=False, + 
application_insights=False, ) # THEN we should receive no errors @@ -169,6 +172,7 @@ def test_init_cli_node(self, generate_project_patch, git_repo_clone_mock): True, {"runtime": "nodejs18.x", "project_name": "testing project", "architectures": {"value": ["x86_64"]}}, False, + False, ) @patch("samcli.lib.utils.git_repo.GitRepo.clone") @@ -192,6 +196,7 @@ def test_init_image_cli(self, generate_project_patch, git_repo_clone_mock): no_input=self.no_input, extra_context=None, tracing=False, + application_insights=False, ) # THEN we should receive no errors @@ -206,6 +211,7 @@ def test_init_image_cli(self, generate_project_patch, git_repo_clone_mock): True, {"runtime": "nodejs12.x", "project_name": "testing project", "architectures": {"value": [ARM64]}}, False, + False, ) @patch("samcli.lib.utils.git_repo.GitRepo.clone") @@ -229,6 +235,47 @@ def test_init_cli_with_tracing(self, generate_project_patch, git_repo_clone_mock no_input=self.no_input, extra_context=None, tracing=True, + application_insights=False, + ) + + # THEN we should receive no errors + self.extra_context_as_json["architectures"] = {"value": [X86_64]} + generate_project_patch.assert_called_once_with( + # need to change the location validation check + ANY, + ZIP, + self.runtime, + self.dependency_manager, + self.output_dir, + self.name, + True, + self.extra_context_as_json, + True, + False, + ) + + @patch("samcli.lib.utils.git_repo.GitRepo.clone") + @patch("samcli.commands.init.init_generator.generate_project") + def test_init_cli_with_application_insights(self, generate_project_patch, git_repo_clone_mock): + # GIVEN generate_project successfully created a project + # WHEN a project name has been passed + init_cli( + ctx=self.ctx, + no_interactive=self.no_interactive, + location=self.location, + pt_explicit=self.pt_explicit, + package_type=self.package_type, + runtime=self.runtime, + architecture=X86_64, + base_image=self.base_image, + dependency_manager=self.dependency_manager, + output_dir=None, + 
name=self.name, + app_template=self.app_template, + no_input=self.no_input, + extra_context=None, + tracing=False, + application_insights=True, ) # THEN we should receive no errors @@ -243,6 +290,7 @@ def test_init_cli_with_tracing(self, generate_project_patch, git_repo_clone_mock self.name, True, self.extra_context_as_json, + False, True, ) @@ -267,6 +315,7 @@ def test_init_image_java_cli(self, generate_project_patch, git_repo_clone_mock): no_input=self.no_input, extra_context=None, tracing=False, + application_insights=False, ) # THEN we should receive no errors @@ -281,6 +330,7 @@ def test_init_image_java_cli(self, generate_project_patch, git_repo_clone_mock): True, {"runtime": "java11", "project_name": "testing project", "architectures": {"value": [X86_64]}}, False, + False, ) @patch("samcli.lib.utils.git_repo.GitRepo.clone") @@ -304,6 +354,7 @@ def test_init_fails_invalid_template(self, git_repo_clone_mock): no_input=self.no_input, extra_context=None, tracing=False, + application_insights=False, ) @patch("samcli.lib.utils.git_repo.GitRepo.clone") @@ -327,6 +378,7 @@ def test_init_fails_invalid_dep_mgr(self, git_repo_clone_mock): no_input=self.no_input, extra_context=None, tracing=False, + application_insights=False, ) @patch("samcli.lib.utils.git_repo.GitRepo.clone") @@ -356,6 +408,7 @@ def test_init_cli_generate_project_fails(self, generate_project_patch, git_repo_ no_input=self.no_input, extra_context=None, tracing=False, + application_insights=False, ) generate_project_patch.assert_called_with( @@ -366,6 +419,7 @@ def test_init_cli_generate_project_fails(self, generate_project_patch, git_repo_ self.name, self.no_input, False, + False, ) @patch("samcli.lib.utils.git_repo.GitRepo.clone") @@ -395,6 +449,7 @@ def test_init_cli_generate_project_image_fails(self, generate_project_patch, git no_input=self.no_input, extra_context=None, tracing=False, + application_insights=False, ) generate_project_patch.assert_called_with( @@ -405,6 +460,7 @@ def 
test_init_cli_generate_project_image_fails(self, generate_project_patch, git self.name, self.no_input, False, + False, ) @patch("samcli.lib.utils.git_repo.GitRepo.clone") @@ -428,12 +484,22 @@ def test_init_cli_with_extra_context_parameter_not_passed(self, generate_project no_input=self.no_input, extra_context=None, tracing=False, + application_insights=False, ) # THEN we should receive no errors self.extra_context_as_json["architectures"] = {"value": [ARM64]} generate_project_patch.assert_called_once_with( - ANY, ZIP, self.runtime, self.dependency_manager, ".", self.name, True, self.extra_context_as_json, False + ANY, + ZIP, + self.runtime, + self.dependency_manager, + ".", + self.name, + True, + self.extra_context_as_json, + False, + False, ) @patch("samcli.lib.utils.git_repo.GitRepo.clone") @@ -457,6 +523,7 @@ def test_init_cli_with_extra_context_parameter_passed(self, generate_project_pat no_input=self.no_input, extra_context='{"schema_name":"events", "schema_type":"aws"}', tracing=False, + application_insights=False, ) # THEN we should receive no errors and right extra_context should be passed @@ -476,6 +543,7 @@ def test_init_cli_with_extra_context_parameter_passed(self, generate_project_pat "architectures": {"value": [X86_64]}, }, False, + False, ) @patch("samcli.lib.utils.git_repo.GitRepo.clone") @@ -501,6 +569,7 @@ def test_init_cli_with_extra_context_not_overriding_default_parameter( no_input=self.no_input, extra_context='{"project_name": "my_project", "runtime": "java8", "schema_name":"events", "schema_type": "aws"}', tracing=False, + application_insights=False, ) # THEN extra_context should have not overridden default_parameters(name, runtime) @@ -520,6 +589,7 @@ def test_init_cli_with_extra_context_not_overriding_default_parameter( "architectures": {"value": [ARM64]}, }, False, + False, ) @patch("samcli.lib.utils.git_repo.GitRepo.clone") @@ -543,6 +613,7 @@ def test_init_cli_with_extra_context_input_as_wrong_json_raises_exception(self, 
no_input=self.no_input, extra_context='{"project_name", "my_project", "runtime": "java8", "schema_name":"events", "schema_type": "aws"}', tracing=False, + application_insights=False, ) @patch("samcli.commands.init.init_generator.generate_project") @@ -565,6 +636,7 @@ def test_init_cli_must_set_default_context_when_location_is_provided(self, gener no_input=None, extra_context='{"schema_name":"events", "schema_type": "aws"}', tracing=False, + application_insights=False, ) # THEN should set default parameter(name, runtime) as extra_context @@ -584,6 +656,7 @@ def test_init_cli_must_set_default_context_when_location_is_provided(self, gener "architectures": {"value": [X86_64]}, }, False, + False, ) @patch("samcli.commands.init.init_generator.generate_project") @@ -606,6 +679,7 @@ def test_init_cli_must_only_set_passed_project_name_when_location_is_provided(se no_input=None, extra_context='{"schema_name":"events", "schema_type": "aws"}', tracing=False, + application_insights=False, ) # THEN extra_context should be without runtime @@ -624,6 +698,7 @@ def test_init_cli_must_only_set_passed_project_name_when_location_is_provided(se "architectures": {"value": [ARM64]}, }, False, + False, ) @patch("samcli.commands.init.init_generator.generate_project") @@ -646,6 +721,7 @@ def test_init_cli_must_only_set_passed_runtime_when_location_is_provided(self, g no_input=None, extra_context='{"schema_name":"events", "schema_type": "aws"}', tracing=False, + application_insights=False, ) # THEN extra_context should be without name @@ -664,6 +740,7 @@ def test_init_cli_must_only_set_passed_runtime_when_location_is_provided(self, g "architectures": {"value": [ARM64]}, }, False, + False, ) @patch("samcli.lib.utils.git_repo.GitRepo.clone") @@ -688,6 +765,7 @@ def test_init_cli_with_extra_context_parameter_passed_as_escaped(self, generate_ # fmt: off extra_context='{\"schema_name\":\"events\", \"schema_type\":\"aws\"}', tracing=False, + application_insights= False, # fmt: on ) @@ -708,6 +786,7 
@@ def test_init_cli_with_extra_context_parameter_passed_as_escaped(self, generate_ "architectures": {"value": [X86_64]}, }, False, + False, ) @patch.object(InitTemplates, "__init__", MockInitTemplates.__init__) @@ -820,6 +899,7 @@ def test_init_cli_int_with_event_bridge_app_template( 1 2 N +N test-project Y 1 @@ -849,6 +929,7 @@ def test_init_cli_int_with_event_bridge_app_template( "architectures": {"value": [X86_64]}, }, False, + False, ) get_schemas_client_mock.assert_called_once_with(None, "ap-northeast-1") do_extract_and_merge_schemas_code_mock.do_extract_and_merge_schemas_code_mock( @@ -902,6 +983,7 @@ def test_init_cli_int_with_image_app_template( user_input = """ 1 N +N test-project """ runner = CliRunner() @@ -917,6 +999,7 @@ def test_init_cli_int_with_image_app_template( True, {"project_name": "test-project", "runtime": "java8", "architectures": {"value": [X86_64]}}, False, + False, ) @patch.object(InitTemplates, "__init__", MockInitTemplates.__init__) @@ -1032,6 +1115,7 @@ def test_init_cli_int_with_event_bridge_app_template_and_aws_configuration( 1 2 N +N test-project N 1 @@ -1063,6 +1147,7 @@ def test_init_cli_int_with_event_bridge_app_template_and_aws_configuration( "architectures": {"value": [X86_64]}, }, False, + False, ) get_schemas_client_mock.assert_called_once_with("default", "us-east-1") do_extract_and_merge_schemas_code_mock.do_extract_and_merge_schemas_code("result.zip", ".", "test-project", ANY) @@ -1159,6 +1244,7 @@ def test_init_cli_int_with_event_bridge_app_template_and_aws_configuration_with_ 1 1 N +N test-project N 1 @@ -1285,6 +1371,7 @@ def test_init_cli_int_with_download_manager_raises_exception( 1 2 N +N test-project Y 1 @@ -1314,6 +1401,7 @@ def test_init_cli_int_with_download_manager_raises_exception( "architectures": {"value": [X86_64]}, }, False, + False, ) get_schemas_client_mock.assert_called_once_with(None, "ap-northeast-1") do_extract_and_merge_schemas_code_mock.do_extract_and_merge_schemas_code_mock( @@ -1427,6 +1515,7 @@ 
def test_init_cli_int_with_schemas_details_raises_exception( 1 1 N +N test-project Y 1 @@ -1461,6 +1550,7 @@ def test_init_passes_dynamic_event_bridge_template(self, generate_project_patch, extra_context=None, architecture=ARM64, tracing=False, + application_insights=False, ) self.extra_context_as_json["architectures"] = {"value": [ARM64]} @@ -1475,6 +1565,7 @@ def test_init_passes_dynamic_event_bridge_template(self, generate_project_patch, True, self.extra_context_as_json, False, + False, ) @patch("samcli.lib.utils.git_repo.GitRepo.clone") @@ -1505,6 +1596,7 @@ def test_init_cli_int_from_location(self, generate_project_patch, git_repo_clone False, None, None, + None, ) @patch("samcli.commands.init.init_templates.InitTemplates._get_manifest") @@ -1523,6 +1615,7 @@ def test_init_cli_no_package_type(self, generate_project_patch, git_repo_clone_m 1 n 1 +N """ args = [ "--no-input", @@ -1539,7 +1632,7 @@ def test_init_cli_no_package_type(self, generate_project_patch, git_repo_clone_m # THEN we should receive no errors self.assertFalse(result.exception) generate_project_patch.assert_called_once_with( - ANY, IMAGE, "python3.8", "pip", ".", "untitled6", True, ANY, False + ANY, IMAGE, "python3.8", "pip", ".", "untitled6", True, ANY, False, False ) @patch.object(InitTemplates, "__init__", MockInitTemplates.__init__) @@ -1581,6 +1674,7 @@ def test_init_cli_image_pool_with_base_image_having_multiple_managed_template_bu no_input=self.no_input, extra_context=self.extra_context, tracing=False, + application_insights=False, ) @patch.object(InitTemplates, "__init__", MockInitTemplates.__init__) @@ -1622,6 +1716,7 @@ def test_init_cli_image_pool_with_base_image_having_multiple_managed_template_an no_input=self.no_input, extra_context=self.extra_context, tracing=False, + application_insights=False, ) @patch.object(InitTemplates, "__init__", MockInitTemplates.__init__) @@ -1664,6 +1759,7 @@ def test_init_cli_image_pool_with_base_image_having_multiple_managed_template_wi 
no_input=None, extra_context=None, tracing=False, + application_insights=False, ) generate_project_patch.assert_called_once_with( ANY, # location @@ -1675,6 +1771,7 @@ def test_init_cli_image_pool_with_base_image_having_multiple_managed_template_wi True, # no_input ANY, False, + False, ) @patch.object(InitTemplates, "__init__", MockInitTemplates.__init__) @@ -1709,6 +1806,7 @@ def test_init_cli_image_pool_with_base_image_having_one_managed_template_does_no no_input=None, extra_context=None, tracing=False, + application_insights=False, architecture=None, ) generate_project_patch.assert_called_once_with( @@ -1721,6 +1819,7 @@ def test_init_cli_image_pool_with_base_image_having_one_managed_template_does_no True, # no_input ANY, False, + False, ) @patch.object(InitTemplates, "__init__", MockInitTemplates.__init__) @@ -1755,6 +1854,7 @@ def test_init_cli_image_pool_with_base_image_having_one_managed_template_with_pr no_input=None, extra_context=None, tracing=False, + application_insights=False, architecture=None, ) generate_project_patch.assert_called_once_with( @@ -1767,6 +1867,7 @@ def test_init_cli_image_pool_with_base_image_having_one_managed_template_with_pr True, # no_input ANY, False, + False, ) @patch.object(InitTemplates, "__init__", MockInitTemplates.__init__) @@ -1802,6 +1903,7 @@ def test_init_cli_image_pool_with_base_image_having_one_managed_template_with_pr no_input=None, extra_context=None, tracing=False, + application_insights=False, architecture=None, ) @@ -1832,7 +1934,7 @@ def test_init_cli_must_pass_with_architecture_and_base_image(self, generate_proj # THEN we should receive no errors self.assertFalse(result.exception) generate_project_patch.assert_called_once_with( - ANY, IMAGE, "java11", "gradle", ".", "untitled6", True, ANY, None + ANY, IMAGE, "java11", "gradle", ".", "untitled6", True, ANY, None, None ) PackageType.explicit = ( False # Other tests fail after we pass --packge-type in this test, so let's reset this variable @@ -1906,6 +2008,7 @@ 
def test_init_cli_generate_default_hello_world_app( 1 y N +N test-project """ @@ -1922,6 +2025,7 @@ def test_init_cli_generate_default_hello_world_app( True, {"project_name": "test-project", "runtime": "python3.9", "architectures": {"value": ["x86_64"]}}, False, + False, ) @patch("samcli.commands.init.init_templates.InitTemplates.get_preprocessed_manifest") @@ -1993,6 +2097,7 @@ def test_init_cli_must_not_generate_default_hello_world_app( n 1 N +N test-project """ @@ -2009,6 +2114,7 @@ def test_init_cli_must_not_generate_default_hello_world_app( True, {"project_name": "test-project", "runtime": "java11", "architectures": {"value": ["x86_64"]}}, False, + False, ) def test_must_return_runtime_from_base_image_name(self): @@ -2166,6 +2272,7 @@ def test_init_fails_unsupported_dep_mgr_for_runtime(self, git_repo_clone_mock): no_input=self.no_input, extra_context=None, tracing=False, + application_insights=False, architecture=X86_64, ) expected_error_message = ( @@ -2267,6 +2374,7 @@ def test_init_cli_int_with_multiple_app_templates( 1 1 N +N test-project """ runner = CliRunner() @@ -2282,6 +2390,7 @@ def test_init_cli_int_with_multiple_app_templates( True, {"project_name": "test-project", "runtime": "java11", "architectures": {"value": ["x86_64"]}}, False, + False, ) @patch("samcli.commands.init.init_templates.LOG") @@ -2349,6 +2458,7 @@ def test_init_cli_int_must_raise_for_unsupported_runtime( 2 1 N +N test-project """ runner = CliRunner() @@ -2403,6 +2513,7 @@ def test_init_cli_int_must_raise_for_unsupported_dependency( 2 1 N +N test-project """ runner = CliRunner() @@ -2473,6 +2584,8 @@ def test_init_cli_generate_hello_world_app_without_default_prompt( # test-project: response to name user_input = """ 1 +1 +N N test-project """ @@ -2490,6 +2603,7 @@ def test_init_cli_generate_hello_world_app_without_default_prompt( True, {"project_name": "test-project", "runtime": "java11", "architectures": {"value": ["x86_64"]}}, False, + False, ) @patch.object(InitTemplates, 
"__init__", MockInitTemplates.__init__) @@ -2563,6 +2677,7 @@ def test_init_cli_generate_app_template_provide_via_options( user_input = """ 1 N +N test-project """ @@ -2579,6 +2694,7 @@ def test_init_cli_generate_app_template_provide_via_options( True, {"project_name": "test-project", "runtime": "java11", "architectures": {"value": ["x86_64"]}}, False, + False, ) def does_template_meet_filter_criteria(self): @@ -2644,6 +2760,7 @@ def test_init_cli_generate_app_template_from_local_cli_templates( 3 2 N +N test-project """ @@ -2660,6 +2777,7 @@ def test_init_cli_generate_app_template_from_local_cli_templates( True, {"project_name": "test-project", "runtime": "java11", "architectures": {"value": ["x86_64"]}}, False, + False, ) @patch("samcli.local.common.runtime_template.INIT_RUNTIMES") @@ -2731,6 +2849,7 @@ def test_init_cli_generate_app_template_with_custom_runtime( N 2 N +N test-project """ @@ -2747,6 +2866,7 @@ def test_init_cli_generate_app_template_with_custom_runtime( True, {"project_name": "test-project", "runtime": "provided.al2", "architectures": {"value": ["x86_64"]}}, False, + False, ) @patch("samcli.commands.init.init_templates.InitTemplates._get_manifest") @@ -2806,6 +2926,7 @@ def test_init_cli_generate_app_template_with_custom_runtime_using_options( user_input = """ 1 N +N test-project """ args = [ @@ -2826,6 +2947,7 @@ def test_init_cli_generate_app_template_with_custom_runtime_using_options( True, {"project_name": "test-project", "runtime": "provided.al2", "architectures": {"value": ["x86_64"]}}, False, + False, ) @patch("samcli.commands.init.init_templates.InitTemplates.get_preprocessed_manifest") @@ -2893,6 +3015,7 @@ def test_init_cli_generate_app_template_provide_via_tracing_options( 1 N 1 +N test-project """ @@ -2909,4 +3032,90 @@ def test_init_cli_generate_app_template_provide_via_tracing_options( True, {"project_name": "test-project", "runtime": "java11", "architectures": {"value": ["x86_64"]}}, True, + False, + ) + + 
@patch("samcli.commands.init.init_templates.InitTemplates.get_preprocessed_manifest") + @patch("samcli.commands.init.init_templates.InitTemplates._init_options_from_manifest") + @patch("samcli.commands.init.init_generator.generate_project") + @patch.object(InitTemplates, "__init__", MockInitTemplates.__init__) + def test_init_cli_generate_app_template_provide_via_application_insights_options( + self, generate_project_patch, init_options_from_manifest_mock, get_preprocessed_manifest_mock + ): + init_options_from_manifest_mock.return_value = [ + { + "directory": "nodejs14.x/cookiecutter-aws-sam-hello-nodejs", + "displayName": "Hello World Example", + "dependencyManager": "npm", + "appTemplate": "hello-world", + "packageType": "Zip", + "useCaseName": "Hello World Example", + }, + { + "directory": "java11/cookiecutter-aws-sam-eventbridge-schema-app-java-maven", + "displayName": "EventBridge App from scratch (100+ Event Schemas): Maven", + "dependencyManager": "maven", + "appTemplate": "eventBridge-schema-app", + "isDynamicTemplate": "True", + "packageType": "Zip", + "useCaseName": "Hello World Example", + }, + ] + + get_preprocessed_manifest_mock.return_value = { + "Hello World Example": { + "nodejs14.x": { + "Zip": [ + { + "directory": "nodejs14.x/cookiecutter-aws-sam-hello-nodejs", + "displayName": "Hello World Example", + "dependencyManager": "npm", + "appTemplate": "hello-world", + "packageType": "Zip", + "useCaseName": "Hello World Example", + }, + ] + }, + "java11": { + "Zip": [ + { + "directory": "java11/cookiecutter-aws-sam-eventbridge-schema-app-java-maven", + "displayName": "Hello World Example: Maven", + "dependencyManager": "maven", + "appTemplate": "hello-world", + "isDynamicTemplate": "True", + "packageType": "Zip", + "useCaseName": "Hello World Example", + }, + ] + }, + }, + } + + # WHEN the user follows interactive init prompts + # 1: AWS Quick Start Templates + # 2: Java 11 + # test-project: response to name + user_input = """ +1 +N +1 +N +test-project 
+ """ + + runner = CliRunner() + result = runner.invoke(init_cmd, ["--application-insights"], input=user_input) + self.assertFalse(result.exception) + generate_project_patch.assert_called_once_with( + ANY, + ZIP, + "java11", + "maven", + ".", + "test-project", + True, + {"project_name": "test-project", "runtime": "java11", "architectures": {"value": ["x86_64"]}}, + False, + True, ) diff --git a/tests/unit/commands/samconfig/test_samconfig.py b/tests/unit/commands/samconfig/test_samconfig.py index 6b4f47859f..5b93f32438 100644 --- a/tests/unit/commands/samconfig/test_samconfig.py +++ b/tests/unit/commands/samconfig/test_samconfig.py @@ -80,6 +80,7 @@ def test_init(self, do_cli_mock): True, '{"key": "value", "key2": "value2"}', None, + ANY, ) @patch("samcli.commands.validate.validate.do_cli") diff --git a/tests/unit/lib/init/test_application_insights_template_modifier.py b/tests/unit/lib/init/test_application_insights_template_modifier.py new file mode 100644 index 0000000000..acf6939e6c --- /dev/null +++ b/tests/unit/lib/init/test_application_insights_template_modifier.py @@ -0,0 +1,205 @@ +from unittest import TestCase +from unittest.mock import patch, MagicMock +from yaml.parser import ParserError +from collections import OrderedDict + +from samcli.lib.init.template_modifiers.cli_template_modifier import TemplateModifier +from samcli.lib.init.template_modifiers.xray_tracing_template_modifier import XRayTracingTemplateModifier +from samcli.lib.init.template_modifiers.application_insights_template_modifier import ( + ApplicationInsightsTemplateModifier, +) + + +class TestTemplateModifier(TestCase): + def setUp(self): + self.location = MagicMock() + self.name = "testApp" + self.template_data = [ + "Resources:\n", + " HelloWorldFunction:\n", + " Type: AWS::Serverless::Function\n", + " Properties:\n", + " CodeUri: hello_world/\n", + " Handler: app.lambda_handler\n", + ] + self.template_location = "/test.yaml" + + @patch( + 
"samcli.lib.init.template_modifiers.application_insights_template_modifier.ApplicationInsightsTemplateModifier._get_template" + ) + def test_must_add_application_insights_monitoring(self, get_template_patch): + get_template_patch.return_value = OrderedDict( + [ + ("AWSTemplateFormatVersion", "2010-09-09"), + ("Transform", "AWS::Serverless-2016-10-31"), + ("Description", "testing2\nSample SAM Template for testing2\n"), + ("Globals", OrderedDict([("Function", OrderedDict([("Timeout", 3)]))])), + ( + "Resources", + OrderedDict( + [ + ( + "HelloWorldFunction", + OrderedDict( + [ + ("Type", "AWS::Serverless::Function"), + ( + "Properties", + OrderedDict( + [ + ("CodeUri", "hello_world/"), + ("Handler", "app.lambda_handler"), + ("Runtime", "python3.9"), + ("Architectures", ["x86_64"]), + ( + "Events", + OrderedDict( + [ + ( + "HelloWorld", + OrderedDict( + [ + ("Type", "Api"), + ( + "Properties", + OrderedDict( + [ + ("Path", "/hello"), + ("Method", "get"), + ] + ), + ), + ] + ), + ) + ] + ), + ), + ] + ), + ), + ] + ), + ) + ] + ), + ), + ] + ) + + expected_template_data = OrderedDict( + [ + ("AWSTemplateFormatVersion", "2010-09-09"), + ("Transform", "AWS::Serverless-2016-10-31"), + ("Description", "testing2\nSample SAM Template for testing2\n"), + ("Globals", OrderedDict([("Function", OrderedDict([("Timeout", 3)]))])), + ( + "Resources", + OrderedDict( + [ + ( + "HelloWorldFunction", + OrderedDict( + [ + ("Type", "AWS::Serverless::Function"), + ( + "Properties", + OrderedDict( + [ + ("CodeUri", "hello_world/"), + ("Handler", "app.lambda_handler"), + ("Runtime", "python3.9"), + ("Architectures", ["x86_64"]), + ( + "Events", + OrderedDict( + [ + ( + "HelloWorld", + OrderedDict( + [ + ("Type", "Api"), + ( + "Properties", + OrderedDict( + [ + ("Path", "/hello"), + ("Method", "get"), + ] + ), + ), + ] + ), + ) + ] + ), + ), + ] + ), + ), + ] + ), + ), + ( + "ApplicationResourceGroup", + OrderedDict( + [ + ("Type", "AWS::ResourceGroups::Group"), + ( + "Properties", + { + 
"Name": { + "Fn::Join": [ + "", + ["ApplicationInsights-SAM-", {"Ref": "AWS::StackName"}], + ] + }, + "ResourceQuery": {"Type": "CLOUDFORMATION_STACK_1_0"}, + }, + ), + ] + ), + ), + ( + "ApplicationInsightsMonitoring", + OrderedDict( + [ + ("Type", "AWS::ApplicationInsights::Application"), + ( + "Properties", + { + "ResourceGroupName": { + "Fn::Join": [ + "", + ["ApplicationInsights-SAM-", {"Ref": "AWS::StackName"}], + ] + }, + "AutoConfigurationEnabled": "true", + }, + ), + ("DependsOn", "ApplicationResourceGroup"), + ] + ), + ), + ] + ), + ), + ] + ) + + template_modifier = ApplicationInsightsTemplateModifier(self.location) + template_modifier._update_template_fields() + + print(expected_template_data) + self.assertEqual(template_modifier.template, expected_template_data) + + @patch("samcli.lib.init.template_modifiers.application_insights_template_modifier.LOG") + def test_must_log_warning_message_appinsights(self, log_mock): + expected_warning_msg = ( + "Warning: Unable to add Application Insights monitoring to the application.\n" + "To learn more about Application Insights, visit " + "https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch-application-insights.html" + ) + template_modifier = ApplicationInsightsTemplateModifier(self.location) + template_modifier._print_sanity_check_error() + log_mock.warning.assert_called_once_with(expected_warning_msg) diff --git a/tests/unit/lib/init/test_cli_template_modifier.py b/tests/unit/lib/init/test_cli_template_modifier.py index 26490da0b5..d0652b971c 100644 --- a/tests/unit/lib/init/test_cli_template_modifier.py +++ b/tests/unit/lib/init/test_cli_template_modifier.py @@ -1,14 +1,19 @@ from unittest import TestCase from unittest.mock import patch, MagicMock from yaml.parser import ParserError +from collections import OrderedDict from samcli.lib.init.template_modifiers.cli_template_modifier import TemplateModifier from samcli.lib.init.template_modifiers.xray_tracing_template_modifier import 
XRayTracingTemplateModifier +from samcli.lib.init.template_modifiers.application_insights_template_modifier import ( + ApplicationInsightsTemplateModifier, +) class TestTemplateModifier(TestCase): def setUp(self): self.location = MagicMock() + self.name = "testApp" self.template_data = [ "Resources:\n", " HelloWorldFunction:\n", @@ -17,9 +22,12 @@ def setUp(self): " CodeUri: hello_world/\n", " Handler: app.lambda_handler\n", ] + self.template_location = "/test.yaml" - @patch("samcli.lib.init.template_modifiers.cli_template_modifier.TemplateModifier._get_template") - def test_must_add_new_field_to_template(self, get_template_patch): + @patch( + "samcli.lib.init.template_modifiers.xray_tracing_template_modifier.XRayTracingTemplateModifier._get_template" + ) + def test_must_update_template_fields(self, get_template_patch): get_template_patch.return_value = [ "Resources:\n", " HelloWorldFunction:\n", @@ -46,11 +54,13 @@ def test_must_add_new_field_to_template(self, get_template_patch): ] template_modifier = XRayTracingTemplateModifier(self.location) - template_modifier._add_new_field_to_template() + template_modifier._update_template_fields() self.assertEqual(template_modifier.template, expected_template_data) - @patch("samcli.lib.init.template_modifiers.cli_template_modifier.TemplateModifier._get_template") + @patch( + "samcli.lib.init.template_modifiers.xray_tracing_template_modifier.XRayTracingTemplateModifier._get_template" + ) def test_must_add_new_function_field_to_template(self, get_template_patch): get_template_patch.return_value = [ "# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst\n", @@ -84,11 +94,13 @@ def test_must_add_new_function_field_to_template(self, get_template_patch): ] template_modifier = XRayTracingTemplateModifier(self.location) - template_modifier._add_new_field_to_template() + template_modifier._update_template_fields() self.assertEqual(template_modifier.template, 
expected_template_data) - @patch("samcli.lib.init.template_modifiers.cli_template_modifier.TemplateModifier._get_template") + @patch( + "samcli.lib.init.template_modifiers.xray_tracing_template_modifier.XRayTracingTemplateModifier._get_template" + ) def test_must_add_new_api_function_field_to_template(self, get_template_patch): get_template_patch.return_value = [ "# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst\n", @@ -123,11 +135,12 @@ def test_must_add_new_api_function_field_to_template(self, get_template_patch): ] template_modifier = XRayTracingTemplateModifier(self.location) - template_modifier._add_new_field_to_template() - + template_modifier._update_template_fields() self.assertEqual(template_modifier.template, expected_template_data) - @patch("samcli.lib.init.template_modifiers.cli_template_modifier.TemplateModifier._get_template") + @patch( + "samcli.lib.init.template_modifiers.xray_tracing_template_modifier.XRayTracingTemplateModifier._get_template" + ) def test_must_replace_new_field_to_template(self, get_template_patch): get_template_patch.return_value = [ "# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst\n", @@ -166,11 +179,13 @@ def test_must_replace_new_field_to_template(self, get_template_patch): ] template_modifier = XRayTracingTemplateModifier(self.location) - template_modifier._add_new_field_to_template() + template_modifier._update_template_fields() self.assertEqual(template_modifier.template, expected_template_data) - @patch("samcli.lib.init.template_modifiers.cli_template_modifier.TemplateModifier._get_template") + @patch( + "samcli.lib.init.template_modifiers.xray_tracing_template_modifier.XRayTracingTemplateModifier._get_template" + ) def test_must_add_new_tracing_field_to_template(self, get_template_patch): get_template_patch.return_value = [ "# More info about Globals: 
https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst\n", @@ -204,10 +219,12 @@ def test_must_add_new_tracing_field_to_template(self, get_template_patch): ] template_modifier = XRayTracingTemplateModifier(self.location) - template_modifier._add_new_field_to_template() + template_modifier._update_template_fields() self.assertEqual(template_modifier.template, expected_template_data) - @patch("samcli.lib.init.template_modifiers.cli_template_modifier.TemplateModifier._get_template") + @patch( + "samcli.lib.init.template_modifiers.xray_tracing_template_modifier.XRayTracingTemplateModifier._get_template" + ) def test_must_get_section_position(self, get_template_patch): get_template_patch.return_value = [ "# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst\n", @@ -236,7 +253,9 @@ def test_must_get_section_position(self, get_template_patch): self.assertEqual(api_location, 4) self.assertEqual(resource_location, 7) - @patch("samcli.lib.init.template_modifiers.cli_template_modifier.TemplateModifier._get_template") + @patch( + "samcli.lib.init.template_modifiers.xray_tracing_template_modifier.XRayTracingTemplateModifier._get_template" + ) def test_must_get_section_position_desc(self, get_template_patch): get_template_patch.return_value = [ "# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst\n", @@ -265,7 +284,9 @@ def test_must_get_section_position_desc(self, get_template_patch): self.assertEqual(function_location, 4) self.assertEqual(resource_location, 7) - @patch("samcli.lib.init.template_modifiers.cli_template_modifier.TemplateModifier._get_template") + @patch( + "samcli.lib.init.template_modifiers.xray_tracing_template_modifier.XRayTracingTemplateModifier._get_template" + ) def test_must_get_function_field_position(self, get_template_patch): get_template_patch.return_value = [ "Resources:\n", @@ -281,7 +302,9 @@ def 
test_must_get_function_field_position(self, get_template_patch): self.assertEqual(tracing_location, -1) - @patch("samcli.lib.init.template_modifiers.cli_template_modifier.TemplateModifier._get_template") + @patch( + "samcli.lib.init.template_modifiers.xray_tracing_template_modifier.XRayTracingTemplateModifier._get_template" + ) def test_must_get_api_field_position(self, get_template_patch): get_template_patch.return_value = [ "Resources:\n", @@ -328,3 +351,183 @@ def test_must_pass_sanity_check(self, parse_yaml_file_mock): parse_yaml_file_mock.return_value = {"add: add_value"} result = template_modifier._sanity_check() self.assertTrue(result) + + @patch( + "samcli.lib.init.template_modifiers.application_insights_template_modifier.ApplicationInsightsTemplateModifier._get_template" + ) + def test_must_add_application_insights_monitoring(self, get_template_patch): + get_template_patch.return_value = OrderedDict( + [ + ("AWSTemplateFormatVersion", "2010-09-09"), + ("Transform", "AWS::Serverless-2016-10-31"), + ("Description", "testing2\nSample SAM Template for testing2\n"), + ("Globals", OrderedDict([("Function", OrderedDict([("Timeout", 3)]))])), + ( + "Resources", + OrderedDict( + [ + ( + "HelloWorldFunction", + OrderedDict( + [ + ("Type", "AWS::Serverless::Function"), + ( + "Properties", + OrderedDict( + [ + ("CodeUri", "hello_world/"), + ("Handler", "app.lambda_handler"), + ("Runtime", "python3.9"), + ("Architectures", ["x86_64"]), + ( + "Events", + OrderedDict( + [ + ( + "HelloWorld", + OrderedDict( + [ + ("Type", "Api"), + ( + "Properties", + OrderedDict( + [ + ("Path", "/hello"), + ("Method", "get"), + ] + ), + ), + ] + ), + ) + ] + ), + ), + ] + ), + ), + ] + ), + ) + ] + ), + ), + ] + ) + + expected_template_data = OrderedDict( + [ + ("AWSTemplateFormatVersion", "2010-09-09"), + ("Transform", "AWS::Serverless-2016-10-31"), + ("Description", "testing2\nSample SAM Template for testing2\n"), + ("Globals", OrderedDict([("Function", OrderedDict([("Timeout", 
3)]))])), + ( + "Resources", + OrderedDict( + [ + ( + "HelloWorldFunction", + OrderedDict( + [ + ("Type", "AWS::Serverless::Function"), + ( + "Properties", + OrderedDict( + [ + ("CodeUri", "hello_world/"), + ("Handler", "app.lambda_handler"), + ("Runtime", "python3.9"), + ("Architectures", ["x86_64"]), + ( + "Events", + OrderedDict( + [ + ( + "HelloWorld", + OrderedDict( + [ + ("Type", "Api"), + ( + "Properties", + OrderedDict( + [ + ("Path", "/hello"), + ("Method", "get"), + ] + ), + ), + ] + ), + ) + ] + ), + ), + ] + ), + ), + ] + ), + ), + ( + "ApplicationResourceGroup", + OrderedDict( + [ + ("Type", "AWS::ResourceGroups::Group"), + ( + "Properties", + { + "Name": { + "Fn::Join": [ + "", + ["ApplicationInsights-SAM-", {"Ref": "AWS::StackName"}], + ] + }, + "ResourceQuery": {"Type": "CLOUDFORMATION_STACK_1_0"}, + }, + ), + ] + ), + ), + ( + "ApplicationInsightsMonitoring", + OrderedDict( + [ + ("Type", "AWS::ApplicationInsights::Application"), + ( + "Properties", + { + "ResourceGroupName": { + "Fn::Join": [ + "", + ["ApplicationInsights-SAM-", {"Ref": "AWS::StackName"}], + ] + }, + "AutoConfigurationEnabled": "true", + }, + ), + ("DependsOn", "ApplicationResourceGroup"), + ] + ), + ), + ] + ), + ), + ] + ) + + template_modifier = ApplicationInsightsTemplateModifier(self.location) + template_modifier._update_template_fields() + + print(expected_template_data) + self.assertEqual(template_modifier.template, expected_template_data) + + @patch("samcli.lib.init.template_modifiers.application_insights_template_modifier.LOG") + def test_must_log_warning_message_appinsights(self, log_mock): + expected_warning_msg = ( + "Warning: Unable to add Application Insights monitoring to the application.\n" + "To learn more about Application Insights, visit " + "https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch-application-insights.html" + ) + template_modifier = ApplicationInsightsTemplateModifier(self.location) + 
template_modifier._print_sanity_check_error() + log_mock.warning.assert_called_once_with(expected_warning_msg) From 33049557025941335bac959bdbf1f24fee20b620 Mon Sep 17 00:00:00 2001 From: David <114027923+cdavidxu-hub@users.noreply.github.com> Date: Thu, 15 Dec 2022 17:29:37 -0800 Subject: [PATCH 21/26] Update lint helpand output message (#4489) Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> --- samcli/commands/validate/validate.py | 3 ++- tests/integration/validate/test_validate_command.py | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/samcli/commands/validate/validate.py b/samcli/commands/validate/validate.py index 5bb659a424..28964e8a2f 100644 --- a/samcli/commands/validate/validate.py +++ b/samcli/commands/validate/validate.py @@ -27,6 +27,7 @@ "--lint", is_flag=True, help="Run linting validation on template through cfn-lint. " + "Create a cfnlintrc config file to specify additional parameters. " "For more information, see: https://github.com/aws-cloudformation/cfn-lint", ) @pass_context @@ -79,7 +80,7 @@ def do_cli(ctx, template, lint): click.secho( "{} is a valid SAM Template. This is according to basic SAM Validation, " - 'for additional validation, please run "sam validate --lint"'.format(template), + 'for additional validation, please run with "--lint" option'.format(template), fg="green", ) diff --git a/tests/integration/validate/test_validate_command.py b/tests/integration/validate/test_validate_command.py index 160cead7fb..7ef6252079 100644 --- a/tests/integration/validate/test_validate_command.py +++ b/tests/integration/validate/test_validate_command.py @@ -30,11 +30,11 @@ def setUpClass(cls): cls.patterns = { TemplateFileTypes.JSON: re.compile( r"template\.json is a valid SAM Template. 
This is according to basic SAM Validation, " - 'for additional validation, please run "sam validate --lint"(\r\n)?$' + 'for additional validation, please run with "--lint" option(\r\n)?$' ), TemplateFileTypes.YAML: re.compile( r"template\.yaml is a valid SAM Template. This is according to basic SAM Validation, " - 'for additional validation, please run "sam validate --lint"(\r\n)?$' + 'for additional validation, please run with "--lint" option(\r\n)?$' ), } cls.lint_patterns = { From abd7c03ba44b7f44b1457135001e1fc2cb7360ed Mon Sep 17 00:00:00 2001 From: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Date: Fri, 16 Dec 2022 05:13:24 -0500 Subject: [PATCH 22/26] fix: Update expected message read for ADL sync code watch integration test (#4488) * Update expected message read for adl sync code integration test * Use OS adaptive path separator for assertions in validate integration tests Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> --- tests/integration/sync/test_sync_adl.py | 2 +- tests/integration/validate/test_validate_command.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/integration/sync/test_sync_adl.py b/tests/integration/sync/test_sync_adl.py index aeb32c645d..ba7ac3d8e5 100644 --- a/tests/integration/sync/test_sync_adl.py +++ b/tests/integration/sync/test_sync_adl.py @@ -133,7 +133,7 @@ def test_sync_watch_code(self): ) read_until_string( self.watch_process, - "\x1b[32mFinished syncing Function Layer Reference Sync HelloWorldFunction.\x1b[0m\n", + "\x1b[32mFinished syncing Layer HelloWorldFunction", timeout=60, ) lambda_response = json.loads(self._get_lambda_response(lambda_functions[0])) diff --git a/tests/integration/validate/test_validate_command.py b/tests/integration/validate/test_validate_command.py index 7ef6252079..08d603ee60 100644 --- a/tests/integration/validate/test_validate_command.py +++ b/tests/integration/validate/test_validate_command.py @@ -151,10 +151,10 @@ 
def test_lint_invalid_template(self): output = command_result.stdout.decode("utf-8") warning_message = ( - f'E0000 Duplicate found "HelloWorldFunction" (line 5)\n' - "{}/templateError.yaml:5:3\n\n" + 'E0000 Duplicate found "HelloWorldFunction" (line 5)\n' + f'{os.path.join(test_data_path, "templateError.yaml")}:5:3\n\n' 'E0000 Duplicate found "HelloWorldFunction" (line 12)\n' - "{}/templateError.yaml:12:3\n\n".format(test_data_path, test_data_path) + f'{os.path.join(test_data_path, "templateError.yaml")}:12:3\n\n' ) self.assertIn(warning_message, output) From bea3bc0212a76c67971c5fbf18607ed361651567 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> Date: Fri, 16 Dec 2022 11:30:41 -0800 Subject: [PATCH 23/26] fix: `hooks` data imports for pyinstaller (#4491) - import `cfnlint` with data files, python files and package metadata directories. --- installer/pyinstaller/hook-samcli.py | 1 + 1 file changed, 1 insertion(+) diff --git a/installer/pyinstaller/hook-samcli.py b/installer/pyinstaller/hook-samcli.py index 4314fe2fc5..ad25bc8065 100644 --- a/installer/pyinstaller/hook-samcli.py +++ b/installer/pyinstaller/hook-samcli.py @@ -9,6 +9,7 @@ "samcli", include_py_files=True, include_datas=["hook_packages/terraform/copy_terraform_built_artifacts.py"] )[0] + hooks.collect_all("jschema_to_python", include_py_files=False)[0] + + hooks.collect_all("cfnlint", include_py_files=True)[0] # Collect ONLY data files. 
+ hooks.collect_data_files("samcli") + hooks.collect_data_files("samtranslator") From dacd06919b87af0d1d4558e1708664aa6fe20a0b Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Fri, 16 Dec 2022 15:02:07 -0800 Subject: [PATCH 24/26] chore: remove redundant debug parameter for sync integration tests (#4493) --- tests/integration/sync/test_sync_watch.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/integration/sync/test_sync_watch.py b/tests/integration/sync/test_sync_watch.py index 2dce048419..463bdcbe2f 100644 --- a/tests/integration/sync/test_sync_watch.py +++ b/tests/integration/sync/test_sync_watch.py @@ -119,7 +119,6 @@ def _setup_verify_infra(self): s3_prefix=self.s3_prefix, kms_key_id=self.kms_key, tags="integ=true clarity=yes foo_bar=baz", - debug=True, ) self.watch_process = start_persistent_process(sync_command_list, cwd=self.test_dir) read_until_string(self.watch_process, "Enter Y to proceed with the command, or enter N to cancel:\n") From 42e512d417f085cc0f4df7260a62491a0fe0f114 Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Fri, 16 Dec 2022 16:04:25 -0800 Subject: [PATCH 25/26] revert: remove cfn-lint integration (#4494) * Revert "fix: `hooks` data imports for pyinstaller (#4491)" This reverts commit bea3bc0212a76c67971c5fbf18607ed361651567. * Revert "fix: Update expected message read validate lint integration test (#4488)" This reverts commit abd7c03ba44b7f44b1457135001e1fc2cb7360ed. * Revert "Update lint helpand output message (#4489)" This reverts commit 33049557025941335bac959bdbf1f24fee20b620. * Revert "fix: `pyinstaller` binaries (#4486)" This reverts commit b8a939d94f3184009c9e62078920954ec8d1992b. * Revert "fix: Fix validate command integration tests console output missmatch and update pyyaml version requirement (#4479)" This reverts commit ce7143c5ddb55b6bd69748ebea1b394b89525d35. 
* Revert "Adding cfn-lint as optional parameter for SAM validate command (#4444)" This reverts commit 2fd533fdb8cab8c2fd7feb11ea52ef59559147b4. --- installer/assets/THIRD-PARTY-LICENSES | 137 ------------------ installer/pyinstaller/hidden_imports.py | 1 - installer/pyinstaller/hook-samcli.py | 4 - requirements/base.txt | 5 +- requirements/reproducible-linux.txt | 57 +------- samcli/cli/options.py | 9 -- samcli/commands/validate/validate.py | 128 ++++------------ .../validate/default_yaml/templateError.yaml | 20 --- .../validate/test_validate_command.py | 71 --------- tests/unit/cli/test_main.py | 32 ---- .../unit/commands/samconfig/test_samconfig.py | 4 +- tests/unit/commands/validate/test_cli.py | 48 +----- 12 files changed, 44 insertions(+), 472 deletions(-) delete mode 100644 tests/integration/testdata/validate/default_yaml/templateError.yaml diff --git a/installer/assets/THIRD-PARTY-LICENSES b/installer/assets/THIRD-PARTY-LICENSES index 295a0992c2..de241b6974 100644 --- a/installer/assets/THIRD-PARTY-LICENSES +++ b/installer/assets/THIRD-PARTY-LICENSES @@ -8,7 +8,6 @@ ** docker; version 4.2.0 -- https://pypi.org/project/docker/ ** Importlib-metadata; version 4.11.3 -- https://importlib-metadata.readthedocs.io/en/latest/ ** libcrypto; version 1.1 -- https://www.openssl.org/ -** pbr; version 5.11.0 -- https://pypi.org/project/pbr/ ** python-request; version 2.25.1 -- https://pypi.python.org/pypi/requests/2.25.1 ** regex; version 2021.9.30 -- https://bitbucket.org/mrabarnett/mrab-regex/src/hg/ ** s3transfer; version 0.3.6 -- https://github.com/boto/s3transfer @@ -798,138 +797,6 @@ modification, are permitted provided that the following conditions are met: ------ -** networkx; version 2.6.3 -- https://pypi.org/project/networkx/ -Copyright (c) 2022, Aric Hagberg. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -#. 
Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -#. Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided with - the distribution. - -#. Neither the name of the copyright holder nor the names of the - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -#. THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY - EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR - PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR - CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE - USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH - DAMAGE. - ------- - -** jsonpointer; version 2.3 -- https://pypi.org/project/jsonpointer/ -Copyright (c) 2022, Stefan Kögl. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -#. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -#. Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided with - the distribution. - -#. 
Neither the name of the copyright holder nor the names of the - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -#. THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY - EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR - PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR - CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE - USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH - DAMAGE. - ------- - -** jsonpickle; version 3.0.0 -- https://pypi.org/project/jsonpickle/ -Copyright (c) 2022, David Aguilar. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -#. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -#. Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided with - the distribution. - -#. Neither the name of the copyright holder nor the names of the - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -#. 
THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY - EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR - PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR - CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE - USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH - DAMAGE. - ------- - -** jsonpatch; version 1.32 -- https://pypi.org/project/jsonpickle/ -Copyright (c) 2021-2022, Stefan Kögl. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -#. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -#. Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided with - the distribution. - -#. Neither the name of the copyright holder nor the names of the - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -#. THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY - EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR - PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR - CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE - USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH - DAMAGE. - ------- - ** python-chardet; version 3.0.4 -- https://pypi.org/project/chardet/ Copyright (C) 1991, 1999 Free Software Foundation, Inc. @@ -1988,10 +1855,6 @@ Copyright (c) 2015 Hynek Schlawack Copyright (c) 2014-2022 Anthon van der Neut, Ruamel bvba ** ruamel-yaml-clib; version 0.2.7 -- https://pypi.org/project/ruamel.yaml.clib/ Copyright (c) 2019-2021 Anthon van der Neut, Ruamel bvba -** cfn-lint; version 0.72.2 -- https://pypi.org/project/cfn-lint/ -** jschema-to-python; version 1.2.3 -- https://pypi.org/project/jschema-to-python/ -** junit-xml; version 1.9 -- https://pypi.org/project/junit-xml/ -** sarif-om; version 1.0.4 https://pypi.org/project/sarif-om/ ** wheel; version 0.36.2 -- https://github.com/pypa/wheel "wheel" copyright (c) 2012-2014 Daniel Holth and contributors. 
diff --git a/installer/pyinstaller/hidden_imports.py b/installer/pyinstaller/hidden_imports.py index b6b0cd4c90..d1d72803e5 100644 --- a/installer/pyinstaller/hidden_imports.py +++ b/installer/pyinstaller/hidden_imports.py @@ -12,5 +12,4 @@ "pkg_resources.py2_warn", "aws_lambda_builders.workflows", "configparser", - "cfnlint" ] diff --git a/installer/pyinstaller/hook-samcli.py b/installer/pyinstaller/hook-samcli.py index ad25bc8065..50daf90146 100644 --- a/installer/pyinstaller/hook-samcli.py +++ b/installer/pyinstaller/hook-samcli.py @@ -4,13 +4,9 @@ hiddenimports = SAM_CLI_HIDDEN_IMPORTS datas = ( - # Collect data files, raw python files (if include_py_files=True) and package metadata directories. hooks.collect_all( "samcli", include_py_files=True, include_datas=["hook_packages/terraform/copy_terraform_built_artifacts.py"] )[0] - + hooks.collect_all("jschema_to_python", include_py_files=False)[0] - + hooks.collect_all("cfnlint", include_py_files=True)[0] - # Collect ONLY data files. + hooks.collect_data_files("samcli") + hooks.collect_data_files("samtranslator") + hooks.collect_data_files("aws_lambda_builders") diff --git a/requirements/base.txt b/requirements/base.txt index fa53290cde..cd6eff534d 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -7,7 +7,7 @@ Werkzeug<2.1 boto3>=1.19.5,==1.* jmespath~=0.10.0 ruamel_yaml==0.17.21 -PyYAML>=5.4.1,==5.* +PyYAML~=5.3 cookiecutter~=2.1.1 aws-sam-translator==1.55.0 #docker minor version updates can include breaking changes. Auto update micro version only. 
@@ -27,6 +27,3 @@ typing_extensions==3.10.0.0 regex==2021.9.30 # NOTE: tzlocal is not a direct dependency of SAM CLI, but pin to 3.0 as 4.0 break appveyor jobs tzlocal==3.0 - -#Adding cfn-lint dependency for SAM validate -cfn-lint==0.72.2 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index cad56a3f76..5fac5ec045 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with Python 3.7 -# by the following command: +# This file is autogenerated by pip-compile with python 3.7 +# To update, run: # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/reproducible-linux.txt # @@ -11,10 +11,7 @@ arrow==1.0.3 \ attrs==20.3.0 \ --hash=sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6 \ --hash=sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700 - # via - # jschema-to-python - # jsonschema - # sarif-om + # via jsonschema aws-lambda-builders==1.23.1 \ --hash=sha256:4db4133decf149c5b2daa0959db0f6e5563a9763beac0e25005a90c2ce7abe48 \ --hash=sha256:dda6f7e04bc77120c206ef5269a0c17bbcecacf065865248d31c1c80f325a343 @@ -23,9 +20,7 @@ aws-sam-translator==1.55.0 \ --hash=sha256:08e182e76d6fabc13ce2f38b8a3932b3131407c6ad29ec2849ef3d9a41576b94 \ --hash=sha256:93dc74614ab291c86be681e025679d08f4fa685ed6b55d410f62f2f235012205 \ --hash=sha256:e86a67b87329a0de7d531d33257d1a448d0d6ecd84aee058d084957f28a8e4b1 - # via - # aws-sam-cli (setup.py) - # cfn-lint + # via aws-sam-cli (setup.py) backports-zoneinfo==0.2.1 \ --hash=sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf \ --hash=sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328 \ @@ -131,10 +126,6 @@ cffi==1.15.1 \ --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 # via cryptography 
-cfn-lint==0.72.2 \ - --hash=sha256:1117a0a50899807021fb7b87ebcfb04eddda06269d9a45c7dbbba3bc7c4631f5 \ - --hash=sha256:d7ef1658687cf119adcf6ec312f28b5adc312d80293a54cdd5c140e2695a243c - # via aws-sam-cli (setup.py) chardet==3.0.4 \ --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 @@ -205,7 +196,6 @@ importlib-metadata==5.0.0 \ --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # click - # jsonpickle # jsonschema itsdangerous==2.1.2 \ --hash=sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44 \ @@ -229,31 +219,10 @@ jmespath==0.10.0 \ # aws-sam-cli (setup.py) # boto3 # botocore -jschema-to-python==1.2.3 \ - --hash=sha256:76ff14fe5d304708ccad1284e4b11f96a658949a31ee7faed9e0995279549b91 \ - --hash=sha256:8a703ca7604d42d74b2815eecf99a33359a8dccbb80806cce386d5e2dd992b05 - # via cfn-lint -jsonpatch==1.32 \ - --hash=sha256:26ac385719ac9f54df8a2f0827bb8253aa3ea8ab7b3368457bcdb8c14595a397 \ - --hash=sha256:b6ddfe6c3db30d81a96aaeceb6baf916094ffa23d7dd5fa2c13e13f8b6e600c2 - # via cfn-lint -jsonpickle==3.0.0 \ - --hash=sha256:504586e5c0fd52fd76a56f86c36f8c4d29778bdef92dc06d38ca6e2e9fc4f090 \ - --hash=sha256:7c4b13d595ff3520148ed870b9f5917023ebdc55c9ec0cb695688fdc16e90c3e - # via jschema-to-python -jsonpointer==2.3 \ - --hash=sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9 \ - --hash=sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a - # via jsonpatch jsonschema==3.2.0 \ --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \ --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a - # via - # aws-sam-translator - # cfn-lint -junit-xml==1.9 \ - --hash=sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732 - # via cfn-lint + # via aws-sam-translator markupsafe==2.1.1 \ 
--hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ @@ -296,16 +265,6 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 -networkx==2.6.3 \ - --hash=sha256:80b6b89c77d1dfb64a4c7854981b60aeea6360ac02c6d4e4913319e0a313abef \ - --hash=sha256:c0946ed31d71f1b732b5aaa6da5a0388a345019af232ce2f49c766e2d6795c51 - # via cfn-lint -pbr==5.11.0 \ - --hash=sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe \ - --hash=sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a - # via - # jschema-to-python - # sarif-om pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 @@ -363,7 +322,6 @@ pyyaml==5.4.1 \ --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 # via # aws-sam-cli (setup.py) - # cfn-lint # cookiecutter # serverlessrepo regex==2021.9.30 \ @@ -461,10 +419,6 @@ s3transfer==0.5.0 \ --hash=sha256:50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c \ --hash=sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803 # via boto3 -sarif-om==1.0.4 \ - --hash=sha256:539ef47a662329b1c8502388ad92457425e95dc0aaaf995fe46f4984c4771911 \ - --hash=sha256:cd5f416b3083e00d402a92e449a7ff67af46f11241073eea0461802a3b5aef98 - # via cfn-lint serverlessrepo==0.1.10 \ --hash=sha256:671f48038123f121437b717ed51f253a55775590f00fbab6fbc6a01f8d05c017 \ --hash=sha256:b99c69be8ce87ccc48103fbe371ba7b148c3374c57862e59118c402522e5ed52 @@ -481,7 +435,6 @@ six==1.15.0 \ # via # docker # jsonschema - # junit-xml # python-dateutil # serverlessrepo # websocket-client diff --git a/samcli/cli/options.py b/samcli/cli/options.py index 
57ee82b9eb..9a255225f0 100644 --- a/samcli/cli/options.py +++ b/samcli/cli/options.py @@ -40,15 +40,6 @@ def region_option(f): def callback(ctx, param, value): state = ctx.ensure_object(Context) - from botocore import exceptions, utils - from samcli.commands.exceptions import RegionError - - try: - utils.validate_region_name(value) - except exceptions.InvalidRegionError as ex: - raise RegionError( - message=f"Provided region: {value} doesn't match a supported format", wrapped_from=ex.__class__.__name__ - ) from ex state.region = value return value diff --git a/samcli/commands/validate/validate.py b/samcli/commands/validate/validate.py index 28964e8a2f..88cdba024e 100644 --- a/samcli/commands/validate/validate.py +++ b/samcli/commands/validate/validate.py @@ -9,7 +9,6 @@ from samtranslator.translator.arn_generator import NoRegionFound -from samcli.cli.context import Context from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options, print_cmdline_args from samcli.commands._utils.cdk_support_decorators import unsupported_command_cdk from samcli.commands._utils.options import template_option_without_build @@ -23,26 +22,24 @@ @template_option_without_build @aws_creds_options @cli_framework_options -@click.option( - "--lint", - is_flag=True, - help="Run linting validation on template through cfn-lint. " - "Create a cfnlintrc config file to specify additional parameters. " - "For more information, see: https://github.com/aws-cloudformation/cfn-lint", -) @pass_context @track_command @check_newer_version @print_cmdline_args @unsupported_command_cdk(alternative_command="cdk doctor") -def cli(ctx, template_file, config_file, config_env, lint): +def cli( + ctx, + template_file, + config_file, + config_env, +): # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing - do_cli(ctx, template_file, lint) # pragma: no cover + do_cli(ctx, template_file) # pragma: no cover -def do_cli(ctx, template, lint): +def do_cli(ctx, template): """ Implementation of the ``cli`` method, just separated out for unit testing purposes """ @@ -53,36 +50,29 @@ def do_cli(ctx, template, lint): from .lib.exceptions import InvalidSamDocumentException from .lib.sam_template_validator import SamTemplateValidator - if lint: - _lint(ctx, template) - else: - sam_template = _read_sam_file(template) - - iam_client = boto3.client("iam") - validator = SamTemplateValidator( - sam_template, ManagedPolicyLoader(iam_client), profile=ctx.profile, region=ctx.region - ) - - try: - validator.is_valid() - except InvalidSamDocumentException as e: - click.secho("Template provided at '{}' was invalid SAM Template.".format(template), bg="red") - raise InvalidSamTemplateException(str(e)) from e - except NoRegionFound as no_region_found_e: - raise UserException( - "AWS Region was not found. Please configure your region through a profile or --region option", - wrapped_from=no_region_found_e.__class__.__name__, - ) from no_region_found_e - except NoCredentialsError as e: - raise UserException( - "AWS Credentials are required. Please configure your credentials.", wrapped_from=e.__class__.__name__ - ) from e - - click.secho( - "{} is a valid SAM Template. 
This is according to basic SAM Validation, " - 'for additional validation, please run with "--lint" option'.format(template), - fg="green", - ) + sam_template = _read_sam_file(template) + + iam_client = boto3.client("iam") + validator = SamTemplateValidator( + sam_template, ManagedPolicyLoader(iam_client), profile=ctx.profile, region=ctx.region + ) + + try: + validator.is_valid() + except InvalidSamDocumentException as e: + click.secho("Template provided at '{}' was invalid SAM Template.".format(template), bg="red") + raise InvalidSamTemplateException(str(e)) from e + except NoRegionFound as no_region_found_e: + raise UserException( + "AWS Region was not found. Please configure your region through a profile or --region option", + wrapped_from=no_region_found_e.__class__.__name__, + ) from no_region_found_e + except NoCredentialsError as e: + raise UserException( + "AWS Credentials are required. Please configure your credentials.", wrapped_from=e.__class__.__name__ + ) from e + + click.secho("{} is a valid SAM Template".format(template), fg="green") def _read_sam_file(template): @@ -105,59 +95,3 @@ def _read_sam_file(template): sam_template = yaml_parse(sam_template.read()) return sam_template - - -def _lint(ctx: Context, template: str) -> None: - """ - Parses provided SAM template and maps errors from CloudFormation template back to SAM template. - - Cfn-lint loggers are added to the SAM cli logging hierarchy which at the root logger - configures with INFO level logging and a different formatting. This exposes and duplicates - some cfn-lint logs that are not typically shown to customers. Explicitly setting the level to - WARNING and propagate to be False remediates these issues. 
- - Parameters - ----------- - ctx - Click context object - template - Path to the template file - - """ - - import cfnlint.core # type: ignore - import logging - from samcli.commands.exceptions import UserException - - cfn_lint_logger = logging.getLogger("cfnlint") - cfn_lint_logger.propagate = False - - try: - lint_args = [template] - if ctx.debug: - lint_args.append("--debug") - if ctx.region: - lint_args.append("--region") - lint_args.append(ctx.region) - - (args, filenames, formatter) = cfnlint.core.get_args_filenames(lint_args) - cfn_lint_logger.setLevel(logging.WARNING) - matches = list(cfnlint.core.get_matches(filenames, args)) - if not matches: - click.secho("{} is a valid SAM Template".format(template), fg="green") - rules = cfnlint.core.get_used_rules() - matches_output = formatter.print_matches(matches, rules, filenames) - - if matches_output: - click.secho(matches_output) - - except cfnlint.core.InvalidRegionException as e: - raise UserException( - "AWS Region was not found. 
Please configure your region through the --region option", - wrapped_from=e.__class__.__name__, - ) from e - except cfnlint.core.CfnLintExitException as lint_error: - raise UserException( - lint_error, - wrapped_from=lint_error.__class__.__name__, - ) from lint_error diff --git a/tests/integration/testdata/validate/default_yaml/templateError.yaml b/tests/integration/testdata/validate/default_yaml/templateError.yaml deleted file mode 100644 index 7e5502d2b1..0000000000 --- a/tests/integration/testdata/validate/default_yaml/templateError.yaml +++ /dev/null @@ -1,20 +0,0 @@ -AWSTemplateFormatVersion: '2010-09-09' -Transform: AWS::Serverless-2016-10-31 - -Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - CodeUri: HelloWorldFunction - Handler: app.lambdaHandler - Runtime: nodejs14.x - - HelloWorldFunction: - Type: AWS::Serverless::Api - Properties: - StageName: Prod - DefinitionUri: s3://sam-demo-bucket/webpage_swagger.json - EndpointConfiguration: - Type: REGIONAL - - diff --git a/tests/integration/validate/test_validate_command.py b/tests/integration/validate/test_validate_command.py index 08d603ee60..cba52002dd 100644 --- a/tests/integration/validate/test_validate_command.py +++ b/tests/integration/validate/test_validate_command.py @@ -28,16 +28,6 @@ class TestValidate(TestCase): @classmethod def setUpClass(cls): cls.patterns = { - TemplateFileTypes.JSON: re.compile( - r"template\.json is a valid SAM Template. This is according to basic SAM Validation, " - 'for additional validation, please run with "--lint" option(\r\n)?$' - ), - TemplateFileTypes.YAML: re.compile( - r"template\.yaml is a valid SAM Template. 
This is according to basic SAM Validation, " - 'for additional validation, please run with "--lint" option(\r\n)?$' - ), - } - cls.lint_patterns = { TemplateFileTypes.JSON: re.compile(r"template\.json is a valid SAM Template(\r\n)?$"), TemplateFileTypes.YAML: re.compile(r"template\.yaml is a valid SAM Template(\r\n)?$"), } @@ -52,7 +42,6 @@ def command_list( profile: Optional[str] = None, region: Optional[str] = None, config_file: Optional[Path] = None, - lint: Optional[bool] = None, ) -> List[str]: command_list = [self.base_command(), "validate"] if template_file: @@ -63,8 +52,6 @@ def command_list( command_list += ["--region", region] if config_file: command_list += ["--config_file", str(config_file)] - if lint: - command_list += ["--lint"] return command_list @parameterized.expand( @@ -100,61 +87,3 @@ def test_validate_logs_warning_for_cdk_project(self): ) self.assertIn(warning_message, output) - - @parameterized.expand( - [ - ("default_yaml", TemplateFileTypes.YAML), # project with template.yaml - ("default_json", TemplateFileTypes.JSON), # project with template.json - ("multiple_files", TemplateFileTypes.YAML), # project with both template.yaml and template.json - ( - "with_build", - TemplateFileTypes.JSON, - ), # project with template.json and standard build directory .aws-sam/build/template.yaml - ] - ) - def test_lint_template(self, relative_folder: str, expected_file: TemplateFileTypes): - test_data_path = Path(__file__).resolve().parents[2] / "integration" / "testdata" / "validate" - process_dir = test_data_path / relative_folder - command_result = run_command(self.command_list(lint=True), cwd=str(process_dir)) - pattern = self.lint_patterns[expected_file] # type: ignore - output = command_result.stdout.decode("utf-8") - self.assertEqual(command_result.process.returncode, 0) - self.assertRegex(output, pattern) - - def test_lint_error_no_region(self): - test_data_path = Path(__file__).resolve().parents[2] / "integration" / "testdata" / "validate" / 
"default_json" - template_file = "template.json" - template_path = test_data_path.joinpath(template_file) - command_result = run_command(self.command_list(lint=True, region="--debug", template_file=template_path)) - output = command_result.stderr.decode("utf-8") - - error_message = f"Error: Provided region: --debug doesn't match a supported format" - - self.assertIn(error_message, output) - - def test_lint_error_invalid_region(self): - test_data_path = Path(__file__).resolve().parents[2] / "integration" / "testdata" / "validate" / "default_json" - template_file = "template.json" - template_path = test_data_path.joinpath(template_file) - command_result = run_command(self.command_list(lint=True, region="us-north-5", template_file=template_path)) - output = command_result.stderr.decode("utf-8") - - error_message = f"Error: AWS Region was not found. Please configure your region through the --region option" - - self.assertIn(error_message, output) - - def test_lint_invalid_template(self): - test_data_path = Path(__file__).resolve().parents[2] / "integration" / "testdata" / "validate" / "default_yaml" - template_file = "templateError.yaml" - template_path = test_data_path.joinpath(template_file) - command_result = run_command(self.command_list(lint=True, template_file=template_path)) - output = command_result.stdout.decode("utf-8") - - warning_message = ( - 'E0000 Duplicate found "HelloWorldFunction" (line 5)\n' - f'{os.path.join(test_data_path, "templateError.yaml")}:5:3\n\n' - 'E0000 Duplicate found "HelloWorldFunction" (line 12)\n' - f'{os.path.join(test_data_path, "templateError.yaml")}:12:3\n\n' - ) - - self.assertIn(warning_message, output) diff --git a/tests/unit/cli/test_main.py b/tests/unit/cli/test_main.py index 4ea81af843..0ed9ff5992 100644 --- a/tests/unit/cli/test_main.py +++ b/tests/unit/cli/test_main.py @@ -3,7 +3,6 @@ from unittest import TestCase from click.testing import CliRunner from samcli.cli.main import cli -from samcli.commands.exceptions import 
RegionError class TestCliBase(TestCase): @@ -27,37 +26,6 @@ def test_cli_some_command(self): result = runner.invoke(cli, ["local", "generate-event", "s3"]) self.assertEqual(result.exit_code, 0) - def test_cli_with_non_standard_format_region(self): - mock_cfg = Mock() - with patch("samcli.cli.main.GlobalConfig", mock_cfg): - runner = CliRunner() - for command in ["validate", "deploy"]: - result = runner.invoke(cli, [command, "--region", "--non-standard-format"]) - self.assertEqual(result.exit_code, 1) - self.assertIn( - "Error: Provided region: --non-standard-format doesn't match a supported format", result.output - ) - self.assertRaises(RegionError) - - def test_cli_with_empty_region(self): - mock_cfg = Mock() - with patch("samcli.cli.main.GlobalConfig", mock_cfg): - runner = CliRunner() - for command in ["validate", "deploy"]: - result = runner.invoke(cli, [command, "--region"]) - self.assertEqual(result.exit_code, 2) - self.assertIn("Error: Option '--region' requires an argument", result.output) - - @patch("samcli.commands.validate.validate.do_cli") - def test_cli_with_valid_region(self, mock_do_cli): - mock_cfg = Mock() - with patch("samcli.cli.main.GlobalConfig", mock_cfg): - runner = CliRunner() - result = runner.invoke(cli, ["validate", "--region", "us-west-2"]) - self.assertEqual(result.exit_code, 0) - self.assertTrue(mock_do_cli.called) - self.assertEqual(mock_do_cli.call_count, 1) - def test_cli_with_debug(self): mock_cfg = Mock() with patch("samcli.cli.main.GlobalConfig", mock_cfg): diff --git a/tests/unit/commands/samconfig/test_samconfig.py b/tests/unit/commands/samconfig/test_samconfig.py index 5b93f32438..b1effedc84 100644 --- a/tests/unit/commands/samconfig/test_samconfig.py +++ b/tests/unit/commands/samconfig/test_samconfig.py @@ -101,7 +101,7 @@ def test_validate(self, do_cli_mock): LOG.exception("Command failed", exc_info=result.exc_info) self.assertIsNone(result.exception) - do_cli_mock.assert_called_with(ANY, str(Path(os.getcwd(), 
"mytemplate.yaml")), False) + do_cli_mock.assert_called_with(ANY, str(Path(os.getcwd(), "mytemplate.yaml"))) @patch("samcli.commands.build.command.do_cli") def test_build(self, do_cli_mock): @@ -1249,7 +1249,7 @@ def test_secondary_option_name_template_validate(self, do_cli_mock): LOG.exception("Command failed", exc_info=result.exc_info) self.assertIsNone(result.exception) - do_cli_mock.assert_called_with(ANY, str(Path(os.getcwd(), "mytemplate.yaml")), False) + do_cli_mock.assert_called_with(ANY, str(Path(os.getcwd(), "mytemplate.yaml"))) @contextmanager diff --git a/tests/unit/commands/validate/test_cli.py b/tests/unit/commands/validate/test_cli.py index 16d46a8e73..b9a32c2180 100644 --- a/tests/unit/commands/validate/test_cli.py +++ b/tests/unit/commands/validate/test_cli.py @@ -2,17 +2,14 @@ from unittest.mock import Mock, patch from collections import namedtuple -from botocore.exceptions import NoCredentialsError, InvalidRegionError - -from cfnlint.core import CfnLintExitException, InvalidRegionException # type: ignore +from botocore.exceptions import NoCredentialsError from samcli.commands.exceptions import UserException from samcli.commands.local.cli_common.user_exceptions import SamTemplateNotFoundException, InvalidSamTemplateException from samcli.commands.validate.lib.exceptions import InvalidSamDocumentException -from samcli.commands.validate.validate import do_cli, _read_sam_file, _lint +from samcli.commands.validate.validate import do_cli, _read_sam_file ctx_mock = namedtuple("ctx", ["profile", "region"]) -ctx_lint_mock = namedtuple("ctx", ["debug", "region"]) class TestValidateCli(TestCase): @@ -53,7 +50,7 @@ def test_template_fails_validation(self, patched_boto, read_sam_file_patch, clic template_valiadator.return_value = is_valid_mock with self.assertRaises(InvalidSamTemplateException): - do_cli(ctx=ctx_mock(profile="profile", region="region"), template=template_path, lint=False) + do_cli(ctx=ctx_mock(profile="profile", region="region"), 
template=template_path) @patch("samcli.commands.validate.lib.sam_template_validator.SamTemplateValidator") @patch("samcli.commands.validate.validate.click") @@ -68,7 +65,7 @@ def test_no_credentials_provided(self, patched_boto, read_sam_file_patch, click_ template_valiadator.return_value = is_valid_mock with self.assertRaises(UserException): - do_cli(ctx=ctx_mock(profile="profile", region="region"), template=template_path, lint=False) + do_cli(ctx=ctx_mock(profile="profile", region="region"), template=template_path) @patch("samcli.commands.validate.lib.sam_template_validator.SamTemplateValidator") @patch("samcli.commands.validate.validate.click") @@ -82,39 +79,4 @@ def test_template_passes_validation(self, patched_boto, read_sam_file_patch, cli is_valid_mock.is_valid.return_value = True template_valiadator.return_value = is_valid_mock - do_cli(ctx=ctx_mock(profile="profile", region="region"), template=template_path, lint=False) - - @patch("samcli.commands.validate.validate.click") - @patch("samcli.commands.validate.validate._lint") - def test_lint_template_passes(self, click_patch, lint_patch): - template_path = "path_to_template" - - lint_patch.return_value = True - - do_cli(ctx=ctx_lint_mock(debug=False, region="region"), template=template_path, lint=True) - - @patch("cfnlint.core.get_args_filenames") - @patch("cfnlint.core.get_matches") - @patch("samcli.commands.validate.validate.click") - def test_lint_invalid_region_argument_fails(self, click_patch, matches_patch, args_patch): - template_path = "path_to_template" - - args_patch.return_value = ("A", "B", "C") - - matches_patch.side_effect = InvalidRegionException - - with self.assertRaises(UserException): - _lint(ctx=ctx_lint_mock(debug=False, region="region"), template=template_path) - - @patch("cfnlint.core.get_args_filenames") - @patch("cfnlint.core.get_matches") - @patch("samcli.commands.validate.validate.click") - def test_lint_exception_fails(self, click_patch, matches_patch, args_patch): - template_path 
= "path_to_template" - - args_patch.return_value = ("A", "B", "C") - - matches_patch.side_effect = CfnLintExitException - - with self.assertRaises(UserException): - _lint(ctx=ctx_lint_mock(debug=False, region="region"), template=template_path) + do_cli(ctx=ctx_mock(profile="profile", region="region"), template=template_path) From 7a6524e9529e4a49466e4da733689dc20e810903 Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Mon, 19 Dec 2022 09:20:37 -0800 Subject: [PATCH 26/26] chore: bump version to 1.67.0 (#4497) --- samcli/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/__init__.py b/samcli/__init__.py index 4f792170de..baa3020aa8 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.66.0" +__version__ = "1.67.0"