diff --git a/requirements/base.txt b/requirements/base.txt
index 7e7884b603..e265981b86 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -1,18 +1,18 @@
chevron~=0.12
click~=7.1
-Flask~=1.1.2
+Flask~=1.1.4
#Need to add Schemas latest SDK.
boto3>=1.19.5,==1.*
jmespath~=0.10.0
PyYAML~=5.3
-cookiecutter~=1.7.2
+cookiecutter~=2.1.1
aws-sam-translator==1.46.0
#docker minor version updates can include breaking changes. Auto update micro version only.
docker~=4.2.0
dateparser~=1.0
requests==2.25.1
serverlessrepo==0.1.10
-aws_lambda_builders==1.17.0
+aws_lambda_builders==1.18.0
tomlkit==0.7.2
watchdog==2.1.2
diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt
index dcb71ce836..163bdc5dc6 100644
--- a/requirements/reproducible-linux.txt
+++ b/requirements/reproducible-linux.txt
@@ -12,10 +12,10 @@ attrs==20.3.0 \
--hash=sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6 \
--hash=sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700
# via jsonschema
-aws-lambda-builders==1.17.0 \
- --hash=sha256:1d296dc521f3f3f356ffbe290ca204713b1e8a24612262cf1c9283ffe34dc443 \
- --hash=sha256:3eb7ca5ab71761766586db080a8b80ab81346b307fa72d5cea64ccd69fb41efe \
- --hash=sha256:abae4ccfc419fc5cd8eebd4cc81e335ec7610f53804ce1aa2b655159ce339610
+aws-lambda-builders==1.18.0 \
+ --hash=sha256:555c1bb72bab633deeb806cc347b40ed865d63166e536c89ff71f0ba739577b1 \
+ --hash=sha256:c0bd4b4288f0aa9cba27493065f5fb986425b5b49620b93a391620403eeb97e0 \
+ --hash=sha256:c5235699d36b8edda7e649fbc3a23ed868eda1e15c4a83528df33939bdb75754
# via aws-sam-cli (setup.py)
aws-sam-translator==1.46.0 \
--hash=sha256:03cb83135c98b1c47bf2b3c15507808c7e06d4717a3ca3da1cee6e7ebcb5282b \
@@ -78,9 +78,9 @@ click==7.1.2 \
# aws-sam-cli (setup.py)
# cookiecutter
# flask
-cookiecutter==1.7.3 \
- --hash=sha256:6b9a4d72882e243be077a7397d0f1f76fe66cf3df91f3115dbb5330e214fa457 \
- --hash=sha256:f8671531fa96ab14339d0c59b4f662a4f12a2ecacd94a0f70a3500843da588e2
+cookiecutter==2.1.1 \
+ --hash=sha256:9f3ab027cec4f70916e28f03470bdb41e637a3ad354b4d65c765d93aad160022 \
+ --hash=sha256:f3982be8d9c53dac1261864013fdec7f83afd2e42ede6f6dd069c5e149c540d5
# via aws-sam-cli (setup.py)
dateparser==1.0.0 \
--hash=sha256:159cc4e01a593706a15cd4e269a0b3345edf3aef8bf9278a57dac8adf5bf1e4a \
@@ -90,9 +90,9 @@ docker==4.2.2 \
--hash=sha256:03a46400c4080cb6f7aa997f881ddd84fef855499ece219d75fbdb53289c17ab \
--hash=sha256:26eebadce7e298f55b76a88c4f8802476c5eaddbdbe38dbc6cce8781c47c9b54
# via aws-sam-cli (setup.py)
-flask==1.1.2 \
- --hash=sha256:4efa1ae2d7c9865af48986de8aeb8504bf32c7f3d6fdc9353d34b21f4b127060 \
- --hash=sha256:8a4fdd8936eba2512e9c85df320a37e694c93945b33ef33c89946a340a238557
+flask==1.1.4 \
+ --hash=sha256:0fbeb6180d383a9186d0d6ed954e0042ad9f18e0e8de088b2b419d526927d196 \
+ --hash=sha256:c34f04500f2cbbea882b1acb02002ad6fe6b7ffa64a6164577995657f50aed22
# via aws-sam-cli (setup.py)
idna==2.10 \
--hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \
@@ -201,10 +201,6 @@ markupsafe==2.0.1 \
# via
# aws-sam-cli (setup.py)
# jinja2
-poyo==0.5.0 \
- --hash=sha256:3e2ca8e33fdc3c411cd101ca395668395dd5dc7ac775b8e809e3def9f9fe041a \
- --hash=sha256:e26956aa780c45f011ca9886f044590e2d8fd8b61db7b1c1cf4e0869f48ed4dd
- # via cookiecutter
pyrsistent==0.17.3 \
--hash=sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e
# via jsonschema
@@ -254,6 +250,7 @@ pyyaml==5.4.1 \
--hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0
# via
# aws-sam-cli (setup.py)
+ # cookiecutter
# serverlessrepo
regex==2021.9.30 \
--hash=sha256:0de8ad66b08c3e673b61981b9e3626f8784d5564f8c3928e2ad408c0eb5ac38c \
@@ -319,8 +316,6 @@ six==1.15.0 \
--hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \
--hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced
# via
- # aws-lambda-builders
- # cookiecutter
# docker
# jsonschema
# python-dateutil
diff --git a/samcli/__init__.py b/samcli/__init__.py
index da88753bc8..cf4f9029d9 100644
--- a/samcli/__init__.py
+++ b/samcli/__init__.py
@@ -2,4 +2,4 @@
SAM CLI version
"""
-__version__ = "1.52.0"
+__version__ = "1.53.0"
diff --git a/samcli/commands/_utils/command_exception_handler.py b/samcli/commands/_utils/command_exception_handler.py
new file mode 100644
index 0000000000..c4da732b34
--- /dev/null
+++ b/samcli/commands/_utils/command_exception_handler.py
@@ -0,0 +1,70 @@
+"""
+Contains method decorator which can be used to convert common exceptions into click exceptions
+which will end execution gracefully
+"""
+from functools import wraps
+from typing import Callable, Dict, Any, Optional
+
+from botocore.exceptions import NoRegionError, ClientError
+
+from samcli.commands._utils.options import parameterized_option
+from samcli.commands.exceptions import CredentialsError, RegionError
+from samcli.lib.utils.boto_utils import get_client_error_code
+
+
+@parameterized_option
+def command_exception_handler(f, additional_mapping: Optional[Dict[Any, Callable[[Any], None]]] = None):
+ """
+ This function returns a wrapped function definition, which handles configured exceptions gracefully
+ """
+
+ def decorator_command_exception_handler(func):
+ @wraps(func)
+ def wrapper_command_exception_handler(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except Exception as ex:
+ exception_type = type(ex)
+
+ # check if there is a custom handling defined
+ exception_handler = (additional_mapping or {}).get(exception_type)
+ if exception_handler:
+ exception_handler(ex)
+
+ # if no custom handling defined search for default handlers
+ exception_handler = COMMON_EXCEPTION_HANDLER_MAPPING.get(exception_type)
+ if exception_handler:
+ exception_handler(ex)
+
+ # if no handler defined, raise the exception
+ raise ex
+
+ return wrapper_command_exception_handler
+
+ return decorator_command_exception_handler(f)
+
+
+def _handle_no_region_error(ex: NoRegionError) -> None:
+ raise RegionError(
+ "No region information found. Please provide --region parameter or configure default region settings. "
+ "\nFor more information please visit https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/"
+ "setup-credentials.html#setup-credentials-setting-region"
+ )
+
+
+def _handle_client_errors(ex: ClientError) -> None:
+ error_code = get_client_error_code(ex)
+
+ if error_code in ("ExpiredToken", "ExpiredTokenException"):
+ raise CredentialsError(
+ "Your credential configuration is invalid or has expired token value. \nFor more information please "
+ "visit: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html"
+ )
+
+ raise ex
+
+
+COMMON_EXCEPTION_HANDLER_MAPPING: Dict[Any, Callable] = {
+ NoRegionError: _handle_no_region_error,
+ ClientError: _handle_client_errors,
+}
diff --git a/samcli/commands/_utils/experimental.py b/samcli/commands/_utils/experimental.py
index 1f238f160e..3d0acbaa32 100644
--- a/samcli/commands/_utils/experimental.py
+++ b/samcli/commands/_utils/experimental.py
@@ -43,7 +43,6 @@ class ExperimentalFlag:
"""Class for storing all experimental related ConfigEntries"""
All = ExperimentalEntry("experimentalAll", EXPERIMENTAL_ENV_VAR_PREFIX + "FEATURES")
- Accelerate = ExperimentalEntry("experimentalAccelerate", EXPERIMENTAL_ENV_VAR_PREFIX + "ACCELERATE")
Esbuild = ExperimentalEntry("experimentalEsbuild", EXPERIMENTAL_ENV_VAR_PREFIX + "ESBUILD")
@@ -240,7 +239,7 @@ def prompt_experimental(
if is_experimental_enabled(config_entry):
update_experimental_context()
return True
- confirmed = click.confirm(prompt, default=False)
+ confirmed = click.confirm(Colored().yellow(prompt), default=False)
if confirmed:
set_experimental(config_entry=config_entry, enabled=True)
update_experimental_context()
diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py
index cc849f351b..e2eeebb54f 100644
--- a/samcli/commands/_utils/options.py
+++ b/samcli/commands/_utils/options.py
@@ -367,12 +367,11 @@ def common_observability_click_options():
"-t",
is_flag=True,
help="Tail events. This will ignore the end time argument and continue to fetch events as they "
- "become available. [Beta Feature] If in beta --tail without a --name will pull from all possible resources",
+ "become available. Using --tail without --name will pull from all possible resources",
),
click.option(
"--output",
help="""
- [Beta Feature]
The formatting style of the command output. Following options are available:\n
TEXT: Prints information as regular text with some formatting (default option)\n
JSON: Prints each line as JSON without formatting
@@ -474,19 +473,20 @@ def stack_name_option(f, required=False, callback=None):
return stack_name_click_option(required, callback)(f)
-def s3_bucket_click_option(guided):
- callback = None if guided else partial(artifact_callback, artifact=ZIP)
+def s3_bucket_click_option(disable_callback):
+ callback = None if disable_callback else partial(artifact_callback, artifact=ZIP)
+
return click.option(
"--s3-bucket",
required=False,
- callback=callback,
help="The name of the S3 bucket where this command uploads the artifacts that are referenced in your template.",
+ callback=callback,
)
@parameterized_option
-def s3_bucket_option(f, guided=False):
- return s3_bucket_click_option(guided)(f)
+def s3_bucket_option(f, disable_callback=False):
+ return s3_bucket_click_option(disable_callback)(f)
def build_dir_click_option():
diff --git a/samcli/commands/build/build_context.py b/samcli/commands/build/build_context.py
index 817547402c..1090b17b72 100644
--- a/samcli/commands/build/build_context.py
+++ b/samcli/commands/build/build_context.py
@@ -408,6 +408,10 @@ def manifest_path_override(self) -> Optional[str]:
def mode(self) -> Optional[str]:
return self._mode
+ @property
+ def use_base_dir(self) -> bool:
+ return self._use_raw_codeuri
+
@property
def resources_to_build(self) -> ResourcesToBuildCollector:
"""
@@ -562,7 +566,7 @@ def _is_function_buildable(function: Function):
docker_context = cast(str, metadata.get("DockerContext", ""))
if not dockerfile or not docker_context:
LOG.debug(
- "Skip Building %s function, as it does not contain either Dockerfile or DockerContext "
+ "Skip Building %s function, as it is missing either Dockerfile or DockerContext "
"metadata properties.",
function.full_path,
)
diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py
index 06130eb68b..13700f3d12 100644
--- a/samcli/commands/delete/command.py
+++ b/samcli/commands/delete/command.py
@@ -4,6 +4,7 @@
import logging
+from typing import Optional
import click
from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args
@@ -63,12 +64,26 @@
is_flag=True,
required=False,
)
+@click.option(
+ "--s3-bucket",
+ help=("The S3 bucket path you want to delete."),
+ type=click.STRING,
+ default=None,
+ required=False,
+)
+@click.option(
+ "--s3-prefix",
help=("The S3 prefix you want to delete."),
+ type=click.STRING,
+ default=None,
+ required=False,
+)
@aws_creds_options
@common_options
@pass_context
@check_newer_version
@print_cmdline_args
-def cli(ctx, stack_name: str, config_file: str, config_env: str, no_prompts: bool):
+def cli(ctx, stack_name: str, config_file: str, config_env: str, no_prompts: bool, s3_bucket: str, s3_prefix: str):
"""
`sam delete` command entry point
"""
@@ -81,10 +96,21 @@ def cli(ctx, stack_name: str, config_file: str, config_env: str, no_prompts: boo
config_env=config_env,
profile=ctx.profile,
no_prompts=no_prompts,
+ s3_bucket=s3_bucket,
+ s3_prefix=s3_prefix,
) # pragma: no cover
-def do_cli(stack_name: str, region: str, config_file: str, config_env: str, profile: str, no_prompts: bool):
+def do_cli(
+ stack_name: str,
+ region: str,
+ config_file: str,
+ config_env: str,
+ profile: str,
+ no_prompts: bool,
+ s3_bucket: Optional[str],
+ s3_prefix: Optional[str],
+):
"""
Implementation of the ``cli`` method
"""
@@ -97,5 +123,7 @@ def do_cli(stack_name: str, region: str, config_file: str, config_env: str, prof
config_file=config_file,
config_env=config_env,
no_prompts=no_prompts,
+ s3_bucket=s3_bucket,
+ s3_prefix=s3_prefix,
) as delete_context:
delete_context.run()
diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py
index f228580fd1..d060dbc681 100644
--- a/samcli/commands/delete/delete_context.py
+++ b/samcli/commands/delete/delete_context.py
@@ -4,8 +4,10 @@
import logging
import json
-import boto3
+from typing import Optional
+
+import boto3
import click
from click import confirm
@@ -36,15 +38,25 @@
class DeleteContext:
# TODO: Separate this context into 2 separate contexts guided and non-guided, just like deploy.
- def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str, no_prompts: bool):
+ def __init__(
+ self,
+ stack_name: str,
+ region: str,
+ profile: str,
+ config_file: str,
+ config_env: str,
+ no_prompts: bool,
+ s3_bucket: Optional[str],
+ s3_prefix: Optional[str],
+ ):
self.stack_name = stack_name
self.region = region
self.profile = profile
self.config_file = config_file
self.config_env = config_env
self.no_prompts = no_prompts
- self.s3_bucket = None
- self.s3_prefix = None
+ self.s3_bucket = s3_bucket
+ self.s3_prefix = s3_prefix
self.cf_utils = None
self.s3_uploader = None
self.ecr_uploader = None
@@ -95,8 +107,10 @@ def parse_config_file(self):
self.region = config_options.get("region", None)
if not self.profile:
self.profile = config_options.get("profile", None)
- self.s3_bucket = config_options.get("s3_bucket", None)
- self.s3_prefix = config_options.get("s3_prefix", None)
+ if not self.s3_bucket:
+ self.s3_bucket = config_options.get("s3_bucket", None)
+ if not self.s3_prefix:
+ self.s3_prefix = config_options.get("s3_prefix", None)
def init_clients(self):
"""
@@ -142,8 +156,9 @@ def s3_prompts(self):
Guided prompts asking user to delete s3 artifacts
"""
# Note: s3_bucket and s3_prefix information is only
- # available if a local toml file is present or if
- # this information is obtained from the template resources and so if this
+ # available if it is provided as an option flag, a
+ # local toml file or if this information is obtained
+ # from the template resources and so if this
# information is not found, warn the user that S3 artifacts
# will need to be manually deleted.
@@ -319,12 +334,14 @@ def delete(self):
self.cf_utils.delete_stack(stack_name=self.stack_name, retain_resources=retain_resources)
self.cf_utils.wait_for_delete(self.stack_name)
- # If s3_bucket information is not available, warn the user
+ # Warn the user that s3 information is missing and to use --s3 options
if not self.s3_bucket:
- LOG.debug("Cannot delete s3 files as no s3_bucket found")
+ LOG.debug("Cannot delete s3 objects as bucket is missing")
click.secho(
- "\nWarning: s3_bucket and s3_prefix information could not be obtained from local config file"
- " or cloudformation template, delete the s3 files manually if required",
+ "\nWarning: Cannot resolve s3 bucket information from command options"
+ ", local config file or cloudformation template. Please use"
+ " --s3-bucket next time and"
+ " delete s3 files manually if required.",
fg="yellow",
)
diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py
index 1a31d0d6f8..17f2358eab 100644
--- a/samcli/commands/deploy/command.py
+++ b/samcli/commands/deploy/command.py
@@ -109,7 +109,7 @@
help="Preserves the state of previously provisioned resources when an operation fails.",
)
@stack_name_option(callback=guided_deploy_stack_name) # pylint: disable=E1120
-@s3_bucket_option(guided=True) # pylint: disable=E1120
+@s3_bucket_option(disable_callback=True) # pylint: disable=E1120
@image_repository_option
@image_repositories_option
@force_upload_option
diff --git a/samcli/commands/deploy/deploy_context.py b/samcli/commands/deploy/deploy_context.py
index 25d6068944..b96374288b 100644
--- a/samcli/commands/deploy/deploy_context.py
+++ b/samcli/commands/deploy/deploy_context.py
@@ -285,7 +285,7 @@ def deploy(
s3_uploader=s3_uploader,
tags=tags,
)
- LOG.info(result)
+ LOG.debug(result)
except deploy_exceptions.DeployFailedError as ex:
LOG.error(str(ex))
diff --git a/samcli/commands/exceptions.py b/samcli/commands/exceptions.py
index 10a8dbf337..07229d5063 100644
--- a/samcli/commands/exceptions.py
+++ b/samcli/commands/exceptions.py
@@ -90,3 +90,9 @@ class InvalidImageException(UserException):
"""
Value provided to --build-image or --invoke-image is invalid URI
"""
+
+
+class InvalidStackNameException(UserException):
+ """
+ Value provided to --stack-name is invalid
+ """
diff --git a/samcli/commands/local/cli_common/invoke_context.py b/samcli/commands/local/cli_common/invoke_context.py
index b0953bdb1a..6c0198ac56 100644
--- a/samcli/commands/local/cli_common/invoke_context.py
+++ b/samcli/commands/local/cli_common/invoke_context.py
@@ -9,13 +9,13 @@
from pathlib import Path
from typing import Dict, List, Optional, IO, cast, Tuple, Any, Type
+from samcli.commands.local.cli_common.user_exceptions import InvokeContextException, DebugContextException
from samcli.lib.utils import osutils
from samcli.lib.providers.provider import Stack, Function
from samcli.lib.providers.sam_stack_provider import SamLocalStackProvider
from samcli.lib.utils.async_utils import AsyncContext
from samcli.lib.utils.stream_writer import StreamWriter
from samcli.commands.exceptions import ContainersInitializationException
-from samcli.commands.local.cli_common.user_exceptions import InvokeContextException, DebugContextException
from samcli.commands.local.lib.local_lambda import LocalLambdaRunner
from samcli.commands.local.lib.debug_context import DebugContext
from samcli.local.lambdafn.runtime import LambdaRuntime, WarmLambdaRuntime
@@ -28,6 +28,24 @@
LOG = logging.getLogger(__name__)
+class DockerIsNotReachableException(InvokeContextException):
+ """
+ Docker is not installed or not running at the moment
+ """
+
+
+class InvalidEnvironmentVariablesFileException(InvokeContextException):
+ """
+ User provided an environment variables file which couldn't be read by SAM CLI
+ """
+
+
+class NoFunctionIdentifierProvidedException(InvokeContextException):
+ """
+ If template has more than one function defined and user didn't provide any function logical id
+ """
+
+
class ContainersInitializationMode(Enum):
EAGER = "EAGER"
LAZY = "LAZY"
@@ -239,7 +257,7 @@ def __enter__(self) -> "InvokeContext":
)
if not self._container_manager.is_docker_reachable:
- raise InvokeContextException(
+ raise DockerIsNotReachableException(
"Running AWS SAM projects locally requires Docker. Have you got it installed and running?"
)
@@ -320,7 +338,7 @@ def function_identifier(self) -> str:
all_function_full_paths = [f.full_path for f in all_functions]
# There are more functions in the template, and function identifier is not provided, hence raise.
- raise InvokeContextException(
+ raise NoFunctionIdentifierProvidedException(
"You must provide a function logical ID when there are more than one functions in your template. "
"Possible options in your template: {}".format(all_function_full_paths)
)
@@ -427,7 +445,8 @@ def _get_stacks(self) -> List[Stack]:
)
return stacks
except (TemplateNotFoundException, TemplateFailedParsingException) as ex:
- raise InvokeContextException(str(ex)) from ex
+ LOG.debug("Can't read stacks information, either template is not found or it is invalid", exc_info=ex)
+ raise ex
@staticmethod
def _get_env_vars_value(filename: Optional[str]) -> Optional[Dict]:
@@ -449,7 +468,7 @@ def _get_env_vars_value(filename: Optional[str]) -> Optional[Dict]:
return cast(Dict, json.load(fp))
except Exception as ex:
- raise InvokeContextException(
+ raise InvalidEnvironmentVariablesFileException(
"Could not read environment variables overrides from file {}: {}".format(filename, str(ex))
) from ex
diff --git a/samcli/commands/logs/command.py b/samcli/commands/logs/command.py
index ca3d01f9b9..991fd0eb86 100644
--- a/samcli/commands/logs/command.py
+++ b/samcli/commands/logs/command.py
@@ -9,14 +9,13 @@
from samcli.cli.cli_config_file import configuration_option, TomlProvider
from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options, print_cmdline_args
from samcli.commands._utils.options import common_observability_options
+from samcli.commands.logs.validation_and_exception_handlers import (
+ SAM_LOGS_ADDITIONAL_EXCEPTION_HANDLERS,
+ stack_name_cw_log_group_validation,
+)
from samcli.lib.telemetry.metric import track_command
+from samcli.commands._utils.command_exception_handler import command_exception_handler
from samcli.lib.utils.version_checker import check_newer_version
-from samcli.commands._utils.experimental import (
- ExperimentalFlag,
- force_experimental_option,
- experimental,
- prompt_experimental,
-)
LOG = logging.getLogger(__name__)
@@ -37,9 +36,10 @@
$ sam logs -n HelloWorldFunction --stack-name mystack --tail \n
\b
Use the --filter option to quickly find logs that match terms, phrases or values in your log events.
-$ sam logs -n HelloWorldFunction --stack-name mystack --filter "error" \n
+$ sam logs -n HelloWorldFunction --stack-name mystack --filter 'error' \n
\b
-[Beta Features]
+Fetch logs for all supported resources in your application, and additionally from the specified log groups.
+$ sam logs --cw-log-group /aws/lambda/myfunction-123 --cw-log-group /aws/lambda/myfunction-456
\b
You can now fetch logs from supported resources, by only providing --stack-name parameter
$ sam logs --stack-name mystack \n
@@ -55,9 +55,9 @@
"--name",
"-n",
multiple=True,
- help="Name(s) of your AWS Lambda function. If this function is a part of a CloudFormation stack, "
- "this can be the LogicalID of function resource in the CloudFormation/SAM template. "
- "[Beta Feature] Multiple names can be provided by repeating the parameter again. "
+ help="The name of the resource for which to fetch logs. If this resource is a part of an AWS CloudFormation stack, "
+ "this can be the LogicalID of the resource in the CloudFormation/SAM template. "
+ "Multiple names can be provided by repeating the parameter again. "
"If resource is in a nested stack, name can be prepended by nested stack name to pull logs "
"from that resource (NestedStackLogicalId/ResourceLogicalId). "
"If it is not provided and no --cw-log-group have been given, it will scan "
@@ -76,27 +76,24 @@
"--include-traces",
"-i",
is_flag=True,
- help="[Beta Feature] Include the XRay traces in the log output.",
+ help="Include the XRay traces in the log output.",
)
@click.option(
"--cw-log-group",
multiple=True,
- help="[Beta Feature] "
- "Additional CloudWatch Log group names that are not auto-discovered based upon --name parameter. "
+ help="Additional CloudWatch Log group names that are not auto-discovered based upon --name parameter. "
"When provided, it will only tail the given CloudWatch Log groups. If you want to tail log groups related "
"to resources, please also provide their names as well",
)
@common_observability_options
-@experimental
@cli_framework_options
@aws_creds_options
@pass_context
@track_command
@check_newer_version
@print_cmdline_args
-@force_experimental_option("include_traces", config_entry=ExperimentalFlag.Accelerate) # pylint: disable=E1120
-@force_experimental_option("cw_log_group", config_entry=ExperimentalFlag.Accelerate) # pylint: disable=E1120
-@force_experimental_option("output", config_entry=ExperimentalFlag.Accelerate) # pylint: disable=E1120
+@command_exception_handler(SAM_LOGS_ADDITIONAL_EXCEPTION_HANDLERS)
+@stack_name_cw_log_group_validation
def cli(
ctx,
name,
@@ -155,10 +152,7 @@ def do_cli(
from samcli.lib.observability.util import OutputOption
from samcli.lib.utils.boto_utils import get_boto_client_provider_with_config, get_boto_resource_provider_with_config
- if not names or len(names) > 1:
- if not prompt_experimental(ExperimentalFlag.Accelerate):
- return
- else:
+ if names and len(names) <= 1:
click.echo(
"You can now use 'sam logs' without --name parameter, "
"which will pull the logs from all supported resources in your stack."
@@ -169,7 +163,9 @@ def do_cli(
boto_client_provider = get_boto_client_provider_with_config(region=region, profile=profile)
boto_resource_provider = get_boto_resource_provider_with_config(region=region, profile=profile)
- resource_logical_id_resolver = ResourcePhysicalIdResolver(boto_resource_provider, stack_name, names)
+ resource_logical_id_resolver = ResourcePhysicalIdResolver(
+ boto_resource_provider, boto_client_provider, stack_name, names
+ )
# only fetch all resources when no CloudWatch log group defined
fetch_all_when_no_resource_name_given = not cw_log_groups
diff --git a/samcli/commands/logs/logs_context.py b/samcli/commands/logs/logs_context.py
index 7dea7b51d1..92f666c6de 100644
--- a/samcli/commands/logs/logs_context.py
+++ b/samcli/commands/logs/logs_context.py
@@ -25,6 +25,12 @@ class InvalidTimestampError(UserException):
"""
+class TimeParseError(UserException):
+ """
+ Used to throw if parsing of the given time string or UTC conversion is failed
+ """
+
+
def parse_time(time_str: str, property_name: str):
"""
Parse the time from the given string, convert to UTC, and return the datetime object
@@ -47,14 +53,20 @@ def parse_time(time_str: str, property_name: str):
InvalidTimestampError
If the string cannot be parsed as a timestamp
"""
- if not time_str:
- return None
+ try:
+ if not time_str:
+ return None
- parsed = parse_date(time_str)
- if not parsed:
- raise InvalidTimestampError("Unable to parse the time provided by '{}'".format(property_name))
+ parsed = parse_date(time_str)
+ if not parsed:
+ raise InvalidTimestampError(f"Unable to parse the time provided by '{property_name}'")
- return to_utc(parsed)
+ return to_utc(parsed)
+ except InvalidTimestampError as ex:
+ raise ex
+ except Exception as ex:
+ LOG.error("Failed to parse given time information %s", time_str, exc_info=ex)
+ raise TimeParseError(f"Unable to parse the time information '{property_name}': '{time_str}'") from ex
class ResourcePhysicalIdResolver:
@@ -73,11 +85,13 @@ class ResourcePhysicalIdResolver:
def __init__(
self,
boto_resource_provider: BotoProviderType,
+ boto_client_provider: BotoProviderType,
stack_name: str,
resource_names: Optional[List[str]] = None,
supported_resource_types: Optional[Set[str]] = None,
):
self._boto_resource_provider = boto_resource_provider
+ self._boto_client_provider = boto_client_provider
self._stack_name = stack_name
if resource_names is None:
resource_names = []
@@ -126,7 +140,10 @@ def _fetch_resources_from_stack(
"""
LOG.debug("Getting logical id of the all resources for stack '%s'", self._stack_name)
stack_resources = get_resource_summaries(
- self._boto_resource_provider, self._stack_name, ResourcePhysicalIdResolver.DEFAULT_SUPPORTED_RESOURCES
+ self._boto_resource_provider,
+ self._boto_client_provider,
+ self._stack_name,
+ ResourcePhysicalIdResolver.DEFAULT_SUPPORTED_RESOURCES,
)
if selected_resource_names:
@@ -161,4 +178,10 @@ def _get_selected_resources(
selected_resource = resource_summaries.get(selected_resource_name)
if selected_resource:
resources.append(selected_resource)
+ else:
+ LOG.warning(
+ "Resource name (%s) does not exist. Available resource names: %s",
+ selected_resource_name,
+ ", ".join(resource_summaries.keys()),
+ )
return resources
diff --git a/samcli/commands/logs/puller_factory.py b/samcli/commands/logs/puller_factory.py
index 3cd5246d1e..09e745aff8 100644
--- a/samcli/commands/logs/puller_factory.py
+++ b/samcli/commands/logs/puller_factory.py
@@ -5,6 +5,8 @@
import logging
from typing import List, Optional
+from botocore.exceptions import ClientError
+
from samcli.commands.exceptions import UserException
from samcli.commands.logs.console_consumers import CWConsoleEventConsumer
from samcli.commands.traces.traces_puller_factory import generate_trace_puller
@@ -25,7 +27,7 @@
ObservabilityCombinedPuller,
)
from samcli.lib.observability.util import OutputOption
-from samcli.lib.utils.boto_utils import BotoProviderType
+from samcli.lib.utils.boto_utils import BotoProviderType, get_client_error_code
from samcli.lib.utils.cloudformation import CloudFormationResourceSummary
from samcli.lib.utils.colors import Colored
@@ -101,9 +103,11 @@ def generate_puller(
# populate puller instances for the additional CloudWatch log groups
for cw_log_group in additional_cw_log_groups:
consumer = generate_consumer(filter_pattern, output)
+ logs_client = boto_client_provider("logs")
+ _validate_cw_log_group_name(cw_log_group, logs_client)
pullers.append(
CWLogPuller(
- boto_client_provider("logs"),
+ logs_client,
consumer,
cw_log_group,
)
@@ -122,6 +126,14 @@ def generate_puller(
return ObservabilityCombinedPuller(pullers)
+def _validate_cw_log_group_name(cw_log_group, logs_client):
+ try:
+ _ = logs_client.describe_log_streams(logGroupName=cw_log_group, limit=1)
+ except ClientError as ex:
+ if get_client_error_code(ex) == "ResourceNotFoundException":
+ LOG.warning("CloudWatch log group name (%s) does not exist.", cw_log_group)
+
+
def generate_consumer(
filter_pattern: Optional[str] = None, output: OutputOption = OutputOption.text, resource_name: Optional[str] = None
):
diff --git a/samcli/commands/logs/validation_and_exception_handlers.py b/samcli/commands/logs/validation_and_exception_handlers.py
new file mode 100644
index 0000000000..edf3224802
--- /dev/null
+++ b/samcli/commands/logs/validation_and_exception_handlers.py
@@ -0,0 +1,68 @@
+"""
+Contains helper functions for validation and exception handling of "sam logs" command
+"""
+from functools import wraps
+from typing import Dict, Any, Callable
+
+import click
+from botocore.exceptions import ClientError
+from click import Context, BadOptionUsage
+
+from samcli.commands.exceptions import InvalidStackNameException
+from samcli.lib.utils.boto_utils import get_client_error_code
+
+
+def stack_name_cw_log_group_validation(func):
+ """
+ Wrapper Validation function that will run last after all the CLI parameters have been loaded
+ to check for conditions surrounding `--stack-name` and `--cw-log-group`. The
+ reason they are done last instead of in callback functions, is because the options depend
+ on each other, and this breaks cyclic dependencies.
+
+ :param func: Click command function
+ :return: Click command function after validation
+ """
+
+ @wraps(func)
+ def wrapped(*args, **kwargs):
+ ctx = click.get_current_context()
+ stack_name = ctx.params.get("stack_name")
+ cw_log_groups = ctx.params.get("cw_log_group")
+ names = ctx.params.get("name")
+
+ # if --name is provided --stack-name should be provided as well
+ if names and not stack_name:
+ raise BadOptionUsage(
+ option_name="--stack-name",
+ ctx=ctx,
+ message="Missing option. Please provide '--stack-name' when using '--name' option",
+ )
+
+ # either --stack-name or --cw-log-group flags should be provided
+ if not stack_name and not cw_log_groups:
+ raise BadOptionUsage(
+ option_name="--stack-name",
+ ctx=ctx,
+ message="Missing option. Please provide '--stack-name' or '--cw-log-group'",
+ )
+
+ return func(*args, **kwargs)
+
+ return wrapped
+
+
+def _handle_client_error(ex: ClientError) -> None:
+ """
+ Handles client error which was caused by ListStackResources event
+ """
+ operation_name = ex.operation_name
+ client_error_code = get_client_error_code(ex)
+ if client_error_code == "ValidationError" and operation_name == "ListStackResources":
+ click_context: Context = click.get_current_context()
+ stack_name_value = click_context.params.get("stack_name")
+ raise InvalidStackNameException(
+ f"Invalid --stack-name parameter. Stack with id '{stack_name_value}' does not exist"
+ )
+
+
+SAM_LOGS_ADDITIONAL_EXCEPTION_HANDLERS: Dict[Any, Callable] = {ClientError: _handle_client_error}
diff --git a/samcli/commands/sync/command.py b/samcli/commands/sync/command.py
index 415c41758a..fd642e897d 100644
--- a/samcli/commands/sync/command.py
+++ b/samcli/commands/sync/command.py
@@ -8,6 +8,7 @@
from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options, print_cmdline_args
from samcli.commands._utils.cdk_support_decorators import unsupported_command_cdk
from samcli.commands._utils.options import (
+ s3_bucket_option,
template_option_without_build,
parameter_override_option,
capabilities_option,
@@ -27,6 +28,7 @@
)
from samcli.cli.cli_config_file import configuration_option, TomlProvider
from samcli.commands._utils.click_mutex import ClickMutex
+from samcli.commands.sync.sync_context import SyncContext
from samcli.lib.utils.colors import Colored
from samcli.lib.utils.version_checker import check_newer_version
from samcli.lib.bootstrap.bootstrap import manage_stack
@@ -34,7 +36,7 @@
from samcli.lib.telemetry.metric import track_command, track_template_warnings
from samcli.lib.warnings.sam_cli_warning import CodeDeployWarning, CodeDeployConditionWarning
from samcli.commands.build.command import _get_mode_value_from_envvar
-from samcli.lib.sync.sync_flow_factory import SyncFlowFactory
+from samcli.lib.sync.sync_flow_factory import SyncCodeResources, SyncFlowFactory
from samcli.lib.sync.sync_flow_executor import SyncFlowExecutor
from samcli.lib.providers.sam_stack_provider import SamLocalStackProvider
from samcli.lib.providers.provider import (
@@ -44,13 +46,6 @@
)
from samcli.cli.context import Context
from samcli.lib.sync.watch_manager import WatchManager
-from samcli.commands._utils.experimental import (
- ExperimentalFlag,
- experimental,
- is_experimental_enabled,
- set_experimental,
- update_experimental_context,
-)
if TYPE_CHECKING: # pragma: no cover
from samcli.commands.deploy.deploy_context import DeployContext
@@ -60,7 +55,7 @@
LOG = logging.getLogger(__name__)
HELP_TEXT = """
-[Beta Feature] Update/Sync local artifacts to AWS
+Update/Sync local artifacts to AWS
By default, the sync command runs a full stack update. You can specify --code or --watch to switch modes.
\b
@@ -77,20 +72,8 @@
Enter Y to proceed with the command, or enter N to cancel:
"""
-SYNC_CONFIRMATION_TEXT_WITH_BETA = """
-This feature is currently in beta. Visit the docs page to learn more about the AWS Beta terms https://aws.amazon.com/service-terms/.
-
-The SAM CLI will use the AWS Lambda, Amazon API Gateway, and AWS StepFunctions APIs to upload your code without
-performing a CloudFormation deployment. This will cause drift in your CloudFormation stack.
-**The sync command should only be used against a development stack**.
-
-Confirm that you are synchronizing a development stack and want to turn on beta features.
-
-Enter Y to proceed with the command, or enter N to cancel:
-"""
-
-SHORT_HELP = "[Beta Feature] Sync a project to AWS"
+SHORT_HELP = "Sync a project to AWS"
DEFAULT_TEMPLATE_NAME = "template.yaml"
DEFAULT_CAPABILITIES = ("CAPABILITY_NAMED_IAM", "CAPABILITY_AUTO_EXPAND")
@@ -122,19 +105,21 @@
@click.option(
"--resource",
multiple=True,
- help="Sync code for all types of the resource.",
+ type=click.Choice(SyncCodeResources.values(), case_sensitive=True),
+ help=f"Sync code for all resources of the given resource type. Accepted values are {SyncCodeResources.values()}",
)
@click.option(
"--dependency-layer/--no-dependency-layer",
default=True,
is_flag=True,
- help="This option separates the dependencies of individual function into another layer, for speeding up the sync"
+ help="This option separates the dependencies of individual function into another layer, for speeding up the sync "
"process",
)
@stack_name_option(required=True) # pylint: disable=E1120
@base_dir_option
@image_repository_option
@image_repositories_option
+@s3_bucket_option(disable_callback=True) # pylint: disable=E1120
@s3_prefix_option
@kms_key_id_option
@role_arn_option
@@ -145,7 +130,6 @@
@notification_arns_option
@tags_option
@capabilities_option(default=DEFAULT_CAPABILITIES) # pylint: disable=E1120
-@experimental
@pass_context
@track_command
@image_repository_validation
@@ -166,6 +150,7 @@ def cli(
parameter_overrides: dict,
image_repository: str,
image_repositories: Optional[Tuple[str]],
+ s3_bucket: str,
s3_prefix: str,
kms_key_id: str,
capabilities: Optional[List[str]],
@@ -197,6 +182,7 @@ def cli(
mode,
image_repository,
image_repositories,
+ s3_bucket,
s3_prefix,
kms_key_id,
capabilities,
@@ -224,6 +210,7 @@ def do_cli(
mode: Optional[str],
image_repository: str,
image_repositories: Optional[Tuple[str]],
+ s3_bucket: str,
s3_prefix: str,
kms_key_id: str,
capabilities: Optional[List[str]],
@@ -242,29 +229,14 @@ def do_cli(
from samcli.commands.package.package_context import PackageContext
from samcli.commands.deploy.deploy_context import DeployContext
- s3_bucket = manage_stack(profile=profile, region=region)
- click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}")
+ if not click.confirm(Colored().yellow(SYNC_CONFIRMATION_TEXT), default=True):
+ return
- click.echo(f"\n\t\tDefault capabilities applied: {DEFAULT_CAPABILITIES}")
- click.echo("To override with customized capabilities, use --capabilities flag or set it in samconfig.toml")
+ s3_bucket_name = s3_bucket or manage_stack(profile=profile, region=region)
build_dir = DEFAULT_BUILD_DIR_WITH_AUTO_DEPENDENCY_LAYER if dependency_layer else DEFAULT_BUILD_DIR
LOG.debug("Using build directory as %s", build_dir)
- build_dir = DEFAULT_BUILD_DIR_WITH_AUTO_DEPENDENCY_LAYER if dependency_layer else DEFAULT_BUILD_DIR
- LOG.debug("Using build directory as %s", build_dir)
-
- confirmation_text = SYNC_CONFIRMATION_TEXT
-
- if not is_experimental_enabled(ExperimentalFlag.Accelerate):
- confirmation_text = SYNC_CONFIRMATION_TEXT_WITH_BETA
-
- if not click.confirm(Colored().yellow(confirmation_text), default=False):
- return
-
- set_experimental(ExperimentalFlag.Accelerate)
- update_experimental_context()
-
with BuildContext(
resource_identifier=None,
template_file=template_file,
@@ -287,7 +259,7 @@ def do_cli(
with osutils.tempfile_platform_independent() as output_template_file:
with PackageContext(
template_file=built_template,
- s3_bucket=s3_bucket,
+ s3_bucket=s3_bucket_name,
image_repository=image_repository,
image_repositories=image_repositories,
s3_prefix=s3_prefix,
@@ -313,7 +285,7 @@ def do_cli(
with DeployContext(
template_file=output_template_file.name,
stack_name=stack_name,
- s3_bucket=s3_bucket,
+ s3_bucket=s3_bucket_name,
image_repository=image_repository,
image_repositories=image_repositories,
no_progressbar=True,
@@ -335,14 +307,17 @@ def do_cli(
disable_rollback=False,
poll_delay=poll_delay,
) as deploy_context:
- if watch:
- execute_watch(template_file, build_context, package_context, deploy_context, dependency_layer)
- elif code:
- execute_code_sync(
- template_file, build_context, deploy_context, resource_id, resource, dependency_layer
- )
- else:
- execute_infra_contexts(build_context, package_context, deploy_context)
+ with SyncContext(dependency_layer, build_context.build_dir, build_context.cache_dir):
+ if watch:
+ execute_watch(
+ template_file, build_context, package_context, deploy_context, dependency_layer
+ )
+ elif code:
+ execute_code_sync(
+ template_file, build_context, deploy_context, resource_id, resource, dependency_layer
+ )
+ else:
+ execute_infra_contexts(build_context, package_context, deploy_context)
def execute_infra_contexts(
diff --git a/samcli/commands/sync/sync_context.py b/samcli/commands/sync/sync_context.py
new file mode 100644
index 0000000000..6995494875
--- /dev/null
+++ b/samcli/commands/sync/sync_context.py
@@ -0,0 +1,106 @@
+"""
+Context object used by sync command
+"""
+import logging
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Optional, cast, Dict
+
+import tomlkit
+from tomlkit.toml_document import TOMLDocument
+from tomlkit.items import Item
+
+from samcli.lib.build.build_graph import DEFAULT_DEPENDENCIES_DIR
+from samcli.lib.utils.osutils import rmtree_if_exists
+
+LOG = logging.getLogger(__name__)
+
+
+DEFAULT_SYNC_STATE_FILE_NAME = "sync.toml"
+
+SYNC_STATE = "sync_state"
+DEPENDENCY_LAYER = "dependency_layer"
+
+
+@dataclass
+class SyncState:
+ dependency_layer: bool
+
+
+def _sync_state_to_toml_document(sync_state: SyncState) -> TOMLDocument:
+ sync_state_toml_table = tomlkit.table()
+ sync_state_toml_table[DEPENDENCY_LAYER] = sync_state.dependency_layer
+
+ toml_document = tomlkit.document()
+ toml_document.add((tomlkit.comment("This file is auto generated by SAM CLI sync command")))
+ toml_document.add(SYNC_STATE, cast(Item, sync_state_toml_table))
+
+ return toml_document
+
+
+def _toml_document_to_sync_state(toml_document: Dict) -> Optional[SyncState]:
+ if not toml_document:
+ return None
+
+ sync_state_toml_table = toml_document.get(SYNC_STATE)
+ if not sync_state_toml_table:
+ return None
+
+ return SyncState(sync_state_toml_table.get(DEPENDENCY_LAYER))
+
+
+class SyncContext:
+
+ _current_state: SyncState
+ _previous_state: Optional[SyncState]
+ _build_dir: Path
+ _cache_dir: Path
+ _file_path: Path
+
+ def __init__(self, dependency_layer: bool, build_dir: str, cache_dir: str):
+ self._current_state = SyncState(dependency_layer)
+ self._previous_state = None
+ self._build_dir = Path(build_dir)
+ self._cache_dir = Path(cache_dir)
+ self._file_path = Path(build_dir).parent.joinpath(DEFAULT_SYNC_STATE_FILE_NAME)
+
+ def __enter__(self) -> "SyncContext":
+ self._read()
+ LOG.debug(
+ "Entering sync context, previous state: %s, current state: %s", self._previous_state, self._current_state
+ )
+
+ # if the auto dependency layer (adl) parameter changed between sam sync runs, clean up build, cache and dependencies folders
+ if self._previous_state and self._previous_state.dependency_layer != self._current_state.dependency_layer:
+ self._cleanup_build_folders()
+
+ return self
+
+ def __exit__(self, *args):
+ self._write()
+
+ def _write(self) -> None:
+ with open(self._file_path, "w+") as file:
+ file.write(tomlkit.dumps(_sync_state_to_toml_document(self._current_state)))
+
+ def _read(self) -> None:
+ try:
+ with open(self._file_path) as file:
+ toml_document = cast(Dict, tomlkit.loads(file.read()))
+ self._previous_state = _toml_document_to_sync_state(toml_document)
+ except OSError:
+ LOG.debug("Missing previous sync state, will create a new file at the end of this execution")
+
+ def _cleanup_build_folders(self):
+ """
+ Cleans up build, cache and dependencies folders for clean start of the next session
+ """
+ LOG.debug("Cleaning up build directory %s", self._build_dir)
+ rmtree_if_exists(self._build_dir)
+
+ LOG.debug("Cleaning up cache directory %s", self._cache_dir)
+ rmtree_if_exists(self._cache_dir)
+
+ dependencies_dir = Path(DEFAULT_DEPENDENCIES_DIR)
+ LOG.debug("Cleaning up dependencies directory: %s", dependencies_dir)
+ rmtree_if_exists(dependencies_dir)
diff --git a/samcli/commands/traces/command.py b/samcli/commands/traces/command.py
index f014d2320b..bd787d939e 100644
--- a/samcli/commands/traces/command.py
+++ b/samcli/commands/traces/command.py
@@ -7,20 +7,27 @@
from samcli.cli.cli_config_file import configuration_option, TomlProvider
from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options, print_cmdline_args
+from samcli.commands._utils.command_exception_handler import command_exception_handler
from samcli.commands._utils.options import common_observability_options
from samcli.lib.observability.util import OutputOption
from samcli.lib.telemetry.metric import track_command
from samcli.lib.utils.version_checker import check_newer_version
-from samcli.commands._utils.experimental import ExperimentalFlag, force_experimental
+
LOG = logging.getLogger(__name__)
HELP_TEXT = """
-[Beta Feature] Use this command to fetch AWS X-Ray traces generated by your stack.\n
+Use this command to fetch AWS X-Ray traces generated by your stack.\n
+\b
+Run the following command to fetch X-Ray traces by ID.
+$ sam traces --trace-id tracing-id-1 --trace-id tracing-id-2
+\b
+Run the following command to tail X-Ray traces as they become available.
+$ sam traces --tail
"""
-@click.command("traces", help=HELP_TEXT, short_help="[Beta Feature] Fetch AWS X-Ray traces")
+@click.command("traces", help=HELP_TEXT, short_help="Fetch AWS X-Ray traces")
@configuration_option(provider=TomlProvider(section="parameters"))
@click.option(
"--trace-id",
@@ -30,12 +37,12 @@
)
@common_observability_options
@cli_framework_options
-@force_experimental(config_entry=ExperimentalFlag.Accelerate) # pylint: disable=E1120
@aws_creds_options
@pass_context
@track_command
@check_newer_version
@print_cmdline_args
+@command_exception_handler
def cli(
ctx,
trace_id,
diff --git a/samcli/lib/bootstrap/nested_stack/nested_stack_manager.py b/samcli/lib/bootstrap/nested_stack/nested_stack_manager.py
index 10fccb3f7d..0263470d34 100644
--- a/samcli/lib/bootstrap/nested_stack/nested_stack_manager.py
+++ b/samcli/lib/bootstrap/nested_stack/nested_stack_manager.py
@@ -198,10 +198,4 @@ def _get_dependencies_dir(self, function_full_path: str) -> Optional[str]:
function_full_path
)
- if not function_build_definition or not function_build_definition.dependencies_dir:
- return None
-
- if not os.path.isdir(function_build_definition.dependencies_dir):
- return None
-
- return function_build_definition.dependencies_dir
+ return function_build_definition.dependencies_dir if function_build_definition else None
diff --git a/samcli/lib/build/app_builder.py b/samcli/lib/build/app_builder.py
index bbd756350c..7cafb9cae1 100644
--- a/samcli/lib/build/app_builder.py
+++ b/samcli/lib/build/app_builder.py
@@ -201,6 +201,7 @@ def build(self) -> ApplicationBuildResult:
self._cache_dir,
self._manifest_path_override,
self._is_building_specific_resource,
+ bool(self._container_manager),
),
)
else:
@@ -214,6 +215,7 @@ def build(self) -> ApplicationBuildResult:
self._cache_dir,
self._manifest_path_override,
self._is_building_specific_resource,
+ bool(self._container_manager),
)
return ApplicationBuildResult(build_graph, build_strategy.build())
diff --git a/samcli/lib/build/build_graph.py b/samcli/lib/build/build_graph.py
index 1a3b8e28b2..ffd51f4959 100644
--- a/samcli/lib/build/build_graph.py
+++ b/samcli/lib/build/build_graph.py
@@ -6,6 +6,7 @@
import logging
import os
import threading
+from abc import abstractmethod
from pathlib import Path
from typing import Sequence, Tuple, List, Any, Optional, Dict, cast, NamedTuple
from copy import deepcopy
@@ -501,6 +502,10 @@ def dependencies_dir(self) -> str:
def env_vars(self) -> Dict:
return deepcopy(self._env_vars)
+ @abstractmethod
+ def get_resource_full_paths(self) -> str:
+ """Returns string representation of resources' full path information for this build definition"""
+
class LayerBuildDefinition(AbstractBuildDefinition):
"""
@@ -527,6 +532,12 @@ def __init__(
# this and move "layer" out of LayerBuildDefinition to take advantage of type check.
self.layer: LayerVersion = None # type: ignore
+ def get_resource_full_paths(self) -> str:
+ if not self.layer:
+ LOG.debug("LayerBuildDefinition with uuid (%s) doesn't have a layer assigned to it", self.uuid)
+ return ""
+ return self.layer.full_path
+
def __str__(self) -> str:
return (
f"LayerBuildDefinition({self.full_path}, {self.codeuri}, {self.source_hash}, {self.uuid}, "
@@ -616,6 +627,10 @@ def get_build_dir(self, artifact_root_dir: str) -> str:
self._validate_functions()
return self.functions[0].get_build_dir(artifact_root_dir)
+ def get_resource_full_paths(self) -> str:
+ """Returns a comma-separated string of the functions' full paths"""
+ return ", ".join([function.full_path for function in self.functions])
+
def _validate_functions(self) -> None:
if not self.functions:
raise InvalidBuildGraphException("Build definition doesn't have any function definition to build")
diff --git a/samcli/lib/build/build_strategy.py b/samcli/lib/build/build_strategy.py
index 2af7e16f57..a62d2e7012 100644
--- a/samcli/lib/build/build_strategy.py
+++ b/samcli/lib/build/build_strategy.py
@@ -8,9 +8,8 @@
import shutil
from abc import abstractmethod, ABC
from copy import deepcopy
-from typing import Callable, Dict, List, Any, Optional, cast, Set
+from typing import Callable, Dict, List, Any, Optional, cast, Set, Tuple, TypeVar
-from samcli.commands._utils.experimental import is_experimental_enabled, ExperimentalFlag
from samcli.lib.utils import osutils
from samcli.lib.utils.async_utils import AsyncContext
from samcli.lib.utils.hash import dir_checksum
@@ -28,6 +27,11 @@
LOG = logging.getLogger(__name__)
+# type definition which can be used in generic types for both FunctionBuildDefinition & LayerBuildDefinition
+FunctionOrLayerBuildDefinition = TypeVar(
+ "FunctionOrLayerBuildDefinition", FunctionBuildDefinition, LayerBuildDefinition
+)
+
def clean_redundant_folders(base_dir: str, uuids: Set[str]) -> None:
"""
@@ -139,7 +143,7 @@ def build_single_function_definition(self, build_definition: FunctionBuildDefini
build_definition.runtime,
build_definition.metadata,
build_definition.architecture,
- [function.full_path for function in build_definition.functions],
+ build_definition.get_resource_full_paths(),
)
# build into one of the functions from this build definition
@@ -163,7 +167,7 @@ def build_single_function_definition(self, build_definition: FunctionBuildDefini
single_build_dir,
build_definition.metadata,
container_env_vars,
- build_definition.dependencies_dir if is_experimental_enabled(ExperimentalFlag.Accelerate) else None,
+ build_definition.dependencies_dir,
build_definition.download_dependencies,
)
function_build_results[single_full_path] = result
@@ -210,7 +214,7 @@ def build_single_layer_definition(self, layer_definition: LayerBuildDefinition)
layer.build_architecture,
single_build_dir,
layer_definition.env_vars,
- layer_definition.dependencies_dir if is_experimental_enabled(ExperimentalFlag.Accelerate) else None,
+ layer_definition.dependencies_dir,
layer_definition.download_dependencies,
)
}
@@ -259,8 +263,8 @@ def build_single_function_definition(self, build_definition: FunctionBuildDefini
if not cache_function_dir.exists() or build_definition.source_hash != source_hash:
LOG.info(
- "Cache is invalid, running build and copying resources to function build definition of %s",
- build_definition.uuid,
+ "Cache is invalid, running build and copying resources for following functions (%s)",
+ build_definition.get_resource_full_paths(),
)
build_result = self._delegate_build_strategy.build_single_function_definition(build_definition)
function_build_results.update(build_result)
@@ -275,8 +279,8 @@ def build_single_function_definition(self, build_definition: FunctionBuildDefini
break
else:
LOG.info(
- "Valid cache found, copying previously built resources from function build definition of %s",
- build_definition.uuid,
+ "Valid cache found, copying previously built resources for following functions (%s)",
+ build_definition.get_resource_full_paths(),
)
for function in build_definition.functions:
# artifacts directory will be created by the builder
@@ -298,8 +302,8 @@ def build_single_layer_definition(self, layer_definition: LayerBuildDefinition)
if not cache_function_dir.exists() or layer_definition.source_hash != source_hash:
LOG.info(
- "Cache is invalid, running build and copying resources to layer build definition of %s",
- layer_definition.uuid,
+ "Cache is invalid, running build and copying resources for following layers (%s)",
+ layer_definition.get_resource_full_paths(),
)
build_result = self._delegate_build_strategy.build_single_layer_definition(layer_definition)
layer_build_result.update(build_result)
@@ -314,8 +318,8 @@ def build_single_layer_definition(self, layer_definition: LayerBuildDefinition)
break
else:
LOG.info(
- "Valid cache found, copying previously built resources from layer build definition of %s",
- layer_definition.uuid,
+ "Valid cache found, copying previously built resources for following layers (%s)",
+ layer_definition.get_resource_full_paths(),
)
# artifacts directory will be created by the builder
artifacts_dir = str(pathlib.Path(self._build_dir, layer_definition.layer.full_path))
@@ -345,45 +349,46 @@ def __init__(
self,
build_graph: BuildGraph,
delegate_build_strategy: BuildStrategy,
- async_context: Optional[AsyncContext] = None,
) -> None:
super().__init__(build_graph)
self._delegate_build_strategy = delegate_build_strategy
- self._async_context = async_context if async_context else AsyncContext()
def build(self) -> Dict[str, str]:
- """
- Runs all build and collects results from async context
- """
- result = {}
with self._delegate_build_strategy:
- # ignore result
- super().build()
- # wait for other executions to complete
-
- async_results = self._async_context.run_async()
- for async_result in async_results:
- result.update(async_result)
+ return super().build()
- return result
+ def _build_layers(self, build_graph: BuildGraph) -> Dict[str, str]:
+ return self._run_builds_async(self.build_single_layer_definition, build_graph.get_layer_build_definitions())
- def build_single_function_definition(self, build_definition: FunctionBuildDefinition) -> Dict[str, str]:
- """
- Passes single function build into async context, no actual result returned from this function
- """
- self._async_context.add_async_task(
- self._delegate_build_strategy.build_single_function_definition, build_definition
+ def _build_functions(self, build_graph: BuildGraph) -> Dict[str, str]:
+ return self._run_builds_async(
+ self.build_single_function_definition, build_graph.get_function_build_definitions()
)
- return {}
+
+ @staticmethod
+ def _run_builds_async(
+ build_method: Callable[[FunctionOrLayerBuildDefinition], Dict[str, str]],
+ build_definitions: Tuple[FunctionOrLayerBuildDefinition, ...],
+ ) -> Dict[str, str]:
+ """Builds given list of build definitions in async and return the result"""
+ if not build_definitions:
+ return dict()
+
+ async_context = AsyncContext()
+ for build_definition in build_definitions:
+ async_context.add_async_task(build_method, build_definition)
+ async_results = async_context.run_async()
+
+ build_result: Dict[str, str] = dict()
+ for async_result in async_results:
+ build_result.update(async_result)
+ return build_result
def build_single_layer_definition(self, layer_definition: LayerBuildDefinition) -> Dict[str, str]:
- """
- Passes single layer build into async context, no actual result returned from this function
- """
- self._async_context.add_async_task(
- self._delegate_build_strategy.build_single_layer_definition, layer_definition
- )
- return {}
+ return self._delegate_build_strategy.build_single_layer_definition(layer_definition)
+
+ def build_single_function_definition(self, build_definition: FunctionBuildDefinition) -> Dict[str, str]:
+ return self._delegate_build_strategy.build_single_function_definition(build_definition)
class IncrementalBuildStrategy(BuildStrategy):
@@ -446,14 +451,17 @@ def _check_whether_manifest_is_changed(
if is_manifest_changed or is_dependencies_dir_missing:
build_definition.manifest_hash = manifest_hash
LOG.info(
- "Manifest file is changed (new hash: %s) or dependency folder (%s) is missing for %s, "
+ "Manifest file is changed (new hash: %s) or dependency folder (%s) is missing for (%s), "
"downloading dependencies and copying/building source",
manifest_hash,
build_definition.dependencies_dir,
- build_definition.uuid,
+ build_definition.get_resource_full_paths(),
)
else:
- LOG.info("Manifest is not changed for %s, running incremental build", build_definition.uuid)
+ LOG.info(
+ "Manifest is not changed for (%s), running incremental build",
+ build_definition.get_resource_full_paths(),
+ )
build_definition.download_dependencies = is_manifest_changed or is_dependencies_dir_missing
@@ -487,6 +495,7 @@ def __init__(
cache_dir: str,
manifest_path_override: Optional[str],
is_building_specific_resource: bool,
+ use_container: bool,
):
super().__init__(build_graph)
self._incremental_build_strategy = IncrementalBuildStrategy(
@@ -503,6 +512,7 @@ def __init__(
cache_dir,
)
self._is_building_specific_resource = is_building_specific_resource
+ self._use_container = use_container
def build(self) -> Dict[str, str]:
result = {}
@@ -513,32 +523,32 @@ def build(self) -> Dict[str, str]:
def build_single_function_definition(self, build_definition: FunctionBuildDefinition) -> Dict[str, str]:
if self._is_incremental_build_supported(build_definition.runtime):
LOG.debug(
- "Running incremental build for runtime %s for build definition %s",
+ "Running incremental build for runtime %s for following resources (%s)",
build_definition.runtime,
- build_definition.uuid,
+ build_definition.get_resource_full_paths(),
)
return self._incremental_build_strategy.build_single_function_definition(build_definition)
LOG.debug(
- "Running incremental build for runtime %s for build definition %s",
+ "Running cached build for runtime %s for following resources (%s)",
build_definition.runtime,
- build_definition.uuid,
+ build_definition.get_resource_full_paths(),
)
return self._cached_build_strategy.build_single_function_definition(build_definition)
def build_single_layer_definition(self, layer_definition: LayerBuildDefinition) -> Dict[str, str]:
if self._is_incremental_build_supported(layer_definition.build_method):
LOG.debug(
- "Running incremental build for runtime %s for build definition %s",
+ "Running incremental build for runtime %s for following resources (%s)",
layer_definition.build_method,
- layer_definition.uuid,
+ layer_definition.get_resource_full_paths(),
)
return self._incremental_build_strategy.build_single_layer_definition(layer_definition)
LOG.debug(
- "Running cached build for runtime %s for build definition %s",
+ "Running cached build for runtime %s for following resources (%s)",
layer_definition.build_method,
- layer_definition.uuid,
+ layer_definition.get_resource_full_paths(),
)
return self._cached_build_strategy.build_single_layer_definition(layer_definition)
@@ -557,9 +567,12 @@ def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
self._cached_build_strategy._clean_redundant_cached()
self._incremental_build_strategy._clean_redundant_dependencies()
- @staticmethod
- def _is_incremental_build_supported(runtime: Optional[str]) -> bool:
- if not runtime or not is_experimental_enabled(ExperimentalFlag.Accelerate):
+ def _is_incremental_build_supported(self, runtime: Optional[str]) -> bool:
+ # incremental build is not supported for in-container builds
+ if self._use_container:
+ return False
+
+ if not runtime:
return False
for supported_runtime_prefix in CachedOrIncrementalBuildStrategyWrapper.SUPPORTED_RUNTIME_PREFIXES:
diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py
index f3cbadfa49..8caa5139e0 100644
--- a/samcli/lib/deploy/deployer.py
+++ b/samcli/lib/deploy/deployer.py
@@ -585,14 +585,18 @@ def sync(
kwargs = self._process_kwargs(kwargs, s3_uploader, capabilities, role_arn, notification_arns)
try:
+ msg = ""
+
if exists:
result = self.update_stack(**kwargs)
self.wait_for_execute(stack_name, "UPDATE", False)
- LOG.info("\nStack update succeeded. Sync infra completed.\n")
+ msg = "\nStack update succeeded. Sync infra completed.\n"
else:
result = self.create_stack(**kwargs)
self.wait_for_execute(stack_name, "CREATE", False)
- LOG.info("\nStack creation succeeded. Sync infra completed.\n")
+ msg = "\nStack creation succeeded. Sync infra completed.\n"
+
+ LOG.info(self._colored.green(msg))
return result
except botocore.exceptions.ClientError as ex:
diff --git a/samcli/lib/init/__init__.py b/samcli/lib/init/__init__.py
index 832b698e2a..1c815ea55e 100644
--- a/samcli/lib/init/__init__.py
+++ b/samcli/lib/init/__init__.py
@@ -116,6 +116,13 @@ def generate_project(
except CookiecutterException as e:
raise GenerateProjectFailedError(project=name, provider_error=e) from e
+ except TypeError as ex:
+ LOG.debug("Error from cookiecutter: %s", ex)
+
+ _apply_tracing(tracing, output_dir, name)
+
+
+def _apply_tracing(tracing: bool, output_dir: str, name: str) -> None:
if tracing:
template_file_path = f"{output_dir}/{name}/template.yaml"
template_modifier = XRayTracingTemplateModifier(template_file_path)
diff --git a/samcli/lib/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/src/HelloWorld/HelloWorld.csproj b/samcli/lib/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/src/HelloWorld/HelloWorld.csproj
index cd45883758..6be649aad0 100644
--- a/samcli/lib/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/src/HelloWorld/HelloWorld.csproj
+++ b/samcli/lib/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/src/HelloWorld/HelloWorld.csproj
@@ -13,7 +13,7 @@
-
+
diff --git a/samcli/lib/init/templates/cookiecutter-aws-sam-hello-golang/requirements-dev.txt b/samcli/lib/init/templates/cookiecutter-aws-sam-hello-golang/requirements-dev.txt
index b10c8d1048..aa7117612c 100644
--- a/samcli/lib/init/templates/cookiecutter-aws-sam-hello-golang/requirements-dev.txt
+++ b/samcli/lib/init/templates/cookiecutter-aws-sam-hello-golang/requirements-dev.txt
@@ -1,4 +1,4 @@
-cookiecutter==1.6.0
+cookiecutter==2.1.1
flake8==3.5.0
pytest==3.3.2
pytest-cookies==0.3.0
diff --git a/samcli/lib/observability/cw_logs/cw_log_group_provider.py b/samcli/lib/observability/cw_logs/cw_log_group_provider.py
index 537c06d50f..6cf4c2dbaa 100644
--- a/samcli/lib/observability/cw_logs/cw_log_group_provider.py
+++ b/samcli/lib/observability/cw_logs/cw_log_group_provider.py
@@ -4,7 +4,6 @@
import logging
from typing import Optional
-from samcli.commands._utils.experimental import force_experimental, ExperimentalFlag
from samcli.lib.utils.resources import (
AWS_LAMBDA_FUNCTION,
AWS_APIGATEWAY_RESTAPI,
@@ -53,7 +52,6 @@ def for_lambda_function(function_name: str) -> str:
return "/aws/lambda/{}".format(function_name)
@staticmethod
- @force_experimental(config_entry=ExperimentalFlag.Accelerate) # pylint: disable=E1120
def for_apigw_rest_api(rest_api_id: str, stage: str = "Prod") -> str:
"""
Returns the CloudWatch Log Group Name created by default for the AWS Api gateway rest api with given id
@@ -76,7 +74,6 @@ def for_apigw_rest_api(rest_api_id: str, stage: str = "Prod") -> str:
return "API-Gateway-Execution-Logs_{}/{}".format(rest_api_id, stage)
@staticmethod
- @force_experimental(config_entry=ExperimentalFlag.Accelerate) # pylint: disable=E1120
def for_apigwv2_http_api(
boto_client_provider: BotoProviderType, http_api_id: str, stage: str = "$default"
) -> Optional[str]:
@@ -110,7 +107,6 @@ def for_apigwv2_http_api(
return log_group_name
@staticmethod
- @force_experimental(config_entry=ExperimentalFlag.Accelerate) # pylint: disable=E1120
def for_step_functions(
boto_client_provider: BotoProviderType,
step_function_name: str,
@@ -155,6 +151,6 @@ def for_step_functions(
step_function_name,
log_group_arn,
)
- LOG.warning("Logging is not configured for StepFunctions (%s)")
+ LOG.warning("Logging is not configured for StepFunctions (%s)", step_function_name)
return None
diff --git a/samcli/lib/observability/observability_info_puller.py b/samcli/lib/observability/observability_info_puller.py
index e0691a0433..f658195f6e 100644
--- a/samcli/lib/observability/observability_info_puller.py
+++ b/samcli/lib/observability/observability_info_puller.py
@@ -88,6 +88,9 @@ def load_events(self, event_ids: Union[List[Any], Dict]):
List of event ids that will be pulled
"""
+ def stop_tailing(self):
+ self.cancelled = True
+
# pylint: disable=fixme
# fixme add ABC parent class back once we bump the pylint to a version 2.8.2 or higher
@@ -187,8 +190,7 @@ def tail(self, start_time: Optional[datetime] = None, filter_pattern: Optional[s
async_context.run_async()
except KeyboardInterrupt:
LOG.info(" CTRL+C received, cancelling...")
- for puller in self._pullers:
- puller.cancelled = True
+ self.stop_tailing()
def load_time_period(
self,
@@ -218,3 +220,9 @@ def load_events(self, event_ids: Union[List[Any], Dict]):
async_context.add_async_task(puller.load_events, event_ids)
LOG.debug("Running all 'load_time_period' tasks in parallel")
async_context.run_async()
+
+ def stop_tailing(self):
+ # if ObservabilityCombinedPuller A is a child puller in other ObservabilityCombinedPuller B, make sure A's child
+ # pullers stop as well when B stops.
+ for puller in self._pullers:
+ puller.stop_tailing()
diff --git a/samcli/lib/providers/provider.py b/samcli/lib/providers/provider.py
index ea816af7b9..3c07a082cb 100644
--- a/samcli/lib/providers/provider.py
+++ b/samcli/lib/providers/provider.py
@@ -603,22 +603,22 @@ def get_parent_stack(child_stack: "Stack", stacks: List["Stack"]) -> Optional["S
return None
@staticmethod
- def get_stack_by_logical_id(logical_id: str, stacks: List["Stack"]) -> Optional["Stack"]:
+ def get_stack_by_full_path(full_path: str, stacks: List["Stack"]) -> Optional["Stack"]:
"""
- Return the stack with given logical id
+ Return the stack with given full path
Parameters
----------
- logical_id str
- logical_id of the stack
+ full_path str
+ full path of the stack like ChildStack/ChildChildStack
stacks : List[Stack]
a list of stack for searching
Returns
-------
Stack
- The stack with the given logical id
+ The stack with the given full path
"""
for stack in stacks:
- if stack.name == logical_id:
+ if stack.stack_path == full_path:
return stack
return None
diff --git a/samcli/lib/providers/sam_function_provider.py b/samcli/lib/providers/sam_function_provider.py
index bcf18e4adf..a63e953862 100644
--- a/samcli/lib/providers/sam_function_provider.py
+++ b/samcli/lib/providers/sam_function_provider.py
@@ -616,7 +616,9 @@ def _locate_layer_from_nested( # pylint: disable=too-many-return-statements
LOG.debug("Search layer %s in child stack", layer_reference)
child_stacks = Stack.get_child_stacks(stack, stacks)
- child_stack = Stack.get_stack_by_logical_id(layer_stack_reference, child_stacks)
+ stack_prefix = stack.stack_path + "/" if stack.stack_path else ""
+ stack_path = stack_prefix + layer_stack_reference
+ child_stack = Stack.get_stack_by_full_path(stack_path, child_stacks)
if not child_stack:
LOG.debug("Child stack not found, layer can not be located in templates")
return None
diff --git a/samcli/lib/sync/flows/function_sync_flow.py b/samcli/lib/sync/flows/function_sync_flow.py
index 43cbbdf5af..47e38396ac 100644
--- a/samcli/lib/sync/flows/function_sync_flow.py
+++ b/samcli/lib/sync/flows/function_sync_flow.py
@@ -60,7 +60,7 @@ def __init__(
)
self._function_identifier = function_identifier
self._function_provider = self._build_context.function_provider
- self._function = cast(Function, self._function_provider.functions.get(self._function_identifier))
+ self._function = cast(Function, self._function_provider.get(self._function_identifier))
self._lambda_client = None
self._lambda_waiter = None
self._lambda_waiter_config = {"Delay": 1, "MaxAttempts": 60}
diff --git a/samcli/lib/sync/flows/generic_api_sync_flow.py b/samcli/lib/sync/flows/generic_api_sync_flow.py
index 16acda723e..37155916b9 100644
--- a/samcli/lib/sync/flows/generic_api_sync_flow.py
+++ b/samcli/lib/sync/flows/generic_api_sync_flow.py
@@ -1,9 +1,9 @@
"""SyncFlow interface for HttpApi and RestApi"""
import logging
from pathlib import Path
-from typing import Any, Dict, List, Optional, TYPE_CHECKING, cast
+from typing import Any, Dict, List, Optional, TYPE_CHECKING
-from samcli.lib.sync.sync_flow import SyncFlow, ResourceAPICall
+from samcli.lib.sync.sync_flow import SyncFlow, ResourceAPICall, get_definition_path
from samcli.lib.providers.provider import Stack, get_resource_by_id, ResourceIdentifier
# BuildContext and DeployContext will only be imported for type checking to improve performance
@@ -20,7 +20,7 @@ class GenericApiSyncFlow(SyncFlow):
_api_client: Any
_api_identifier: str
- _definition_uri: Optional[str]
+ _definition_uri: Optional[Path]
_stacks: List[Stack]
_swagger_body: Optional[bytes]
@@ -65,19 +65,21 @@ def gather_resources(self) -> None:
def _process_definition_file(self) -> Optional[bytes]:
if self._definition_uri is None:
return None
- with open(self._definition_uri, "rb") as swagger_file:
+ with open(str(self._definition_uri), "rb") as swagger_file:
swagger_body = swagger_file.read()
return swagger_body
- def _get_definition_file(self, api_identifier: str) -> Optional[str]:
+ def _get_definition_file(self, api_identifier: str) -> Optional[Path]:
api_resource = get_resource_by_id(self._stacks, ResourceIdentifier(api_identifier))
- if api_resource is None:
+ if not api_resource:
return None
- properties = api_resource.get("Properties", {})
- definition_file = properties.get("DefinitionUri")
- if self._build_context.base_dir and definition_file:
- definition_file = str(Path(self._build_context.base_dir).joinpath(definition_file))
- return cast(Optional[str], definition_file)
+ return get_definition_path(
+ api_resource,
+ self._api_identifier,
+ self._build_context.use_base_dir,
+ self._build_context.base_dir,
+ self._stacks,
+ )
def compare_remote(self) -> bool:
return False
diff --git a/samcli/lib/sync/flows/layer_sync_flow.py b/samcli/lib/sync/flows/layer_sync_flow.py
index a58e0cba90..be59543de6 100644
--- a/samcli/lib/sync/flows/layer_sync_flow.py
+++ b/samcli/lib/sync/flows/layer_sync_flow.py
@@ -12,7 +12,7 @@
from samcli.lib.build.app_builder import ApplicationBuilder
from samcli.lib.package.utils import make_zip
-from samcli.lib.providers.provider import ResourceIdentifier, Stack, get_resource_by_id, Function
+from samcli.lib.providers.provider import ResourceIdentifier, Stack, get_resource_by_id, Function, LayerVersion
from samcli.lib.providers.sam_function_provider import SamFunctionProvider
from samcli.lib.sync.exceptions import MissingPhysicalResourceError, NoLayerVersionsFoundError
from samcli.lib.sync.sync_flow import SyncFlow, ResourceAPICall, ApiCallTypes
@@ -20,6 +20,7 @@
from samcli.lib.utils.colors import Colored
from samcli.lib.utils.hash import file_checksum
from samcli.lib.sync.flows.function_sync_flow import wait_for_function_update_complete
+from samcli.lib.utils.osutils import rmtree_if_exists
if TYPE_CHECKING: # pragma: no cover
from samcli.commands.build.build_context import BuildContext
@@ -175,6 +176,18 @@ class LayerSyncFlow(AbstractLayerSyncFlow):
"""SyncFlow for Lambda Layers"""
_new_layer_version: Optional[int]
+ _layer: LayerVersion
+
+ def __init__(
+ self,
+ layer_identifier: str,
+ build_context: "BuildContext",
+ deploy_context: "DeployContext",
+ physical_id_mapping: Dict[str, str],
+ stacks: List[Stack],
+ ):
+ super().__init__(layer_identifier, build_context, deploy_context, physical_id_mapping, stacks)
+ self._layer = cast(LayerVersion, build_context.layer_provider.get(self._layer_identifier))
def set_up(self) -> None:
super().set_up()
@@ -205,6 +218,8 @@ def set_up(self) -> None:
def gather_resources(self) -> None:
"""Build layer and ZIP it into a temp file in self._zip_file"""
with self._get_lock_chain():
+
+ rmtree_if_exists(self._layer.get_build_dir(self._build_context.build_dir))
builder = ApplicationBuilder(
self._build_context.collect_build_resources(self._layer_identifier),
self._build_context.build_dir,
diff --git a/samcli/lib/sync/flows/stepfunctions_sync_flow.py b/samcli/lib/sync/flows/stepfunctions_sync_flow.py
index 09479288f8..35bd31bc7d 100644
--- a/samcli/lib/sync/flows/stepfunctions_sync_flow.py
+++ b/samcli/lib/sync/flows/stepfunctions_sync_flow.py
@@ -1,11 +1,11 @@
"""Base SyncFlow for StepFunctions"""
import logging
from pathlib import Path
-from typing import Any, Dict, List, TYPE_CHECKING, cast, Optional
+from typing import Any, Dict, List, TYPE_CHECKING, Optional
from samcli.lib.providers.provider import Stack, get_resource_by_id, ResourceIdentifier
-from samcli.lib.sync.sync_flow import SyncFlow, ResourceAPICall
+from samcli.lib.sync.sync_flow import SyncFlow, ResourceAPICall, get_definition_path
from samcli.lib.sync.exceptions import InfraSyncRequiredError
from samcli.lib.providers.exceptions import MissingLocalDefinition
@@ -21,7 +21,7 @@ class StepFunctionsSyncFlow(SyncFlow):
_state_machine_identifier: str
_stepfunctions_client: Any
_stacks: List[Stack]
- _definition_uri: Optional[str]
+ _definition_uri: Optional[Path]
_states_definition: Optional[str]
def __init__(
@@ -75,18 +75,20 @@ def gather_resources(self) -> None:
def _process_definition_file(self) -> Optional[str]:
if self._definition_uri is None:
return None
- with open(self._definition_uri, "r", encoding="utf-8") as states_file:
+ with open(str(self._definition_uri), "r", encoding="utf-8") as states_file:
states_data = states_file.read()
return states_data
- def _get_definition_file(self, state_machine_identifier: str) -> Optional[str]:
- if self._resource is None:
+ def _get_definition_file(self, state_machine_identifier: str) -> Optional[Path]:
+ if not self._resource:
return None
- properties = self._resource.get("Properties", {})
- definition_file = properties.get("DefinitionUri")
- if self._build_context.base_dir:
- definition_file = str(Path(self._build_context.base_dir).joinpath(definition_file))
- return cast(Optional[str], definition_file)
+ return get_definition_path(
+ self._resource,
+ state_machine_identifier,
+ self._build_context.use_base_dir,
+ self._build_context.base_dir,
+ self._stacks,
+ )
def compare_remote(self) -> bool:
# Not comparing with remote right now, instead only making update api calls
diff --git a/samcli/lib/sync/flows/zip_function_sync_flow.py b/samcli/lib/sync/flows/zip_function_sync_flow.py
index 81f032fcdb..8987a23e56 100644
--- a/samcli/lib/sync/flows/zip_function_sync_flow.py
+++ b/samcli/lib/sync/flows/zip_function_sync_flow.py
@@ -20,6 +20,7 @@
from samcli.lib.build.app_builder import ApplicationBuilder
from samcli.lib.sync.sync_flow import ResourceAPICall, ApiCallTypes
+from samcli.lib.utils.osutils import rmtree_if_exists
if TYPE_CHECKING: # pragma: no cover
from samcli.commands.deploy.deploy_context import DeployContext
@@ -79,6 +80,7 @@ def gather_resources(self) -> None:
if self.has_locks():
exit_stack.enter_context(self._get_lock_chain())
+ rmtree_if_exists(self._function.get_build_dir(self._build_context.build_dir))
builder = ApplicationBuilder(
self._build_context.collect_build_resources(self._function_identifier),
self._build_context.build_dir,
diff --git a/samcli/lib/sync/sync_flow.py b/samcli/lib/sync/sync_flow.py
index 6885679187..3f7b5953a1 100644
--- a/samcli/lib/sync/sync_flow.py
+++ b/samcli/lib/sync/sync_flow.py
@@ -3,6 +3,7 @@
from abc import ABC, abstractmethod
from enum import Enum
+from pathlib import Path
from threading import Lock
from typing import Any, Dict, List, NamedTuple, Optional, TYPE_CHECKING, cast, Set
from boto3.session import Session
@@ -315,3 +316,40 @@ def execute(self) -> List["SyncFlow"]:
dependencies = self.gather_dependencies()
LOG.debug("%sFinished", self.log_prefix)
return dependencies
+
+
+def get_definition_path(
+ resource: Dict, identifier: str, use_base_dir: bool, base_dir: str, stacks: List[Stack]
+) -> Optional[Path]:
+ """
+ A helper method used by non-function sync flows to resolve definition file path
+ that are relative to the child stack to absolute path for nested stacks
+
+ Parameters
+ -------
+ resource: Dict
+ The resource's template dict
+ identifier: str
+ The logical ID identifier of the resource
+ use_base_dir: bool
+ Whether or not the base_dir option was used
+ base_dir: str
+ Base directory if provided, otherwise the root template directory
+ stacks: List[Stack]
+ The list of stacks for the application
+
+ Returns
+ -------
+ Optional[Path]
+ A resolved absolute path for the definition file
+ """
+ properties = resource.get("Properties", {})
+ definition_file = properties.get("DefinitionUri")
+ definition_path = None
+ if definition_file:
+ definition_path = Path(base_dir).joinpath(definition_file)
+ if not use_base_dir:
+ child_stack = Stack.get_stack_by_full_path(ResourceIdentifier(identifier).stack_path, stacks)
+ if child_stack:
+ definition_path = Path(child_stack.location).parent.joinpath(definition_file)
+ return definition_path
diff --git a/samcli/lib/sync/sync_flow_factory.py b/samcli/lib/sync/sync_flow_factory.py
index fe1c252003..8f0ffe617c 100644
--- a/samcli/lib/sync/sync_flow_factory.py
+++ b/samcli/lib/sync/sync_flow_factory.py
@@ -16,7 +16,7 @@
from samcli.lib.sync.flows.rest_api_sync_flow import RestApiSyncFlow
from samcli.lib.sync.flows.http_api_sync_flow import HttpApiSyncFlow
from samcli.lib.sync.flows.stepfunctions_sync_flow import StepFunctionsSyncFlow
-from samcli.lib.utils.boto_utils import get_boto_resource_provider_with_config
+from samcli.lib.utils.boto_utils import get_boto_resource_provider_with_config, get_boto_client_provider_with_config
from samcli.lib.utils.cloudformation import get_resource_summaries
from samcli.lib.utils.resources import (
AWS_SERVERLESS_FUNCTION,
@@ -38,6 +38,35 @@
LOG = logging.getLogger(__name__)
+class SyncCodeResources:
+ """
+ A class that records the supported resource types that can perform sync --code
+ """
+
+ _accepted_resources = [
+ AWS_SERVERLESS_FUNCTION,
+ AWS_LAMBDA_FUNCTION,
+ AWS_SERVERLESS_LAYERVERSION,
+ AWS_LAMBDA_LAYERVERSION,
+ AWS_SERVERLESS_API,
+ AWS_APIGATEWAY_RESTAPI,
+ AWS_SERVERLESS_HTTPAPI,
+ AWS_APIGATEWAY_V2_API,
+ AWS_SERVERLESS_STATEMACHINE,
+ AWS_STEPFUNCTIONS_STATEMACHINE,
+ ]
+
+ @classmethod
+ def values(cls) -> List[str]:
+ """
+ A class getter to retrieve the accepted resource list
+
+ Returns: List[str]
+ The accepted resources list
+ """
+ return cls._accepted_resources
+
+
class SyncFlowFactory(ResourceTypeBasedFactory[SyncFlow]): # pylint: disable=E1136
"""Factory class for SyncFlow
Creates appropriate SyncFlow types based on stack resource types
@@ -74,12 +103,17 @@ def __init__(
def load_physical_id_mapping(self) -> None:
"""Load physical IDs of the stack resources from remote"""
LOG.debug("Loading physical ID mapping")
- provider = get_boto_resource_provider_with_config(
+ resource_provider = get_boto_resource_provider_with_config(
+ region=self._deploy_context.region, profile=self._deploy_context.profile
+ )
+ client_provider = get_boto_client_provider_with_config(
region=self._deploy_context.region, profile=self._deploy_context.profile
)
resource_mapping = get_resource_summaries(
- boto_resource_provider=provider, stack_name=self._deploy_context.stack_name
+ boto_resource_provider=resource_provider,
+ boto_client_provider=client_provider,
+ stack_name=self._deploy_context.stack_name,
)
# get the resource_id -> physical_id mapping
diff --git a/samcli/lib/utils/boto_utils.py b/samcli/lib/utils/boto_utils.py
index 887d8df323..eab922f7dd 100644
--- a/samcli/lib/utils/boto_utils.py
+++ b/samcli/lib/utils/boto_utils.py
@@ -4,9 +4,9 @@
from typing import Any, Optional
from boto3 import Session
-from typing_extensions import Protocol
-
from botocore.config import Config
+from botocore.exceptions import ClientError
+from typing_extensions import Protocol
from samcli import __version__
from samcli.cli.global_config import GlobalConfig
@@ -38,7 +38,7 @@ def get_boto_config_with_user_agent(**kwargs) -> Config:
# Type definition of following boto providers, which is equal to Callable[[str], Any]
class BotoProviderType(Protocol):
def __call__(self, service_name: str) -> Any:
- ...
+ ... # pragma: no cover
def get_boto_client_provider_from_session_with_config(session: Session, **kwargs) -> BotoProviderType:
@@ -135,3 +135,8 @@ def get_boto_resource_provider_with_config(
return get_boto_resource_provider_from_session_with_config(
Session(region_name=region, profile_name=profile), **kwargs
)
+
+
+def get_client_error_code(client_error: ClientError) -> Optional[str]:
+ """Extracts error code from boto ClientError"""
+ return client_error.response.get("Error", {}).get("Code")
diff --git a/samcli/lib/utils/cloudformation.py b/samcli/lib/utils/cloudformation.py
index 25a85458f6..c75f3d8220 100644
--- a/samcli/lib/utils/cloudformation.py
+++ b/samcli/lib/utils/cloudformation.py
@@ -3,17 +3,38 @@
"""
import logging
import posixpath
-from typing import Dict, Set, Optional
+from typing import Dict, Set, Optional, Iterable, Any
from attr import dataclass
from botocore.exceptions import ClientError
-from samcli.lib.utils.boto_utils import BotoProviderType
+from samcli.lib.utils.boto_utils import BotoProviderType, get_client_error_code
from samcli.lib.utils.resources import AWS_CLOUDFORMATION_STACK
LOG = logging.getLogger(__name__)
+# list of possible values for active stacks
+# CFN console has a way to display active stacks but it is not possible in API calls
+STACK_ACTIVE_STATUS = [
+ "CREATE_IN_PROGRESS",
+ "CREATE_COMPLETE",
+ "ROLLBACK_IN_PROGRESS",
+ "ROLLBACK_FAILED",
+ "ROLLBACK_COMPLETE",
+ "DELETE_IN_PROGRESS",
+ "DELETE_FAILED",
+ "UPDATE_IN_PROGRESS",
+ "UPDATE_COMPLETE_CLEANUP_IN_PROGRESS",
+ "UPDATE_COMPLETE",
+ "UPDATE_ROLLBACK_IN_PROGRESS",
+ "UPDATE_ROLLBACK_FAILED",
+ "UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS",
+ "UPDATE_ROLLBACK_COMPLETE",
+ "REVIEW_IN_PROGRESS",
+]
+
+
@dataclass
class CloudFormationResourceSummary:
"""
@@ -27,6 +48,7 @@ class CloudFormationResourceSummary:
def get_resource_summaries(
boto_resource_provider: BotoProviderType,
+ boto_client_provider: BotoProviderType,
stack_name: str,
resource_types: Optional[Set[str]] = None,
nested_stack_prefix: Optional[str] = None,
@@ -38,6 +60,8 @@ def get_resource_summaries(
----------
boto_resource_provider : BotoProviderType
A callable which will return boto3 resource
+ boto_client_provider : BotoProviderType
+ A callable which will return boto3 client
stack_name : str
Name of the stack which is deployed to CFN
resource_types : Optional[Set[str]]
@@ -52,7 +76,18 @@ def get_resource_summaries(
"""
LOG.debug("Fetching stack (%s) resources", stack_name)
- cfn_resource_summaries = boto_resource_provider("cloudformation").Stack(stack_name).resource_summaries.all()
+ try:
+ cfn_resource_summaries = list(
+ boto_resource_provider("cloudformation").Stack(stack_name).resource_summaries.all()
+ )
+ except ClientError as ex:
+ if get_client_error_code(ex) == "ValidationError" and LOG.isEnabledFor(logging.DEBUG):
+ LOG.debug(
+ "Invalid stack name (%s). Available stack names: %s",
+ stack_name,
+ ", ".join(list_active_stack_names(boto_client_provider)),
+ )
+ raise ex
resource_summaries: Dict[str, CloudFormationResourceSummary] = {}
for cfn_resource_summary in cfn_resource_summaries:
@@ -68,6 +103,7 @@ def get_resource_summaries(
resource_summaries.update(
get_resource_summaries(
boto_resource_provider,
+ boto_client_provider,
resource_summary.physical_resource_id,
resource_types,
new_nested_stack_prefix,
@@ -90,7 +126,9 @@ def get_resource_summaries(
return resource_summaries
-def get_resource_summary(boto_resource_provider: BotoProviderType, stack_name: str, resource_logical_id: str):
+def get_resource_summary(
+ boto_resource_provider: BotoProviderType, stack_name: str, resource_logical_id: str
+) -> Optional[CloudFormationResourceSummary]:
"""
Returns resource summary of given single resource with its logical id
@@ -120,3 +158,35 @@ def get_resource_summary(boto_resource_provider: BotoProviderType, stack_name: s
"Failed to pull resource (%s) information from stack (%s)", resource_logical_id, stack_name, exc_info=e
)
return None
+
+
+def list_active_stack_names(boto_client_provider: BotoProviderType, show_nested_stacks: bool = False) -> Iterable[str]:
+ """
+ Returns list of active cloudformation stack names
+
+ Parameters
+ ----------
+ boto_client_provider : BotoProviderType
+ A callable which will return boto3 client
+ show_nested_stacks : bool
+ True; will display nested stack names as well. False; will hide nested stack names from the list.
+
+ Returns
+ -------
+ Iterable[str] List of stack names that is currently active
+ """
+ cfn_client = boto_client_provider("cloudformation")
+ first_call = True
+ next_token: Optional[str] = None
+
+ while first_call or next_token:
+ first_call = False
+ kwargs: Dict[str, Any] = {"StackStatusFilter": STACK_ACTIVE_STATUS}
+ if next_token:
+ kwargs["NextToken"] = next_token
+ list_stacks_result = cfn_client.list_stacks(**kwargs)
+ for stack_summary in list_stacks_result.get("StackSummaries", []):
+ if not show_nested_stacks and stack_summary.get("RootId"):
+ continue
+ yield stack_summary.get("StackName")
+ next_token = list_stacks_result.get("NextToken")
diff --git a/samcli/lib/utils/git_repo.py b/samcli/lib/utils/git_repo.py
index 47f6ede128..de3f1c4840 100644
--- a/samcli/lib/utils/git_repo.py
+++ b/samcli/lib/utils/git_repo.py
@@ -129,8 +129,15 @@ def clone(self, clone_dir: Path, clone_name: str, replace_existing: bool = False
temp_path = os.path.normpath(os.path.join(tempdir, clone_name))
git_executable: str = GitRepo._git_executable()
LOG.info("\nCloning from %s (process may take a moment)", self.url)
+ command = [git_executable, "clone", self.url, clone_name]
+ if platform.system().lower() == "windows":
+ LOG.debug(
+ "Configure core.longpaths=true in git clone. "
+ "You might also need to enable long paths in Windows registry."
+ )
+ command += ["--config", "core.longpaths=true"]
check_output(
- [git_executable, "clone", self.url, clone_name],
+ command,
cwd=tempdir,
stderr=subprocess.STDOUT,
)
diff --git a/samcli/lib/utils/osutils.py b/samcli/lib/utils/osutils.py
index 174b6ffb50..0813c98b58 100644
--- a/samcli/lib/utils/osutils.py
+++ b/samcli/lib/utils/osutils.py
@@ -8,7 +8,8 @@
import sys
import tempfile
from contextlib import contextmanager
-from typing import List, Optional
+from pathlib import Path
+from typing import List, Optional, Union
LOG = logging.getLogger(__name__)
@@ -69,6 +70,14 @@ def rmtree_callback(function, path, excinfo):
LOG.debug("rmtree failed in %s for %s, details: %s", function, path, excinfo)
+def rmtree_if_exists(path: Union[str, Path]):
+ """Removes given path if the path exists"""
+ path_obj = Path(str(path))
+ if path_obj.exists():
+ LOG.debug("Cleaning up path %s", str(path))
+ shutil.rmtree(path_obj)
+
+
def stdout():
"""
Returns the stdout as a byte stream in a Py2/PY3 compatible manner
diff --git a/samcli/runtime_config.json b/samcli/runtime_config.json
index 62a9ae5dda..85296d7286 100644
--- a/samcli/runtime_config.json
+++ b/samcli/runtime_config.json
@@ -1,3 +1,3 @@
{
- "app_template_repo_commit": "ae437cbf533bd156e7a5aab337bc077d40c0cc19"
+ "app_template_repo_commit": "b057f88e4d4fe04acecbc22bab6b12cfbf695761"
}
diff --git a/tests/integration/buildcmd/build_integ_base.py b/tests/integration/buildcmd/build_integ_base.py
index b15de58a9c..ffa3398519 100644
--- a/tests/integration/buildcmd/build_integ_base.py
+++ b/tests/integration/buildcmd/build_integ_base.py
@@ -777,8 +777,7 @@ def _verify_process_code_and_output(self, command_result):
# check HelloWorld and HelloMars functions are built in the same build
self.assertRegex(
command_result.stderr.decode("utf-8"),
- "Building codeuri: .* runtime: .* metadata: .* functions: "
- "\\['HelloWorldFunction', 'HelloMarsFunction'\\]",
+ "Building codeuri: .* runtime: .* metadata: .* functions: " "HelloWorldFunction, HelloMarsFunction",
)
@@ -816,7 +815,7 @@ def _verify_process_code_and_output(self, command_result, function_full_paths):
for function_full_path in function_full_paths:
self.assertRegex(
command_result.stderr.decode("utf-8"),
- f"Building codeuri: .* runtime: .* metadata: .* functions: \\[.*'{function_full_path}'.*\\]",
+ f"Building codeuri: .* runtime: .* metadata: .* functions: .*{function_full_path}.*",
)
def _verify_invoke_built_functions(self, template_path, overrides, function_and_expected):
@@ -861,7 +860,7 @@ def _verify_process_code_and_output(self, command_result, function_full_paths, l
for function_full_path in function_full_paths:
self.assertRegex(
command_result.stderr.decode("utf-8"),
- f"Building codeuri: .* runtime: .* metadata: .* functions: \\[.*'{function_full_path}'.*\\]",
+ f"Building codeuri: .* runtime: .* metadata: .* functions:.*{function_full_path}.*",
)
self.assertIn(
f"Building layer '{layer_full_path}'",
diff --git a/tests/integration/buildcmd/test_build_cmd.py b/tests/integration/buildcmd/test_build_cmd.py
index d21cb9c600..7695412434 100644
--- a/tests/integration/buildcmd/test_build_cmd.py
+++ b/tests/integration/buildcmd/test_build_cmd.py
@@ -1653,9 +1653,9 @@ def test_no_cached_override_build(self):
cmdlist = self.get_command_list(parameter_overrides=overrides)
cmdlist.extend(["--config-file", config_file])
command_result = run_command(cmdlist, cwd=self.working_dir)
- self.assertTrue(
- "Valid cache found, copying previously built resources from function build definition of"
- in str(command_result.stderr),
+ self.assertRegex(
+ str(command_result.stderr),
+ "Manifest is not changed for .* running incremental build",
"Should have built using cache",
)
cmdlist.extend(["--no-cached"])
@@ -1684,7 +1684,7 @@ def test_cached_build_with_env_vars(self):
LOG.info("Running Command (cache should be invalid): %s", cmdlist)
command_result = run_command(cmdlist, cwd=self.working_dir)
self.assertTrue(
- "Cache is invalid, running build and copying resources to function build definition"
+ "Cache is invalid, running build and copying resources for following functions"
in command_result.stderr.decode("utf-8")
)
@@ -1692,7 +1692,7 @@ def test_cached_build_with_env_vars(self):
command_result_with_cache = run_command(cmdlist, cwd=self.working_dir)
self.assertTrue(
- "Valid cache found, copying previously built resources from function build definition"
+ "Valid cache found, copying previously built resources for following functions"
in command_result_with_cache.stderr.decode("utf-8")
)
@@ -1728,8 +1728,17 @@ def test_repeated_cached_build_hits_cache(self, use_container):
container_env_var="FOO=BAR" if use_container else None,
)
- cache_invalid_output = "Cache is invalid, running build and copying resources to "
- cache_valid_output = "Valid cache found, copying previously built resources from "
+ cache_invalid_output_use_container = "Cache is invalid, running build and copying resources "
+ cache_valid_output_use_container = "Valid cache found, copying previously built resources "
+
+ cache_invalid_output_no_container = "Manifest file is changed"
+ cache_valid_output_no_container = "Manifest is not changed"
+
+ cache_invalid_output, cache_valid_output = (
+ (cache_invalid_output_use_container, cache_valid_output_use_container)
+ if use_container
+ else (cache_invalid_output_no_container, cache_valid_output_no_container)
+ )
LOG.info("Running Command (cache should be invalid): %s", cmdlist)
command_result = run_command(cmdlist, cwd=self.working_dir).stderr.decode("utf-8")
@@ -1805,6 +1814,35 @@ def test_dedup_build(self, use_container, code_uri, function1_handler, function2
)
+@skipIf(
+ ((IS_WINDOWS and RUNNING_ON_CI) and not CI_OVERRIDE),
+ "Skip build tests on windows when running in CI unless overridden",
+)
+class TestParallelBuildsJavaWithLayers(DedupBuildIntegBase):
+ template = "template-java-maven-with-layers.yaml"
+
+ @pytest.mark.flaky(reruns=3)
+ def test_dedup_build(self):
+ """
+ Build template above and verify that each function call returns as expected
+ """
+
+ cmdlist = self.get_command_list(parallel=True)
+ command_result = run_command(cmdlist, cwd=self.working_dir)
+
+ self.assertEqual(command_result.process.returncode, 0)
+ self._verify_build_artifact(self.default_build_dir, "HelloWorldFunction")
+ self._verify_build_artifact(self.default_build_dir, "HelloWorldLayer")
+
+ if not SKIP_DOCKER_TESTS:
+ self._verify_invoke_built_function(
+ self.built_template,
+ "HelloWorldFunction",
+ None,
+ "hello world. sum is 12.",
+ )
+
+
@skipIf(
((IS_WINDOWS and RUNNING_ON_CI) and not CI_OVERRIDE),
"Skip build tests on windows when running in CI unless overridden",
@@ -1996,7 +2034,7 @@ def test_nested_build(self, use_container, cached, parallel):
command_result = run_command(cmdlist, cwd=self.working_dir)
# make sure functions are deduplicated properly, in stderr they will show up in the same line.
- self.assertRegex(command_result.stderr.decode("utf-8"), r"Building .+'Function2',.+LocalNestedStack/Function2")
+ self.assertRegex(command_result.stderr.decode("utf-8"), r"Building .+Function2,.+LocalNestedStack/Function2")
function_full_paths = ["Function", "Function2", "LocalNestedStack/Function1", "LocalNestedStack/Function2"]
stack_paths = ["", "LocalNestedStack"]
diff --git a/tests/integration/delete/delete_integ_base.py b/tests/integration/delete/delete_integ_base.py
index 5eb15810b3..ed6677699a 100644
--- a/tests/integration/delete/delete_integ_base.py
+++ b/tests/integration/delete/delete_integ_base.py
@@ -1,5 +1,6 @@
import os
from pathlib import Path
+from typing import Optional
from unittest import TestCase
@@ -22,10 +23,19 @@ def base_command(self):
return command
def get_delete_command_list(
- self, stack_name=None, region=None, config_file=None, config_env=None, profile=None, no_prompts=None
+ self,
+ stack_name=None,
+ region=None,
+ config_file=None,
+ config_env=None,
+ profile=None,
+ no_prompts=None,
+ s3_bucket=None,
+ s3_prefix=None,
):
command_list = [self.base_command(), "delete"]
+ # Convert all values as string to make behaviour uniform across platforms
if stack_name:
command_list += ["--stack-name", str(stack_name)]
if region:
@@ -38,5 +48,9 @@ def get_delete_command_list(
command_list += ["--profile", str(profile)]
if no_prompts:
command_list += ["--no-prompts"]
+ if s3_bucket:
+ command_list += ["--s3-bucket", str(s3_bucket)]
+ if s3_prefix:
+ command_list += ["--s3-prefix", str(s3_prefix)]
return command_list
diff --git a/tests/integration/delete/test_delete_command.py b/tests/integration/delete/test_delete_command.py
index d20535950f..4a953e4176 100644
--- a/tests/integration/delete/test_delete_command.py
+++ b/tests/integration/delete/test_delete_command.py
@@ -44,17 +44,72 @@ def setUpClass(cls):
DeleteIntegBase.setUpClass()
def setUp(self):
- self.cf_client = boto3.client("cloudformation")
+ # Save reference to session object to get region_name
+ self._session = boto3.session.Session()
+ self.cf_client = self._session.client("cloudformation")
+ self.s3_client = self._session.client("s3")
self.sns_arn = os.environ.get("AWS_SNS")
time.sleep(CFN_SLEEP)
super().setUp()
+ @parameterized.expand(
+ [
+ "aws-serverless-function.yaml",
+ ]
+ )
+ @pytest.mark.flaky(reruns=3)
+ def test_s3_options(self, template_file):
+ template_path = self.test_data_path.joinpath(template_file)
+
+ stack_name = self._method_to_stack_name(self.id())
+
+ deploy_command_list = self.get_deploy_command_list(
+ template_file=template_path,
+ stack_name=stack_name,
+ capabilities="CAPABILITY_IAM",
+ image_repository=self.ecr_repo_name,
+ s3_bucket=self.bucket_name,
+ s3_prefix=self.s3_prefix,
+ force_upload=True,
+ notification_arns=self.sns_arn,
+ parameter_overrides="Parameter=Clarity",
+ kms_key_id=self.kms_key,
+ no_execute_changeset=False,
+ tags="integ=true clarity=yes foo_bar=baz",
+ confirm_changeset=False,
+ region=self._session.region_name,
+ )
+ deploy_process_execute = run_command(deploy_command_list)
+
+ delete_command_list = self.get_delete_command_list(
+ stack_name=stack_name,
+ region=self._session.region_name,
+ no_prompts=True,
+ s3_bucket=self.bucket_name,
+ s3_prefix=self.s3_prefix,
+ )
+ delete_process_execute = run_command(delete_command_list)
+
+ self.assertEqual(delete_process_execute.process.returncode, 0)
+
+ # Check if the stack was deleted
+ try:
+ resp = self.cf_client.describe_stacks(StackName=stack_name)
+ except ClientError as ex:
+ self.assertIn(f"Stack with id {stack_name} does not exist", str(ex))
+
+ # Check for zero objects in bucket
+ s3_objects_resp = self.s3_client.list_objects_v2(Bucket=self.bucket_name, Prefix=self.s3_prefix)
+ self.assertEqual(s3_objects_resp["KeyCount"], 0)
+
@pytest.mark.flaky(reruns=3)
def test_delete_command_no_stack_deployed(self):
stack_name = self._method_to_stack_name(self.id())
- delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1", no_prompts=True)
+ delete_command_list = self.get_delete_command_list(
+ stack_name=stack_name, region=self._session.region_name, no_prompts=True
+ )
delete_process_execute = run_command(delete_command_list)
self.assertEqual(delete_process_execute.process.returncode, 0)
@@ -99,7 +154,7 @@ def test_delete_no_prompts_with_s3_prefix_present_zip(self, template_file):
config_file_path = self.test_data_path.joinpath(config_file_name)
delete_command_list = self.get_delete_command_list(
- stack_name=stack_name, config_file=config_file_path, region="us-east-1", no_prompts=True
+ stack_name=stack_name, config_file=config_file_path, region=self._session.region_name, no_prompts=True
)
delete_process_execute = run_command(delete_command_list)
@@ -136,7 +191,7 @@ def test_delete_no_prompts_with_s3_prefix_present_image(self, template_file):
config_file_path = self.test_data_path.joinpath(config_file_name)
delete_command_list = self.get_delete_command_list(
- stack_name=stack_name, config_file=config_file_path, region="us-east-1", no_prompts=True
+ stack_name=stack_name, config_file=config_file_path, region=self._session.region_name, no_prompts=True
)
delete_process_execute = run_command(delete_command_list)
@@ -204,7 +259,9 @@ def test_delete_no_config_file_zip(self, template_file):
deploy_command_list, "{}\n\n\n\n\nn\n\n\n".format(stack_name).encode()
)
- delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1", no_prompts=True)
+ delete_command_list = self.get_delete_command_list(
+ stack_name=stack_name, region=self._session.region_name, no_prompts=True
+ )
delete_process_execute = run_command(delete_command_list)
self.assertEqual(delete_process_execute.process.returncode, 0)
@@ -238,12 +295,14 @@ def test_delete_no_prompts_no_s3_prefix_zip(self, template_file):
no_execute_changeset=False,
tags="integ=true clarity=yes foo_bar=baz",
confirm_changeset=False,
- region="us-east-1",
+ region=self._session.region_name,
)
deploy_process_execute = run_command(deploy_command_list)
- delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1", no_prompts=True)
+ delete_command_list = self.get_delete_command_list(
+ stack_name=stack_name, region=self._session.region_name, no_prompts=True
+ )
delete_process_execute = run_command(delete_command_list)
@@ -280,12 +339,14 @@ def test_delete_no_prompts_no_s3_prefix_image(self, template_file):
no_execute_changeset=False,
tags="integ=true clarity=yes foo_bar=baz",
confirm_changeset=False,
- region="us-east-1",
+ region=self._session.region_name,
)
deploy_process_execute = run_command(deploy_command_list)
- delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1", no_prompts=True)
+ delete_command_list = self.get_delete_command_list(
+ stack_name=stack_name, region=self._session.region_name, no_prompts=True
+ )
delete_process_execute = run_command(delete_command_list)
@@ -325,7 +386,9 @@ def test_delete_nested_stacks(self, template_file):
deploy_process_execute = run_command(deploy_command_list)
- delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1", no_prompts=True)
+ delete_command_list = self.get_delete_command_list(
+ stack_name=stack_name, region=self._session.region_name, no_prompts=True
+ )
delete_process_execute = run_command(delete_command_list)
@@ -352,7 +415,9 @@ def test_delete_stack_termination_protection_enabled(self):
self.cf_client.create_stack(StackName=stack_name, TemplateBody=template_str, EnableTerminationProtection=True)
- delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1", no_prompts=True)
+ delete_command_list = self.get_delete_command_list(
+ stack_name=stack_name, region=self._session.region_name, no_prompts=True
+ )
delete_process_execute = run_command(delete_command_list)
@@ -414,7 +479,7 @@ def test_delete_guided_no_stack_name_no_region(self, template_file):
no_execute_changeset=False,
tags="integ=true clarity=yes foo_bar=baz",
confirm_changeset=False,
- region="us-east-1",
+ region=self._session.region_name,
)
deploy_process_execute = run_command(deploy_command_list)
@@ -451,11 +516,11 @@ def test_delete_guided_ecr_repository_present(self, template_file):
no_execute_changeset=False,
tags="integ=true clarity=yes foo_bar=baz",
confirm_changeset=False,
- region="us-east-1",
+ region=self._session.region_name,
)
deploy_process_execute = run_command(deploy_command_list)
- delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1")
+ delete_command_list = self.get_delete_command_list(stack_name=stack_name, region=self._session.region_name)
delete_process_execute = run_command_with_input(delete_command_list, "y\ny\ny\n".encode())
self.assertEqual(delete_process_execute.process.returncode, 0)
@@ -491,12 +556,12 @@ def test_delete_guided_no_s3_prefix_image(self, template_file):
no_execute_changeset=False,
tags="integ=true clarity=yes foo_bar=baz",
confirm_changeset=False,
- region="us-east-1",
+ region=self._session.region_name,
)
deploy_process_execute = run_command(deploy_command_list)
- delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1")
+ delete_command_list = self.get_delete_command_list(stack_name=stack_name, region=self._session.region_name)
delete_process_execute = run_command_with_input(delete_command_list, "y\n".encode())
@@ -530,12 +595,12 @@ def test_delete_guided_retain_s3_artifact(self, template_file):
no_execute_changeset=False,
tags="integ=true clarity=yes foo_bar=baz",
confirm_changeset=False,
- region="us-east-1",
+ region=self._session.region_name,
)
deploy_process_execute = run_command(deploy_command_list)
self.add_left_over_resources_from_stack(stack_name)
- delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1")
+ delete_command_list = self.get_delete_command_list(stack_name=stack_name, region=self._session.region_name)
delete_process_execute = run_command_with_input(delete_command_list, "y\nn\nn\n".encode())
self.assertEqual(delete_process_execute.process.returncode, 0)
diff --git a/tests/integration/logs/logs_integ_base.py b/tests/integration/logs/logs_integ_base.py
index 57b81e0504..a6b8d5cbe1 100644
--- a/tests/integration/logs/logs_integ_base.py
+++ b/tests/integration/logs/logs_integ_base.py
@@ -1,12 +1,18 @@
+import logging
from typing import Optional, List
-from unittest import TestCase
+from unittest import TestCase, skipIf
-from tests.testing_utils import get_sam_command
+from tests.testing_utils import get_sam_command, RUNNING_ON_CI, RUNNING_TEST_FOR_MASTER_ON_CI, RUN_BY_CANARY
RETRY_COUNT = 20 # retry required because of log buffering configuration for each service
RETRY_SLEEP = 2
+LOG = logging.getLogger(__name__)
+SKIP_LOGS_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI and not RUN_BY_CANARY
+
+
+@skipIf(SKIP_LOGS_TESTS, "Skip logs tests in CI/CD only")
class LogsIntegBase(TestCase):
@staticmethod
def get_logs_command_list(
@@ -19,7 +25,6 @@ def get_logs_command_list(
start_time: Optional[str] = None,
end_time: Optional[str] = None,
output: Optional[str] = None,
- beta_features: bool = False,
):
command_list = [get_sam_command(), "logs", "--stack-name", stack_name]
@@ -40,7 +45,5 @@ def get_logs_command_list(
command_list += ["--end-time", end_time]
if output:
command_list += ["--output", output]
- if beta_features:
- command_list += ["--beta-features"]
return command_list
diff --git a/tests/integration/logs/test_logs_command.py b/tests/integration/logs/test_logs_command.py
index b9d2a77112..b929b73b47 100644
--- a/tests/integration/logs/test_logs_command.py
+++ b/tests/integration/logs/test_logs_command.py
@@ -3,35 +3,33 @@
import time
from pathlib import Path
from typing import List, Optional, Tuple
-from unittest import skipIf
import boto3
import pytest
import requests
-from tests.integration.logs.logs_integ_base import RETRY_SLEEP
from parameterized import parameterized
+from samcli.lib.utils.boto_utils import get_boto_resource_provider_with_config, get_boto_client_provider_with_config
+from samcli.lib.utils.cloudformation import get_resource_summaries
from tests.integration.deploy.deploy_integ_base import DeployIntegBase
from tests.integration.logs.logs_integ_base import LogsIntegBase, RETRY_COUNT
+from tests.integration.logs.logs_integ_base import RETRY_SLEEP
from tests.testing_utils import (
- run_command,
- RUNNING_ON_CI,
- RUNNING_TEST_FOR_MASTER_ON_CI,
- RUN_BY_CANARY,
start_persistent_process,
read_until,
kill_process,
+ run_command,
method_to_stack_name,
)
LOG = logging.getLogger(__name__)
-SKIP_LOGS_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI and not RUN_BY_CANARY
-@skipIf(SKIP_LOGS_TESTS, "Skip logs tests in CI/CD only")
-class TestLogsCommand(LogsIntegBase):
+class LogsIntegTestCases(LogsIntegBase):
+ test_template_folder = ""
+
stack_name = ""
- stack_resources = []
+ stack_resources = {}
stack_info = None
def setUp(self):
@@ -41,40 +39,40 @@ def setUp(self):
@pytest.fixture(scope="class")
def deploy_testing_stack(self):
test_data_path = Path(__file__).resolve().parents[1].joinpath("testdata", "logs")
- TestLogsCommand.stack_name = method_to_stack_name("test_logs_command")
+ LogsIntegTestCases.stack_name = method_to_stack_name("test_logs_command")
cfn_client = boto3.client("cloudformation")
deploy_cmd = DeployIntegBase.get_deploy_command_list(
- stack_name=self.stack_name,
- template_file=test_data_path.joinpath("python-apigw-sfn", "template.yaml"),
+ stack_name=LogsIntegTestCases.stack_name,
+ template_file=test_data_path.joinpath(self.test_template_folder, "template.yaml"),
resolve_s3=True,
- capabilities="CAPABILITY_IAM",
+ capabilities_list=["CAPABILITY_IAM", "CAPABILITY_AUTO_EXPAND"],
)
deploy_result = run_command(deploy_cmd)
yield deploy_result, cfn_client
- cfn_client.delete_stack(StackName=self.stack_name)
+ cfn_client.delete_stack(StackName=LogsIntegTestCases.stack_name)
@pytest.fixture(autouse=True, scope="class")
- def sync_code_base(self, deploy_testing_stack):
+ def logs_base(self, deploy_testing_stack):
deploy_result = deploy_testing_stack[0]
- cfn_client = deploy_testing_stack[1]
self.assertEqual(
deploy_result.process.returncode, 0, f"Deployment of the test stack is failed with {deploy_result.stderr}"
)
-
- cfn_resource = boto3.resource("cloudformation")
- TestLogsCommand.stack_resources = cfn_client.describe_stack_resources(StackName=TestLogsCommand.stack_name).get(
- "StackResources", []
+ stack_resource_summaries = get_resource_summaries(
+ get_boto_resource_provider_with_config(),
+ get_boto_client_provider_with_config(),
+ LogsIntegTestCases.stack_name,
)
- TestLogsCommand.stack_info = cfn_resource.Stack(TestLogsCommand.stack_name)
-
- def _get_physical_id(self, logical_id: str):
- for stack_resource in self.stack_resources:
- if stack_resource["LogicalResourceId"] == logical_id:
- return stack_resource["PhysicalResourceId"]
+ LogsIntegTestCases.stack_resources = {
+ resource_full_path: stack_resource_summary.physical_resource_id
+ for resource_full_path, stack_resource_summary in stack_resource_summaries.items()
+ }
+ cfn_resource = boto3.resource("cloudformation")
+ LogsIntegTestCases.stack_info = cfn_resource.Stack(LogsIntegTestCases.stack_name)
- return None
+ def _get_physical_id(self, resource_path: str):
+ return self.stack_resources[resource_path]
def _get_output_value(self, key: str):
for output in self.stack_info.outputs:
@@ -83,22 +81,18 @@ def _get_output_value(self, key: str):
return None
- @parameterized.expand([("ApiGwFunction",), ("SfnFunction",)])
- def test_function_logs(self, function_name: str):
- expected_log_output = f"Hello world from {function_name} function"
+ def _test_function_logs(self, function_name):
+ expected_log_output = f"Hello world from {function_name} function" # Hello world from ApiGwFunction function
LOG.info("Invoking function %s", function_name)
lambda_invoke_result = self.lambda_client.invoke(FunctionName=self._get_physical_id(function_name))
LOG.info("Lambda invoke result %s", lambda_invoke_result)
-
cmd_list = self.get_logs_command_list(self.stack_name, name=function_name)
self._check_logs(cmd_list, [expected_log_output])
- @parameterized.expand([("ApiGwFunction",), ("SfnFunction",)])
- def test_tail(self, function_name: str):
+ def _test_tail(self, function_name):
cmd_list = self.get_logs_command_list(self.stack_name, name=function_name, tail=True)
tail_process = start_persistent_process(cmd_list)
-
- expected_log_output = f"Hello world from {function_name} function"
+ expected_log_output = f"Hello world from {function_name} function" # Hello world from ApiGwFunction function
LOG.info("Invoking function %s", function_name)
lambda_invoke_result = self.lambda_client.invoke(FunctionName=self._get_physical_id(function_name))
LOG.info("Lambda invoke result %s", lambda_invoke_result)
@@ -111,112 +105,106 @@ def _check_logs(output: str, _: List[str]) -> bool:
finally:
kill_process(tail_process)
- @parameterized.expand([("ApiGwFunction",), ("SfnFunction",)])
- def test_filter(self, function_name: str):
- log_filter = "this should be filtered"
+ def _test_filter(self, function_name):
+ function_name_for_filter = function_name.replace("/", "")
+ log_filter = f"this should be filtered {function_name_for_filter}" # this should be filtered ApiGwFunction
LOG.info("Invoking function %s", function_name)
lambda_invoke_result = self.lambda_client.invoke(FunctionName=self._get_physical_id(function_name))
LOG.info("Lambda invoke result %s", lambda_invoke_result)
-
cmd_list = self.get_logs_command_list(self.stack_name, name=function_name, filter=log_filter)
self._check_logs(cmd_list, [log_filter])
- @parameterized.expand(itertools.product(["HelloWorldServerlessApi"], ["hello", "world"]))
- def test_apigw_logs(self, apigw_name: str, path: str):
- apigw_url = f"{self._get_output_value(apigw_name)}{path}"
+ def _test_apigw_logs(self, apigw_name, path):
+ # apigw name in output section doesn't have forward slashes
+ apigw_name_from_output = apigw_name.replace("/", "")
+ apigw_url = f"{self._get_output_value(apigw_name_from_output)}{path}"
apigw_result = requests.get(apigw_url)
LOG.info("APIGW result %s", apigw_result)
- cmd_list = self.get_logs_command_list(self.stack_name, name=apigw_name, beta_features=True)
+ cmd_list = self.get_logs_command_list(self.stack_name, name=apigw_name)
self._check_logs(cmd_list, [f"HTTP Method: GET, Resource Path: /{path}"])
- @parameterized.expand([("MyStateMachine",)])
- def test_sfn_logs(self, state_machine_name: str):
+ def _test_sfn_logs(self, state_machine_name):
sfn_physical_id = self._get_physical_id(state_machine_name)
sfn_invoke_result = self.sfn_client.start_execution(stateMachineArn=sfn_physical_id)
execution_arn = sfn_invoke_result.get("executionArn", "")
LOG.info("SFN invoke result %s", sfn_invoke_result)
- cmd_list = self.get_logs_command_list(self.stack_name, name=state_machine_name, beta_features=True)
- self._check_logs(cmd_list, execution_arn)
+ cmd_list = self.get_logs_command_list(self.stack_name, name=state_machine_name)
+ self._check_logs(cmd_list, [execution_arn])
- @parameterized.expand(itertools.product(["HelloWorldServerlessApi"], ["hello"]))
- def test_end_to_end_apigw(self, apigw_name: str, path: str):
- apigw_url = f"{self._get_output_value(apigw_name)}{path}"
+ def _test_end_to_end_apigw(self, apigw_name, path):
+ # apigw name in output section doesn't have forward slashes
+ apigw_name_from_output = apigw_name.replace("/", "")
+ apigw_url = f"{self._get_output_value(apigw_name_from_output)}{path}"
apigw_result = requests.get(apigw_url)
LOG.info("APIGW result %s", apigw_result)
- cmd_list = self.get_logs_command_list(self.stack_name, beta_features=True)
+ cmd_list = self.get_logs_command_list(self.stack_name)
self._check_logs(
- cmd_list, [f"HTTP Method: GET, Resource Path: /{path}", "Hello world from ApiGwFunction function"]
+ cmd_list,
+ [
+ f"HTTP Method: GET, Resource Path: /{path}",
+ # Hello world from HelloWorldServerlessApi/hello function
+ f"Hello world from {apigw_name_from_output}/{path} function",
+ ],
)
- @parameterized.expand(itertools.product(["HelloWorldServerlessApi"], ["world"]))
- def test_end_to_end_sfn(self, apigw_name: str, path: str):
- apigw_url = f"{self._get_output_value(apigw_name)}{path}"
+ def _test_end_to_end_sfn(self, apigw_name, path):
+ # apigw name in output section doesn't have forward slashes
+ apigw_name_from_output = apigw_name.replace("/", "")
+ apigw_url = f"{self._get_output_value(apigw_name_from_output)}{path}"
apigw_result = requests.get(apigw_url)
LOG.info("APIGW result %s", apigw_result)
- cmd_list = self.get_logs_command_list(self.stack_name, beta_features=True)
+ cmd_list = self.get_logs_command_list(self.stack_name)
self._check_logs(
cmd_list,
[
f"HTTP Method: GET, Resource Path: /{path}",
'"type": "TaskStateEntered"',
- "Hello world from ApiGwFunction function",
+ # Hello world from HelloWorldServerlessApi/world function
+ f"Hello world from {apigw_name_from_output}/{path} function",
],
)
- @parameterized.expand(itertools.product(["ApiGwFunction", "SfnFunction"], [None, "text", "json"]))
- def test_output(self, function_name: str, output: Optional[str]):
- expected_log_output = f"Hello world from {function_name} function"
+ def _test_output(self, function_name, output):
+ expected_log_output = f"Hello world from {function_name} function" # Hello world from ApiGwFunction function
LOG.info("Invoking function %s", function_name)
lambda_invoke_result = self.lambda_client.invoke(FunctionName=self._get_physical_id(function_name))
LOG.info("Lambda invoke result %s", lambda_invoke_result)
-
- cmd_list = self.get_logs_command_list(self.stack_name, name=function_name, output=output, beta_features=True)
+ cmd_list = self.get_logs_command_list(self.stack_name, name=function_name, output=output)
self._check_logs(cmd_list, [expected_log_output], output=output)
- @parameterized.expand(
- itertools.product(
- ["ApiGwFunction", "SfnFunction"],
- [
- (None, None, True),
- (None, "1 minute", True),
- ("1 minute", None, True),
- ("now", None, False),
- ],
- )
- )
- def test_start_end(self, function_name: str, start_end_time_params: Tuple[Optional[str], Optional[str], bool]):
+ def _test_start_end(self, function_name, start_end_time_params):
(start_time, end_time, should_succeed) = start_end_time_params
- expected_log_output = f"Hello world from {function_name} function"
+ expected_log_output = f"Hello world from {function_name} function" # Hello world from ApiGwFunction function
LOG.info("Invoking function %s", function_name)
lambda_invoke_result = self.lambda_client.invoke(FunctionName=self._get_physical_id(function_name))
LOG.info("Lambda invoke result %s", lambda_invoke_result)
-
cmd_list = self.get_logs_command_list(
self.stack_name, name=function_name, start_time=start_time, end_time=end_time
)
-
if not should_succeed:
with self.assertRaises(ValueError):
self._check_logs(cmd_list, [expected_log_output], retries=2)
else:
self._check_logs(cmd_list, [expected_log_output])
- @parameterized.expand([("ApiGwFunction",), ("SfnFunction",)])
- def test_include_traces(self, function_name: str):
- expected_log_output = f"Hello world from {function_name} function"
+ def _test_include_traces(self, function_name):
+ expected_log_output = f"Hello world from {function_name} function" # Hello world from ApiGwFunction function
LOG.info("Invoking function %s", function_name)
lambda_invoke_result = self.lambda_client.invoke(FunctionName=self._get_physical_id(function_name))
LOG.info("Lambda invoke result %s", lambda_invoke_result)
-
- cmd_list = self.get_logs_command_list(
- self.stack_name, name=function_name, include_traces=True, beta_features=True
- )
+ cmd_list = self.get_logs_command_list(self.stack_name, name=function_name, include_traces=True)
self._check_logs(cmd_list, ["New XRay Service Graph", "XRay Event [revision ", expected_log_output])
def _check_logs(self, cmd_list: List, log_strings: List[str], output: str = "text", retries=RETRY_COUNT):
for _ in range(retries):
cmd_result = run_command(cmd_list)
cmd_stdout = cmd_result.stdout.decode("utf-8")
+ cmd_stderr = cmd_result.stderr.decode("utf-8")
+
+ if cmd_result.process.returncode != 0:
+ LOG.info(cmd_stdout)
+ LOG.error(cmd_stderr)
+
self.assertEqual(cmd_result.process.returncode, 0)
log_string_found = True
for log_string in log_strings:
@@ -234,3 +222,143 @@ def _check_logs(self, cmd_list: List, log_strings: List[str], output: str = "tex
time.sleep(RETRY_SLEEP)
raise ValueError(f"No match found for one of the expected log outputs '{log_strings}'")
+
+
+REGULAR_STACK_FUNCTION_LIST = [
+ "ApiGwFunction",
+ "SfnFunction",
+]
+REGULAR_STACK_APIGW_LIST = [
+ "HelloWorldServerlessApi",
+]
+REGULAR_STACK_SFN_LIST = [
+ "MyStateMachine",
+]
+
+
+class TestLogsCommandWithRegularStack(LogsIntegTestCases):
+ test_template_folder = "python-apigw-sfn"
+
+ @parameterized.expand(REGULAR_STACK_FUNCTION_LIST)
+ def test_function_logs(self, function_name: str):
+ self._test_function_logs(function_name)
+
+ @parameterized.expand(REGULAR_STACK_FUNCTION_LIST)
+ def test_tail(self, function_name: str):
+ self._test_tail(function_name)
+
+ @parameterized.expand(REGULAR_STACK_FUNCTION_LIST)
+ def test_filter(self, function_name: str):
+ self._test_filter(function_name)
+
+ @parameterized.expand(itertools.product(REGULAR_STACK_APIGW_LIST, ["hello", "world"]))
+ def test_apigw_logs(self, apigw_name: str, path: str):
+ self._test_apigw_logs(apigw_name, path)
+
+ @parameterized.expand(REGULAR_STACK_SFN_LIST)
+ def test_sfn_logs(self, state_machine_name: str):
+ self._test_sfn_logs(state_machine_name)
+
+ @parameterized.expand(itertools.product(REGULAR_STACK_APIGW_LIST, ["hello"]))
+ def test_end_to_end_apigw(self, apigw_name: str, path: str):
+ self._test_end_to_end_apigw(apigw_name, path)
+
+ @parameterized.expand(itertools.product(REGULAR_STACK_APIGW_LIST, ["world"]))
+ def test_end_to_end_sfn(self, apigw_name: str, path: str):
+ self._test_end_to_end_sfn(apigw_name, path)
+
+ @parameterized.expand(itertools.product(REGULAR_STACK_FUNCTION_LIST, [None, "text", "json"]))
+ def test_output(self, function_name: str, output: Optional[str]):
+ self._test_output(function_name, output)
+
+ @parameterized.expand(
+ itertools.product(
+ REGULAR_STACK_FUNCTION_LIST,
+ [
+ (None, None, True),
+ (None, "1 minute", True),
+ ("1 minute", None, True),
+ ("now", None, False),
+ ],
+ )
+ )
+ def test_start_end(self, function_name: str, start_end_time_params: Tuple[Optional[str], Optional[str], bool]):
+ self._test_start_end(function_name, start_end_time_params)
+
+ @parameterized.expand(REGULAR_STACK_FUNCTION_LIST)
+ def test_include_traces(self, function_name: str):
+ self._test_include_traces(function_name)
+
+
+NESTED_STACK_FUNCTION_LIST = [
+ "ApiGwFunction",
+ "SfnFunction",
+ "ChildStack/ApiGwFunction",
+ "ChildStack/SfnFunction",
+ "ChildStack/GrandChildStack/ApiGwFunction",
+ "ChildStack/GrandChildStack/SfnFunction",
+]
+NESTED_STACK_APIGW_LIST = [
+ "HelloWorldServerlessApi",
+ "ChildStack/HelloWorldServerlessApi",
+ "ChildStack/GrandChildStack/HelloWorldServerlessApi",
+]
+NESTED_STACK_SFN_LIST = [
+ "MyStateMachine",
+ "ChildStack/MyStateMachine",
+ "ChildStack/GrandChildStack/MyStateMachine",
+]
+
+
+class TestLogsCommandWithNestedStack(LogsIntegTestCases):
+ test_template_folder = "nested-python-apigw-sfn"
+
+ @parameterized.expand(NESTED_STACK_FUNCTION_LIST)
+ def test_function_logs(self, function_name: str):
+ self._test_function_logs(function_name)
+
+ @parameterized.expand(NESTED_STACK_FUNCTION_LIST)
+ def test_tail(self, function_name: str):
+ self._test_tail(function_name)
+
+ @parameterized.expand(NESTED_STACK_FUNCTION_LIST)
+ def test_filter(self, function_name: str):
+ self._test_filter(function_name)
+
+ @parameterized.expand(itertools.product(NESTED_STACK_APIGW_LIST, ["hello", "world"]))
+ def test_apigw_logs(self, apigw_name: str, path: str):
+ self._test_apigw_logs(apigw_name, path)
+
+ @parameterized.expand(NESTED_STACK_SFN_LIST)
+ def test_sfn_logs(self, state_machine_name: str):
+ self._test_sfn_logs(state_machine_name)
+
+ @parameterized.expand(itertools.product(NESTED_STACK_APIGW_LIST, ["hello"]))
+ def test_end_to_end_apigw(self, apigw_name: str, path: str):
+ self._test_end_to_end_apigw(apigw_name, path)
+
+ @parameterized.expand(itertools.product(NESTED_STACK_APIGW_LIST, ["world"]))
+ def test_end_to_end_sfn(self, apigw_name: str, path: str):
+ self._test_end_to_end_sfn(apigw_name, path)
+
+ @parameterized.expand(itertools.product(NESTED_STACK_FUNCTION_LIST, [None, "text", "json"]))
+ def test_output(self, function_name: str, output: Optional[str]):
+ self._test_output(function_name, output)
+
+ @parameterized.expand(
+ itertools.product(
+ NESTED_STACK_FUNCTION_LIST,
+ [
+ (None, None, True),
+ (None, "1 minute", True),
+ ("1 minute", None, True),
+ ("now", None, False),
+ ],
+ )
+ )
+ def test_start_end(self, function_name: str, start_end_time_params: Tuple[Optional[str], Optional[str], bool]):
+ self._test_start_end(function_name, start_end_time_params)
+
+ @parameterized.expand(NESTED_STACK_FUNCTION_LIST)
+ def test_include_traces(self, function_name: str):
+ self._test_include_traces(function_name)
diff --git a/tests/integration/sync/sync_integ_base.py b/tests/integration/sync/sync_integ_base.py
index 1a4d75613e..1aa6aa4b31 100644
--- a/tests/integration/sync/sync_integ_base.py
+++ b/tests/integration/sync/sync_integ_base.py
@@ -3,22 +3,26 @@
import logging
import json
import shutil
+import tempfile
import time
import uuid
+import zipfile
from pathlib import Path
+from typing import Callable
import boto3
+import requests
from botocore.exceptions import ClientError
from botocore.config import Config
from samcli.lib.bootstrap.bootstrap import SAM_CLI_STACK_NAME
from tests.integration.buildcmd.build_integ_base import BuildIntegBase
from tests.integration.package.package_integ_base import PackageIntegBase
+from tests.testing_utils import get_sam_command
-CFN_SLEEP = 3
-CFN_PYTHON_VERSION_SUFFIX = os.environ.get("PYTHON_VERSION", "0.0.0").replace(".", "-")
RETRY_ATTEMPTS = 20
RETRY_WAIT = 1
+ZIP_FILE = "layer_zip.zip"
LOG = logging.getLogger(__name__)
@@ -39,6 +43,7 @@ def setUp(self):
self.sns_arn = os.environ.get("AWS_SNS")
self.stacks = []
self.s3_prefix = uuid.uuid4().hex
+ self.dependency_layer = True if self.dependency_layer is None else self.dependency_layer
super().setUp()
def tearDown(self):
@@ -83,7 +88,9 @@ def _get_lambda_response(self, lambda_function):
lambda_response = self.lambda_client.invoke(
FunctionName=lambda_function, InvocationType="RequestResponse"
)
- payload = json.loads(lambda_response.get("Payload").read().decode("utf-8"))
+ lambda_response_payload = lambda_response.get("Payload").read().decode("utf-8")
+ LOG.info("Lambda Response Payload: %s", lambda_response_payload)
+ payload = json.loads(lambda_response_payload)
return payload.get("body")
except Exception:
if count == RETRY_ATTEMPTS:
@@ -91,6 +98,39 @@ def _get_lambda_response(self, lambda_function):
count += 1
return ""
+ def _confirm_lambda_response(self, lambda_function: str, verification_function: Callable) -> None:
+ count = 0
+ while count < RETRY_ATTEMPTS:
+ try:
+ time.sleep(RETRY_WAIT)
+ lambda_response = self.lambda_client.invoke(
+ FunctionName=lambda_function, InvocationType="RequestResponse"
+ )
+ lambda_response_payload = lambda_response.get("Payload").read().decode("utf-8")
+ LOG.info("Lambda Response Payload: %s", lambda_response_payload)
+ payload = json.loads(lambda_response_payload)
+ verification_function(payload)
+ except Exception:
+ if count == RETRY_ATTEMPTS:
+ raise
+ count += 1
+
+ def _confirm_lambda_error(self, lambda_function):
+ count = 0
+ while count < RETRY_ATTEMPTS:
+ try:
+ time.sleep(RETRY_WAIT)
+ lambda_response = self.lambda_client.invoke(
+ FunctionName=lambda_function, InvocationType="RequestResponse"
+ )
+ if lambda_response.get("FunctionError"):
+ return
+ except Exception:
+ if count == RETRY_ATTEMPTS:
+ raise
+ count += 1
+ return ""
+
def _get_api_message(self, rest_api):
api_resource = self.api_client.get_resources(restApiId=rest_api)
for item in api_resource.get("items"):
@@ -136,12 +176,35 @@ def _get_sfn_response(self, state_machine):
count += 1
return ""
- def base_command(self):
- command = "sam"
- if os.getenv("SAM_CLI_DEV"):
- command = "samdev"
+ @staticmethod
+ def update_file(source, destination):
+ with open(source, "rb") as source_file:
+ with open(destination, "wb") as destination_file:
+ destination_file.write(source_file.read())
+
+ @staticmethod
+ def _extract_contents_from_layer_zip(dep_dir, zipped_layer):
+ with tempfile.TemporaryDirectory() as extract_path:
+ zipped_path = Path(extract_path, ZIP_FILE)
+ with open(zipped_path, "wb") as file:
+ file.write(zipped_layer.content)
+ with zipfile.ZipFile(zipped_path) as zip_ref:
+ zip_ref.extractall(extract_path)
+ return os.listdir(Path(extract_path, dep_dir))
+
+ def get_layer_contents(self, arn, dep_dir):
+ layer = self.lambda_client.get_layer_version_by_arn(Arn=arn)
+ layer_location = layer.get("Content", {}).get("Location", "")
+ zipped_layer = requests.get(layer_location)
+ return SyncIntegBase._extract_contents_from_layer_zip(dep_dir, zipped_layer)
- return command
+ def get_dependency_layer_contents_from_arn(self, stack_resources, dep_dir, version):
+ layers = stack_resources["AWS::Lambda::LayerVersion"]
+ for layer in layers:
+ if "DepLayer" in layer:
+ layer_version = layer[:-1] + str(version)
+ return self.get_layer_contents(layer_version, dep_dir)
+ return None
def get_sync_command_list(
self,
@@ -158,6 +221,7 @@ def get_sync_command_list(
base_dir=None,
image_repository=None,
image_repositories=None,
+ s3_bucket=None,
s3_prefix=None,
kms_key_id=None,
capabilities=None,
@@ -168,7 +232,7 @@ def get_sync_command_list(
metadata=None,
debug=None,
):
- command_list = [self.base_command(), "sync"]
+ command_list = [get_sam_command(), "sync"]
command_list += ["-t", str(template_file)]
if code:
@@ -199,6 +263,8 @@ def get_sync_command_list(
command_list += ["--image-repository", str(image_repository)]
if image_repositories:
command_list += ["--image-repositories", str(image_repositories)]
+ if s3_bucket:
+ command_list += ["--s3-bucket", str(s3_bucket)]
if s3_prefix:
command_list += ["--s3-prefix", str(s3_prefix)]
if kms_key_id:
diff --git a/tests/integration/sync/test_sync_adl.py b/tests/integration/sync/test_sync_adl.py
new file mode 100644
index 0000000000..39cc6517a6
--- /dev/null
+++ b/tests/integration/sync/test_sync_adl.py
@@ -0,0 +1,168 @@
+import json
+import os.path
+from unittest import skipIf
+
+from samcli.lib.utils.resources import AWS_LAMBDA_FUNCTION
+from tests.integration.sync.test_sync_code import TestSyncCodeBase, SKIP_SYNC_TESTS, TestSyncCode
+from tests.integration.sync.test_sync_watch import TestSyncWatchBase
+from tests.testing_utils import run_command_with_input, read_until_string, IS_WINDOWS
+
+
+@skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only")
+class TestSyncAdlCasesWithCodeParameter(TestSyncCodeBase):
+ template = "template-python-no-dependencies.yaml"
+ folder = "code"
+ dependency_layer = True
+
+ def test_sync_code_function_without_dependencies(self):
+ # CFN Api call here to collect all the stack resources
+ self.stack_resources = self._get_stacks(TestSyncCode.stack_name)
+
+ # first assert that lambda returns initial response
+ lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION)
+ lambda_response = json.loads(self._get_lambda_response(lambda_functions[0]))
+ self.assertEqual(lambda_response.get("message"), "hello world")
+
+ # update app.py with updated response
+ self.update_file(
+ self.test_data_path.joinpath("code", "after", "python_function_no_deps", "app_without_numpy.py"),
+ TestSyncCode.temp_dir.joinpath("python_function_no_deps", "app.py"),
+ )
+ # Run code sync
+ sync_command_list = self.get_sync_command_list(
+ template_file=TestSyncCode.template_path,
+ code=True,
+ watch=False,
+ resource_id_list=["HelloWorldFunction"],
+ dependency_layer=True,
+ stack_name=TestSyncCode.stack_name,
+ parameter_overrides="Parameter=Clarity",
+ image_repository=self.ecr_repo_name,
+ s3_prefix=self.s3_prefix,
+ kms_key_id=self.kms_key,
+ tags="integ=true clarity=yes foo_bar=baz",
+ )
+ sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode())
+ self.assertEqual(sync_process_execute.process.returncode, 0)
+
+ # Confirm lambda returns updated response
+ lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION)
+ lambda_response = json.loads(self._get_lambda_response(lambda_functions[0]))
+ self.assertEqual(lambda_response.get("message"), "hello mars")
+
+ # update app.py with some dependency which is missing in requirements.txt
+ self.update_file(
+ self.test_data_path.joinpath("code", "after", "python_function_no_deps", "app_with_numpy.py"),
+ TestSyncCode.temp_dir.joinpath("python_function_no_deps", "app.py"),
+ )
+ # Run code sync
+ sync_command_list = self.get_sync_command_list(
+ template_file=TestSyncCode.template_path,
+ code=True,
+ watch=False,
+ resource_id_list=["HelloWorldFunction"],
+ dependency_layer=True,
+ stack_name=TestSyncCode.stack_name,
+ parameter_overrides="Parameter=Clarity",
+ image_repository=self.ecr_repo_name,
+ s3_prefix=self.s3_prefix,
+ kms_key_id=self.kms_key,
+ tags="integ=true clarity=yes foo_bar=baz",
+ )
+ sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode())
+ self.assertEqual(sync_process_execute.process.returncode, 0)
+
+ # confirm that lambda execution will fail
+ lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION)
+ self._confirm_lambda_error(lambda_functions[0])
+
+ # finally, update requirements.txt with missing dependency
+ self.update_file(
+ self.test_data_path.joinpath("code", "after", "python_function_no_deps", "requirements.txt"),
+ TestSyncCode.temp_dir.joinpath("python_function_no_deps", "requirements.txt"),
+ )
+ # Run code sync
+ sync_command_list = self.get_sync_command_list(
+ template_file=TestSyncCode.template_path,
+ code=True,
+ watch=False,
+ resource_id_list=["HelloWorldFunction"],
+ dependency_layer=True,
+ stack_name=TestSyncCode.stack_name,
+ parameter_overrides="Parameter=Clarity",
+ image_repository=self.ecr_repo_name,
+ s3_prefix=self.s3_prefix,
+ kms_key_id=self.kms_key,
+ tags="integ=true clarity=yes foo_bar=baz",
+ )
+ sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode())
+ self.assertEqual(sync_process_execute.process.returncode, 0)
+ lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION)
+
+ # confirm that updated lambda returns expected result
+ lambda_response = json.loads(self._get_lambda_response(lambda_functions[0]))
+ self.assertEqual(lambda_response.get("message"), "hello mars")
+ self.assertIn("extra_message", lambda_response)
+
+
+@skipIf(SKIP_SYNC_TESTS or IS_WINDOWS, "Skip sync tests in CI/CD only")
+class TestSyncAdlWithWatchStartWithNoDependencies(TestSyncWatchBase):
+ @classmethod
+ def setUpClass(cls):
+ cls.template_before = os.path.join("code", "before", "template-python-no-dependencies.yaml")
+ cls.dependency_layer = True
+ super().setUpClass()
+
+ def run_initial_infra_validation(self):
+ self.stack_resources = self._get_stacks(self.stack_name)
+ lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION)
+ lambda_response = json.loads(self._get_lambda_response(lambda_functions[0]))
+ self.assertEqual(lambda_response.get("message"), "hello world")
+ self.assertNotIn("extra_message", lambda_response)
+
+ def test_sync_watch_code(self):
+ self.stack_resources = self._get_stacks(self.stack_name)
+ lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION)
+
+ # change lambda with another output
+ self.update_file(
+ self.test_dir.joinpath("code", "after", "python_function_no_deps", "app_without_numpy.py"),
+ self.test_dir.joinpath("code", "before", "python_function_no_deps", "app.py"),
+ )
+ read_until_string(
+ self.watch_process,
+ "\x1b[32mFinished syncing Function Layer Reference Sync HelloWorldFunction.\x1b[0m\n",
+ timeout=60,
+ )
+ lambda_response = json.loads(self._get_lambda_response(lambda_functions[0]))
+ self.assertEqual(lambda_response.get("message"), "hello mars")
+ self.assertNotIn("extra_message", lambda_response)
+
+ # change lambda with import with missing dependency
+ self.update_file(
+ self.test_dir.joinpath("code", "after", "python_function_no_deps", "app_with_numpy.py"),
+ self.test_dir.joinpath("code", "before", "python_function_no_deps", "app.py"),
+ )
+ read_until_string(
+ self.watch_process,
+ "\x1b[32mFinished syncing Function Layer Reference Sync HelloWorldFunction.\x1b[0m\n",
+ timeout=60,
+ )
+ self._confirm_lambda_error(lambda_functions[0])
+
+ # add dependency and confirm it executes as expected
+ self.update_file(
+ self.test_dir.joinpath("code", "after", "python_function_no_deps", "requirements.txt"),
+ self.test_dir.joinpath("code", "before", "python_function_no_deps", "requirements.txt"),
+ )
+ read_until_string(
+ self.watch_process,
+ "\x1b[32mFinished syncing Function Layer Reference Sync HelloWorldFunction.\x1b[0m\n",
+ timeout=60,
+ )
+
+        def _verify_lambda_response(_lambda_response):
+            self.assertEqual(json.loads(_lambda_response).get("message"), "hello mars")
+            self.assertIn("extra_message", json.loads(_lambda_response))
+
+ self._confirm_lambda_response(self._get_lambda_response(lambda_functions[0]), _verify_lambda_response)
diff --git a/tests/integration/sync/test_sync_code.py b/tests/integration/sync/test_sync_code.py
index d34a2efefc..e9d237380f 100644
--- a/tests/integration/sync/test_sync_code.py
+++ b/tests/integration/sync/test_sync_code.py
@@ -12,6 +12,7 @@
import pytest
import boto3
+from parameterized import parameterized_class
from samcli.lib.utils.resources import (
AWS_APIGATEWAY_RESTAPI,
@@ -28,7 +29,6 @@
SKIP_SYNC_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI and not RUN_BY_CANARY
IS_WINDOWS = platform.system().lower() == "windows"
# Some wait time for code updates to be reflected on each service
-LAMBDA_SLEEP = 3
API_SLEEP = 5
SFN_SLEEP = 5
CFN_PYTHON_VERSION_SUFFIX = os.environ.get("PYTHON_VERSION", "0.0.0").replace(".", "-")
@@ -57,7 +57,7 @@ def execute_infra_sync(self):
template_file=TestSyncCodeBase.template_path,
code=False,
watch=False,
- dependency_layer=True,
+ dependency_layer=self.dependency_layer,
stack_name=TestSyncCodeBase.stack_name,
parameter_overrides="Parameter=Clarity",
image_repository=self.ecr_repo_name,
@@ -88,6 +88,7 @@ def sync_code_base(self, execute_infra_sync):
@skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only")
+@parameterized_class([{"dependency_layer": True}, {"dependency_layer": False}])
class TestSyncCode(TestSyncCodeBase):
template = "template-python.yaml"
folder = "code"
@@ -98,13 +99,20 @@ def test_sync_code_function(self):
self.test_data_path.joinpath(self.folder).joinpath("after").joinpath("function"),
TestSyncCodeBase.temp_dir.joinpath("function"),
)
+
+ self.stack_resources = self._get_stacks(TestSyncCodeBase.stack_name)
+ if self.dependency_layer:
+ # Test update manifest
+ layer_contents = self.get_dependency_layer_contents_from_arn(self.stack_resources, "python", 1)
+ self.assertNotIn("requests", layer_contents)
+
# Run code sync
sync_command_list = self.get_sync_command_list(
template_file=TestSyncCodeBase.template_path,
code=True,
watch=False,
resource_list=["AWS::Serverless::Function"],
- dependency_layer=True,
+ dependency_layer=self.dependency_layer,
stack_name=TestSyncCodeBase.stack_name,
parameter_overrides="Parameter=Clarity",
image_repository=self.ecr_repo_name,
@@ -125,6 +133,10 @@ def test_sync_code_function(self):
self.assertIn("extra_message", lambda_response)
self.assertEqual(lambda_response.get("message"), "8")
+ if self.dependency_layer:
+ layer_contents = self.get_dependency_layer_contents_from_arn(self.stack_resources, "python", 2)
+ self.assertIn("requests", layer_contents)
+
def test_sync_code_layer(self):
shutil.rmtree(TestSyncCodeBase.temp_dir.joinpath("layer"), ignore_errors=True)
shutil.copytree(
@@ -137,7 +149,7 @@ def test_sync_code_layer(self):
code=True,
watch=False,
resource_list=["AWS::Serverless::LayerVersion"],
- dependency_layer=True,
+ dependency_layer=self.dependency_layer,
stack_name=TestSyncCodeBase.stack_name,
parameter_overrides="Parameter=Clarity",
image_repository=self.ecr_repo_name,
@@ -174,7 +186,7 @@ def test_sync_function_layer_race_condition(self):
template_file=TestSyncCodeBase.template_path,
code=True,
watch=False,
- dependency_layer=True,
+ dependency_layer=self.dependency_layer,
resource_list=["AWS::Serverless::LayerVersion", "AWS::Serverless::Function"],
stack_name=TestSyncCodeBase.stack_name,
parameter_overrides="Parameter=Clarity",
@@ -254,10 +266,31 @@ def test_sync_code_state_machine(self):
state_machine = self.stack_resources.get(AWS_STEPFUNCTIONS_STATEMACHINE)[0]
self.assertEqual(self._get_sfn_response(state_machine), '"World 2"')
+ def test_sync_code_invalid_resource_type(self):
+ sync_command_list = self.get_sync_command_list(
+ template_file=TestSyncCodeBase.template_path,
+ code=True,
+ watch=False,
+ resource_list=["AWS::Serverless::InvalidResource"],
+ stack_name=TestSyncCodeBase.stack_name,
+ parameter_overrides="Parameter=Clarity",
+ image_repository=self.ecr_repo_name,
+ s3_prefix=self.s3_prefix,
+ kms_key_id=self.kms_key,
+ tags="integ=true clarity=yes foo_bar=baz",
+ )
+ sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode())
+ self.assertEqual(sync_process_execute.process.returncode, 2)
+ self.assertIn(
+ "Invalid value for '--resource': invalid choice: AWS::Serverless::InvalidResource",
+ str(sync_process_execute.stderr),
+ )
+
@skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only")
class TestSyncCodeDotnetFunctionTemplate(TestSyncCodeBase):
template = "template-dotnet.yaml"
+ dependency_layer = False
folder = "code"
def test_sync_code_shared_codeuri(self):
@@ -296,6 +329,62 @@ def test_sync_code_shared_codeuri(self):
@skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only")
+@parameterized_class([{"dependency_layer": True}, {"dependency_layer": False}])
+class TestSyncCodeNodejsFunctionTemplate(TestSyncCodeBase):
+ template = "template-nodejs.yaml"
+ folder = "code"
+
+ def test_sync_code_nodejs_function(self):
+ shutil.rmtree(Path(TestSyncCodeBase.temp_dir).joinpath("nodejs_function"), ignore_errors=True)
+ shutil.copytree(
+ self.test_data_path.joinpath("code").joinpath("after").joinpath("nodejs_function"),
+ Path(TestSyncCodeBase.temp_dir).joinpath("nodejs_function"),
+ )
+
+ self.stack_resources = self._get_stacks(TestSyncCodeBase.stack_name)
+ if self.dependency_layer:
+ # Test update manifest
+ layer_contents = self.get_dependency_layer_contents_from_arn(
+ self.stack_resources, str(Path("nodejs", "node_modules")), 1
+ )
+ self.assertNotIn("@faker-js", layer_contents)
+
+ # Run code sync
+ sync_command_list = self.get_sync_command_list(
+ template_file=TestSyncCodeBase.template_path,
+ code=True,
+ watch=False,
+ resource_list=["AWS::Serverless::Function"],
+ dependency_layer=self.dependency_layer,
+ stack_name=TestSyncCodeBase.stack_name,
+ parameter_overrides="Parameter=Clarity",
+ image_repository=self.ecr_repo_name,
+ s3_prefix=self.s3_prefix,
+ kms_key_id=self.kms_key,
+ tags="integ=true clarity=yes foo_bar=baz",
+ )
+ sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode())
+ self.assertEqual(sync_process_execute.process.returncode, 0)
+
+ # CFN Api call here to collect all the stack resources
+ self.stack_resources = self._get_stacks(TestSyncCodeBase.stack_name)
+ # Lambda Api call here, which tests both the python function and the layer
+ lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION)
+ for lambda_function in lambda_functions:
+ if lambda_function == "HelloWorldFunction":
+ lambda_response = json.loads(self._get_lambda_response(lambda_function))
+ self.assertIn("extra_message", lambda_response)
+ self.assertEqual(lambda_response.get("message"), "Hello world!")
+
+ if self.dependency_layer:
+ layer_contents = self.get_dependency_layer_contents_from_arn(
+ self.stack_resources, str(Path("nodejs", "node_modules")), 2
+ )
+ self.assertIn("@faker-js", layer_contents)
+
+
+@skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only")
+@parameterized_class([{"dependency_layer": True}, {"dependency_layer": False}])
class TestSyncCodeNested(TestSyncCodeBase):
template = "template.yaml"
folder = "nested"
@@ -315,7 +404,7 @@ def test_sync_code_nested_function(self):
code=True,
watch=False,
resource_list=["AWS::Serverless::Function"],
- dependency_layer=True,
+ dependency_layer=self.dependency_layer,
stack_name=TestSyncCodeBase.stack_name,
parameter_overrides="Parameter=Clarity",
image_repository=self.ecr_repo_name,
@@ -348,7 +437,7 @@ def test_sync_code_nested_layer(self):
code=True,
watch=False,
resource_list=["AWS::Serverless::LayerVersion"],
- dependency_layer=True,
+ dependency_layer=self.dependency_layer,
stack_name=TestSyncCodeBase.stack_name,
parameter_overrides="Parameter=Clarity",
image_repository=self.ecr_repo_name,
@@ -388,7 +477,7 @@ def test_sync_nested_function_layer_race_condition(self):
template_file=TestSyncCodeBase.template_path,
code=True,
watch=False,
- dependency_layer=True,
+ dependency_layer=self.dependency_layer,
stack_name=TestSyncCodeBase.stack_name,
resource_list=["AWS::Serverless::LayerVersion", "AWS::Serverless::Function"],
parameter_overrides="Parameter=Clarity",
@@ -410,7 +499,6 @@ def test_sync_nested_function_layer_race_condition(self):
self.assertIn("extra_message", lambda_response)
self.assertEqual(lambda_response.get("message"), "10")
- @pytest.mark.skip(reason="Currently not properly supported")
def test_sync_code_nested_rest_api(self):
shutil.rmtree(
TestSyncCodeBase.temp_dir.joinpath("child_stack").joinpath("child_child_stack").joinpath("apigateway"),
@@ -429,6 +517,7 @@ def test_sync_code_nested_rest_api(self):
template_file=TestSyncCodeBase.template_path,
code=True,
watch=False,
+ dependency_layer=self.dependency_layer,
resource_list=["AWS::Serverless::Api"],
stack_name=TestSyncCodeBase.stack_name,
parameter_overrides="Parameter=Clarity",
@@ -447,7 +536,6 @@ def test_sync_code_nested_rest_api(self):
rest_api = self.stack_resources.get(AWS_APIGATEWAY_RESTAPI)[0]
self.assertEqual(self._get_api_message(rest_api), '{"message": "hello 2"}')
- @pytest.mark.skip(reason="Currently not properly supported")
def test_sync_code_nested_state_machine(self):
shutil.rmtree(
TestSyncCodeBase.temp_dir.joinpath("child_stack").joinpath("child_child_stack").joinpath("statemachine"),
@@ -486,6 +574,7 @@ def test_sync_code_nested_state_machine(self):
@skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only")
+@parameterized_class([{"dependency_layer": True}, {"dependency_layer": False}])
class TestSyncCodeNestedWithIntrinsics(TestSyncCodeBase):
template = "template.yaml"
folder = "nested_intrinsics"
@@ -509,14 +598,13 @@ def test_sync_code_nested_getattr_layer(self):
code=True,
watch=False,
resource_list=["AWS::Serverless::LayerVersion"],
- dependency_layer=True,
+ dependency_layer=self.dependency_layer,
stack_name=TestSyncCodeBase.stack_name,
parameter_overrides="Parameter=Clarity",
image_repository=self.ecr_repo_name,
s3_prefix=self.s3_prefix,
kms_key_id=self.kms_key,
tags="integ=true clarity=yes foo_bar=baz",
- debug=True,
)
sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode())
self.assertEqual(sync_process_execute.process.returncode, 0)
diff --git a/tests/integration/sync/test_sync_infra.py b/tests/integration/sync/test_sync_infra.py
index 189970078b..a06e52db6d 100644
--- a/tests/integration/sync/test_sync_infra.py
+++ b/tests/integration/sync/test_sync_infra.py
@@ -9,7 +9,7 @@
from unittest import skipIf
import pytest
-from parameterized import parameterized
+from parameterized import parameterized, parameterized_class
from samcli.lib.utils.resources import (
AWS_APIGATEWAY_RESTAPI,
@@ -34,6 +34,7 @@
@skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only")
+@parameterized_class([{"dependency_layer": True}, {"dependency_layer": False}])
class TestSyncInfra(SyncIntegBase):
@skipIf(
IS_WINDOWS,
@@ -52,7 +53,7 @@ def test_sync_infra(self, runtime):
template_file=template_path,
code=False,
watch=False,
- dependency_layer=True,
+ dependency_layer=self.dependency_layer,
stack_name=stack_name,
parameter_overrides="Parameter=Clarity",
image_repository=self.ecr_repo_name,
@@ -89,7 +90,7 @@ def test_sync_infra(self, runtime):
template_file=template_path,
code=False,
watch=False,
- dependency_layer=True,
+ dependency_layer=self.dependency_layer,
stack_name=stack_name,
parameter_overrides="Parameter=Clarity",
image_repository=self.ecr_repo_name,
@@ -129,7 +130,7 @@ def test_sync_infra_no_confirm(self, template_file):
template_file=template_path,
code=False,
watch=False,
- dependency_layer=True,
+ dependency_layer=self.dependency_layer,
stack_name=stack_name,
parameter_overrides="Parameter=Clarity",
image_repository=self.ecr_repo_name,
@@ -151,7 +152,7 @@ def test_sync_infra_no_stack_name(self, template_file):
template_file=template_path,
code=False,
watch=False,
- dependency_layer=True,
+ dependency_layer=self.dependency_layer,
parameter_overrides="Parameter=Clarity",
image_repository=self.ecr_repo_name,
s3_prefix=self.s3_prefix,
@@ -174,7 +175,7 @@ def test_sync_infra_no_capabilities(self, template_file):
template_file=template_path,
code=False,
watch=False,
- dependency_layer=True,
+ dependency_layer=self.dependency_layer,
stack_name=stack_name,
parameter_overrides="Parameter=Clarity",
image_repository=self.ecr_repo_name,
@@ -192,6 +193,54 @@ def test_sync_infra_no_capabilities(self, template_file):
str(sync_process_execute.stderr),
)
+ @parameterized.expand(["infra/template-python-before.yaml"])
+ def test_sync_infra_s3_bucket_option(self, template_file):
+ template_path = str(self.test_data_path.joinpath(template_file))
+ stack_name = self._method_to_stack_name(self.id())
+
+ sync_command_list = self.get_sync_command_list(
+ template_file=template_path,
+ code=False,
+ watch=False,
+ dependency_layer=self.dependency_layer,
+ stack_name=stack_name,
+ parameter_overrides="Parameter=Clarity",
+ image_repository=self.ecr_repo_name,
+ s3_bucket=self.bucket_name,
+ s3_prefix=self.s3_prefix,
+ kms_key_id=self.kms_key,
+ capabilities_list=["CAPABILITY_IAM", "CAPABILITY_AUTO_EXPAND"],
+ tags="integ=true clarity=yes foo_bar=baz",
+ )
+
+ sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode())
+ self.assertEqual(sync_process_execute.process.returncode, 0)
+ self.assertIn("Stack creation succeeded. Sync infra completed.", str(sync_process_execute.stderr))
+
+ # Make sure all resources are created properly after specifying custom bucket
+ # CFN Api call here to collect all the stack resources
+ self.stack_resources = self._get_stacks(stack_name)
+
+ # Lambda Api call here, which tests both the python function and the layer
+ lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION)
+ for lambda_function in lambda_functions:
+ lambda_response = json.loads(self._get_lambda_response(lambda_function))
+ self.assertIn("extra_message", lambda_response)
+ self.assertEqual(lambda_response.get("message"), "7")
+
+ # ApiGateway Api call here, which tests both of the RestApi
+ rest_api = self.stack_resources.get(AWS_APIGATEWAY_RESTAPI)[0]
+ self.assertEqual(self._get_api_message(rest_api), '{"message": "hello 1"}')
+
+ # SFN Api call here, which tests the StateMachine
+ state_machine = self.stack_resources.get(AWS_STEPFUNCTIONS_STATEMACHINE)[0]
+ self.assertEqual(self._get_sfn_response(state_machine), '"World 1"')
+
+
+@skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only")
+class TestSyncInfraCDKTemplates(SyncIntegBase):
+ dependency_layer = None
+
@parameterized.expand(
[
(
@@ -309,3 +358,48 @@ def test_cdk_templates(self, template_file, template_after, function_id, depende
lambda_response = json.loads(self._get_lambda_response(lambda_function))
self.assertIn("extra_message", lambda_response)
self.assertEqual(lambda_response.get("message"), "9")
+
+
+@skipIf(SKIP_SYNC_TESTS, "Skip sync tests in CI/CD only")
+@parameterized_class([{"dependency_layer": True}, {"dependency_layer": False}])
+class TestSyncInfraWithJava(SyncIntegBase):
+ @parameterized.expand(["infra/template-java.yaml"])
+ def test_sync_infra_with_java(self, template_file):
+        """This will test a case where the user flips the ADL flag between sync sessions"""
+ template_path = str(self.test_data_path.joinpath(template_file))
+ stack_name = self._method_to_stack_name(self.id())
+ self.stacks.append({"name": stack_name})
+
+ # first run with current dependency layer value
+ self._run_sync_and_validate_lambda_call(self.dependency_layer, template_path, stack_name)
+
+ # now flip the dependency layer value and re-run the sync & tests
+ self._run_sync_and_validate_lambda_call(not self.dependency_layer, template_path, stack_name)
+
+ def _run_sync_and_validate_lambda_call(self, dependency_layer: bool, template_path: str, stack_name: str) -> None:
+ # Run infra sync
+ sync_command_list = self.get_sync_command_list(
+ template_file=template_path,
+ code=False,
+ watch=False,
+ dependency_layer=dependency_layer,
+ stack_name=stack_name,
+ parameter_overrides="Parameter=Clarity",
+ image_repository=self.ecr_repo_name,
+ s3_prefix=self.s3_prefix,
+ kms_key_id=self.kms_key,
+ capabilities_list=["CAPABILITY_IAM", "CAPABILITY_AUTO_EXPAND"],
+ tags="integ=true clarity=yes foo_bar=baz",
+ )
+ sync_process_execute = run_command_with_input(sync_command_list, "y\n".encode())
+ self.assertEqual(sync_process_execute.process.returncode, 0)
+ self.assertIn("Sync infra completed.", str(sync_process_execute.stderr))
+
+ self.stack_resources = self._get_stacks(stack_name)
+ lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION)
+ for lambda_function in lambda_functions:
+ lambda_response = json.loads(self._get_lambda_response(lambda_function))
+ self.assertIn("message", lambda_response)
+ self.assertIn("sum", lambda_response)
+ self.assertEqual(lambda_response.get("message"), "hello world")
+ self.assertEqual(lambda_response.get("sum"), 12)
diff --git a/tests/integration/sync/test_sync_watch.py b/tests/integration/sync/test_sync_watch.py
index 46edc6834a..423bc580f5 100644
--- a/tests/integration/sync/test_sync_watch.py
+++ b/tests/integration/sync/test_sync_watch.py
@@ -66,7 +66,6 @@ def setUp(self):
self.s3_prefix = uuid.uuid4().hex
self.test_dir = Path(tempfile.mkdtemp())
self.template_before = "" if not self.template_before else self.template_before
- self.dependency_layer = True if self.dependency_layer is None else self.dependency_layer
self.stack_name = self._method_to_stack_name(self.id())
# Remove temp dir so that shutil.copytree will not throw an error
# Needed for python 3.6 and 3.7 as these versions don't have dirs_exist_ok
@@ -91,6 +90,19 @@ def tearDown(self):
cfn_client.delete_stack(StackName=stack_name)
super().tearDown()
+ def run_initial_infra_validation(self) -> None:
+ """Runs initial infra validation after deployment is completed"""
+ self.stack_resources = self._get_stacks(self.stack_name)
+ lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION)
+ for lambda_function in lambda_functions:
+ lambda_response = json.loads(self._get_lambda_response(lambda_function))
+ self.assertIn("extra_message", lambda_response)
+ self.assertEqual(lambda_response.get("message"), "7")
+ rest_api = self.stack_resources.get(AWS_APIGATEWAY_RESTAPI)[0]
+ self.assertEqual(self._get_api_message(rest_api), '{"message": "hello 1"}')
+ state_machine = self.stack_resources.get(AWS_STEPFUNCTIONS_STATEMACHINE)[0]
+ self.assertEqual(self._get_sfn_response(state_machine), '"World 1"')
+
def _setup_verify_infra(self):
template_path = self.test_dir.joinpath(self.template_before)
self.stacks.append({"name": self.stack_name})
@@ -115,17 +127,7 @@ def _setup_verify_infra(self):
read_until_string(self.watch_process, "\x1b[32mInfra sync completed.\x1b[0m\n", timeout=600)
- # Initial Infra Validation
- self.stack_resources = self._get_stacks(self.stack_name)
- lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION)
- for lambda_function in lambda_functions:
- lambda_response = json.loads(self._get_lambda_response(lambda_function))
- self.assertIn("extra_message", lambda_response)
- self.assertEqual(lambda_response.get("message"), "7")
- rest_api = self.stack_resources.get(AWS_APIGATEWAY_RESTAPI)[0]
- self.assertEqual(self._get_api_message(rest_api), '{"message": "hello 1"}')
- state_machine = self.stack_resources.get(AWS_STEPFUNCTIONS_STATEMACHINE)[0]
- self.assertEqual(self._get_sfn_response(state_machine), '"World 1"')
+ self.run_initial_infra_validation()
def _verify_infra_changes(self, resources):
# Lambda
@@ -143,12 +145,6 @@ def _verify_infra_changes(self, resources):
state_machine = resources.get(AWS_STEPFUNCTIONS_STATEMACHINE)[0]
self.assertEqual(self._get_sfn_response(state_machine), '"World 2"')
- @staticmethod
- def update_file(source, destination):
- with open(source, "rb") as source_file:
- with open(destination, "wb") as destination_file:
- destination_file.write(source_file.read())
-
@parameterized_class(
[{"runtime": "python", "dependency_layer": True}, {"runtime": "python", "dependency_layer": False}]
@@ -159,12 +155,6 @@ def setUpClass(cls):
cls.template_before = f"infra/template-{cls.runtime}-before.yaml"
super(TestSyncCodeInfra, cls).setUpClass()
- def setup(self):
- super(TestSyncCodeInfra, self).setUp()
-
- def tearDown(self):
- super(TestSyncCodeInfra, self).tearDown()
-
def test_sync_watch_infra(self):
self.update_file(
@@ -181,24 +171,31 @@ def test_sync_watch_infra(self):
@parameterized_class([{"dependency_layer": True}, {"dependency_layer": False}])
class TestSyncWatchCode(TestSyncWatchBase):
- @classmethod
- def setUpClass(cls):
- cls.template_before = f"code/before/template-python.yaml"
- super(TestSyncWatchCode, cls).setUpClass()
-
- def setup(self):
- super(TestSyncWatchCode, self).setUp()
-
- def tearDown(self):
- super(TestSyncWatchCode, self).tearDown()
+ template_before = str(Path("code", "before", "template-python.yaml"))
def test_sync_watch_code(self):
self.stack_resources = self._get_stacks(self.stack_name)
+ if self.dependency_layer:
+ # Test update manifest
+ layer_contents = self.get_dependency_layer_contents_from_arn(self.stack_resources, "python", 1)
+ self.assertNotIn("requests", layer_contents)
+ self.update_file(
+ self.test_dir.joinpath("code", "after", "function", "requirements.txt"),
+ self.test_dir.joinpath("code", "before", "function", "requirements.txt"),
+ )
+ read_until_string(
+ self.watch_process,
+ "\x1b[32mFinished syncing Function Layer Reference Sync HelloWorldFunction.\x1b[0m\n",
+ timeout=45,
+ )
+ layer_contents = self.get_dependency_layer_contents_from_arn(self.stack_resources, "python", 2)
+ self.assertIn("requests", layer_contents)
+
# Test Lambda Function
self.update_file(
- self.test_dir.joinpath("code/after/function/app.py"),
- self.test_dir.joinpath("code/before/function/app.py"),
+ self.test_dir.joinpath("code", "after", "function", "app.py"),
+ self.test_dir.joinpath("code", "before", "function", "app.py"),
)
read_until_string(
self.watch_process, "\x1b[32mFinished syncing Lambda Function HelloWorldFunction.\x1b[0m\n", timeout=30
@@ -211,8 +208,8 @@ def test_sync_watch_code(self):
# Test Lambda Layer
self.update_file(
- self.test_dir.joinpath("code/after/layer/layer_method.py"),
- self.test_dir.joinpath("code/before/layer/layer_method.py"),
+ self.test_dir.joinpath("code", "after", "layer", "layer_method.py"),
+ self.test_dir.joinpath("code", "before", "layer", "layer_method.py"),
)
read_until_string(
self.watch_process,
@@ -227,8 +224,8 @@ def test_sync_watch_code(self):
# Test APIGW
self.update_file(
- self.test_dir.joinpath("code/after/apigateway/definition.json"),
- self.test_dir.joinpath("code/before/apigateway/definition.json"),
+ self.test_dir.joinpath("code", "after", "apigateway", "definition.json"),
+ self.test_dir.joinpath("code", "before", "apigateway", "definition.json"),
)
read_until_string(self.watch_process, "\x1b[32mFinished syncing RestApi HelloWorldApi.\x1b[0m\n", timeout=20)
time.sleep(API_SLEEP)
@@ -237,8 +234,8 @@ def test_sync_watch_code(self):
# Test SFN
self.update_file(
- self.test_dir.joinpath("code/after/statemachine/function.asl.json"),
- self.test_dir.joinpath("code/before/statemachine/function.asl.json"),
+ self.test_dir.joinpath("code", "after", "statemachine", "function.asl.json"),
+ self.test_dir.joinpath("code", "before", "statemachine", "function.asl.json"),
)
read_until_string(
self.watch_process, "\x1b[32mFinished syncing StepFunctions HelloStepFunction.\x1b[0m\n", timeout=20
@@ -248,24 +245,14 @@ def test_sync_watch_code(self):
self.assertEqual(self._get_sfn_response(state_machine), '"World 2"')
+@parameterized_class([{"dependency_layer": True}, {"dependency_layer": False}])
class TestSyncInfraNestedStacks(TestSyncWatchBase):
- @classmethod
- def setUpClass(cls):
- cls.template_before = f"infra/parent-stack.yaml"
- cls.dependency_layer = False
- # cls.parameter_overrides = "EnableNestedStack=true"
- super(TestSyncInfraNestedStacks, cls).setUpClass()
-
- def setup(self):
- super(TestSyncInfraNestedStacks, self).setUp()
-
- def tearDown(self):
- super(TestSyncInfraNestedStacks, self).tearDown()
+ template_before = str(Path("infra", "parent-stack.yaml"))
def test_sync_watch_infra_nested_stack(self):
self.update_file(
- self.test_dir.joinpath(f"infra/template-python-after.yaml"),
- self.test_dir.joinpath(f"infra/template-python-before.yaml"),
+ self.test_dir.joinpath("infra", "template-python-after.yaml"),
+ self.test_dir.joinpath("infra", "template-python-before.yaml"),
)
read_until_string(self.watch_process, "\x1b[32mInfra sync completed.\x1b[0m\n", timeout=600)
@@ -275,26 +262,34 @@ def test_sync_watch_infra_nested_stack(self):
self._verify_infra_changes(self.stack_resources)
+@parameterized_class([{"dependency_layer": True}, {"dependency_layer": False}])
class TestSyncCodeWatchNestedStacks(TestSyncWatchBase):
- @classmethod
- def setUpClass(cls):
- cls.template_before = f"code/before/parent-stack.yaml"
- cls.dependency_layer = False
- super(TestSyncCodeWatchNestedStacks, cls).setUpClass()
-
- def setup(self):
- super(TestSyncCodeWatchNestedStacks, self).setUp()
-
- def tearDown(self):
- super(TestSyncCodeWatchNestedStacks, self).tearDown()
+ template_before = str(Path("code", "before", "parent-stack.yaml"))
def test_sync_watch_code_nested_stack(self):
self.stack_resources = self._get_stacks(self.stack_name)
+ if self.dependency_layer:
+ # Test update manifest
+ layer_contents = self.get_dependency_layer_contents_from_arn(self.stack_resources, "python", 1)
+ self.assertNotIn("requests", layer_contents)
+ self.update_file(
+ self.test_dir.joinpath("code", "after", "function", "requirements.txt"),
+ self.test_dir.joinpath("code", "before", "function", "requirements.txt"),
+ )
+ read_until_string(
+ self.watch_process,
+ "\x1b[32mFinished syncing Function Layer Reference Sync "
+ "LocalNestedChildStack/HelloWorldFunction.\x1b[0m\n",
+ timeout=45,
+ )
+ layer_contents = self.get_dependency_layer_contents_from_arn(self.stack_resources, "python", 2)
+ self.assertIn("requests", layer_contents)
+
# Test Lambda Function
self.update_file(
- self.test_dir.joinpath("code/after/function/app.py"),
- self.test_dir.joinpath("code/before/function/app.py"),
+ self.test_dir.joinpath("code", "after", "function", "app.py"),
+ self.test_dir.joinpath("code", "before", "function", "app.py"),
)
read_until_string(
self.watch_process,
@@ -309,8 +304,8 @@ def test_sync_watch_code_nested_stack(self):
# Test Lambda Layer
self.update_file(
- self.test_dir.joinpath("code/after/layer/layer_method.py"),
- self.test_dir.joinpath("code/before/layer/layer_method.py"),
+ self.test_dir.joinpath("code", "after", "layer", "layer_method.py"),
+ self.test_dir.joinpath("code", "before", "layer", "layer_method.py"),
)
read_until_string(
self.watch_process,
@@ -325,8 +320,8 @@ def test_sync_watch_code_nested_stack(self):
# Test APIGW
self.update_file(
- self.test_dir.joinpath("code/after/apigateway/definition.json"),
- self.test_dir.joinpath("code/before/apigateway/definition.json"),
+ self.test_dir.joinpath("code", "after", "apigateway", "definition.json"),
+ self.test_dir.joinpath("code", "before", "apigateway", "definition.json"),
)
read_until_string(
self.watch_process,
@@ -339,8 +334,8 @@ def test_sync_watch_code_nested_stack(self):
# Test SFN
self.update_file(
- self.test_dir.joinpath("code/after/statemachine/function.asl.json"),
- self.test_dir.joinpath("code/before/statemachine/function.asl.json"),
+ self.test_dir.joinpath("code", "after", "statemachine", "function.asl.json"),
+ self.test_dir.joinpath("code", "before", "statemachine", "function.asl.json"),
)
read_until_string(
self.watch_process,
diff --git a/tests/integration/telemetry/test_experimental_metric.py b/tests/integration/telemetry/test_experimental_metric.py
index 4d9a4f2fee..d9a829054d 100644
--- a/tests/integration/telemetry/test_experimental_metric.py
+++ b/tests/integration/telemetry/test_experimental_metric.py
@@ -2,6 +2,7 @@
import platform
import time
from pathlib import Path
+from unittest import skip
from unittest.mock import ANY
from .integ_base import IntegBase, TelemetryServer
@@ -13,6 +14,10 @@ class TestExperimentalMetric(IntegBase):
Validates the basic tenets/contract Telemetry module needs to adhere to
"""
+ @skip(
+ "Accelerate are not in experimental any more, just skip this test. If we have new experimental commands, "
+ "we can update this test"
+ )
def test_must_send_experimental_metrics_if_experimental_command(self):
"""
Metrics should be sent if "Disabled via config file but Enabled via Envvar"
@@ -53,7 +58,6 @@ def test_must_send_experimental_metrics_if_experimental_command(self):
"region": ANY,
"commandName": ANY,
"metricSpecificAttributes": {
- "experimentalAccelerate": True,
"experimentalAll": False,
"experimentalEsbuild": False,
},
@@ -67,6 +71,10 @@ def test_must_send_experimental_metrics_if_experimental_command(self):
self.assertEqual(request["data"], expected_data)
os.environ["SAM_CLI_BETA_ACCELERATE"] = "0"
+ @skip(
+ "Accelerate are not in experimental any more, just skip this test. If we have new experimental commands, "
+ "we can update this test"
+ )
def test_must_send_experimental_metrics_if_experimental_option(self):
"""
Metrics should be sent if "Disabled via config file but Enabled via Envvar"
@@ -81,7 +89,7 @@ def test_must_send_experimental_metrics_if_experimental_option(self):
process = self.run_cmd(cmd_list=[self.cmd, "logs", "--include-traces"], optout_envvar_value="1")
process.communicate()
- self.assertEqual(process.returncode, 1, "Command should fail")
+ self.assertEqual(process.returncode, 2, "Command should fail")
all_requests = server.get_all_requests()
self.assertEqual(1, len(all_requests), "Command run metric must be sent")
request = all_requests[0]
@@ -104,7 +112,6 @@ def test_must_send_experimental_metrics_if_experimental_option(self):
"region": ANY,
"commandName": ANY,
"metricSpecificAttributes": {
- "experimentalAccelerate": True,
"experimentalAll": True,
"experimentalEsbuild": True,
},
@@ -188,7 +195,7 @@ def test_must_send_not_experimental_metrics_if_not_experimental(self):
process = self.run_cmd(cmd_list=[self.cmd, "logs", "--name", "abc"], optout_envvar_value="1")
process.communicate()
- self.assertEqual(process.returncode, 1, "Command should fail")
+ self.assertEqual(process.returncode, 2, "Command should fail")
all_requests = server.get_all_requests()
self.assertEqual(1, len(all_requests), "Command run metric must be sent")
request = all_requests[0]
diff --git a/tests/integration/testdata/buildcmd/Java/maven-with-layer/HelloWorldFunction/pom.xml b/tests/integration/testdata/buildcmd/Java/maven-with-layer/HelloWorldFunction/pom.xml
new file mode 100644
index 0000000000..6c18c4e84e
--- /dev/null
+++ b/tests/integration/testdata/buildcmd/Java/maven-with-layer/HelloWorldFunction/pom.xml
@@ -0,0 +1,58 @@
+
+ 4.0.0
+ helloworld
+ HelloWorld
+ 1.0
+ jar
+ A sample Hello World created for SAM CLI.
+
+ 8
+ 8
+
+
+
+
+ helloworld
+ HelloWorldLayer
+ 1.0
+ provided
+
+
+ com.amazonaws
+ aws-lambda-java-core
+ 1.2.1
+
+
+ com.amazonaws
+ aws-lambda-java-events
+ 3.11.0
+
+
+ junit
+ junit
+ 4.13.2
+ test
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+ 3.2.4
+
+
+
+
+ package
+
+ shade
+
+
+
+
+
+
+
diff --git a/tests/integration/testdata/buildcmd/Java/maven-with-layer/HelloWorldFunction/src/main/java/helloworld/App.java b/tests/integration/testdata/buildcmd/Java/maven-with-layer/HelloWorldFunction/src/main/java/helloworld/App.java
new file mode 100644
index 0000000000..770ee5605d
--- /dev/null
+++ b/tests/integration/testdata/buildcmd/Java/maven-with-layer/HelloWorldFunction/src/main/java/helloworld/App.java
@@ -0,0 +1,17 @@
+package helloworld;
+
+import com.amazonaws.services.lambda.runtime.Context;
+import com.amazonaws.services.lambda.runtime.LambdaLogger;
+
+import helloworldlayer.SimpleMath;
+
+/**
+ * Handler for requests to Lambda function.
+ */
+public class App {
+
+ public String handleRequest(Context context) {
+ int sumResult = SimpleMath.sum(7, 5);
+ return String.format("hello world. sum is %d.", sumResult);
+ }
+}
diff --git a/tests/integration/testdata/buildcmd/Java/maven-with-layer/HelloWorldLayer/pom.xml b/tests/integration/testdata/buildcmd/Java/maven-with-layer/HelloWorldLayer/pom.xml
new file mode 100644
index 0000000000..637bd8fa44
--- /dev/null
+++ b/tests/integration/testdata/buildcmd/Java/maven-with-layer/HelloWorldLayer/pom.xml
@@ -0,0 +1,36 @@
+
+ 4.0.0
+ helloworld
+ HelloWorldLayer
+ 1.0
+ jar
+ A sample Hello World created for SAM CLI.
+
+ 8
+ 8
+
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+ 3.2.4
+
+
+
+
+ package
+
+ shade
+
+
+
+
+
+
+
diff --git a/tests/integration/testdata/buildcmd/Java/maven-with-layer/HelloWorldLayer/src/main/java/helloworldlayer/SimpleMath.java b/tests/integration/testdata/buildcmd/Java/maven-with-layer/HelloWorldLayer/src/main/java/helloworldlayer/SimpleMath.java
new file mode 100644
index 0000000000..1ad779173b
--- /dev/null
+++ b/tests/integration/testdata/buildcmd/Java/maven-with-layer/HelloWorldLayer/src/main/java/helloworldlayer/SimpleMath.java
@@ -0,0 +1,8 @@
+package helloworldlayer;
+
+public class SimpleMath {
+
+ public static int sum(int a, int b) {
+ return a + b;
+ }
+}
diff --git a/tests/integration/testdata/buildcmd/template-java-maven-with-layers.yaml b/tests/integration/testdata/buildcmd/template-java-maven-with-layers.yaml
new file mode 100644
index 0000000000..14fd2a0c90
--- /dev/null
+++ b/tests/integration/testdata/buildcmd/template-java-maven-with-layers.yaml
@@ -0,0 +1,27 @@
+AWSTemplateFormatVersion: "2010-09-09"
+Transform: AWS::Serverless-2016-10-31
+
+Globals:
+ Function:
+ Timeout: 30
+
+Resources:
+ HelloWorldFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ CodeUri: Java/maven-with-layer/HelloWorldFunction
+ Handler: helloworld.App::handleRequest
+ Runtime: java8
+ MemorySize: 512
+ Layers:
+ - !Ref HelloWorldLayer
+
+ HelloWorldLayer:
+ Type: AWS::Serverless::LayerVersion
+ Properties:
+ ContentUri: Java/maven-with-layer/HelloWorldLayer
+ CompatibleRuntimes:
+ - java8
+ Metadata:
+ BuildMethod: java8
+ BuildArchitecture: x86_64
diff --git a/tests/integration/testdata/logs/nested-python-apigw-sfn/apigw-function/app.py b/tests/integration/testdata/logs/nested-python-apigw-sfn/apigw-function/app.py
new file mode 100644
index 0000000000..41092111f7
--- /dev/null
+++ b/tests/integration/testdata/logs/nested-python-apigw-sfn/apigw-function/app.py
@@ -0,0 +1,6 @@
+
+def handler(event, context):
+ print("Hello world from HelloWorldServerlessApi/hello function")
+ print("Hello world from ApiGwFunction function")
+ print("this should be filtered ApiGwFunction")
+ return {}
diff --git a/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/apigw-function/app.py b/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/apigw-function/app.py
new file mode 100644
index 0000000000..34d4ff43ec
--- /dev/null
+++ b/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/apigw-function/app.py
@@ -0,0 +1,6 @@
+
+def handler(event, context):
+ print("Hello world from ChildStackHelloWorldServerlessApi/hello function")
+ print("Hello world from ChildStack/ApiGwFunction function")
+ print("this should be filtered ChildStackApiGwFunction")
+ return {}
diff --git a/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/grand-child-stack/apigw-function/app.py b/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/grand-child-stack/apigw-function/app.py
new file mode 100644
index 0000000000..11cd083294
--- /dev/null
+++ b/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/grand-child-stack/apigw-function/app.py
@@ -0,0 +1,6 @@
+
+def handler(event, context):
+ print("Hello world from ChildStackGrandChildStackHelloWorldServerlessApi/hello function")
+ print("Hello world from ChildStack/GrandChildStack/ApiGwFunction function")
+ print("this should be filtered ChildStackGrandChildStackApiGwFunction")
+ return {}
diff --git a/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/grand-child-stack/sfn-function/app.py b/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/grand-child-stack/sfn-function/app.py
new file mode 100644
index 0000000000..5777bf7ccb
--- /dev/null
+++ b/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/grand-child-stack/sfn-function/app.py
@@ -0,0 +1,6 @@
+
+def handler(event, context):
+ print("Hello world from ChildStackGrandChildStackHelloWorldServerlessApi/world function")
+ print("Hello world from ChildStack/GrandChildStack/SfnFunction function")
+ print("this should be filtered ChildStackGrandChildStackSfnFunction")
+ return {}
diff --git a/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/grand-child-stack/template.yaml b/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/grand-child-stack/template.yaml
new file mode 100644
index 0000000000..b7a4dc30f0
--- /dev/null
+++ b/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/grand-child-stack/template.yaml
@@ -0,0 +1,120 @@
+AWSTemplateFormatVersion: '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: Logs Command Test
+
+Globals:
+ Function:
+ Timeout: 10
+
+Resources:
+ MyStateMachine:
+ Type: AWS::Serverless::StateMachine
+ Properties:
+ Definition:
+ Comment: A Hello World example of the Amazon States Language using Pass states
+ StartAt: Hello
+ States:
+ Hello:
+ Type: Pass
+ Result: Hello
+ Next: CallFunction
+ CallFunction:
+ Type: Task
+ Resource: !GetAtt SfnFunction.Arn
+ Next: World
+ World:
+ Type: Pass
+ Result: World
+ End: true
+ Type: EXPRESS
+ Policies:
+ - Version: '2012-10-17'
+ Statement:
+ - Effect: "Allow"
+ Action:
+ - "logs:*"
+ Resource: "*"
+ - Version: '2012-10-17'
+ Statement:
+ - Effect: "Allow"
+ Action:
+ - "lambda:InvokeFunction"
+ Resource:
+ - !GetAtt SfnFunction.Arn
+ Logging:
+ Level: ALL
+ Destinations:
+ - CloudWatchLogsLogGroup:
+ LogGroupArn: !GetAtt MyStateMachineLogGroup.Arn
+ Tracing:
+ Enabled: True
+ Events:
+ GetApi:
+ Type: Api
+ Properties:
+ Path: /world
+ Method: get
+ RestApiId: !Ref HelloWorldServerlessApi
+
+ MyStateMachineLogGroup:
+ Type: AWS::Logs::LogGroup
+ Properties:
+ LogGroupName: !Join ['-', ['/aws/vendedlogs/', !Select [2, !Split ['/', !Ref AWS::StackId]]]]
+
+ ApiGwFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ CodeUri: apigw-function/
+ Handler: app.handler
+ Runtime: python3.9
+ Tracing: Active
+ Events:
+ HelloWorld:
+ Type: Api
+ Properties:
+ Path: /hello
+ Method: get
+ RestApiId: !Ref HelloWorldServerlessApi
+
+ SfnFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ CodeUri: sfn-function/
+ Handler: app.handler
+ Runtime: python3.9
+ Tracing: Active
+
+ HelloWorldServerlessApi:
+ Type: AWS::Serverless::Api
+ Properties:
+ StageName: "Prod"
+ TracingEnabled: True
+ MethodSettings:
+ - LoggingLevel: INFO
+ ResourcePath: '/*'
+ HttpMethod: '*'
+
+ ApiGwAccountConfig:
+ Type: "AWS::ApiGateway::Account"
+ Properties:
+ CloudWatchRoleArn: !GetAtt "ApiGatewayLoggingRole.Arn"
+ ApiGatewayLoggingRole:
+ Type: "AWS::IAM::Role"
+ Properties:
+ AssumeRolePolicyDocument:
+ Version: "2012-10-17"
+ Statement:
+ - Effect: Allow
+ Principal:
+ Service:
+ - "apigateway.amazonaws.com"
+ Action: "sts:AssumeRole"
+ Path: "/"
+ ManagedPolicyArns:
+ - !Sub "arn:${AWS::Partition}:iam::aws:policy/service-role/AmazonAPIGatewayPushToCloudWatchLogs"
+
+Outputs:
+ HelloWorldServerlessApi:
+ Description: "API Gateway endpoint URL for Prod stage for Hello World function"
+ Value: !Sub "https://${HelloWorldServerlessApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/"
+
diff --git a/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/sfn-function/app.py b/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/sfn-function/app.py
new file mode 100644
index 0000000000..771b19ceb3
--- /dev/null
+++ b/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/sfn-function/app.py
@@ -0,0 +1,6 @@
+
+def handler(event, context):
+ print("Hello world from ChildStackHelloWorldServerlessApi/world function")
+ print("Hello world from ChildStack/SfnFunction function")
+ print("this should be filtered ChildStackSfnFunction")
+ return {}
diff --git a/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/template.yaml b/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/template.yaml
new file mode 100644
index 0000000000..c6f1aef18f
--- /dev/null
+++ b/tests/integration/testdata/logs/nested-python-apigw-sfn/child-stack/template.yaml
@@ -0,0 +1,128 @@
+AWSTemplateFormatVersion: '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: Logs Command Test
+
+Globals:
+ Function:
+ Timeout: 10
+
+Resources:
+ MyStateMachine:
+ Type: AWS::Serverless::StateMachine
+ Properties:
+ Definition:
+ Comment: A Hello World example of the Amazon States Language using Pass states
+ StartAt: Hello
+ States:
+ Hello:
+ Type: Pass
+ Result: Hello
+ Next: CallFunction
+ CallFunction:
+ Type: Task
+ Resource: !GetAtt SfnFunction.Arn
+ Next: World
+ World:
+ Type: Pass
+ Result: World
+ End: true
+ Type: EXPRESS
+ Policies:
+ - Version: '2012-10-17'
+ Statement:
+ - Effect: "Allow"
+ Action:
+ - "logs:*"
+ Resource: "*"
+ - Version: '2012-10-17'
+ Statement:
+ - Effect: "Allow"
+ Action:
+ - "lambda:InvokeFunction"
+ Resource:
+ - !GetAtt SfnFunction.Arn
+ Logging:
+ Level: ALL
+ Destinations:
+ - CloudWatchLogsLogGroup:
+ LogGroupArn: !GetAtt MyStateMachineLogGroup.Arn
+ Tracing:
+ Enabled: True
+ Events:
+ GetApi:
+ Type: Api
+ Properties:
+ Path: /world
+ Method: get
+ RestApiId: !Ref HelloWorldServerlessApi
+
+ MyStateMachineLogGroup:
+ Type: AWS::Logs::LogGroup
+ Properties:
+ LogGroupName: !Join ['-', ['/aws/vendedlogs/', !Select [2, !Split ['/', !Ref AWS::StackId]]]]
+
+ ApiGwFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ CodeUri: apigw-function/
+ Handler: app.handler
+ Runtime: python3.9
+ Tracing: Active
+ Events:
+ HelloWorld:
+ Type: Api
+ Properties:
+ Path: /hello
+ Method: get
+ RestApiId: !Ref HelloWorldServerlessApi
+
+ SfnFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ CodeUri: sfn-function/
+ Handler: app.handler
+ Runtime: python3.9
+ Tracing: Active
+
+ HelloWorldServerlessApi:
+ Type: AWS::Serverless::Api
+ Properties:
+ StageName: "Prod"
+ TracingEnabled: True
+ MethodSettings:
+ - LoggingLevel: INFO
+ ResourcePath: '/*'
+ HttpMethod: '*'
+
+ ApiGwAccountConfig:
+ Type: "AWS::ApiGateway::Account"
+ Properties:
+ CloudWatchRoleArn: !GetAtt "ApiGatewayLoggingRole.Arn"
+ ApiGatewayLoggingRole:
+ Type: "AWS::IAM::Role"
+ Properties:
+ AssumeRolePolicyDocument:
+ Version: "2012-10-17"
+ Statement:
+ - Effect: Allow
+ Principal:
+ Service:
+ - "apigateway.amazonaws.com"
+ Action: "sts:AssumeRole"
+ Path: "/"
+ ManagedPolicyArns:
+ - !Sub "arn:${AWS::Partition}:iam::aws:policy/service-role/AmazonAPIGatewayPushToCloudWatchLogs"
+
+ GrandChildStack:
+ Type: AWS::Serverless::Application
+ Properties:
+ Location: grand-child-stack/template.yaml
+
+Outputs:
+ HelloWorldServerlessApi:
+ Description: "API Gateway endpoint URL for Prod stage for Hello World function"
+ Value: !Sub "https://${HelloWorldServerlessApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/"
+ GrandChildStackHelloWorldServerlessApi:
+ Description: "API Gateway endpoint URL from grand-child stack"
+ Value: !GetAtt [GrandChildStack, Outputs.HelloWorldServerlessApi]
+
diff --git a/tests/integration/testdata/logs/nested-python-apigw-sfn/sfn-function/app.py b/tests/integration/testdata/logs/nested-python-apigw-sfn/sfn-function/app.py
new file mode 100644
index 0000000000..bce563d058
--- /dev/null
+++ b/tests/integration/testdata/logs/nested-python-apigw-sfn/sfn-function/app.py
@@ -0,0 +1,6 @@
+
+def handler(event, context):
+ print("Hello world from HelloWorldServerlessApi/world function")
+ print("Hello world from SfnFunction function")
+ print("this should be filtered SfnFunction")
+ return {}
diff --git a/tests/integration/testdata/logs/nested-python-apigw-sfn/template.yaml b/tests/integration/testdata/logs/nested-python-apigw-sfn/template.yaml
new file mode 100644
index 0000000000..5506f108f3
--- /dev/null
+++ b/tests/integration/testdata/logs/nested-python-apigw-sfn/template.yaml
@@ -0,0 +1,131 @@
+AWSTemplateFormatVersion: '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: Logs Command Test
+
+Globals:
+ Function:
+ Timeout: 10
+
+Resources:
+ MyStateMachine:
+ Type: AWS::Serverless::StateMachine
+ Properties:
+ Definition:
+ Comment: A Hello World example of the Amazon States Language using Pass states
+ StartAt: Hello
+ States:
+ Hello:
+ Type: Pass
+ Result: Hello
+ Next: CallFunction
+ CallFunction:
+ Type: Task
+ Resource: !GetAtt SfnFunction.Arn
+ Next: World
+ World:
+ Type: Pass
+ Result: World
+ End: true
+ Type: EXPRESS
+ Policies:
+ - Version: '2012-10-17'
+ Statement:
+ - Effect: "Allow"
+ Action:
+ - "logs:*"
+ Resource: "*"
+ - Version: '2012-10-17'
+ Statement:
+ - Effect: "Allow"
+ Action:
+ - "lambda:InvokeFunction"
+ Resource:
+ - !GetAtt SfnFunction.Arn
+ Logging:
+ Level: ALL
+ Destinations:
+ - CloudWatchLogsLogGroup:
+ LogGroupArn: !GetAtt MyStateMachineLogGroup.Arn
+ Tracing:
+ Enabled: True
+ Events:
+ GetApi:
+ Type: Api
+ Properties:
+ Path: /world
+ Method: get
+ RestApiId: !Ref HelloWorldServerlessApi
+
+ MyStateMachineLogGroup:
+ Type: AWS::Logs::LogGroup
+ Properties:
+ LogGroupName: !Join ['-', ['/aws/vendedlogs/', !Select [2, !Split ['/', !Ref AWS::StackId]]]]
+
+ ApiGwFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ CodeUri: apigw-function/
+ Handler: app.handler
+ Runtime: python3.9
+ Tracing: Active
+ Events:
+ HelloWorld:
+ Type: Api
+ Properties:
+ Path: /hello
+ Method: get
+ RestApiId: !Ref HelloWorldServerlessApi
+
+ SfnFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ CodeUri: sfn-function/
+ Handler: app.handler
+ Runtime: python3.9
+ Tracing: Active
+
+ HelloWorldServerlessApi:
+ Type: AWS::Serverless::Api
+ Properties:
+ StageName: "Prod"
+ TracingEnabled: True
+ MethodSettings:
+ - LoggingLevel: INFO
+ ResourcePath: '/*'
+ HttpMethod: '*'
+
+ ApiGwAccountConfig:
+ Type: "AWS::ApiGateway::Account"
+ Properties:
+ CloudWatchRoleArn: !GetAtt "ApiGatewayLoggingRole.Arn"
+ ApiGatewayLoggingRole:
+ Type: "AWS::IAM::Role"
+ Properties:
+ AssumeRolePolicyDocument:
+ Version: "2012-10-17"
+ Statement:
+ - Effect: Allow
+ Principal:
+ Service:
+ - "apigateway.amazonaws.com"
+ Action: "sts:AssumeRole"
+ Path: "/"
+ ManagedPolicyArns:
+ - !Sub "arn:${AWS::Partition}:iam::aws:policy/service-role/AmazonAPIGatewayPushToCloudWatchLogs"
+
+ ChildStack:
+ Type: AWS::Serverless::Application
+ Properties:
+ Location: child-stack/template.yaml
+
+Outputs:
+ HelloWorldServerlessApi:
+ Description: "API Gateway endpoint URL for Prod stage for Hello World function"
+ Value: !Sub "https://${HelloWorldServerlessApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/"
+ ChildStackHelloWorldServerlessApi:
+ Description: "API Gateway endpoint URL from child stack"
+ Value: !GetAtt [ChildStack, Outputs.HelloWorldServerlessApi]
+ ChildStackGrandChildStackHelloWorldServerlessApi:
+ Description: "API Gateway endpoint URL from grand-child stack"
+ Value: !GetAtt [ChildStack, Outputs.GrandChildStackHelloWorldServerlessApi]
+
diff --git a/tests/integration/testdata/logs/python-apigw-sfn/apigw-function/app.py b/tests/integration/testdata/logs/python-apigw-sfn/apigw-function/app.py
index 1b6f8f02c4..41092111f7 100644
--- a/tests/integration/testdata/logs/python-apigw-sfn/apigw-function/app.py
+++ b/tests/integration/testdata/logs/python-apigw-sfn/apigw-function/app.py
@@ -1,5 +1,6 @@
def handler(event, context):
+ print("Hello world from HelloWorldServerlessApi/hello function")
print("Hello world from ApiGwFunction function")
- print("this should be filtered")
+ print("this should be filtered ApiGwFunction")
return {}
diff --git a/tests/integration/testdata/logs/python-apigw-sfn/sfn-function/app.py b/tests/integration/testdata/logs/python-apigw-sfn/sfn-function/app.py
index 4405b2e9ee..bce563d058 100644
--- a/tests/integration/testdata/logs/python-apigw-sfn/sfn-function/app.py
+++ b/tests/integration/testdata/logs/python-apigw-sfn/sfn-function/app.py
@@ -1,5 +1,6 @@
def handler(event, context):
+ print("Hello world from HelloWorldServerlessApi/world function")
print("Hello world from SfnFunction function")
- print("this should be filtered")
+ print("this should be filtered SfnFunction")
return {}
diff --git a/tests/integration/testdata/sync/code/after/function/requirements.txt b/tests/integration/testdata/sync/code/after/function/requirements.txt
index 296d654528..18ce7ecdd9 100644
--- a/tests/integration/testdata/sync/code/after/function/requirements.txt
+++ b/tests/integration/testdata/sync/code/after/function/requirements.txt
@@ -1 +1,2 @@
-numpy
\ No newline at end of file
+numpy
+requests
\ No newline at end of file
diff --git a/tests/integration/testdata/sync/code/after/nodejs_function/app.js b/tests/integration/testdata/sync/code/after/nodejs_function/app.js
new file mode 100644
index 0000000000..4a2983dffd
--- /dev/null
+++ b/tests/integration/testdata/sync/code/after/nodejs_function/app.js
@@ -0,0 +1,21 @@
+import * as faker from '@faker-js/faker';
+
+const name = faker.faker.name.firstName();
+let response;
+
+exports.lambdaHandler = async (event, context) => {
+ try {
+ response = {
+ 'statusCode': 200,
+ 'body': JSON.stringify({
+ message: 'Hello world!',
+ extra_message: name
+ })
+ }
+ } catch (err) {
+ console.log(err);
+ return err;
+ }
+
+ return response
+};
diff --git a/tests/integration/testdata/sync/code/after/nodejs_function/package.json b/tests/integration/testdata/sync/code/after/nodejs_function/package.json
new file mode 100644
index 0000000000..9a53f32b35
--- /dev/null
+++ b/tests/integration/testdata/sync/code/after/nodejs_function/package.json
@@ -0,0 +1,12 @@
+{
+ "name": "hello_world",
+ "version": "1.0.0",
+ "description": "hello world sample for NodeJS",
+ "main": "app.js",
+ "author": "SAM CLI",
+ "license": "MIT",
+ "dependencies": {
+ "axios": ">=0.21.1",
+ "@faker-js/faker": "7.1.0"
+ }
+}
\ No newline at end of file
diff --git a/tests/integration/testdata/sync/code/after/python_function_no_deps/app_with_numpy.py b/tests/integration/testdata/sync/code/after/python_function_no_deps/app_with_numpy.py
new file mode 100644
index 0000000000..f6d374214d
--- /dev/null
+++ b/tests/integration/testdata/sync/code/after/python_function_no_deps/app_with_numpy.py
@@ -0,0 +1,12 @@
+import json
+import numpy as np
+
+def lambda_handler(event, context):
+
+ return {
+ "statusCode": 200,
+ "body": json.dumps({
+ "message": "hello mars",
+ "extra_message": np.array([1, 2, 3, 4, 5, 6]).tolist() # checking external library call will succeed
+ }),
+ }
diff --git a/tests/integration/testdata/sync/code/after/python_function_no_deps/app_without_numpy.py b/tests/integration/testdata/sync/code/after/python_function_no_deps/app_without_numpy.py
new file mode 100644
index 0000000000..8d01d1887b
--- /dev/null
+++ b/tests/integration/testdata/sync/code/after/python_function_no_deps/app_without_numpy.py
@@ -0,0 +1,10 @@
+import json
+
+def lambda_handler(event, context):
+
+ return {
+ "statusCode": 200,
+ "body": json.dumps({
+ "message": "hello mars",
+ }),
+ }
diff --git a/tests/integration/testdata/sync/code/after/python_function_no_deps/requirements.txt b/tests/integration/testdata/sync/code/after/python_function_no_deps/requirements.txt
new file mode 100644
index 0000000000..296d654528
--- /dev/null
+++ b/tests/integration/testdata/sync/code/after/python_function_no_deps/requirements.txt
@@ -0,0 +1 @@
+numpy
\ No newline at end of file
diff --git a/tests/integration/testdata/sync/code/before/nodejs_function/app.js b/tests/integration/testdata/sync/code/before/nodejs_function/app.js
new file mode 100644
index 0000000000..d30644d033
--- /dev/null
+++ b/tests/integration/testdata/sync/code/before/nodejs_function/app.js
@@ -0,0 +1,18 @@
+let response;
+
+exports.lambdaHandler = async (event, context) => {
+ try {
+ const ret = await axios(url);
+ response = {
+ 'statusCode': 200,
+ 'body': JSON.stringify({
+ message: 'hello world',
+ })
+ }
+ } catch (err) {
+ console.log(err);
+ return err;
+ }
+
+ return response
+};
diff --git a/tests/integration/testdata/sync/code/before/nodejs_function/package.json b/tests/integration/testdata/sync/code/before/nodejs_function/package.json
new file mode 100644
index 0000000000..ac85310bcf
--- /dev/null
+++ b/tests/integration/testdata/sync/code/before/nodejs_function/package.json
@@ -0,0 +1,11 @@
+{
+ "name": "hello_world",
+ "version": "1.0.0",
+ "description": "hello world sample for NodeJS",
+ "main": "app.js",
+ "author": "SAM CLI",
+ "license": "MIT",
+ "dependencies": {
+ "axios": "^0.27.2"
+ }
+}
\ No newline at end of file
diff --git a/tests/integration/testdata/sync/code/before/python_function_no_deps/app.py b/tests/integration/testdata/sync/code/before/python_function_no_deps/app.py
new file mode 100644
index 0000000000..f94f332620
--- /dev/null
+++ b/tests/integration/testdata/sync/code/before/python_function_no_deps/app.py
@@ -0,0 +1,10 @@
+import json
+
+def lambda_handler(event, context):
+
+ return {
+ "statusCode": 200,
+ "body": json.dumps({
+ "message": "hello world",
+ }),
+ }
diff --git a/tests/integration/testdata/sync/code/before/python_function_no_deps/requirements.txt b/tests/integration/testdata/sync/code/before/python_function_no_deps/requirements.txt
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/integration/testdata/sync/code/before/template-nodejs.yaml b/tests/integration/testdata/sync/code/before/template-nodejs.yaml
new file mode 100644
index 0000000000..c138a8e7a8
--- /dev/null
+++ b/tests/integration/testdata/sync/code/before/template-nodejs.yaml
@@ -0,0 +1,21 @@
+AWSTemplateFormatVersion: "2010-09-09"
+Transform: AWS::Serverless-2016-10-31
+
+Globals:
+ Function:
+ Timeout: 10
+
+Resources:
+ HelloWorldFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ CodeUri: nodejs_function/
+ Handler: app.lambdaHandler
+ Runtime: nodejs16.x
+ Tracing: Active
+
+ HelloWorldApi:
+ Type: AWS::Serverless::Api
+ Properties:
+ StageName: prod
+ DefinitionUri: apigateway/definition.json
diff --git a/tests/integration/testdata/sync/code/before/template-python-no-dependencies.yaml b/tests/integration/testdata/sync/code/before/template-python-no-dependencies.yaml
new file mode 100644
index 0000000000..b95b0c7670
--- /dev/null
+++ b/tests/integration/testdata/sync/code/before/template-python-no-dependencies.yaml
@@ -0,0 +1,14 @@
+AWSTemplateFormatVersion: "2010-09-09"
+Transform: AWS::Serverless-2016-10-31
+
+Globals:
+ Function:
+ Timeout: 10
+
+Resources:
+ HelloWorldFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ CodeUri: python_function_no_deps/
+ Handler: app.lambda_handler
+ Runtime: python3.7
\ No newline at end of file
diff --git a/tests/integration/testdata/sync/infra/before/Java/HelloWorldFunction/pom.xml b/tests/integration/testdata/sync/infra/before/Java/HelloWorldFunction/pom.xml
new file mode 100644
index 0000000000..6c18c4e84e
--- /dev/null
+++ b/tests/integration/testdata/sync/infra/before/Java/HelloWorldFunction/pom.xml
@@ -0,0 +1,58 @@
+
+ 4.0.0
+ helloworld
+ HelloWorld
+ 1.0
+ jar
+ A sample Hello World created for SAM CLI.
+
+ 8
+ 8
+
+
+
+
+ helloworld
+ HelloWorldLayer
+ 1.0
+ provided
+
+
+ com.amazonaws
+ aws-lambda-java-core
+ 1.2.1
+
+
+ com.amazonaws
+ aws-lambda-java-events
+ 3.11.0
+
+
+ junit
+ junit
+ 4.13.2
+ test
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+ 3.2.4
+
+
+
+
+ package
+
+ shade
+
+
+
+
+
+
+
diff --git a/tests/integration/testdata/sync/infra/before/Java/HelloWorldFunction/src/main/java/helloworld/App.java b/tests/integration/testdata/sync/infra/before/Java/HelloWorldFunction/src/main/java/helloworld/App.java
new file mode 100644
index 0000000000..f8ea92bbd6
--- /dev/null
+++ b/tests/integration/testdata/sync/infra/before/Java/HelloWorldFunction/src/main/java/helloworld/App.java
@@ -0,0 +1,53 @@
+package helloworld;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.URL;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import com.amazonaws.services.lambda.runtime.Context;
+import com.amazonaws.services.lambda.runtime.RequestHandler;
+import com.amazonaws.services.lambda.runtime.events.APIGatewayProxyRequestEvent;
+import com.amazonaws.services.lambda.runtime.events.APIGatewayProxyResponseEvent;
+
+import helloworldlayer.SimpleMath;
+
+/**
+ * Handler for requests to Lambda function.
+ */
+public class App implements RequestHandler {
+
+ public APIGatewayProxyResponseEvent handleRequest(final APIGatewayProxyRequestEvent input, final Context context) {
+ Map headers = new HashMap<>();
+ headers.put("Content-Type", "application/json");
+ headers.put("X-Custom-Header", "application/json");
+
+ APIGatewayProxyResponseEvent response = new APIGatewayProxyResponseEvent()
+ .withHeaders(headers);
+
+ int sumResult = SimpleMath.sum(7, 5);
+
+ try {
+ final String pageContents = this.getPageContents("https://checkip.amazonaws.com");
+ String output = String.format("{ \"message\": \"hello world\", \"location\": \"%s\", \"sum\": %d }", pageContents, sumResult);
+
+ return response
+ .withStatusCode(200)
+ .withBody(output);
+ } catch (IOException e) {
+ return response
+ .withBody("{}")
+ .withStatusCode(500);
+ }
+ }
+
+ private String getPageContents(String address) throws IOException{
+ URL url = new URL(address);
+ try(BufferedReader br = new BufferedReader(new InputStreamReader(url.openStream()))) {
+ return br.lines().collect(Collectors.joining(System.lineSeparator()));
+ }
+ }
+}
diff --git a/tests/integration/testdata/sync/infra/before/Java/HelloWorldLayer/pom.xml b/tests/integration/testdata/sync/infra/before/Java/HelloWorldLayer/pom.xml
new file mode 100644
index 0000000000..637bd8fa44
--- /dev/null
+++ b/tests/integration/testdata/sync/infra/before/Java/HelloWorldLayer/pom.xml
@@ -0,0 +1,36 @@
+
+ 4.0.0
+ helloworld
+ HelloWorldLayer
+ 1.0
+ jar
+ A sample Hello World created for SAM CLI.
+
+ 8
+ 8
+
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+ 3.2.4
+
+
+
+
+ package
+
+ shade
+
+
+
+
+
+
+
diff --git a/tests/integration/testdata/sync/infra/before/Java/HelloWorldLayer/src/main/java/helloworldlayer/SimpleMath.java b/tests/integration/testdata/sync/infra/before/Java/HelloWorldLayer/src/main/java/helloworldlayer/SimpleMath.java
new file mode 100644
index 0000000000..1ad779173b
--- /dev/null
+++ b/tests/integration/testdata/sync/infra/before/Java/HelloWorldLayer/src/main/java/helloworldlayer/SimpleMath.java
@@ -0,0 +1,8 @@
+package helloworldlayer;
+
+public class SimpleMath {
+
+ public static int sum(int a, int b) {
+ return a + b;
+ }
+}
diff --git a/tests/integration/testdata/sync/infra/template-java.yaml b/tests/integration/testdata/sync/infra/template-java.yaml
new file mode 100644
index 0000000000..5e93e2368c
--- /dev/null
+++ b/tests/integration/testdata/sync/infra/template-java.yaml
@@ -0,0 +1,27 @@
+AWSTemplateFormatVersion: "2010-09-09"
+Transform: AWS::Serverless-2016-10-31
+
+Globals:
+ Function:
+ Timeout: 30
+
+Resources:
+ HelloWorldFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ CodeUri: before/Java/HelloWorldFunction
+ Handler: helloworld.App::handleRequest
+ Runtime: java8
+ MemorySize: 512
+ Layers:
+ - !Ref HelloWorldLayer
+
+ HelloWorldLayer:
+ Type: AWS::Serverless::LayerVersion
+ Properties:
+ ContentUri: before/Java/HelloWorldLayer
+ CompatibleRuntimes:
+ - java8
+ Metadata:
+ BuildMethod: java8
+ BuildArchitecture: x86_64
diff --git a/tests/integration/testdata/sync/infra/template-python-after.yaml b/tests/integration/testdata/sync/infra/template-python-after.yaml
index b91dd212dc..24705c7ed2 100644
--- a/tests/integration/testdata/sync/infra/template-python-after.yaml
+++ b/tests/integration/testdata/sync/infra/template-python-after.yaml
@@ -31,7 +31,6 @@ Resources:
HelloWorldLayer:
Type: AWS::Serverless::LayerVersion
Properties:
- LayerName: HelloWorldLayer
Description: Hello World Layer
ContentUri: after/Python/layer/
CompatibleRuntimes:
diff --git a/tests/integration/testdata/sync/infra/template-python-before.yaml b/tests/integration/testdata/sync/infra/template-python-before.yaml
index 19ae2189db..407f643797 100644
--- a/tests/integration/testdata/sync/infra/template-python-before.yaml
+++ b/tests/integration/testdata/sync/infra/template-python-before.yaml
@@ -31,7 +31,6 @@ Resources:
HelloWorldLayer:
Type: AWS::Serverless::LayerVersion
Properties:
- LayerName: HelloWorldLayer
Description: Hello World Layer
ContentUri: before/Python/layer/
CompatibleRuntimes:
diff --git a/tests/integration/testdata/sync/infra/template-ruby-after.yaml b/tests/integration/testdata/sync/infra/template-ruby-after.yaml
index dc869b1691..bf3818a7f5 100644
--- a/tests/integration/testdata/sync/infra/template-ruby-after.yaml
+++ b/tests/integration/testdata/sync/infra/template-ruby-after.yaml
@@ -21,7 +21,6 @@ Resources:
HelloWorldRubyLayer:
Type: AWS::Serverless::LayerVersion
Properties:
- LayerName: HelloWorldRubyLayer
Description: Hello World Ruby Layer
ContentUri: after/Ruby/layer/
CompatibleRuntimes:
diff --git a/tests/integration/testdata/sync/infra/template-ruby-before.yaml b/tests/integration/testdata/sync/infra/template-ruby-before.yaml
index f5c312b2cb..c57469007e 100644
--- a/tests/integration/testdata/sync/infra/template-ruby-before.yaml
+++ b/tests/integration/testdata/sync/infra/template-ruby-before.yaml
@@ -21,7 +21,6 @@ Resources:
HelloWorldRubyLayer:
Type: AWS::Serverless::LayerVersion
Properties:
- LayerName: HelloWorldRubyLayer
Description: Hello World Ruby Layer
ContentUri: before/Ruby/layer/
CompatibleRuntimes:
diff --git a/tests/integration/testdata/sync/nested/before/child_stack/template.yaml b/tests/integration/testdata/sync/nested/before/child_stack/template.yaml
index bdd94e5c7e..91caa7142a 100644
--- a/tests/integration/testdata/sync/nested/before/child_stack/template.yaml
+++ b/tests/integration/testdata/sync/nested/before/child_stack/template.yaml
@@ -29,7 +29,6 @@ Resources:
HelloWorldLayer:
Type: AWS::Serverless::LayerVersion
Properties:
- LayerName: HelloWorldLayer
Description: Hello World Layer
# Currently if a base_dir option is provided, the nested stack code URIs
# Needs to be relative to the base_dir instead of the child templates
diff --git a/tests/integration/testdata/sync/nested/before/template.yaml b/tests/integration/testdata/sync/nested/before/template.yaml
index 8466d25b19..7afeb10382 100644
--- a/tests/integration/testdata/sync/nested/before/template.yaml
+++ b/tests/integration/testdata/sync/nested/before/template.yaml
@@ -19,7 +19,6 @@ Resources:
HelloWorldLayer:
Type: AWS::Serverless::LayerVersion
Properties:
- LayerName: HelloWorldLayer
Description: Hello World Layer
ContentUri: root_layer/
CompatibleRuntimes:
diff --git a/tests/integration/testdata/sync/nested_intrinsics/before/child_stack/child_layer/template.yaml b/tests/integration/testdata/sync/nested_intrinsics/before/child_stack/child_layer/template.yaml
index e0a43abef9..6c2089f68d 100644
--- a/tests/integration/testdata/sync/nested_intrinsics/before/child_stack/child_layer/template.yaml
+++ b/tests/integration/testdata/sync/nested_intrinsics/before/child_stack/child_layer/template.yaml
@@ -5,7 +5,6 @@ Resources:
HelloWorldLayer:
Type: AWS::Serverless::LayerVersion
Properties:
- LayerName: HelloWorldLayer
Description: Hello World Layer
ContentUri: layer/
CompatibleRuntimes:
diff --git a/tests/integration/traces/test_traces_command.py b/tests/integration/traces/test_traces_command.py
index 896dd479b3..cc0ee89693 100644
--- a/tests/integration/traces/test_traces_command.py
+++ b/tests/integration/traces/test_traces_command.py
@@ -83,7 +83,7 @@ def test_function_traces(self, function_name):
LOG.info("Invoking function %s", function_name)
lambda_invoke_result = self.lambda_client.invoke(FunctionName=function_id)
LOG.info("Lambda invoke result %s", lambda_invoke_result)
- cmd_list = self.get_traces_command_list(beta_features=True)
+ cmd_list = self.get_traces_command_list()
self._check_traces(cmd_list, expected_trace_output)
@parameterized.expand([("ApiGwFunction",), ("SfnFunction",)])
@@ -111,7 +111,7 @@ def test_trace_id(self, function_name):
trace_id = trace_summaries[0].get("Id")
LOG.info("Trace id: %s", trace_id)
- cmd_list = self.get_traces_command_list(trace_id=trace_id, beta_features=True)
+ cmd_list = self.get_traces_command_list(trace_id=trace_id)
self._check_traces(cmd_list, expected_trace_output, has_service_graph=False)
@parameterized.expand([("ApiGwFunction",), ("SfnFunction",)])
@@ -124,7 +124,7 @@ def test_trace_start_time(self, function_name):
lambda_invoke_result = self.lambda_client.invoke(FunctionName=function_id)
LOG.info("Lambda invoke result %s", lambda_invoke_result)
- cmd_list = self.get_traces_command_list(start_time=str(start_time), beta_features=True)
+ cmd_list = self.get_traces_command_list(start_time=str(start_time))
self._check_traces(cmd_list, expected_trace_output)
@parameterized.expand([("ApiGwFunction",), ("SfnFunction",)])
@@ -137,7 +137,7 @@ def test_trace_end_time(self, function_name):
LOG.info("Lambda invoke result %s", lambda_invoke_result)
end_time = datetime.utcnow()
- cmd_list = self.get_traces_command_list(end_time=str(end_time), beta_features=True)
+ cmd_list = self.get_traces_command_list(end_time=str(end_time))
self._check_traces(cmd_list, expected_trace_output)
@parameterized.expand([("ApiGwFunction",), ("SfnFunction",)])
@@ -149,7 +149,7 @@ def test_traces_with_tail(self, function_name: str):
lambda_invoke_result = self.lambda_client.invoke(FunctionName=function_id)
LOG.info("Lambda invoke result %s", lambda_invoke_result)
- cmd_list = self.get_traces_command_list(tail=True, beta_features=True)
+ cmd_list = self.get_traces_command_list(tail=True)
tail_process = start_persistent_process(cmd_list)
def _check_traces(output: str, _: List[str]) -> bool:
@@ -171,7 +171,7 @@ def test_traces_with_output_option(self, function_name, output):
lambda_invoke_result = self.lambda_client.invoke(FunctionName=function_id)
LOG.info("Lambda invoke result %s", lambda_invoke_result)
- cmd_list = self.get_traces_command_list(output=output, beta_features=True)
+ cmd_list = self.get_traces_command_list(output=output)
output_check = OutputOption.json if output == OutputOption.json.name else OutputOption.text
self._check_traces(cmd_list, expected_trace_output, output=output_check)
diff --git a/tests/integration/traces/traces_integ_base.py b/tests/integration/traces/traces_integ_base.py
index 76cc2872ac..97afafda96 100644
--- a/tests/integration/traces/traces_integ_base.py
+++ b/tests/integration/traces/traces_integ_base.py
@@ -17,7 +17,6 @@ def get_traces_command_list(
end_time: Optional[str] = None,
tail: bool = False,
output: Optional[str] = None,
- beta_features: bool = False,
):
command_list = [get_sam_command(), "traces"]
@@ -31,7 +30,5 @@ def get_traces_command_list(
command_list += ["--output", output]
if tail:
command_list += ["--tail"]
- if beta_features:
- command_list += ["--beta-features"]
return command_list
diff --git a/tests/testing_utils.py b/tests/testing_utils.py
index 7b56a82c30..d50dc5ffe9 100644
--- a/tests/testing_utils.py
+++ b/tests/testing_utils.py
@@ -45,10 +45,14 @@ def get_sam_command():
def method_to_stack_name(method_name):
"""Method expects method name which can be a full path. Eg: test.integration.test_deploy_command.method_name"""
method_name = method_name.split(".")[-1]
- return f"{method_name.replace('_', '-')}-{CFN_PYTHON_VERSION_SUFFIX}-{uuid4().hex}"[:128]
+ stack_name = f"{method_name.replace('_', '-')}-{CFN_PYTHON_VERSION_SUFFIX}-{uuid4().hex}"
+ if not stack_name.startswith("test"):
+ stack_name = f"test-{stack_name}"
+ return stack_name[:128]
def run_command(command_list, cwd=None, env=None, timeout=TIMEOUT) -> CommandResult:
+ LOG.info("Running command: %s", " ".join(command_list))
process_execute = Popen(command_list, cwd=cwd, env=env, stdout=PIPE, stderr=PIPE)
try:
stdout_data, stderr_data = process_execute.communicate(timeout=timeout)
@@ -63,6 +67,8 @@ def run_command(command_list, cwd=None, env=None, timeout=TIMEOUT) -> CommandRes
def run_command_with_input(command_list, stdin_input, timeout=TIMEOUT) -> CommandResult:
+ LOG.info("Running command: %s", " ".join(command_list))
+ LOG.info("With input: %s", stdin_input)
process_execute = Popen(command_list, stdout=PIPE, stderr=PIPE, stdin=PIPE)
try:
stdout_data, stderr_data = process_execute.communicate(stdin_input, timeout=timeout)
diff --git a/tests/unit/commands/_utils/test_command_exception_handler.py b/tests/unit/commands/_utils/test_command_exception_handler.py
new file mode 100644
index 0000000000..f206de5e98
--- /dev/null
+++ b/tests/unit/commands/_utils/test_command_exception_handler.py
@@ -0,0 +1,80 @@
+from typing import Callable
+from unittest import TestCase
+
+from botocore.exceptions import NoRegionError, ClientError
+from parameterized import parameterized
+
+from samcli.commands._utils.command_exception_handler import command_exception_handler
+from samcli.commands.exceptions import RegionError, CredentialsError, UserException
+
+
+@command_exception_handler
+def echo_command(proxy_function: Callable):
+ return proxy_function()
+
+
+class UnhandledException(Exception):
+ pass
+
+
+class TestCommandExceptionHandler(TestCase):
+ def test_no_exception(self):
+ self.assertEqual(echo_command(lambda: 5), 5)
+
+ def test_no_region_error(self):
+ def _proxy_function_that_raises_region_error():
+ raise NoRegionError()
+
+ with self.assertRaises(RegionError):
+ echo_command(_proxy_function_that_raises_region_error)
+
+ @parameterized.expand([("ExpiredToken",), ("ExpiredTokenException",)])
+ def test_expired_token_error(self, error_code):
+ def _proxy_function_that_raises_expired_token():
+ raise ClientError({"Error": {"Code": error_code}}, "mock")
+
+ with self.assertRaises(CredentialsError):
+ echo_command(_proxy_function_that_raises_expired_token)
+
+ def test_unhandled_client_error(self):
+ client_error = ClientError({"Error": {"Code": "UnhandledCode"}}, "mock")
+
+ def _proxy_function_that_raises_unhandled_client_error():
+ raise client_error
+
+ with self.assertRaises(ClientError) as ex:
+ echo_command(_proxy_function_that_raises_unhandled_client_error)
+ self.assertEqual(client_error, ex)
+
+ def test_unhandled_exception(self):
+ def _proxy_function_that_raises_unhandled_exception():
+ raise UnhandledException()
+
+ with self.assertRaises(UnhandledException):
+ echo_command(_proxy_function_that_raises_unhandled_exception)
+
+
+class CustomException(Exception):
+ pass
+
+
+class CustomUserException(UserException):
+ pass
+
+
+def _custom_handler(ex: CustomException):
+ raise CustomUserException("Error")
+
+
+@command_exception_handler({CustomException: _custom_handler})
+def command_with_custom_exception_handler(proxy_function: Callable):
+ proxy_function()
+
+
+class TestCommandExceptionHandlerWithCustomHandler(TestCase):
+ def test_custom_exception(self):
+ def _proxy_custom_exception():
+ raise CustomException()
+
+ with self.assertRaises(CustomUserException):
+ command_with_custom_exception_handler(_proxy_custom_exception)
diff --git a/tests/unit/commands/_utils/test_experimental.py b/tests/unit/commands/_utils/test_experimental.py
index 4e20daaaf3..ed94cb35fc 100644
--- a/tests/unit/commands/_utils/test_experimental.py
+++ b/tests/unit/commands/_utils/test_experimental.py
@@ -15,6 +15,7 @@
ExperimentalEntry,
ExperimentalFlag,
)
+from samcli.lib.utils.colors import Colored
class TestExperimental(TestCase):
@@ -58,13 +59,13 @@ def test_set_experimental(self):
self.gc_mock.return_value.set_value.assert_called_once_with(config_entry, False, is_flag=True, flush=False)
def test_get_all_experimental(self):
- self.assertEqual(len(get_all_experimental()), 3)
+ self.assertEqual(len(get_all_experimental()), 2)
def test_get_all_experimental_statues(self):
- self.assertEqual(len(get_all_experimental_statues()), 3)
+ self.assertEqual(len(get_all_experimental_statues()), 2)
def test_get_enabled_experimental_flags(self):
- self.assertEqual(len(get_enabled_experimental_flags()), 3)
+ self.assertEqual(len(get_enabled_experimental_flags()), 2)
@patch("samcli.commands._utils.experimental.set_experimental")
@patch("samcli.commands._utils.experimental.get_all_experimental")
@@ -118,5 +119,5 @@ def test_prompt_experimental(self, update_experimental_context, enabled_mock, co
prompt_experimental(config_entry, prompt)
set_experimental_mock.assert_called_once_with(config_entry=config_entry, enabled=True)
enabled_mock.assert_called_once_with(config_entry)
- confirm_mock.assert_called_once_with(prompt, default=False)
+ confirm_mock.assert_called_once_with(Colored().yellow(prompt), default=False)
update_experimental_context.assert_called_once()
diff --git a/tests/unit/commands/buildcmd/test_build_context.py b/tests/unit/commands/buildcmd/test_build_context.py
index ef9f2ce913..dde06145d2 100644
--- a/tests/unit/commands/buildcmd/test_build_context.py
+++ b/tests/unit/commands/buildcmd/test_build_context.py
@@ -467,6 +467,7 @@ def test_must_return_many_functions_to_build(
self.assertEqual(context.stacks, [stack])
self.assertEqual(context.manifest_path_override, os.path.abspath("manifest_path"))
self.assertEqual(context.mode, "buildmode")
+ self.assertFalse(context.use_base_dir)
self.assertFalse(context.is_building_specific_resource)
resources_to_build = context.resources_to_build
self.assertEqual(resources_to_build.functions, [func1, func2, func6])
diff --git a/tests/unit/commands/delete/test_command.py b/tests/unit/commands/delete/test_command.py
index 7160553793..19f7542241 100644
--- a/tests/unit/commands/delete/test_command.py
+++ b/tests/unit/commands/delete/test_command.py
@@ -31,6 +31,8 @@ def test_all_args(self, mock_delete_context, mock_delete_click):
config_env=self.config_env,
profile=self.profile,
no_prompts=self.no_prompts,
+ s3_bucket=self.s3_bucket,
+ s3_prefix=self.s3_prefix,
)
mock_delete_context.assert_called_with(
@@ -40,6 +42,8 @@ def test_all_args(self, mock_delete_context, mock_delete_click):
config_file=self.config_file,
config_env=self.config_env,
no_prompts=self.no_prompts,
+ s3_bucket=self.s3_bucket,
+ s3_prefix=self.s3_prefix,
)
context_mock.run.assert_called_with()
diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py
index f4bc47d861..d14b88ebf9 100644
--- a/tests/unit/commands/delete/test_delete_context.py
+++ b/tests/unit/commands/delete/test_delete_context.py
@@ -26,6 +26,8 @@ def test_delete_context_stack_does_not_exist(self, patched_click_get_current_con
config_env="default",
profile="test",
no_prompts=True,
+ s3_bucket=None,
+ s3_prefix=None,
) as delete_context:
delete_context.run()
@@ -44,6 +46,8 @@ def test_delete_context_enter(self):
config_env="default",
profile="test",
no_prompts=True,
+ s3_bucket=None,
+ s3_prefix=None,
) as delete_context:
self.assertEqual(delete_context.parse_config_file.call_count, 1)
self.assertEqual(delete_context.init_clients.call_count, 1)
@@ -73,6 +77,8 @@ def test_delete_context_parse_config_file(self, patched_click_get_current_contex
config_env="default",
profile=None,
no_prompts=True,
+ s3_bucket=None,
+ s3_prefix=None,
) as delete_context:
self.assertEqual(delete_context.stack_name, "test")
self.assertEqual(delete_context.region, "us-east-1")
@@ -93,6 +99,8 @@ def test_delete_no_user_input(self, patched_click_get_current_context, patched_c
config_env=None,
profile=None,
no_prompts=None,
+ s3_bucket=None,
+ s3_prefix=None,
) as delete_context:
delete_context.run()
@@ -136,6 +144,8 @@ def test_delete_context_valid_execute_run(self, patched_click_get_current_contex
config_env="default",
profile=None,
no_prompts=True,
+ s3_bucket=None,
+ s3_prefix=None,
) as delete_context:
delete_context.run()
@@ -163,13 +173,17 @@ def test_delete_context_no_s3_bucket(
config_env="default",
profile="test",
no_prompts=True,
+ s3_bucket=None,
+ s3_prefix=None,
) as delete_context:
delete_context.run()
expected_click_secho_calls = [
call(
- "\nWarning: s3_bucket and s3_prefix information could not be obtained from local config file"
- " or cloudformation template, delete the s3 files manually if required",
+ "\nWarning: Cannot resolve s3 bucket information from command options"
+ " , local config file or cloudformation template. Please use"
+ " --s3-bucket next time and"
+ " delete s3 files manually if required.",
fg="yellow",
),
]
@@ -201,6 +215,8 @@ def test_guided_prompts_s3_bucket_prefix_present_execute_run(
config_env="default",
profile="test",
no_prompts=None,
+ s3_bucket=None,
+ s3_prefix=None,
) as delete_context:
patched_confirm.side_effect = [True, False, True]
delete_context.s3_bucket = "s3_bucket"
@@ -258,6 +274,8 @@ def test_guided_prompts_s3_bucket_present_no_prefix_execute_run(
config_env="default",
profile="test",
no_prompts=None,
+ s3_bucket=None,
+ s3_prefix=None,
) as delete_context:
patched_confirm.side_effect = [True, True]
delete_context.s3_bucket = "s3_bucket"
@@ -307,6 +325,8 @@ def test_guided_prompts_ecr_companion_stack_present_execute_run(
config_env="default",
profile="test",
no_prompts=None,
+ s3_bucket=None,
+ s3_prefix=None,
) as delete_context:
patched_confirm.side_effect = [True, False, True, True, True]
delete_context.s3_bucket = "s3_bucket"
@@ -384,6 +404,8 @@ def test_no_prompts_input_is_ecr_companion_stack_present_execute_run(
config_env="default",
profile="test",
no_prompts=True,
+ s3_bucket=None,
+ s3_prefix=None,
) as delete_context:
delete_context.s3_bucket = "s3_bucket"
delete_context.s3_prefix = "s3_prefix"
@@ -424,6 +446,8 @@ def test_retain_resources_delete_stack(self, patched_click_get_current_context,
config_env="default",
profile="test",
no_prompts=True,
+ s3_bucket=None,
+ s3_prefix=None,
) as delete_context:
delete_context.s3_bucket = "s3_bucket"
delete_context.s3_prefix = "s3_prefix"
@@ -434,3 +458,50 @@ def test_retain_resources_delete_stack(self, patched_click_get_current_context,
self.assertEqual(CfnUtils.get_stack_template.call_count, 2)
self.assertEqual(CfnUtils.delete_stack.call_count, 4)
self.assertEqual(CfnUtils.wait_for_delete.call_count, 4)
+
+ @patch.object(DeleteContext, "parse_config_file", MagicMock())
+ @patch.object(DeleteContext, "init_clients", MagicMock())
+ def test_s3_option_flag(self):
+ with DeleteContext(
+ stack_name="test",
+ region="us-east-1",
+ config_file="samconfig.toml",
+ config_env="default",
+ profile="test",
+ no_prompts=True,
+ s3_bucket="s3_bucket",
+ s3_prefix="s3_prefix",
+ ) as delete_context:
+ self.assertEqual(delete_context.s3_bucket, "s3_bucket")
+ self.assertEqual(delete_context.s3_prefix, "s3_prefix")
+
+ @patch.object(
+ TomlProvider,
+ "__call__",
+ MagicMock(
+ return_value=(
+ {
+ "stack_name": "test",
+ "region": "us-east-1",
+ "profile": "developer",
+ "s3_bucket": "s3_bucket",
+ "s3_prefix": "s3_prefix",
+ }
+ )
+ ),
+ )
+ @patch.object(DeleteContext, "parse_config_file", MagicMock())
+ @patch.object(DeleteContext, "init_clients", MagicMock())
+ def test_s3_option_flag_overrides_config(self):
+ with DeleteContext(
+ stack_name="test",
+ region="us-east-1",
+ config_file="samconfig.toml",
+ config_env="default",
+ profile="test",
+ no_prompts=True,
+ s3_bucket="s3_bucket_override",
+ s3_prefix="s3_prefix_override",
+ ) as delete_context:
+ self.assertEqual(delete_context.s3_bucket, "s3_bucket_override")
+ self.assertEqual(delete_context.s3_prefix, "s3_prefix_override")
diff --git a/tests/unit/commands/local/cli_common/test_invoke_context.py b/tests/unit/commands/local/cli_common/test_invoke_context.py
index 9406d20f14..585b0ad125 100644
--- a/tests/unit/commands/local/cli_common/test_invoke_context.py
+++ b/tests/unit/commands/local/cli_common/test_invoke_context.py
@@ -5,8 +5,15 @@
import os
from samcli.commands._utils.template import TemplateFailedParsingException
-from samcli.commands.local.cli_common.user_exceptions import InvokeContextException, DebugContextException
-from samcli.commands.local.cli_common.invoke_context import InvokeContext, ContainersInitializationMode, ContainersMode
+from samcli.commands.local.cli_common.invoke_context import (
+ InvokeContext,
+ ContainersInitializationMode,
+ ContainersMode,
+ DebugContextException,
+ DockerIsNotReachableException,
+ NoFunctionIdentifierProvidedException,
+ InvalidEnvironmentVariablesFileException,
+)
from unittest import TestCase
from unittest.mock import Mock, PropertyMock, patch, ANY, mock_open, call
@@ -398,7 +405,7 @@ def test_must_raise_if_docker_is_not_reachable(self, SamFunctionProviderMock):
invoke_context._get_container_manager = Mock()
invoke_context._get_container_manager.return_value = container_manager_mock
- with self.assertRaises(InvokeContextException) as ex_ctx:
+ with self.assertRaises(DockerIsNotReachableException) as ex_ctx:
invoke_context.__enter__()
self.assertEqual(
@@ -411,7 +418,7 @@ def test_must_raise_if_template_cannot_be_parsed(self, get_buildable_stacks_mock
invoke_context = InvokeContext("template-file")
get_buildable_stacks_mock.side_effect = TemplateFailedParsingException("")
- with self.assertRaises(InvokeContextException) as ex_ctx:
+ with self.assertRaises(TemplateFailedParsingException) as ex_ctx:
invoke_context.__enter__()
@@ -489,7 +496,7 @@ def test_must_raise_if_more_than_one_function(self):
context._function_provider = Mock()
context._function_provider.get_all.return_value = [Mock(), Mock(), Mock()] # Provider returns three functions
- with self.assertRaises(InvokeContextException):
+ with self.assertRaises(NoFunctionIdentifierProvidedException):
context.function_identifier
@@ -1001,7 +1008,7 @@ def test_must_raise_if_failed_to_parse_json(self):
with patch("samcli.commands.local.cli_common.invoke_context.open", m):
- with self.assertRaises(InvokeContextException) as ex_ctx:
+ with self.assertRaises(InvalidEnvironmentVariablesFileException) as ex_ctx:
InvokeContext._get_env_vars_value(filename)
msg = str(ex_ctx.exception)
diff --git a/tests/unit/commands/local/lib/test_local_lambda.py b/tests/unit/commands/local/lib/test_local_lambda.py
index dd3101a380..55675b064e 100644
--- a/tests/unit/commands/local/lib/test_local_lambda.py
+++ b/tests/unit/commands/local/lib/test_local_lambda.py
@@ -3,14 +3,12 @@
"""
import os
import posixpath
-from platform import architecture
from unittest import TestCase
from unittest.mock import Mock, patch
from parameterized import parameterized, param
from samcli.lib.utils.architecture import X86_64, ARM64
-from samcli.commands.local.cli_common.user_exceptions import InvokeContextException
from samcli.commands.local.lib.local_lambda import LocalLambdaRunner
from samcli.lib.providers.provider import Function
from samcli.lib.utils.packagetype import ZIP, IMAGE
diff --git a/tests/unit/commands/local/lib/test_provider.py b/tests/unit/commands/local/lib/test_provider.py
index 9dbca89e97..6e544566f2 100644
--- a/tests/unit/commands/local/lib/test_provider.py
+++ b/tests/unit/commands/local/lib/test_provider.py
@@ -802,26 +802,39 @@ def test_get_resource_full_path_by_id(self, resource_id, expected_full_path):
class TestGetStack(TestCase):
- root_stack = Stack("", "", "template.yaml", None, {})
- child_stack = Stack("", "child", "template.yaml", None, {})
+ root_stack = Stack("", "Root", "template.yaml", None, {})
+ child_stack = Stack("Root", "Child", "root_stack/template.yaml", None, {})
+ child_child_stack = Stack("Root/Child", "ChildChild", "root_stack/child_stack/template.yaml", None, {})
def test_get_parent_stack(self):
- stack = Stack.get_parent_stack(self.child_stack, [self.root_stack, self.child_stack])
+ stack = Stack.get_parent_stack(self.child_stack, [self.root_stack, self.child_stack, self.child_child_stack])
self.assertEqual(stack, self.root_stack)
- stack = Stack.get_parent_stack(self.root_stack, [self.root_stack, self.child_stack])
+ stack = Stack.get_parent_stack(self.root_stack, [self.root_stack, self.child_stack, self.child_child_stack])
self.assertIsNone(stack)
- def test_get_stack_by_logical_id(self):
- stack = Stack.get_stack_by_logical_id("child", [self.root_stack, self.child_stack])
+ def test_get_stack_by_full_path(self):
+ stack = Stack.get_stack_by_full_path("Root/Child", [self.root_stack, self.child_stack, self.child_child_stack])
self.assertEqual(stack, self.child_stack)
- stack = Stack.get_stack_by_logical_id("not_exist", [self.root_stack, self.child_stack])
+ stack = Stack.get_stack_by_full_path("Root", [self.root_stack, self.child_stack, self.child_child_stack])
+ self.assertEqual(stack, self.root_stack)
+
+ stack = Stack.get_stack_by_full_path("Child/Child", [self.root_stack, self.child_stack, self.child_child_stack])
self.assertIsNone(stack)
def test_get_child_stacks(self):
- stack_list = Stack.get_child_stacks(self.root_stack, [self.root_stack, self.child_stack])
+ stack_list = Stack.get_child_stacks(
+ self.root_stack, [self.root_stack, self.child_stack, self.child_child_stack]
+ )
self.assertEqual(stack_list, [self.child_stack])
- stack_list = Stack.get_child_stacks(self.child_stack, [self.root_stack, self.child_stack])
+ stack_list = Stack.get_child_stacks(
+ self.child_stack, [self.root_stack, self.child_stack, self.child_child_stack]
+ )
+ self.assertEqual(stack_list, [self.child_child_stack])
+
+ stack_list = Stack.get_child_stacks(
+ self.child_child_stack, [self.root_stack, self.child_stack, self.child_child_stack]
+ )
self.assertEqual(stack_list, [])
diff --git a/tests/unit/commands/logs/test_command.py b/tests/unit/commands/logs/test_command.py
index ac65365674..d2e1d6c0d8 100644
--- a/tests/unit/commands/logs/test_command.py
+++ b/tests/unit/commands/logs/test_command.py
@@ -1,10 +1,13 @@
import itertools
from unittest import TestCase
-from unittest.mock import Mock, patch, call, ANY
+from unittest.mock import Mock, patch, call
+import pytest
+from botocore.exceptions import ClientError
+from click.testing import CliRunner
from parameterized import parameterized
-from samcli.commands.logs.command import do_cli
+from samcli.commands.logs.command import do_cli, cli
from samcli.lib.observability.util import OutputOption
@@ -88,7 +91,7 @@ def test_logs_command(
patched_boto_resource_provider.assert_called_with(region=self.region, profile=self.profile)
patched_resource_physical_id_resolver.assert_called_with(
- mocked_resource_provider, self.stack_name, self.function_name
+ mocked_resource_provider, mocked_client_provider, self.stack_name, self.function_name
)
fetch_param = not bool(len(cw_log_group))
@@ -109,3 +112,64 @@ def test_logs_command(
mocked_puller.assert_has_calls(
[call.load_time_period(mocked_start_time, mocked_end_time, self.filter_pattern)]
)
+
+ def test_without_stack_name_or_cw_log_group(
+ self, patched_is_experimental_enabled, patched_update_experimental_context
+ ):
+ cli_runner = CliRunner()
+ result = cli_runner.invoke(cli, [])
+ self.assertIn("Please provide '--stack-name' or '--cw-log-group'", result.output)
+
+ @patch("samcli.commands.logs.logs_context.ResourcePhysicalIdResolver.get_resource_information")
+ @patch("samcli.commands.logs.puller_factory.generate_puller")
+ def test_with_stack_name_but_without_cw_log_group_should_succeed(
+ self,
+ patched_generate_puller,
+ patched_get_resource_information,
+ patched_is_experimental_enabled,
+ patched_update_experimental_context,
+ ):
+ cli_runner = CliRunner()
+ cli_runner.invoke(cli, ["--stack-name", "abcdef"])
+ patched_get_resource_information.assert_called_with(True)
+ patched_generate_puller.assert_called_once()
+
+ @patch("samcli.commands.logs.logs_context.ResourcePhysicalIdResolver.get_resource_information")
+ @patch("samcli.commands.logs.puller_factory.generate_puller")
+ def test_with_cw_log_group_but_without_stack_name_should_succeed(
+ self,
+ patched_generate_puller,
+ patched_get_resource_information,
+ patched_is_experimental_enabled,
+ patched_update_experimental_context,
+ ):
+ cli_runner = CliRunner()
+ cli_runner.invoke(cli, ["--cw-log-group", "abcdef"])
+ patched_get_resource_information.assert_called_with(False)
+ patched_generate_puller.assert_called_once()
+
+ def test_with_name_but_without_stack_name_should_fail(
+ self, patched_is_experimental_enabled, patched_update_experimental_context
+ ):
+ cli_runner = CliRunner()
+ result = cli_runner.invoke(cli, ["--name", "abcdef"])
+ self.assertIn("Missing option. Please provide '--stack-name' when using '--name' option", result.output)
+
+ @pytest.fixture(autouse=True)
+ def inject_fixtures(self, caplog):
+ self._caplog = caplog
+
+ @patch("samcli.commands.logs.logs_context.ResourcePhysicalIdResolver.get_resource_information")
+ def test_invalid_stack_name_should_fail(
+ self, patched_get_resource_information, patched_is_experimental_enabled, patched_update_experimental_context
+ ):
+ patched_get_resource_information.side_effect = ClientError(
+ {"Error": {"Code": "ValidationError"}}, "ListStackResources"
+ )
+ self._caplog.set_level(100000)
+ cli_runner = CliRunner()
+ invalid_stack_name = "my-invalid-stack-name"
+ result = cli_runner.invoke(cli, ["--stack-name", invalid_stack_name, "--region", "us-west-2"])
+ self.assertIn(
+ f"Invalid --stack-name parameter. Stack with id '{invalid_stack_name}' does not exist", result.output
+ )
diff --git a/tests/unit/commands/logs/test_logs_context.py b/tests/unit/commands/logs/test_logs_context.py
index be5e6304d8..90d19377d6 100644
--- a/tests/unit/commands/logs/test_logs_context.py
+++ b/tests/unit/commands/logs/test_logs_context.py
@@ -47,6 +47,16 @@ def test_parse_time_raises_exception(self, parse_date_mock):
self.assertEqual(str(ctx.exception), "Unable to parse the time provided by 'some prop'")
+ @patch("samcli.commands.logs.logs_context.parse_date")
+ def test_parse_time_internal_call_raises_exception(self, parse_date_mock):
+ given_input = "some time"
+ parse_date_mock.side_effect = ValueError("Invalid date time")
+
+ with self.assertRaises(UserException) as ctx:
+ parse_time(given_input, "some prop")
+
+ self.assertEqual(str(ctx.exception), "Unable to parse the time information 'some prop': 'some time'")
+
def test_parse_time_empty_time(self):
result = parse_time(None, "some prop")
self.assertIsNone(result)
@@ -54,7 +64,7 @@ def test_parse_time_empty_time(self):
class TestResourcePhysicalIdResolver(TestCase):
def test_get_resource_information_with_resources(self):
- resource_physical_id_resolver = ResourcePhysicalIdResolver(Mock(), "stack_name", ["resource_name"])
+ resource_physical_id_resolver = ResourcePhysicalIdResolver(Mock(), Mock(), "stack_name", ["resource_name"])
with mock.patch(
"samcli.commands.logs.logs_context.ResourcePhysicalIdResolver._fetch_resources_from_stack"
) as mocked_fetch:
diff --git a/tests/unit/commands/samconfig/test_samconfig.py b/tests/unit/commands/samconfig/test_samconfig.py
index e47f4e2763..db9092a9f8 100644
--- a/tests/unit/commands/samconfig/test_samconfig.py
+++ b/tests/unit/commands/samconfig/test_samconfig.py
@@ -947,6 +947,7 @@ def test_sync(
"stack_name": "mystack",
"image_repository": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1",
"base_dir": "path",
+ "s3_bucket": "mybucket",
"s3_prefix": "myprefix",
"kms_key_id": "mykms",
"parameter_overrides": 'Key1=Value1 Key2="Multiple spaces in the value"',
@@ -990,6 +991,7 @@ def test_sync(
None,
"123456789012.dkr.ecr.us-east-1.amazonaws.com/test1",
None,
+ "mybucket",
"myprefix",
"mykms",
["cap1", "cap2"],
diff --git a/tests/unit/commands/sync/test_command.py b/tests/unit/commands/sync/test_command.py
index bebe5677e9..5438131f2d 100644
--- a/tests/unit/commands/sync/test_command.py
+++ b/tests/unit/commands/sync/test_command.py
@@ -31,6 +31,7 @@ def setUp(self):
self.image_repository = "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1"
self.image_repositories = None
self.mode = "mode"
+ self.s3_bucket = "s3-bucket"
self.s3_prefix = "s3-prefix"
self.kms_key_id = "kms-key-id"
self.notification_arns = []
@@ -49,7 +50,6 @@ def setUp(self):
@parameterized.expand([(False, False, True), (False, False, False)])
@patch("os.environ", {**os.environ, "SAM_CLI_POLL_DELAY": 10})
- @patch("samcli.commands.sync.command.update_experimental_context")
@patch("samcli.commands.sync.command.click")
@patch("samcli.commands.sync.command.execute_code_sync")
@patch("samcli.commands.build.command.click")
@@ -60,11 +60,13 @@ def setUp(self):
@patch("samcli.commands.deploy.deploy_context.DeployContext")
@patch("samcli.commands.build.command.os")
@patch("samcli.commands.sync.command.manage_stack")
+ @patch("samcli.commands.sync.command.SyncContext")
def test_infra_must_succeed_sync(
self,
code,
watch,
auto_dependency_layer,
+ SyncContextMock,
manage_stack_mock,
os_mock,
DeployContextMock,
@@ -75,7 +77,6 @@ def test_infra_must_succeed_sync(
mock_build_click,
execute_code_sync_mock,
click_mock,
- update_experimental_context_mock,
):
build_context_mock = Mock()
@@ -84,6 +85,8 @@ def test_infra_must_succeed_sync(
PackageContextMock.return_value.__enter__.return_value = package_context_mock
deploy_context_mock = Mock()
DeployContextMock.return_value.__enter__.return_value = deploy_context_mock
+ sync_context_mock = Mock()
+ SyncContextMock.return_value.__enter__.return_value = sync_context_mock
do_cli(
self.template_file,
@@ -100,6 +103,7 @@ def test_infra_must_succeed_sync(
self.mode,
self.image_repository,
self.image_repositories,
+ self.s3_bucket,
self.s3_prefix,
self.kms_key_id,
self.capabilities,
@@ -177,7 +181,6 @@ def test_infra_must_succeed_sync(
execute_code_sync_mock.assert_not_called()
@parameterized.expand([(False, True, False)])
- @patch("samcli.commands.sync.command.update_experimental_context")
@patch("samcli.commands.sync.command.click")
@patch("samcli.commands.sync.command.execute_watch")
@patch("samcli.commands.build.command.click")
@@ -188,11 +191,13 @@ def test_infra_must_succeed_sync(
@patch("samcli.commands.deploy.deploy_context.DeployContext")
@patch("samcli.commands.build.command.os")
@patch("samcli.commands.sync.command.manage_stack")
+ @patch("samcli.commands.sync.command.SyncContext")
def test_watch_must_succeed_sync(
self,
code,
watch,
auto_dependency_layer,
+ SyncContextMock,
manage_stack_mock,
os_mock,
DeployContextMock,
@@ -203,7 +208,6 @@ def test_watch_must_succeed_sync(
mock_build_click,
execute_watch_mock,
click_mock,
- update_experimental_context_mock,
):
build_context_mock = Mock()
@@ -212,6 +216,8 @@ def test_watch_must_succeed_sync(
PackageContextMock.return_value.__enter__.return_value = package_context_mock
deploy_context_mock = Mock()
DeployContextMock.return_value.__enter__.return_value = deploy_context_mock
+ sync_context_mock = Mock()
+ SyncContextMock.return_value.__enter__.return_value = sync_context_mock
do_cli(
self.template_file,
@@ -228,6 +234,7 @@ def test_watch_must_succeed_sync(
self.mode,
self.image_repository,
self.image_repositories,
+ self.s3_bucket,
self.s3_prefix,
self.kms_key_id,
self.capabilities,
@@ -303,7 +310,6 @@ def test_watch_must_succeed_sync(
)
@parameterized.expand([(True, False, True)])
- @patch("samcli.commands.sync.command.update_experimental_context")
@patch("samcli.commands.sync.command.click")
@patch("samcli.commands.sync.command.execute_code_sync")
@patch("samcli.commands.build.command.click")
@@ -314,11 +320,13 @@ def test_watch_must_succeed_sync(
@patch("samcli.commands.deploy.deploy_context.DeployContext")
@patch("samcli.commands.build.command.os")
@patch("samcli.commands.sync.command.manage_stack")
+ @patch("samcli.commands.sync.command.SyncContext")
def test_code_must_succeed_sync(
self,
code,
watch,
auto_dependency_layer,
+ SyncContextMock,
manage_stack_mock,
os_mock,
DeployContextMock,
@@ -329,7 +337,6 @@ def test_code_must_succeed_sync(
mock_build_click,
execute_code_sync_mock,
click_mock,
- update_experimental_context_mock,
):
build_context_mock = Mock()
@@ -338,6 +345,8 @@ def test_code_must_succeed_sync(
PackageContextMock.return_value.__enter__.return_value = package_context_mock
deploy_context_mock = Mock()
DeployContextMock.return_value.__enter__.return_value = deploy_context_mock
+ sync_context_mock = Mock()
+ SyncContextMock.return_value.__enter__.return_value = sync_context_mock
do_cli(
self.template_file,
@@ -354,6 +363,7 @@ def test_code_must_succeed_sync(
self.mode,
self.image_repository,
self.image_repositories,
+ self.s3_bucket,
self.s3_prefix,
self.kms_key_id,
self.capabilities,
@@ -478,7 +488,7 @@ def test_execute_code_sync_single_type_resource(
):
resource_identifier_strings = ["Function1", "Function2"]
- resource_types = ["Type1"]
+ resource_types = ["AWS::Serverless::Function"]
sync_flows = [MagicMock(), MagicMock(), MagicMock()]
sync_flow_factory_mock.return_value.create_sync_flow.side_effect = sync_flows
get_unique_resource_ids_mock.return_value = {
@@ -508,7 +518,7 @@ def test_execute_code_sync_single_type_resource(
self.assertEqual(sync_flow_executor_mock.return_value.add_sync_flow.call_count, 3)
get_unique_resource_ids_mock.assert_called_once_with(
- get_stacks_mock.return_value[0], resource_identifier_strings, ["Type1"]
+ get_stacks_mock.return_value[0], resource_identifier_strings, ["AWS::Serverless::Function"]
)
@patch("samcli.commands.sync.command.click")
@@ -525,7 +535,7 @@ def test_execute_code_sync_multiple_type_resource(
click_mock,
):
resource_identifier_strings = ["Function1", "Function2"]
- resource_types = ["Type1", "Type2"]
+ resource_types = ["AWS::Serverless::Function", "AWS::Serverless::LayerVersion"]
sync_flows = [MagicMock(), MagicMock(), MagicMock(), MagicMock()]
sync_flow_factory_mock.return_value.create_sync_flow.side_effect = sync_flows
get_unique_resource_ids_mock.return_value = {
@@ -559,7 +569,9 @@ def test_execute_code_sync_multiple_type_resource(
self.assertEqual(sync_flow_executor_mock.return_value.add_sync_flow.call_count, 4)
get_unique_resource_ids_mock.assert_any_call(
- get_stacks_mock.return_value[0], resource_identifier_strings, ["Type1", "Type2"]
+ get_stacks_mock.return_value[0],
+ resource_identifier_strings,
+ ["AWS::Serverless::Function", "AWS::Serverless::LayerVersion"],
)
@patch("samcli.commands.sync.command.click")
diff --git a/tests/unit/commands/sync/test_sync_context.py b/tests/unit/commands/sync/test_sync_context.py
new file mode 100644
index 0000000000..9238fede82
--- /dev/null
+++ b/tests/unit/commands/sync/test_sync_context.py
@@ -0,0 +1,98 @@
+from pathlib import Path
+from unittest import TestCase, mock
+from unittest.mock import mock_open, call, patch, Mock, MagicMock
+
+import tomlkit
+from parameterized import parameterized, parameterized_class
+
+from samcli.commands.sync.sync_context import (
+ SyncState,
+ _sync_state_to_toml_document,
+ SYNC_STATE,
+ DEPENDENCY_LAYER,
+ _toml_document_to_sync_state,
+ SyncContext,
+)
+from samcli.lib.build.build_graph import DEFAULT_DEPENDENCIES_DIR
+
+
+class TestSyncState(TestCase):
+ @parameterized.expand([(True,), (False,)])
+ def test_sync_state(self, dependency_layer):
+ sync_state = SyncState(dependency_layer)
+ self.assertEqual(sync_state.dependency_layer, dependency_layer)
+
+
+TOML_TEMPLATE = """
+[sync_state]
+dependency_layer = {dependency_layer}"""
+
+
+class TestSyncStateToTomlSerde(TestCase):
+ @parameterized.expand([(True,), (False,)])
+ def test_sync_state_to_toml(self, dependency_layer):
+ sync_state = SyncState(dependency_layer)
+
+ toml_document = _sync_state_to_toml_document(sync_state)
+ self.assertIsNotNone(toml_document)
+
+ sync_state_toml_table = toml_document.get(SYNC_STATE)
+ self.assertIsNotNone(sync_state_toml_table)
+
+ dependency_layer_toml_field = sync_state_toml_table.get(DEPENDENCY_LAYER)
+ self.assertEqual(dependency_layer_toml_field, dependency_layer)
+
+ @parameterized.expand([(True,), (False,)])
+ def test_toml_to_sync_state(self, dependency_layer):
+ toml_doc = tomlkit.loads(TOML_TEMPLATE.format(dependency_layer=str(dependency_layer).lower()))
+
+ sync_state = _toml_document_to_sync_state(toml_doc)
+ self.assertEqual(sync_state.dependency_layer, dependency_layer)
+
+ def test_none_toml_doc_should_return_none(self):
+ self.assertIsNone(_toml_document_to_sync_state(None))
+
+ def test_none_toml_table_should_return_none(self):
+ self.assertIsNone(_toml_document_to_sync_state(tomlkit.document()))
+
+
+@parameterized_class([{"dependency_layer": True}, {"dependency_layer": False}])
+class TestSyncContext(TestCase):
+
+ dependency_layer: bool
+
+ def setUp(self) -> None:
+ self.build_dir = "build_dir"
+ self.cache_dir = "cache_dir"
+ self.sync_context = SyncContext(self.dependency_layer, self.build_dir, self.cache_dir)
+
+ @parameterized.expand([(True,), (False,)])
+ @patch("samcli.commands.sync.sync_context.rmtree_if_exists")
+ def test_sync_context_dependency_layer(self, previous_dependency_layer_value, patched_rmtree_if_exists):
+ previous_session_state = TOML_TEMPLATE.format(dependency_layer=str(previous_dependency_layer_value).lower())
+ with mock.patch("builtins.open", mock_open(read_data=previous_session_state)) as mock_file:
+ with self.sync_context:
+ pass
+
+ mock_file.assert_has_calls(
+ [call().write(tomlkit.dumps(_sync_state_to_toml_document(self.sync_context._current_state)))]
+ )
+
+ if previous_dependency_layer_value != self.dependency_layer:
+ patched_rmtree_if_exists.assert_has_calls(
+ [
+ call(self.sync_context._build_dir),
+ call(self.sync_context._cache_dir),
+ call(Path(DEFAULT_DEPENDENCIES_DIR)),
+ ]
+ )
+
+ @patch("samcli.commands.sync.sync_context.rmtree_if_exists")
+ def test_sync_context_has_no_previous_state_if_file_doesnt_exist(self, patched_rmtree_if_exists):
+ with mock.patch("builtins.open", mock_open()) as mock_file:
+ mock_file.side_effect = [OSError("File does not exist"), MagicMock()]
+ with self.sync_context:
+ pass
+ self.assertIsNone(self.sync_context._previous_state)
+ self.assertIsNotNone(self.sync_context._current_state)
+ patched_rmtree_if_exists.assert_not_called()
diff --git a/tests/unit/lib/bootstrap/nested_stack/test_nested_stack_manager.py b/tests/unit/lib/bootstrap/nested_stack/test_nested_stack_manager.py
index ab258a8be8..089893d966 100644
--- a/tests/unit/lib/bootstrap/nested_stack/test_nested_stack_manager.py
+++ b/tests/unit/lib/bootstrap/nested_stack/test_nested_stack_manager.py
@@ -102,20 +102,6 @@ def test_function_build_definition_without_dependencies_dir(self):
self.assertEqual(template, result)
- def test_non_existent_dependencies_dir(self):
- resources = {"MyFunction": {"Type": AWS_SERVERLESS_FUNCTION, "Properties": {"Runtime": "python3.8"}}}
- self.stack.resources = resources
- template = {"Resources": resources}
- build_graph = Mock()
- build_graph.get_function_build_definition_with_full_path.return_value = Mock(dependencies_dir="foo/bar")
- app_build_result = ApplicationBuildResult(build_graph, {"MyFunction": "path/to/build/dir"})
- nested_stack_manager = NestedStackManager(
- self.stack, self.stack_name, self.build_dir, template, app_build_result
- )
- result = nested_stack_manager.generate_auto_dependency_layer_stack()
-
- self.assertEqual(template, result)
-
@patch("samcli.lib.bootstrap.nested_stack.nested_stack_manager.move_template")
@patch("samcli.lib.bootstrap.nested_stack.nested_stack_manager.osutils")
@patch("samcli.lib.bootstrap.nested_stack.nested_stack_manager.os.path.isdir")
diff --git a/tests/unit/lib/build_module/test_build_strategy.py b/tests/unit/lib/build_module/test_build_strategy.py
index 48023dcdd6..b72e983d38 100644
--- a/tests/unit/lib/build_module/test_build_strategy.py
+++ b/tests/unit/lib/build_module/test_build_strategy.py
@@ -1,5 +1,6 @@
import itertools
from copy import deepcopy
+from typing import List, Dict
from unittest import TestCase
from unittest.mock import Mock, patch, MagicMock, call, ANY
@@ -32,15 +33,15 @@ def setUp(self):
self.function1_1 = Mock()
self.function1_1.inlinecode = None
self.function1_1.get_build_dir = Mock()
- self.function1_1.full_path = Mock()
+ self.function1_1.full_path = "function1_1"
self.function1_2 = Mock()
self.function1_2.inlinecode = None
self.function1_2.get_build_dir = Mock()
- self.function1_2.full_path = Mock()
+ self.function1_2.full_path = "function1_2"
self.function2 = Mock()
self.function2.inlinecode = None
self.function2.get_build_dir = Mock()
- self.function2.full_path = Mock()
+ self.function2.full_path = "function2"
self.function_build_definition1 = FunctionBuildDefinition("runtime", "codeuri", ZIP, X86_64, {}, "handler")
self.function_build_definition2 = FunctionBuildDefinition("runtime2", "codeuri", ZIP, X86_64, {}, "handler")
@@ -378,36 +379,48 @@ def test_redundant_cached_should_be_clean(self):
class ParallelBuildStrategyTest(BuildStrategyBaseTest):
- def test_given_async_context_should_call_expected_methods(self):
- mock_async_context = Mock()
+ @patch("samcli.lib.build.build_strategy.AsyncContext")
+ def test_given_async_context_should_call_expected_methods(self, patched_async_context):
delegate_build_strategy = MagicMock(wraps=_TestBuildStrategy(self.build_graph))
- parallel_build_strategy = ParallelBuildStrategy(self.build_graph, delegate_build_strategy, mock_async_context)
+ parallel_build_strategy = ParallelBuildStrategy(self.build_graph, delegate_build_strategy)
- given_build_results = [
- {"function1": "function_location1"},
- {"function2": "function_location2"},
+ mock_layer_async_context = Mock()
+ mock_function_async_context = Mock()
+ patched_async_context.side_effect = [mock_layer_async_context, mock_function_async_context]
+
+ layer_build_results: List[Dict[str, str]] = [
{"layer1": "layer_location1"},
{"layer2": "layer_location2"},
]
- mock_async_context.run_async.return_value = given_build_results
+ function_build_results: List[Dict[str, str]] = [
+ {"function1": "function_location1"},
+ {"function2": "function_location2"},
+ ]
+ mock_layer_async_context.run_async.return_value = layer_build_results
+ mock_function_async_context.run_async.return_value = function_build_results
results = parallel_build_strategy.build()
expected_results = {}
- for given_build_result in given_build_results:
+ for given_build_result in layer_build_results + function_build_results:
expected_results.update(given_build_result)
self.assertEqual(results, expected_results)
# assert that result has collected
- mock_async_context.run_async.assert_has_calls([call()])
+ mock_layer_async_context.run_async.assert_has_calls([call()])
+ mock_function_async_context.run_async.assert_has_calls([call()])
# assert that delegated function calls have been registered in async context
- mock_async_context.add_async_task.assert_has_calls(
+ mock_layer_async_context.add_async_task.assert_has_calls(
[
- call(delegate_build_strategy.build_single_layer_definition, self.layer_build_definition1),
- call(delegate_build_strategy.build_single_layer_definition, self.layer_build_definition2),
- call(delegate_build_strategy.build_single_function_definition, self.function_build_definition1),
- call(delegate_build_strategy.build_single_function_definition, self.function_build_definition2),
+ call(parallel_build_strategy.build_single_layer_definition, self.layer_build_definition1),
+ call(parallel_build_strategy.build_single_layer_definition, self.layer_build_definition2),
+ ]
+ )
+ mock_function_async_context.add_async_task.assert_has_calls(
+ [
+ call(parallel_build_strategy.build_single_function_definition, self.function_build_definition1),
+ call(parallel_build_strategy.build_single_function_definition, self.function_build_definition2),
]
)
@@ -547,23 +560,19 @@ def setUp(self) -> None:
"cache_dir",
"manifest_path_override",
False,
+ False,
)
@parameterized.expand(
[
- ("python3.7", True),
- ("nodejs12.x", True),
- ("ruby2.7", True),
- ("python3.7", False),
+ "python3.7",
+ "nodejs12.x",
+ "ruby2.7",
]
)
- @patch("samcli.lib.build.build_strategy.is_experimental_enabled")
- def test_will_call_incremental_build_strategy(
- self, mocked_read, mocked_write, runtime, experimental_enabled, patched_experimental
- ):
- patched_experimental.return_value = experimental_enabled
+ def test_will_call_incremental_build_strategy(self, mocked_read, mocked_write, runtime):
build_definition = FunctionBuildDefinition(runtime, "codeuri", "packate_type", X86_64, {}, "handler")
- self.build_graph.put_function_build_definition(build_definition, Mock())
+ self.build_graph.put_function_build_definition(build_definition, Mock(full_path="function_full_path"))
with patch.object(
self.build_strategy, "_incremental_build_strategy"
) as patched_incremental_build_strategy, patch.object(
@@ -571,12 +580,8 @@ def test_will_call_incremental_build_strategy(
) as patched_cached_build_strategy:
self.build_strategy.build()
- if experimental_enabled:
- patched_incremental_build_strategy.build_single_function_definition.assert_called_with(build_definition)
- patched_cached_build_strategy.assert_not_called()
- else:
- patched_cached_build_strategy.build_single_function_definition.assert_called_with(build_definition)
- patched_incremental_build_strategy.assert_not_called()
+ patched_incremental_build_strategy.build_single_function_definition.assert_called_with(build_definition)
+ patched_cached_build_strategy.assert_not_called()
@parameterized.expand(
[
@@ -587,7 +592,7 @@ def test_will_call_incremental_build_strategy(
)
def test_will_call_cached_build_strategy(self, mocked_read, mocked_write, runtime):
build_definition = FunctionBuildDefinition(runtime, "codeuri", "packate_type", X86_64, {}, "handler")
- self.build_graph.put_function_build_definition(build_definition, Mock())
+ self.build_graph.put_function_build_definition(build_definition, Mock(full_path="function_full_path"))
with patch.object(
self.build_strategy, "_incremental_build_strategy"
) as patched_incremental_build_strategy, patch.object(
@@ -615,7 +620,14 @@ def test_exit_build_strategy_for_specific_resource(
mocked_build_graph.get_function_build_definitions.return_value = []
cached_build_strategy = CachedOrIncrementalBuildStrategyWrapper(
- mocked_build_graph, Mock(), temp_base_dir, build_dir, cache_dir, None, is_building_specific_resource
+ mocked_build_graph,
+ Mock(),
+ temp_base_dir,
+ build_dir,
+ cache_dir,
+ None,
+ is_building_specific_resource,
+ False,
)
cached_build_strategy.build()
@@ -630,3 +642,41 @@ def test_exit_build_strategy_for_specific_resource(
mocked_build_graph.clean_redundant_definitions_and_update.assert_called_once()
clean_cache_mock.assert_called_once()
clean_dep_mock.assert_called_once()
+
+ @parameterized.expand(
+ [
+ ("python", True),
+ ("ruby", True),
+ ("nodejs", True),
+ ("python", False),
+ ("ruby", False),
+ ("nodejs", False),
+ ]
+ )
+ def test_wrapper_with_or_without_container(self, mocked_read, mocked_write, runtime, use_container):
+ build_strategy = CachedOrIncrementalBuildStrategyWrapper(
+ self.build_graph,
+ Mock(),
+ "base_dir",
+ "build_dir",
+ "cache_dir",
+ "manifest_path_override",
+ False,
+ use_container,
+ )
+
+ build_definition = FunctionBuildDefinition(runtime, "codeuri", "packate_type", X86_64, {}, "handler")
+ self.build_graph.put_function_build_definition(build_definition, Mock(full_path="function_full_path"))
+ with patch.object(
+ build_strategy, "_incremental_build_strategy"
+ ) as patched_incremental_build_strategy, patch.object(
+ build_strategy, "_cached_build_strategy"
+ ) as patched_cached_build_strategy:
+ build_strategy.build()
+
+ if not use_container:
+ patched_incremental_build_strategy.build_single_function_definition.assert_called_with(build_definition)
+ patched_cached_build_strategy.assert_not_called()
+ else:
+ patched_cached_build_strategy.build_single_function_definition.assert_called_with(build_definition)
+ patched_incremental_build_strategy.assert_not_called()
diff --git a/tests/unit/lib/observability/cw_logs/test_cw_log_group_provider.py b/tests/unit/lib/observability/cw_logs/test_cw_log_group_provider.py
index d3f2bd71cd..19c4746f37 100644
--- a/tests/unit/lib/observability/cw_logs/test_cw_log_group_provider.py
+++ b/tests/unit/lib/observability/cw_logs/test_cw_log_group_provider.py
@@ -1,30 +1,25 @@
from unittest import TestCase
-from unittest.mock import Mock, ANY, patch
+from unittest.mock import Mock, ANY
from parameterized import parameterized
-from samcli.commands._utils.experimental import set_experimental, ExperimentalFlag
from samcli.lib.observability.cw_logs.cw_log_group_provider import LogGroupProvider
-@patch("samcli.commands._utils.experimental.update_experimental_context")
class TestLogGroupProvider_for_lambda_function(TestCase):
- def setUp(self) -> None:
- set_experimental(config_entry=ExperimentalFlag.Accelerate, enabled=True)
-
- def test_must_return_log_group_name(self, patched_update_experimental_context):
+ def test_must_return_log_group_name(self):
expected = "/aws/lambda/my_function_name"
result = LogGroupProvider.for_lambda_function("my_function_name")
self.assertEqual(expected, result)
- def test_rest_api_log_group_name(self, patched_update_experimental_context):
+ def test_rest_api_log_group_name(self):
expected = "API-Gateway-Execution-Logs_my_function_name/Prod"
result = LogGroupProvider.for_resource(Mock(), "AWS::ApiGateway::RestApi", "my_function_name")
self.assertEqual(expected, result)
- def test_http_api_log_group_name(self, patched_update_experimental_context):
+ def test_http_api_log_group_name(self):
given_client_provider = Mock()
given_client_provider(ANY).get_stage.return_value = {
"AccessLogSettings": {"DestinationArn": "test:my_log_group"}
@@ -34,14 +29,14 @@ def test_http_api_log_group_name(self, patched_update_experimental_context):
self.assertEqual(expected, result)
- def test_http_api_log_group_name_not_exist(self, patched_update_experimental_context):
+ def test_http_api_log_group_name_not_exist(self):
given_client_provider = Mock()
given_client_provider(ANY).get_stage.return_value = {}
result = LogGroupProvider.for_resource(given_client_provider, "AWS::ApiGatewayV2::Api", "my_function_name")
self.assertIsNone(result)
- def test_step_functions(self, patched_update_experimental_context):
+ def test_step_functions(self):
given_client_provider = Mock()
given_cw_log_group_name = "sam-app-logs-command-test-MyStateMachineLogGroup-ucwMaQpNBJTD"
given_client_provider(ANY).describe_state_machine.return_value = {
@@ -63,7 +58,7 @@ def test_step_functions(self, patched_update_experimental_context):
self.assertIsNotNone(result)
self.assertEqual(result, given_cw_log_group_name)
- def test_invalid_step_functions(self, patched_update_experimental_context):
+ def test_invalid_step_functions(self):
given_client_provider = Mock()
given_client_provider(ANY).describe_state_machine.return_value = {"loggingConfiguration": {"destinations": []}}
@@ -74,7 +69,7 @@ def test_invalid_step_functions(self, patched_update_experimental_context):
self.assertIsNone(result)
@parameterized.expand(["non-ARN-log-group", "invalid:log:arn"])
- def test_invalid_step_functions_configuration(self, patched_update_experimental_context, log_group_arn):
+ def test_invalid_step_functions_configuration(self, log_group_arn):
given_client_provider = Mock()
given_client_provider(ANY).describe_state_machine.return_value = {
"loggingConfiguration": {"destinations": [{"cloudWatchLogsLogGroup": {"logGroupArn": log_group_arn}}]}
diff --git a/tests/unit/lib/observability/test_observability_info_puller.py b/tests/unit/lib/observability/test_observability_info_puller.py
index 2b3b6b2016..a050533fc9 100644
--- a/tests/unit/lib/observability/test_observability_info_puller.py
+++ b/tests/unit/lib/observability/test_observability_info_puller.py
@@ -85,8 +85,11 @@ def test_tail_cancel(self, patched_async_context):
mock_puller_1 = Mock()
mock_puller_2 = Mock()
+ mock_puller_3 = Mock()
- combined_puller = ObservabilityCombinedPuller([mock_puller_1, mock_puller_2])
+ child_combined_puller = ObservabilityCombinedPuller([mock_puller_3])
+
+ combined_puller = ObservabilityCombinedPuller([mock_puller_1, mock_puller_2, child_combined_puller])
given_start_time = Mock()
given_filter_pattern = Mock()
@@ -97,12 +100,14 @@ def test_tail_cancel(self, patched_async_context):
[
call.add_async_task(mock_puller_1.tail, given_start_time, given_filter_pattern),
call.add_async_task(mock_puller_2.tail, given_start_time, given_filter_pattern),
+ call.add_async_task(child_combined_puller.tail, given_start_time, given_filter_pattern),
call.run_async(),
]
)
- self.assertTrue(mock_puller_1.cancelled)
- self.assertTrue(mock_puller_2.cancelled)
+ self.assertTrue(mock_puller_1.stop_tailing.called)
+ self.assertTrue(mock_puller_2.stop_tailing.called)
+ self.assertTrue(mock_puller_3.stop_tailing.called)
@patch("samcli.lib.observability.observability_info_puller.AsyncContext")
def test_load_time_period(self, patched_async_context):
diff --git a/tests/unit/lib/package/test_uploaders.py b/tests/unit/lib/package/test_uploaders.py
new file mode 100644
index 0000000000..754a85d89c
--- /dev/null
+++ b/tests/unit/lib/package/test_uploaders.py
@@ -0,0 +1,26 @@
+from unittest import TestCase
+from unittest.mock import Mock
+
+from parameterized import parameterized
+
+from samcli.lib.package.uploaders import Destination, Uploaders
+
+
+class TestUploaders(TestCase):
+ @parameterized.expand([(Destination.S3,), (Destination.ECR,), (None,)])
+ def test_uploader_get(self, destination):
+ ecr_uploader = Mock()
+ s3_uploader = Mock()
+
+ uploaders = Uploaders(s3_uploader, ecr_uploader)
+
+ if not destination:
+ with self.assertRaises(ValueError):
+ uploaders.get(destination)
+ elif destination == Destination.S3:
+ self.assertEqual(uploaders.get(destination), s3_uploader)
+ elif destination == Destination.ECR:
+ self.assertEqual(uploaders.get(destination), ecr_uploader)
+
+ self.assertEqual(s3_uploader, uploaders.s3)
+ self.assertEqual(ecr_uploader, uploaders.ecr)
diff --git a/tests/unit/lib/sync/flows/test_http_api_sync_flow.py b/tests/unit/lib/sync/flows/test_http_api_sync_flow.py
index 02da1a9c81..b4790c6637 100644
--- a/tests/unit/lib/sync/flows/test_http_api_sync_flow.py
+++ b/tests/unit/lib/sync/flows/test_http_api_sync_flow.py
@@ -1,6 +1,7 @@
from unittest import TestCase
from unittest.mock import MagicMock, mock_open, patch
from pathlib import Path
+from samcli.lib.providers.provider import Stack
from samcli.lib.sync.flows.http_api_sync_flow import HttpApiSyncFlow
from samcli.lib.providers.exceptions import MissingLocalDefinition
@@ -45,31 +46,51 @@ def test_sync_direct(self, session_mock):
sync_flow._api_client.reimport_api.assert_called_once_with(ApiId="PhysicalApi1", Body='{"key": "value"}')
@patch("samcli.lib.sync.flows.generic_api_sync_flow.get_resource_by_id")
- @patch("samcli.lib.sync.flows.generic_api_sync_flow.Path.joinpath")
- def test_get_definition_file(self, join_path_mock, get_resource_mock):
+ @patch("samcli.lib.sync.flows.generic_api_sync_flow.get_definition_path")
+ def test_get_definition_file(self, get_path_mock, get_resource_mock):
sync_flow = self.create_sync_flow()
- sync_flow._build_context.base_dir = None
- join_path_mock.return_value = "test_uri"
+ sync_flow._build_context.use_base_dir = False
+ sync_flow._build_context.base_dir = "base_dir"
get_resource_mock.return_value = {"Properties": {"DefinitionUri": "test_uri"}}
- result_uri = sync_flow._get_definition_file("test")
+ get_path_mock.return_value = Path("base_dir").joinpath("test_uri")
+
+ result_uri = sync_flow._get_definition_file(sync_flow._api_identifier)
- self.assertEqual(result_uri, "test_uri")
+ get_path_mock.assert_called_with(
+ {"Properties": {"DefinitionUri": "test_uri"}},
+ sync_flow._api_identifier,
+ False,
+ "base_dir",
+ sync_flow._stacks,
+ )
+ self.assertEqual(result_uri, Path("base_dir").joinpath("test_uri"))
- get_resource_mock.return_value = {"Properties": {}}
- result_uri = sync_flow._get_definition_file("test")
+ get_resource_mock.return_value = {}
+ result_uri = sync_flow._get_definition_file(sync_flow._api_identifier)
self.assertEqual(result_uri, None)
@patch("samcli.lib.sync.flows.generic_api_sync_flow.get_resource_by_id")
- def test_get_definition_file_with_base_dir(self, get_resource_mock):
+ @patch("samcli.lib.sync.flows.generic_api_sync_flow.get_definition_path")
+ def test_get_definition_file_with_base_dir(self, get_path_mock, get_resource_mock):
sync_flow = self.create_sync_flow()
+ sync_flow._build_context.use_base_dir = True
sync_flow._build_context.base_dir = "base_dir"
get_resource_mock.return_value = {"Properties": {"DefinitionUri": "test_uri"}}
- result_uri = sync_flow._get_definition_file("test")
+ get_path_mock.return_value = Path("base_dir").joinpath("test_uri")
- self.assertEqual(result_uri, str(Path("base_dir").joinpath("test_uri")))
+ result_uri = sync_flow._get_definition_file(sync_flow._api_identifier)
+
+ get_path_mock.assert_called_with(
+ {"Properties": {"DefinitionUri": "test_uri"}},
+ sync_flow._api_identifier,
+ True,
+ "base_dir",
+ sync_flow._stacks,
+ )
+ self.assertEqual(result_uri, Path("base_dir").joinpath("test_uri"))
def test_process_definition_file(self):
sync_flow = self.create_sync_flow()
diff --git a/tests/unit/lib/sync/flows/test_layer_sync_flow.py b/tests/unit/lib/sync/flows/test_layer_sync_flow.py
index afcab1cdf8..f709f5e682 100644
--- a/tests/unit/lib/sync/flows/test_layer_sync_flow.py
+++ b/tests/unit/lib/sync/flows/test_layer_sync_flow.py
@@ -60,8 +60,15 @@ def test_setup_with_unknown_layer(self):
@patch("samcli.lib.sync.flows.layer_sync_flow.make_zip")
@patch("samcli.lib.sync.flows.layer_sync_flow.file_checksum")
@patch("samcli.lib.sync.flows.layer_sync_flow.os")
+ @patch("samcli.lib.sync.flows.layer_sync_flow.rmtree_if_exists")
def test_setup_gather_resources(
- self, patched_os, patched_file_checksum, patched_make_zip, patched_tempfile, patched_app_builder
+ self,
+ patched_rmtree_if_exists,
+ patched_os,
+ patched_file_checksum,
+ patched_make_zip,
+ patched_tempfile,
+ patched_app_builder,
):
given_collect_build_resources = Mock()
self.build_context_mock.collect_build_resources.return_value = given_collect_build_resources
@@ -81,6 +88,8 @@ def test_setup_gather_resources(
self.layer_sync_flow.gather_resources()
+ layer_object = self.build_context_mock.layer_provider.get(self.layer_identifier)
+ patched_rmtree_if_exists.assert_called_with(layer_object.get_build_dir(self.build_context_mock.build_dir))
self.build_context_mock.collect_build_resources.assert_called_with(self.layer_identifier)
patched_app_builder.assert_called_with(
diff --git a/tests/unit/lib/sync/flows/test_rest_api_sync_flow.py b/tests/unit/lib/sync/flows/test_rest_api_sync_flow.py
index af338bee5c..4a68eae98a 100644
--- a/tests/unit/lib/sync/flows/test_rest_api_sync_flow.py
+++ b/tests/unit/lib/sync/flows/test_rest_api_sync_flow.py
@@ -7,7 +7,7 @@
from samcli.lib.utils.colors import Colored
from samcli.lib.sync.flows.rest_api_sync_flow import RestApiSyncFlow
from samcli.lib.providers.exceptions import MissingLocalDefinition
-from samcli.lib.providers.provider import ResourceIdentifier
+from samcli.lib.providers.provider import ResourceIdentifier, Stack
class TestRestApiSyncFlow(TestCase):
@@ -263,31 +263,51 @@ def test_delete_deployment_failure(self, session_mock):
)
@patch("samcli.lib.sync.flows.generic_api_sync_flow.get_resource_by_id")
- @patch("samcli.lib.sync.flows.generic_api_sync_flow.Path.joinpath")
- def test_get_definition_file(self, join_path_mock, get_resource_mock):
+ @patch("samcli.lib.sync.flows.generic_api_sync_flow.get_definition_path")
+ def test_get_definition_file(self, get_path_mock, get_resource_mock):
sync_flow = self.create_sync_flow()
- sync_flow._build_context.base_dir = None
- join_path_mock.return_value = "test_uri"
+ sync_flow._build_context.use_base_dir = False
+ sync_flow._build_context.base_dir = "base_dir"
get_resource_mock.return_value = {"Properties": {"DefinitionUri": "test_uri"}}
- result_uri = sync_flow._get_definition_file("test")
+ get_path_mock.return_value = Path("base_dir").joinpath("test_uri")
+
+ result_uri = sync_flow._get_definition_file(sync_flow._api_identifier)
- self.assertEqual(result_uri, "test_uri")
+ get_path_mock.assert_called_with(
+ {"Properties": {"DefinitionUri": "test_uri"}},
+ sync_flow._api_identifier,
+ False,
+ "base_dir",
+ sync_flow._stacks,
+ )
+ self.assertEqual(result_uri, Path("base_dir").joinpath("test_uri"))
- get_resource_mock.return_value = {"Properties": {}}
- result_uri = sync_flow._get_definition_file("test")
+ get_resource_mock.return_value = {}
+ result_uri = sync_flow._get_definition_file(sync_flow._api_identifier)
self.assertEqual(result_uri, None)
@patch("samcli.lib.sync.flows.generic_api_sync_flow.get_resource_by_id")
- def test_get_definition_file_with_base_dir(self, get_resource_mock):
+ @patch("samcli.lib.sync.flows.generic_api_sync_flow.get_definition_path")
+ def test_get_definition_file_with_base_dir(self, get_path_mock, get_resource_mock):
sync_flow = self.create_sync_flow()
+ sync_flow._build_context.use_base_dir = True
sync_flow._build_context.base_dir = "base_dir"
get_resource_mock.return_value = {"Properties": {"DefinitionUri": "test_uri"}}
- result_uri = sync_flow._get_definition_file("test")
+ get_path_mock.return_value = Path("base_dir").joinpath("test_uri")
- self.assertEqual(result_uri, str(Path("base_dir").joinpath("test_uri")))
+ result_uri = sync_flow._get_definition_file(sync_flow._api_identifier)
+
+ get_path_mock.assert_called_with(
+ {"Properties": {"DefinitionUri": "test_uri"}},
+ sync_flow._api_identifier,
+ True,
+ "base_dir",
+ sync_flow._stacks,
+ )
+ self.assertEqual(result_uri, Path("base_dir").joinpath("test_uri"))
def test_process_definition_file(self):
sync_flow = self.create_sync_flow()
diff --git a/tests/unit/lib/sync/flows/test_stepfunctions_sync_flow.py b/tests/unit/lib/sync/flows/test_stepfunctions_sync_flow.py
index 6d4bebf2f2..6af77cd60b 100644
--- a/tests/unit/lib/sync/flows/test_stepfunctions_sync_flow.py
+++ b/tests/unit/lib/sync/flows/test_stepfunctions_sync_flow.py
@@ -1,6 +1,7 @@
from unittest import TestCase
-from unittest.mock import ANY, MagicMock, mock_open, patch
+from unittest.mock import MagicMock, mock_open, patch
from pathlib import Path
+from samcli.lib.providers.provider import Stack
from samcli.lib.sync.flows.stepfunctions_sync_flow import StepFunctionsSyncFlow
from samcli.lib.sync.exceptions import InfraSyncRequiredError
@@ -54,32 +55,50 @@ def test_sync_direct(self, session_mock):
stateMachineArn="PhysicalId1", definition='{"key": "value"}'
)
- @patch("samcli.lib.sync.flows.stepfunctions_sync_flow.get_resource_by_id")
- @patch("samcli.lib.sync.flows.stepfunctions_sync_flow.Path.joinpath")
- def test_get_definition_file(self, join_path_mock, get_resource_mock):
+ @patch("samcli.lib.sync.flows.stepfunctions_sync_flow.get_definition_path")
+ def test_get_definition_file(self, get_path_mock):
sync_flow = self.create_sync_flow()
- sync_flow._build_context.base_dir = None
- join_path_mock.return_value = "test_uri"
+ sync_flow._build_context.use_base_dir = False
+ sync_flow._build_context.base_dir = "base_dir"
sync_flow._resource = {"Properties": {"DefinitionUri": "test_uri"}}
- result_uri = sync_flow._get_definition_file("test")
+ get_path_mock.return_value = Path("base_dir").joinpath("test_uri")
+
+ result_uri = sync_flow._get_definition_file(sync_flow._state_machine_identifier)
- self.assertEqual(result_uri, "test_uri")
+ get_path_mock.assert_called_with(
+ {"Properties": {"DefinitionUri": "test_uri"}},
+ sync_flow._state_machine_identifier,
+ False,
+ "base_dir",
+ sync_flow._stacks,
+ )
+ self.assertEqual(result_uri, Path("base_dir").joinpath("test_uri"))
- sync_flow._resource = {"Properties": {}}
- result_uri = sync_flow._get_definition_file("test")
+ sync_flow._resource = {}
+ result_uri = sync_flow._get_definition_file(sync_flow._state_machine_identifier)
self.assertEqual(result_uri, None)
- @patch("samcli.lib.sync.flows.stepfunctions_sync_flow.get_resource_by_id")
- def test_get_definition_file_with_base_dir(self, get_resource_mock):
+ @patch("samcli.lib.sync.flows.stepfunctions_sync_flow.get_definition_path")
+ def test_get_definition_file_with_base_dir(self, get_path_mock):
sync_flow = self.create_sync_flow()
+ sync_flow._build_context.use_base_dir = True
sync_flow._build_context.base_dir = "base_dir"
sync_flow._resource = {"Properties": {"DefinitionUri": "test_uri"}}
- result_uri = sync_flow._get_definition_file("test")
+ get_path_mock.return_value = Path("base_dir").joinpath("test_uri")
- self.assertEqual(result_uri, str(Path("base_dir").joinpath("test_uri")))
+ result_uri = sync_flow._get_definition_file(sync_flow._state_machine_identifier)
+
+ get_path_mock.assert_called_with(
+ {"Properties": {"DefinitionUri": "test_uri"}},
+ sync_flow._state_machine_identifier,
+ True,
+ "base_dir",
+ sync_flow._stacks,
+ )
+ self.assertEqual(result_uri, Path("base_dir").joinpath("test_uri"))
def test_process_definition_file(self):
sync_flow = self.create_sync_flow()
diff --git a/tests/unit/lib/sync/flows/test_zip_function_sync_flow.py b/tests/unit/lib/sync/flows/test_zip_function_sync_flow.py
index 07e1b4d134..7750088a96 100644
--- a/tests/unit/lib/sync/flows/test_zip_function_sync_flow.py
+++ b/tests/unit/lib/sync/flows/test_zip_function_sync_flow.py
@@ -10,9 +10,11 @@
class TestZipFunctionSyncFlow(TestCase):
def create_function_sync_flow(self):
+ self.build_context_mock = MagicMock()
+ self.function_identifier = "Function1"
sync_flow = ZipFunctionSyncFlow(
- "Function1",
- build_context=MagicMock(),
+ self.function_identifier,
+ build_context=self.build_context_mock,
deploy_context=MagicMock(),
physical_id_mapping={},
stacks=[MagicMock()],
@@ -34,9 +36,18 @@ def test_set_up(self, session_mock, client_provider_mock):
@patch("samcli.lib.sync.flows.zip_function_sync_flow.make_zip")
@patch("samcli.lib.sync.flows.zip_function_sync_flow.tempfile.gettempdir")
@patch("samcli.lib.sync.flows.zip_function_sync_flow.ApplicationBuilder")
+ @patch("samcli.lib.sync.flows.zip_function_sync_flow.rmtree_if_exists")
@patch("samcli.lib.sync.sync_flow.Session")
def test_gather_resources(
- self, session_mock, builder_mock, gettempdir_mock, make_zip_mock, file_checksum_mock, uuid4_mock, sha256_mock
+ self,
+ session_mock,
+ rmtree_if_exists_mock,
+ builder_mock,
+ gettempdir_mock,
+ make_zip_mock,
+ file_checksum_mock,
+ uuid4_mock,
+ sha256_mock,
):
get_mock = MagicMock()
get_mock.return_value = "ArtifactFolder1"
@@ -53,6 +64,8 @@ def test_gather_resources(
sync_flow.set_up()
sync_flow.gather_resources()
+ function_object = self.build_context_mock.function_provider.get(self.function_identifier)
+ rmtree_if_exists_mock.assert_called_once_with(function_object.get_build_dir(self.build_context_mock.build_dir))
get_mock.assert_called_once_with("Function1")
self.assertEqual(sync_flow._artifact_folder, "ArtifactFolder1")
make_zip_mock.assert_called_once_with("temp_folder" + os.sep + "data-uuid_value", "ArtifactFolder1")
@@ -178,7 +191,7 @@ def test_get_resource_api_calls(self, resource_api_call_mock):
function_mock = MagicMock()
function_mock.layers = [layer1, layer2]
function_mock.codeuri = "CodeUri/"
- build_context.function_provider.functions.get.return_value = function_mock
+ build_context.function_provider.get.return_value = function_mock
sync_flow = ZipFunctionSyncFlow(
"Function1",
build_context=build_context,
diff --git a/tests/unit/lib/sync/test_sync_flow.py b/tests/unit/lib/sync/test_sync_flow.py
index 1b161bc1cf..2bffa3799c 100644
--- a/tests/unit/lib/sync/test_sync_flow.py
+++ b/tests/unit/lib/sync/test_sync_flow.py
@@ -1,9 +1,9 @@
-from samcli.lib.providers.provider import ResourceIdentifier
+from pathlib import Path
+from samcli.lib.providers.provider import ResourceIdentifier, Stack
from unittest import TestCase
-from unittest.mock import MagicMock, call, patch, Mock
+from unittest.mock import MagicMock, patch, Mock
-from samcli.lib.sync.sync_flow import SyncFlow, ResourceAPICall, ApiCallTypes
-from samcli.lib.utils.lock_distributor import LockChain
+from samcli.lib.sync.sync_flow import SyncFlow, ResourceAPICall, ApiCallTypes, get_definition_path
from parameterized import parameterized
@@ -148,3 +148,21 @@ def test_hash(self):
sync_flow._equality_keys = MagicMock()
sync_flow._equality_keys.return_value = "A"
self.assertEqual(hash(sync_flow), hash((type(sync_flow), "A")))
+
+ @patch("samcli.lib.sync.sync_flow.Stack.get_stack_by_full_path")
+ def test_get_definition_path(self, get_stack_mock):
+ resource = {"Properties": {"DefinitionUri": "test_uri"}}
+ get_stack_mock.return_value = Stack("parent_path", "stack_name", "location/template.yaml", None, {})
+
+ definition_path = get_definition_path(resource, "identifier", False, "base_dir", [])
+ self.assertEqual(definition_path, Path("location").joinpath("test_uri"))
+
+ resource = {"Properties": {"DefinitionUri": ""}}
+ definition_path = get_definition_path(resource, "identifier", False, "base_dir", [])
+ self.assertEqual(definition_path, None)
+
+ def test_get_definition_file_with_base_dir(self):
+ resource = {"Properties": {"DefinitionUri": "test_uri"}}
+
+ definition_path = get_definition_path(resource, "identifier", True, "base_dir", [])
+ self.assertEqual(definition_path, Path("base_dir").joinpath("test_uri"))
diff --git a/tests/unit/lib/sync/test_sync_flow_factory.py b/tests/unit/lib/sync/test_sync_flow_factory.py
index ea6625b66a..80c39711fc 100644
--- a/tests/unit/lib/sync/test_sync_flow_factory.py
+++ b/tests/unit/lib/sync/test_sync_flow_factory.py
@@ -1,8 +1,20 @@
from unittest import TestCase
from unittest.mock import MagicMock, patch, Mock
-from samcli.lib.sync.sync_flow_factory import SyncFlowFactory
+from samcli.lib.sync.sync_flow_factory import SyncCodeResources, SyncFlowFactory
from samcli.lib.utils.cloudformation import CloudFormationResourceSummary
+from samcli.lib.utils.resources import (
+ AWS_SERVERLESS_FUNCTION,
+ AWS_LAMBDA_FUNCTION,
+ AWS_SERVERLESS_LAYERVERSION,
+ AWS_LAMBDA_LAYERVERSION,
+ AWS_SERVERLESS_API,
+ AWS_APIGATEWAY_RESTAPI,
+ AWS_SERVERLESS_HTTPAPI,
+ AWS_APIGATEWAY_V2_API,
+ AWS_SERVERLESS_STATEMACHINE,
+ AWS_STEPFUNCTIONS_STATEMACHINE,
+)
class TestSyncFlowFactory(TestCase):
@@ -31,7 +43,10 @@ def create_factory(self, auto_dependency_layer: bool = False):
@patch("samcli.lib.sync.sync_flow_factory.get_resource_summaries")
@patch("samcli.lib.sync.sync_flow_factory.get_boto_resource_provider_with_config")
- def test_load_physical_id_mapping(self, get_boto_resource_provider_mock, get_resource_summaries_mock):
+ @patch("samcli.lib.sync.sync_flow_factory.get_boto_client_provider_with_config")
+ def test_load_physical_id_mapping(
+ self, get_boto_client_provider_mock, get_boto_resource_provider_mock, get_resource_summaries_mock
+ ):
resource_summary_1 = CloudFormationResourceSummary(
resource_type="", logical_resource_id="", physical_resource_id="PhysicalResource1"
)
@@ -158,3 +173,21 @@ def test_create_none_generator_sync_flow(self, get_resource_by_id_mock):
factory._get_generator_function = get_generator_function_mock
self.assertIsNone(factory.create_sync_flow(resource_identifier))
+
+
+class TestSyncCodeResources(TestCase):
+ def test_values(self):
+ output = SyncCodeResources.values()
+ expected = [
+ AWS_SERVERLESS_FUNCTION,
+ AWS_LAMBDA_FUNCTION,
+ AWS_SERVERLESS_LAYERVERSION,
+ AWS_LAMBDA_LAYERVERSION,
+ AWS_SERVERLESS_API,
+ AWS_APIGATEWAY_RESTAPI,
+ AWS_SERVERLESS_HTTPAPI,
+ AWS_APIGATEWAY_V2_API,
+ AWS_SERVERLESS_STATEMACHINE,
+ AWS_STEPFUNCTIONS_STATEMACHINE,
+ ]
+ self.assertEqual(expected, output)
diff --git a/tests/unit/lib/utils/test_boto_utils.py b/tests/unit/lib/utils/test_boto_utils.py
index 9cde1d3fb0..626d9d1ab2 100644
--- a/tests/unit/lib/utils/test_boto_utils.py
+++ b/tests/unit/lib/utils/test_boto_utils.py
@@ -9,6 +9,7 @@
get_boto_resource_provider_with_config,
get_boto_resource_provider_from_session_with_config,
get_boto_client_provider_from_session_with_config,
+ get_client_error_code,
)
TEST_VERSION = "1.0.0"
@@ -121,3 +122,7 @@ def test_get_boto_resource_provider_from_session_with_config(self, patched_get_c
self.assertEqual(resource, given_resource)
patched_get_config.assert_called_with(param=given_config_param)
given_session.resource.assert_called_with(given_resource_name, config=given_config)
+
+ @parameterized.expand([({}, None), ({"Error": {}}, None), ({"Error": {"Code": "ErrorCode"}}, "ErrorCode")])
+ def test_get_client_error_code(self, response, expected):
+ self.assertEqual(expected, get_client_error_code(Mock(response=response)))
diff --git a/tests/unit/lib/utils/test_cloudformation.py b/tests/unit/lib/utils/test_cloudformation.py
index 7bddd47710..ea52adf6b4 100644
--- a/tests/unit/lib/utils/test_cloudformation.py
+++ b/tests/unit/lib/utils/test_cloudformation.py
@@ -7,6 +7,7 @@
CloudFormationResourceSummary,
get_resource_summaries,
get_resource_summary,
+ list_active_stack_names,
)
from samcli.lib.utils.resources import AWS_CLOUDFORMATION_STACK
@@ -31,6 +32,7 @@ def test_cfn_resource_summary(self):
class TestCloudformationUtils(TestCase):
def test_get_resource_summaries(self):
resource_provider_mock = Mock()
+ client_provider_mock = Mock()
given_stack_name = "stack_name"
given_resource_types = {"ResourceType0"}
@@ -68,7 +70,9 @@ def test_get_resource_summaries(self):
given_nested_stack_resource_array,
]
- resource_summaries = get_resource_summaries(resource_provider_mock, given_stack_name, given_resource_types)
+ resource_summaries = get_resource_summaries(
+ resource_provider_mock, client_provider_mock, given_stack_name, given_resource_types
+ )
self.assertEqual(len(resource_summaries), 4)
self.assertEqual(
@@ -127,3 +131,47 @@ def test_get_resource_summary_fail(self):
resource_summary = get_resource_summary(resource_provider_mock, given_stack_name, given_resource_logical_id)
self.assertIsNone(resource_summary)
+
+ @patch("samcli.lib.utils.cloudformation.LOG")
+ @patch("samcli.lib.utils.cloudformation.list_active_stack_names")
+ def test_get_resource_summaries_invalid_stack(self, patched_list_active_stack_names, patched_log):
+ resource_provider_mock = Mock()
+ client_provider_mock = Mock()
+ patched_log.isEnabledFor.return_value = True
+ patched_list_active_stack_names.return_value = []
+
+ resource_provider_mock.side_effect = ClientError({"Error": {"Code": "ValidationError"}}, "operation")
+
+ with self.assertRaises(ClientError):
+ get_resource_summaries(resource_provider_mock, client_provider_mock, "invalid-stack")
+ patched_log.debug.assert_called_with(
+ "Invalid stack name (%s). Available stack names: %s", "invalid-stack", ", ".join([])
+ )
+
+ def test_list_active_stack_names(self):
+ cfn_client_mock = Mock()
+ cfn_client_mock.list_stacks.side_effect = [
+ {
+ "StackSummaries": [{"StackName": "A"}, {"StackName": "B"}, {"StackName": "C", "RootId": "A"}],
+ "NextToken": "X",
+ },
+ {"StackSummaries": [{"StackName": "D"}, {"StackName": "E"}, {"StackName": "F", "RootId": "A"}]},
+ ]
+ client_provider_mock = Mock()
+ client_provider_mock.return_value = cfn_client_mock
+
+ self.assertEqual(["A", "B", "D", "E"], list(list_active_stack_names(client_provider_mock)))
+
+ def test_list_active_stack_names_with_nested_stacks(self):
+ cfn_client_mock = Mock()
+ cfn_client_mock.list_stacks.side_effect = [
+ {
+ "StackSummaries": [{"StackName": "A"}, {"StackName": "B"}, {"StackName": "C", "RootId": "A"}],
+ "NextToken": "X",
+ },
+ {"StackSummaries": [{"StackName": "D"}, {"StackName": "E"}, {"StackName": "F", "RootId": "A"}]},
+ ]
+ client_provider_mock = Mock()
+ client_provider_mock.return_value = cfn_client_mock
+
+ self.assertEqual(["A", "B", "C", "D", "E", "F"], list(list_active_stack_names(client_provider_mock, True)))
diff --git a/tests/unit/lib/utils/test_git_repo.py b/tests/unit/lib/utils/test_git_repo.py
index c3ce27b6df..a7f789de25 100644
--- a/tests/unit/lib/utils/test_git_repo.py
+++ b/tests/unit/lib/utils/test_git_repo.py
@@ -53,6 +53,7 @@ def test_git_executable_fails(self, mock_popen):
@patch("samcli.lib.utils.git_repo.subprocess.Popen")
@patch("samcli.lib.utils.git_repo.platform.system")
def test_clone_happy_case(self, platform_mock, popen_mock, check_output_mock, shutil_mock, path_exist_mock):
+ platform_mock.return_value = "Not Windows"
path_exist_mock.return_value = False
self.repo.clone(clone_dir=self.local_clone_dir, clone_name=REPO_NAME)
self.local_clone_dir.mkdir.assert_called_once_with(mode=0o700, parents=True, exist_ok=True)
@@ -207,6 +208,7 @@ def test_checkout_commit_when_commit_not_exist(self, check_output_mock, log_mock
@patch("samcli.lib.utils.git_repo.subprocess.Popen")
@patch("samcli.lib.utils.git_repo.platform.system")
def test_clone_with_commit(self, platform_mock, popen_mock, check_output_mock, shutil_mock, path_exist_mock):
+ platform_mock.return_value = "Not Windows"
path_exist_mock.return_value = False
self.repo.clone(clone_dir=self.local_clone_dir, clone_name=REPO_NAME, commit=COMMIT)
self.local_clone_dir.mkdir.assert_called_once_with(mode=0o700, parents=True, exist_ok=True)
@@ -221,3 +223,29 @@ def test_clone_with_commit(self, platform_mock, popen_mock, check_output_mock, s
shutil_mock.rmtree.assert_not_called()
shutil_mock.copytree.assert_called_with(ANY, EXPECTED_DEFAULT_CLONE_PATH, ignore=ANY)
shutil_mock.ignore_patterns.assert_called_with("*.git")
+
+ @patch("samcli.lib.utils.git_repo.Path.exists")
+ @patch("samcli.lib.utils.git_repo.shutil")
+ @patch("samcli.lib.utils.git_repo.check_output")
+ @patch("samcli.lib.utils.git_repo.subprocess.Popen")
+ @patch("samcli.lib.utils.git_repo.platform.system")
+ def test_clone_with_longpaths_configured_in_windows(
+ self, platform_mock, popen_mock, check_output_mock, shutil_mock, path_exist_mock
+ ):
+ platform_mock.return_value = "windows"
+ path_exist_mock.return_value = False
+ self.repo.clone(clone_dir=self.local_clone_dir, clone_name=REPO_NAME)
+ self.local_clone_dir.mkdir.assert_called_once_with(mode=0o700, parents=True, exist_ok=True)
+ popen_mock.assert_called_once_with(["git"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ check_output_mock.assert_has_calls(
+ [
+ call(
+ ["git", "clone", self.repo.url, REPO_NAME, "--config", "core.longpaths=true"],
+ cwd=ANY,
+ stderr=subprocess.STDOUT,
+ )
+ ]
+ )
+ shutil_mock.rmtree.assert_not_called()
+ shutil_mock.copytree.assert_called_with(ANY, EXPECTED_DEFAULT_CLONE_PATH, ignore=ANY)
+ shutil_mock.ignore_patterns.assert_called_with("*.git")
diff --git a/tests/unit/lib/utils/test_osutils.py b/tests/unit/lib/utils/test_osutils.py
index d65dac1436..e09e1b47ee 100644
--- a/tests/unit/lib/utils/test_osutils.py
+++ b/tests/unit/lib/utils/test_osutils.py
@@ -6,8 +6,9 @@
import sys
from unittest import TestCase
-from unittest.mock import patch
+from unittest.mock import patch, Mock
from samcli.lib.utils import osutils
+from samcli.lib.utils.osutils import rmtree_if_exists
class Test_mkdir_temp(TestCase):
@@ -77,3 +78,25 @@ def test_must_return_sys_stdout(self, patched_open, os_walk):
patched_open.assert_any_call(os.path.join("b", target_file), "rb")
patched_open.assert_any_call(os.path.join("a", target_file), "wb")
patched_open.assert_any_call(os.path.join("b", target_file), "wb")
+
+
+class Test_rmtree_if_exists(TestCase):
+ @patch("samcli.lib.utils.osutils.Path")
+ @patch("samcli.lib.utils.osutils.shutil.rmtree")
+ def test_must_skip_if_path_doesnt_exist(self, patched_rmtree, patched_path):
+ mock_path_obj = Mock()
+ mock_path_obj.exists.return_value = False
+ patched_path.return_value = mock_path_obj
+
+ rmtree_if_exists(Mock())
+ patched_rmtree.assert_not_called()
+
+ @patch("samcli.lib.utils.osutils.Path")
+ @patch("samcli.lib.utils.osutils.shutil.rmtree")
+ def test_must_delete_if_path_exist(self, patched_rmtree, patched_path):
+ mock_path_obj = Mock()
+ mock_path_obj.exists.return_value = True
+ patched_path.return_value = mock_path_obj
+
+ rmtree_if_exists(Mock())
+ patched_rmtree.assert_called_with(mock_path_obj)