Skip to content

Commit

Permalink
Merge from aws/aws-sam-cli/develop
Browse files Browse the repository at this point in the history
  • Loading branch information
aws-sam-cli-bot authored Jun 8, 2022
2 parents a17ca8a + eda6089 commit 7e2d795
Show file tree
Hide file tree
Showing 37 changed files with 1,520 additions and 248 deletions.
2 changes: 1 addition & 1 deletion DEVELOPMENT_GUIDE.md
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ contribute to the repository, there are a few more things to consider.
We support 3.7 and 3.8 versions. Our CI/CD pipeline is set up to run
unit tests against all Python versions. So make sure you test it
with all versions before sending a Pull Request.
See [Unit testing with multiple Python versions](#unit-testing-with-multiple-python-versions).
See [Unit testing with multiple Python versions](#unit-testing-with-multiple-python-versions-optional).

If you chose to use `pyenv` in the previous section, setting up a
different Python version should be easy:
Expand Down
2 changes: 1 addition & 1 deletion requirements/base.txt
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ boto3>=1.19.5,==1.*
jmespath~=0.10.0
PyYAML~=5.3
cookiecutter~=1.7.2
aws-sam-translator==1.45.0
aws-sam-translator==1.46.0
#docker minor version updates can include breaking changes. Auto update micro version only.
docker~=4.2.0
dateparser~=1.0
Expand Down
8 changes: 4 additions & 4 deletions requirements/reproducible-linux.txt
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,10 @@ aws-lambda-builders==1.17.0 \
--hash=sha256:3eb7ca5ab71761766586db080a8b80ab81346b307fa72d5cea64ccd69fb41efe \
--hash=sha256:abae4ccfc419fc5cd8eebd4cc81e335ec7610f53804ce1aa2b655159ce339610
# via aws-sam-cli (setup.py)
aws-sam-translator==1.45.0 \
--hash=sha256:40a6dd5a0aba32c7b38b0f5c54470396acdcd75e4b64251b015abdf922a18b5f \
--hash=sha256:bf321ab62aa1731d3e471fd55de6f5d1ab07dfc169cd254aa523dd9ad30246f9 \
--hash=sha256:cd4761c01902e5103e60202373275886e59edcc778edf18ca22d380059ed44e7
aws-sam-translator==1.46.0 \
--hash=sha256:03cb83135c98b1c47bf2b3c15507808c7e06d4717a3ca3da1cee6e7ebcb5282b \
--hash=sha256:095d1c8b9cb7fdaec6ff70914f8ae1269f14d91594b9f452b63548425b3de93b \
--hash=sha256:53ed4cbabf8b9247c1be9c5f47fcfc2764578f55b12e7fe24b9bc41d6fce47cc
# via aws-sam-cli (setup.py)
backports-zoneinfo==0.2.1 \
--hash=sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf \
Expand Down
2 changes: 1 addition & 1 deletion samcli/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@
SAM CLI version
"""

__version__ = "1.51.0"
__version__ = "1.52.0"
7 changes: 5 additions & 2 deletions samcli/commands/_utils/table_print.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,17 @@
MIN_OFFSET = 20


def pprint_column_names(format_string, format_kwargs, margin=None, table_header=None, color="yellow"):
def pprint_column_names(
format_string, format_kwargs, margin=None, table_header=None, color="yellow", display_sleep=False
):
"""
:param format_string: format string to be used that has the strings, minimum width to be replaced
:param format_kwargs: dictionary that is supplied to the format_string to format the string
:param margin: margin that is to be reduced from column width for columnar text.
:param table_header: Supplied table header
:param color: color supplied for table headers and column names.
:param display_sleep: flag to format table_header to include deployer's client_sleep
:return: boilerplate table string
"""

Expand Down Expand Up @@ -59,7 +62,7 @@ def pprint_wrap(func):
def wrap(*args, **kwargs):
# The table is setup with the column names, format_string contains the column names.
if table_header:
click.secho("\n" + table_header)
click.secho("\n" + table_header.format(args[0].client_sleep) if display_sleep else table_header)
click.secho("-" * usable_width, fg=color)
click.secho(format_string.format(*format_args, **format_kwargs), fg=color)
click.secho("-" * usable_width, fg=color)
Expand Down
32 changes: 30 additions & 2 deletions samcli/commands/delete/command.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

import logging

from typing import Optional
import click
from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args

Expand Down Expand Up @@ -63,12 +64,26 @@
is_flag=True,
required=False,
)
@click.option(
"--s3-bucket",
help=("The S3 bucket path you want to delete."),
type=click.STRING,
default=None,
required=False,
)
@click.option(
"--s3-prefix",
help=("The S3 prefix you want to delete"),
type=click.STRING,
default=None,
required=False,
)
@aws_creds_options
@common_options
@pass_context
@check_newer_version
@print_cmdline_args
def cli(ctx, stack_name: str, config_file: str, config_env: str, no_prompts: bool):
def cli(ctx, stack_name: str, config_file: str, config_env: str, no_prompts: bool, s3_bucket: str, s3_prefix: str):
"""
`sam delete` command entry point
"""
Expand All @@ -81,10 +96,21 @@ def cli(ctx, stack_name: str, config_file: str, config_env: str, no_prompts: boo
config_env=config_env,
profile=ctx.profile,
no_prompts=no_prompts,
s3_bucket=s3_bucket,
s3_prefix=s3_prefix,
) # pragma: no cover


def do_cli(stack_name: str, region: str, config_file: str, config_env: str, profile: str, no_prompts: bool):
def do_cli(
stack_name: str,
region: str,
config_file: str,
config_env: str,
profile: str,
no_prompts: bool,
s3_bucket: Optional[str],
s3_prefix: Optional[str],
):
"""
Implementation of the ``cli`` method
"""
Expand All @@ -97,5 +123,7 @@ def do_cli(stack_name: str, region: str, config_file: str, config_env: str, prof
config_file=config_file,
config_env=config_env,
no_prompts=no_prompts,
s3_bucket=s3_bucket,
s3_prefix=s3_prefix,
) as delete_context:
delete_context.run()
41 changes: 29 additions & 12 deletions samcli/commands/delete/delete_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,10 @@
import logging

import json
import boto3

from typing import Optional

import boto3

import click
from click import confirm
Expand Down Expand Up @@ -36,15 +38,25 @@

class DeleteContext:
# TODO: Separate this context into 2 separate contexts guided and non-guided, just like deploy.
def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str, no_prompts: bool):
def __init__(
self,
stack_name: str,
region: str,
profile: str,
config_file: str,
config_env: str,
no_prompts: bool,
s3_bucket: Optional[str],
s3_prefix: Optional[str],
):
self.stack_name = stack_name
self.region = region
self.profile = profile
self.config_file = config_file
self.config_env = config_env
self.no_prompts = no_prompts
self.s3_bucket = None
self.s3_prefix = None
self.s3_bucket = s3_bucket
self.s3_prefix = s3_prefix
self.cf_utils = None
self.s3_uploader = None
self.ecr_uploader = None
Expand Down Expand Up @@ -95,8 +107,10 @@ def parse_config_file(self):
self.region = config_options.get("region", None)
if not self.profile:
self.profile = config_options.get("profile", None)
self.s3_bucket = config_options.get("s3_bucket", None)
self.s3_prefix = config_options.get("s3_prefix", None)
if not self.s3_bucket:
self.s3_bucket = config_options.get("s3_bucket", None)
if not self.s3_prefix:
self.s3_prefix = config_options.get("s3_prefix", None)

def init_clients(self):
"""
Expand Down Expand Up @@ -142,8 +156,9 @@ def s3_prompts(self):
Guided prompts asking user to delete s3 artifacts
"""
# Note: s3_bucket and s3_prefix information is only
# available if a local toml file is present or if
# this information is obtained from the template resources and so if this
# available if it is provided as an option flag, a
# local toml file or if this information is obtained
# from the template resources and so if this
# information is not found, warn the user that S3 artifacts
# will need to be manually deleted.

Expand Down Expand Up @@ -319,12 +334,14 @@ def delete(self):
self.cf_utils.delete_stack(stack_name=self.stack_name, retain_resources=retain_resources)
self.cf_utils.wait_for_delete(self.stack_name)

# If s3_bucket information is not available, warn the user
# Warn the user that s3 information is missing and to use --s3 options
if not self.s3_bucket:
LOG.debug("Cannot delete s3 files as no s3_bucket found")
LOG.debug("Cannot delete s3 objects as bucket is missing")
click.secho(
"\nWarning: s3_bucket and s3_prefix information could not be obtained from local config file"
" or cloudformation template, delete the s3 files manually if required",
"\nWarning: Cannot resolve s3 bucket information from command options"
" , local config file or cloudformation template. Please use"
" --s3-bucket next time and"
" delete s3 files manually if required.",
fg="yellow",
)

Expand Down
8 changes: 6 additions & 2 deletions samcli/commands/sync/command.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,10 @@
HELP_TEXT = """
[Beta Feature] Update/Sync local artifacts to AWS
By default, the sync command runs a full stack update. You can specify --code or --watch to switch modes
By default, the sync command runs a full stack update. You can specify --code or --watch to switch modes.
\b
Sync also supports nested stacks and nested stack resources. For example
$ sam sync --code --stack-name {stack} --resource-id {ChildStack}/{ResourceId}
"""

SYNC_CONFIRMATION_TEXT = """
Expand Down Expand Up @@ -113,7 +116,8 @@
@click.option(
"--resource-id",
multiple=True,
help="Sync code for all the resources with the ID.",
help="Sync code for all the resources with the ID. To sync a resource within a nested stack, "
"use the following pattern {ChildStack}/{logicalId}.",
)
@click.option(
"--resource",
Expand Down
1 change: 1 addition & 0 deletions samcli/lib/build/app_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -413,6 +413,7 @@ def _build_lambda_image(self, function_name: str, metadata: Dict, architecture:
"buildargs": docker_build_args,
"decode": True,
"platform": get_docker_platform(architecture),
"rm": True,
}
if docker_build_target:
build_args["target"] = cast(str, docker_build_target)
Expand Down
80 changes: 47 additions & 33 deletions samcli/lib/deploy/deployer.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

import sys
import math
from collections import OrderedDict
from collections import OrderedDict, deque
import logging
import time
from datetime import datetime
Expand Down Expand Up @@ -53,7 +53,7 @@
}
)

DESCRIBE_STACK_EVENTS_TABLE_HEADER_NAME = "CloudFormation events from stack operations"
DESCRIBE_STACK_EVENTS_TABLE_HEADER_NAME = "CloudFormation events from stack operations (refresh every {} seconds)"

DESCRIBE_CHANGESET_FORMAT_STRING = "{Operation:<{0}} {LogicalResourceId:<{1}} {ResourceType:<{2}} {Replacement:<{3}}"
DESCRIBE_CHANGESET_DEFAULT_ARGS = OrderedDict(
Expand Down Expand Up @@ -360,6 +360,7 @@ def get_last_event_time(self, stack_name):
format_string=DESCRIBE_STACK_EVENTS_FORMAT_STRING,
format_kwargs=DESCRIBE_STACK_EVENTS_DEFAULT_ARGS,
table_header=DESCRIBE_STACK_EVENTS_TABLE_HEADER_NAME,
display_sleep=True,
)
def describe_stack_events(self, stack_name, time_stamp_marker, **kwargs):
"""
Expand All @@ -377,45 +378,50 @@ def describe_stack_events(self, stack_name, time_stamp_marker, **kwargs):
try:
# Only sleep if there have been no retry_attempts
time.sleep(0 if retry_attempts else self.client_sleep)
describe_stacks_resp = self._client.describe_stacks(StackName=stack_name)
paginator = self._client.get_paginator("describe_stack_events")
response_iterator = paginator.paginate(StackName=stack_name)
stack_status = describe_stacks_resp["Stacks"][0]["StackStatus"]
latest_time_stamp_marker = time_stamp_marker
new_events = deque() # event buffer
for event_items in response_iterator:
for event in event_items["StackEvents"]:
if event["EventId"] not in events and utc_to_timestamp(event["Timestamp"]) > time_stamp_marker:
events.add(event["EventId"])
latest_time_stamp_marker = max(
latest_time_stamp_marker, utc_to_timestamp(event["Timestamp"])
)
row_color = self.deploy_color.get_stack_events_status_color(status=event["ResourceStatus"])
pprint_columns(
columns=[
event["ResourceStatus"],
event["ResourceType"],
event["LogicalResourceId"],
event.get("ResourceStatusReason", "-"),
],
width=kwargs["width"],
margin=kwargs["margin"],
format_string=DESCRIBE_STACK_EVENTS_FORMAT_STRING,
format_args=kwargs["format_args"],
columns_dict=DESCRIBE_STACK_EVENTS_DEFAULT_ARGS.copy(),
color=row_color,
)
# Skip already shown old event entries
elif utc_to_timestamp(event["Timestamp"]) <= time_stamp_marker:
time_stamp_marker = latest_time_stamp_marker
# Skip already shown old event entries or former deployments
if utc_to_timestamp(event["Timestamp"]) <= time_stamp_marker:
break
else: # go to next loop if not break from inside loop
time_stamp_marker = latest_time_stamp_marker # update marker if all events are new
if event["EventId"] not in events:
events.add(event["EventId"])
# Events are in reverse chronological order
# Pushing in front reverse the order to display older events first
new_events.appendleft(event)
else: # go to next loop (page of events) if not break from inside loop
continue
break # reached here only if break from inner loop!

if self._check_stack_not_in_progress(stack_status):
stack_change_in_progress = False
break
# Override timestamp marker with latest event (last in deque)
if len(new_events) > 0:
time_stamp_marker = utc_to_timestamp(new_events[-1]["Timestamp"])

for new_event in new_events:
row_color = self.deploy_color.get_stack_events_status_color(status=new_event["ResourceStatus"])
pprint_columns(
columns=[
new_event["ResourceStatus"],
new_event["ResourceType"],
new_event["LogicalResourceId"],
new_event.get("ResourceStatusReason", "-"),
],
width=kwargs["width"],
margin=kwargs["margin"],
format_string=DESCRIBE_STACK_EVENTS_FORMAT_STRING,
format_args=kwargs["format_args"],
columns_dict=DESCRIBE_STACK_EVENTS_DEFAULT_ARGS.copy(),
color=row_color,
)
# Skip events from another consecutive deployment triggered during sleep by another process
if self._is_root_stack_event(new_event) and self._check_stack_not_in_progress(
new_event["ResourceStatus"]
):
stack_change_in_progress = False
break

# Reset retry attempts if iteration is a success to use client_sleep again
retry_attempts = 0
except botocore.exceptions.ClientError as ex:
Expand All @@ -426,6 +432,14 @@ def describe_stack_events(self, stack_name, time_stamp_marker, **kwargs):
# Sleep in exponential backoff mode
time.sleep(math.pow(self.backoff, retry_attempts))

@staticmethod
def _is_root_stack_event(event: Dict) -> bool:
    """Return True when *event* was emitted by the root stack itself.

    A root-stack event has resource type AWS::CloudFormation::Stack and its
    logical/physical resource identifiers equal the stack's own name/ARN;
    nested-stack resource events differ in at least one of these fields.
    """
    is_stack_resource = event["ResourceType"] == "AWS::CloudFormation::Stack"
    logical_matches_stack_name = event["StackName"] == event["LogicalResourceId"]
    physical_matches_stack_id = event["PhysicalResourceId"] == event["StackId"]
    return bool(is_stack_resource and logical_matches_stack_name and physical_matches_stack_id)

@staticmethod
def _check_stack_not_in_progress(status: str) -> bool:
    """Return True when *status* is a settled CloudFormation state (no IN_PROGRESS marker)."""
    still_in_progress = "IN_PROGRESS" in status
    return not still_in_progress
Expand Down
2 changes: 1 addition & 1 deletion samcli/runtime_config.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
"app_template_repo_commit": "663f90c5ea51fbc5264924b51311934e1af1403b"
"app_template_repo_commit": "ae437cbf533bd156e7a5aab337bc077d40c0cc19"
}
Loading

0 comments on commit 7e2d795

Please sign in to comment.