From b79e2c00f63f48afe93f8eea96fe8c2241e1faad Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Thu, 6 Oct 2022 17:21:43 -0700 Subject: [PATCH 1/8] feat: skip sync confirmation for subsequent runs (#4276) * feat: skip sync confirmation for subsequent runs * make black --- samcli/cli/global_config.py | 23 +++++++++++++++++++++++ samcli/commands/sync/command.py | 15 ++++++++++++--- tests/unit/cli/test_global_config.py | 12 ++++++++++++ 3 files changed, 47 insertions(+), 3 deletions(-) diff --git a/samcli/cli/global_config.py b/samcli/cli/global_config.py index c2b72366e4..5095654c35 100644 --- a/samcli/cli/global_config.py +++ b/samcli/cli/global_config.py @@ -13,6 +13,7 @@ import click +from samcli.lib.utils.hash import str_checksum LOG = logging.getLogger(__name__) @@ -32,6 +33,7 @@ class DefaultEntry: INSTALLATION_ID = ConfigEntry("installationId", None) LAST_VERSION_CHECK = ConfigEntry("lastVersionCheck", None) TELEMETRY = ConfigEntry("telemetryEnabled", "SAM_CLI_TELEMETRY") + ACCELERATE_OPT_IN_STACKS = ConfigEntry("accelerateOptInStacks", None) class Singleton(type): @@ -413,3 +415,24 @@ def last_version_check(self) -> Optional[float]: @last_version_check.setter def last_version_check(self, value: float): self.set_value(DefaultEntry.LAST_VERSION_CHECK, value) + + def is_accelerate_opt_in_stack(self, template_file: str, stack_name: str) -> bool: + """ + Returns True, if current folder with stack name is been accepted to use sam sync before. + Returns False, if this is first time that user runs sam sync with current folder and given stack name. + """ + accelerate_opt_in_stacks = ( + self.get_value(DefaultEntry.ACCELERATE_OPT_IN_STACKS, value_type=list, default=[]) or [] + ) + return str_checksum(template_file + stack_name) in accelerate_opt_in_stacks + + def set_accelerate_opt_in_stack(self, template_file: str, stack_name: str) -> None: + """ + Stores current folder and stack name into config, so that next time that user runs sam sync, they don't need + to accept warning message again. + """ + accelerate_opt_in_stacks = ( + self.get_value(DefaultEntry.ACCELERATE_OPT_IN_STACKS, value_type=list, default=[]) or [] + ) + accelerate_opt_in_stacks.append(str_checksum(template_file + stack_name)) + self.set_value(DefaultEntry.ACCELERATE_OPT_IN_STACKS, accelerate_opt_in_stacks) diff --git a/samcli/commands/sync/command.py b/samcli/commands/sync/command.py index 287701d20b..c5cbf8962a 100644 --- a/samcli/commands/sync/command.py +++ b/samcli/commands/sync/command.py @@ -66,10 +66,13 @@ $ sam sync --code --stack-name {stack} --resource-id {ChildStack}/{ResourceId} """ -SYNC_CONFIRMATION_TEXT = """ +SYNC_INFO_TEXT = """ The SAM CLI will use the AWS Lambda, Amazon API Gateway, and AWS StepFunctions APIs to upload your code without performing a CloudFormation deployment. This will cause drift in your CloudFormation stack. **The sync command should only be used against a development stack**. +""" + +SYNC_CONFIRMATION_TEXT = """ Confirm that you are synchronizing a development stack. 
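The opt-in record added above is keyed by a checksum of the template path concatenated with the stack name and stored as a list under the accelerateOptInStacks key of the SAM CLI global config. A minimal, self-contained sketch of that bookkeeping (illustrative only, not SAM CLI source: hashlib stands in for samcli.lib.utils.hash.str_checksum, and an in-memory list stands in for the persisted config file):

    # Illustrative sketch of the opt-in check added by this patch -- not SAM CLI source.
    import hashlib

    def _checksum(text: str) -> str:
        # Stand-in for samcli.lib.utils.hash.str_checksum
        return hashlib.sha256(text.encode("utf-8")).hexdigest()

    class InMemoryGlobalConfig:
        def __init__(self) -> None:
            # Persisted under "accelerateOptInStacks" in the real global config
            self._opt_in_stacks = []

        def is_accelerate_opt_in_stack(self, template_file: str, stack_name: str) -> bool:
            return _checksum(template_file + stack_name) in self._opt_in_stacks

        def set_accelerate_opt_in_stack(self, template_file: str, stack_name: str) -> None:
            self._opt_in_stacks.append(_checksum(template_file + stack_name))

    config = InMemoryGlobalConfig()
    assert not config.is_accelerate_opt_in_stack("template.yaml", "my-stack")  # first run: prompt shown
    config.set_accelerate_opt_in_stack("template.yaml", "my-stack")            # user accepted the warning once
    assert config.is_accelerate_opt_in_stack("template.yaml", "my-stack")      # later runs: prompt skipped

Because the key is derived from both the template path and the stack name, the warning is still shown again if either of them changes.
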
Enter Y to proceed with the command, or enter N to cancel: @@ -232,13 +235,19 @@ def do_cli( """ Implementation of the ``cli`` method """ + from samcli.cli.global_config import GlobalConfig from samcli.lib.utils import osutils from samcli.commands.build.build_context import BuildContext from samcli.commands.package.package_context import PackageContext from samcli.commands.deploy.deploy_context import DeployContext - if not click.confirm(Colored().yellow(SYNC_CONFIRMATION_TEXT), default=True): - return + global_config = GlobalConfig() + if not global_config.is_accelerate_opt_in_stack(template_file, stack_name): + if not click.confirm(Colored().yellow(SYNC_INFO_TEXT + SYNC_CONFIRMATION_TEXT), default=True): + return + global_config.set_accelerate_opt_in_stack(template_file, stack_name) + else: + LOG.info(Colored().yellow(SYNC_INFO_TEXT)) s3_bucket_name = s3_bucket or manage_stack(profile=profile, region=region) diff --git a/tests/unit/cli/test_global_config.py b/tests/unit/cli/test_global_config.py index ca0c0d461c..81bb058ed8 100644 --- a/tests/unit/cli/test_global_config.py +++ b/tests/unit/cli/test_global_config.py @@ -1,4 +1,5 @@ import os +import uuid from unittest.mock import ANY, MagicMock, patch, mock_open from unittest import TestCase from samcli.cli.global_config import ConfigEntry, DefaultEntry, GlobalConfig @@ -308,3 +309,14 @@ def test_get_last_version_check(self): def test_set_last_version_check(self): GlobalConfig().last_version_check = 123.4 self.assertEqual(GlobalConfig()._config_data[DefaultEntry.LAST_VERSION_CHECK.config_key], 123.4) + + def test_is_accelerate_opt_in_stack_return_false_first_time(self): + self.assertFalse(GlobalConfig().is_accelerate_opt_in_stack(uuid.uuid4().hex, uuid.uuid4().hex)) + + def test_is_accelerate_opt_in_stack_return_true_second_time(self): + template_path = uuid.uuid4().hex + stack_name = uuid.uuid4().hex + self.assertFalse(GlobalConfig().is_accelerate_opt_in_stack(template_path, stack_name)) + + GlobalConfig().set_accelerate_opt_in_stack(template_path, stack_name) + self.assertTrue(GlobalConfig().is_accelerate_opt_in_stack(template_path, stack_name)) From 4d75d4fea7d47b714956a170d47ed68792720c4c Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Thu, 6 Oct 2022 18:03:35 -0700 Subject: [PATCH 2/8] feat: use watch and code flags at the same time (#4273) * feat: allow using --code with --watch for sam sync * catch code only sync early for not running infra sync at all * fix unit tests * add unit tests * add integration tests & update help text --- samcli/commands/sync/command.py | 19 +++- samcli/lib/sync/sync_flow_factory.py | 27 +++-- samcli/lib/sync/watch_manager.py | 33 ++++-- tests/integration/sync/test_sync_watch.py | 100 ++++++++++++++++++ .../before/template-python-code-only.yaml | 28 +++++ tests/unit/commands/sync/test_command.py | 28 ++++- tests/unit/lib/sync/test_watch_manager.py | 44 +++++++- 7 files changed, 255 insertions(+), 24 deletions(-) create mode 100644 tests/integration/testdata/sync/code/before/template-python-code-only.yaml diff --git a/samcli/commands/sync/command.py b/samcli/commands/sync/command.py index c5cbf8962a..fac24d7838 100644 --- a/samcli/commands/sync/command.py +++ b/samcli/commands/sync/command.py @@ -63,7 +63,15 @@ By default, the sync command runs a full stack update. You can specify --code or --watch to switch modes. \b Sync also supports nested stacks and nested stack resources. 
For example -$ sam sync --code --stack-name {stack} --resource-id {ChildStack}/{ResourceId} + +$ sam sync --code --stack-name {stack} --resource-id \\ +{ChildStack}/{ResourceId} + +Running --watch with --code option will provide a way to run code synchronization only, that will speed up start time +and will skip any template change. Please remember to update your deployed stack by running without --code option. + +$ sam sync --code --watch --stack-name {stack} + """ SYNC_INFO_TEXT = """ @@ -93,14 +101,12 @@ is_flag=True, help="Sync code resources. This includes Lambda Functions, API Gateway, and Step Functions.", cls=ClickMutex, - incompatible_params=["watch"], ) @click.option( "--watch", is_flag=True, help="Watch local files and automatically sync with remote.", cls=ClickMutex, - incompatible_params=["code"], ) @click.option( "--resource-id", @@ -340,7 +346,7 @@ def do_cli( with SyncContext(dependency_layer, build_context.build_dir, build_context.cache_dir): if watch: execute_watch( - template_file, build_context, package_context, deploy_context, dependency_layer + template_file, build_context, package_context, deploy_context, dependency_layer, code ) elif code: execute_code_sync( @@ -425,6 +431,7 @@ def execute_watch( package_context: "PackageContext", deploy_context: "DeployContext", auto_dependency_layer: bool, + skip_infra_syncs: bool, ): """Start sync watch execution @@ -439,7 +446,9 @@ def execute_watch( deploy_context : DeployContext DeployContext """ - watch_manager = WatchManager(template, build_context, package_context, deploy_context, auto_dependency_layer) + watch_manager = WatchManager( + template, build_context, package_context, deploy_context, auto_dependency_layer, skip_infra_syncs + ) watch_manager.start() diff --git a/samcli/lib/sync/sync_flow_factory.py b/samcli/lib/sync/sync_flow_factory.py index 8f0ffe617c..992c7da4c0 100644 --- a/samcli/lib/sync/sync_flow_factory.py +++ b/samcli/lib/sync/sync_flow_factory.py @@ -2,6 +2,9 @@ import logging from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, cast +from botocore.exceptions import ClientError + +from samcli.commands.exceptions import InvalidStackNameException from samcli.lib.bootstrap.nested_stack.nested_stack_manager import NestedStackManager from samcli.lib.providers.provider import Stack, get_resource_by_id, ResourceIdentifier from samcli.lib.sync.flows.auto_dependency_layer_sync_flow import AutoDependencyLayerParentSyncFlow @@ -16,7 +19,11 @@ from samcli.lib.sync.flows.rest_api_sync_flow import RestApiSyncFlow from samcli.lib.sync.flows.http_api_sync_flow import HttpApiSyncFlow from samcli.lib.sync.flows.stepfunctions_sync_flow import StepFunctionsSyncFlow -from samcli.lib.utils.boto_utils import get_boto_resource_provider_with_config, get_boto_client_provider_with_config +from samcli.lib.utils.boto_utils import ( + get_boto_resource_provider_with_config, + get_boto_client_provider_with_config, + get_client_error_code, +) from samcli.lib.utils.cloudformation import get_resource_summaries from samcli.lib.utils.resources import ( AWS_SERVERLESS_FUNCTION, @@ -110,11 +117,19 @@ def load_physical_id_mapping(self) -> None: region=self._deploy_context.region, profile=self._deploy_context.profile ) - resource_mapping = get_resource_summaries( - boto_resource_provider=resource_provider, - boto_client_provider=client_provider, - stack_name=self._deploy_context.stack_name, - ) + try: + resource_mapping = get_resource_summaries( + boto_resource_provider=resource_provider, + 
boto_client_provider=client_provider, + stack_name=self._deploy_context.stack_name, + ) + except ClientError as ex: + error_code = get_client_error_code(ex) + if error_code == "ValidationError": + raise InvalidStackNameException( + f"Invalid --stack-name parameter. Stack with id '{self._deploy_context.stack_name}' does not exist" + ) from ex + raise ex # get the resource_id -> physical_id mapping self._physical_id_mapping = { diff --git a/samcli/lib/sync/watch_manager.py b/samcli/lib/sync/watch_manager.py index 0171c4bbc2..ab4627721d 100644 --- a/samcli/lib/sync/watch_manager.py +++ b/samcli/lib/sync/watch_manager.py @@ -44,6 +44,7 @@ class WatchManager: _waiting_infra_sync: bool _color: Colored _auto_dependency_layer: bool + _skip_infra_syncs: bool def __init__( self, @@ -52,6 +53,7 @@ def __init__( package_context: "PackageContext", deploy_context: "DeployContext", auto_dependency_layer: bool, + skip_infra_syncs: bool, ): """Manager for sync watch execution logic. This manager will observe template and its code resources. @@ -74,6 +76,7 @@ def __init__( self._package_context = package_context self._deploy_context = deploy_context self._auto_dependency_layer = auto_dependency_layer + self._skip_infra_syncs = skip_infra_syncs self._sync_flow_factory = None self._sync_flow_executor = ContinuousSyncFlowExecutor() @@ -89,6 +92,14 @@ def queue_infra_sync(self) -> None: """Queue up an infra structure sync. A simple bool flag is suffice """ + if self._skip_infra_syncs: + LOG.info( + self._color.yellow( + "You have enabled the --code flag, which limits sam sync updates to code changes only. To do a " + "complete infrastructure and code sync, remove the --code flag." + ) + ) + return self._waiting_infra_sync = True def _update_stacks(self) -> None: @@ -166,6 +177,9 @@ def start(self) -> None: # This is a wrapper for gracefully handling Ctrl+C or other termination cases. try: self.queue_infra_sync() + if self._skip_infra_syncs: + self._start_sync() + LOG.info(self._color.green("Sync watch started.")) self._start() except KeyboardInterrupt: LOG.info(self._color.cyan("Shutting down sync watch...")) @@ -181,6 +195,16 @@ def _start(self) -> None: self._execute_infra_sync() time.sleep(1) + def _start_sync(self): + """ + Update stacks and populate all triggers + """ + self._observer.unschedule_all() + self._update_stacks() + self._add_template_triggers() + self._add_code_triggers() + self._start_code_sync() + def _execute_infra_sync(self) -> None: LOG.info(self._color.cyan("Queued infra sync. Waiting for in progress code syncs to complete...")) self._waiting_infra_sync = False @@ -188,6 +212,7 @@ def _execute_infra_sync(self) -> None: try: LOG.info(self._color.cyan("Starting infra sync.")) self._execute_infra_context() + LOG.info(self._color.green("Infra sync completed.")) except Exception as e: LOG.error( self._color.red("Failed to sync infra. Code sync is paused until template/stack is fixed."), @@ -197,15 +222,9 @@ def _execute_infra_sync(self) -> None: self._observer.unschedule_all() self._add_template_triggers() else: - # Update stacks and repopulate triggers # Trigger are not removed until infra sync is finished as there # can be code changes during infra sync. 
- self._observer.unschedule_all() - self._update_stacks() - self._add_template_triggers() - self._add_code_triggers() - self._start_code_sync() - LOG.info(self._color.green("Infra sync completed.")) + self._start_sync() def _on_code_change_wrapper(self, resource_id: ResourceIdentifier) -> OnChangeCallback: """Wrapper method that generates a callback for code changes. diff --git a/tests/integration/sync/test_sync_watch.py b/tests/integration/sync/test_sync_watch.py index cc26de3ffb..2385ca6d36 100644 --- a/tests/integration/sync/test_sync_watch.py +++ b/tests/integration/sync/test_sync_watch.py @@ -468,3 +468,103 @@ def test_sync_watch_code(self): lambda_response = json.loads(self._get_lambda_response(lambda_function)) self.assertIn("extra_message", lambda_response) self.assertEqual(lambda_response.get("message"), "7") + + +@parameterized_class( + [{"runtime": "python", "dependency_layer": True}, {"runtime": "python", "dependency_layer": False}] +) +class TestSyncWatchCodeOnly(TestSyncWatchBase): + template_before = str(Path("code", "before", "template-python-code-only.yaml")) + + def run_initial_infra_validation(self) -> None: + """Runs initial infra validation after deployment is completed""" + self.stack_resources = self._get_stacks(self.stack_name) + lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) + for lambda_function in lambda_functions: + lambda_response = json.loads(self._get_lambda_response(lambda_function)) + self.assertIn("extra_message", lambda_response) + self.assertEqual(lambda_response.get("message"), "7") + + def test_sync_watch_code(self): + # first kill previously started sync process + kill_process(self.watch_process) + # start new one with code only + template_path = self.test_dir.joinpath(self.template_before) + sync_command_list = self.get_sync_command_list( + template_file=str(template_path), + code=True, + watch=True, + dependency_layer=self.dependency_layer, + stack_name=self.stack_name, + parameter_overrides="Parameter=Clarity", + image_repository=self.ecr_repo_name, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key, + tags="integ=true clarity=yes foo_bar=baz", + ) + self.watch_process = start_persistent_process(sync_command_list, cwd=self.test_dir) + read_until_string(self.watch_process, "Enter Y to proceed with the command, or enter N to cancel:\n") + + self.watch_process.stdin.write("y\n") + read_until_string(self.watch_process, "\x1b[32mSync watch started.\x1b[0m\n", timeout=30) + + self.stack_resources = self._get_stacks(self.stack_name) + + if self.dependency_layer: + # Test update manifest + layer_contents = self.get_dependency_layer_contents_from_arn(self.stack_resources, "python", 1) + self.assertNotIn("requests", layer_contents) + self.update_file( + self.test_dir.joinpath("code", "after", "function", "requirements.txt"), + self.test_dir.joinpath("code", "before", "function", "requirements.txt"), + ) + read_until_string( + self.watch_process, + "\x1b[32mFinished syncing Function Layer Reference Sync HelloWorldFunction.\x1b[0m\n", + timeout=45, + ) + layer_contents = self.get_dependency_layer_contents_from_arn(self.stack_resources, "python", 2) + self.assertIn("requests", layer_contents) + + # Test Lambda Function + self.update_file( + self.test_dir.joinpath("code", "after", "function", "app.py"), + self.test_dir.joinpath("code", "before", "function", "app.py"), + ) + read_until_string( + self.watch_process, "\x1b[32mFinished syncing Lambda Function HelloWorldFunction.\x1b[0m\n", timeout=30 + ) + lambda_functions = 
self.stack_resources.get(AWS_LAMBDA_FUNCTION) + for lambda_function in lambda_functions: + lambda_response = json.loads(self._get_lambda_response(lambda_function)) + self.assertIn("extra_message", lambda_response) + self.assertEqual(lambda_response.get("message"), "8") + + # Test Lambda Layer + self.update_file( + self.test_dir.joinpath("code", "after", "layer", "layer_method.py"), + self.test_dir.joinpath("code", "before", "layer", "layer_method.py"), + ) + read_until_string( + self.watch_process, + "\x1b[32mFinished syncing Function Layer Reference Sync HelloWorldFunction.\x1b[0m\n", + timeout=30, + ) + lambda_functions = self.stack_resources.get(AWS_LAMBDA_FUNCTION) + for lambda_function in lambda_functions: + lambda_response = json.loads(self._get_lambda_response(lambda_function)) + self.assertIn("extra_message", lambda_response) + self.assertEqual(lambda_response.get("message"), "9") + + # updating infra should not trigger an infra sync + self.update_file( + self.test_dir.joinpath(f"infra/template-{self.runtime}-after.yaml"), + self.test_dir.joinpath(f"code/before/template-{self.runtime}-code-only.yaml"), + ) + + read_until_string( + self.watch_process, + "\x1b[33mYou have enabled the --code flag, which limits sam sync updates to code changes only. To do a " + "complete infrastructure and code sync, remove the --code flag.\x1b[0m\n", + timeout=30, + ) diff --git a/tests/integration/testdata/sync/code/before/template-python-code-only.yaml b/tests/integration/testdata/sync/code/before/template-python-code-only.yaml new file mode 100644 index 0000000000..f788e2e386 --- /dev/null +++ b/tests/integration/testdata/sync/code/before/template-python-code-only.yaml @@ -0,0 +1,28 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 + +Globals: + Function: + Timeout: 10 + +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: function/ + Handler: app.lambda_handler + Runtime: python3.7 + Layers: + - Ref: HelloWorldLayer + Tracing: Active + + HelloWorldLayer: + Type: AWS::Serverless::LayerVersion + Properties: + LayerName: HelloWorldLayer + Description: Hello World Layer + ContentUri: layer/ + CompatibleRuntimes: + - python3.7 + Metadata: + BuildMethod: python3.7 diff --git a/tests/unit/commands/sync/test_command.py b/tests/unit/commands/sync/test_command.py index dbabc332dd..e9da1588d2 100644 --- a/tests/unit/commands/sync/test_command.py +++ b/tests/unit/commands/sync/test_command.py @@ -1,3 +1,4 @@ +import itertools import os from unittest import TestCase from unittest.mock import ANY, MagicMock, Mock, patch @@ -229,7 +230,7 @@ def test_watch_must_succeed_sync( execute_watch_mock, click_mock, ): - + skip_infra_syncs = watch and code build_context_mock = Mock() BuildContextMock.return_value.__enter__.return_value = build_context_mock package_context_mock = Mock() @@ -328,7 +329,12 @@ def test_watch_must_succeed_sync( on_failure=None, ) execute_watch_mock.assert_called_once_with( - self.template_file, build_context_mock, package_context_mock, deploy_context_mock, auto_dependency_layer + self.template_file, + build_context_mock, + package_context_mock, + deploy_context_mock, + auto_dependency_layer, + skip_infra_syncs, ) @parameterized.expand([(True, False, True, False), (True, False, False, True)]) @@ -650,21 +656,33 @@ def setUp(self) -> None: self.package_context = MagicMock() self.deploy_context = MagicMock() - @parameterized.expand([(True,), (False,)]) + @parameterized.expand(itertools.product([True, False], [True, False])) 
@patch("samcli.commands.sync.command.click") @patch("samcli.commands.sync.command.WatchManager") def test_execute_watch( self, + code, auto_dependency_layer, watch_manager_mock, click_mock, ): + skip_infra_syncs = code execute_watch( - self.template_file, self.build_context, self.package_context, self.deploy_context, auto_dependency_layer + self.template_file, + self.build_context, + self.package_context, + self.deploy_context, + auto_dependency_layer, + skip_infra_syncs, ) watch_manager_mock.assert_called_once_with( - self.template_file, self.build_context, self.package_context, self.deploy_context, auto_dependency_layer + self.template_file, + self.build_context, + self.package_context, + self.deploy_context, + auto_dependency_layer, + skip_infra_syncs, ) watch_manager_mock.return_value.start.assert_called_once_with() diff --git a/tests/unit/lib/sync/test_watch_manager.py b/tests/unit/lib/sync/test_watch_manager.py index f7e7a3acaf..7eaec90f65 100644 --- a/tests/unit/lib/sync/test_watch_manager.py +++ b/tests/unit/lib/sync/test_watch_manager.py @@ -21,7 +21,7 @@ def setUp(self) -> None: self.package_context = MagicMock() self.deploy_context = MagicMock() self.watch_manager = WatchManager( - self.template, self.build_context, self.package_context, self.deploy_context, False + self.template, self.build_context, self.package_context, self.deploy_context, False, False ) def tearDown(self) -> None: @@ -227,6 +227,48 @@ def test__start(self, sleep_mock): self.path_observer.start.assert_called_once_with() + @patch("samcli.lib.sync.watch_manager.time.sleep") + def test_start_code_only(self, sleep_mock): + sleep_mock.side_effect = KeyboardInterrupt() + + stop_code_sync_mock = MagicMock() + execute_infra_sync_mock = MagicMock() + + update_stacks_mock = MagicMock() + add_template_trigger_mock = MagicMock() + add_code_trigger_mock = MagicMock() + start_code_sync_mock = MagicMock() + + self.watch_manager._stop_code_sync = stop_code_sync_mock + self.watch_manager._execute_infra_context = execute_infra_sync_mock + self.watch_manager._update_stacks = update_stacks_mock + self.watch_manager._add_template_triggers = add_template_trigger_mock + self.watch_manager._add_code_triggers = add_code_trigger_mock + self.watch_manager._start_code_sync = start_code_sync_mock + + self.watch_manager._skip_infra_syncs = True + with self.assertRaises(KeyboardInterrupt): + self.watch_manager._start() + + self.path_observer.start.assert_called_once_with() + self.assertFalse(self.watch_manager._waiting_infra_sync) + + stop_code_sync_mock.assert_not_called() + execute_infra_sync_mock.assert_not_called() + update_stacks_mock.assert_not_called() + add_template_trigger_mock.assert_not_called() + add_code_trigger_mock.assert_not_called() + start_code_sync_mock.assert_not_called() + + self.path_observer.unschedule_all.assert_not_called() + + self.path_observer.start.assert_called_once_with() + + def test_start_code_only_infra_sync_not_set(self): + self.watch_manager._skip_infra_syncs = True + self.watch_manager.queue_infra_sync() + self.assertFalse(self.watch_manager._waiting_infra_sync) + @patch("samcli.lib.sync.watch_manager.time.sleep") def test__start_infra_exception(self, sleep_mock): sleep_mock.side_effect = KeyboardInterrupt() From 45aa0f33326b4897c05ac29609866a27385e1af9 Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Fri, 7 Oct 2022 12:54:48 -0700 Subject: [PATCH 3/8] chore: fix sam sync integ tests after latest changes (#4286) --- 
tests/integration/sync/test_sync_watch.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/integration/sync/test_sync_watch.py b/tests/integration/sync/test_sync_watch.py index 2385ca6d36..463bdcbe2f 100644 --- a/tests/integration/sync/test_sync_watch.py +++ b/tests/integration/sync/test_sync_watch.py @@ -503,9 +503,6 @@ def test_sync_watch_code(self): tags="integ=true clarity=yes foo_bar=baz", ) self.watch_process = start_persistent_process(sync_command_list, cwd=self.test_dir) - read_until_string(self.watch_process, "Enter Y to proceed with the command, or enter N to cancel:\n") - - self.watch_process.stdin.write("y\n") read_until_string(self.watch_process, "\x1b[32mSync watch started.\x1b[0m\n", timeout=30) self.stack_resources = self._get_stacks(self.stack_name) From eb93774d66dd1d76c5435a542a4ea81e27e347bd Mon Sep 17 00:00:00 2001 From: aws-sam-cli-bot <46753707+aws-sam-cli-bot@users.noreply.github.com> Date: Tue, 11 Oct 2022 13:52:30 -0700 Subject: [PATCH 4/8] chore: Bump SamTranslator version to 1.53.0 (#4294) --- requirements/base.txt | 2 +- requirements/reproducible-linux.txt | 8 ++-- .../models/function_with_event_filtering.yaml | 42 ++++++++++++++++++- 3 files changed, 46 insertions(+), 6 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 8fb0546d37..54c036be57 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ boto3>=1.19.5,==1.* jmespath~=0.10.0 PyYAML~=5.3 cookiecutter~=2.1.1 -aws-sam-translator==1.52.0 +aws-sam-translator==1.53.0 #docker minor version updates can include breaking changes. Auto update micro version only. docker~=4.2.0 dateparser~=1.0 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index 10893a9c5d..f20d2d3d34 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -17,10 +17,10 @@ aws-lambda-builders==1.19.0 \ --hash=sha256:38fcb9023df09f3c39504498cf45a213a29b176be5cec36126b13b77604731bd \ --hash=sha256:61e3f1e77b62ab72b97f822c09385ce16dc0e5478b52de7296a79570be41be73 # via aws-sam-cli (setup.py) -aws-sam-translator==1.52.0 \ - --hash=sha256:1b5f156695a6772170aefe3f5fe91882c7f2e1cf1ccaa9bb32502c5d96fcc3ea \ - --hash=sha256:4a0331e8de2a18cae9ced0b4d6f2da5b575f17b2cc5f6e1ba0c86e3878875ef1 \ - --hash=sha256:bde0c471b1dcefb13246a20977c81bc91786c30277cdd733b8846acf97759a9d +aws-sam-translator==1.53.0 \ + --hash=sha256:392ed4f5fb08f72cb68a8800f0bc278d2a3b6609bd1ac66bfcdeaaa94cdc18e5 \ + --hash=sha256:84d780ad82f1a176e2f5d4c397749d1e71214cc97ee7cccd50f823fd7c7e7cdf \ + --hash=sha256:85252646cf123642d08442137b60445e69e30bfd2f8b663b1202b20ab3782b10 # via aws-sam-cli (setup.py) backports-zoneinfo==0.2.1 \ --hash=sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf \ diff --git a/tests/functional/commands/validate/lib/models/function_with_event_filtering.yaml b/tests/functional/commands/validate/lib/models/function_with_event_filtering.yaml index 5aabd16149..32af438134 100644 --- a/tests/functional/commands/validate/lib/models/function_with_event_filtering.yaml +++ b/tests/functional/commands/validate/lib/models/function_with_event_filtering.yaml @@ -29,13 +29,53 @@ Resources: } } }' - MySqsQueue: + MySqsEvent: Type: SQS Properties: Queue: !GetAtt MySqsQueue.Arn FilterCriteria: Filters: - Pattern: '{"name": "value"}' + MSKEvent: + Type: MSK + Properties: + StartingPosition: LATEST + Stream: arn:aws:kafka:us-west-2:012345678901:cluster/mycluster/6cc0432b-8618-4f44-bccc-e1fbd8fb7c4d-2 + Topics: + - 
"MyDummyTestTopic" + FilterCriteria: + Filters: + - Pattern: '{"name": "value"}' + MyKafkaEvent: + Type: SelfManagedKafka + Properties: + KafkaBootstrapServers: + - "abc.xyz.com:9092" + Topics: + - "Topic1" + SourceAccessConfigurations: + - Type: SASL_SCRAM_512_AUTH + URI: arn:aws:secretsmanager:us-west-2:123456789012:secret:my-path/my-secret-name-1a2b3c + - Type: VPC_SUBNET + URI: subnet:subnet-12345 + - Type: VPC_SECURITY_GROUP + URI: security_group:sg-67890 + FilterCriteria: + Filters: + - Pattern: '{"name": "value"}' + MyMQQueue: + Type: MQ + Properties: + Broker: arn:aws:mq:us-east-2:123456789012:broker:MyBroker:b-1234a5b6-78cd-901e-2fgh-3i45j6k178l9 + Queues: + - "Queue1" + SourceAccessConfigurations: + - Type: BASIC_AUTH + URI: arn:aws:secretsmanager:us-west-2:123456789012:secret:my-path/my-secret-name-1a2b3c + SecretsManagerKmsKeyId: 1abc23d4-567f-8ab9-cde0-1fab234c5d67 + FilterCriteria: + Filters: + - Pattern: '{"name": "value"}' KinesisStream: Type: AWS::Kinesis::Stream From cf055e9b1fd16f0cb6d263fb95c864387b19baed Mon Sep 17 00:00:00 2001 From: Kevin Richardson Date: Wed, 12 Oct 2022 12:16:19 -0400 Subject: [PATCH 5/8] Fix pipeline template generation from custom git repositories (#4207) * Fix pipeline template generation from custom git repositories This commit restructures the `_generate_from_custom_location()` function to ensure `self._generate_from_pipeline_template()` is called within the context manager under which the custom git repository is cloned. Fixes https://github.com/aws/aws-sam-cli/issues/4206 * Test that template generation workflow works with remote git repo Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Co-authored-by: Lucas <12496191+lucashuy@users.noreply.github.com> --- .../pipeline/init/interactive_init_flow.py | 19 +++++-- .../init/test_initeractive_init_flow.py | 55 +++++++++++++++++++ 2 files changed, 68 insertions(+), 6 deletions(-) diff --git a/samcli/commands/pipeline/init/interactive_init_flow.py b/samcli/commands/pipeline/init/interactive_init_flow.py index 2c8159a1e4..85ef019895 100644 --- a/samcli/commands/pipeline/init/interactive_init_flow.py +++ b/samcli/commands/pipeline/init/interactive_init_flow.py @@ -114,13 +114,20 @@ def _generate_from_custom_location( pipeline_template_git_location: str = click.prompt("Template Git location") if os.path.exists(pipeline_template_git_location): pipeline_template_local_dir = Path(pipeline_template_git_location) - else: - with osutils.mkdir_temp(ignore_errors=True) as tempdir: - tempdir_path = Path(tempdir) - pipeline_template_local_dir = _clone_pipeline_templates( - pipeline_template_git_location, tempdir_path, CUSTOM_PIPELINE_TEMPLATE_REPO_LOCAL_NAME - ) + return self._select_and_generate_from_pipeline_template(pipeline_template_local_dir) + with osutils.mkdir_temp(ignore_errors=True) as tempdir: + tempdir_path = Path(tempdir) + pipeline_template_local_dir = _clone_pipeline_templates( + pipeline_template_git_location, tempdir_path, CUSTOM_PIPELINE_TEMPLATE_REPO_LOCAL_NAME + ) + return self._select_and_generate_from_pipeline_template(pipeline_template_local_dir) + def _select_and_generate_from_pipeline_template(self, pipeline_template_local_dir: Path) -> List[str]: + """ + Determine if the specified custom pipeline template directory contains + more than one template, prompt the user to choose one if it does, and + then generate the template and return the list of files. 
+ """ if os.path.exists(pipeline_template_local_dir.joinpath("manifest.yaml")): pipeline_templates_manifest: PipelineTemplatesManifest = _read_app_pipeline_templates_manifest( pipeline_template_local_dir diff --git a/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py b/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py index 5d9311d869..8836e7f37d 100644 --- a/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py +++ b/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py @@ -323,6 +323,61 @@ def test_generate_pipeline_configuration_file_from_custom_remote_pipeline_templa overwrite_if_exists=True, ) + @patch("samcli.lib.cookiecutter.template.cookiecutter") + @patch("samcli.commands.pipeline.init.interactive_init_flow.InteractiveFlowCreator.create_flow") + @patch("samcli.commands.pipeline.init.interactive_init_flow._prompt_pipeline_template") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.commands.pipeline.init.interactive_init_flow.osutils") + @patch("samcli.commands.pipeline.init.interactive_init_flow.click") + @patch("samcli.lib.cookiecutter.question.click") + def test_generate_pipeline_configuration_file_from_custom_remote_pipeline_template_with_manifest_happy_case( + self, + questions_click_mock, + init_flow_click_mock, + osutils_mock, + git_clone_mock, + prompt_pipeline_template_mock, + create_interactive_flow_mock, + cookiecutter_mock, + ): + # setup + questions_click_mock.prompt.return_value = "2" # Custom pipeline templates + init_flow_click_mock.prompt.return_value = "https://github.com/any-custom-pipeline-template-repo.git" + clone_temp_dir = "/tmp/any/dir" + cookiecutter_output_dir_mock = "/tmp/any/dir2" + osutils_mock.mkdir_temp.return_value.__enter__ = Mock( + side_effect=[clone_temp_dir, cookiecutter_output_dir_mock] + ) + osutils_mock.mkdir_temp.return_value.__exit__ = Mock() + templates_path = Path(__file__).parent.parent.parent.parent.parent.joinpath( + Path("integration", "testdata", "pipeline", "custom_template_with_manifest") + ) + weather_templates_path = templates_path.joinpath("weather") + git_clone_mock.return_value = templates_path + # Mock that the user selected the 'weather' pipeline. The click-based mocking + # approach can't be used here because it was already used to pass a fake Git + # repository address for `init_flow_click_mock`. 
+ prompt_pipeline_template_mock.return_value = Mock(location="weather") + interactive_flow_mock = Mock() + create_interactive_flow_mock.return_value = interactive_flow_mock + cookiecutter_context_mock = {"key": "value"} + interactive_flow_mock.run.return_value = cookiecutter_context_mock + + # trigger + InteractiveInitFlow(allow_bootstrap=False).do_interactive() + + # verify + git_clone_mock.assert_called_once_with(Path(clone_temp_dir), "custom-pipeline-template", replace_existing=True) + create_interactive_flow_mock.assert_called_once_with(str(weather_templates_path.joinpath("questions.json"))) + interactive_flow_mock.run.assert_called_once() + cookiecutter_mock.assert_called_once_with( + template=str(weather_templates_path), + output_dir=cookiecutter_output_dir_mock, + no_input=True, + extra_context=cookiecutter_context_mock, + overwrite_if_exists=True, + ) + @patch("samcli.lib.cookiecutter.question.click") def test_prompt_cicd_provider_will_not_prompt_if_the_list_of_providers_has_only_one_provider(self, click_mock): gitlab_provider = Mock(id="gitlab", display_name="Gitlab CI/CD") From 8ee3c6aabc707d2308e6d46a46601dc03bebfdd5 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 12 Oct 2022 14:43:03 -0700 Subject: [PATCH 6/8] feat: updating app templates repo hash with (cc4dfaca55de3adb10de2d458a04fd9618984b77) (#4301) Co-authored-by: GitHub Action --- samcli/runtime_config.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/runtime_config.json b/samcli/runtime_config.json index 37dbacbb92..ba98d81598 100644 --- a/samcli/runtime_config.json +++ b/samcli/runtime_config.json @@ -1,3 +1,3 @@ { - "app_template_repo_commit": "cd87689f13a5a3e42a90e66e4eaba476a0f9b906" + "app_template_repo_commit": "cc4dfaca55de3adb10de2d458a04fd9618984b77" } From 393db0c57862051c3ad8fe76513081166bd977e4 Mon Sep 17 00:00:00 2001 From: Xia Zhao <78883180+xazhao@users.noreply.github.com> Date: Wed, 12 Oct 2022 17:33:16 -0700 Subject: [PATCH 7/8] Feat/sam pipeline OIDC (#4270) * Add OIDC support (#3995) * support for GitHub Actions OIDC * GitHub Actions oidc support * Unit tests for GitHub Actions OIDC support * Add tests for GitHub Actions OIDC support * Add comments for new methods and fix formatting * use choice for identity provider instead of flag * Create pipeline oidc provider class * Remove unused methods * Add defaults for OIDC URL and ClientID * Get subject claim from object and use Session to get Client * Mock Session during test * Create method to share common logic * Add AllowedValues to CreateNewOidcProvider * Remove duplicate calls to samconfig * Add text to prompt * fix formatting * Add links to technical methods comments * Support GitLab OIDC for pipeline bootstrap (#4037) * Support GitLab OIDC for pipeline bootstrap * Fix incorrect variable mappings * use constants instead of strings * Support Bitbucket OIDC in pipeline bootstrap (#4041) * Support Bitbucket OIDC in pipeline bootstrap * Add documentation * Make capitlization consistent * Add comments to method * Re-use permissions provider answer for init * Make help text shorter * add constant inside constructor * Fix comments * move variables into class * change saved iam parameter * change help message * use dataclass * Optimize interactive flow and fix bugs * Support bootstrap stack update * fix duplicate bootstrap bug * bug fix * Update UX and fix unit tests * Improve UX and add more tests * recover the link * Address comments * Re-organize imports * 
Use AWS Partition instead of hardcoding aws * Address some comments * Recover imports Co-authored-by: sidhujus <105385029+sidhujus@users.noreply.github.com> --- requirements/base.txt | 1 + requirements/reproducible-linux.txt | 102 ++++++ samcli/commands/pipeline/bootstrap/cli.py | 235 ++++++++++++- .../pipeline/bootstrap/guided_context.py | 253 ++++++++++++-- .../pipeline/bootstrap/oidc_config.py | 81 +++++ .../bootstrap/pipeline_oidc_provider.py | 162 +++++++++ .../pipeline/init/interactive_init_flow.py | 40 ++- samcli/lib/cookiecutter/template.py | 1 + samcli/lib/pipeline/bootstrap/resource.py | 28 ++ samcli/lib/pipeline/bootstrap/stage.py | 170 +++++++++- .../pipeline/bootstrap/stage_resources.yaml | 102 ++++-- .../lib/utils/managed_cloudformation_stack.py | 107 ++++++ .../pipeline/test_bootstrap_command.py | 79 ++++- .../integration/pipeline/test_init_command.py | 1 + .../commands/pipeline/bootstrap/test_cli.py | 321 +++++++++++++++++- .../pipeline/bootstrap/test_guided_context.py | 293 +++++++++++++++- .../pipeline/bootstrap/test_oidc_config.py | 65 ++++ .../init/test_initeractive_init_flow.py | 12 +- tests/unit/lib/cookiecutter/test_template.py | 2 +- .../pipeline/bootstrap/test_environment.py | 204 +++++++++-- .../test_managed_cloudformation_stack.py | 35 +- 21 files changed, 2155 insertions(+), 139 deletions(-) create mode 100644 samcli/commands/pipeline/bootstrap/oidc_config.py create mode 100644 samcli/commands/pipeline/bootstrap/pipeline_oidc_provider.py create mode 100644 tests/unit/commands/pipeline/bootstrap/test_oidc_config.py diff --git a/requirements/base.txt b/requirements/base.txt index 54c036be57..f999942982 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -15,6 +15,7 @@ serverlessrepo==0.1.10 aws_lambda_builders==1.19.0 tomlkit==0.7.2 watchdog==2.1.2 +pyopenssl==22.0.0 # See https://github.com/pallets/markupsafe/issues/286 but breaking change in # MarkupSafe causes jinja to break which caused flask to break (which we depend on) diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index f20d2d3d34..a2ed607ed3 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -61,6 +61,72 @@ certifi==2020.12.5 \ --hash=sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c \ --hash=sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830 # via requests +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + 
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + 
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via cryptography chardet==3.0.4 \ --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 @@ -82,6 +148,34 @@ cookiecutter==2.1.1 \ --hash=sha256:9f3ab027cec4f70916e28f03470bdb41e637a3ad354b4d65c765d93aad160022 \ --hash=sha256:f3982be8d9c53dac1261864013fdec7f83afd2e42ede6f6dd069c5e149c540d5 # via aws-sam-cli (setup.py) +cryptography==38.0.1 \ + --hash=sha256:0297ffc478bdd237f5ca3a7dc96fc0d315670bfa099c04dc3a4a2172008a405a \ + --hash=sha256:10d1f29d6292fc95acb597bacefd5b9e812099d75a6469004fd38ba5471a977f \ + --hash=sha256:16fa61e7481f4b77ef53991075de29fc5bacb582a1244046d2e8b4bb72ef66d0 \ + --hash=sha256:194044c6b89a2f9f169df475cc167f6157eb9151cc69af8a2a163481d45cc407 \ + --hash=sha256:1db3d807a14931fa317f96435695d9ec386be7b84b618cc61cfa5d08b0ae33d7 \ + --hash=sha256:3261725c0ef84e7592597606f6583385fed2a5ec3909f43bc475ade9729a41d6 \ + --hash=sha256:3b72c360427889b40f36dc214630e688c2fe03e16c162ef0aa41da7ab1455153 \ + --hash=sha256:3e3a2599e640927089f932295a9a247fc40a5bdf69b0484532f530471a382750 \ + --hash=sha256:3fc26e22840b77326a764ceb5f02ca2d342305fba08f002a8c1f139540cdfaad \ + --hash=sha256:5067ee7f2bce36b11d0e334abcd1ccf8c541fc0bbdaf57cdd511fdee53e879b6 \ + --hash=sha256:52e7bee800ec869b4031093875279f1ff2ed12c1e2f74923e8f49c916afd1d3b \ + --hash=sha256:64760ba5331e3f1794d0bcaabc0d0c39e8c60bf67d09c93dc0e54189dfd7cfe5 \ + --hash=sha256:765fa194a0f3372d83005ab83ab35d7c5526c4e22951e46059b8ac678b44fa5a \ + --hash=sha256:79473cf8a5cbc471979bd9378c9f425384980fcf2ab6534b18ed7d0d9843987d \ + --hash=sha256:896dd3a66959d3a5ddcfc140a53391f69ff1e8f25d93f0e2e7830c6de90ceb9d \ + --hash=sha256:89ed49784ba88c221756ff4d4755dbc03b3c8d2c5103f6d6b4f83a0fb1e85294 \ + --hash=sha256:ac7e48f7e7261207d750fa7e55eac2d45f720027d5703cd9007e9b37bbb59ac0 \ + --hash=sha256:ad7353f6ddf285aeadfaf79e5a6829110106ff8189391704c1d8801aa0bae45a \ + --hash=sha256:b0163a849b6f315bf52815e238bc2b2346604413fa7c1601eea84bcddb5fb9ac \ + --hash=sha256:b6c9b706316d7b5a137c35e14f4103e2115b088c412140fdbd5f87c73284df61 \ + --hash=sha256:c2e5856248a416767322c8668ef1845ad46ee62629266f84a8f007a317141013 \ + --hash=sha256:ca9f6784ea96b55ff41708b92c3f6aeaebde4c560308e5fbbd3173fbc466e94e \ + --hash=sha256:d1a5bd52d684e49a36582193e0b89ff267704cd4025abefb9e26803adeb3e5fb \ + --hash=sha256:d3971e2749a723e9084dd507584e2a2761f78ad2c638aa31e80bc7a15c9db4f9 \ + --hash=sha256:d4ef6cc305394ed669d4d9eebf10d3a101059bdcf2669c366ec1d14e4fb227bd \ + --hash=sha256:d9e69ae01f99abe6ad646947bba8941e896cb3aa805be2597a0400e0764b5818 + # via pyopenssl dateparser==1.0.0 \ --hash=sha256:159cc4e01a593706a15cd4e269a0b3345edf3aef8bf9278a57dac8adf5bf1e4a \ --hash=sha256:17202df32c7a36e773136ff353aa3767e987f8b3e27374c39fd21a30a803d6f8 @@ 
-201,6 +295,14 @@ markupsafe==2.0.1 \ # via # aws-sam-cli (setup.py) # jinja2 +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pyopenssl==22.0.0 \ + --hash=sha256:660b1b1425aac4a1bea1d94168a85d99f0b3144c869dd4390d27629d0087f1bf \ + --hash=sha256:ea252b38c87425b64116f808355e8da644ef9b07e429398bfece610f893ee2e0 + # via aws-sam-cli (setup.py) pyrsistent==0.17.3 \ --hash=sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e # via jsonschema diff --git a/samcli/commands/pipeline/bootstrap/cli.py b/samcli/commands/pipeline/bootstrap/cli.py index c7aeb6049e..b80aa44127 100644 --- a/samcli/commands/pipeline/bootstrap/cli.py +++ b/samcli/commands/pipeline/bootstrap/cli.py @@ -9,13 +9,19 @@ from samcli.cli.cli_config_file import configuration_option, TomlProvider from samcli.cli.main import pass_context, common_options, aws_creds_options, print_cmdline_args + +from samcli.commands.pipeline.bootstrap.pipeline_oidc_provider import PipelineOidcProvider from samcli.lib.config.samconfig import SamConfig -from samcli.lib.pipeline.bootstrap.stage import Stage +from samcli.commands.pipeline.bootstrap.oidc_config import ( + BitbucketOidcConfig, + GitHubOidcConfig, + OidcConfig, + GitLabOidcConfig, +) + from samcli.lib.telemetry.metric import track_command -from samcli.lib.utils.colors import Colored from samcli.lib.utils.version_checker import check_newer_version -from .guided_context import GuidedContext -from ..external_links import CONFIG_AWS_CRED_ON_CICD_URL +from .guided_context import BITBUCKET, GITHUB_ACTIONS, GITLAB, IAM, OPEN_ID_CONNECT SHORT_HELP = "Generates the required AWS resources to connect your CI/CD system." @@ -26,6 +32,8 @@ PIPELINE_CONFIG_DIR = os.path.join(".aws-sam", "pipeline") PIPELINE_CONFIG_FILENAME = "pipelineconfig.toml" +PERMISSIONS_PROVIDERS = [OPEN_ID_CONNECT, IAM] +OPENID_CONNECT = "OpenID Connect (OIDC)" @click.command("bootstrap", short_help=SHORT_HELP, help=HELP_TEXT, context_settings=dict(max_content_width=120)) @@ -87,6 +95,66 @@ is_flag=True, help="Prompt to confirm if the resources are to be deployed.", ) +@click.option( + "--permissions-provider", + default=IAM, + required=False, + type=click.Choice(PERMISSIONS_PROVIDERS), + help="Choose a permissions provider to assume the pipeline execution role. Default is to use an IAM User.", +) +@click.option( + "--oidc-provider-url", + help="The URL of the OIDC provider.", + required=False, +) +@click.option("--oidc-client-id", help="The client ID configured to use with the OIDC provider.", required=False) +@click.option( + "--github-org", + help="The GitHub organization that the repository belongs to. " + "If there is no organization enter the Username of the repository owner instead " + "Only used if using GitHub Actions OIDC for user permissions", + required=False, +) +@click.option( + "--github-repo", + help="The name of the GitHub Repository that deployments will occur from. " + "Only used if using GitHub Actions OIDC for permissions", + required=False, +) +@click.option( + "--deployment-branch", + help="The name of the branch that deployments will occur from. 
" + "Only used if using GitHub Actions OIDC for permissions", + required=False, +) +@click.option( + "--oidc-provider", + help="The name of the CI/CD system that will be used for OIDC permissions " + "we currently only support GitLab, GitHub, and Bitbucket", + type=click.Choice([GITHUB_ACTIONS, GITLAB, BITBUCKET]), + required=False, +) +@click.option( + "--gitlab-group", + help="The GitLab group that the repository belongs to. Only used if using GitLab OIDC for permissions", + required=False, +) +@click.option( + "--gitlab-project", + help="The GitLab project name. Only used if using GitLab OIDC for permissions", + required=False, +) +@click.option( + "--bitbucket-repo-uuid", + help="The UUID of the Bitbucket repository. Only used if using Bitbucket OIDC for permissions. " + "Found at https://bitbucket.org///admin/addon/admin/pipelines/openid-connect", + required=False, +) +@click.option( + "--cicd-provider", + help="The CICD platform for the SAM Pipeline", + required=False, +) @common_options @aws_creds_options @pass_context @@ -106,6 +174,17 @@ def cli( confirm_changeset: bool, config_file: Optional[str], config_env: Optional[str], + permissions_provider: Optional[str], + oidc_provider_url: Optional[str], + oidc_client_id: Optional[str], + github_org: Optional[str], + github_repo: Optional[str], + deployment_branch: Optional[str], + oidc_provider: Optional[str], + gitlab_group: Optional[str], + gitlab_project: Optional[str], + bitbucket_repo_uuid: Optional[str], + cicd_provider: Optional[str], ) -> None: """ `sam pipeline bootstrap` command entry point @@ -124,6 +203,17 @@ def cli( confirm_changeset=confirm_changeset, config_file=config_env, config_env=config_file, + permissions_provider=permissions_provider, + oidc_provider_url=oidc_provider_url, + oidc_client_id=oidc_client_id, + github_org=github_org, + github_repo=github_repo, + deployment_branch=deployment_branch, + oidc_provider=oidc_provider, + gitlab_group=gitlab_group, + gitlab_project=gitlab_project, + bitbucket_repo_uuid=bitbucket_repo_uuid, + cicd_provider=cicd_provider, ) # pragma: no cover @@ -141,14 +231,81 @@ def do_cli( confirm_changeset: bool, config_file: Optional[str], config_env: Optional[str], + permissions_provider: Optional[str], + oidc_provider_url: Optional[str], + oidc_client_id: Optional[str], + github_org: Optional[str], + github_repo: Optional[str], + deployment_branch: Optional[str], + oidc_provider: Optional[str], + gitlab_group: Optional[str], + gitlab_project: Optional[str], + bitbucket_repo_uuid: Optional[str], + cicd_provider: Optional[str], standalone: bool = True, ) -> None: """ implementation of `sam pipeline bootstrap` command """ - if not pipeline_user_arn: + from samcli.commands.pipeline.external_links import CONFIG_AWS_CRED_ON_CICD_URL + from samcli.lib.utils.colors import Colored + from samcli.commands.pipeline.bootstrap.guided_context import GuidedContext + from samcli.lib.pipeline.bootstrap.stage import ( + BITBUCKET_REPO_UUID, + DEPLOYMENT_BRANCH, + GITHUB_ORG, + GITHUB_REPO, + GITLAB_GROUP, + GITLAB_PROJECT, + OIDC_CLIENT_ID, + OIDC_PROVIDER, + OIDC_PROVIDER_URL, + PERMISSIONS_PROVIDER, + OIDC_SUPPORTED_PROVIDER, + Stage, + ) + + if not pipeline_user_arn and not permissions_provider == OPEN_ID_CONNECT: pipeline_user_arn = _load_saved_pipeline_user_arn() + enable_oidc_option = False + if not cicd_provider or cicd_provider in OIDC_SUPPORTED_PROVIDER: + enable_oidc_option = True + oidc_provider = cicd_provider + + config_parameters = _load_config_values() + oidc_config = OidcConfig( + 
oidc_client_id=oidc_client_id, oidc_provider=oidc_provider, oidc_provider_url=oidc_provider_url + ) + gitlab_config = GitLabOidcConfig( + gitlab_group=gitlab_group, gitlab_project=gitlab_project, deployment_branch=deployment_branch + ) + github_config = GitHubOidcConfig( + github_org=github_org, github_repo=github_repo, deployment_branch=deployment_branch + ) + bitbucket_config = BitbucketOidcConfig(bitbucket_repo_uuid=bitbucket_repo_uuid) + if config_parameters: + saved_provider = config_parameters.get(PERMISSIONS_PROVIDER) + if saved_provider == OPENID_CONNECT: + permissions_provider = OPEN_ID_CONNECT + oidc_config.update_values( + oidc_provider=config_parameters.get(OIDC_PROVIDER), + oidc_provider_url=config_parameters.get(OIDC_PROVIDER_URL), + oidc_client_id=config_parameters.get(OIDC_CLIENT_ID), + ) + github_config.update_values( + github_org=config_parameters.get(GITHUB_ORG), + github_repo=config_parameters.get(GITHUB_REPO), + deployment_branch=config_parameters.get(DEPLOYMENT_BRANCH), + ) + gitlab_config.update_values( + gitlab_group=config_parameters.get(GITLAB_GROUP), + gitlab_project=config_parameters.get(GITLAB_PROJECT), + deployment_branch=config_parameters.get(DEPLOYMENT_BRANCH), + ) + bitbucket_config.update_values(bitbucket_repo_uuid=config_parameters.get(BITBUCKET_REPO_UUID)) + elif saved_provider == "AWS IAM": + permissions_provider = IAM if interactive: if standalone: click.echo( @@ -175,6 +332,12 @@ def do_cli( create_image_repository=create_image_repository, image_repository_arn=image_repository_arn, region=region, + permissions_provider=permissions_provider, + oidc_config=oidc_config, + github_config=github_config, + gitlab_config=gitlab_config, + bitbucket_config=bitbucket_config, + enable_oidc_option=enable_oidc_option, ) guided_context.run() stage_configuration_name = guided_context.stage_configuration_name @@ -186,6 +349,19 @@ def do_cli( image_repository_arn = guided_context.image_repository_arn region = guided_context.region profile = guided_context.profile + permissions_provider = guided_context.permissions_provider + + subject_claim = None + pipeline_oidc_provider: Optional[PipelineOidcProvider] = None + + if permissions_provider == OPEN_ID_CONNECT: + pipeline_oidc_provider = _get_pipeline_oidc_provider( + oidc_config=oidc_config, + github_config=github_config, + gitlab_config=gitlab_config, + bitbucket_config=bitbucket_config, + ) + subject_claim = pipeline_oidc_provider.get_subject_claim() if not stage_configuration_name: raise click.UsageError("Missing required parameter '--stage'") @@ -200,6 +376,11 @@ def do_cli( artifacts_bucket_arn=artifacts_bucket_arn, create_image_repository=create_image_repository, image_repository_arn=image_repository_arn, + oidc_provider_url=oidc_config.oidc_provider_url, + oidc_client_id=oidc_config.oidc_client_id, + permissions_provider=permissions_provider, + subject_claim=subject_claim, + pipeline_oidc_provider=pipeline_oidc_provider, ) bootstrapped: bool = environment.bootstrap(confirm_changeset=confirm_changeset) @@ -221,7 +402,7 @@ def do_cli( ) ) - if not environment.pipeline_user.is_user_provided: + if not environment.pipeline_user.is_user_provided and not environment.use_oidc_provider: click.secho( dedent( f"""\ @@ -233,6 +414,40 @@ def do_cli( ) +def _get_pipeline_oidc_provider( + oidc_config: OidcConfig, + github_config: GitHubOidcConfig, + gitlab_config: GitLabOidcConfig, + bitbucket_config: BitbucketOidcConfig, +) -> PipelineOidcProvider: + from samcli.commands.pipeline.bootstrap.pipeline_oidc_provider import ( + 
BitbucketOidcProvider, + GitHubOidcProvider, + GitLabOidcProvider, + ) + + if oidc_config.oidc_provider == GITHUB_ACTIONS: + github_oidc_params: dict = { + GitHubOidcProvider.GITHUB_ORG_PARAMETER_NAME: github_config.github_org, + GitHubOidcProvider.GITHUB_REPO_PARAMETER_NAME: github_config.github_repo, + GitHubOidcProvider.DEPLOYMENT_BRANCH_PARAMETER_NAME: github_config.deployment_branch, + } + return GitHubOidcProvider(github_oidc_params, oidc_config.get_oidc_parameters()) + if oidc_config.oidc_provider == GITLAB: + gitlab_oidc_params: dict = { + GitLabOidcProvider.GITLAB_PROJECT_PARAMETER_NAME: gitlab_config.gitlab_project, + GitLabOidcProvider.GITLAB_GROUP_PARAMETER_NAME: gitlab_config.gitlab_group, + GitLabOidcProvider.DEPLOYMENT_BRANCH_PARAMETER_NAME: gitlab_config.deployment_branch, + } + return GitLabOidcProvider(gitlab_oidc_params, oidc_config.get_oidc_parameters()) + if oidc_config.oidc_provider == BITBUCKET: + bitbucket_oidc_params: dict = { + BitbucketOidcProvider.BITBUCKET_REPO_UUID_PARAMETER_NAME: bitbucket_config.bitbucket_repo_uuid + } + return BitbucketOidcProvider(bitbucket_oidc_params, oidc_config.get_oidc_parameters()) + raise click.UsageError("Missing required parameter '--oidc-provider'") + + def _load_saved_pipeline_user_arn() -> Optional[str]: samconfig: SamConfig = SamConfig(config_dir=PIPELINE_CONFIG_DIR, filename=PIPELINE_CONFIG_FILENAME) if not samconfig.exists(): @@ -241,5 +456,13 @@ def _load_saved_pipeline_user_arn() -> Optional[str]: return config.get("pipeline_user") +def _load_config_values() -> Dict[str, str]: + samconfig: SamConfig = SamConfig(config_dir=PIPELINE_CONFIG_DIR, filename=PIPELINE_CONFIG_FILENAME) + if not samconfig.exists(): + return {} + config: Dict[str, str] = samconfig.get_all(cmd_names=_get_bootstrap_command_names(), section="parameters") + return config + + def _get_bootstrap_command_names() -> List[str]: return ["pipeline", "bootstrap"] diff --git a/samcli/commands/pipeline/bootstrap/guided_context.py b/samcli/commands/pipeline/bootstrap/guided_context.py index aaac37a2d9..c4436c253c 100644 --- a/samcli/commands/pipeline/bootstrap/guided_context.py +++ b/samcli/commands/pipeline/bootstrap/guided_context.py @@ -6,11 +6,18 @@ import sys from textwrap import dedent from typing import Optional, List, Tuple, Callable +from xmlrpc.client import boolean import click from botocore.credentials import EnvProvider from samcli.commands.exceptions import CredentialsError +from samcli.commands.pipeline.bootstrap.oidc_config import ( + BitbucketOidcConfig, + GitHubOidcConfig, + GitLabOidcConfig, + OidcConfig, +) from samcli.commands.pipeline.external_links import CONFIG_AWS_CRED_DOC_URL from samcli.lib.bootstrap.bootstrap import get_current_account_id from samcli.lib.utils.colors import Colored @@ -19,9 +26,33 @@ from samcli.lib.utils.profile import list_available_profiles +GITHUB_ACTIONS = "github-actions" +GITLAB = "gitlab" +BITBUCKET = "bitbucket-pipelines" +OPEN_ID_CONNECT = "oidc" +OIDC_SUPPORTED_PROVIDER = [GITHUB_ACTIONS, GITLAB, BITBUCKET] +IAM = "iam" + + class GuidedContext: + + SUPPORTED_OIDC_PROVIDERS = {"1": GITHUB_ACTIONS, "2": GITLAB, "3": BITBUCKET} + OIDC_PROVIDER_NAME_MAPPINGS = {GITHUB_ACTIONS: "GitHub Actions", GITLAB: "GitLab", BITBUCKET: "Bitbucket"} + # GitHub defaults: https://tinyurl.com/github-defaults + # GitLab defaults: https://docs.gitlab.com/ee/ci/cloud_services/aws/#add-the-identity-provider + DEFAULT_OIDC_URLS = { + GITHUB_ACTIONS: "https://token.actions.githubusercontent.com", + GITLAB: "https://gitlab.com", + 
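+        # Bitbucket OIDC provider URLs are workspace-specific, so no default URL or client ID is offered.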
BITBUCKET: None, + } + DEFAULT_CLIENT_IDS = {GITHUB_ACTIONS: "sts.amazonaws.com", GITLAB: "https://gitlab.com", BITBUCKET: None} + def __init__( self, + oidc_config: OidcConfig, + github_config: GitHubOidcConfig, + gitlab_config: GitLabOidcConfig, + bitbucket_config: BitbucketOidcConfig, profile: Optional[str] = None, stage_configuration_name: Optional[str] = None, pipeline_user_arn: Optional[str] = None, @@ -31,6 +62,8 @@ def __init__( create_image_repository: bool = False, image_repository_arn: Optional[str] = None, region: Optional[str] = None, + permissions_provider: Optional[str] = None, + enable_oidc_option: boolean = True, ) -> None: self.profile = profile self.stage_configuration_name = stage_configuration_name @@ -41,6 +74,12 @@ def __init__( self.create_image_repository = create_image_repository self.image_repository_arn = image_repository_arn self.region = region + self.permissions_provider = permissions_provider + self.oidc_config = oidc_config + self.github_config = github_config + self.gitlab_config = gitlab_config + self.bitbucket_config = bitbucket_config + self.enable_oidc_option = enable_oidc_option self.color = Colored() def _prompt_account_id(self) -> None: @@ -144,43 +183,181 @@ def _prompt_image_repository(self) -> None: else: self.create_image_repository = False + def _prompt_permissions_provider(self) -> None: + click.echo("Select a user permissions provider:") + click.echo("\t1 - IAM (default)") + click.echo("\t2 - OpenID Connect (OIDC)") + user_provider = click.prompt("Choice", type=click.Choice((["1", "2"])), show_default=False, default="1") + self.permissions_provider = OPEN_ID_CONNECT if user_provider == "2" else IAM + + def _prompt_oidc_provider(self) -> None: + click.echo("Select an OIDC provider:") + for (key, provider) in self.SUPPORTED_OIDC_PROVIDERS.items(): + click.echo("\t{key} - {provider}".format(key=key, provider=self.OIDC_PROVIDER_NAME_MAPPINGS[provider])) + oidc_provider = click.prompt( + "Choice", + type=click.Choice((list(self.SUPPORTED_OIDC_PROVIDERS))), + show_default=False, + ) + self.oidc_config.oidc_provider = self.SUPPORTED_OIDC_PROVIDERS[oidc_provider] + + def _prompt_oidc_provider_url(self) -> None: + self.oidc_config.oidc_provider_url = click.prompt( + "Enter the URL of the OIDC provider", + type=click.STRING, + default=self.DEFAULT_OIDC_URLS[self.oidc_config.oidc_provider] if self.oidc_config.oidc_provider else None, + ) + + def _prompt_oidc_client_id(self) -> None: + self.oidc_config.oidc_client_id = click.prompt( + "Enter the OIDC client ID (sometimes called audience)", + type=click.STRING, + default=self.DEFAULT_CLIENT_IDS[self.oidc_config.oidc_provider] if self.oidc_config.oidc_provider else None, + ) + + def _prompt_subject_claim(self) -> None: + if self.oidc_config.oidc_provider == GITHUB_ACTIONS: + if not self.github_config.github_org: + self._prompt_github_org() + if not self.github_config.github_repo: + self._prompt_github_repo() + if not self.github_config.deployment_branch: + self._prompt_deployment_branch() + elif self.oidc_config.oidc_provider == GITLAB: + if not self.gitlab_config.gitlab_group: + self._prompt_gitlab_group() + if not self.gitlab_config.gitlab_project: + self._prompt_gitlab_project() + if not self.gitlab_config.deployment_branch: + self._prompt_deployment_branch() + elif self.oidc_config.oidc_provider == BITBUCKET: + if not self.bitbucket_config.bitbucket_repo_uuid: + self._prompt_bitbucket_repo_uuid() + + def _prompt_bitbucket_repo_uuid(self) -> None: + self.bitbucket_config.bitbucket_repo_uuid = 
click.prompt( + "Enter the Bitbucket repository UUID", type=click.STRING + ) + + def _prompt_gitlab_group(self) -> None: + self.gitlab_config.gitlab_group = click.prompt( + "Enter the GitLab group that the code repository belongs to." + " If there is no group enter your username instead", + type=click.STRING, + ) + + def _prompt_gitlab_project(self) -> None: + self.gitlab_config.gitlab_project = click.prompt("Enter GitLab project name", type=click.STRING) + + def _prompt_github_org(self) -> None: + self.github_config.github_org = click.prompt( + "Enter the GitHub organization that the code repository belongs to." + " If there is no organization enter your username instead", + type=click.STRING, + ) + + def _prompt_github_repo(self) -> None: + self.github_config.github_repo = click.prompt("Enter GitHub repository name", type=click.STRING) + + def _prompt_deployment_branch(self) -> None: + deployment_branch = click.prompt( + "Enter the name of the branch that deployments will occur from", type=click.STRING, default="main" + ) + self.github_config.deployment_branch = deployment_branch + self.gitlab_config.deployment_branch = deployment_branch + + def _validate_oidc_provider_url(self) -> None: + while not self.oidc_config.oidc_provider_url: + click.echo("Please enter the URL of the OIDC provider") + self._prompt_oidc_provider_url() + while self.oidc_config.oidc_provider_url.find("https://") == -1: + click.echo("Please ensure the OIDC URL begins with 'https://'") + self._prompt_oidc_provider_url() + def _get_user_inputs(self) -> List[Tuple[str, Callable[[], None]]]: - return [ + inputs = [ (f"Account: {get_current_account_id(self.profile)}", self._prompt_account_id), (f"Stage configuration name: {self.stage_configuration_name}", self._prompt_stage_configuration_name), (f"Region: {self.region}", self._prompt_region_name), - ( - f"Pipeline user ARN: {self.pipeline_user_arn}" - if self.pipeline_user_arn - else "Pipeline user: [to be created]", - self._prompt_pipeline_user, - ), - ( - f"Pipeline execution role ARN: {self.pipeline_execution_role_arn}" - if self.pipeline_execution_role_arn - else "Pipeline execution role: [to be created]", - self._prompt_pipeline_execution_role, - ), - ( - f"CloudFormation execution role ARN: {self.cloudformation_execution_role_arn}" - if self.cloudformation_execution_role_arn - else "CloudFormation execution role: [to be created]", - self._prompt_cloudformation_execution_role, - ), - ( - f"Artifacts bucket ARN: {self.artifacts_bucket_arn}" - if self.artifacts_bucket_arn - else "Artifacts bucket: [to be created]", - self._prompt_artifacts_bucket, - ), - ( - f"ECR image repository ARN: {self.image_repository_arn}" - if self.image_repository_arn - else f"ECR image repository: [{'to be created' if self.create_image_repository else 'skipped'}]", - self._prompt_image_repository, - ), ] + if self.permissions_provider == OPEN_ID_CONNECT: + inputs.extend( + [ + ( + f"OIDC identity provider URL: {self.oidc_config.oidc_provider_url}", + self._prompt_oidc_provider_url, + ), + (f"OIDC client ID: {self.oidc_config.oidc_client_id}", self._prompt_oidc_client_id), + ] + ) + if self.oidc_config.oidc_provider == GITHUB_ACTIONS: + inputs.extend( + [ + (f"GitHub organization: {self.github_config.github_org}", self._prompt_github_org), + (f"GitHub repository: {self.github_config.github_repo}", self._prompt_github_repo), + (f"Deployment branch: {self.github_config.deployment_branch}", self._prompt_deployment_branch), + ] + ) + elif self.oidc_config.oidc_provider == GITLAB: + 
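+                # GitLab subject claims are built from the group, project and deployment branch
+                # (see GitLabOidcProvider.get_subject_claim).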
inputs.extend( + [ + (f"GitLab group: {self.gitlab_config.gitlab_group}", self._prompt_gitlab_group), + (f"GitLab project: {self.gitlab_config.gitlab_project}", self._prompt_gitlab_project), + (f"Deployment branch: {self.gitlab_config.deployment_branch}", self._prompt_deployment_branch), + ] + ) + elif self.oidc_config.oidc_provider == BITBUCKET: + inputs.extend( + [ + ( + f"Bitbucket repository UUID: {self.bitbucket_config.bitbucket_repo_uuid}", + self._prompt_bitbucket_repo_uuid, + ), + ] + ) + else: + inputs.extend( + [ + ( + f"Pipeline user ARN: {self.pipeline_user_arn}" + if self.pipeline_user_arn + else "Pipeline user: [to be created]", + self._prompt_pipeline_user, + ) + ] + ) + + inputs.extend( + [ + ( + f"Pipeline execution role ARN: {self.pipeline_execution_role_arn}" + if self.pipeline_execution_role_arn + else "Pipeline execution role: [to be created]", + self._prompt_pipeline_execution_role, + ), + ( + f"CloudFormation execution role ARN: {self.cloudformation_execution_role_arn}" + if self.cloudformation_execution_role_arn + else "CloudFormation execution role: [to be created]", + self._prompt_cloudformation_execution_role, + ), + ( + f"Artifacts bucket ARN: {self.artifacts_bucket_arn}" + if self.artifacts_bucket_arn + else "Artifacts bucket: [to be created]", + self._prompt_artifacts_bucket, + ), + ( + f"ECR image repository ARN: {self.image_repository_arn}" + if self.image_repository_arn + else f"ECR image repository: [{'to be created' if self.create_image_repository else 'skipped'}]", + self._prompt_image_repository, + ), + ] + ) + return inputs + def run(self) -> None: # pylint: disable=too-many-branches """ Runs an interactive questionnaire to prompt the user for the ARNs of the AWS resources(infrastructure) required @@ -201,7 +378,19 @@ def run(self) -> None: # pylint: disable=too-many-branches if not self.region: self._prompt_region_name() - if self.pipeline_user_arn: + if self.enable_oidc_option and not self.permissions_provider == OPEN_ID_CONNECT and not self.pipeline_user_arn: + self._prompt_permissions_provider() + + if self.permissions_provider == OPEN_ID_CONNECT: + if not self.oidc_config.oidc_provider: + self._prompt_oidc_provider() + if not self.oidc_config.oidc_provider_url: + self._prompt_oidc_provider_url() + self._validate_oidc_provider_url() + if not self.oidc_config.oidc_client_id: + self._prompt_oidc_client_id() + self._prompt_subject_claim() + elif self.pipeline_user_arn: click.echo(f"Pipeline IAM user ARN: {self.pipeline_user_arn}") else: self._prompt_pipeline_user() diff --git a/samcli/commands/pipeline/bootstrap/oidc_config.py b/samcli/commands/pipeline/bootstrap/oidc_config.py new file mode 100644 index 0000000000..776b69abe8 --- /dev/null +++ b/samcli/commands/pipeline/bootstrap/oidc_config.py @@ -0,0 +1,81 @@ +""" +Represents a pipeline OIDC provider +""" +from dataclasses import dataclass +from typing import Optional + + +@dataclass +class OidcConfig: + oidc_provider_url: Optional[str] + oidc_client_id: Optional[str] + oidc_provider: Optional[str] + + def get_oidc_parameters(self) -> dict: + return { + "oidc-provider-url": self.oidc_provider_url, + "oidc-client-id": self.oidc_client_id, + "oidc-provider": self.oidc_provider, + } + + def update_values( + self, oidc_provider: Optional[str], oidc_provider_url: Optional[str], oidc_client_id: Optional[str] + ) -> None: + self.oidc_provider = oidc_provider if oidc_provider else self.oidc_provider + self.oidc_provider_url = oidc_provider_url if oidc_provider_url else self.oidc_provider_url + 
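+        # A falsy argument leaves the previously stored value in place.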
self.oidc_client_id = oidc_client_id if oidc_client_id else self.oidc_client_id + + +@dataclass +class GitHubOidcConfig: + github_org: Optional[str] + github_repo: Optional[str] + deployment_branch: Optional[str] + + def get_oidc_parameters(self) -> dict: + return { + "github-org": self.github_org, + "github-repo": self.github_repo, + "deployment-branch": self.deployment_branch, + } + + def update_values( + self, github_org: Optional[str], github_repo: Optional[str], deployment_branch: Optional[str] + ) -> None: + self.github_org = github_org if github_org else self.github_org + self.github_repo = github_repo if github_repo else self.github_repo + self.deployment_branch = deployment_branch if deployment_branch else self.deployment_branch + + +@dataclass +class GitLabOidcConfig: + gitlab_group: Optional[str] + gitlab_project: Optional[str] + deployment_branch: Optional[str] + + def get_oidc_parameters(self) -> dict: + return { + "gitlab-group": self.gitlab_group, + "gitlab-project": self.gitlab_project, + "deployment-branch": self.deployment_branch, + } + + def update_values( + self, gitlab_group: Optional[str], gitlab_project: Optional[str], deployment_branch: Optional[str] + ) -> None: + self.gitlab_group = gitlab_group if gitlab_group else self.gitlab_group + self.gitlab_project = gitlab_project if gitlab_project else self.gitlab_project + self.deployment_branch = deployment_branch if deployment_branch else self.deployment_branch + + +@dataclass +class BitbucketOidcConfig: + bitbucket_repo_uuid: Optional[str] + + def get_oidc_parameters(self) -> dict: + return { + "bitbucket-repo-uuid": self.bitbucket_repo_uuid, + } + + def update_values(self, bitbucket_repo_uuid: Optional[str]) -> None: + self.bitbucket_repo_uuid = bitbucket_repo_uuid if bitbucket_repo_uuid else self.bitbucket_repo_uuid diff --git a/samcli/commands/pipeline/bootstrap/pipeline_oidc_provider.py b/samcli/commands/pipeline/bootstrap/pipeline_oidc_provider.py new file mode 100644 index 0000000000..4b901e79f4 --- /dev/null +++ b/samcli/commands/pipeline/bootstrap/pipeline_oidc_provider.py @@ -0,0 +1,162 @@ +""" +Represents a pipeline OIDC provider +""" +from abc import abstractmethod +from typing import List +import click +from samcli.commands.pipeline.bootstrap.guided_context import BITBUCKET, GITHUB_ACTIONS, GITLAB + +from samcli.lib.config.samconfig import SamConfig + + +class PipelineOidcProvider: + + PROVIDER_URL_PARAMETER = "oidc-provider-url" + CLIENT_ID_PARAMETER = "oidc-client-id" + OPENID_CONNECT = "OpenID Connect (OIDC)" + + def __init__(self, oidc_parameters: dict, oidc_parameter_names: List[str], oidc_provider_name: str) -> None: + self.oidc_parameters = oidc_parameters + self.oidc_parameter_names = [self.PROVIDER_URL_PARAMETER, self.CLIENT_ID_PARAMETER] + oidc_parameter_names + self.oidc_provider_name = oidc_provider_name + self.verify_parameters() + + def verify_parameters(self) -> None: + """ + Makes sure that all required parameters have been provided + ------- + """ + error_string = "" + for parameter_name in self.oidc_parameter_names: + if not self.oidc_parameters[parameter_name]: + error_string += f"Missing required parameter '--{parameter_name}'\n" + if error_string: + raise click.UsageError("\n" + error_string) + + def save_values(self, samconfig: SamConfig, cmd_names: List[str], section: str) -> None: + """ + Saves provided values into config file so they can be reused for future calls to bootstrap + """ + for parameter_name in self.oidc_parameter_names: + samconfig.put( + cmd_names=cmd_names, + 
section=section, + key=parameter_name.replace("-", "_"), + value=self.oidc_parameters[parameter_name], + ) + samconfig.put(cmd_names=cmd_names, section=section, key="oidc_provider", value=self.oidc_provider_name) + samconfig.put(cmd_names=cmd_names, section=section, key="permissions_provider", value=self.OPENID_CONNECT) + + @abstractmethod + def get_subject_claim(self) -> str: + pass + + +class GitHubOidcProvider(PipelineOidcProvider): + """ + Represents a GitHub Actions OIDC provider + https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect + ---------- + subject_claim_parameters: dict + Parameters specific to building the subject claim for this provider. + oidc_parameters: dict + Parameters common to all providers. + """ + + GITHUB_ORG_PARAMETER_NAME = "github-org" + GITHUB_REPO_PARAMETER_NAME = "github-repo" + DEPLOYMENT_BRANCH_PARAMETER_NAME = "deployment-branch" + + def __init__(self, subject_claim_parameters: dict, oidc_parameters: dict) -> None: + all_oidc_parameters = {**oidc_parameters, **subject_claim_parameters} + all_oidc_parameter_names = [ + self.GITHUB_ORG_PARAMETER_NAME, + self.GITHUB_REPO_PARAMETER_NAME, + self.DEPLOYMENT_BRANCH_PARAMETER_NAME, + ] + super().__init__(all_oidc_parameters, all_oidc_parameter_names, GITHUB_ACTIONS) + + def get_subject_claim(self) -> str: + """ + Returns the subject claim that will be used to establish trust between the OIDC provider and AWS. + To read more about OIDC claims see the following: https://openid.net/specs/openid-connect-core-1_0.html#Claims + https://tinyurl.com/github-oidc-token + In GitHubs case when using the official OIDC action to assume a role the audience claim will always be + sts.amazon.aws so we must use the subject claim https://tinyurl.com/github-oidc-claim + ------- + """ + org = self.oidc_parameters["github-org"] + repo = self.oidc_parameters["github-repo"] + branch = self.oidc_parameters["deployment-branch"] + return f"repo:{org}/{repo}:ref:refs/heads/{branch}" + + +class GitLabOidcProvider(PipelineOidcProvider): + """ + Represents a GitLab OIDC provider + https://docs.gitlab.com/ee/integration/openid_connect_provider.html + ---------- + subject_claim_parameters: dict + Parameters specific to building the subject claim for this provider. + oidc_parameters: dict + Parameters common to all providers. + """ + + GITLAB_PROJECT_PARAMETER_NAME = "gitlab-project" + GITLAB_GROUP_PARAMETER_NAME = "gitlab-group" + DEPLOYMENT_BRANCH_PARAMETER_NAME = "deployment-branch" + + def __init__(self, subject_claim_parameters: dict, oidc_parameters: dict) -> None: + all_oidc_parameters = {**oidc_parameters, **subject_claim_parameters} + all_oidc_parameter_names = [ + self.GITLAB_PROJECT_PARAMETER_NAME, + self.GITLAB_GROUP_PARAMETER_NAME, + self.DEPLOYMENT_BRANCH_PARAMETER_NAME, + ] + super().__init__(all_oidc_parameters, all_oidc_parameter_names, GITLAB) + + def get_subject_claim(self) -> str: + """ + Returns the subject claim that will be used to establish trust between the OIDC provider and AWS. 
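+        For example, a group my-group, project my-project and deployment branch main (illustrative
+        values) produce the claim project_path:my-group/my-project:ref_type:branch:ref:main.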
+ To read more about OIDC claims see the following: https://openid.net/specs/openid-connect-core-1_0.html#Claims + https://docs.gitlab.com/ee/ci/cloud_services/aws/#configure-a-role-and-trust + To learn more about configuring a role to work with GitLab OIDC through claims see the following + https://docs.gitlab.com/ee/ci/cloud_services/index.html#configure-a-conditional-role-with-oidc-claims + ------- + """ + group = self.oidc_parameters["gitlab-group"] + project = self.oidc_parameters["gitlab-project"] + branch = self.oidc_parameters["deployment-branch"] + return f"project_path:{group}/{project}:ref_type:branch:ref:{branch}" + + +class BitbucketOidcProvider(PipelineOidcProvider): + """ + Represents a Bitbucket OIDC provider + https://support.atlassian.com/bitbucket-cloud/docs/integrate-pipelines-with-resource-servers-using-oidc/ + ---------- + subject_claim_parameters: dict + Parameters specific to building the subject claim for this provider. + oidc_parameters: dict + Parameters common to all providers. + """ + + BITBUCKET_REPO_UUID_PARAMETER_NAME = "bitbucket-repo-uuid" + + def __init__(self, subject_claim_parameters: dict, oidc_parameters: dict) -> None: + all_oidc_parameters = {**oidc_parameters, **subject_claim_parameters} + all_oidc_parameter_names = [ + self.BITBUCKET_REPO_UUID_PARAMETER_NAME, + ] + super().__init__(all_oidc_parameters, all_oidc_parameter_names, BITBUCKET) + + def get_subject_claim(self) -> str: + """ + Returns the subject claim that will be used to establish trust between the OIDC provider and AWS. + To read more about OIDC claims see the following: https://openid.net/specs/openid-connect-core-1_0.html#Claims + To learn more about configuring a role to work with GitLab OIDC through claims see the following + tinyurl.com/bitbucket-oidc-claims + ------- + """ + repo_uuid = self.oidc_parameters[self.BITBUCKET_REPO_UUID_PARAMETER_NAME] + return f"{repo_uuid}:*" diff --git a/samcli/commands/pipeline/init/interactive_init_flow.py b/samcli/commands/pipeline/init/interactive_init_flow.py index 85ef019895..dac54e3733 100644 --- a/samcli/commands/pipeline/init/interactive_init_flow.py +++ b/samcli/commands/pipeline/init/interactive_init_flow.py @@ -8,7 +8,7 @@ from json import JSONDecodeError from pathlib import Path from textwrap import dedent -from typing import Dict, List, Tuple +from typing import Dict, List, Tuple, Optional import click from samcli.cli.global_config import GlobalConfig @@ -102,7 +102,9 @@ def _generate_from_app_pipeline_templates( selected_pipeline_template_dir: Path = pipeline_templates_local_dir.joinpath( selected_pipeline_template_metadata.location ) - return self._generate_from_pipeline_template(selected_pipeline_template_dir) + return self._generate_from_pipeline_template( + selected_pipeline_template_dir, selected_pipeline_template_metadata.provider + ) def _generate_from_custom_location( self, @@ -146,7 +148,7 @@ def _select_and_generate_from_pipeline_template(self, pipeline_template_local_di return self._generate_from_pipeline_template(selected_pipeline_template_dir) def _prompt_run_bootstrap_within_pipeline_init( - self, stage_configuration_names: List[str], number_of_stages: int + self, stage_configuration_names: List[str], number_of_stages: int, cicd_provider: Optional[str] = None ) -> bool: """ Prompt bootstrap if `--bootstrap` flag is provided. Return True if bootstrap process is executed. 
@@ -157,7 +159,8 @@ def _prompt_run_bootstrap_within_pipeline_init( click.echo( Colored().yellow( f"Only {len(stage_configuration_names)} stage(s) were detected, " - f"fewer than what the template requires: {number_of_stages}." + f"fewer than what the template requires: {number_of_stages}. If " + f"these are incorrect, delete .aws-sam/pipeline/pipelineconfig.toml and rerun" ) ) click.echo() @@ -165,7 +168,8 @@ def _prompt_run_bootstrap_within_pipeline_init( if self.allow_bootstrap: if click.confirm( "Do you want to go through stage setup process now? If you choose no, " - "you can still reference other bootstrapped resources." + "you can still reference other bootstrapped resources.", + default=True, ): click.secho( dedent( @@ -198,6 +202,17 @@ def _prompt_run_bootstrap_within_pipeline_init( config_file=None, config_env=None, standalone=False, + permissions_provider=None, + oidc_client_id=None, + oidc_provider_url=None, + github_org=None, + github_repo=None, + deployment_branch=None, + oidc_provider=None, + cicd_provider=cicd_provider, + gitlab_group=None, + gitlab_project=None, + bitbucket_repo_uuid=None, ) return True else: @@ -215,7 +230,9 @@ def _prompt_run_bootstrap_within_pipeline_init( ) return False - def _generate_from_pipeline_template(self, pipeline_template_dir: Path) -> List[str]: + def _generate_from_pipeline_template( + self, pipeline_template_dir: Path, cicd_provider: Optional[str] = None + ) -> List[str]: """ Generates a pipeline config file from a given pipeline template local location and return the list of generated files. @@ -231,13 +248,18 @@ def _generate_from_pipeline_template(self, pipeline_template_dir: Path) -> List[ click.echo("Checking for existing stages...\n") stage_configuration_names, bootstrap_context = _load_pipeline_bootstrap_resources() if len(stage_configuration_names) < number_of_stages and self._prompt_run_bootstrap_within_pipeline_init( - stage_configuration_names, number_of_stages + stage_configuration_names, number_of_stages, cicd_provider ): # the customers just went through the bootstrap process, # refresh the pipeline bootstrap resources and see whether bootstrap is still needed continue + click.echo( + Colored().yellow( + "2 stage(s) were detected, matching the template requirements. 
" + "If these are incorrect, delete .aws-sam/pipeline/pipelineconfig.toml and rerun" + ) + ) break - context: Dict = pipeline_template.run_interactive_flows(bootstrap_context) with osutils.mkdir_temp() as generate_dir: LOG.debug("Generating pipeline files into %s", generate_dir) @@ -269,6 +291,8 @@ def _load_pipeline_bootstrap_resources() -> Tuple[List[str], Dict[str, str]]: # create an index alias for each stage name # so that if customers type "1," it is equivalent to the first stage name context[str([str(index + 1), key])] = value + for key, value in config.get_all(_get_bootstrap_command_names(), section, "default").items(): + context[str(["default", key])] = value # pre-load the list of stage names detected from pipelineconfig.toml stage_names_message = ( diff --git a/samcli/lib/cookiecutter/template.py b/samcli/lib/cookiecutter/template.py index 9bb8ef1db0..765add6e28 100644 --- a/samcli/lib/cookiecutter/template.py +++ b/samcli/lib/cookiecutter/template.py @@ -121,6 +121,7 @@ def run_interactive_flows(self, context: Optional[Dict] = None) -> Dict: """ try: context = context if context else {} + context["shared_values"] = "default" for flow in self._interactive_flows: context = flow.run(context) return context diff --git a/samcli/lib/pipeline/bootstrap/resource.py b/samcli/lib/pipeline/bootstrap/resource.py index 837f301b9d..9d2d02bafe 100644 --- a/samcli/lib/pipeline/bootstrap/resource.py +++ b/samcli/lib/pipeline/bootstrap/resource.py @@ -136,3 +136,31 @@ def get_uri(self) -> Optional[str]: i = len("repository/") repo_name = arn_parts.resource_id[i:] return f"{arn_parts.account_id}.dkr.ecr.{arn_parts.region}.amazonaws.com/{repo_name}" + + +class OidcProvider(Resource): + """ + Represents an AWS OIDC Provider resource + Attributes + ---------- + client_id: str + the client id used to authenticate the user with the OIDC provider. + provider_url: str + url of the OIDC provider. 
+ thumbprint: str + thumbprint for the top intermediate certificate authority (CA) + that signed the certificate used by the identity provider + """ + + def __init__( + self, + arn: Optional[str], + comment: Optional[str], + client_id: Optional[str], + provider_url: Optional[str], + thumbprint: Optional[str], + ) -> None: + self.client_id: Optional[str] = client_id + self.provider_url: Optional[str] = provider_url + self.thumbprint: Optional[str] = thumbprint + super().__init__(arn=arn, comment=comment) diff --git a/samcli/lib/pipeline/bootstrap/stage.py b/samcli/lib/pipeline/bootstrap/stage.py index 166c13fb0f..6ea1d0e22d 100644 --- a/samcli/lib/pipeline/bootstrap/stage.py +++ b/samcli/lib/pipeline/bootstrap/stage.py @@ -3,16 +3,25 @@ import os import pathlib import re +import socket +import hashlib from itertools import chain from typing import Dict, List, Optional, Tuple +from urllib.parse import urlparse import boto3 +from botocore.exceptions import ClientError import click +import requests + +from OpenSSL import SSL, crypto # type: ignore +from samcli.commands.pipeline.bootstrap.guided_context import OPEN_ID_CONNECT, GITHUB_ACTIONS, GITLAB, BITBUCKET +from samcli.commands.pipeline.bootstrap.pipeline_oidc_provider import PipelineOidcProvider from samcli.lib.config.samconfig import SamConfig from samcli.lib.utils.colors import Colored -from samcli.lib.utils.managed_cloudformation_stack import manage_stack, StackOutput -from samcli.lib.pipeline.bootstrap.resource import Resource, IAMUser, ECRImageRepository +from samcli.lib.utils.managed_cloudformation_stack import update_stack, StackOutput +from samcli.lib.pipeline.bootstrap.resource import OidcProvider, Resource, IAMUser, ECRImageRepository CFN_TEMPLATE_PATH = str(pathlib.Path(os.path.dirname(__file__))) STACK_NAME_PREFIX = "aws-sam-cli-managed" @@ -23,6 +32,17 @@ CLOUDFORMATION_EXECUTION_ROLE = "cloudformation_execution_role" ARTIFACTS_BUCKET = "artifacts_bucket" ECR_IMAGE_REPOSITORY = "image_repository" +OIDC_PROVIDER_URL = "oidc_provider_url" +OIDC_CLIENT_ID = "oidc_client_id" +OIDC_PROVIDER = "oidc_provider" +GITHUB_ORG = "github_org" +GITHUB_REPO = "github_repo" +GITLAB_GROUP = "gitlab_group" +GITLAB_PROJECT = "gitlab_project" +DEPLOYMENT_BRANCH = "deployment_branch" +BITBUCKET_REPO_UUID = "bitbucket_repo_uuid" +PERMISSIONS_PROVIDER = "permissions_provider" +OIDC_SUPPORTED_PROVIDER = [GITHUB_ACTIONS, GITLAB, BITBUCKET] REGION = "region" @@ -51,6 +71,10 @@ class Stage: A boolean flag that determines whether the user wants to create an ECR image repository or not image_repository: ECRImageRepository The ECR image repository to hold the image container of lambda functions with Image package-type + oidc_provider: OidcProvider + The OIDCProvider to be created/used for assuming the pipeline execution role + subject_claim: Optional[str] + The subject claim that will be returned by the OIDC Provider to assume the role Methods: -------- @@ -69,11 +93,20 @@ class Stage: the `sam pipeline init` command. print_resources_summary(self) -> None: prints to the screen(console) the ARNs of the created and provided resources. 
+ _should_create_new_provider(self) -> bool: + checks if there are any existing OIDC Providers configured in IAM by getting a list of all OIDC Providers + setup in the account and seeing if the URL provided is in the ARN + generate_thumbprint(oidc_provider_url) -> Optional[str]: + retrieves the certificate of the top intermidate cerficate authority that signed the certificate + used by the external identity provider and then returns the SHA1 hash of it. For more information on + why the thumbprint is needed and the steps required to obtain it see the following page + https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_oidc_verify-thumbprint.html """ def __init__( self, name: str, + permissions_provider: Optional[str] = None, aws_profile: Optional[str] = None, aws_region: Optional[str] = None, pipeline_user_arn: Optional[str] = None, @@ -82,8 +115,16 @@ def __init__( artifacts_bucket_arn: Optional[str] = None, create_image_repository: bool = False, image_repository_arn: Optional[str] = None, + oidc_provider_url: Optional[str] = None, + oidc_client_id: Optional[str] = None, + subject_claim: Optional[str] = None, + pipeline_oidc_provider: Optional[PipelineOidcProvider] = None, ) -> None: self.name: str = name + self.create_new_oidc_provider = False + self.subject_claim = subject_claim + self.use_oidc_provider = permissions_provider == OPEN_ID_CONNECT + self.pipeline_oidc_provider = pipeline_oidc_provider self.aws_profile: Optional[str] = aws_profile self.aws_region: Optional[str] = aws_region self.pipeline_user: IAMUser = IAMUser(arn=pipeline_user_arn, comment="Pipeline IAM user") @@ -100,6 +141,84 @@ def __init__( ) self.color = Colored() + self.oidc_provider: OidcProvider = OidcProvider( + client_id=oidc_client_id, + provider_url=oidc_provider_url, + thumbprint="", + comment="IAM OIDC Identity Provider", + arn="", + ) + + def _should_create_new_provider(self, stack_name: str) -> bool: + """ + Checks if there is an existing Identity Provider in the account already + whos ARN contains the URL provided by the user. + + OIDC Provider arns are of the following format + arn:aws:iam:::oidc-provider/api.bitbucket.org/2.0/workspaces//pipelines-config/identity/oidc + we can check if the URL provided is already in an existing provider to see if a new one should be made + ------- + """ + if not self.oidc_provider.provider_url: + return False + session = boto3.Session(profile_name=self.aws_profile, region_name=self.aws_region) + iam_client = session.client("iam") + cfn_client = session.client("cloudformation") + providers = iam_client.list_open_id_connect_providers() + + url_to_compare = self.oidc_provider.provider_url.replace("https://", "") + for provider_resource in providers["OpenIDConnectProviderList"]: + if url_to_compare in provider_resource["Arn"]: + try: + stack_res = cfn_client.describe_stack_resource( + StackName=stack_name, LogicalResourceId="OidcProvider" + ) + return url_to_compare in stack_res["StackResourceDetail"]["PhysicalResourceId"] + except ClientError as ex: + if "does not exist" in str(ex): + return False + raise ex + return True + + @staticmethod + def generate_thumbprint(oidc_provider_url: Optional[str]) -> Optional[str]: + """ + retrieves the certificate of the top intermidate cerficate authority that signed the certificate + used by the external identity provider and then returns the SHA1 hash of it. 
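+        This automates the manual thumbprint procedure described in the AWS documentation linked below.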
For more information on + why the thumbprint is needed and the steps required to obtain it see the following page + https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_oidc_verify-thumbprint.html + ------- + oidc_provider_url : Optional[str] + The URL of the OIDC provider that will be used for authentication. + """ + # Send HTTP GET to retrieve jwks_uri field from openid-configuration document + oidc_config_url = "{url}/.well-known/openid-configuration".format(url=oidc_provider_url) + r = requests.get(oidc_config_url, timeout=5) + jwks_uri = r.json()["jwks_uri"] + url_for_certificate = urlparse(jwks_uri).hostname + + # Create connection to retrieve certificate + # Create an IPV4 socket and use TLS for the SSL connection + address = (url_for_certificate, 443) + ctx = SSL.Context(SSL.TLS_METHOD) + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s.connect(address) + c = SSL.Connection(ctx, s) + c.set_connect_state() + # set the servername extension becuase not setting it can cause errors with some sites + c.set_tlsext_host_name(str.encode(address[0])) + + # If we attempt to get the cert chain without exchanging some traffic it will be empty + c.sendall(str.encode("HEAD / HTTP/1.0\n\n")) + peerCertChain = c.get_peer_cert_chain() + cert = peerCertChain[-1] + + # Dump the certificate in DER/ASN1 format so that its SHA1 hash can be computed + dumped_cert = crypto.dump_certificate(crypto.FILETYPE_ASN1, cert) + s.close() + + return hashlib.sha1(dumped_cert).hexdigest() + def did_user_provide_all_required_resources(self) -> bool: """Check if the user provided all of the environment resources or not""" return all(resource.is_user_provided for resource in self._get_resources()) @@ -107,7 +226,8 @@ def did_user_provide_all_required_resources(self) -> bool: def _get_non_user_provided_resources_msg(self) -> str: resource_comments = chain.from_iterable( [ - [] if self.pipeline_user.is_user_provided else [self.pipeline_user.comment], + [] if self.pipeline_user.is_user_provided or self.use_oidc_provider else [self.pipeline_user.comment], + [] if not self.use_oidc_provider else [self.oidc_provider.comment], [] if self.pipeline_execution_role.is_user_provided else [self.pipeline_execution_role.comment], [] if self.cloudformation_execution_role.is_user_provided @@ -123,7 +243,7 @@ def _get_non_user_provided_resources_msg(self) -> str: def bootstrap(self, confirm_changeset: bool = True) -> bool: """ Deploys the CFN template(./stage_resources.yaml) which deploys: - * Pipeline IAM User + * Pipeline IAM User or IAM OIDC Identity Provider * Pipeline execution IAM role * CloudFormation execution IAM role * Artifacts' S3 Bucket @@ -162,9 +282,16 @@ def bootstrap(self, confirm_changeset: bool = True) -> bool: click.secho(self.color.red("Canceling pipeline bootstrap creation.")) return False + stack_name = self._get_stack_name() + + if self.use_oidc_provider: + self.create_new_oidc_provider = self._should_create_new_provider(stack_name) + if self.create_new_oidc_provider: + self.oidc_provider.thumbprint = self.generate_thumbprint(self.oidc_provider.provider_url) + environment_resources_template_body = Stage._read_template(STAGE_RESOURCES_CFN_TEMPLATE) - output: StackOutput = manage_stack( - stack_name=self._get_stack_name(), + output: StackOutput = update_stack( + stack_name=stack_name, region=self.aws_region, profile=self.aws_profile, template_body=environment_resources_template_body, @@ -175,17 +302,24 @@ def bootstrap(self, confirm_changeset: bool = True) -> bool: "ArtifactsBucketArn": 
self.artifacts_bucket.arn or "", "CreateImageRepository": "true" if self.create_image_repository else "false", "ImageRepositoryArn": self.image_repository.arn or "", + "IdentityProviderThumbprint": self.oidc_provider.thumbprint or "", + "OidcClientId": self.oidc_provider.client_id or "", + "OidcProviderUrl": self.oidc_provider.provider_url or "", + "UseOidcProvider": "true" if self.use_oidc_provider else "false", + "SubjectClaim": self.subject_claim or "", + "CreateNewOidcProvider": "true" if self.create_new_oidc_provider else "false", }, ) - pipeline_user_secret_sm_id = output.get("PipelineUserSecretKey") + if not self.use_oidc_provider: + pipeline_user_secret_sm_id = output.get("PipelineUserSecretKey") - self.pipeline_user.arn = output.get("PipelineUser") - if pipeline_user_secret_sm_id: - ( - self.pipeline_user.access_key_id, - self.pipeline_user.secret_access_key, - ) = Stage._get_pipeline_user_secret_pair(pipeline_user_secret_sm_id, self.aws_profile, self.aws_region) + self.pipeline_user.arn = output.get("PipelineUser") + if pipeline_user_secret_sm_id: + ( + self.pipeline_user.access_key_id, + self.pipeline_user.secret_access_key, + ) = Stage._get_pipeline_user_secret_pair(pipeline_user_secret_sm_id, self.aws_profile, self.aws_region) self.pipeline_execution_role.arn = output.get("PipelineExecutionRole") self.cloudformation_execution_role.arn = output.get("CloudFormationExecutionRole") self.artifacts_bucket.arn = output.get("ArtifactsBucket") @@ -252,6 +386,9 @@ def save_config(self, config_dir: str, filename: str, cmd_names: List[str]) -> N if self.pipeline_user.arn: samconfig.put(cmd_names=cmd_names, section="parameters", key=PIPELINE_USER, value=self.pipeline_user.arn) + samconfig.put(cmd_names=cmd_names, section="parameters", key=PERMISSIONS_PROVIDER, value="AWS IAM") + if self.use_oidc_provider and self.pipeline_oidc_provider: + self.pipeline_oidc_provider.save_values(cmd_names=cmd_names, section="parameters", samconfig=samconfig) # Computing Artifacts bucket name and ECR image repository URL may through an exception if the ARNs are wrong # Let's swallow such an exception to be able to save the remaining resources @@ -297,11 +434,14 @@ def save_config_safe(self, config_dir: str, filename: str, cmd_names: List[str]) def _get_resources(self) -> List[Resource]: resources = [ - self.pipeline_user, self.pipeline_execution_role, self.cloudformation_execution_role, self.artifacts_bucket, ] + if self.use_oidc_provider: + resources.append(self.oidc_provider) + else: + resources.append(self.pipeline_user) if self.create_image_repository or self.image_repository.arn: # Image Repository is optional resources.append(self.image_repository) return resources @@ -322,7 +462,7 @@ def print_resources_summary(self) -> None: for resource in created_resources: click.secho(self.color.green(f"\t- {resource.comment}")) - if not self.pipeline_user.is_user_provided: + if not self.pipeline_user.is_user_provided and not self.use_oidc_provider: click.secho(self.color.green("Pipeline IAM user credential:")) click.secho(self.color.green(f"\tAWS_ACCESS_KEY_ID: {self.pipeline_user.access_key_id}")) click.secho(self.color.green(f"\tAWS_SECRET_ACCESS_KEY: {self.pipeline_user.secret_access_key}")) diff --git a/samcli/lib/pipeline/bootstrap/stage_resources.yaml b/samcli/lib/pipeline/bootstrap/stage_resources.yaml index 6893a74f4e..23677bb847 100644 --- a/samcli/lib/pipeline/bootstrap/stage_resources.yaml +++ b/samcli/lib/pipeline/bootstrap/stage_resources.yaml @@ -16,9 +16,25 @@ Parameters: AllowedValues: [true, 
false] ImageRepositoryArn: Type: String + IdentityProviderThumbprint: + Type: String + OidcClientId: + Type: String + OidcProviderUrl: + Type: String + UseOidcProvider: + Type: String + AllowedValues: [true, false] + SubjectClaim: + Type: String + CreateNewOidcProvider: + Type: String + AllowedValues: [true, false] Conditions: - MissingPipelineUser: !Equals [!Ref PipelineUserArn, ""] + MissingOidcProvider: !Equals [!Ref CreateNewOidcProvider, "true"] + DontUseOidc: !Not [!Equals [!Ref UseOidcProvider, "true"] ] + MissingPipelineUser: !And [!Condition DontUseOidc, !Equals [!Ref PipelineUserArn, ""]] MissingPipelineExecutionRole: !Equals [!Ref PipelineExecutionRoleArn, ""] MissingCloudFormationExecutionRole: !Equals [!Ref CloudFormationExecutionRoleArn, ""] MissingArtifactsBucket: !Equals [!Ref ArtifactsBucketArn, ""] @@ -26,6 +42,16 @@ Conditions: MissingImageRepository: !And [!Condition ShouldHaveImageRepository, !Equals [!Ref ImageRepositoryArn, ""]] Resources: + OidcProvider: + Type: AWS::IAM::OIDCProvider + Condition: MissingOidcProvider + Properties: + ClientIdList: + - !Ref OidcClientId + ThumbprintList: + - !Ref IdentityProviderThumbprint + Url: !Ref OidcProviderUrl + PipelineUser: Type: AWS::IAM::User Condition: MissingPipelineUser @@ -94,31 +120,55 @@ Resources: - Key: Role Value: pipeline-execution-role AssumeRolePolicyDocument: - Version: 2012-10-17 - Statement: - - Effect: Allow - Principal: - AWS: - - Fn::If: - - MissingPipelineUser - - !GetAtt PipelineUser.Arn - - !Ref PipelineUserArn - Action: - - 'sts:AssumeRole' - - Effect: Allow - Principal: - # Allow roles with tag Role=aws-sam-pipeline-codebuild-service-role to assume this role. - # This is required when CodePipeline is the CI/CD system of choice. - AWS: - - !If - - MissingPipelineUser - - !Ref AWS::AccountId - - !Select [4, !Split [':', !Ref PipelineUserArn]] - Action: - - 'sts:AssumeRole' - Condition: - StringEquals: - aws:PrincipalTag/Role: aws-sam-pipeline-codebuild-service-role + Fn::If: + - DontUseOidc + - Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + AWS: + - Fn::If: + - MissingPipelineUser + - !GetAtt PipelineUser.Arn + - !Ref PipelineUserArn + Action: + - 'sts:AssumeRole' + - Effect: Allow + Principal: + # Allow roles with tag Role=aws-sam-pipeline-codebuild-service-role to assume this role. + # This is required when CodePipeline is the CI/CD system of choice. 
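+              # When an existing pipeline user ARN is supplied, the trusted account ID is extracted
+              # from that ARN (its fifth ':'-separated field); otherwise this account's ID is used.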
+ AWS: + - !If + - MissingPipelineUser + - !Ref AWS::AccountId + - !Select [4, !Split [':', !Ref PipelineUserArn]] + Action: + - 'sts:AssumeRole' + Condition: + StringEquals: + aws:PrincipalTag/Role: aws-sam-pipeline-codebuild-service-role + - Fn::Sub: + - | + { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Principal": { + "Federated": "arn:${AWS::Partition}:iam::${AWS::AccountId}:oidc-provider/${Url}" + }, + "Action": "sts:AssumeRoleWithWebIdentity", + "Condition": { + "ForAllValues:StringLike": { + "${Url}:aud": "${OidcClientId}", + "${Url}:sub": "${SubjectClaim}" + } + } + } + ] + } + - Url: !Select [1, !Split ["//", !Ref OidcProviderUrl]] + ArtifactsBucket: Type: AWS::S3::Bucket diff --git a/samcli/lib/utils/managed_cloudformation_stack.py b/samcli/lib/utils/managed_cloudformation_stack.py index f5f1e9cd8a..d833b0677d 100644 --- a/samcli/lib/utils/managed_cloudformation_stack.py +++ b/samcli/lib/utils/managed_cloudformation_stack.py @@ -33,6 +33,62 @@ def get(self, key) -> Optional[str]: return None +def update_stack( + region: Optional[str], + stack_name: str, + template_body: str, + profile: Optional[str] = None, + parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] = None, +) -> StackOutput: + """ + create or update a CloudFormation stack + + Parameters + ---------- + region: str + AWS region for the CloudFormation stack + stack_name: str + CloudFormation stack name + template_body: str + CloudFormation template's content + profile: Optional[str] + AWS named profile for the AWS account + parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] + Values of template parameters, if any. + + Returns + ------- + StackOutput: + Stack output section(list of OutputKey, OutputValue pairs) + """ + try: + if profile: + session = boto3.Session(profile_name=profile, region_name=region if region else None) + cloudformation_client = session.client("cloudformation") + else: + cloudformation_client = boto3.client( + "cloudformation", config=Config(region_name=region if region else None) + ) + except ProfileNotFound as ex: + raise CredentialsError( + f"Error Setting Up Managed Stack Client: the provided AWS name profile '{profile}' is not found. " + "please check the documentation for setting up a named profile: " + "https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html" + ) from ex + except NoCredentialsError as ex: + raise CredentialsError( + "Error Setting Up Managed Stack Client: Unable to resolve credentials for the AWS SDK for Python client. " + "Please see their documentation for options to pass in credentials: " + "https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html" + ) from ex + except NoRegionError as ex: + raise RegionError( + "Error Setting Up Managed Stack Client: Unable to resolve a region. " + "Please provide a region via the --region parameter or by the AWS_REGION environment variable." 
+ ) from ex + return _create_or_update_stack(cloudformation_client, stack_name, template_body, parameter_overrides) + + def manage_stack( region: Optional[str], stack_name: str, @@ -118,6 +174,33 @@ def _create_or_get_stack( raise ManagedStackError(str(ex)) from ex +def _create_or_update_stack( + cloudformation_client, + stack_name: str, + template_body: str, + parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] = None, +) -> StackOutput: + try: + cloudformation_client.describe_stacks(StackName=stack_name) + stack = _update_stack(cloudformation_client, stack_name, template_body, parameter_overrides) + _check_sanity_of_stack(stack) + stack_outputs = cast(List[Dict[str, str]], stack["Outputs"]) + return StackOutput(stack_outputs) + except ClientError: + LOG.debug("Managed S3 stack [%s] not found. Creating a new one.", stack_name) + + try: + stack = _create_stack( + cloudformation_client, stack_name, template_body, parameter_overrides + ) # exceptions are not captured from subcommands + _check_sanity_of_stack(stack) + stack_outputs = cast(List[Dict[str, str]], stack["Outputs"]) + return StackOutput(stack_outputs) + except (ClientError, BotoCoreError) as ex: + LOG.debug("Failed to create managed resources", exc_info=ex) + raise ManagedStackError(str(ex)) from ex + + def _check_sanity_of_stack(stack): stack_name = stack.get("StackName") tags = stack.get("Tags", None) @@ -187,6 +270,30 @@ def _create_stack( return stacks[0] +def _update_stack( + cloudformation_client, + stack_name: str, + template_body: str, + parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] = None, +): + click.echo("\tUpdating the required resources...") + parameters = _generate_stack_parameters(parameter_overrides) + us_resp = cloudformation_client.update_stack( + StackName=stack_name, + TemplateBody=template_body, + Tags=[{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + Capabilities=["CAPABILITY_IAM", "CAPABILITY_AUTO_EXPAND"], + Parameters=parameters, + ) + stack_id = us_resp["StackId"] + stack_waiter = cloudformation_client.get_waiter("stack_update_complete") + stack_waiter.wait(StackName=stack_id, WaiterConfig={"Delay": 15, "MaxAttempts": 60}) + ds_resp = cloudformation_client.describe_stacks(StackName=stack_name) + stacks = ds_resp["Stacks"] + click.echo("\tSuccessfully updated!") + return stacks[0] + + def _generate_stack_parameters( parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] = None ) -> List[Dict[str, str]]: diff --git a/tests/integration/pipeline/test_bootstrap_command.py b/tests/integration/pipeline/test_bootstrap_command.py index ae4433360b..8b1e4c0663 100644 --- a/tests/integration/pipeline/test_bootstrap_command.py +++ b/tests/integration/pipeline/test_bootstrap_command.py @@ -1,5 +1,6 @@ from unittest import skipIf +import pytest from parameterized import parameterized from samcli.commands.pipeline.bootstrap.cli import PIPELINE_CONFIG_FILENAME, PIPELINE_CONFIG_DIR @@ -44,6 +45,7 @@ def test_interactive_with_no_resources_provided(self, create_image_repository): stage_configuration_name, CREDENTIAL_PROFILE, self.region, # region + "1", # IAM permissions provider "", # pipeline user "", # Pipeline execution role "", # CloudFormation execution role @@ -92,6 +94,70 @@ def test_interactive_with_no_resources_provided(self, create_image_repository): self.assertSetEqual(common_resources, set(self._extract_created_resource_logical_ids(stack_name))) self.validate_pipeline_config(stack_name, stage_configuration_name) + @parameterized.expand([("create_image_repository",), 
(False,)]) + def test_interactive_with_no_resources_provided_using_oidc(self, create_image_repository): + stage_configuration_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list() + + inputs = [ + stage_configuration_name, + CREDENTIAL_PROFILE, + self.region, # region + "2", # OIDC permissions provider + "1", # GitHub Actions OIDC + "https://token.actions.githubusercontent.com", # GitHub Actions OIDC URL + "sts.amazonaws.com", # GitHub Actions OIDC client id + "test_github_org", # GitHub Organization + "test_not_real", # Github Repository + "main", # Deployment branch + "", # Pipeline execution role + "", # CloudFormation execution role + "", # Artifacts bucket + "y" if create_image_repository else "N", # Should we create ECR repo + ] + + if create_image_repository: + inputs.append("") # Create image repository + + inputs.append("") # Confirm summary + inputs.append("y") # Create resources + + bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + stdout = bootstrap_process_execute.stdout.decode() + # make sure pipeline user's credential is printed + + common_resources = { + "CloudFormationExecutionRole", + "PipelineExecutionRole", + "ArtifactsBucket", + "ArtifactsLoggingBucket", + "ArtifactsLoggingBucketPolicy", + "ArtifactsBucketPolicy", + "PipelineExecutionRolePermissionPolicy", + } + CFN_OUTPUT_TO_CONFIG_KEY["OidcProvider"] = "oidc_provider_url" + del CFN_OUTPUT_TO_CONFIG_KEY["PipelineUser"] + if create_image_repository: + self.assertSetEqual( + { + *common_resources, + "ImageRepository", + }, + set(self._extract_created_resource_logical_ids(stack_name)), + ) + CFN_OUTPUT_TO_CONFIG_KEY["ImageRepository"] = "image_repository" + self.validate_pipeline_config(stack_name, stage_configuration_name, list(CFN_OUTPUT_TO_CONFIG_KEY.keys())) + del CFN_OUTPUT_TO_CONFIG_KEY["ImageRepository"] + else: + self.assertSetEqual(common_resources, set(self._extract_created_resource_logical_ids(stack_name))) + self.validate_pipeline_config(stack_name, stage_configuration_name) + del CFN_OUTPUT_TO_CONFIG_KEY["OidcProvider"] + CFN_OUTPUT_TO_CONFIG_KEY["PipelineUser"] = "pipeline_user" + @parameterized.expand([("create_image_repository",), (False,)]) def test_non_interactive_with_no_resources_provided(self, create_image_repository): stage_configuration_name, stack_name = self._get_stage_and_stack_name() @@ -120,6 +186,7 @@ def test_interactive_with_all_required_resources_provided(self): stage_configuration_name, CREDENTIAL_PROFILE, self.region, # region + "1", # IAM permissions "arn:aws:iam::123:user/user-name", # pipeline user "arn:aws:iam::123:role/role-name", # Pipeline execution role "arn:aws:iam::123:role/role-name", # CloudFormation execution role @@ -176,10 +243,13 @@ def validate_pipeline_config(self, stack_name, stage_configuration_name, cfn_key if key not in cfn_keys_to_check: continue value = CFN_OUTPUT_TO_CONFIG_KEY[key] - cfn_value = output_values[key] + if key != "OidcProvider": + cfn_value = output_values[key] config_value = config_values[value] if key == "ImageRepository": self.assertEqual(cfn_value.split("/")[-1], config_value.split("/")[-1]) + elif key == "OidcProvider": + self.assertTrue(config_value.startswith("https://")) else: self.assertTrue(cfn_value.endswith(config_value) or cfn_value == config_value) @@ -221,6 +291,7 @@ def test_interactive_cancelled_by_user(self): stage_configuration_name, 
CREDENTIAL_PROFILE, self.region, # region + "1", # IAM permissions "arn:aws:iam::123:user/user-name", # pipeline user "arn:aws:iam::123:role/role-name", # Pipeline execution role "", # CloudFormation execution role @@ -247,6 +318,7 @@ def test_interactive_with_some_required_resources_provided(self): stage_configuration_name, CREDENTIAL_PROFILE, self.region, # region + "1", # IAM permissions "arn:aws:iam::123:user/user-name", # pipeline user "arn:aws:iam::123:role/role-name", # Pipeline execution role "", # CloudFormation execution role @@ -263,6 +335,10 @@ def test_interactive_with_some_required_resources_provided(self): self.assertIn("Successfully created!", stdout) # make sure the not provided resource is the only resource created. self.assertIn("CloudFormationExecutionRole", self._extract_created_resource_logical_ids(stack_name)) + if "ImageRepository" in CFN_OUTPUT_TO_CONFIG_KEY: + del CFN_OUTPUT_TO_CONFIG_KEY["ImageRepository"] + if "OidcProvider" in CFN_OUTPUT_TO_CONFIG_KEY: + del CFN_OUTPUT_TO_CONFIG_KEY["OidcProvider"] self.validate_pipeline_config(stack_name, stage_configuration_name) def test_interactive_pipeline_user_only_created_once(self): @@ -283,6 +359,7 @@ def test_interactive_pipeline_user_only_created_once(self): stage_configuration_name, CREDENTIAL_PROFILE, self.region, # region + "1", # IAM permissions *([""] if i == 0 else []), # pipeline user "arn:aws:iam::123:role/role-name", # Pipeline execution role "arn:aws:iam::123:role/role-name", # CloudFormation execution role diff --git a/tests/integration/pipeline/test_init_command.py b/tests/integration/pipeline/test_init_command.py index 542fd84b33..6670275681 100644 --- a/tests/integration/pipeline/test_init_command.py +++ b/tests/integration/pipeline/test_init_command.py @@ -279,6 +279,7 @@ def test_with_one_stages_in_pipeline_config(self): stage_configuration_names[0], CREDENTIAL_PROFILE, self.region, # region + "1", # IAM permissions "", # pipeline user "", # Pipeline execution role "", # CloudFormation execution role diff --git a/tests/unit/commands/pipeline/bootstrap/test_cli.py b/tests/unit/commands/pipeline/bootstrap/test_cli.py index e2fd1a019b..7c043d098e 100644 --- a/tests/unit/commands/pipeline/bootstrap/test_cli.py +++ b/tests/unit/commands/pipeline/bootstrap/test_cli.py @@ -6,12 +6,19 @@ from samcli.commands.pipeline.bootstrap.cli import ( _load_saved_pipeline_user_arn, - _get_bootstrap_command_names, + _load_config_values, PIPELINE_CONFIG_FILENAME, PIPELINE_CONFIG_DIR, ) from samcli.commands.pipeline.bootstrap.cli import cli as bootstrap_cmd from samcli.commands.pipeline.bootstrap.cli import do_cli as bootstrap_cli +from samcli.commands.pipeline.bootstrap.guided_context import GITHUB_ACTIONS, GITLAB +from samcli.commands.pipeline.bootstrap.oidc_config import ( + GitHubOidcConfig, + OidcConfig, + GitLabOidcConfig, + BitbucketOidcConfig, +) ANY_REGION = "ANY_REGION" ANY_PROFILE = "ANY_PROFILE" @@ -24,6 +31,21 @@ ANY_ARN = "ANY_ARN" ANY_CONFIG_FILE = "ANY_CONFIG_FILE" ANY_CONFIG_ENV = "ANY_CONFIG_ENV" +ANY_CICD_PROVIDER = "ANY_CICD_PROVIDER" +ANY_OIDC_PROVIDER_URL = "ANY_OIDC_PROVIDER_URL" +ANY_OIDC_CLIENT_ID = "ANY_OIDC_CLIENT_ID" +ANY_OIDC_PROVIDER = "ANY_OIDC_PROVIDER" +ANY_GITHUB_ORG = "ANY_GITHUB_ORG" +ANY_GITHUB_REPO = "ANY_GITHUB_REPO" +ANY_DEPLOYMENT_BRANCH = "ANY_DEPLOYMENT_BRANCH" +ANY_GITLAB_PROJECT = "ANY_GITLAB_PROJECT" +ANY_GITLAB_GROUP = "ANY_GITLAB_GROUP" +ANY_BITBUCKET_REPO_UUID = "ANY_BITBUCKET_REPO_UUID" +ANY_SUBJECT_CLAIM = "ANY_SUBJECT_CLAIM" +ANY_BUILT_SUBJECT_CLAIM = 
"repo:ANY_GITHUB_ORG/ANY_GITHUB_REPO:ref:refs/heads/ANY_DEPLOYMENT_BRANCH" +ANY_BUILT_GITLAB_SUBJECT_CLAIM = ( + "project_path:ANY_GITLAB_GROUP/ANY_GITLAB_PROJECT:ref_type:branch:ref" ":ANY_DEPLOYMENT_BRANCH" +) PIPELINE_BOOTSTRAP_COMMAND_NAMES = ["pipeline", "bootstrap"] @@ -43,6 +65,17 @@ def setUp(self) -> None: "confirm_changeset": True, "config_file": ANY_CONFIG_FILE, "config_env": ANY_CONFIG_ENV, + "permissions_provider": "iam", + "oidc_provider_url": ANY_OIDC_PROVIDER_URL, + "oidc_client_id": ANY_OIDC_CLIENT_ID, + "oidc_provider": GITHUB_ACTIONS, + "github_org": ANY_GITHUB_ORG, + "github_repo": ANY_GITHUB_REPO, + "gitlab_project": ANY_GITLAB_PROJECT, + "gitlab_group": ANY_GITLAB_GROUP, + "bitbucket_repo_uuid": ANY_BITBUCKET_REPO_UUID, + "deployment_branch": ANY_DEPLOYMENT_BRANCH, + "cicd_provider": ANY_CICD_PROVIDER, } @patch("samcli.commands.pipeline.bootstrap.cli.do_cli") @@ -68,6 +101,17 @@ def test_bootstrap_command_default_argument_values(self, do_cli_mock): confirm_changeset=True, config_file="default", config_env="samconfig.toml", + permissions_provider="iam", + oidc_provider_url=None, + oidc_client_id=None, + github_org=None, + github_repo=None, + deployment_branch=None, + oidc_provider=None, + gitlab_group=None, + gitlab_project=None, + bitbucket_repo_uuid=None, + cicd_provider=None, ) @patch("samcli.commands.pipeline.bootstrap.cli.do_cli") @@ -99,13 +143,14 @@ def test_bootstrap_command_with_different_arguments_combination(self, do_cli_moc @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") - @patch("samcli.commands.pipeline.bootstrap.cli.Stage") - @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") def test_bootstrapping_normal_interactive_flow( self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock ): # setup gc_instance = Mock() + gc_instance.permissions_provider = "iam" guided_context_mock.return_value = gc_instance environment_instance = Mock() environment_mock.return_value = environment_instance @@ -128,10 +173,158 @@ def test_bootstrapping_normal_interactive_flow( cmd_names=PIPELINE_BOOTSTRAP_COMMAND_NAMES, ) + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + def test_bootstrapping_oidc_non_interactive_fails_if_missing_parameters(self, environment_mock): + # setup + environment_instance = Mock() + environment_mock.return_value = environment_instance + self.cli_context["interactive"] = False + self.cli_context["permissions_provider"] = "oidc" + self.cli_context["oidc_provider_url"] = None + self.cli_context["oidc_client_id"] = None + self.cli_context["oidc_provider"] = None + + # trigger + with self.assertRaises(click.UsageError): + bootstrap_cli(**self.cli_context) + + # verify + environment_instance.bootstrap.assert_not_called() + environment_instance.print_resources_summary.assert_not_called() + environment_instance.save_config_safe.assert_not_called() + + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + def test_bootstrapping_oidc_non_interactive_fails_if_missing_github_parameters(self, environment_mock): + # setup + environment_instance = Mock() + environment_mock.return_value = environment_instance + self.cli_context["interactive"] = False + self.cli_context["permissions_provider"] = "oidc" + self.cli_context["oidc_provider"] = GITHUB_ACTIONS + 
self.cli_context["github_org"] = None + self.cli_context["github_repo"] = None + self.cli_context["deployment_branch"] = None + + # trigger + with self.assertRaises(click.UsageError): + bootstrap_cli(**self.cli_context) + + # verify + environment_instance.bootstrap.assert_not_called() + environment_instance.print_resources_summary.assert_not_called() + environment_instance.save_config_safe.assert_not_called() + + @patch("samcli.commands.pipeline.bootstrap.pipeline_oidc_provider") + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") + def test_bootstrapping_oidc_interactive_flow( + self, + guided_context_mock, + environment_mock, + get_command_names_mock, + pipeline_provider_mock, + ): + # setup + gc_instance = Mock() + gc_instance.permissions_provider = "oidc" + guided_context_mock.return_value = gc_instance + environment_instance = Mock() + environment_mock.return_value = environment_instance + """ oidc_config_instance = Mock() + oidc_config_instance.oidc_provider = "github-actions" + oidc_config_mock.return_value = oidc_config_instance""" + self.cli_context["interactive"] = True + self.cli_context["permissions_provider"] = "oidc" + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + + # trigger + bootstrap_cli(**self.cli_context) + + # verify + gc_instance.run.assert_called_once() + environment_instance.bootstrap.assert_called_once_with(confirm_changeset=True) + environment_instance.print_resources_summary.assert_called_once() + environment_instance.save_config_safe.assert_called_once_with( + config_dir=PIPELINE_CONFIG_DIR, + filename=PIPELINE_CONFIG_FILENAME, + cmd_names=PIPELINE_BOOTSTRAP_COMMAND_NAMES, + ) + + @patch("samcli.commands.pipeline.bootstrap.pipeline_oidc_provider") + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") + def test_bootstrapping_oidc_interactive_flow_gitlab( + self, + guided_context_mock, + environment_mock, + get_command_names_mock, + pipeline_provider_mock, + ): + # setup + gc_instance = Mock() + gc_instance.permissions_provider = "oidc" + guided_context_mock.return_value = gc_instance + environment_instance = Mock() + environment_mock.return_value = environment_instance + self.cli_context["interactive"] = True + self.cli_context["permissions_provider"] = "oidc" + self.cli_context["oidc_provider"] = "gitlab" + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + + # trigger + bootstrap_cli(**self.cli_context) + + # verify + gc_instance.run.assert_called_once() + environment_instance.bootstrap.assert_called_once_with(confirm_changeset=True) + environment_instance.print_resources_summary.assert_called_once() + environment_instance.save_config_safe.assert_called_once_with( + config_dir=PIPELINE_CONFIG_DIR, + filename=PIPELINE_CONFIG_FILENAME, + cmd_names=PIPELINE_BOOTSTRAP_COMMAND_NAMES, + ) + + @patch("samcli.commands.pipeline.bootstrap.pipeline_oidc_provider") + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") + def test_bootstrapping_oidc_interactive_flow_bitbucket( + self, + guided_context_mock, + environment_mock, + get_command_names_mock, + 
pipeline_provider_mock, + ): + # setup + gc_instance = Mock() + gc_instance.permissions_provider = "oidc" + guided_context_mock.return_value = gc_instance + environment_instance = Mock() + environment_mock.return_value = environment_instance + self.cli_context["interactive"] = True + self.cli_context["permissions_provider"] = "oidc" + self.cli_context["oidc_provider"] = "bitbucket-pipelines" + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + + # trigger + bootstrap_cli(**self.cli_context) + + # verify + gc_instance.run.assert_called_once() + environment_instance.bootstrap.assert_called_once_with(confirm_changeset=True) + environment_instance.print_resources_summary.assert_called_once() + environment_instance.save_config_safe.assert_called_once_with( + config_dir=PIPELINE_CONFIG_DIR, + filename=PIPELINE_CONFIG_FILENAME, + cmd_names=PIPELINE_BOOTSTRAP_COMMAND_NAMES, + ) + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") - @patch("samcli.commands.pipeline.bootstrap.cli.Stage") - @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") def test_bootstrap_will_not_try_loading_pipeline_user_if_already_provided( self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock ): @@ -140,8 +333,8 @@ def test_bootstrap_will_not_try_loading_pipeline_user_if_already_provided( @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") - @patch("samcli.commands.pipeline.bootstrap.cli.Stage") - @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") def test_bootstrap_will_try_loading_pipeline_user_if_not_provided( self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock ): @@ -149,10 +342,21 @@ def test_bootstrap_will_try_loading_pipeline_user_if_not_provided( bootstrap_cli(**self.cli_context) load_saved_pipeline_user_arn_mock.assert_called_once() + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_config_values") + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") + def test_bootstrap_will_try_loading_oidc_values_if_not_provided( + self, guided_context_mock, environment_mock, load_saved_oidc_values_arn_mock, get_command_names_mock + ): + self.cli_context["oidc_provider"] = None + bootstrap_cli(**self.cli_context) + load_saved_oidc_values_arn_mock.assert_called_once() + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") - @patch("samcli.commands.pipeline.bootstrap.cli.Stage") - @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") def test_stage_configuration_name_is_required_to_be_provided_in_case_of_non_interactive_mode( self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock ): 
@@ -163,8 +367,8 @@ def test_stage_configuration_name_is_required_to_be_provided_in_case_of_non_inte @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") - @patch("samcli.commands.pipeline.bootstrap.cli.Stage") - @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") def test_stage_configuration_name_is_not_required_to_be_provided_in_case_of_interactive_mode( self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock ): @@ -174,8 +378,8 @@ def test_stage_configuration_name_is_not_required_to_be_provided_in_case_of_inte @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") - @patch("samcli.commands.pipeline.bootstrap.cli.Stage") - @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") def test_guided_context_will_be_enabled_or_disabled_based_on_the_interactive_mode( self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock ): @@ -190,8 +394,8 @@ def test_guided_context_will_be_enabled_or_disabled_based_on_the_interactive_mod @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") - @patch("samcli.commands.pipeline.bootstrap.cli.Stage") - @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") def test_bootstrapping_will_confirm_before_creating_the_resources_unless_the_user_choose_not_to( self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock ): @@ -274,3 +478,90 @@ def test_load_saved_pipeline_user_arn_returns_the_pipeline_user_arn_from_the_pip # verify self.assertEqual(pipeline_user_arn, ANY_PIPELINE_USER_ARN) + + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") + @patch("samcli.commands.pipeline.bootstrap.cli.SamConfig") + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + def test_load_saved_oidc_values_returns_values_from_file( + self, get_command_names_mock, sam_config_mock, guided_context_mock, stage_mock + ): + # setup + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + sam_config_instance_mock = Mock() + sam_config_mock.return_value = sam_config_instance_mock + sam_config_instance_mock.exists.return_value = True + sam_config_instance_mock.get_all.return_value = { + "oidc_provider_url": "saved_url", + "oidc_provider": "saved_provider", + "oidc_client_id": "saved_client_id", + "github_org": "saved_org", + "github_repo": "saved_repo", + "deployment_branch": "saved_branch", + "permissions_provider": "OpenID Connect (OIDC)", + } + github_config = GitHubOidcConfig( + github_repo="saved_repo", github_org="saved_org", deployment_branch="saved_branch" + ) + oidc_config = OidcConfig( + oidc_provider="saved_provider", oidc_client_id="saved_client_id", oidc_provider_url="saved_url" + ) + gitlab_config = GitLabOidcConfig( + 
gitlab_group=ANY_GITLAB_GROUP, gitlab_project=ANY_GITLAB_PROJECT, deployment_branch="saved_branch" + ) + bitbucket_config = BitbucketOidcConfig(ANY_BITBUCKET_REPO_UUID) + # trigger + bootstrap_cli(**self.cli_context) + + # verify + guided_context_mock.assert_called_with( + github_config=github_config, + gitlab_config=gitlab_config, + oidc_config=oidc_config, + bitbucket_config=bitbucket_config, + permissions_provider="oidc", + profile=ANY_PROFILE, + stage_configuration_name=ANY_STAGE_CONFIGURATION_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, + region=ANY_REGION, + enable_oidc_option=False, + ) + + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.lib.pipeline.bootstrap.stage.Stage") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext") + def test_bootstrapping_normal_interactive_flow_with_non_user_provided_user( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + # setup + gc_instance = Mock() + gc_instance.permissions_provider = "iam" + guided_context_mock.return_value = gc_instance + environment_instance = Mock() + environment_mock.return_value = environment_instance + environment_instance.permissions_provider = "iam" + load_saved_pipeline_user_arn_mock.return_value = ANY_PIPELINE_USER_ARN + environment_instance.pipeline_user.is_user_provided = False + self.cli_context["interactive"] = True + self.cli_context["pipeline_user_arn"] = None + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + + # trigger + bootstrap_cli(**self.cli_context) + + # verify + load_saved_pipeline_user_arn_mock.assert_called_once() + gc_instance.run.assert_called_once() + environment_instance.bootstrap.assert_called_once_with(confirm_changeset=True) + environment_instance.print_resources_summary.assert_called_once() + environment_instance.save_config_safe.assert_called_once_with( + config_dir=PIPELINE_CONFIG_DIR, + filename=PIPELINE_CONFIG_FILENAME, + cmd_names=PIPELINE_BOOTSTRAP_COMMAND_NAMES, + ) diff --git a/tests/unit/commands/pipeline/bootstrap/test_guided_context.py b/tests/unit/commands/pipeline/bootstrap/test_guided_context.py index 761fee0bff..9daec7c225 100644 --- a/tests/unit/commands/pipeline/bootstrap/test_guided_context.py +++ b/tests/unit/commands/pipeline/bootstrap/test_guided_context.py @@ -3,7 +3,13 @@ from parameterized import parameterized -from samcli.commands.pipeline.bootstrap.guided_context import GuidedContext +from samcli.commands.pipeline.bootstrap.guided_context import IAM, GuidedContext, GITHUB_ACTIONS +from samcli.commands.pipeline.bootstrap.oidc_config import ( + GitHubOidcConfig, + BitbucketOidcConfig, + GitLabOidcConfig, + OidcConfig, +) ANY_STAGE_CONFIGURATION_NAME = "ANY_STAGE_CONFIGURATION_NAME" ANY_PIPELINE_USER_ARN = "ANY_PIPELINE_USER_ARN" @@ -13,6 +19,15 @@ ANY_IMAGE_REPOSITORY_ARN = "ANY_IMAGE_REPOSITORY_ARN" ANY_ARN = "ANY_ARN" ANY_REGION = "us-east-2" +ANY_OIDC_PROVIDER = GITHUB_ACTIONS +ANY_OIDC_PROVIDER_URL = "https://ANY_OIDC_PROVIDER.com" +ANY_OIDC_CLIENT_ID = "ANY_OIDC_CLIENT_ID" +ANY_GITHUB_ORG = "ANY_GITHUB_ORG" +ANY_GITHUB_REPO = "ANY_GITHUB_REPO" +ANY_GITLAB_GROUP = 
"ANY_GITLAB_GROUP" +ANY_GITLAB_PROJECT = "ANY_GITLAB_PROJECT" +ANY_DEPLOYMENT_BRANCH = "ANY_DEPLOYMENT_BRANCH" +ANY_BITBUCKET_REPO_UUID = "ANY_BITBUCKET_REPO_UUID" class TestGuidedContext(TestCase): @@ -25,7 +40,21 @@ def test_guided_context_will_not_prompt_for_fields_that_are_already_provided( account_id_mock.return_value = "1234567890" click_mock.confirm.return_value = False click_mock.prompt = Mock(return_value="0") + github_config = GitHubOidcConfig( + github_org=ANY_GITHUB_ORG, github_repo=ANY_GITHUB_REPO, deployment_branch=ANY_DEPLOYMENT_BRANCH + ) + gitlab_config = GitLabOidcConfig( + gitlab_group=ANY_GITLAB_GROUP, gitlab_project=ANY_GITLAB_PROJECT, deployment_branch=ANY_DEPLOYMENT_BRANCH + ) + bitbucket_config = BitbucketOidcConfig(ANY_BITBUCKET_REPO_UUID) + oidc_config = OidcConfig( + oidc_provider=ANY_OIDC_PROVIDER, oidc_provider_url=ANY_OIDC_PROVIDER_URL, oidc_client_id=ANY_OIDC_CLIENT_ID + ) gc: GuidedContext = GuidedContext( + github_config=github_config, + gitlab_config=gitlab_config, + bitbucket_config=bitbucket_config, + oidc_config=oidc_config, stage_configuration_name=ANY_STAGE_CONFIGURATION_NAME, pipeline_user_arn=ANY_PIPELINE_USER_ARN, pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, @@ -34,6 +63,7 @@ def test_guided_context_will_not_prompt_for_fields_that_are_already_provided( create_image_repository=True, image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, region=ANY_REGION, + permissions_provider="oidc", ) gc.run() # there should only two prompt to ask @@ -42,6 +72,120 @@ def test_guided_context_will_not_prompt_for_fields_that_are_already_provided( prompt_account_id_mock.assert_called_once() click_mock.prompt.assert_called_once() + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._prompt_account_id") + def test_guided_context_will_not_prompt_for_fields_that_are_already_provided_oidc( + self, prompt_account_id_mock, click_mock, account_id_mock + ): + account_id_mock.return_value = "1234567890" + click_mock.confirm.return_value = False + click_mock.prompt = Mock(return_value="0") + + github_config = GitHubOidcConfig( + github_org=ANY_GITHUB_ORG, github_repo=ANY_GITHUB_REPO, deployment_branch=ANY_DEPLOYMENT_BRANCH + ) + gitlab_config = GitLabOidcConfig( + gitlab_group=ANY_GITLAB_GROUP, gitlab_project=ANY_GITLAB_PROJECT, deployment_branch=ANY_DEPLOYMENT_BRANCH + ) + bitbucket_config = BitbucketOidcConfig(ANY_BITBUCKET_REPO_UUID) + oidc_config = OidcConfig( + oidc_provider=ANY_OIDC_PROVIDER, oidc_provider_url=ANY_OIDC_PROVIDER_URL, oidc_client_id=ANY_OIDC_CLIENT_ID + ) + gc: GuidedContext = GuidedContext( + github_config=github_config, + gitlab_config=gitlab_config, + bitbucket_config=bitbucket_config, + oidc_config=oidc_config, + stage_configuration_name=ANY_STAGE_CONFIGURATION_NAME, + permissions_provider="oidc", + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, + region=ANY_REGION, + ) + gc.run() + # there should only two prompt to ask + # 1. which account to use (mocked in _prompt_account_id(), not contributing to count) + # 2. 
what values customers want to change + prompt_account_id_mock.assert_called_once() + click_mock.prompt.assert_called_once() + + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._prompt_account_id") + def test_guided_context_will_not_prompt_for_fields_that_are_already_provided_oidc_gitlab( + self, prompt_account_id_mock, click_mock, account_id_mock + ): + account_id_mock.return_value = "1234567890" + click_mock.confirm.return_value = False + click_mock.prompt = Mock(return_value="0") + + github_config = GitHubOidcConfig(github_org=None, github_repo=None, deployment_branch=None) + gitlab_config = GitLabOidcConfig( + gitlab_group=ANY_GITLAB_GROUP, gitlab_project=ANY_GITLAB_PROJECT, deployment_branch=ANY_DEPLOYMENT_BRANCH + ) + bitbucket_config = BitbucketOidcConfig(ANY_BITBUCKET_REPO_UUID) + oidc_config = OidcConfig( + oidc_provider="gitlab", oidc_provider_url=ANY_OIDC_PROVIDER_URL, oidc_client_id=ANY_OIDC_CLIENT_ID + ) + gc: GuidedContext = GuidedContext( + github_config=github_config, + gitlab_config=gitlab_config, + bitbucket_config=bitbucket_config, + oidc_config=oidc_config, + stage_configuration_name=ANY_STAGE_CONFIGURATION_NAME, + permissions_provider="oidc", + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, + region=ANY_REGION, + ) + gc.run() + # there should only two prompt to ask + # 1. which account to use (mocked in _prompt_account_id(), not contributing to count) + # 2. 
what values customers want to change + prompt_account_id_mock.assert_called_once() + click_mock.prompt.assert_called_once() + + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._validate_oidc_provider_url") + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._prompt_account_id") + def test_guided_context_will_prompt_for_fields_that_are_not_provided_oidc_bitbucket( + self, prompt_account_id_mock, click_mock, account_id_mock, oidc_url_validate_mock + ): + account_id_mock.return_value = "1234567890" + click_mock.confirm.return_value = False + click_mock.prompt = Mock(return_value="0") + github_config = GitHubOidcConfig(github_org=None, github_repo=None, deployment_branch=None) + gitlab_config = GitLabOidcConfig(gitlab_group=None, gitlab_project=None, deployment_branch=None) + bitbucket_config = BitbucketOidcConfig(None) + oidc_config = OidcConfig(oidc_provider="bitbucket-pipelines", oidc_provider_url=None, oidc_client_id=None) + + gc: GuidedContext = GuidedContext( + github_config=github_config, + gitlab_config=gitlab_config, + bitbucket_config=bitbucket_config, + oidc_config=oidc_config, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, # Exclude ECR repo, it has its own detailed test below + permissions_provider="oidc", + ) + gc.run() + prompt_account_id_mock.assert_called_once() + self.assertTrue(self.did_prompt_text_like("Stage configuration Name", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("Pipeline execution role", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("CloudFormation execution role", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("Artifact bucket", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("region", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("URL of the OIDC provider", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("OIDC Client ID", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("Repository UUID", click_mock.prompt)) + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") @patch("samcli.commands.pipeline.bootstrap.guided_context.click") @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._prompt_account_id") @@ -51,8 +195,17 @@ def test_guided_context_will_prompt_for_fields_that_are_not_provided( account_id_mock.return_value = "1234567890" click_mock.confirm.return_value = False click_mock.prompt = Mock(return_value="0") + github_config = GitHubOidcConfig(github_org=None, github_repo=None, deployment_branch=None) + gitlab_config = GitLabOidcConfig(gitlab_group=None, gitlab_project=None, deployment_branch=None) + bitbucket_config = BitbucketOidcConfig(None) + oidc_config = OidcConfig(oidc_provider=ANY_OIDC_PROVIDER, oidc_provider_url=None, oidc_client_id=None) + gc: GuidedContext = GuidedContext( - image_repository_arn=ANY_IMAGE_REPOSITORY_ARN # Exclude ECR repo, it has its own detailed test below + github_config=github_config, + gitlab_config=gitlab_config, + bitbucket_config=bitbucket_config, + oidc_config=oidc_config, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, # Exclude ECR repo, it has its own detailed test below ) gc.run() prompt_account_id_mock.assert_called_once() @@ -63,6 +216,126 @@ def test_guided_context_will_prompt_for_fields_that_are_not_provided( 
self.assertTrue(self.did_prompt_text_like("Artifact bucket", click_mock.prompt)) self.assertTrue(self.did_prompt_text_like("region", click_mock.prompt)) + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._validate_oidc_provider_url") + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._prompt_account_id") + def test_guided_context_will_prompt_for_fields_that_are_not_provided_oidc( + self, prompt_account_id_mock, click_mock, account_id_mock, oidc_url_validate_mock + ): + account_id_mock.return_value = "1234567890" + click_mock.confirm.return_value = False + click_mock.prompt = Mock(return_value="0") + github_config = GitHubOidcConfig(github_org=None, github_repo=None, deployment_branch=None) + gitlab_config = GitLabOidcConfig(gitlab_group=None, gitlab_project=None, deployment_branch=None) + bitbucket_config = BitbucketOidcConfig(None) + oidc_config = OidcConfig(oidc_provider=ANY_OIDC_PROVIDER, oidc_provider_url=None, oidc_client_id=None) + + gc: GuidedContext = GuidedContext( + github_config=github_config, + gitlab_config=gitlab_config, + bitbucket_config=bitbucket_config, + oidc_config=oidc_config, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, # Exclude ECR repo, it has its own detailed test below + permissions_provider="oidc", + ) + gc.run() + prompt_account_id_mock.assert_called_once() + self.assertTrue(self.did_prompt_text_like("Stage configuration Name", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("Pipeline execution role", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("CloudFormation execution role", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("Artifact bucket", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("region", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("URL of the OIDC provider", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("OIDC Client ID", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("GitHub Organization", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("GitHub Repository", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("branch that deployments", click_mock.prompt)) + + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._validate_oidc_provider_url") + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._prompt_account_id") + def test_guided_context_will_prompt_for_fields_that_are_not_provided_oidc_gitlab( + self, prompt_account_id_mock, click_mock, account_id_mock, oidc_url_validate_mock + ): + account_id_mock.return_value = "1234567890" + click_mock.confirm.return_value = False + click_mock.prompt = Mock(return_value="0") + github_config = GitHubOidcConfig(github_org=None, github_repo=None, deployment_branch=None) + gitlab_config = GitLabOidcConfig(gitlab_group=None, gitlab_project=None, deployment_branch=None) + bitbucket_config = BitbucketOidcConfig(None) + oidc_config = OidcConfig(oidc_provider="gitlab", oidc_provider_url=None, oidc_client_id=None) + + gc: GuidedContext = GuidedContext( + github_config=github_config, + gitlab_config=gitlab_config, + bitbucket_config=bitbucket_config, + oidc_config=oidc_config, + 
image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, # Exclude ECR repo, it has its own detailed test below + permissions_provider="oidc", + ) + gc.run() + prompt_account_id_mock.assert_called_once() + self.assertTrue(self.did_prompt_text_like("Stage configuration Name", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("Pipeline execution role", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("CloudFormation execution role", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("Artifact bucket", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("region", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("URL of the OIDC provider", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("OIDC Client ID", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("GitLab Group", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("GitLab Project", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("branch that deployments", click_mock.prompt)) + + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + def test_guided_context_prompts_oidc_url_if_missing_or_invalid(self, click_mock): + github_config = GitHubOidcConfig(github_org=None, github_repo=None, deployment_branch=None) + gitlab_config = GitLabOidcConfig(gitlab_group=None, gitlab_project=None, deployment_branch=None) + bitbucket_config = BitbucketOidcConfig(None) + oidc_config = OidcConfig(oidc_provider=ANY_OIDC_PROVIDER, oidc_provider_url=None, oidc_client_id=None) + + gc: GuidedContext = GuidedContext( + github_config=github_config, + gitlab_config=gitlab_config, + bitbucket_config=bitbucket_config, + oidc_config=oidc_config, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, # Exclude ECR repo, it has its own detailed test below + permissions_provider="oidc", + ) + click_mock.prompt = Mock(return_value=ANY_OIDC_PROVIDER_URL) + + gc._validate_oidc_provider_url() + self.assertTrue(self.did_prompt_text_like("Please enter the URL of the OIDC provider", click_mock.echo)) + + gc.oidc_config.oidc_provider_url = "Missing_Https://_At_The_Start.com" + + gc._validate_oidc_provider_url() + self.assertTrue(self.did_prompt_text_like("Please ensure the OIDC URL begins with 'https://", click_mock.echo)) + + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + def test_guided_context_oidc_provider_prompt(self, click_mock): + github_config = GitHubOidcConfig(github_org=None, github_repo=None, deployment_branch=None) + gitlab_config = GitLabOidcConfig(gitlab_group=None, gitlab_project=None, deployment_branch=None) + bitbucket_config = BitbucketOidcConfig(None) + oidc_config = OidcConfig(oidc_provider=None, oidc_provider_url=None, oidc_client_id=None) + + gc: GuidedContext = GuidedContext( + github_config=github_config, + gitlab_config=gitlab_config, + bitbucket_config=bitbucket_config, + oidc_config=oidc_config, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, # Exclude ECR repo, it has its own detailed test below + permissions_provider="oidc", + ) + click_mock.prompt = Mock(return_value="1") + + self.assertTrue(gc.oidc_config.oidc_provider is None) + gc._prompt_oidc_provider() + self.assertTrue(self.did_prompt_text_like("Select an OIDC Provider", click_mock.echo)) + self.assertTrue(self.did_prompt_text_like("1 - GitHub Actions", click_mock.echo)) + self.assertTrue(gc.oidc_config.oidc_provider == GITHUB_ACTIONS) + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") 
@patch("samcli.commands.pipeline.bootstrap.guided_context.click") @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._prompt_account_id") @@ -70,11 +343,21 @@ def test_guided_context_will_not_prompt_for_not_provided_image_repository_if_no_ self, prompt_account_id_mock, click_mock, account_id_mock ): account_id_mock.return_value = "1234567890" + github_config = GitHubOidcConfig(github_org=None, github_repo=None, deployment_branch=None) + gitlab_config = GitLabOidcConfig(gitlab_group=None, gitlab_project=None, deployment_branch=None) + bitbucket_config = BitbucketOidcConfig(None) + oidc_config = OidcConfig(oidc_provider=ANY_OIDC_PROVIDER, oidc_provider_url=None, oidc_client_id=None) + # ECR Image Repository choices: # 1 - No, My SAM Template won't include lambda functions of Image package-type # 2 - Yes, I need a help creating one # 3 - I already have an ECR image repository + gc_without_ecr_info: GuidedContext = GuidedContext( + github_config=github_config, + gitlab_config=gitlab_config, + bitbucket_config=bitbucket_config, + oidc_config=oidc_config, stage_configuration_name=ANY_STAGE_CONFIGURATION_NAME, pipeline_user_arn=ANY_PIPELINE_USER_ARN, pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, @@ -85,21 +368,21 @@ def test_guided_context_will_not_prompt_for_not_provided_image_repository_if_no_ self.assertIsNone(gc_without_ecr_info.image_repository_arn) click_mock.confirm.return_value = False # the user chose to not CREATE an ECR Image repository - click_mock.prompt.side_effect = [None, "0"] + click_mock.prompt.side_effect = [None, "0", "0"] gc_without_ecr_info.run() self.assertIsNone(gc_without_ecr_info.image_repository_arn) self.assertFalse(gc_without_ecr_info.create_image_repository) self.assertFalse(self.did_prompt_text_like("Please enter the ECR image repository", click_mock.prompt)) click_mock.confirm.return_value = True # the user chose to CREATE an ECR Image repository - click_mock.prompt.side_effect = [None, None, "0"] + click_mock.prompt.side_effect = [None, None, "0", "0"] gc_without_ecr_info.run() self.assertIsNone(gc_without_ecr_info.image_repository_arn) self.assertTrue(gc_without_ecr_info.create_image_repository) self.assertTrue(self.did_prompt_text_like("Please enter the ECR image repository", click_mock.prompt)) click_mock.confirm.return_value = True # the user already has a repo - click_mock.prompt.side_effect = [None, ANY_IMAGE_REPOSITORY_ARN, "0"] + click_mock.prompt.side_effect = [None, ANY_IMAGE_REPOSITORY_ARN, "0", "0"] gc_without_ecr_info.run() self.assertFalse(gc_without_ecr_info.create_image_repository) self.assertTrue( diff --git a/tests/unit/commands/pipeline/bootstrap/test_oidc_config.py b/tests/unit/commands/pipeline/bootstrap/test_oidc_config.py new file mode 100644 index 0000000000..380ca61bed --- /dev/null +++ b/tests/unit/commands/pipeline/bootstrap/test_oidc_config.py @@ -0,0 +1,65 @@ +from unittest import TestCase + +from samcli.commands.pipeline.bootstrap.oidc_config import ( + OidcConfig, + GitHubOidcConfig, + GitLabOidcConfig, + BitbucketOidcConfig, +) + +ANY_OIDC_PROVIDER = "ANY_PROVIDER" +ANY_OIDC_PROVIDER_URL = "ANY_PROVIDER_URL" +ANY_OIDC_CLIENT_ID = "ANY_CLIENT_ID" +ANY_GITHUB_ORG = "ANY_GITHUB_ORG" +ANY_GITHUB_REPO = "ANY_GITHUB_REPO" +ANY_DEPLOYMENT_BRANCH = "ANY_DEPLOYMENT_BRANCH" +ANY_GITLAB_PROJECT = "ANY_GITLAB_PROJECT" +ANY_GITLAB_GROUP = "ANY_GITLAB_GROUP" +ANY_BITBUCKET_REPO_UUID = "ANY_BITBUCKET_REPO_UUID" +ANY_SUBJECT_CLAIM = "ANY_SUBJECT_CLAIM" + + +class TestOidcConfig(TestCase): + def setUp(self) -> 
None: + self.oidc_config = OidcConfig( + oidc_provider=ANY_OIDC_PROVIDER, oidc_provider_url=ANY_OIDC_PROVIDER_URL, oidc_client_id=ANY_OIDC_CLIENT_ID + ) + self.github_config = GitHubOidcConfig( + github_org=ANY_GITHUB_ORG, github_repo=ANY_GITHUB_REPO, deployment_branch=ANY_DEPLOYMENT_BRANCH + ) + self.gitlab_config = GitLabOidcConfig( + gitlab_group=ANY_GITLAB_GROUP, gitlab_project=ANY_GITLAB_PROJECT, deployment_branch=ANY_DEPLOYMENT_BRANCH + ) + self.bitbucket_config = BitbucketOidcConfig(bitbucket_repo_uuid=ANY_BITBUCKET_REPO_UUID) + + def test_update_oidc_config(self): + self.oidc_config.update_values( + oidc_provider="updated_provider", oidc_client_id="updated_client_id", oidc_provider_url="updated_url" + ) + + self.assertEqual(self.oidc_config.oidc_provider, "updated_provider") + self.assertEqual(self.oidc_config.oidc_client_id, "updated_client_id") + self.assertEqual(self.oidc_config.oidc_provider_url, "updated_url") + + def test_update_github_config(self): + self.github_config.update_values( + github_org="updated_org", github_repo="updated_repo", deployment_branch="updated_branch" + ) + + self.assertEqual(self.github_config.github_org, "updated_org") + self.assertEqual(self.github_config.github_repo, "updated_repo") + self.assertEqual(self.github_config.deployment_branch, "updated_branch") + + def test_update_gitlab_config(self): + self.gitlab_config.update_values( + gitlab_group="updated_group", gitlab_project="updated_project", deployment_branch="updated_branch" + ) + + self.assertEqual(self.gitlab_config.gitlab_group, "updated_group") + self.assertEqual(self.gitlab_config.gitlab_project, "updated_project") + self.assertEqual(self.gitlab_config.deployment_branch, "updated_branch") + + def test_update_bitbucket_config(self): + self.bitbucket_config.update_values(bitbucket_repo_uuid="updated_uuid") + + self.assertEqual(self.bitbucket_config.bitbucket_repo_uuid, "updated_uuid") diff --git a/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py b/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py index 8836e7f37d..2b82846a84 100644 --- a/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py +++ b/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py @@ -46,6 +46,8 @@ def test_app_pipeline_templates_clone_fail_when_an_old_clone_exists( app_pipeline_templates_path_mock = Mock() selected_pipeline_template_path_mock = Mock() pipeline_templates_manifest_mock = Mock() + selected_pipeline_template_metadata = select_pipeline_template_mock.return_value = Mock() + selected_pipeline_template_metadata.provider = "gitlab" shared_path_mock.joinpath.return_value = app_pipeline_templates_path_mock app_pipeline_templates_path_mock.exists.return_value = True # An old clone exists app_pipeline_templates_path_mock.joinpath.return_value = selected_pipeline_template_path_mock @@ -62,7 +64,7 @@ def test_app_pipeline_templates_clone_fail_when_an_old_clone_exists( app_pipeline_templates_path_mock.exists.assert_called_once() read_app_pipeline_templates_manifest_mock.assert_called_once_with(app_pipeline_templates_path_mock) select_pipeline_template_mock.assert_called_once_with(pipeline_templates_manifest_mock) - generate_from_pipeline_template_mock.assert_called_once_with(selected_pipeline_template_path_mock) + generate_from_pipeline_template_mock.assert_called_once_with(selected_pipeline_template_path_mock, "gitlab") @patch("samcli.commands.pipeline.init.interactive_init_flow.shared_path") @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") 
@@ -185,8 +187,10 @@ def test_generate_pipeline_configuration_file_from_app_pipeline_template_happy_c str(["1", "pipeline_execution_role"]): "arn:aws:iam::123456789012:role/execution-role", str(["prod", "pipeline_execution_role"]): "arn:aws:iam::123456789012:role/execution-role", str(["2", "pipeline_execution_role"]): "arn:aws:iam::123456789012:role/execution-role", + str(["default", "pipeline_execution_role"]): "arn:aws:iam::123456789012:role/execution-role", str(["stage_names_message"]): "Here are the stage configuration names detected " f'in {os.path.join(".aws-sam", "pipeline", "pipelineconfig.toml")}:\n\t1 - testing\n\t2 - prod', + "shared_values": "default", } ) cookiecutter_mock.assert_called_once_with( @@ -507,12 +511,12 @@ def test_with_bootstrap_but_answer_no( InteractiveInitFlow(allow_bootstrap=True).do_interactive() # verify - _prompt_run_bootstrap_within_pipeline_init_mock.assert_called_once_with(["testing"], 2) + _prompt_run_bootstrap_within_pipeline_init_mock.assert_called_once_with(["testing"], 2, "jenkins") @parameterized.expand( [ - ([["testing"], ["testing", "prod"]], [call(["testing"], 2)]), - ([[], ["testing"], ["testing", "prod"]], [call([], 2), call(["testing"], 2)]), + ([["testing"], ["testing", "prod"]], [call(["testing"], 2, "jenkins")]), + ([[], ["testing"], ["testing", "prod"]], [call([], 2, "jenkins"), call(["testing"], 2, "jenkins")]), ] ) @patch("samcli.commands.pipeline.init.interactive_init_flow.SamConfig") diff --git a/tests/unit/lib/cookiecutter/test_template.py b/tests/unit/lib/cookiecutter/test_template.py index 318939f46b..22b8a66711 100644 --- a/tests/unit/lib/cookiecutter/test_template.py +++ b/tests/unit/lib/cookiecutter/test_template.py @@ -73,7 +73,7 @@ def test_run_interactive_flows(self, mock_plugin, mock_interactive_flow): # Template with no interactive-flows neither direct nor through a plugin t = Template(location=self._ANY_LOCATION) context = t.run_interactive_flows() - self.assertEqual(context, {}) + self.assertEqual(context, {"shared_values": "default"}) # Template with direct interactive flow only mock_interactive_flow.run.return_value = self._ANY_INTERACTIVE_FLOW_CONTEXT mock_plugin.interactive_flow = None diff --git a/tests/unit/lib/pipeline/bootstrap/test_environment.py b/tests/unit/lib/pipeline/bootstrap/test_environment.py index 6c50f25a75..1afbc86eab 100644 --- a/tests/unit/lib/pipeline/bootstrap/test_environment.py +++ b/tests/unit/lib/pipeline/bootstrap/test_environment.py @@ -1,6 +1,11 @@ +import hashlib from unittest import TestCase from unittest.mock import Mock, patch, call, MagicMock +import OpenSSL.SSL # type: ignore +import requests + +from samcli.commands.pipeline.bootstrap.guided_context import GITHUB_ACTIONS from samcli.lib.pipeline.bootstrap.stage import Stage ANY_STAGE_CONFIGURATION_NAME = "ANY_STAGE_CONFIGURATION_NAME" @@ -10,6 +15,14 @@ ANY_ARTIFACTS_BUCKET_ARN = "ANY_ARTIFACTS_BUCKET_ARN" ANY_IMAGE_REPOSITORY_ARN = "ANY_IMAGE_REPOSITORY_ARN" ANY_ARN = "ANY_ARN" +ANY_OIDC_PROVIDER_URL = "ANY_OIDC_PROVIDER_URL" +ANY_OIDC_CLIENT_ID = "ANY_OIDC_CLIENT_ID" +ANY_OIDC_PROVIDER = "ANY_OIDC_PROVIDER" +ANY_SUBJECT_CLAIM = "ANY_SUBJECT_CLAIM" +ANY_GITHUB_REPO = "ANY_GITHUB_REPO" +ANY_GITHUB_ORG = "ANY_GITHUB_ORG" +ANY_DEPLOYMENT_BRANCH = "ANY_DEPLOYMENT_BRANCH" +ANY_OIDC_PHYSICAL_RESOURCE_ID = "ANY_OIDC_PHYSICAL_RESOURCE_ID" class TestStage(TestCase): @@ -89,15 +102,15 @@ def test_did_user_provide_all_required_resources_when_image_repository_is_requir 
@patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") @patch("samcli.lib.pipeline.bootstrap.stage.click") - @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + @patch("samcli.lib.pipeline.bootstrap.stage.update_stack") def test_did_user_provide_all_required_resources_returns_false_if_the_stage_was_initialized_without_any_of_the_resources_even_if_fulfilled_after_bootstrap( - self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + self, update_stack_mock, click_mock, pipeline_user_secret_pair_mock ): # setup stack_output = Mock() pipeline_user_secret_pair_mock.return_value = ("id", "secret") stack_output.get.return_value = ANY_ARN - manage_stack_mock.return_value = stack_output + update_stack_mock.return_value = stack_output stage: Stage = Stage(name=ANY_STAGE_CONFIGURATION_NAME) self.assertFalse(stage.did_user_provide_all_required_resources()) @@ -115,60 +128,60 @@ def test_did_user_provide_all_required_resources_returns_false_if_the_stage_was_ self.assertFalse(stage.did_user_provide_all_required_resources()) @patch("samcli.lib.pipeline.bootstrap.stage.click") - @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + @patch("samcli.lib.pipeline.bootstrap.stage.update_stack") @patch.object(Stage, "did_user_provide_all_required_resources") def test_bootstrap_will_not_deploy_the_cfn_template_if_all_resources_are_already_provided( - self, did_user_provide_all_required_resources_mock, manage_stack_mock, click_mock + self, did_user_provide_all_required_resources_mock, update_stack_mock, click_mock ): did_user_provide_all_required_resources_mock.return_value = True stage: Stage = Stage(name=ANY_STAGE_CONFIGURATION_NAME) stage.bootstrap(confirm_changeset=False) - manage_stack_mock.assert_not_called() + update_stack_mock.assert_not_called() @patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") @patch("samcli.lib.pipeline.bootstrap.stage.click") - @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + @patch("samcli.lib.pipeline.bootstrap.stage.update_stack") def test_bootstrap_will_confirm_before_deploying_unless_confirm_changeset_is_disabled( - self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + self, update_stack_mock, click_mock, pipeline_user_secret_pair_mock ): click_mock.confirm.return_value = False pipeline_user_secret_pair_mock.return_value = ("id", "secret") stage: Stage = Stage(name=ANY_STAGE_CONFIGURATION_NAME) stage.bootstrap(confirm_changeset=False) click_mock.confirm.assert_not_called() - manage_stack_mock.assert_called_once() - manage_stack_mock.reset_mock() + update_stack_mock.assert_called_once() + update_stack_mock.reset_mock() stage.bootstrap(confirm_changeset=True) click_mock.confirm.assert_called_once() - manage_stack_mock.assert_not_called() # As the user choose to not confirm + update_stack_mock.assert_not_called() # As the user choose to not confirm @patch("samcli.lib.pipeline.bootstrap.stage.click") - @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + @patch("samcli.lib.pipeline.bootstrap.stage.update_stack") def test_bootstrap_will_not_deploy_the_cfn_template_if_the_user_did_not_confirm( - self, manage_stack_mock, click_mock + self, update_stack_mock, click_mock ): click_mock.confirm.return_value = False stage: Stage = Stage(name=ANY_STAGE_CONFIGURATION_NAME) stage.bootstrap(confirm_changeset=True) - manage_stack_mock.assert_not_called() + update_stack_mock.assert_not_called() 
@patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") @patch("samcli.lib.pipeline.bootstrap.stage.click") - @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + @patch("samcli.lib.pipeline.bootstrap.stage.update_stack") def test_bootstrap_will_deploy_the_cfn_template_if_the_user_did_confirm( - self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + self, update_stack_mock, click_mock, pipeline_user_secret_pair_mock ): click_mock.confirm.return_value = True pipeline_user_secret_pair_mock.return_value = ("id", "secret") stage: Stage = Stage(name=ANY_STAGE_CONFIGURATION_NAME) stage.bootstrap(confirm_changeset=True) - manage_stack_mock.assert_called_once() + update_stack_mock.assert_called_once() @patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") @patch("samcli.lib.pipeline.bootstrap.stage.click") - @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + @patch("samcli.lib.pipeline.bootstrap.stage.update_stack") def test_bootstrap_will_pass_arns_of_all_user_provided_resources_any_empty_strings_for_other_resources_to_the_cfn_stack( - self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + self, update_stack_mock, click_mock, pipeline_user_secret_pair_mock ): click_mock.confirm.return_value = True pipeline_user_secret_pair_mock.return_value = ("id", "secret") @@ -180,8 +193,8 @@ def test_bootstrap_will_pass_arns_of_all_user_provided_resources_any_empty_strin image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, ) stage.bootstrap() - manage_stack_mock.assert_called_once() - args, kwargs = manage_stack_mock.call_args_list[0] + update_stack_mock.assert_called_once() + args, kwargs = update_stack_mock.call_args_list[0] expected_parameter_overrides = { "PipelineUserArn": ANY_PIPELINE_USER_ARN, "PipelineExecutionRoleArn": "", @@ -189,20 +202,27 @@ def test_bootstrap_will_pass_arns_of_all_user_provided_resources_any_empty_strin "ArtifactsBucketArn": ANY_ARTIFACTS_BUCKET_ARN, "CreateImageRepository": "true", "ImageRepositoryArn": ANY_IMAGE_REPOSITORY_ARN, + "UseOidcProvider": "false", + "CreateNewOidcProvider": "false", + "IdentityProviderThumbprint": "", + "OidcClientId": "", + "OidcProviderUrl": "", + "SubjectClaim": "", + "UseOidcProvider": "false", } self.assertEqual(expected_parameter_overrides, kwargs["parameter_overrides"]) @patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") @patch("samcli.lib.pipeline.bootstrap.stage.click") - @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + @patch("samcli.lib.pipeline.bootstrap.stage.update_stack") def test_bootstrap_will_fullfill_all_resource_arns( - self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + self, update_stack_mock, click_mock, pipeline_user_secret_pair_mock ): # setup pipeline_user_secret_pair_mock.return_value = ("id", "secret") stack_output = Mock() stack_output.get.return_value = ANY_ARN - manage_stack_mock.return_value = stack_output + update_stack_mock.return_value = stack_output stage: Stage = Stage(name=ANY_STAGE_CONFIGURATION_NAME) click_mock.confirm.return_value = True @@ -216,7 +236,7 @@ def test_bootstrap_will_fullfill_all_resource_arns( stage.bootstrap() # verify - manage_stack_mock.assert_called_once() + update_stack_mock.assert_called_once() self.assertEqual(ANY_ARN, stage.pipeline_user.arn) self.assertEqual(ANY_ARN, stage.pipeline_execution_role.arn) self.assertEqual(ANY_ARN, stage.cloudformation_execution_role.arn) @@ -246,6 +266,9 @@ def 
test_save_config_escapes_none_resources(self, samconfig_mock): expected_calls.append( call(cmd_names=cmd_names, section="parameters", key="pipeline_user", value=ANY_PIPELINE_USER_ARN) ) + expected_calls.append( + call(cmd_names=cmd_names, section="parameters", key="permissions_provider", value="AWS IAM") + ) self.trigger_and_assert_save_config_calls( stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put ) @@ -401,6 +424,137 @@ def test_print_resources_summary_prints_the_credentials_of_the_pipeline_user_iff self.assert_summary_has_a_message_like("AWS_ACCESS_KEY_ID", click_mock.secho) self.assert_summary_has_a_message_like("AWS_SECRET_ACCESS_KEY", click_mock.secho) + @patch("samcli.lib.pipeline.bootstrap.stage.crypto") + @patch("samcli.lib.pipeline.bootstrap.stage.socket") + @patch("samcli.lib.pipeline.bootstrap.stage.SSL") + @patch("samcli.lib.pipeline.bootstrap.stage.requests") + @patch("samcli.lib.pipeline.bootstrap.stage.click") + def test_generate_oidc_provider_thumbprint(self, click_mock, requests_mock, ssl_mock, socket_mock, crypto_mock): + # setup + stage: Stage = Stage( + name=ANY_STAGE_CONFIGURATION_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, + ) + response_mock = Mock(requests.Response) + requests_mock.get.return_value = response_mock + response_mock.json.return_value = {"jwks_uri": "https://server.example.com/test"} + connection_mock = Mock(OpenSSL.SSL.Connection) + ssl_mock.Connection.return_value = connection_mock + certificate_mock = Mock(OpenSSL.crypto.x509) + connection_mock.get_peer_cert_chain.return_value = [certificate_mock] + dumped_certificate = "not a real certificate object dump".encode("utf-8") + crypto_mock.dump_certificate.return_value = dumped_certificate + expected_thumbprint = hashlib.sha1(dumped_certificate).hexdigest() + + # trigger + actual_thumbprint = stage.generate_thumbprint("https://server.example.com") + + # verify + self.assertEqual(expected_thumbprint, actual_thumbprint) + + @patch("samcli.lib.pipeline.bootstrap.stage.Stage.generate_thumbprint") + @patch("samcli.lib.pipeline.bootstrap.stage.boto3") + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.update_stack") + def test_creates_new_oidc_provider_if_needed( + self, update_stack_mock, click_mock, boto3_mock, generate_thumbprint_mock + ): + + # setup + stage: Stage = Stage( + name=ANY_STAGE_CONFIGURATION_NAME, + permissions_provider="oidc", + oidc_provider_url=ANY_OIDC_PROVIDER_URL, + oidc_client_id=ANY_OIDC_CLIENT_ID, + subject_claim=ANY_SUBJECT_CLAIM, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=False, + ) + stack_output = Mock() + stack_output.get.return_value = ANY_ARN + update_stack_mock.return_value = stack_output + client_mock = Mock() + boto3_mock.client.return_value = client_mock + open_id_connect_providers_mock = {"OpenIDConnectProviderList": [{"Arn": ANY_ARN}]} + client_mock.list_open_id_connect_providers.return_value = open_id_connect_providers_mock + + self.assertFalse(stage.create_new_oidc_provider) + + # trigger + stage.bootstrap(confirm_changeset=False) + + # verify + self.assertTrue(stage.create_new_oidc_provider) + + @patch("samcli.lib.pipeline.bootstrap.stage.Stage.generate_thumbprint") + 
@patch("samcli.lib.pipeline.bootstrap.stage.boto3") + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.update_stack") + def test_doesnt_create_new_oidc_provider(self, update_stack_mock, click_mock, boto3_mock, generate_thumbprint_mock): + + # setup + stage: Stage = Stage( + name=ANY_STAGE_CONFIGURATION_NAME, + permissions_provider="oidc", + oidc_provider_url=ANY_OIDC_PROVIDER_URL, + oidc_client_id=ANY_OIDC_CLIENT_ID, + subject_claim=ANY_SUBJECT_CLAIM, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=False, + ) + stack_output = Mock() + stack_output.get.return_value = ANY_ARN + update_stack_mock.return_value = stack_output + client_mock = Mock() + session_mock = Mock() + boto3_mock.Session.return_value = session_mock + session_mock.client.return_value = client_mock + open_id_connect_providers_mock = {"OpenIDConnectProviderList": [{"Arn": ANY_OIDC_PROVIDER_URL}]} + stack_detail_mock = {"StackResourceDetail": {"PhysicalResourceId": ANY_OIDC_PHYSICAL_RESOURCE_ID}} + client_mock.list_open_id_connect_providers.return_value = open_id_connect_providers_mock + client_mock.describe_stack_resource.return_value = stack_detail_mock + + # trigger + stage.bootstrap(confirm_changeset=False) + + # verify + self.assertFalse(stage.create_new_oidc_provider) + + @patch("samcli.lib.pipeline.bootstrap.stage.boto3") + def test_should_create_new_oidc_provider_returns_true_if_no_url(self, boto3_mock): + + # setup + stage: Stage = Stage( + name=ANY_STAGE_CONFIGURATION_NAME, + permissions_provider="oidc", + oidc_provider_url="", + oidc_client_id=ANY_OIDC_CLIENT_ID, + subject_claim=ANY_SUBJECT_CLAIM, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=False, + ) + client_mock = Mock() + boto3_mock.client.return_value = client_mock + open_id_connect_providers_mock = {"OpenIDConnectProviderList": [{"Arn": ANY_OIDC_PROVIDER_URL}]} + client_mock.list_open_id_connect_providers.return_value = open_id_connect_providers_mock + stack_detail_mock = {"StackResourceDetail": {"PhysicalResourceId": ANY_OIDC_PHYSICAL_RESOURCE_ID}} + client_mock.describe_stack_resource.return_value = stack_detail_mock + + # trigger + result = stage._should_create_new_provider("random_stack_name") + + # verify + self.assertFalse(result) + def assert_summary_has_a_message_like(self, msg, click_secho_mock): self.assertTrue( self.does_summary_have_a_message_like(msg, click_secho_mock), diff --git a/tests/unit/lib/utils/test_managed_cloudformation_stack.py b/tests/unit/lib/utils/test_managed_cloudformation_stack.py index 797d26d1c0..a449c51159 100644 --- a/tests/unit/lib/utils/test_managed_cloudformation_stack.py +++ b/tests/unit/lib/utils/test_managed_cloudformation_stack.py @@ -9,7 +9,13 @@ from samcli.commands.exceptions import UserException, CredentialsError, RegionError from samcli.lib.bootstrap.bootstrap import _get_stack_template, SAM_CLI_STACK_NAME -from samcli.lib.utils.managed_cloudformation_stack import manage_stack, _create_or_get_stack, ManagedStackError +from samcli.lib.utils.managed_cloudformation_stack import ( + manage_stack, + update_stack, + _create_or_get_stack, + _create_or_update_stack, + ManagedStackError, +) CLOUDFORMATION_CLIENT = 
botocore.session.get_session().create_client("cloudformation", region_name="us-west-2") @@ -37,6 +43,17 @@ def test_session_missing_profile(self, boto_mock): template_body=_get_stack_template(), ) + @patch("boto3.Session") + def test_session_missing_profile_update(self, boto_mock): + boto_mock.side_effect = ProfileNotFound(profile="test-profile") + with self.assertRaises(CredentialsError): + update_stack( + profile="test-profile", + region="fake-region", + stack_name=SAM_CLI_STACK_NAME, + template_body=_get_stack_template(), + ) + @patch("boto3.client") def test_client_missing_credentials(self, boto_mock): boto_mock.side_effect = NoCredentialsError() @@ -45,6 +62,14 @@ def test_client_missing_credentials(self, boto_mock): profile=None, region="fake-region", stack_name=SAM_CLI_STACK_NAME, template_body=_get_stack_template() ) + @patch("boto3.client") + def test_client_missing_credentials_update(self, boto_mock): + boto_mock.side_effect = NoCredentialsError() + with self.assertRaises(CredentialsError): + update_stack( + profile=None, region="fake-region", stack_name=SAM_CLI_STACK_NAME, template_body=_get_stack_template() + ) + @patch("boto3.client") def test_client_missing_region(self, boto_mock): boto_mock.side_effect = NoRegionError() @@ -53,6 +78,14 @@ def test_client_missing_region(self, boto_mock): profile=None, region="fake-region", stack_name=SAM_CLI_STACK_NAME, template_body=_get_stack_template() ) + @patch("boto3.client") + def test_client_missing_region_update(self, boto_mock): + boto_mock.side_effect = NoRegionError() + with self.assertRaises(RegionError): + update_stack( + profile=None, region="fake-region", stack_name=SAM_CLI_STACK_NAME, template_body=_get_stack_template() + ) + @patch("boto3.client") def test_new_stack(self, boto_mock): stub_cf, stubber = self._stubbed_cf_client() From ff3ca0bc09c51831dfdc9d5cf3eb513f0813a9c1 Mon Sep 17 00:00:00 2001 From: Wing Fung Lau <4760060+hawflau@users.noreply.github.com> Date: Thu, 13 Oct 2022 08:58:15 -0700 Subject: [PATCH 8/8] chore: bump version to 1.60.0 (#4302) Co-authored-by: Lau --- samcli/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/__init__.py b/samcli/__init__.py index a581cb0e15..11af7713f4 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.59.0" +__version__ = "1.60.0"