diff --git a/docs/images/TechnicalGuide-AccountManagementStateMachine.drawio.png b/docs/images/TechnicalGuide-AccountManagementStateMachine.drawio.png new file mode 100644 index 000000000..9d1b36bed Binary files /dev/null and b/docs/images/TechnicalGuide-AccountManagementStateMachine.drawio.png differ diff --git a/docs/images/TechnicalGuide-BootstrapRepo.drawio.png b/docs/images/TechnicalGuide-BootstrapRepo.drawio.png new file mode 100644 index 000000000..0e3d9c8e3 Binary files /dev/null and b/docs/images/TechnicalGuide-BootstrapRepo.drawio.png differ diff --git a/docs/images/TechnicalGuide-BootstrapRepo.png b/docs/images/TechnicalGuide-BootstrapRepo.png deleted file mode 100644 index bd68f6c4a..000000000 Binary files a/docs/images/TechnicalGuide-BootstrapRepo.png and /dev/null differ diff --git a/docs/technical-guide.md b/docs/technical-guide.md index 565e1d06f..1cd339c0d 100644 --- a/docs/technical-guide.md +++ b/docs/technical-guide.md @@ -1,8 +1,24 @@ -## Technical Guide -### Introduction +# Technical Guide +## Introduction This document is intended to give insight into how the AWS Deployment Framework works under the hood. -### High Level Overview - AWS Deployment Framework Bootstrap Repository +## High Level Overview - AWS Deployment Framework Bootstrap Repository The AWS Deployment Framework Bootstrap Repository aka "Bootstrap Repo" is where the source code used by ADF lives. The bootstrap repo is also where your accounts, OU layout and base templates are defined. The flow below is a high level overview of what happens when a change is committed to this repository. ![bootstrap-repo-overview](./images/TechnicalGuide-BootstrapRepo.png) + +### Account Management State Machine +The Account Management State Machine is triggered by S3 PUT events to the ADF Accounts bucket. +Below is a diagram detailing the components of the standard state machine. This state machine is defined in `src/account_processing.yml` and the Lambda function code is located in `src/lambda_codebase/account_processing`. +![account-management-state-machine](./images/TechnicalGuide-AccountManagementStateMachine.drawio.png) + + +## High Level Overview - AWS Deployment Framework Pipeline Repository +The AWS Deployment Framework Pipeline Repository aka "Pipeline Repo" is where the deployment map definitions live. It typically exists in CodeCommit within your Deployment Account(s). +The diagram below details what happens when a commit is pushed to this repository. +![pipeline-repo-overview](./images/TechnicalGuide-PipelineRepo.drawio.png) + +### Pipeline Management State Machine +The Pipeline Management State Machine is triggered by S3 PUT events to the ADF Pipelines bucket. This state machine is responsible for expanding the deployment map, resolving the targets, creating pipeline definitions (JSON objects that detail the source(s), the stages involved, and the targets) and then generating CDK stacks from those definitions. +

It additionally covers the deletion of stale pipelines. A stale pipeline is any pipeline that has a stored definition but no longer exists in any deployment map.
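To make the stale-pipeline clean-up concrete, here is a minimal Python sketch mirroring the check performed by the new `identify_out_of_date_pipelines.py` Lambda in this change; the pipeline prefix and pipeline names below are illustrative only (ADF derives the real prefix from the `ADF_PIPELINE_PREFIX` environment variable):

```python
# Simplified sketch: any pipeline recorded in SSM Parameter Store that no
# longer appears in a deployment map is marked for deletion.
ADF_PIPELINE_PREFIX = "adf-pipeline-"  # illustrative prefix


def identify_out_of_date_pipelines(map_pipeline_names, stored_pipeline_names):
    """Return pipelines that have a stored definition but no deployment map entry."""
    stale = stored_pipeline_names.difference(map_pipeline_names)
    return [{"pipeline": f"{ADF_PIPELINE_PREFIX}{name}"} for name in stale]


# "legacy-app" was removed from the deployment map, so it is handed to the
# pipeline deletion state machine as a stack to delete.
print(identify_out_of_date_pipelines(
    {"sample-iam", "sample-vpc"},
    {"sample-iam", "sample-vpc", "legacy-app"},
))
# [{'pipeline': 'adf-pipeline-legacy-app'}]
```

The actual Lambda additionally deletes the `/deployment/<pipeline>/regions` SSM parameters of the stale pipelines and passes the resulting list, together with a hash and trace id, to the pipeline deletion state machine.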
\ No newline at end of file diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml index f3291d384..98f33af58 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml @@ -170,6 +170,25 @@ Resources: BlockPublicPolicy: true IgnorePublicAcls: true RestrictPublicBuckets: true + PipelineManagementApplication: + Type: AWS::Serverless::Application + DeletionPolicy: Delete + UpdateReplacePolicy: Retain + Properties: + Location: pipeline_management.yml + Parameters: + LambdaLayer: !Ref LambdaLayerVersion + ADFVersion: !Ref ADFVersion + OrganizationID: !Ref OrganizationId + CrossAccountAccessRole: !Ref CrossAccountAccessRole + PipelineBucket: !Ref PipelineBucket + RootAccountId: !Ref MasterAccountId + CodeBuildImage: !Ref Image + CodeBuildComputeType: !Ref ComputeType + SharedModulesBucket: !Ref SharedModulesBucket + PipelinePrefix: !Ref PipelinePrefix + StackPrefix: !Ref StackPrefix + ADFLogLevel: !Ref ADFLogLevel CodeCommitRole: Type: AWS::IAM::Role @@ -260,6 +279,8 @@ Resources: - !Sub arn:${AWS::Partition}:s3:::${PipelineBucket}/* - !Sub arn:${AWS::Partition}:s3:::${SharedModulesBucket} - !Sub arn:${AWS::Partition}:s3:::${SharedModulesBucket}/* + - !Sub arn:${AWS::Partition}:s3:::${PipelineManagementApplication.Outputs.Bucket} + - !Sub arn:${AWS::Partition}:s3:::${PipelineManagementApplication.Outputs.Bucket}/* - Effect: Allow Sid: "KMS" Action: @@ -354,6 +375,8 @@ Resources: - !Sub arn:${AWS::Partition}:s3:::${PipelineBucket}/* - !Sub arn:${AWS::Partition}:s3:::${SharedModulesBucket} - !Sub arn:${AWS::Partition}:s3:::${SharedModulesBucket}/* + - !Sub arn:${AWS::Partition}:s3:::${PipelineManagementApplication.Outputs.Bucket} + - !Sub arn:${AWS::Partition}:s3:::${PipelineManagementApplication.Outputs.Bucket}/* - Effect: Allow Sid: "KMS" Action: @@ -716,6 +739,8 @@ Resources: Value: !Ref PipelineBucket - Name: SHARED_MODULES_BUCKET Value: !Ref SharedModulesBucket + - Name: ADF_PIPELINES_BUCKET + Value: !GetAtt PipelineManagementApplication.Outputs.Bucket - Name: ADF_PIPELINE_PREFIX Value: !Ref PipelinePrefix - Name: ADF_STACK_PREFIX @@ -744,23 +769,8 @@ Resources: - pip install -r adf-build/requirements.txt -q -t ./adf-build build: commands: - - cdk --version - - >- - chmod 755 - adf-build/cdk/execute_pipeline_stacks.py - adf-build/cdk/generate_pipeline_inputs.py - adf-build/cdk/generate_pipeline_stacks.py - adf-build/cdk/clean_pipelines.py - - python adf-build/cdk/generate_pipeline_inputs.py - - >- - cdk synth - --no-version-reporting - --app adf-build/cdk/generate_pipeline_stacks.py - 1> /dev/null - - python adf-build/cdk/execute_pipeline_stacks.py - post_build: - commands: - - python adf-build/cdk/clean_pipelines.py + - aws s3 cp deployment_map.yml s3://$ADF_PIPELINES_BUCKET/deployment_map.yml + - aws s3 sync deployment_maps/* s3://$ADF_PIPELINES_BUCKET ServiceRole: !GetAtt PipelineProvisionerCodeBuildRole.Arn Tags: - Key: "Name" diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py new file mode 100644 index 000000000..b8f6ef550 --- /dev/null +++ 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py @@ -0,0 +1,60 @@ +""" +Pipeline Management Lambda Function +Creates or Updates an Event Rule for forwarding events +If the source account != the Deployment account +""" + +import os +import boto3 + +from cache import Cache +from rule import Rule +from logger import configure_logger +from cloudwatch import ADFMetrics + + +LOGGER = configure_logger(__name__) +DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"] +DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] +PIPELINE_MANAGEMENT_STATEMACHINE = os.getenv("PIPELINE_MANAGEMENT_STATEMACHINE_ARN") +CLOUDWATCH = boto3.client("cloudwatch") +METRICS = ADFMetrics(CLOUDWATCH, "PIPELINE_MANAGEMENT/RULE") + +_cache = None + + +def lambda_handler(pipeline, _): + """Main Lambda Entry point""" + + # pylint: disable=W0603 + # Global variable here to cache across lambda execution runtimes. + global _cache + if not _cache: + _cache = Cache() + METRICS.put_metric_data( + {"MetricName": "CacheInitalised", "Value": 1, "Unit": "Count"} + ) + + LOGGER.info(pipeline) + + _source_account_id = ( + pipeline.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("account_id", {}) + ) + if ( + _source_account_id + and int(_source_account_id) != int(DEPLOYMENT_ACCOUNT_ID) + and not _cache.check(_source_account_id) + ): + rule = Rule(pipeline["default_providers"]["source"]["properties"]["account_id"]) + rule.create_update() + _cache.add( + pipeline["default_providers"]["source"]["properties"]["account_id"], True + ) + METRICS.put_metric_data( + {"MetricName": "CreateOrUpdate", "Value": 1, "Unit": "Count"} + ) + + return pipeline diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_repository.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_repository.py new file mode 100644 index 000000000..fcfcfa376 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_repository.py @@ -0,0 +1,56 @@ +""" +Pipeline Management Lambda Function +Creates or Updates a CodeCommit Repository +""" + +import os +import boto3 +from repo import Repo + +from logger import configure_logger +from cloudwatch import ADFMetrics +from parameter_store import ParameterStore + + +CLOUDWATCH = boto3.client("cloudwatch") +METRICS = ADFMetrics(CLOUDWATCH, "PIPELINE_MANAGEMENT/REPO") +LOGGER = configure_logger(__name__) +DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"] +DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] + + +def lambda_handler(pipeline, _): + """Main Lambda Entry point""" + parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3) + auto_create_repositories = parameter_store.fetch_parameter( + "auto_create_repositories" + ) + LOGGER.info(auto_create_repositories) + if auto_create_repositories == "enabled": + code_account_id = ( + pipeline.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("account_id", {}) + ) + has_custom_repo = ( + pipeline.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("repository", {}) + ) + if ( + auto_create_repositories + and code_account_id + and str(code_account_id).isdigit() + and not has_custom_repo + ): + repo = Repo( + code_account_id, pipeline.get("name"), 
pipeline.get("description") + ) + repo.create_update() + METRICS.put_metric_data( + {"MetricName": "CreateOrUpdate", "Value": 1, "Unit": "Count"} + ) + + return pipeline diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/generate_pipeline_inputs.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/generate_pipeline_inputs.py new file mode 100644 index 000000000..2a9d34090 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/generate_pipeline_inputs.py @@ -0,0 +1,117 @@ +""" +Pipeline Management Lambda Function +Generates Pipeline Inputs +""" + +import os +import boto3 + +from pipeline import Pipeline +from target import Target, TargetStructure +from organizations import Organizations +from parameter_store import ParameterStore +from sts import STS +from logger import configure_logger +from partition import get_partition + + +LOGGER = configure_logger(__name__) +DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"] +DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] +ROOT_ACCOUNT_ID = os.environ["ROOT_ACCOUNT_ID"] + + +def store_regional_parameter_config(pipeline, parameter_store): + """ + Responsible for storing the region information for specific + pipelines. These regions are defined in the deployment_map + either as top level regions for a pipeline or stage specific regions + """ + if pipeline.top_level_regions: + parameter_store.put_parameter( + f"/deployment/{pipeline.name}/regions", + str(list(set(pipeline.top_level_regions))), + ) + return + + parameter_store.put_parameter( + f"/deployment/{pipeline.name}/regions", + str(list(set(Pipeline.flatten_list(pipeline.stage_regions)))), + ) + + +def fetch_required_ssm_params(regions): + output = {} + for region in regions: + parameter_store = ParameterStore(region, boto3) + output[region] = { + "s3": parameter_store.fetch_parameter( + f"/cross_region/s3_regional_bucket/{region}" + ), + "kms": parameter_store.fetch_parameter(f"/cross_region/kms_arn/{region}"), + } + if region == DEPLOYMENT_ACCOUNT_REGION: + output[region]["modules"] = parameter_store.fetch_parameter( + "deployment_account_bucket" + ) + output['default_scm_branch'] = parameter_store.fetch_parameter('default_scm_branch') + return output + + +def generate_pipeline_inputs(pipeline, organizations, parameter_store): + data = {} + pipeline_object = Pipeline(pipeline) + regions = [] + for target in pipeline.get("targets", []): + target_structure = TargetStructure(target) + for step in target_structure.target: + regions = step.get( + "regions", pipeline.get("regions", DEPLOYMENT_ACCOUNT_REGION) + ) + paths_tags = [] + for path in step.get("path", []): + paths_tags.append(path) + if step.get("tags") is not None: + paths_tags.append(step.get("tags", {})) + for path_or_tag in paths_tags: + pipeline_object.stage_regions.append(regions) + pipeline_target = Target( + path_or_tag, target_structure, organizations, step, regions + ) + pipeline_target.fetch_accounts_for_target() + # Targets should be a list of lists. + + # Note: This is a big shift away from how ADF handles targets natively. + # Previously this would be a list of [accountId(s)] it now returns a list of [[account_ids], [account_ids]] + # for the sake of consistency we should probably think of a target consisting of multiple "waves". 
So if you see + # any reference to a wave going forward it will be the individual batch of account ids + pipeline_object.template_dictionary["targets"].append( + list(target_structure.generate_waves()), + ) + + if DEPLOYMENT_ACCOUNT_REGION not in regions: + pipeline_object.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION) + + pipeline_object.generate_input() + data["ssm_params"] = fetch_required_ssm_params( + pipeline_object.input["regions"] or [DEPLOYMENT_ACCOUNT_REGION] + ) + data["input"] = pipeline_object.input + data['input']['default_scm_branch'] = data["ssm_params"].get('default_scm_branch') + store_regional_parameter_config(pipeline_object, parameter_store) + return data + + +def lambda_handler(pipeline, _): + """Main Lambda Entry point""" + parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3) + sts = STS() + role = sts.assume_cross_account_role( + f'arn:{get_partition(DEPLOYMENT_ACCOUNT_REGION)}:iam::{ROOT_ACCOUNT_ID}:role/{parameter_store.fetch_parameter("cross_account_access_role")}-readonly', + "pipeline", + ) + organizations = Organizations(role) + + output = generate_pipeline_inputs(pipeline, organizations, parameter_store) + + return output diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/identify_out_of_date_pipelines.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/identify_out_of_date_pipelines.py new file mode 100644 index 000000000..882f87796 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/identify_out_of_date_pipelines.py @@ -0,0 +1,101 @@ +""" +Pipeline Management Lambda Function +Compares pipeline definitions in S3 to the definitions stored in SSM Param Store. +Any that exist in param store but not S3 are marked for removal. 
+""" + +import os +import json +import hashlib +import tempfile + +import boto3 + +from logger import configure_logger +from deployment_map import DeploymentMap +from parameter_store import ParameterStore + + +LOGGER = configure_logger(__name__) +S3_BUCKET_NAME = os.environ["S3_BUCKET_NAME"] +DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] +ADF_PIPELINE_PREFIX = os.environ["ADF_PIPELINE_PREFIX"] +DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"] + + +def download_deployment_maps(resource, prefix, local): + paginator = resource.meta.client.get_paginator("list_objects") + for result in paginator.paginate( + Bucket=S3_BUCKET_NAME, Delimiter="/", Prefix=prefix + ): + LOGGER.debug("Downloaded deployment map: %s", result) + for subdir in result.get("CommonPrefixes", []): + download_deployment_maps(resource, subdir.get("Prefix"), local) + for file in result.get("Contents", []): + LOGGER.debug("File content in deployment map: %s", file) + dest_path_name = os.path.join(local, file.get("Key")) + if not os.path.exists(os.path.dirname(dest_path_name)): + os.makedirs(os.path.dirname(dest_path_name)) + resource.meta.client.download_file( + S3_BUCKET_NAME, file.get("Key"), dest_path_name + ) + + +def get_current_pipelines(parameter_store): + return parameter_store.fetch_parameters_by_path("/deployment/") + + +def identify_out_of_date_pipelines(pipeline_names, current_pipelines): + return [ + {"pipeline": f"{ADF_PIPELINE_PREFIX}{d}"} + for d in current_pipelines.difference(pipeline_names) + ] + + +def delete_ssm_params(out_of_date_pipelines, parameter_store): + for pipeline in out_of_date_pipelines: + LOGGER.debug( + "Deleting SSM regions parameter of stale pipeline: /deployment/%s/regions - %s", + pipeline.get('name'), + pipeline, + ) + parameter_store.delete_parameter( + f"/deployment/{pipeline.get('pipeline').removeprefix(ADF_PIPELINE_PREFIX)}/regions" + ) + + +def lambda_handler(event, _): + output = event.copy() + s3 = boto3.resource("s3") + deployment_map = None + with tempfile.TemporaryDirectory() as tmp_dir_path: + download_deployment_maps(s3, "", tmp_dir_path) + deployment_map = DeploymentMap( + None, + None, + None, + map_path=f"{tmp_dir_path}/deployment_map.yml", + map_dir_path=tmp_dir_path, + ) + parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3) + current_pipelines = { + parameter.get("Name").split("/")[-2] + for parameter in get_current_pipelines(parameter_store) + } + + pipeline_names = { + p.get("name") for p in deployment_map.map_contents["pipelines"] + } + out_of_date_pipelines = identify_out_of_date_pipelines( + pipeline_names, current_pipelines + ) + delete_ssm_params(out_of_date_pipelines, parameter_store) + + output = {"pipelines_to_be_deleted": out_of_date_pipelines} + data_md5 = hashlib.md5( + json.dumps(output, sort_keys=True).encode("utf-8") + ).hexdigest() + root_trace_id = os.getenv("_X_AMZN_TRACE_ID", "na=na;na=na").split(";")[0] + output["traceroot"] = root_trace_id + output["hash"] = data_md5 + return output diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/process_deployment_map.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/process_deployment_map.py new file mode 100644 index 000000000..b13e9356b --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/process_deployment_map.py @@ -0,0 +1,92 @@ +""" +Pipeline Management 
Lambda Function +Triggered by new Deployment Maps in S3 Bucket. +Triggers the pipeline management state machine using the deployment map as input. +""" + + +import os +import json +import tempfile +import yaml +from yaml.error import YAMLError + +import boto3 +from botocore.exceptions import ClientError +from logger import configure_logger + + +LOGGER = configure_logger(__name__) +DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"] +DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] +PIPELINE_MANAGEMENT_STATEMACHINE = os.getenv("PIPELINE_MANAGEMENT_STATE_MACHINE") + + +_cache = None + + +def get_details_from_event(event: dict): + s3_details = event.get("Records", [{}])[0].get("s3") + if not s3_details: + raise ValueError("No S3 Event details present in event trigger") + bucket_name = s3_details.get("bucket", {}).get("name") + object_key = s3_details.get("object", {}).get("key") + return { + "bucket_name": bucket_name, + "object_key": object_key, + } + + +def get_file_from_s3(s3_details: dict, s3_resource: boto3.resource): + try: + s3_object = s3_resource.Object( + s3_details.get("bucket_name"), s3_details.get("object_key") + ) + with tempfile.TemporaryFile() as file_pointer: + s3_object.download_fileobj(file_pointer) + + # Move pointer to the start of the file + file_pointer.seek(0) + + return yaml.safe_load(file_pointer) + except ClientError as error: + LOGGER.error( + "Failed to download %s from %s, due to %s", + s3_details.get('object_key'), + s3_details.get('bucket_name'), + error, + ) + raise + except YAMLError as yaml_error: + LOGGER.error( + "Failed to parse YAML file: %s from %s, due to %s", + s3_details.get('object_key'), + s3_details.get('bucket_name'), + yaml_error, + ) + raise + + +def start_executions(sfn_client, deployment_map): + LOGGER.info( + "Invoking Pipeline Management State Machine (%s)", + PIPELINE_MANAGEMENT_STATEMACHINE, + ) + for pipeline in deployment_map.get("pipelines"): + LOGGER.debug("Payload: %s", pipeline) + sfn_client.start_execution( + stateMachineArn=PIPELINE_MANAGEMENT_STATEMACHINE, + input=json.dumps(pipeline), + ) + + +def lambda_handler(event, _): + """Main Lambda Entry point""" + output = event.copy() + s3_resource = boto3.resource("s3") + sfn_client = boto3.client("stepfunctions") + s3_details = get_details_from_event(event) + deployment_map = get_file_from_s3(s3_details, s3_resource) + deployment_map["definition_bucket"] = s3_details.get("object_key") + start_executions(sfn_client, deployment_map) + return output diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/requirements.txt b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/requirements.txt new file mode 100644 index 000000000..6eba90f17 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/requirements.txt @@ -0,0 +1,3 @@ +pyyaml==5.4.1 +wrapt==1.41.1 # https://github.com/aws/aws-lambda-builders/issues/302 +schema==0.7.5 \ No newline at end of file diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/store_pipeline_definition.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/store_pipeline_definition.py new file mode 100644 index 000000000..bc4c5c347 --- /dev/null +++ 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/store_pipeline_definition.py @@ -0,0 +1,32 @@ +""" +Pipeline Management Lambda Function +Stores pipeline input from prior function to S3. +""" + +import os +import json + +import boto3 + +from logger import configure_logger + + +LOGGER = configure_logger(__name__) +DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"] +DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] +S3_BUCKET_NAME = os.environ["S3_BUCKET_NAME"] + + +def upload_event_to_s3(s3, definition): + pipeline_name = definition.get("input", {}).get("name") + s3_object = s3.Object(S3_BUCKET_NAME, f"pipelines/{pipeline_name}/definition.json") + s3_object.put(Body=json.dumps(definition).encode("UTF-8")) + return f"{S3_BUCKET_NAME}/pipelines/{pipeline_name}/" + + +def lambda_handler(event, _): + output = event.copy() + s3 = boto3.resource("s3") + location = upload_event_to_s3(s3, event) + output["definition_location"] = location + return output diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/templates/codecommit.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/templates/codecommit.yml new file mode 100755 index 000000000..1a08a0f08 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/templates/codecommit.yml @@ -0,0 +1,17 @@ +# // Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# // SPDX-License-Identifier: Apache-2.0 + +Parameters: + RepoName: + Type: String + Description: + Type: String + Default: Created by ADF +Resources: + Repo: + Type: AWS::CodeCommit::Repository + DeletionPolicy: Retain + UpdateReplacePolicy: Retain + Properties: + RepositoryName: !Ref RepoName + RepositoryDescription: !Ref Description \ No newline at end of file diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/templates/events.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/templates/events.yml new file mode 100755 index 000000000..a4f534ca2 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/templates/events.yml @@ -0,0 +1,48 @@ +# // Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# // SPDX-License-Identifier: Apache-2.0 + +Parameters: + DeploymentAccountId: + Type: "AWS::SSM::Parameter::Value" + Description: Deployment Account ID + Default: deployment_account_id +Resources: + EventRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: + - events.amazonaws.com + Action: sts:AssumeRole + Path: / + Policies: + - PolicyName: !Sub events-to-${DeploymentAccountId} + PolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Action: events:PutEvents + Resource: '*' + EventRule: + Type: AWS::Events::Rule + Properties: + Name: !Sub adf-cc-event-from-${AWS::AccountId}-to-${DeploymentAccountId} + EventPattern: + source: + - aws.codecommit + detail-type: + - 'CodeCommit Repository State Change' + detail: + event: + - referenceCreated + - referenceUpdated + referenceType: + - branch + Targets: + - Arn: !Sub arn:${AWS::Partition}:events:${AWS::Region}:${DeploymentAccountId}:event-bus/default + RoleArn: !GetAtt EventRole.Arn + Id: codecommit-push-event diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/pipeline_management.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/pipeline_management.yml new file mode 100644 index 000000000..0dc384529 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/pipeline_management.yml @@ -0,0 +1,883 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: 'AWS::Serverless-2016-10-31' +Description: ADF CloudFormation Stack for processing deployment maps. + +Parameters: + OrganizationID: + Type: String + MinLength: "1" + ADFVersion: + Type: String + MinLength: "1" + LambdaLayer: + Type: String + MinLength: "1" + CrossAccountAccessRole: + Type: String + MinLength: "1" + PipelineBucket: + Type: String + MinLength: "1" + RootAccountId: + Type: String + MinLength: "1" + CodeBuildImage: + Type: String + MinLength: "1" + CodeBuildComputeType: + Type: String + MinLength: "1" + SharedModulesBucket: + Type: String + MinLength: "1" + PipelinePrefix: + Type: String + MinLength: "1" + StackPrefix: + Type: String + MinLength: "1" + ADFLogLevel: + Type: String + MinLength: "1" + +Globals: + Function: + Architectures: + - arm64 + CodeUri: lambda_codebase/pipeline_management + Runtime: python3.9 + Timeout: 300 + Tracing: Active + Layers: + - !Ref LambdaLayer + +Resources: + ADFPipelineMangementLambdaBasePolicy: + Type: "AWS::IAM::ManagedPolicy" + Properties: + Description: "Base policy for all ADF pipeline management lambdas" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "logs:CreateLogGroup" + - "logs:CreateLogStream" + - "logs:PutLogEvents" + - "xray:PutTelemetryRecords" + - "xray:PutTraceSegments" + - "cloudwatch:PutMetricData" + Resource: "*" + Roles: + - !Ref DeploymentMapProcessingLambdaRole + - !Ref CreateOrUpdateRuleLambdaRole + - !Ref CreateRepositoryLambdaRole + - !Ref GeneratePipelineInputsLambdaRole + - !Ref PipelineManagementCodeBuildProjectRole + - !Ref StoreDefinitionLambdaRole + - !Ref IdentifyOutOfDatePipelinesLambdaRole + + DeploymentMapProcessingLambdaRolePolicy: + Type: "AWS::IAM::ManagedPolicy" + Properties: + Description: "Policy to allow the deployment map processing Lambda to perform actions" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Action: "s3:ListBucket" + Resource: !GetAtt ADFPipelineBucket.Arn + - Effect: "Allow" + Action: 
"lambda:GetLayerVersion" + Resource: !Ref LambdaLayer + - Effect: "Allow" + Action: "states:StartExecution" + Resource: !Sub "arn:${AWS::Partition}:states:${AWS::Region}:${AWS::AccountId}:stateMachine:ADFPipelineManagementStateMachine" + - Effect: "Allow" + Action: "s3:GetObject" + Resource: !Sub "${ADFPipelineBucket.Arn}/*" + Roles: + - !Ref DeploymentMapProcessingLambdaRole + CrossAccountCloudFormationPolicy: + Type: "AWS::IAM::ManagedPolicy" + Properties: + Description: "Policy to allow a lambda to upload a template to s3 and validate a cloudformation template" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "s3:PutObject" + - "s3:GetObject" + Resource: + - !Sub "arn:${AWS::Partition}:s3:::${PipelineBucket}/*" + - Effect: Allow + Action: + - "cloudformation:ValidateTemplate" + Resource: + - "*" + Roles: + - !Ref CreateOrUpdateRuleLambdaRole + - !Ref CreateRepositoryLambdaRole + + DeploymentMapProcessingLambdaRole: + Type: "AWS::IAM::Role" + Properties: + Path: "/adf-automation/" + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Principal: + Service: + - "lambda.amazonaws.com" + Action: + - "sts:AssumeRole" + + CreateOrUpdateRuleLambdaRole: + Type: "AWS::IAM::Role" + Properties: + Path: "/adf-automation/" + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Principal: + Service: + - "lambda.amazonaws.com" + Action: + - "sts:AssumeRole" + + CreateRepositoryLambdaRole: + Type: "AWS::IAM::Role" + Properties: + Path: "/adf-automation/" + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Principal: + Service: + - "lambda.amazonaws.com" + Action: + - "sts:AssumeRole" + Policies: + - PolicyName: "adf-create-repo-function-policy" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "ssm:GetParameter" + - "ssm:GetParameters" + - "ssm:GetParametersByPath" + Resource: + - "*" + + GeneratePipelineInputsLambdaRole: + Type: "AWS::IAM::Role" + Properties: + Path: "/adf-automation/" + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Principal: + Service: + - "lambda.amazonaws.com" + Action: + - "sts:AssumeRole" + Policies: + - PolicyName: "adf-generate-pipeline-input-function-policy" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "sts:AssumeRole" + Resource: + - !Sub "arn:${AWS::Partition}:iam::${RootAccountId}:role/${CrossAccountAccessRole}-readonly" + - Effect: Allow + Action: + - "ssm:DeleteParameter" + - "ssm:GetParameter" + - "ssm:GetParameters" + - "ssm:GetParametersByPath" + - "ssm:PutParameter" + Resource: + - "*" + StoreDefinitionLambdaRole: + Type: "AWS::IAM::Role" + Properties: + Path: "/adf-automation/" + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Principal: + Service: + - "lambda.amazonaws.com" + Action: + - "sts:AssumeRole" + Policies: + - PolicyName: "adf-store-pipeline-definitions" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "s3:PutObject" + Resource: + - !Sub "${ADFDefinitionBucket.Arn}/*" + IdentifyOutOfDatePipelinesLambdaRole: + Type: "AWS::IAM::Role" + Properties: + Path: "/adf-automation/" + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Principal: + Service: + - "lambda.amazonaws.com" + Action: + - "sts:AssumeRole" + Policies: + - PolicyName: "adf-get-deployment-maps" + PolicyDocument: + Version: "2012-10-17" + 
Statement: + - Effect: Allow + Action: + - "s3:ListBucket" + - "s3:GetObject" + Resource: + - !Sub "${ADFPipelineBucket.Arn}/*" + - !Sub "${ADFPipelineBucket.Arn}" + - Effect: Allow + Action: + - "ssm:DeleteParameter" + - "ssm:GetParametersByPath" + Resource: + - "*" + + StateMachineExecutionRole: + Type: "AWS::IAM::Role" + Properties: + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Principal: + Service: + - states.amazonaws.com + Action: "sts:AssumeRole" + Path: "/" + Policies: + - PolicyName: "adf-state-machine-role-policy" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "xray:PutTelemetryRecords" + - "xray:PutTraceSegments" + - "codebuild:BatchGetBuilds" + - "cloudwatch:PutMetricData" + Resource: "*" + - Effect: Allow + Action: + - "lambda:InvokeFunction" + Resource: + - !GetAtt CreateOrUpdateRuleFunction.Arn + - !GetAtt CreateRepositoryFunction.Arn + - !GetAtt GeneratePipelineInputsFunction.Arn + - !GetAtt StoreDefinitionFunction.Arn + - !GetAtt IdentifyOutOfDatePipelinesFunction.Arn + - Effect: Allow + Action: + - "codebuild:StartBuild" + Resource: + - !GetAtt PipelineManagementCodeBuildProject.Arn + - Effect: Allow + Action: + - states:StartExecution + Resource: + - !Ref PipelineDeletionStateMachine + - Effect: Allow + Action: + - events:PutTargets + - events:PutRule + - events:DescribeRule + Resource: + - "*" + + DeletionStateMachineExecutionRole: + Type: "AWS::IAM::Role" + Properties: + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Principal: + Service: + - states.amazonaws.com + Action: "sts:AssumeRole" + Path: "/" + Policies: + - PolicyName: "adf-state-machine-role-policy" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "xray:PutTelemetryRecords" + - "xray:PutTraceSegments" + - "cloudwatch:PutMetricData" + Resource: "*" + - PolicyName: "adf-deploy-cloudformation-delete" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - cloudformation:DeleteStack + Resource: + - "*" + Condition: + StringEquals: + 'aws:ResourceTag/createdBy': "ADF" + + PipelineManagementStateMachine: + Type: "AWS::StepFunctions::StateMachine" + Properties: + StateMachineName: "ADFPipelineManagementStateMachine" + DefinitionString: !Sub |- + { + "Comment": "ADF Pipeline Management State Machine", + "StartAt": "CreateOrUpdateRule", + "States": { + "CreateOrUpdateRule": { + "Type": "Task", + "Resource": "${CreateOrUpdateRuleFunction.Arn}", + "Retry": [{ + "ErrorEquals": ["States.TaskFailed"], + "IntervalSeconds": 1, + "BackoffRate": 1.5, + "MaxAttempts": 10 + }], + "Next": "CreateRepository" + }, + "CreateRepository": { + "Type": "Task", + "Resource": "${CreateRepositoryFunction.Arn}", + "Retry": [{ + "ErrorEquals": ["States.TaskFailed"], + "IntervalSeconds": 1, + "BackoffRate": 1.5, + "MaxAttempts": 10 + }], + "Next": "GeneratePipelineInputs" + }, + "GeneratePipelineInputs": { + "Type": "Task", + "Resource": "${GeneratePipelineInputsFunction.Arn}", + "Retry": [{ + "ErrorEquals": ["States.TaskFailed"], + "IntervalSeconds": 1, + "BackoffRate": 1.5, + "MaxAttempts": 10 + }], + "Next": "StorePipelineDefinition" + }, + "StorePipelineDefinition": { + "Type": "Task", + "Resource": "${StoreDefinitionFunction.Arn}", + "Retry": [{ + "ErrorEquals": ["States.TaskFailed"], + "IntervalSeconds": 1, + "BackoffRate": 1.5, + "MaxAttempts": 10 + }], + "Next": "RunCDK" + }, + "RunCDK": { + "Type": "Task", + "Resource": 
"arn:${AWS::Partition}:states:::codebuild:startBuild.sync", + "Parameters": { + "ProjectName": "${PipelineManagementCodeBuildProject}", + "SourceTypeOverride": "S3", + "SourceLocationOverride.$": "$.definition_location" + }, + "Next": "IdentifyOutOfDatePipelines" + }, + "IdentifyOutOfDatePipelines": { + "Type": "Task", + "Resource": "${IdentifyOutOfDatePipelinesFunction.Arn}", + "Retry": [{ + "ErrorEquals": ["States.TaskFailed"], + "IntervalSeconds": 1, + "BackoffRate": 1.5, + "MaxAttempts": 10 + }], + "Next": "InvokeDeleteStateMachine" + }, + "InvokeDeleteStateMachine": { + "Type": "Task", + "Resource": "arn:${AWS::Partition}:states:::aws-sdk:sfn:startExecution", + "Parameters": { + "StateMachineArn": "${PipelineDeletionStateMachine}", + "Input.$": "$.pipelines_to_be_deleted", + "Name.$": "$.hash", + "TraceHeader.$": "$.traceroot" + }, + "Catch": [ { + "ErrorEquals": [ "Sfn.ExecutionAlreadyExistsException" ], + "Next": "Success" + } ], + "Next": "Success" + }, + "Success": { + "Type": "Succeed" + } + } + } + RoleArn: !GetAtt StateMachineExecutionRole.Arn + TracingConfiguration: + Enabled: true + + PipelineDeletionStateMachine: + Type: "AWS::StepFunctions::StateMachine" + Properties: + DefinitionString: !Sub |- + { + "Comment": "Delete Stacks", + "StartAt": "Map", + "States": { + "Map": { + "Type": "Map", + "Iterator": { + "StartAt": "DeleteStack", + "States": { + "DeleteStack": { + "Type": "Task", + "Parameters": { + "StackName.$": "$.pipeline" + }, + "Resource": "arn:${AWS::Partition}:states:::aws-sdk:cloudformation:deleteStack", + "End": true + } + } + }, + "MaxConcurrency": 10, + "Next": "Success", + "ItemsPath": "$.pipelines_to_be_deleted" + }, + "Success": { + "Type": "Succeed" + } + } + } + RoleArn: !GetAtt DeletionStateMachineExecutionRole.Arn + TracingConfiguration: + Enabled: true + + PipelineManagementCodeBuildProject: + Type: AWS::CodeBuild::Project + Properties: + Artifacts: + Type: NO_ARTIFACTS + Environment: + ComputeType: !Ref CodeBuildComputeType + Image: !Ref CodeBuildImage + EnvironmentVariables: + - Name: PYTHONPATH + Value: "./adf-build/:./adf-build/python/" + - Name: ACCOUNT_ID + Value: !Ref AWS::AccountId + - Name: MASTER_ACCOUNT_ID + Value: !Ref RootAccountId + - Name: S3_BUCKET_NAME + Value: !Ref PipelineBucket + - Name: SHARED_MODULES_BUCKET + Value: !Ref SharedModulesBucket + - Name: ADF_PIPELINE_PREFIX + Value: !Ref PipelinePrefix + - Name: ADF_STACK_PREFIX + Value: !Ref StackPrefix + - Name: ADF_LOG_LEVEL + Value: !Ref ADFLogLevel + - Name: ADF_VERSION + Value: !Ref ADFVersion + - Name: ORGANIZATION_ID + Value: !Ref OrganizationID + - Name: CLOUDFORMATION_ROLE_ARN + Value: !GetAtt ADFPipelineMangementCloudFormationRole.Arn + Type: LINUX_CONTAINER + Source: + Type: NO_SOURCE + BuildSpec: !Sub | + version: 0.2 + phases: + install: + runtime-versions: + python: 3.9 + nodejs: 14 + pre_build: + commands: + - npm install cdk@1.169 -g -y --quiet --no-progress + - aws s3 cp s3://$SHARED_MODULES_BUCKET/adf-build/ ./adf-build/ --recursive --quiet + - pip install -r adf-build/requirements.txt -q -t ./adf-build + - chmod 755 adf-build/cdk/execute_pipeline_stacks.py adf-build/cdk/generate_pipeline_stacks.py + build: + commands: + - cdk --version + - mkdir cdk_inputs + - cp definition.json cdk_inputs/definition.json + - cdk synth --app adf-build/cdk/generate_pipeline_stacks.py -vv + - python adf-build/cdk/execute_pipeline_stacks.py + ServiceRole: !GetAtt PipelineManagementCodeBuildProjectRole.Arn + PipelineManagementCodeBuildProjectRole: + Type: AWS::IAM::Role + Properties: 
+ Path: "/adf-automation/" + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Principal: + Service: + - codebuild.amazonaws.com + Action: + - sts:AssumeRole + Policies: + - PolicyName: "adf-retrieve-pipeline-definitions" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "s3:GetObject" + - "s3:GetObjectVersion" + - "s3:ListBucket" + Resource: + - !Sub "${ADFDefinitionBucket.Arn}/*" + - !Sub "${ADFDefinitionBucket.Arn}" + - PolicyName: "adf-retrieve-shared-modules" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "s3:GetObject" + - "s3:GetObjectVersion" + - "s3:ListBucket" + Resource: + - !Sub "arn:${AWS::Partition}:s3:::${SharedModulesBucket}/*" + - !Sub "arn:${AWS::Partition}:s3:::${SharedModulesBucket}" + - PolicyName: "adf-deploy-cloudformation-createupdate" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - cloudformation:CreateStack + - cloudformation:UpdateStack + Resource: + - "*" + Condition: + StringEquals: + 'aws:RequestTag/createdBy': "ADF" + - PolicyName: "adf-deploy-cloudformation-template" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - cloudformation:DescribeStacks + - cloudformation:CreateChangeSet + - cloudformation:DeleteChangeSet + - cloudformation:DescribeChangeSet + - cloudformation:ExecuteChangeSet + - cloudformation:SetStackPolicy + - cloudformation:ValidateTemplate + Resource: + - "*" + - Effect: Allow + Sid: "PassRole" + Action: + - 'iam:PassRole' + Resource: + - !GetAtt ADFPipelineMangementCloudFormationRole.Arn + Condition: + StringEqualsIfExists: + 'iam:PassedToService': + - cloudformation.amazonaws.com + + ADFPipelineMangementCloudFormationRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Principal: + Service: + - cloudformation.amazonaws.com + Action: + - sts:AssumeRole + Path: / + Policies: + - PolicyName: "adf-codepipeline-creation" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "codepipeline:CreatePipeline" + - "codepipeline:DeletePipeline" + - "codepipeline:DeleteWebhook" + - "codepipeline:DeregisterWebhookWithThirdParty" + - "codepipeline:GetPipeline" + - "codepipeline:GetPipelineState" + - "codepipeline:PutWebhook" + - "codepipeline:RegisterWebhookWithThirdParty" + - "codepipeline:StartPipelineExecution" + - "codepipeline:TagResource" + - "codepipeline:UpdatePipeline" + Resource: + - !Sub arn:${AWS::Partition}:codepipeline:${AWS::Region}:${AWS::AccountId}:webhook:adf-webhook-* + - !Sub arn:${AWS::Partition}:codepipeline:${AWS::Region}:${AWS::AccountId}:${PipelinePrefix}* + - Effect: Allow + Action: + - "codebuild:CreateProject" + - "codebuild:DeleteProject" + - "codebuild:UpdateProject" + Resource: + - !Sub arn:${AWS::Partition}:codebuild:${AWS::Region}:${AWS::AccountId}:project/adf-build-* + - Effect: Allow + Action: + - "sns:DeleteTopic" + - "sns:CreateTopic" + - "sns:Unsubscribe" + - "sns:Subscribe" + - "sns:SetTopicAttributes" + - "sns:GetTopicAttributes" + - "sns:TagResource" + Resource: + - !Sub arn:${AWS::Partition}:sns:${AWS::Region}:${AWS::AccountId}:${PipelinePrefix}* + - Effect: Allow + Action: + - "iam:CreateRole" + - "iam:AttachRolePolicy" + - "iam:DeleteRole" + - "iam:DeleteRolePolicy" + - "iam:GetRole" + - "iam:GetRolePolicy" + - "iam:PutRolePolicy" + Resource: + - !Sub arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf-pipeline-* + - 
Effect: Allow + Action: + - "events:PutRule" + - "events:PutTargets" + - "events:PutPermission" + - "events:RemoveTargets" + - "events:DeleteRule" + - "events:DescribeRule" + Resource: + - !Sub arn:${AWS::Partition}:events:${AWS::Region}:${AWS::AccountId}:rule/${PipelinePrefix}* + - Effect: Allow + Action: + - "lambda:CreateEventSourceMapping" + - "lambda:AddPermission" + - "lambda:CreateFunction" + - "lambda:DeleteFunction" + - "lambda:GetFunction" + - "lambda:GetFunctionConfiguration" + - "lambda:RemovePermission" + - "lambda:UpdateFunctionCode" + - "lambda:UpdateFunctionConfiguration" + Resource: "*" + - Effect: Allow + Action: + - "iam:TagResource" + Resource: "*" + + DeploymentMapProcessingFunction: + Type: 'AWS::Serverless::Function' + Properties: + Handler: process_deployment_map.lambda_handler + Description: "ADF Lambda Function - Deployment Map Processing" + Environment: + Variables: + ACCOUNT_ID: !Ref AWS::AccountId + ORGANIZATION_ID: !Ref OrganizationID + ADF_VERSION: !Ref ADFVersion + ADF_LOG_LEVEL: !Ref ADFLogLevel + PIPELINE_MANAGEMENT_STATE_MACHINE: !Sub "arn:${AWS::Partition}:states:${AWS::Region}:${AWS::AccountId}:stateMachine:ADFPipelineManagementStateMachine" + ADF_ROLE_NAME: !Ref CrossAccountAccessRole + FunctionName: DeploymentMapProcessorFunction + Role: !GetAtt DeploymentMapProcessingLambdaRole.Arn + Events: + S3Event: + Type: S3 + Properties: + Bucket: + Ref: ADFPipelineBucket + Events: s3:ObjectCreated:* + + ADFAutomationRoleCrossAccountAccessRolePolicy: + Type: "AWS::IAM::ManagedPolicy" + Properties: + Description: "Additional policy that allows a lambda to assume the cross account automation role" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "sts:AssumeRole" + Resource: !Sub "arn:${AWS::Partition}:iam::*:role/adf-automation-role" + Roles: + - !Ref CreateOrUpdateRuleLambdaRole + - !Ref CreateRepositoryLambdaRole + + CreateOrUpdateRuleFunction: + Type: 'AWS::Serverless::Function' + Properties: + Handler: create_or_update_rule.lambda_handler + Description: "ADF Lambda Function - Create or Update rule" + Environment: + Variables: + ACCOUNT_ID: !Ref AWS::AccountId + ORGANIZATION_ID: !Ref OrganizationID + ADF_VERSION: !Ref ADFVersion + ADF_LOG_LEVEL: !Ref ADFLogLevel + ADF_ROLE_NAME: !Ref CrossAccountAccessRole + S3_BUCKET_NAME: !Ref PipelineBucket + FunctionName: ADFPipelineCreateOrUpdateRuleFunction + Role: !GetAtt CreateOrUpdateRuleLambdaRole.Arn + + CreateRepositoryFunction: + Type: 'AWS::Serverless::Function' + Properties: + Handler: create_repository.lambda_handler + Description: "ADF Lambda Function - Create Repository" + Environment: + Variables: + ACCOUNT_ID: !Ref AWS::AccountId + ORGANIZATION_ID: !Ref OrganizationID + ADF_VERSION: !Ref ADFVersion + ADF_LOG_LEVEL: !Ref ADFLogLevel + ADF_ROLE_NAME: !Ref CrossAccountAccessRole + S3_BUCKET_NAME: !Ref PipelineBucket + FunctionName: ADFPipelineCreateRepositoryFunction + Role: !GetAtt CreateRepositoryLambdaRole.Arn + + GeneratePipelineInputsFunction: + Type: 'AWS::Serverless::Function' + Properties: + Handler: generate_pipeline_inputs.lambda_handler + Description: "ADF Lambda Function - Generate Pipeline Inputs" + Environment: + Variables: + ACCOUNT_ID: !Ref AWS::AccountId + ORGANIZATION_ID: !Ref OrganizationID + ADF_VERSION: !Ref ADFVersion + ADF_LOG_LEVEL: !Ref ADFLogLevel + ADF_ROLE_NAME: !Ref CrossAccountAccessRole + S3_BUCKET_NAME: !Ref PipelineBucket + ROOT_ACCOUNT_ID: !Ref RootAccountId + FunctionName: ADFPipelineGenerateInputsFunction + Role: !GetAtt 
GeneratePipelineInputsLambdaRole.Arn + + StoreDefinitionFunction: + Type: 'AWS::Serverless::Function' + Properties: + Handler: store_pipeline_definition.lambda_handler + Description: "ADF Lambda Function - Store Pipeline Definition" + Environment: + Variables: + ACCOUNT_ID: !Ref AWS::AccountId + ORGANIZATION_ID: !Ref OrganizationID + ADF_VERSION: !Ref ADFVersion + ADF_LOG_LEVEL: !Ref ADFLogLevel + ADF_ROLE_NAME: !Ref CrossAccountAccessRole + S3_BUCKET_NAME: !Ref ADFDefinitionBucket + ROOT_ACCOUNT_ID: !Ref RootAccountId + FunctionName: ADFPipelineStoreDefinitionFunction + Role: !GetAtt StoreDefinitionLambdaRole.Arn + + IdentifyOutOfDatePipelinesFunction: + Type: 'AWS::Serverless::Function' + Properties: + Handler: identify_out_of_date_pipelines.lambda_handler + Description: "ADF Lambda Function - Identify Out Of Date Pipelines" + Environment: + Variables: + ACCOUNT_ID: !Ref AWS::AccountId + ORGANIZATION_ID: !Ref OrganizationID + ADF_VERSION: !Ref ADFVersion + ADF_LOG_LEVEL: !Ref ADFLogLevel + ADF_ROLE_NAME: !Ref CrossAccountAccessRole + ROOT_ACCOUNT_ID: !Ref RootAccountId + S3_BUCKET_NAME: !Ref ADFPipelineBucket + ADF_PIPELINE_PREFIX: !Ref PipelinePrefix + FunctionName: ADFPipelineIdentifyOutOfDatePipelinesFunction + Role: !GetAtt IdentifyOutOfDatePipelinesLambdaRole.Arn + + ADFDefinitionBucket: + Type: "AWS::S3::Bucket" + DeletionPolicy: Retain + UpdateReplacePolicy: Retain + Properties: + AccessControl: BucketOwnerFullControl + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: AES256 + VersioningConfiguration: + Status: Enabled + PublicAccessBlockConfiguration: + BlockPublicAcls: true + BlockPublicPolicy: true + IgnorePublicAcls: true + RestrictPublicBuckets: true + + ADFPipelineBucket: + Type: "AWS::S3::Bucket" + DeletionPolicy: Retain + UpdateReplacePolicy: Retain + Properties: + AccessControl: BucketOwnerFullControl + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: AES256 + VersioningConfiguration: + Status: Enabled + PublicAccessBlockConfiguration: + BlockPublicAcls: true + BlockPublicPolicy: true + IgnorePublicAcls: true + RestrictPublicBuckets: true + + StateMachineFailureAlarm: + Type: AWS::CloudWatch::Alarm + Properties: + ComparisonOperator: GreaterThanThreshold + EvaluationPeriods: 1 + MetricName: "ExecutionsFailed" + Namespace: "AWS/States" + Dimensions: + - Name: "StateMachineArn" + Value: !Ref PipelineManagementStateMachine + Period: 60 + Statistic: Sum + Threshold: 1 + TreatMissingData: notBreaching + Unit: Count + +Outputs: + Bucket: + Value: !Ref ADFPipelineBucket diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/execute_pipeline_stacks.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/execute_pipeline_stacks.py index f34a093cf..728e94cb1 100755 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/execute_pipeline_stacks.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/execute_pipeline_stacks.py @@ -29,6 +29,7 @@ ADF_PIPELINE_PREFIX = os.environ["ADF_PIPELINE_PREFIX"] ADF_VERSION = os.environ["ADF_VERSION"] ADF_LOG_LEVEL = os.environ["ADF_LOG_LEVEL"] +CLOUDFORMATION_ROLE_ARN = os.environ["CLOUDFORMATION_ROLE_ARN"] def upload_pipeline(template_path, name, s3): @@ -54,7 +55,8 @@ def worker_thread(template_path, name, s3): stack_name=f"{ADF_PIPELINE_PREFIX}{name}", s3=None, s3_key_path=None, - account_id=DEPLOYMENT_ACCOUNT_ID + 
account_id=DEPLOYMENT_ACCOUNT_ID, + role_arn=CLOUDFORMATION_ROLE_ARN, ) cloudformation.create_stack() diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py index 47eeaf3d7..3073508db 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudformation.py @@ -101,12 +101,14 @@ def __init__( s3_key_path=None, parameters=None, account_id=None, # Used for logging visibility + role_arn=None, ): self.client = role.client('cloudformation', region_name=region, config=CFN_CONFIG) self.wait = wait self.parameters = parameters self.account_id = account_id self.template_url = template_url + self.role_arn = role_arn StackProperties.__init__( self, region=region, @@ -181,21 +183,21 @@ def _create_change_set(self): self.template_url = self.template_url if self.template_url is not None else self.get_template_url() if self.template_url: self.validate_template() - self.client.create_change_set( - StackName=self.stack_name, - TemplateURL=self.template_url, - Parameters=self.parameters if self.parameters is not None else self.get_parameters(), - Capabilities=[ - 'CAPABILITY_NAMED_IAM', - ], - Tags=[ - { - 'Key': 'createdBy', - 'Value': 'ADF' - } - ], - ChangeSetName=self.stack_name, - ChangeSetType=self._get_change_set_type()) + change_set_params = { + "StackName": self.stack_name, + "TemplateURL": self.template_url, + "Parameters": self.parameters if self.parameters is not None else self.get_parameters(), + "Capabilities": ["CAPABILITY_NAMED_IAM", "CAPABILITY_AUTO_EXPAND"], + "Tags":[{ + 'Key': 'createdBy', + 'Value': 'ADF' + }], + "ChangeSetName": self.stack_name, + "ChangeSetType": self._get_change_set_type() + } + if self.role_arn: + change_set_params["RoleARN"] = self.role_arn + self.client.create_change_set(**change_set_params) self._wait_change_set() return True return False diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudwatch.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudwatch.py new file mode 100644 index 000000000..aae3ebce8 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudwatch.py @@ -0,0 +1,21 @@ +""" +Standardised class for pushing CloudWatch metric data to a service within the ADF Namespace +""" + +import boto3 + + +class ADFMetrics: + def __init__(self, client: boto3.client, service, namespace="ADF") -> None: + """ + Client: Any Boto3 CloudWatch client + Service: The name of the Service e.g PipelineManagement/Repository or AccountManagement/EnableSupport + namespace: Defaults to ADF + """ + self.cw = client + self.namespace = f"{namespace}/{service}" + + def put_metric_data(self, metric_data): + if not isinstance(metric_data, list): + metric_data = [metric_data] + self.cw.put_metric_data(Namespace=self.namespace, MetricData=metric_data) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/deployment_map.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/deployment_map.py similarity index 74% rename from src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/deployment_map.py rename to 
src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/deployment_map.py index 92ecd8887..d7f92e365 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/deployment_map.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/deployment_map.py @@ -19,14 +19,15 @@ class DeploymentMap: def __init__( - self, - parameter_store, - s3, - pipeline_name_prefix, - map_path=None + self, + parameter_store, + s3, + pipeline_name_prefix, + map_path=None, + map_dir_path=None, ): self.map_path = map_path or 'deployment_map.yml' - self.map_dir_path = map_path or 'deployment_maps' + self.map_dir_path = map_dir_path or 'deployment_maps' self.parameter_store = parameter_store self.s3 = s3 self._get_all() @@ -35,18 +36,26 @@ def __init__( def update_deployment_parameters(self, pipeline): for target in pipeline.template_dictionary['targets']: - for _t in target: - if _t.get('target'): # Allows target to be interchangeable with path - _t['path'] = _t.pop('target') - if _t.get('path'): - self.account_ou_names.update( - {item['name']: item['path'] for item in target if item['name'] != 'approval'} - ) + LOGGER.debug('target: %s', target) + for wave in target: + LOGGER.debug('wave: %s', wave) + for wave_target in wave: + LOGGER.debug('wave_target: %s', wave_target) + if wave_target.get('target'): # Allows target to be interchangeable with path + wave_target['path'] = wave_target.pop('target') + if wave_target.get('path'): + self.account_ou_names.update( + { + item['name']: item['path'] + for item in wave + if item['name'] != 'approval' + } + ) with open(f'{pipeline.name}.json', mode='w', encoding='utf-8') as outfile: json.dump(self.account_ou_names, outfile) self.s3.put_object( f"adf-parameters/deployment/{pipeline.name}/account_ous.json", - f"{pipeline.name}.json", + f'{pipeline.name}.json' ) if pipeline.notification_endpoint: self.parameter_store.put_parameter( @@ -63,7 +72,7 @@ def _read(self, file_path=None): _input = yaml.load(stream, Loader=yaml.FullLoader) return SchemaValidation(_input).validated except FileNotFoundError: - LOGGER.info('No default map file found at %s, continuing', file_path) + LOGGER.warning('No default map file found at %s, continuing', file_path) return {} except SchemaError as err: LOGGER.error(err.code) @@ -101,4 +110,4 @@ def _process_dir(self, path): self._read(filename) ) else: - LOGGER.warning("%s is not a directory and doesn't end in.yml", filename) + LOGGER.warning("%s is not a directory and doesn't hold the .yml suffix", filename) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/pipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/pipeline.py similarity index 100% rename from src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/pipeline.py rename to src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/pipeline.py diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/repo.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/repo.py similarity index 95% rename from src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/repo.py rename to src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/repo.py index 40361ed99..a7bed7e2d 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/repo.py +++ 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/repo.py @@ -14,7 +14,6 @@ from partition import get_partition LOGGER = configure_logger(__name__) -TARGET_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"] S3_BUCKET_NAME = os.environ["S3_BUCKET_NAME"] @@ -61,7 +60,7 @@ def define_repo_parameters(self): def create_update(self): s3_object_path = s3.put_object( "adf-build/templates/codecommit.yml", - f"{TARGET_DIR}/adf-build/templates/codecommit.yml", + "templates/codecommit.yml" ) cloudformation = CloudFormation( region=DEPLOYMENT_ACCOUNT_REGION, diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/schema_validation.py similarity index 100% rename from src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py rename to src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/schema_validation.py diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/target.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/target.py similarity index 97% rename from src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/target.py rename to src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/target.py index 26b8b9734..7cc2d4bc3 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/target.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/target.py @@ -48,13 +48,13 @@ def _define_target_type(target): return target def generate_waves(self): - waves = [] wave_size = self.wave.get('size', 50) + wave = [] length = len(self.account_list) for index in range(0, length, wave_size): - yield self.account_list[index:min(index + wave_size, length)] - waves.append(self.account_list[index:min(index + wave_size, length)]) - return waves + wave.append(self.account_list[index:min(index + wave_size, length)]) + return wave + class Target: diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/stubs/stub_deployment_map.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/stubs/stub_deployment_map.yml new file mode 100644 index 000000000..4c47d6702 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/stubs/stub_deployment_map.yml @@ -0,0 +1,162 @@ +pipelines: + - name: sample-iam # The name of your pipeline (by default, this will match the name of your repository) + default_providers: + source: + provider: codecommit + properties: + account_id: 111111111111 + build: + provider: codebuild + deploy: + provider: cloudformation + params: + notification_endpoint: john@example.com # The Notification (user/team/slack) responsible for this pipeline + restart_execution_on_update: true + targets: # Deployment stages + - /banking/testing # This will use the default deployment action as defined above + - path: /banking/production + # Since the deploy type is not overridden, it uses the CloudFormation as defined by the default provider + # while using specific properties for this target: + properties: + stack_name: my-cool-iam-stack # Override the default stack name to a specific one, useful 
when adopting a stack into ADF + change_set_approval: true # Override deploy action above and insert an approval in between create + execute change set + - provider: lambda + properties: # See https://docs.aws.amazon.com/codepipeline/latest/userguide/actions-invoke-lambda-function.html + function_name: my_lambda_function + + - name: ami-builder # The name of your pipeline (by default, the repository name will match the pipeline name) + default_providers: + source: + provider: codecommit + properties: + # When CodeCommit is configured as the source, you should specify the + # account_id where the repository is hosted. + account_id: 333333333333 + build: + provider: codebuild + properties: + role: packer + size: medium # Resource allocation for the build stage -> small | medium | large + params: + schedule: rate(7 days) # Run once every seven days. See expression syntax at: https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html#RateExpressions + completion_trigger: # What should happen when this pipeline completes + pipelines: + - sample-vpc # Run this other pipeline + + - name: sample-vpc + default_providers: + # If we omit build and deploy type we get a default of CodeBuild as the build provider. + # and CloudFormation as the deploy provider. + source: + provider: github + properties: + repository: example-vpc-adf # Optional, above name property will be used if this is not specified + owner: bundyfx + oauth_token_path: /adf/github_token # The path in AWS Secrets Manager that holds the GitHub Oauth token, ADF only has access to /adf/ prefix in Secrets Manager + json_field: token # The field (key) name of the json object stored in AWS Secrets Manager that holds the Oauth token + deploy: + provider: cloudformation + properties: + action: replace_on_failure + params: + notification_endpoint: john@example.com + targets: # Long hand syntax including regions and names for stages + - path: /banking/testing + name: fancy-name + + - name: sample-ecs-app + default_providers: + source: + provider: codestar + properties: + repository: my-ecs-app # Optional, the name of the pipeline will be used if this is not specified + owner: github-enterprise-team-org + codestar_connection_path: /path/to/parameter # The path in AWS Systems Manager Parameter Store that holds the AWS CodeStar Connection ARN + params: + notification_endpoint: team@example.com + targets: + - [/banking/testing, /banking/production] + + - name: sample-custom # Using a custom pipeline, we can execute code within CodeBuild to perform whichever tasks are required. 
+ default_providers: + source: + provider: codecommit + properties: + account_id: 333333333333 # A different account id as this pipeline is owned by a different team + deploy: + provider: codebuild + targets: # Targets looks for the deploy defaults above to determine parameters + - properties: + spec_filename: custom-spec-one.yml + - provider: approval + properties: + message: plz approve + notification_endpoint: john@example.com # Approvals can have their own unique notification endpoint + - properties: + spec_filename: custom-spec-two.yml + + - name: sample-ec2-app-codedeploy + default_providers: + source: + provider: codecommit + properties: + account_id: 333333333333 # A different account id as this pipeline is owned by a different team + targets: + - 222222222222 + + - name: sample-ec2-java-app-codedeploy + default_providers: + source: + provider: codecommit + properties: + account_id: 333333333333 + build: + provider: codebuild + properties: + # Use a specific docker image (to use Python 3.9) for the build stage + # in this pipeline -> https://docs.aws.amazon.com/cdk/api/latest/docs/@aws-cdk_aws-codebuild.LinuxBuildImage.html + image: "STANDARD_5_0" + deploy: + provider: codedeploy + params: + notification_endpoint: deployments + targets: + - target: 222222222222 + properties: # These are stage specific properties for our deploy action + application_name: sample + deployment_group_name: testing-sample # See https://docs.aws.amazon.com/codedeploy/latest/userguide/deployment-groups.html + + - name: sample-input-export-pipeline + default_providers: + source: + provider: codecommit + properties: + account_id: 111111111111 + targets: + - target: 222222222222 + properties: + outputs: some_param_outputs # Outputs will take CloudFormation Outputs and pass them into a JSON file which can be used in later stages + - target: 111111111111 + properties: + param_overrides: + - inputs: some_param_outputs # Which file do we want to use to pass in overrides from + param: s3_bucket # The name of the parameter you wish to override at this stage + key_name: logging_bucket # The key from the output 'some_param_outputs' we want to get the value from + + - name: sample-s3-pipeline + default_providers: + source: + provider: s3 + properties: + bucket_name: packer-bucket-test + object_key: input.zip + account_id: 444444444444 + build: + enabled: false # If you wish to disable the build phase in a pipeline + deploy: + provider: s3 + targets: + - target: 222222222222 + properties: + bucket_name: test-adf-bucket-eu + object_key: output.zip diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/stub_target.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/stubs/stub_target.py similarity index 100% rename from src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/stub_target.py rename to src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/stubs/stub_target.py diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_deployment_map.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_deployment_map.py new file mode 100644 index 000000000..5e62ce2ba --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_deployment_map.py @@ -0,0 +1,134 @@ +# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: MIT-0 + +# pylint: skip-file + +import os +import boto3 + +from errors import InvalidDeploymentMapError +from pytest import fixture, raises +from mock import Mock +from ..pipeline import Pipeline +from ..deployment_map import DeploymentMap + + +@fixture +def cls(): + return DeploymentMap( + parameter_store=None, + s3=None, + pipeline_name_prefix="adf", + map_path="{0}/stubs/stub_deployment_map.yml".format( + os.path.dirname(os.path.realpath(__file__)) + ), + ) + + +def test_update_deployment_parameters(cls): + cls.s3 = Mock() + cls.s3.put_object.return_value = None + + pipeline = Pipeline( + { + "name": "pipeline", + "params": {"key": "value"}, + "targets": [], + "default_providers": { + "source": { + "name": "codecommit", + "properties": { + "account_id": 111111111111, + }, + } + }, + } + ) + + # Targets : [[account_id, account_id], [account_id, account_id]] + pipeline.template_dictionary = { + "targets": [ + # Array holding all waves + [ + # First wave of targets + [ + # First batch within the first wave + { + "id": "111111111111", + "name": "some_account", + "path": "/fake/path", + "properties": {}, + "provider": {}, + "regions": ["eu-west-1"], + "step_name": "", + }, + ] + ] + ] + } + + cls.update_deployment_parameters(pipeline) + assert cls.account_ou_names["some_account"] == "/fake/path" + + +def test_update_deployment_parameters_waves(cls): + cls.s3 = Mock() + cls.s3.put_object.return_value = None + + pipeline = Pipeline({ + "name": "pipeline", + "params": {"key": "value"}, + "targets": [], + "default_providers": { + "source": { + "name": "codecommit", + "properties" : { + "account_id": 111111111111, + } + } + } + }) + pipeline.template_dictionary = { + "targets": [ # Array holding all waves + [ # First wave of targets + [ # First batch within the first wave + { # First target in first wave + "name": "first", + "path": "/first/path", + }, + { # Second target in first wave + "name": "second", + "path": "/second/path", + } + ], + [ # Second batch within the first wave + { + # Third target in first wave + "name": "third", + "path": "/third/path", + }, + ], + ], + [ # Second wave of targets + [ # First batch within the second wave + { + # Third target in first wave + "name": "approval", + }, + ], + ], + [ # Third wave of targets + [ # First batch within the third wave + { + # Third target in first wave + "name": "fourth", + "path": "/fourth/path", + }, + ], + ] + ], + } + + cls.update_deployment_parameters(pipeline) + for target in ['first', 'second', 'third', 'fourth']: + assert cls.account_ou_names[target] == f'/{target}/path' diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_pipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_pipeline.py similarity index 100% rename from src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_pipeline.py rename to src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_pipeline.py diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_target.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_target.py similarity index 100% rename from src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_target.py rename to src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_target.py diff --git 
a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/stub_deployment_map.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/stub_deployment_map.yml deleted file mode 120000 index 1ec4905d7..000000000 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/stub_deployment_map.yml +++ /dev/null @@ -1 +0,0 @@ -../../../../adf-bootstrap/deployment/lambda_codebase/initial_commit/pipelines_repository/example-deployment_map.yml \ No newline at end of file diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_deployment_map.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_deployment_map.py deleted file mode 100644 index 0a22d80ff..000000000 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_deployment_map.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: MIT-0 - -# pylint: skip-file - -import os -import boto3 - -from errors import InvalidDeploymentMapError -from pytest import fixture, raises -from mock import Mock -from ..pipeline import Pipeline -from ..deployment_map import DeploymentMap - - -@fixture -def cls(): - return DeploymentMap( - parameter_store=None, - s3=None, - pipeline_name_prefix='adf', - map_path='{0}/stubs/stub_deployment_map.yml'.format( - os.path.dirname(os.path.realpath(__file__)) - ) - ) - - -def test_update_deployment_parameters(cls): - cls.s3 = Mock() - cls.s3.put_object.return_value = None - - pipeline = Pipeline({ - "name": "pipeline", - "params": {"key": "value"}, - "targets": [], - "default_providers": { - "source": { - "name": "codecommit", - "properties" : { - "account_id": 111111111111, - } - } - } - }) - pipeline.template_dictionary = { - "targets": [[ - { - "name": "some_pipeline", - "path": "/fake/path", - } - ]] - } - - cls.update_deployment_parameters(pipeline) - assert cls.account_ou_names['some_pipeline'] == '/fake/path' diff --git a/src/template.yml b/src/template.yml index 08ea8180f..7ade1732b 100644 --- a/src/template.yml +++ b/src/template.yml @@ -588,6 +588,7 @@ Resources: - pytest -vvv build: commands: + - sam build -t adf-bootstrap/deployment/pipeline_management.yml - sam build -t adf-bootstrap/deployment/global.yml - >- sam package --output-template-file adf-bootstrap/deployment/global.yml