diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml index 11428d59d..5a8aef711 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml @@ -287,10 +287,17 @@ Resources: Resource: - !Sub arn:${AWS::Partition}:s3:::${PipelineBucket} - !Sub arn:${AWS::Partition}:s3:::${PipelineBucket}/* + - Effect: Allow + Sid: "S3ReadOnly" + Action: + - s3:Get* + - s3:GetBucketPolicy + - s3:List* + Resource: - !Sub arn:${AWS::Partition}:s3:::${SharedModulesBucket} - !Sub arn:${AWS::Partition}:s3:::${SharedModulesBucket}/* - - !Sub arn:${AWS::Partition}:s3:::${PipelineManagementApplication.Outputs.Bucket} - - !Sub arn:${AWS::Partition}:s3:::${PipelineManagementApplication.Outputs.Bucket}/* + - !Sub arn:${AWS::Partition}:s3:::${PipelineManagementApplication.Outputs.DefinitionBucket} + - !Sub arn:${AWS::Partition}:s3:::${PipelineManagementApplication.Outputs.DefinitionBucket}/* - Effect: Allow Sid: "KMS" Action: @@ -383,10 +390,17 @@ Resources: Resource: - !Sub arn:${AWS::Partition}:s3:::${PipelineBucket} - !Sub arn:${AWS::Partition}:s3:::${PipelineBucket}/* - - !Sub arn:${AWS::Partition}:s3:::${SharedModulesBucket} - - !Sub arn:${AWS::Partition}:s3:::${SharedModulesBucket}/* - !Sub arn:${AWS::Partition}:s3:::${PipelineManagementApplication.Outputs.Bucket} - !Sub arn:${AWS::Partition}:s3:::${PipelineManagementApplication.Outputs.Bucket}/* + - Effect: Allow + Sid: "S3ReadOnly" + Action: + - s3:Get* + - s3:GetBucketPolicy + - s3:List* + Resource: + - !Sub arn:${AWS::Partition}:s3:::${SharedModulesBucket} + - !Sub arn:${AWS::Partition}:s3:::${SharedModulesBucket}/* - Effect: Allow Sid: "KMS" Action: diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py index b8f6ef550..b83421f64 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py @@ -37,22 +37,20 @@ def lambda_handler(pipeline, _): LOGGER.info(pipeline) - _source_account_id = ( + source_account_id = ( pipeline.get("default_providers", {}) .get("source", {}) .get("properties", {}) .get("account_id", {}) ) if ( - _source_account_id - and int(_source_account_id) != int(DEPLOYMENT_ACCOUNT_ID) - and not _cache.check(_source_account_id) + source_account_id + and int(source_account_id) != int(DEPLOYMENT_ACCOUNT_ID) + and not _cache.exists(source_account_id) ): - rule = Rule(pipeline["default_providers"]["source"]["properties"]["account_id"]) + rule = Rule(source_account_id) rule.create_update() - _cache.add( - pipeline["default_providers"]["source"]["properties"]["account_id"], True - ) + _cache.add(source_account_id, True) METRICS.put_metric_data( {"MetricName": "CreateOrUpdate", "Value": 1, "Unit": "Count"} ) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/pipeline_management.yml 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/pipeline_management.yml index b7065d185..6fd3616ad 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/pipeline_management.yml +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/pipeline_management.yml @@ -986,6 +986,13 @@ Resources: IgnorePublicAcls: true RestrictPublicBuckets: true + DefinitionBucketParameter: + Type: "AWS::SSM::Parameter" + Properties: + Name: "/adf/pipeline_definition_bucket" + Type: "String" + Value: !Ref ADFDefinitionBucket + ADFPipelineBucket: Type: "AWS::S3::Bucket" DeletionPolicy: Retain @@ -1024,6 +1031,9 @@ Outputs: Bucket: Value: !Ref ADFPipelineBucket + DefinitionBucket: + Value: !Ref ADFDefinitionBucket + CreateOrUpdateRuleLambdaRoleArn: Value: !GetAtt CreateOrUpdateRuleLambdaRole.Arn diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/base_resolver.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/base_resolver.py new file mode 100644 index 000000000..5506dd0b6 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/base_resolver.py @@ -0,0 +1,57 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +BaseResolver abstract class used for resolver implementations +to inherit from so they use the same interface +""" +from abc import ABC, abstractmethod +from cache import Cache + + +class BaseResolver(ABC): + """ + The abstract BaseResolver class ensures that the interface + of the methods for resolvers are defined and common code is stored here. + """ + + def __init__(self): + self.cache = Cache() + + @abstractmethod + def resolve(self, lookup_str: str, random_filename: str) -> str: + """ + Assumes that the lookup_str is supported. + + This function will perform the intrinsic function to + resolve the value as requested. + + Args: + lookup_str (str): The lookup string that contains the lookup + syntax. + random_filename (str): The random filename, used to ensure + unique uploads when required. + + Returns: + str: The value as looked up using the intrinsic function. + """ + pass + + @abstractmethod + def supports(self, lookup_str: str) -> bool: + """ + Check if this resolver supports the lookup_str syntax. + + Args: + lookup_str (str): The lookup string that might have the lookup + syntax or not. + + Returns: + bool: True if this resolver supports the lookup_str syntax. + False if not. 
+ """ + pass + + @staticmethod + def _is_optional(value: str) -> bool: + return value.endswith('?') diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/generate_pipeline_inputs.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/generate_pipeline_inputs.py index bc48c6326..bac31f86e 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/generate_pipeline_inputs.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/generate_pipeline_inputs.py @@ -209,7 +209,7 @@ def main(): need_to_create_rules = ( source_account_id and int(source_account_id) != int(DEPLOYMENT_ACCOUNT_ID) - and not cache.check(source_account_id) + and not cache.exists(source_account_id) ) if need_to_create_rules: rule = Rule(source_account_id) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/generate_params.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/generate_params.py index 4dd4910d9..66ec5b79e 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/generate_params.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/generate_params.py @@ -7,162 +7,415 @@ param files in the params folder """ +import re +from copy import deepcopy import json import secrets -import string # pylint: disable=deprecated-module # https://www.logilab.org/ticket/2481 +# Not all string functions are deprecated, the ones we use are not. +# Hence disabling the lint finding: +from string import ascii_lowercase, digits # pylint: disable=deprecated-module import os -import ast +from itertools import chain +from typing import Dict, Iterator, List, Optional, Union +from typing_extensions import TypedDict import yaml import boto3 -from resolver import Resolver -from s3 import S3 from logger import configure_logger from parameter_store import ParameterStore +from resolver import Resolver +from s3 import S3 + + +class ParametersAndTags(TypedDict): + """ + The param files will have Parameters and Tags, where these are + """ + Parameters: Dict[str, str] + Tags: Dict[str, str] + + +# When the wave target is selecting accounts using a +# tag based selection, where the key and value should be +# defined in the account: +TagKeyDict = Dict[str, str] + +# A wave target path can be a string referencing the account id, +# the organization unit path, or a tag based selection using TagKeyDict. +WaveTargetPath = Union[str, TagKeyDict] + + +class ParamGenWaveTarget(TypedDict): + """ + Optimized parameter generation wave target with clearly + identified fields as used in the generate parameters process. + """ + id: str + account_name: str + path: WaveTargetPath + regions: List[str] + + +# When the pipeline targets are retrieved, it will create a dictionary +# where they key will reference the account id and the value will +# contain all the relevant information of the wave target. +# The ParamGenWaveTarget will contain all the information from the +# different ParamGenWaveTarget it found that reference the same account id. +# +# In other words, if account A is targeted in the first wave for region +# eu-west-1 and it is targeted in the second wave in us-east-1, the combined +# ParamGenWaveTarget will contain both regions in the `regions` attribute. 
+PipelineTargets = Dict[str, ParamGenWaveTarget] + + +class InputPipelineWaveTarget(TypedDict): + """ + Each wave target in a pipeline will have the following + fields to point to the target account. + """ + id: str + name: str + path: WaveTargetPath + regions: List[str] + + +# When defining the pipeline, the accounts that it deploys to are mapped +# in waves. Within each wave, it will contain a list of wave targets to +# make sure that referencing 100 accounts for example will be broken down +# into two waves of 50 accounts each as max supported by CodePipeline. +TargetWavesWithNestedWaveTargets = List[ # Waves + List[ # Wave Targets + InputPipelineWaveTarget + ] +] + + +class InputEnvironmentDefinition(TypedDict): + """ + Inside the pipeline input environment, the list of targets + is defined as a list of waves that each contain a list of wave targets. + """ + targets: TargetWavesWithNestedWaveTargets + + +class InputDefinition(TypedDict): + """ + The input of the pipeline definition holds the environment + with all the targets defined inside. + """ + environment: InputEnvironmentDefinition + + +class PipelineDefinition(TypedDict): + """ + Bare minimum input pipeline definition as required for traversal + in this generation of parameters. + """ + input: InputDefinition + LOGGER = configure_logger(__name__) DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"] -SHARED_MODULES_BUCKET = os.environ["S3_BUCKET_NAME"] PROJECT_NAME = os.environ["ADF_PROJECT_NAME"] +EMPTY_PARAMS_DICT: ParametersAndTags = {'Parameters': {}, 'Tags': {}} + class Parameters: - def __init__(self, build_name, parameter_store, s3, directory=None): + """ + Parameter generation class. + """ + def __init__( + self, + build_name: str, + parameter_store: ParameterStore, + definition_s3: S3, + directory: Optional[str] = None, + ): self.cwd = directory or os.getcwd() self._create_params_folder() - self.global_path = "params/global" - self.parameter_store = parameter_store + self.resolver = Resolver(parameter_store) self.build_name = build_name - self.s3 = s3 + self.definition_s3 = definition_s3 self.file_name = "".join( - secrets.choice(string.ascii_lowercase + string.digits) for _ in range(6) + secrets.choice(ascii_lowercase + digits) + for _ in range(6) ) - [self.account_ous, self.regions] = self._fetch_initial_parameter() - def _fetch_initial_parameter(self): - return [ - ast.literal_eval( - self.s3.read_object(f"adf-parameters/deployment/{self.build_name}/account_ous.json") + def _retrieve_pipeline_definition(self) -> PipelineDefinition: + return json.loads( + self.definition_s3.read_object( + f"pipelines/{self.build_name}/definition.json", ), - ast.literal_eval( - self.parameter_store.fetch_parameter(f"/deployment/{self.build_name}/regions") - ) - ] + ) - def _create_params_folder(self): + def _retrieve_pipeline_targets(self) -> PipelineTargets: + pipeline_targets = {} + pipeline_definition = self._retrieve_pipeline_definition() + input_targets: TargetWavesWithNestedWaveTargets = ( + pipeline_definition['input']['environments']['targets'] + ) + # Since the input_targets returns a list of waves that each contain + # a list of wave_targets, we need to flatten them to iterate: + wave_targets: Iterator[ParamGenWaveTarget] = map( + lambda wt: { + # Change wt: InputPipelineWaveTarget to ParamGenWaveTarget + 'id': wt['id'], + 'account_name': wt['name'], + 'path': wt['path'], + 'regions': wt['regions'], + }, + filter( + lambda wt: wt['id'] != 'approval', + # Flatten the three levels of nested arrays to one iterable: + 
chain.from_iterable( + chain.from_iterable( + input_targets, + ), + ), + ) # Returns an Iterator[InputPipelineWaveTarget] + ) + for wave_target in wave_targets: + if wave_target['id'] in pipeline_targets: + # Lets merge the regions to show what regions it deploys + # to + stored_target = pipeline_targets[wave_target['id']] + stored_target['regions'] = sorted(list(set( + stored_target['regions'] + + wave_target['regions'], + ))) + else: + pipeline_targets[wave_target['id']] = wave_target + # Returns a list of targets: + # [ + # { + # "id": "111111111111", + # "account_name": "account-name", + # "path": "/ou/path" | "1111111111" | { "TagKey": "TagValue" } + # "regions": [ "eu-west-1", "us-east-1", "etc" ] + # }, + # ... + # ] + return pipeline_targets + + def _create_params_folder(self) -> None: try: - return os.mkdir(f'{self.cwd}/params') + os.mkdir(f'{self.cwd}/params') except FileExistsError: - return None + pass - @staticmethod - def _is_account_id(value): - return str(value).isnumeric() - - def create_parameter_files(self): - for account, ou in self.account_ous.items(): - for region in self.regions: - compare_params = {'Parameters': {}, 'Tags': {}} - compare_params = self._param_updater( - Parameters._parse(f"{self.cwd}/params/{account}_{region}"), - compare_params, + def create_parameter_files(self) -> None: + """ + Iterates over the pipeline target, and for each account it targets + it will iterate over the regions to which it deploys in that account + to generate the parameter files for those. + + The parameter files are generated with most specific parameter + definition winning. It iterates over the following files: + 1. f"{account_name}_{region}" i.e. "security-account_eu-west-1" + 1. f"{account_name}" i.e. "security-account" + 1. f"{organization_unit_path}_{region}" + i.e. "/devsecops/security_eu-west-1" + 1. f"{organization_unit_path}" i.e. "/devsecops/security" + 1. f"{global}_{region}" i.e. "global_eu-west-1" + 1. f"{global}" i.e. "global" + + It will then generate a JSON file that holds all the parameters per + target/region combination as such: "{account_name}_{region}.json" + + It will add new parameters or tags if the parameter or tag is found in + a less specific file, and it was missing in the more specific files + it processed so far. For example, if the account_region file did not + include the Department Tag, while the account file does, it will get + included automatically. If you want to override the Department key in a + specific region, make sure to include that in the account_region in + this case. 
+ """ + for target in self._retrieve_pipeline_targets().values(): + for region in target['regions']: + current_params = deepcopy(EMPTY_PARAMS_DICT) + current_params = self._merge_params( + Parameters._parse( + params_root_path=self.cwd, + params_filename=f"{target['account_name']}_{region}", + ), + current_params, + ) + current_params = self._merge_params( + Parameters._parse( + params_root_path=self.cwd, + params_filename=target['account_name'], + ), + current_params, ) - compare_params = self._param_updater( - Parameters._parse(f"{self.cwd}/params/{account}"), - compare_params, + path_references_ou = ( + isinstance(target['path'], str) + and not Parameters._is_account_id(target['path']) ) - if not Parameters._is_account_id(ou): + if path_references_ou: # Compare account_region final to ou_region - compare_params = self._param_updater( - Parameters._parse(f"{self.cwd}/params/{ou}_{region}"), - compare_params + ou_id_or_path = target['path'] + if ou_id_or_path.startswith('/'): + # Skip the first slash + ou_id_or_path = ou_id_or_path[1:] + # Cleanup the ou name to include only alphanumeric, dashes, + # and underscores: + current_params = self._merge_params( + Parameters._parse( + params_root_path=self.cwd, + params_filename=f"{ou_id_or_path}_{region}", + ), + current_params ) # Compare account_region final to ou - compare_params = self._param_updater( - Parameters._parse(f"{self.cwd}/params/{ou}"), - compare_params + current_params = self._merge_params( + Parameters._parse( + params_root_path=self.cwd, + params_filename=ou_id_or_path, + ), + current_params ) # Compare account_region final to deployment_account_region - compare_params = self._param_updater( - Parameters._parse(f"{self.cwd}/params/global_{region}"), - compare_params + current_params = self._merge_params( + Parameters._parse( + params_root_path=self.cwd, + params_filename=f"global_{region}", + ), + current_params ) # Compare account_region final to global - compare_params = self._param_updater( - Parameters._parse(self.global_path), - compare_params + current_params = self._merge_params( + Parameters._parse( + params_root_path=self.cwd, + params_filename="global", + ), + current_params ) - if compare_params is not None: - self._update_params(compare_params, f"{account}_{region}") + if current_params is not None: + self._write_params( + current_params, + f"{target['account_name']}_{region}", + ) + + @staticmethod + def _is_account_id(wave_target_path: WaveTargetPath) -> bool: + return str(wave_target_path).isnumeric() @staticmethod - def _parse(filename): + def _clean_params_filename(params_filename: str) -> str: + # Cleanup the params_filename to include only alphanumeric, dashes, + # slashes, and underscores: + return re.sub(r'[^0-9a-zA-Z_\-/]+', '_', params_filename) + + @staticmethod + def _parse( + params_root_path: str, + params_filename: str, + ) -> ParametersAndTags: """ - Attempt to parse the parameters file and return he default + Attempt to parse the parameters file and return the default CloudFormation parameter base object if not found. Returning Base CloudFormation Parameters here since if the user was using - Any other type (SC, ECS) they would require a parameter file (global.json) - and thus this would not fail. + Any other type (SC, ECS) they would require a parameter file + (global.json) and thus this would not fail. + + Args: + params_root_path (str): The root path where the `params` folder is + located in. + params_filename (str): The name of the parameter file without the + file extension. 
For example `global` will attempt to read + f"{params_root_path}/params/{params_filename}.json" + and if that fails it will try to read: + f"{params_root_path}/params/{params_filename}.yml" + + Returns + ParametersAndTags: The Parameters and Tags defined in the file. """ + clean_file_name = Parameters._clean_params_filename( + params_filename, + ) + file_path = f"{params_root_path}/params/{clean_file_name}" try: - with open(f"{filename}.json", encoding='utf-8') as file: + with open(f"{file_path}.json", encoding='utf-8') as file: return json.load(file) except FileNotFoundError: try: - with open(f"{filename}.yml", encoding='utf-8') as file: + with open(f"{file_path}.yml", encoding='utf-8') as file: return yaml.load(file, Loader=yaml.FullLoader) except yaml.scanner.ScannerError: - LOGGER.exception('Invalid Yaml for %s.yml', filename) + LOGGER.exception('Invalid Yaml for %s.yml', file_path) raise except FileNotFoundError: return {'Parameters': {}, 'Tags': {}} - def _update_params(self, new_params, filename): + def _write_params( + self, + new_params: ParametersAndTags, + filename: str, + ) -> None: """ - Responsible for updating the parameters within the files themselves + Responsible for writing the parameters within the files themselves + + Args: + new_params (ParametersAndTags): The Parameters and Tags to write + to the requested file. + filename (str): The name of the file to write to inside the params + folder. """ - with open(f"{self.cwd}/params/{filename}.json", mode='w', encoding='utf-8') as outfile: + filepath = f"{self.cwd}/params/{filename}.json" + with open(filepath, mode='w', encoding='utf-8') as outfile: json.dump(new_params, outfile) - def _determine_intrinsic_function(self, resolver, value, key): - if str(value).startswith('resolve:'): - return resolver.fetch_parameter_store_value(value, key) - if str(value).startswith('import:'): - return resolver.fetch_stack_output(value, key) - if str(value).startswith('upload:'): - return resolver.upload(value, key, self.file_name) - return False + def _merge_params( + self, + new_params: ParametersAndTags, + current_params: ParametersAndTags + ) -> ParametersAndTags: + """ + Merge the new_params Parameters and Tags found into a clone of the + current_params if the Parameter or Tag found in the new_params is not + present in the current_params yet. Or the current_params version of + that Parameter or Tag is an empty string. - def _determine_parameter_structure(self, parameters, resolver): # pylint: disable=inconsistent-return-statements - try: - for key, value in parameters.items(): - if isinstance(value, dict): - LOGGER.debug('Calling _determine_parameter_structure recursively') - return self._determine_parameter_structure(value, resolver) - if self._determine_intrinsic_function(resolver, value, key): - continue - resolver.update(key) - except AttributeError: - LOGGER.debug('Input was not a dict for _determine_parameter_structure, nothing to do.') - pass + Args: + new_params (ParametersAndTags): The new Parameters and Tags to + merge into the current_params. + current_params (ParametersAndTags): The current Parameters and Tags + which is cloned and returned with the new parameters and tags + it found in new_params. Unless current_params already + contained the Parameter or Tag, as described above. - def _param_updater(self, comparison_parameters, stage_parameters): - """ - Generic Parameter Updater method + Returns: + ParametersAndTags: A clone of the current_params and newly merged + Parameters and Tags. 
""" - resolver = Resolver(self.parameter_store, stage_parameters, comparison_parameters) - self._determine_parameter_structure(comparison_parameters, resolver) - self._determine_parameter_structure(stage_parameters, resolver) - return resolver.__dict__.get('stage_parameters') + merged_params = deepcopy(current_params) + for root_key in new_params: + if root_key not in merged_params: + merged_params[root_key] = {} + for key in new_params[root_key]: + if merged_params[root_key].get(key, '') == '': + merged_params[root_key][key] = ( + self.resolver.apply_intrinsic_function_if_any( + new_params[root_key][key], + self.file_name, + ) + ) + return merged_params + -def main(): - s3 = S3(DEPLOYMENT_ACCOUNT_REGION, SHARED_MODULES_BUCKET) +def main() -> None: + """ + Main method that is invoked when the generate params script is executed. + """ + parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3) + definition_bucket_name = parameter_store.fetch_parameter( + "/adf/pipeline_definition_bucket", + ) + definition_s3 = S3(DEPLOYMENT_ACCOUNT_REGION, definition_bucket_name) parameters = Parameters( PROJECT_NAME, - ParameterStore( - DEPLOYMENT_ACCOUNT_REGION, - boto3 - ), - s3 + parameter_store, + definition_s3, ) parameters.create_parameter_files() diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cache.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cache.py index d10dd8ead..b6f0acd5f 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cache.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cache.py @@ -12,7 +12,10 @@ class Cache: def __init__(self): self._stash = {} - def check(self, key): + def exists(self, key): + return key in self._stash + + def get(self, key): try: return self._stash[key] except KeyError: diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/deployment_map.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/deployment_map.py index d7f92e365..16bb4cd5c 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/deployment_map.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/deployment_map.py @@ -41,7 +41,8 @@ def update_deployment_parameters(self, pipeline): LOGGER.debug('wave: %s', wave) for wave_target in wave: LOGGER.debug('wave_target: %s', wave_target) - if wave_target.get('target'): # Allows target to be interchangeable with path + if wave_target.get('target'): + # Allows target to be interchangeable with path wave_target['path'] = wave_target.pop('target') if wave_target.get('path'): self.account_ou_names.update( @@ -51,8 +52,8 @@ def update_deployment_parameters(self, pipeline): if item['name'] != 'approval' } ) - with open(f'{pipeline.name}.json', mode='w', encoding='utf-8') as outfile: - json.dump(self.account_ou_names, outfile) + with open(f'{pipeline.name}.json', mode='w', encoding='utf-8') as file: + json.dump(self.account_ou_names, file) self.s3.put_object( f"adf-parameters/deployment/{pipeline.name}/account_ous.json", f'{pipeline.name}.json' diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/organizations.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/organizations.py index e741327e6..8bde1f91f 100644 --- 
a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/organizations.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/organizations.py @@ -243,12 +243,12 @@ def build_account_path(self, ou_id, account_path, cache): # While not at the root of the Organization while current.get('Type') != "ROOT": # check cache for ou name of id - if not cache.check(current.get('Id')): + if not cache.exists(current.get('Id')): cache.add( current.get('Id'), - self.describe_ou_name( - current.get('Id'))) - ou_name = cache.check(current.get('Id')) + self.describe_ou_name(current.get('Id')), + ) + ou_name = cache.get(current.get('Id')) account_path.append(ou_name) return self.build_account_path( current.get('Id'), diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/pipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/pipeline.py index 77726e2ad..197b53f02 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/pipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/pipeline.py @@ -50,7 +50,8 @@ def _write_output(self, output_template): with open(output_path, mode='w', encoding='utf-8') as file_handler: file_handler.write(output_template) - def _input_type_validation(self, params): #pylint: disable=R0201 + @staticmethod + def _input_type_validation(params): if not params.get('default_providers', {}).get('build', {}): params['default_providers']['build'] = {} params['default_providers']['build']['provider'] = 'codebuild' diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_cache.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_cache.py index d19ace9ff..18faa5839 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_cache.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_cache.py @@ -14,8 +14,21 @@ def cls(): def test_add(cls): cls.add('my_key', 'my_value') + assert cls.get('my_key') == 'my_value' -def test_check(cls): +def test_exists(cls): cls.add('my_key', 'my_value') - assert cls.check('my_key') == 'my_value' + cls.add('false_key', False) + assert cls.exists('my_key') is True + assert cls.exists('false_key') is True + assert cls.exists('missing_key') is False + + +def test_get(cls): + cls.add('my_key', 'my_value') + cls.add('true_key', True) + cls.add('false_key', False) + assert cls.get('my_key') == 'my_value' + assert cls.get('true_key') is True + assert cls.get('false_key') is False diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/requirements.txt b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/requirements.txt index e723c4b17..b8e87dd78 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/requirements.txt +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/requirements.txt @@ -59,3 +59,4 @@ pyyaml~=5.4 schema~=0.7.5 tenacity==8.1.0 urllib3~=1.26.13 +typing_extensions==3.10.0.0 diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/resolver.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/resolver.py index 7ef0469ea..a4dcc9982 100644 --- 
a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/resolver.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/resolver.py @@ -5,172 +5,60 @@ This file is pulled into CodeBuild containers and used to resolve values from Parameter Store and CloudFormation """ -import os -import boto3 - -from botocore.exceptions import ClientError -from s3 import S3 +from typing import Optional from parameter_store import ParameterStore -from partition import get_partition -from cloudformation import CloudFormation -from cache import Cache -from errors import ParameterNotFoundError -from sts import STS -from logger import configure_logger - -LOGGER = configure_logger(__name__) -DEFAULT_REGION = os.environ["AWS_REGION"] -S3_BUCKET_NAME = os.environ["S3_BUCKET_NAME"] +from base_resolver import BaseResolver +from resolver_param_store import ResolverParamStore +from resolver_stack_output import ResolverStackOutput +from resolver_upload import ResolverUpload class Resolver: - def __init__(self, parameter_store, stage_parameters, comparison_parameters): - self.parameter_store = parameter_store - self.stage_parameters = stage_parameters - self.comparison_parameters = comparison_parameters - self.sts = STS() - self.cache = Cache() + """ + Resolver class responsible for managing the intrinsic + resolvers that are available. + """ + def __init__(self, parameter_store: ParameterStore): + self.resolvers = [ + ResolverParamStore(), + ResolverStackOutput(), + ResolverUpload(parameter_store), + ] - @staticmethod - def _is_optional(value): - return value.endswith('?') + def _matching_intrinsic_resolver( + self, + lookup_str: str + ) -> Optional[BaseResolver]: + matches = list(filter( + lambda resolver: resolver.supports(lookup_str), + self.resolvers, + )) + return None if len(matches) == 0 else matches[0] - def fetch_stack_output(self, value, key, optional=False): # pylint: disable=too-many-statements - partition = get_partition(DEFAULT_REGION) - try: - [_, account_id, region, stack_name, output_key] = str(value).split(':') - except ValueError as error: - raise ValueError( - f"{value} is not a valid import string. 
Syntax should be " - "import:account_id:region:stack_name:output_key" - ) from error - if Resolver._is_optional(output_key): - LOGGER.info("Parameter %s is considered optional", output_key) - optional = True - output_key = output_key[:-1] if optional else output_key - try: - role = self.sts.assume_cross_account_role( - f'arn:{partition}:iam::{account_id}:role/adf-readonly-automation-role', - 'importer' - ) - cloudformation = CloudFormation( - region=region, - deployment_account_region=os.environ["AWS_REGION"], - role=role, - stack_name=stack_name, - account_id=account_id - ) - stack_output = self.cache.check(value) or cloudformation.get_stack_output(output_key) - if stack_output: - LOGGER.info("Stack output value is %s", stack_output) - self.cache.add(value, stack_output) - except ClientError: - if not optional: - raise - stack_output = "" - try: - parent_key = list(Resolver.determine_parent_key(self.comparison_parameters, key))[0] - if optional: - self.stage_parameters[parent_key][key] = stack_output - else: - if not stack_output: - raise Exception( - f"No Stack Output found on {account_id} in {region} " - f"with stack name {stack_name} and " - f"output key {output_key}" - ) - self.stage_parameters[parent_key][key] = stack_output - except IndexError as error: - if stack_output: - if self.stage_parameters.get(key): - self.stage_parameters[key] = stack_output - else: - raise Exception( - "Could not determine the structure of the file in order " - "to import from CloudFormation", - ) from error - return True + def apply_intrinsic_function_if_any( + self, + lookup_value: str, + file_name: str, + ) -> str: + """ + Apply the first intrinsic function that matches if there is one. + Otherwise return the lookup_value as is. - def upload(self, value, key, file_name): - if not any(item in value for item in S3.supported_path_styles()): - raise Exception( - 'When uploading to S3 you need to specify a path style' - 'to use for the returned value to be used. ' - f'Supported path styles include: {S3.supported_path_styles()}' - ) from None - if str(value).count(':') > 2: - [_, region, style, value] = value.split(':') - else: - [_, style, value] = value.split(':') - region = DEFAULT_REGION - bucket_name = self.parameter_store.fetch_parameter( - f'/cross_region/s3_regional_bucket/{region}' - ) - client = S3(region, bucket_name) - try: - parent_key = list(Resolver.determine_parent_key(self.comparison_parameters, key))[0] - except IndexError: - if self.stage_parameters.get(key): - self.stage_parameters[key] = client.put_object( - f"adf-upload/{value}/{file_name}".format(value, file_name), - str(value), - style, - True # pre-check - ) - return True - self.stage_parameters[parent_key][key] = client.put_object( - f"adf-upload/{value}/{file_name}", - str(value), - style, - True # pre-check - ) - return True + Args: + lookup_value (str): The lookup value that could instruct an + intrinsic function to lookup the value as specified. + file_name (str): The random string used to create unique + file uploads when required. 
- @staticmethod - def determine_parent_key(d, target_key, parent_key=None): - for key, value in d.items(): - if key == target_key: - yield parent_key - if isinstance(value, dict): - for result in Resolver.determine_parent_key(value, target_key, key): - yield result - - def fetch_parameter_store_value(self, value, key, optional=False): # pylint: disable=too-many-statements - if self._is_optional(value): - LOGGER.info("Parameter %s is considered optional", value) - optional = True - if str(value).count(':') > 1: - [_, region, value] = value.split(':') - else: - [_, value] = value.split(':') - region = DEFAULT_REGION - value = value[:-1] if optional else value - client = ParameterStore(region, boto3) - try: - parameter = self.cache.check(f'{region}/{value}') or client.fetch_parameter(value) - except ParameterNotFoundError: - if optional: - LOGGER.info("Parameter %s not found, returning empty string", value) - parameter = "" - else: - raise - try: - parent_key = list(Resolver.determine_parent_key(self.comparison_parameters, key))[0] - if parameter: - self.cache.add(f'{region}/{value}', parameter) - self.stage_parameters[parent_key][key] = parameter - except IndexError as error: - if parameter: - if self.stage_parameters.get(key): - self.stage_parameters[key] = parameter - else: - LOGGER.error("Parameter was not found, unable to fetch it from parameter store") - raise Exception("Parameter was not found, unable to fetch it from parameter store") from error - return True - - def update(self, key): - for k, _ in self.comparison_parameters.items(): - if not self.stage_parameters.get(k) and not self.stage_parameters.get(k, {}).get(key): - self.stage_parameters[k] = self.comparison_parameters[k] - if key not in self.stage_parameters[k] and self.comparison_parameters.get(k, {}).get(key): - self.stage_parameters[k][key] = self.comparison_parameters[k][key] + Return: + str: The resolved value using the first matching intrinsic + resolver if any. Or the lookup_value as passed to the + function if no intrinsic resolvers support the lookup. + """ + matching_resolver = self._matching_intrinsic_resolver(lookup_value) + if matching_resolver is not None: + return matching_resolver.resolve( + lookup_value, + file_name, + ) + return lookup_value diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/resolver_param_store.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/resolver_param_store.py new file mode 100644 index 000000000..3e9a171b9 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/resolver_param_store.py @@ -0,0 +1,90 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +ResolverParamStore class used to resolve parameter store lookups. +""" +import os +from typing import Tuple +import boto3 +from logger import configure_logger +from parameter_store import ParameterStore +from errors import ParameterNotFoundError +from base_resolver import BaseResolver + +LOGGER = configure_logger(__name__) +DEFAULT_REGION = os.environ["AWS_REGION"] + + +class ResolverParamStore(BaseResolver): + """ + The Parameter Store Resolver is able to resolve the parameter + as instructed using the 'resolve:' syntax. 
+ """ + + @staticmethod + def _get_region_and_param_path(lookup_str: str) -> Tuple[str, str]: + lookup_split = lookup_str.split(':') + # The last element is the path + path = lookup_split[-1] + region = lookup_split[-2] if len(lookup_split) > 2 else DEFAULT_REGION + return (region, path) + + # pylint: disable=unused-argument + def resolve(self, lookup_str: str, random_filename: str) -> str: + """ + Assumes that the lookup_str starts with 'resolve:'. + + This function will perform a lookup in parameter store + to find the value as requested by the lookup_str. + + Args: + lookup_str (str): The lookup string that contains the + `resolve:` lookup path. + random_filename (str): The random filename, not used in this + function. + + Returns: + str: The value as looked up in parameter store. + """ + optional = self._is_optional(lookup_str) + if optional: + LOGGER.info("Parameter %s is considered optional", lookup_str) + lookup_str = lookup_str[:-1] + [region, param_path] = self._get_region_and_param_path(lookup_str) + cache_key = f'{region}/{param_path}' + if self.cache.exists(cache_key): + return self.cache.get(cache_key) + client = ParameterStore(region, boto3) + try: + param_value = client.fetch_parameter(param_path) + if param_value: + self.cache.add(f'{region}/{param_path}', param_value) + return param_value + except ParameterNotFoundError: + if not optional: + raise + LOGGER.info( + "Parameter %s not found, returning empty string", + param_path, + ) + return "" + + # To enable an easy interface that could do lookups + # whether a specific lookup string is supported or not it + # should be instance based. Disabling: no-self-use warning + # pylint: disable=R0201 + def supports(self, lookup_str: str) -> bool: + """ + Check if this resolver supports the lookup_str syntax. + + Args: + lookup_str (str): The lookup string that might have resolve: or + another resolver syntax. + + Returns: + bool: True if this resolver supports the lookup_str syntax. + In other words, the lookup_str starts with `resolve:`. + False if not. + """ + return lookup_str.startswith('resolve:') diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/resolver_stack_output.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/resolver_stack_output.py new file mode 100644 index 000000000..fd6f7a192 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/resolver_stack_output.py @@ -0,0 +1,125 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +ResolverStackOutput class used to resolve CloudFormation Stack output lookups. +""" +import os +from botocore.exceptions import ClientError +from logger import configure_logger +from partition import get_partition +from cloudformation import CloudFormation +from sts import STS +from base_resolver import BaseResolver + +LOGGER = configure_logger(__name__) +DEFAULT_REGION = os.environ["AWS_REGION"] + + +class ResolverStackOutput(BaseResolver): + """ + The CloudFormation Stack Output Resolver is able to resolve `import:` + syntax. It will perform a lookup in the requested CloudFormation stack + for the output value of the key as specified in the lookup syntax. 
+ """ + + def __init__(self): + BaseResolver.__init__(self) + self.sts = STS() + + def _get_stack( + self, + account_id: str, + region: str, + stack_name: str, + ) -> CloudFormation: + partition = get_partition(DEFAULT_REGION) + role = self.sts.assume_cross_account_role( + f'arn:{partition}:iam::{account_id}:' + 'role/adf-readonly-automation-role', + 'importer' + ) + return CloudFormation( + region=region, + deployment_account_region=os.environ["AWS_REGION"], + role=role, + stack_name=stack_name, + account_id=account_id, + ) + + # pylint: disable=unused-argument + def resolve(self, lookup_str: str, random_filename: str) -> str: + """ + Assumes that the lookup_str starts with 'import:'. + + This function will perform a lookup in CloudFormation + to find the output value as requested by the lookup_str. + + Args: + lookup_str (str): The lookup string that contains the + `import:` lookup path. + random_filename (str): The random filename, not used in this + function. + + Returns: + str: The value as looked up in CloudFormation. + """ + optional = self._is_optional(lookup_str) + if optional: + LOGGER.info("Import %s is considered optional", lookup_str) + # Remove the question mark + lookup_str = lookup_str[:-1] + if self.cache.exists(lookup_str): + return self.cache.get(lookup_str) + try: + [_, account_id, region, stack_name, output_key] = ( + str(lookup_str).split(':') + ) + except ValueError as error: + raise ValueError( + f"{lookup_str} is not a valid import string. " + "Syntax should be: " + "import:account_id:region:stack_name:output_key" + ) from error + try: + stack = self._get_stack(account_id, region, stack_name) + stack_output = stack.get_stack_output(output_key) + except ClientError as client_error: + LOGGER.info( + "Could not retrieve CloudFormation output %s ran into " + "a client error: %s", + lookup_str, + str(client_error), + ) + if not optional: + raise + stack_output = None + if stack_output is not None: + LOGGER.info("Stack output value is %s", stack_output) + self.cache.add(lookup_str, stack_output) + elif not optional: + raise Exception( + f"No Stack Output found on {account_id} in {region} " + f"with stack name {stack_name} and " + f"output key {output_key}" + ) + return stack_output if stack_output is not None else "" + + # To enable an easy interface that could do lookups + # whether a specific lookup string is supported or not it + # should be instance based. Disabling: no-self-use warning + # pylint: disable=R0201 + def supports(self, lookup_str: str) -> bool: + """ + Check if this resolver supports the lookup_str syntax. + + Args: + lookup_str (str): The lookup string that might have import: or + another resolver syntax. + + Returns: + bool: True if this resolver supports the lookup_str syntax. + In other words, the lookup_str starts with `import:`. + False if not. + """ + return lookup_str.startswith('import:') diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/resolver_upload.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/resolver_upload.py new file mode 100644 index 000000000..357c3598f --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/resolver_upload.py @@ -0,0 +1,100 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +ResolverUpload class used to upload files to S3. 
+""" +import os +from typing import Tuple +from base_resolver import BaseResolver +from logger import configure_logger +from parameter_store import ParameterStore +from s3 import S3 + +LOGGER = configure_logger(__name__) +DEFAULT_REGION = os.environ["AWS_REGION"] + + +class ResolverUpload(BaseResolver): + """ + The S3 Upload Resolver is able to resolve `upload:` syntax. + It will upload the local file to the S3 bucket and resolve the + path to the object in the requested path syntax. + """ + + def __init__(self, parameter_store: ParameterStore): + BaseResolver.__init__(self) + self.parameter_store = parameter_store + + @staticmethod + def _get_region_style_and_object_key( + lookup_str: str, + ) -> Tuple[str, str, str]: + lookup_split = lookup_str.split(':') + # The last element is the object_key + object_key = lookup_split[-1] + style = lookup_split[-2] + region = lookup_split[-3] if len(lookup_split) > 3 else DEFAULT_REGION + return (region, style, object_key) + + def resolve(self, lookup_str: str, random_filename: str) -> str: + """ + Assumes that the lookup_str starts with 'upload:'. + + This function will perform an upload of the specified file to S3 + and return the path to the object as requested by the lookup_str. + + Args: + lookup_str (str): The lookup string that contains the + `upload:` instructions. + random_filename (str): The random filename, used to upload a + unique object to the S3 bucket. + + Returns: + str: The path to the uploaded object in S3. + """ + if not any( + item in lookup_str + for item in S3.supported_path_styles() + ): + raise Exception( + 'When uploading to S3 you need to specify a path style' + 'to use for the returned value to be used. ' + f'Supported path styles include: {S3.supported_path_styles()}' + ) from None + if self.cache.exists(lookup_str): + return self.cache.get(lookup_str) + (region, style, object_key) = self._get_region_style_and_object_key( + lookup_str, + ) + bucket_name = self.parameter_store.fetch_parameter( + f'/cross_region/s3_regional_bucket/{region}' + ) + s3_client = S3(region, bucket_name) + resolved_location = s3_client.put_object( + f"adf-upload/{object_key}/{random_filename}", + str(object_key), + style, + True # pre-check + ) + self.cache.add(lookup_str, resolved_location) + return resolved_location + + # To enable an easy interface that could do lookups + # whether a specific lookup string is supported or not it + # should be instance based. Disabling: no-self-use warning + # pylint: disable=R0201 + def supports(self, lookup_str: str) -> bool: + """ + Check if this resolver supports the lookup_str syntax. + + Args: + lookup_str (str): The lookup string that might have upload: or + another resolver syntax. + + Returns: + bool: True if this resolver supports the lookup_str syntax. + In other words, the lookup_str starts with `upload:`. + False if not. 
+ """ + return lookup_str.startswith('upload:') diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/account_name1_eu-central-1.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/account_name1_eu-central-1.yml deleted file mode 100644 index 684c99deb..000000000 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/account_name1_eu-central-1.yml +++ /dev/null @@ -1,2 +0,0 @@ -Parameters: - CostCenter: not_free diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/parameter_environment_acceptance_tag_project_a.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/parameter_environment_acceptance_tag_project_a.yml new file mode 100644 index 000000000..9883080a7 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/parameter_environment_acceptance_tag_project_a.yml @@ -0,0 +1,5 @@ +Parameters: + Environment: acceptance + +Tags: + Project: 'ProjectA' diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/parameter_environment_prod.json b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/parameter_environment_prod.json new file mode 100644 index 000000000..90dce1df7 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/parameter_environment_prod.json @@ -0,0 +1,5 @@ +{ + "Parameters": { + "Environment": "production" + } +} diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/parameter_extra_one_only.json b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/parameter_extra_one_only.json new file mode 100644 index 000000000..d99ef80dc --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/parameter_extra_one_only.json @@ -0,0 +1,5 @@ +{ + "Parameters": { + "Extra": "one" + } +} diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/stub_cfn_global.json b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/stub_cfn_global.json index 5157a0170..a6842bbea 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/stub_cfn_global.json +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/stub_cfn_global.json @@ -1,11 +1,11 @@ { "Parameters": { - "CostCenter": "123", "Environment": "testing", "MySpecialValue": "resolve:/values/some_value" }, "Tags": { - "TagKey": "123", - "MyKey": "new_value" + "CostCenter": "overhead", + "Department": "unknown", + "Geography": "world" } } diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/stub_cfn_global.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/stub_cfn_global.yml new file mode 100644 index 000000000..f7a5aeedd --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/stub_cfn_global.yml @@ -0,0 +1,8 @@ +Parameters: + Environment: "testing" + MySpecialValue: "resolve:/values/some_value" + +Tags: + CostCenter: "overhead" + Department: "unknown" + Geography: "world" diff --git 
a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/account_name1_eu-west-1.json b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_cost_center_free_only.json similarity index 66% rename from src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/account_name1_eu-west-1.json rename to src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_cost_center_free_only.json index d4d74bc20..081b8b8e8 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/account_name1_eu-west-1.json +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_cost_center_free_only.json @@ -1,5 +1,5 @@ { - "Parameters": { + "Tags": { "CostCenter": "free" } } diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_cost_center_nonfree_only.json b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_cost_center_nonfree_only.json new file mode 100644 index 000000000..d07c0e540 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_cost_center_nonfree_only.json @@ -0,0 +1,5 @@ +{ + "Tags": { + "CostCenter": "non-free" + } +} diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_department_alpha_only.json b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_department_alpha_only.json new file mode 100644 index 000000000..a54435f6b --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_department_alpha_only.json @@ -0,0 +1,5 @@ +{ + "Tags": { + "Department": "alpha" + } +} diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_geo_eu_only.json b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_geo_eu_only.json new file mode 100644 index 000000000..612e544ed --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_geo_eu_only.json @@ -0,0 +1,5 @@ +{ + "Tags": { + "Geography": "eu" + } +} diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_generate_params.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_generate_params.py index c58f266f2..ef86bf458 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_generate_params.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_generate_params.py @@ -4,30 +4,124 @@ # pylint: skip-file import shutil +import json import os -import boto3 -import sys -from pytest import fixture +from pytest import fixture, mark from mock import Mock, patch -from cache import Cache from generate_params import Parameters from parameter_store import ParameterStore from cloudformation import CloudFormation from sts import STS -from resolver import Resolver + + +@fixture +def input_wave_target_one(): + return { + 'id': '111111111111', + 'name': 'account_name1', + 'path': '/one/path', + 'regions': ['eu-west-1'], + } + + +@fixture +def input_wave_target_one_north(): + return { + 'id': '111111111111', + 'name': 'account_name1', + 'path': '/one/path', + 'regions': ['eu-north-1'], + } + + +@fixture +def input_wave_target_one_us(): 
+ return { + 'id': '111111111111', + 'name': 'account_name1', + 'path': '/one/path', + 'regions': ['us-east-1'], + } + + +@fixture +def input_wave_target_two(): + return { + 'id': '222222222222', + 'name': 'account_name2', + 'path': '/two/path', + 'regions': ['eu-west-2'], + } + + +@fixture +def input_wave_target_two_south(): + return { + 'id': '222222222222', + 'name': 'account_name2', + 'path': '/two/path', + 'regions': ['eu-south-1'], + } + + +@fixture +def input_wave_target_two_us(): + return { + 'id': '222222222222', + 'name': 'account_name2', + 'path': '/two/path', + 'regions': ['us-west-2'], + } + + +@fixture +def input_definition_targets( + input_wave_target_one, + input_wave_target_one_north, + input_wave_target_one_us, + input_wave_target_two, + input_wave_target_two_south, + input_wave_target_two_us, +): + return [ # Waves are inside an array + [ # Wave 1 + [ # Wave targets 1 - set 1 + input_wave_target_one, + input_wave_target_two, + ], + [ # Wave targets 1 - set 2 + input_wave_target_one_north, + input_wave_target_two_south, + ], + ], + [ # Wave 2 + [ # Wave targets 2 - set 1 + input_wave_target_one_us, + ], + [ # Wave targets 2 - set 2 + input_wave_target_two_us, + ], + ], + ] @fixture def cls(): parameter_store = Mock() - s3 = Mock() - s3.read_object.return_value = str({}) + definition_s3 = Mock() + definition_s3.read_object.return_value = json.dumps({ + 'input': { + 'environments': { + 'targets': [], + } + } + }) parameter_store.fetch_parameter.return_value = str({}) parameters = Parameters( build_name='some_name', parameter_store=parameter_store, - s3=s3, + definition_s3=definition_s3, directory=os.path.abspath( os.path.join( os.path.dirname(__file__), @@ -35,13 +129,9 @@ def cls(): ) ) ) - parameters.account_ous = { - 'account_name1': '/banking/testing', - 'account_name2': '/banking/production', - } - parameters.regions = ['eu-west-1', 'eu-central-1', 'us-west-2'] yield parameters - shutil.rmtree('{0}/params'.format(parameters.cwd)) + # Skip the first slash + shutil.rmtree(f'{parameters.cwd}/params') def test_valid_build_name(cls): @@ -49,129 +139,327 @@ def test_valid_build_name(cls): def test_params_folder_created(cls): - assert os.path.exists('{0}/params'.format(cls.cwd)) + assert os.path.exists(f'{cls.cwd}/params') + +def test_retrieve_pipeline_targets_empty(cls): + targets = cls._retrieve_pipeline_targets() + assert targets == {} -def test_parse(cls): + +def test_retrieve_pipeline_targets(cls, input_definition_targets): + cls.definition_s3.read_object.return_value = json.dumps({ + 'input': { + 'environments': { + 'targets': input_definition_targets, + } + } + }) + targets = cls._retrieve_pipeline_targets() + assert targets['111111111111'] == { + 'id': '111111111111', + 'account_name': 'account_name1', + 'path': '/one/path', + 'regions': sorted(['eu-west-1', 'eu-north-1', 'us-east-1']), + } + assert targets['222222222222'] == { + 'id': '222222222222', + 'account_name': 'account_name2', + 'path': '/two/path', + 'regions': sorted(['eu-west-2', 'eu-south-1', 'us-west-2']), + } + assert list(targets.keys()) == [ + '111111111111', + '222222222222', + ] + + +@mark.parametrize("file, to_file, ext", [ + ('stub_cfn_global', 'global', 'json'), + ('stub_cfn_global', 'global_yml', 'yml'), +]) +def test_parse(cls, file, to_file, ext): + shutil.copy( + f"{cls.cwd}/{file}.{ext}", + f"{cls.cwd}/params/{to_file}.{ext}", + ) parse = cls._parse( - '{0}/stub_cfn_global'.format(cls.cwd) + cls.cwd, + to_file, ) # Unresolved Intrinsic at this stage assert parse == { 'Parameters': { - 
'CostCenter': '123', 'Environment': 'testing', - 'MySpecialValue': 'resolve:/values/some_value' + 'MySpecialValue': 'resolve:/values/some_value', }, 'Tags': { - 'TagKey': '123', - 'MyKey': 'new_value', + 'CostCenter': 'overhead', + 'Department': 'unknown', + 'Geography': 'world', }, } def test_parse_not_found(cls): parse = cls._parse( - '{0}/nothing'.format(cls.cwd) + cls.cwd, + 'nothing', ) assert parse == {'Parameters': {}, 'Tags': {}} -def test_param_updater(cls): - with patch.object(ParameterStore, 'fetch_parameter', return_value='something') as ssm_mock: +def test_merge_params(cls): + shutil.copy( + f"{cls.cwd}/stub_cfn_global.json", + f"{cls.cwd}/params/global.json", + ) + with patch.object( + ParameterStore, + 'fetch_parameter', + return_value='something' + ): parse = cls._parse( - '{0}/stub_cfn_global'.format(cls.cwd) + cls.cwd, + 'global', ) - compare = cls._param_updater( + compare = cls._merge_params( parse, {'Parameters': {}, 'Tags': {}} ) assert compare == { 'Parameters': { - 'CostCenter': '123', 'Environment': 'testing', 'MySpecialValue': 'something', }, 'Tags': { - 'TagKey': '123', - 'MyKey': 'new_value', + 'CostCenter': 'overhead', + 'Department': 'unknown', + 'Geography': 'world', + } + } + + +def test_merge_params_with_preset(cls): + shutil.copy( + f"{cls.cwd}/stub_cfn_global.json", + f"{cls.cwd}/params/global.json", + ) + with patch.object( + ParameterStore, + 'fetch_parameter', + return_value='something' + ): + parse = cls._parse( + cls.cwd, + 'global', + ) + compare = cls._merge_params( + parse, + { + 'Parameters': { + 'Base': 'Parameter', + }, + 'Tags': { + 'CostCenter': 'should-not-be-overwritten', + 'SomeBaseTag': 'BaseTag', + }, + } + ) + assert compare == { + 'Parameters': { + 'Base': 'Parameter', + 'Environment': 'testing', + 'MySpecialValue': 'something', + }, + 'Tags': { + 'CostCenter': 'should-not-be-overwritten', + 'Department': 'unknown', + 'Geography': 'world', + 'SomeBaseTag': 'BaseTag', } } -def test_create_parameter_files(cls): - with patch.object(ParameterStore, 'fetch_parameter', return_value='something') as ssm_mock: - cls.global_path = "{0}/stub_cfn_global".format(cls.cwd) +def test_create_parameter_files(cls, input_definition_targets): + cls.definition_s3.read_object.return_value = json.dumps({ + 'input': { + 'environments': { + 'targets': input_definition_targets, + } + } + }) + with patch.object( + ParameterStore, + 'fetch_parameter', + return_value='something', + ): cls.create_parameter_files() - assert os.path.exists("{0}/params/account_name1_eu-west-1.json".format(cls.cwd)) - assert os.path.exists("{0}/params/account_name1_eu-central-1.json".format(cls.cwd)) - assert os.path.exists("{0}/params/account_name1_us-west-2.json".format(cls.cwd)) - assert os.path.exists("{0}/params/account_name2_eu-west-1.json".format(cls.cwd)) - assert os.path.exists("{0}/params/account_name2_eu-central-1.json".format(cls.cwd)) - assert os.path.exists("{0}/params/account_name2_us-west-2.json".format(cls.cwd)) + assert os.path.exists(f"{cls.cwd}/params/account_name1_eu-west-1.json") + assert os.path.exists( + f"{cls.cwd}/params/account_name1_eu-north-1.json", + ) + assert os.path.exists(f"{cls.cwd}/params/account_name1_us-east-1.json") + assert os.path.exists(f"{cls.cwd}/params/account_name2_eu-west-2.json") + assert os.path.exists( + f"{cls.cwd}/params/account_name2_eu-south-1.json", + ) + assert os.path.exists(f"{cls.cwd}/params/account_name2_us-west-2.json") -def test_ensure_parameter_default_contents(cls): - with patch.object(ParameterStore, 'fetch_parameter', 
return_value='something') as ssm_mock: - cls.global_path = "{0}/stub_cfn_global".format(cls.cwd) +def test_ensure_parameter_default_contents(cls, input_definition_targets): + cls.definition_s3.read_object.return_value = json.dumps({ + 'input': { + 'environments': { + 'targets': input_definition_targets, + } + } + }) + shutil.copy( + f"{cls.cwd}/stub_cfn_global.json", + f"{cls.cwd}/params/global.json", + ) + with patch.object( + ParameterStore, + 'fetch_parameter', + return_value='something', + ): cls.create_parameter_files() parse = cls._parse( - "{0}/params/account_name1_us-west-2".format(cls.cwd) + cls.cwd, + "account_name1_us-east-1", ) assert parse == { 'Parameters': { - 'CostCenter': '123', 'Environment': 'testing', 'MySpecialValue': 'something', }, 'Tags': { - 'TagKey': '123', - 'MyKey': 'new_value', + 'CostCenter': 'overhead', + 'Department': 'unknown', + 'Geography': 'world', } } -def test_ensure_parameter_specific_contents(cls): - cls.global_path = "{0}/stub_cfn_global".format(cls.cwd) +def test_ensure_parameter_overrides( + cls, + input_wave_target_one, + input_wave_target_one_north, + input_wave_target_two +): + cls.definition_s3.read_object.return_value = json.dumps({ + 'input': { + 'environments': { + 'targets': [ + [ + [ + input_wave_target_one, + ], + [ + input_wave_target_one_north, + ], + ], + [ + [ + input_wave_target_two, + ], + ] + ] + } + } + }) + os.mkdir(f'{cls.cwd}/params/one') + shutil.copy( + f"{cls.cwd}/stub_cfn_global.json", + f"{cls.cwd}/params/global.json", + ) shutil.copy( - "{0}/account_name1_eu-west-1.json".format(cls.cwd), - "{0}/params/account_name1_eu-west-1.json".format(cls.cwd) + f"{cls.cwd}/parameter_environment_acceptance_tag_project_a.yml", + f"{cls.cwd}/params/global_eu-west-1.yml", ) shutil.copy( - "{0}/account_name1_eu-central-1.yml".format(cls.cwd), - "{0}/params/account_name1_eu-central-1.yml".format(cls.cwd) + f"{cls.cwd}/tag_department_alpha_only.json", + f"{cls.cwd}/params/one/path.json", + ) + shutil.copy( + f"{cls.cwd}/tag_geo_eu_only.json", + f"{cls.cwd}/params/one/path_eu-west-1.json", + ) + shutil.copy( + f"{cls.cwd}/parameter_extra_one_only.json", + f"{cls.cwd}/params/account_name1.json", + ) + shutil.copy( + f"{cls.cwd}/tag_cost_center_free_only.json", + f"{cls.cwd}/params/account_name2_eu-west-2.json", ) - with patch.object(ParameterStore, 'fetch_parameter', return_value='something') as ssm_mock: - with patch.object(CloudFormation, 'get_stack_output', return_value='something_else') as cfn_mock: - with patch.object(STS, 'assume_cross_account_role', return_value={}) as sts_mock: + with patch.object( + ParameterStore, + 'fetch_parameter', + return_value='something', + ): + with patch.object( + CloudFormation, + 'get_stack_output', + return_value='something_else', + ): + with patch.object( + STS, + 'assume_cross_account_role', + return_value={}, + ): cls.create_parameter_files() - parse_json = cls._parse( - "{0}/params/account_name1_eu-west-1".format(cls.cwd) - ) - parse_yml = cls._parse( - "{0}/params/account_name1_eu-central-1".format(cls.cwd) + assert ( + cls._parse( + cls.cwd, + "account_name1_eu-west-1", + ) == { + 'Parameters': { + 'Environment': 'acceptance', # Global region + 'MySpecialValue': 'something', # Global + 'Extra': 'one', # Account + }, + 'Tags': { + 'CostCenter': 'overhead', # Global + 'Department': 'alpha', # OU + 'Geography': 'eu', # OU Region + 'Project': 'ProjectA', # Global region + } + } ) - assert parse_json == { - 'Parameters': { - 'CostCenter': 'free', - 'MySpecialValue': 'something', - 'Environment': 
'testing', - }, - 'Tags': { - 'TagKey': '123', - 'MyKey': 'new_value', + assert ( + cls._parse( + cls.cwd, + "account_name1_eu-north-1", + ) == { + 'Parameters': { + 'Environment': 'testing', # Global + 'MySpecialValue': 'something', # Global + 'Extra': 'one', # Account + }, + 'Tags': { + 'CostCenter': 'overhead', # Global + 'Department': 'alpha', # OU + 'Geography': 'world', # Global + } } - } - assert parse_yml == { - 'Parameters': { - 'CostCenter': 'not_free', - 'MySpecialValue': 'something', - 'Environment': 'testing', - }, - 'Tags': { - 'TagKey': '123', - 'MyKey': 'new_value', + ) + assert ( + cls._parse( + cls.cwd, + "account_name2_eu-west-2", + ) == { + 'Parameters': { + 'Environment': 'testing', # Global + 'MySpecialValue': 'something', # Global + }, + 'Tags': { + 'CostCenter': 'free', # Account Region + 'Department': 'unknown', # Global + 'Geography': 'world', # Global + } } - } + )
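
The nested "targets" fixture above mirrors the pipeline definition layout the tests feed in: a list of waves, each wave a list of target sets, each set a list of account targets. A minimal sketch of the flattening that test_retrieve_pipeline_targets asserts, using a hypothetical flatten_targets helper rather than the actual _retrieve_pipeline_targets body:

def flatten_targets(waves):
    """Collapse waves -> target sets -> targets into one entry per
    account id, unioning the regions across all occurrences."""
    targets = {}
    for wave in waves:
        for target_set in wave:
            for target in target_set:
                entry = targets.setdefault(target['id'], {
                    'id': target['id'],
                    'account_name': target['name'],
                    'path': target['path'],
                    'regions': set(),
                })
                entry['regions'].update(target['regions'])
    for entry in targets.values():
        # Sorted regions match the sorted([...]) assertions in the test.
        entry['regions'] = sorted(entry['regions'])
    return targets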
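
The override behaviour exercised by test_ensure_parameter_overrides follows "most specific file wins": candidate parameter files are read from account-and-region down to global, and a less specific file only fills in keys that are still unset. A minimal sketch of that layering, assuming hypothetical helper names (candidate_order, merge_missing) and an ordering of account region > account > OU region > OU > global region > global, rather than the actual generate_params implementation:

import copy


def candidate_order(account_name, region, ou_path):
    """Parameter file stems from most to least specific."""
    ou = ou_path.lstrip('/')  # '/one/path' -> 'one/path'
    return [
        f"{account_name}_{region}",  # Account Region
        account_name,                # Account
        f"{ou}_{region}",            # OU Region
        ou,                          # OU
        f"global_{region}",          # Global Region
        "global",                    # Global
    ]


def merge_missing(specific, generic):
    """Fill in keys from the more generic file without overwriting
    values a more specific file already set."""
    merged = copy.deepcopy(specific)
    for section in ('Parameters', 'Tags'):
        for key, value in generic.get(section, {}).items():
            merged.setdefault(section, {}).setdefault(key, value)
    return merged


# Example: Department comes from the OU file, Geography from the OU
# region file, and global defaults only fill what is still unset.
files = {
    'global': {'Tags': {'Department': 'unknown', 'Geography': 'world'}},
    'one/path': {'Tags': {'Department': 'alpha'}},
    'one/path_eu-west-1': {'Tags': {'Geography': 'eu'}},
}
result = {'Parameters': {}, 'Tags': {}}
for stem in candidate_order('account_name1', 'eu-west-1', '/one/path'):
    result = merge_missing(result, files.get(stem, {}))
assert result['Tags'] == {'Department': 'alpha', 'Geography': 'eu'}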