Merged
2 changes: 2 additions & 0 deletions Makefile
@@ -5,8 +5,10 @@ test:
# Run unit tests
pytest src/lambda_codebase/account -vvv -s -c src/lambda_codebase/account/pytest.ini
pytest src/lambda_codebase/account_processing -vvv -s -c src/lambda_codebase/account_processing/pytest.ini
pytest src/lambda_codebase/initial_commit -vvv -s -c src/lambda_codebase/initial_commit/pytest.ini
pytest src/lambda_codebase/initial_commit/bootstrap_repository -vvv -s -c src/lambda_codebase/initial_commit/bootstrap_repository/pytest.ini
pytest src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase -vvv -s -c src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pytest.ini
pytest src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/initial_commit -vvv -s -c src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/initial_commit/pytest.ini
pytest src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python -vvv -s -c src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/pytest.ini
pytest src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk -vvv -s -c src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/pytest.ini
pytest src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared -vvv -s -c src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/pytest.ini
@@ -1,12 +1,12 @@
"""
-The Initial Commit main that is called when ADF is installed to commit the initial bootstrap repository content
+The Initial Commit main that is called when ADF is installed to commit the
+initial pipelines repository content.
"""

from typing import Mapping, Optional, Union, List, Dict, Any, Tuple
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
import os
import re
import boto3
import jinja2
@@ -24,15 +24,25 @@
NOT_YET_CREATED = "NOT_YET_CREATED"
CC_CLIENT = boto3.client("codecommit")
CONFIG_FILE_REGEX = re.compile(r"\A.*[.](yaml|yml|json)\Z", re.I)
EXECUTABLE_FILES = []
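As a quick illustration of what CONFIG_FILE_REGEX is intended to match (the sample file names below are hypothetical examples, not taken from this diff):

CONFIG_FILE_REGEX.match("deployment_map.yml")   # matches
CONFIG_FILE_REGEX.match("adfconfig.YAML")       # matches, re.I makes it case-insensitive
CONFIG_FILE_REGEX.match("README.md")            # None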

PR_DESCRIPTION = """ADF Version {0} from https://github.com/awslabs/aws-deployment-framework
PR_DESCRIPTION = """ADF Version {0}

This PR was automatically created when you deployed version {0} of the AWS Deployment Framework through the Serverless Application Repository.
You can find the changelog at:
https://github.com/awslabs/aws-deployment-framework/releases/tag/v{0}

Review this PR to understand what changes will be made to your bootstrapping repository. If you also made changes to the repository yourself, you might have to resolve merge conflicts before you can merge this PR.
This PR was automatically created when you deployed version {0} of the
AWS Deployment Framework through the Serverless Application Repository.

Merge this PR to complete the deployment of the version {0} of the AWS Deployment Framework.
Review this PR to understand what changes will be made to your bootstrapping
repository. If you also made changes to the repository yourself,
you might have to resolve merge conflicts before you can merge this PR.

Merge this PR to complete the deployment of the version {0} of the
AWS Deployment Framework.
"""


@dataclass
class CustomResourceProperties:
ServiceToken: str
@@ -69,7 +79,7 @@ class Event:

def __post_init__(self):
self.ResourceProperties = CustomResourceProperties(
-**self.ResourceProperties # pylint: disable=not-a-mapping
+**self.ResourceProperties  # pylint: disable=not-a-mapping
)


@@ -101,10 +111,12 @@ def as_dict(self) -> Dict[str, Union[str, bytes]]:
"filePath": self.filePath
}


@dataclass
class CreateEvent(Event):
pass


@dataclass
class UpdateEvent(Event):
PhysicalResourceId: str
@@ -118,23 +130,27 @@ def __post_init__(self):
**self.OldResourceProperties # pylint: disable=not-a-mapping
)


def generate_create_branch_input(event, repo_name, commit_id):
return {
"repositoryName": repo_name,
"branchName": event.ResourceProperties.Version,
"commitId": commit_id
}


def generate_delete_branch_input(event, repo_name):
return {
"repositoryName": repo_name,
"branchName": event.ResourceProperties.Version
}


def chunks(list_to_chunk, number_to_chunk_into):
number_of_chunks = max(1, number_to_chunk_into)
return (list_to_chunk[item:item + number_of_chunks] for item in range(0, len(list_to_chunk), number_of_chunks))
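For illustration, a minimal sketch of how this helper behaves; despite its name, the number_to_chunk_into argument acts as the size of each batch (the sample list below is hypothetical):

files = [f"file-{i}" for i in range(5)]
list(chunks(files, 2))
# [['file-0', 'file-1'], ['file-2', 'file-3'], ['file-4']]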


def generate_pull_request_input(event, repo_name):
return {
"title": f'ADF {event.ResourceProperties.Version} Automated Update PR',
@@ -148,6 +164,7 @@ def generate_pull_request_input(event, repo_name):
]
}


def generate_commit_input(repo_name, index, branch="master", parent_commit_id=None, puts=None, deletes=None):
commit_action = "Delete" if deletes else "Create"
output = {
@@ -163,6 +180,7 @@ def generate_commit_input(repo_name, index, branch="master", parent_commit_id=No
output["parentCommitId"] = parent_commit_id
return output


@create()
def create_(event: Mapping[str, Any], _context: Any) -> Tuple[Union[None, PhysicalResourceId], Data]:
create_event = CreateEvent(**event)
@@ -216,6 +234,7 @@ def create_(event: Mapping[str, Any], _context: Any) -> Tuple[Union[None, Physic

return commit_id, {}


@update()
def update_(event: Mapping[str, Any], _context: Any, create_pr=False) -> Tuple[PhysicalResourceId, Data]: #pylint: disable=R0912, R0915
update_event = UpdateEvent(**event)
@@ -268,9 +287,11 @@ def update_(event: Mapping[str, Any], _context: Any, create_pr=False) -> Tuple[P
def delete_(_event, _context):
pass


def repo_arn_to_name(repo_arn: str) -> str:
return repo_arn.split(":")[-1]


def get_files_to_delete(repo_name: str) -> List[FileToDelete]:
differences = CC_CLIENT.get_differences(
repositoryName=repo_name,
@@ -284,8 +305,12 @@ def get_files_to_delete(repo_name: str) -> List[FileToDelete]:
if not CONFIG_FILE_REGEX.match(file['afterBlob']['path'])
]

-# 31: trimming off /var/task/pipelines_repository so we can compare correctly
-blobs = [str(filename)[31:] for filename in Path('/var/task/pipelines_repository/').rglob('*')]
+# 31: trimming off /var/task/pipelines_repository so
+# we can compare correctly
+blobs = [
+    str(filename)[31:]
+    for filename in Path('/var/task/pipelines_repository/').rglob('*')
+]
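For clarity, the 31 in the slice is simply the length of the prefix being stripped; a quick check (the sample file name is hypothetical):

prefix = '/var/task/pipelines_repository/'
len(prefix)  # 31
str(Path(prefix + 'deployment_map.yml'))[31:]  # 'deployment_map.yml'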

return [
FileToDelete(
@@ -297,13 +322,23 @@ def get_files_to_delete(repo_name: str) -> List[FileToDelete]:
]


def determine_file_mode(entry, directoryName):
if str(get_relative_name(entry, directoryName)) in EXECUTABLE_FILES:
return FileMode.EXECUTABLE

return FileMode.NORMAL


def get_files_to_commit(directoryName: str) -> List[FileToCommit]:
path = HERE / directoryName

return [
FileToCommit(
str(get_relative_name(entry, directoryName)),
-FileMode.NORMAL if not os.access(entry, os.X_OK) else FileMode.EXECUTABLE,
+determine_file_mode(
+    entry,
+    directoryName,
+),
entry.read_bytes(),
)
for entry in path.glob("**/*")
@@ -332,4 +367,5 @@ def create_adf_config_file(props: CustomResourceProperties) -> FileToCommit:

with open("/tmp/adfconfig.yml", mode="wb") as file:
file.write(adf_config)

return FileToCommit("adfconfig.yml", FileMode.NORMAL, adf_config)
@@ -0,0 +1,3 @@
[pytest]
testpaths = tests
norecursedirs = pipelines_repository
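A minimal sketch of how this configuration would typically be exercised; the programmatic pytest.main call below is an assumption for illustration, while the Makefile above uses the equivalent command line:

import pytest

# With no explicit path arguments, pytest picks up testpaths = tests from
# this ini file and does not recurse into the bundled pipelines_repository content.
pytest.main(["-vvv", "-s", "-c", "pytest.ini"])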
@@ -3,9 +3,14 @@

# pylint: skip-file

from pathlib import Path
import pytest
from mock import Mock, patch
from initial_commit import (
EXECUTABLE_FILES,
FileMode,
FileToDelete,
determine_file_mode,
get_files_to_delete,
)

@@ -34,6 +39,10 @@
'pipeline_types/cc-cloudformation.yml.j2',
'cc-cloudformation.yml.j2',
]
SHOULD_NOT_BE_EXECUTABLE = [
"README.md",
"deployment_map.yml",
]


class GenericPathMocked():
@@ -97,3 +106,23 @@ def test_get_files_to_delete(cc_client, path_cls):
# Should delete all other
assert all(x in result_paths for x in SHOULD_DELETE_PATHS)
assert len(result_paths) == len(SHOULD_DELETE_PATHS)


@pytest.mark.parametrize("entry", SHOULD_NOT_BE_EXECUTABLE)
def test_determine_file_mode_normal(entry):
base_path = "test"
new_entry = f"/some/{base_path}/{entry}"
assert determine_file_mode(
Path(new_entry),
base_path,
) == FileMode.NORMAL


@pytest.mark.parametrize("entry", EXECUTABLE_FILES)
def test_determine_file_mode_executable(entry):
base_path = "test"
new_entry = f"/some/{base_path}/{entry}"
assert determine_file_mode(
Path(new_entry),
base_path,
) == FileMode.EXECUTABLE
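Note that EXECUTABLE_FILES is introduced as an empty list in this change, so the parametrized executable-mode test above does not generate any concrete cases yet. A minimal sketch of what one case would look like once an entry exists (the script name below is purely hypothetical):

# Assuming EXECUTABLE_FILES contained "adf-build/some_script.sh":
assert determine_file_mode(
    Path("/some/test/adf-build/some_script.sh"),
    "test",
) == FileMode.EXECUTABLE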
Empty file (×8)