@@ -512,6 +512,11 @@ Resources:
Resource:
- !Sub arn:${AWS::Partition}:codepipeline:${AWS::Region}:${AWS::AccountId}:webhook:adf-webhook-*
- !Sub arn:${AWS::Partition}:codepipeline:${AWS::Region}:${AWS::AccountId}:${PipelinePrefix}*
- Effect: Allow
Action:
- "codepipeline:ListPipelineExecutions"
Resource:
- !Sub arn:${AWS::Partition}:codepipeline:${AWS::Region}:${AWS::AccountId}:aws-deployment-framework-pipelines
- Effect: Allow
Action:
- "codestar-connections:GetConnection"
@@ -775,8 +780,9 @@ Resources:
- mkdir -p deployment_maps
build:
commands:
- python adf-build/helpers/sync_to_s3.py --metadata adf_version=${!ADF_VERSION} --upload-with-metadata execution_id=${!CODEPIPELINE_EXECUTION_ID} deployment_map.yml s3://$ADF_PIPELINES_BUCKET/deployment_map.yml
- python adf-build/helpers/sync_to_s3.py --extension .yml --extension .yaml --metadata adf_version=${!ADF_VERSION} --upload-with-metadata execution_id=${!CODEPIPELINE_EXECUTION_ID} --recursive deployment_maps s3://$ADF_PIPELINES_BUCKET/deployment_maps
- python adf-build/helpers/describe_codepipeline_trigger.py --should-match StartPipelineExecution aws-deployment-framework-pipelines ${!CODEPIPELINE_EXECUTION_ID} && EXTRA_OPTS="--force" || EXTRA_OPTS=""
- python adf-build/helpers/sync_to_s3.py ${!EXTRA_OPTS} --metadata adf_version=${!ADF_VERSION} --upload-with-metadata execution_id=${!CODEPIPELINE_EXECUTION_ID} deployment_map.yml s3://$ADF_PIPELINES_BUCKET/deployment_map.yml
- python adf-build/helpers/sync_to_s3.py ${!EXTRA_OPTS} --extension .yml --extension .yaml --metadata adf_version=${!ADF_VERSION} --upload-with-metadata execution_id=${!CODEPIPELINE_EXECUTION_ID} --recursive deployment_maps s3://$ADF_PIPELINES_BUCKET/deployment_maps
post_build:
commands:
- echo "Pipelines are updated in the AWS Step Functions ADFPipelineManagementStateMachine."
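The new build commands above only pass --force to sync_to_s3.py when the helper reports that the execution was started manually. A rough Python equivalent of that shell one-liner, for illustration only: the build_sync_extra_opts name is made up, while the helper path, its flags, and the exit-code convention come from the buildspec and the helper's docstring below.

import subprocess


def build_sync_extra_opts(pipeline_name: str, execution_id: str) -> list:
    # Exit code 0 means the trigger matched StartPipelineExecution,
    # i.e. the pipeline was released manually, so force a full re-upload.
    result = subprocess.run(
        [
            "python", "adf-build/helpers/describe_codepipeline_trigger.py",
            "--should-match", "StartPipelineExecution",
            pipeline_name, execution_id,
        ],
        check=False,
    )
    return ["--force"] if result.returncode == 0 else []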
adf-build/helpers/describe_codepipeline_trigger.py (new file)
@@ -0,0 +1,155 @@
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
"""
Describe CodePipeline trigger.

This script retrieves the trigger for the given CodePipeline execution.
It can also match the trigger against a specific type that you expect and
exit with a non-zero code if it does not match.

Usage:
describe_codepipeline_trigger.py
[--should-match <trigger_type>]
[--json]
[-v... | --verbose...]
CODEPIPELINE_NAME
EXECUTION_ID

describe_codepipeline_trigger.py -h | --help

describe_codepipeline_trigger.py --version

Arguments:
CODEPIPELINE_NAME
The CodePipeline name of the pipeline to check.

EXECUTION_ID
The CodePipeline Execution Id that we want to check.

Options:
-h, --help Show this help message.

--json Return the trigger type and details as a JSON object.
An example object:
{
"trigger_type": "StartPipelineExecution or other",
"trigger_detail": "..."
}

--should-match <trigger_type>
When set, it will stop with exit code 0 if it matches the
expected trigger. If it does not match the trigger, it will
stop with exit code 1.
Trigger type can be: 'CreatePipeline',
'StartPipelineExecution', 'PollForSourceChanges', 'Webhook',
'CloudWatchEvent', or 'PutActionRevision'.

-v, --verbose
Show verbose logging information.
"""

import os
import sys
from typing import Any, Optional, TypedDict
import json
import logging
import boto3
from docopt import docopt


ADF_VERSION = os.environ.get("ADF_VERSION")
ADF_LOG_LEVEL = os.environ.get("ADF_LOG_LEVEL", "INFO")

logging.basicConfig(level=logging.ERROR)
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(ADF_LOG_LEVEL)


class TriggerData(TypedDict):
"""
Trigger Data Class.
"""
trigger_type: str
trigger_detail: str


def fetch_codepipeline_execution_trigger(
cp_client: Any,
codepipeline_name: str,
execution_id: str,
) -> Optional[TriggerData]:
"""
Fetch the CodePipeline Execution Trigger that matches
the requested parameters.

Args:
cp_client (boto3.client): The CodePipeline Boto3 client.

codepipeline_name (str): The CodePipeline name.

execution_id (str): The CodePipeline Execution id.

Returns:
TriggerData: The trigger type and trigger detail if found.

None: if it was not found.
"""
paginator = cp_client.get_paginator('list_pipeline_executions')
response_iterator = paginator.paginate(pipelineName=codepipeline_name)
for page in response_iterator:
for execution in page['pipelineExecutionSummaries']:
if execution['pipelineExecutionId'] == execution_id:
return {
"trigger_type": execution['trigger']['triggerType'],
"trigger_detail": execution['trigger']['triggerDetail'],
}
return None


def main():
"""Main function to describe the codepipeline trigger """
options = docopt(__doc__, version=ADF_VERSION, options_first=True)

# In case the user asked for verbose logging, increase
# the log level to debug.
if options["--verbose"] > 0:
LOGGER.setLevel(logging.DEBUG)
if options["--verbose"] > 1:
logging.basicConfig(level=logging.INFO)
if options["--verbose"] > 2:
# Also enable DEBUG mode for other libraries, like boto3
logging.basicConfig(level=logging.DEBUG)

LOGGER.debug("Input arguments: %s", options)

codepipeline_name = options.get('CODEPIPELINE_NAME')
execution_id = options.get('EXECUTION_ID')
should_match_type = options.get('--should-match')
output_in_json = options.get('--json')

cp_client = boto3.client("codepipeline")
trigger = fetch_codepipeline_execution_trigger(
cp_client,
codepipeline_name,
execution_id,
)

if trigger is None:
LOGGER.error(
"Could not find execution %s in the %s pipeline.",
execution_id,
codepipeline_name,
)
sys.exit(2)

if output_in_json:
print(json.dumps(trigger))
else:
print(trigger['trigger_type'])

if should_match_type and trigger['trigger_type'] != should_match_type:
sys.exit(1)


if __name__ == "__main__":
main()
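The fetch helper can also be used directly from Python instead of through the CLI. A minimal sketch, assuming the module is importable as describe_codepipeline_trigger and using a made-up execution id; the pipeline name is the one used elsewhere in this change.

import boto3

from describe_codepipeline_trigger import fetch_codepipeline_execution_trigger

cp_client = boto3.client("codepipeline")
trigger = fetch_codepipeline_execution_trigger(
    cp_client,
    "aws-deployment-framework-pipelines",
    "00000000-0000-0000-0000-000000000000",  # hypothetical execution id
)
if trigger is None:
    print("Execution not found in this pipeline")
else:
    # e.g. {"trigger_type": "StartPipelineExecution", "trigger_detail": "..."}
    print(trigger["trigger_type"], trigger["trigger_detail"])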
adf-build/helpers/sync_to_s3.py
@@ -16,6 +16,7 @@
[-e <extension> | --extension <extension>]...
[--metadata <key>=<value>]...
[--upload-with-metadata <key>=<value>]...
[-f | --force]
[--]
SOURCE_PATH DESTINATION_S3_URL

@@ -78,6 +79,13 @@
to match '.yml', it will not delete the README.md file as its
extension is not a match.

-f, --force
Force uploading of the files that need to be synced, regardless of
whether the target metadata matches the local one.
This also ignores the hash comparison. This is useful if you
want to force uploading a new copy of all local files to the
destination S3 bucket.

-h, --help Show this help message.

--metadata <key>=<value>
@@ -508,13 +516,31 @@ def _get_s3_object_data(s3_client, s3_bucket, key):
return None


def _get_upload_reason(
object_is_missing: bool,
content_changed: bool,
force: bool,
) -> str:
if object_is_missing:
return "object does not exist yet"

if content_changed:
return "file content changed"

if force:
return "forced to update"

return "metadata changed"


def upload_changed_files(
s3_client: any,
s3_bucket: str,
s3_prefix: str,
local_files: Mapping[str, LocalFileData],
s3_objects: Mapping[str, S3ObjectData],
metadata_to_check: MetadataToCheck,
force: bool,
):
"""
Upload changed files, by looping over the local files found and checking
@@ -539,6 +565,9 @@

metadata_to_check (MetadataToCheck): The metadata that needs to be
applied all the time and upon upload only.

force (bool): Whether to force uploading of files, even when the
metadata and hash data match.
"""
for key, local_file in local_files.items():
s3_file = s3_objects.get(key)
@@ -554,7 +583,7 @@
s3_metadata.items(),
)) != metadata_to_check["always_apply"]
)
if (object_is_missing or content_changed or metadata_changed):
if (force or object_is_missing or content_changed or metadata_changed):
with open(local_file.get("file_path"), "rb") as file_pointer:
s3_key = convert_to_s3_key(key, s3_prefix)

@@ -563,12 +592,10 @@
local_file.get("file_path"),
s3_bucket,
s3_key,
(
"object does not exist yet" if object_is_missing
else (
"file content changed" if content_changed
else "metadata changed"
)
_get_upload_reason(
object_is_missing,
content_changed,
force,
),
)
s3_client.put_object(
@@ -784,6 +811,7 @@ def sync_files(
recursive: bool,
delete: bool,
metadata_to_check: MetadataToCheck,
force: bool,
):
"""
Sync files using the S3 client from the local_path, matching the local_glob
@@ -810,6 +838,9 @@

metadata_to_check (MetadataToCheck): The metadata that needs to be
applied all the time and upon upload only.

force (bool): Whether to force uploading of files, even when the
metadata and hash data match.
"""
s3_url_details = urlparse(s3_url)
s3_bucket = s3_url_details.netloc
@@ -841,6 +872,7 @@
local_files,
s3_objects,
metadata_to_check,
force,
)
if delete:
delete_stale_objects(
@@ -872,6 +904,7 @@ def main(): # pylint: disable=R0915
s3_url = options.get('DESTINATION_S3_URL')
recursive = options.get('--recursive', False)
delete = options.get('--delete', False)
force = options.get('--force', False)

# Convert metadata key and value lists into a dictionary
metadata_to_check: MetadataToCheck = {
@@ -900,6 +933,7 @@
recursive,
delete,
metadata_to_check,
force,
)
LOGGER.info("All done.")
