7 changes: 6 additions & 1 deletion .mega-linter.yml
@@ -10,6 +10,7 @@ APPLY_FIXES: none
# If you use ENABLE_LINTERS variable, all other linters will be disabled by default
ENABLE_LINTERS:
- BASH_EXEC
- BASH_SHFMT
- CLOUDFORMATION_CFN_LINT
- DOCKERFILE_HADOLINT
- EDITORCONFIG_EDITORCONFIG_CHECKER
@@ -30,16 +31,20 @@ FILEIO_REPORTER: false

# Install plugin for list handling.
JSON_PRETTIER_PRE_COMMANDS:
- command: "npm install [email protected].0"
- command: "npm install [email protected].4"
cwd: "workspace"

CLOUDFORMATION_CFN_LINT_CONFIG_FILE: '.cfnlintrc'
CLOUDFORMATION_CFN_LINT_FILE_EXTENSIONS: [".yml", ".yaml"]

EDITORCONFIG_EDITORCONFIG_CHECKER_CONFIG_FILE: '.ecrc.json'

MARKDOWN_MARKDOWN_LINK_CHECK_ARGUMENTS: '-q'
MARKDOWN_MARKDOWNLINT_DISABLE_ERRORS: false

SPELL_CSPELL_ARGUMENTS: '--gitignore --no-progress --show-suggestions'
SPELL_CSPELL_FILE_EXTENSIONS: ["*"]

TERRAFORM_TFLINT_UNSECURED_ENV_VARIABLES:
- GITHUB_TOKEN

2 changes: 1 addition & 1 deletion samples/sample-ec2-java-app-codedeploy/scripts/start.sh
@@ -2,4 +2,4 @@

cd /home/ec2-user/server
sudo /usr/bin/java -jar -Dserver.port=80 \
*.jar > /dev/null 2> /dev/null < /dev/null &
*.jar >/dev/null 2>/dev/null </dev/null &
Collaborator:

Might be me, but personally I like the space after the > sign.
It makes things more readable I think. What are your thoughts?
And can we change this setting?

A bit surprising that it does enforce spaces around |.

Contributor Author:

I agree with you. Adding space_redirects = true to the .editorconfig to fix this.
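
For reference, a minimal sketch of that .editorconfig change (shfmt v3 reads its formatting options from .editorconfig; the [*.sh] section and indent keys below are assumptions for illustration, only space_redirects comes from this thread):

[*.sh]
indent_style = space
indent_size = 2
# shfmt-specific key: keep a space after redirect operators, e.g. '> /dev/null'
space_redirects = true

With that set, shfmt would keep the redirects as *.jar > /dev/null 2> /dev/null < /dev/null &, matching the preference above. Spaces around | are always enforced by shfmt's formatter, which is why the pipe spacing is not configurable the same way.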

2 changes: 1 addition & 1 deletion samples/sample-ec2-java-app-codedeploy/scripts/validate.sh
@@ -4,7 +4,7 @@ echo "Waiting for 15 seconds before checking health.."
sleep 15

status_code=$(curl --write-out %{http_code} --silent --output /dev/null http://localhost:80)
if [[ "$status_code" -ne 200 ]] ; then
if [[ "$status_code" -ne 200 ]]; then
echo "App is not healthy - $status_code"
exit 1
else
33 changes: 16 additions & 17 deletions samples/sample-ec2-with-codedeploy/scripts/install-codedeploy.sh
@@ -4,10 +4,10 @@ set -xe

## Code Deploy Agent Bootstrap Script ##

exec > >(tee /var/log/user-data.log|logger -t user-data -s 2>/dev/console) 2>&1
exec > >(tee /var/log/user-data.log | logger -t user-data -s 2>/dev/console) 2>&1
AUTOUPDATE=false

function installdep(){
function installdep() {
if [ ${PLAT} = "ubuntu" ]; then
apt-get -y update
# Satisfying even Ubuntu older versions.
@@ -18,7 +18,7 @@ function installdep(){
fi
}

function platformize(){
function platformize() {
# Linux OS detection
if hash lsb_release; then
echo "Ubuntu server OS detected"
@@ -32,21 +32,20 @@ function platformize(){
fi
}


function execute(){
function execute() {
if [ ${PLAT} = "ubuntu" ]; then
cd /tmp/
wget https://aws-codedeploy-${REGION}.s3.${REGION}.amazonaws.com/latest/install
chmod +x ./install

if ./install auto; then
echo "Installation completed"
if ! ${AUTOUPDATE}; then
echo "Disabling Auto Update"
sed -i '/@reboot/d' /etc/cron.d/codedeploy-agent-update
chattr +i /etc/cron.d/codedeploy-agent-update
rm -f /tmp/install
fi
if ! ${AUTOUPDATE}; then
echo "Disabling Auto Update"
sed -i '/@reboot/d' /etc/cron.d/codedeploy-agent-update
chattr +i /etc/cron.d/codedeploy-agent-update
rm -f /tmp/install
fi
exit 0
else
echo "Installation script failed, please investigate"
@@ -61,12 +60,12 @@

if ./install auto; then
echo "Installation completed"
if ! ${AUTOUPDATE}; then
echo "Disabling auto update"
sed -i '/@reboot/d' /etc/cron.d/codedeploy-agent-update
chattr +i /etc/cron.d/codedeploy-agent-update
rm -f /tmp/install
fi
if ! ${AUTOUPDATE}; then
echo "Disabling auto update"
sed -i '/@reboot/d' /etc/cron.d/codedeploy-agent-update
chattr +i /etc/cron.d/codedeploy-agent-update
rm -f /tmp/install
fi
exit 0
else
echo "Installation script failed, please investigate"
2 changes: 1 addition & 1 deletion samples/sample-ec2-with-codedeploy/scripts/install-deps.sh
@@ -34,7 +34,7 @@ echo "<VirtualHost *:80>
ProxyRequests Off
ProxyPass / http://localhost:8080/
ProxyPassReverse / http://localhost:8080/
</VirtualHost>" >> sudo /etc/httpd/conf/httpd.conf
</VirtualHost>" /etc/httpd/conf/httpd.conf >>sudo
Collaborator:

This would not work I think, would it?

Contributor Author:

That does look odd... Let me move this to draft and investigate
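
For context, a minimal sketch of what the shell actually does with that line (safe to try in any scratch directory):

# '>> sudo' is parsed as an append-redirect to a file literally named 'sudo'
# in the working directory; the config path becomes just another echo argument.
echo "<VirtualHost *:80> ..." >> sudo /etc/httpd/conf/httpd.conf
# Result: ./sudo now contains the text plus the path; httpd.conf is untouched,
# and nothing ever runs with elevated privileges.

So the pre-shfmt version was already broken; the formatter's reordering only made the misparse more visible.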

Contributor Author:

I modified this as follows:

# Append VirtualHost configuration
echo "
<VirtualHost *:80>
  ProxyRequests Off
  ProxyPass / http://localhost:8080/
  ProxyPassReverse / http://localhost:8080/
</VirtualHost>
" | sudo tee -a /etc/httpd/conf/httpd.conf >/dev/null


# start the httpd service now and stop it until userdata
sudo service httpd start
3 changes: 2 additions & 1 deletion samples/sample-fargate-node-app/build/docker.sh
@@ -11,4 +11,5 @@ docker tag $REPOSITORY_URI:latest $REPOSITORY_URI:$IMAGE_TAG
docker push $REPOSITORY_URI:latest
docker push $REPOSITORY_URI:$IMAGE_TAG

tmp=$(mktemp); jq --arg REPOSITORY_URI "$REPOSITORY_URI" --arg IMAGE_TAG "$IMAGE_TAG" '.Parameters.Image = $REPOSITORY_URI+":"+$IMAGE_TAG' params/global.json > "$tmp" && mv "$tmp" params/global.json
tmp=$(mktemp)
jq --arg REPOSITORY_URI "$REPOSITORY_URI" --arg IMAGE_TAG "$IMAGE_TAG" '.Parameters.Image = $REPOSITORY_URI+":"+$IMAGE_TAG' params/global.json >"$tmp" && mv "$tmp" params/global.json
@@ -11,16 +11,15 @@ set -e
SKIP_BUILD=0

# Walk through the options passed to this script
for i in "$@"
do
for i in "$@"; do
case $i in
--no-build)
SKIP_BUILD=1
;;
*)
echo "Unknown option: $i"
exit 1
;;
--no-build)
SKIP_BUILD=1
;;
*)
echo "Unknown option: $i"
exit 1
;;
esac
done

@@ -34,16 +33,15 @@

# Get list of regions supported by this application
echo "Determine which regions need to be prepared"
app_regions=`aws ssm get-parameters --names /adf/deployment/$ADF_DEPLOYMENT_MAP_SOURCE/$ADF_PROJECT_NAME/regions --with-decryption --output=text --query='Parameters[0].Value'`
app_regions=$(aws ssm get-parameters --names /adf/deployment/$ADF_DEPLOYMENT_MAP_SOURCE/$ADF_PROJECT_NAME/regions --with-decryption --output=text --query='Parameters[0].Value')
# Convert json list to bash list (space delimited regions)
regions="`echo $app_regions | sed -e 's/\[\([^]]*\)\]/\1/g' | sed 's/,/ /g' | sed "s/'//g"`"
regions="$(echo $app_regions | sed -e 's/\[\([^]]*\)\]/\1/g' | sed 's/,/ /g' | sed "s/'//g")"

for region in $regions
do
for region in $regions; do
if [ $CONTAINS_TRANSFORM ]; then
echo "Packaging templates for region $region"
ssm_bucket_name="/adf/cross_region/s3_regional_bucket/$region"
bucket=`aws ssm get-parameters --names $ssm_bucket_name --with-decryption --output=text --query='Parameters[0].Value'`
bucket=$(aws ssm get-parameters --names $ssm_bucket_name --with-decryption --output=text --query='Parameters[0].Value')
sam package --s3-bucket $bucket --output-template-file $CODEBUILD_SRC_DIR/template_$region.yml --region $region
else
# If package is not needed, just copy the file for each region
@@ -5,123 +5,111 @@ CURRENT=$(pwd)
terraform --version
echo "Terraform stage: $TF_STAGE"

tfinit(){
# retrieve regional S3 bucket name from parameter store
S3_BUCKET_REGION_NAME=$(aws ssm get-parameter --name "/adf/cross_region/s3_regional_bucket/$AWS_REGION" --region "$AWS_DEFAULT_REGION" | jq .Parameter.Value | sed s/\"//g)
mkdir -p "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
cd "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}" || exit
cp -R "${CURRENT}"/tf/. "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
# if account related variables exist copy the folder in the work directory
if [ -d "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}" ]; then
cp -R "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}/." "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
fi
if [ -d "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}/${AWS_REGION}" ]; then
cp -R "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}/${AWS_REGION}"/. "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
fi
if [ -f "${CURRENT}/tfvars/global.auto.tfvars" ]; then
cp -R "${CURRENT}/tfvars/global.auto.tfvars" "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
fi
terraform init \
-backend-config "bucket=$S3_BUCKET_REGION_NAME" \
-backend-config "region=$AWS_REGION" \
-backend-config "key=$ADF_PROJECT_NAME/$ACCOUNT_ID.tfstate" \
-backend-config "dynamodb_table=adf-tflocktable"
tfinit() {
# retrieve regional S3 bucket name from parameter store
S3_BUCKET_REGION_NAME=$(aws ssm get-parameter --name "/adf/cross_region/s3_regional_bucket/$AWS_REGION" --region "$AWS_DEFAULT_REGION" | jq .Parameter.Value | sed s/\"//g)
mkdir -p "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
cd "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}" || exit
cp -R "${CURRENT}"/tf/. "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
# if account related variables exist copy the folder in the work directory
if [ -d "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}" ]; then
cp -R "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}/." "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
fi
if [ -d "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}/${AWS_REGION}" ]; then
cp -R "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}/${AWS_REGION}"/. "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
fi
if [ -f "${CURRENT}/tfvars/global.auto.tfvars" ]; then
cp -R "${CURRENT}/tfvars/global.auto.tfvars" "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
fi
terraform init \
-backend-config "bucket=$S3_BUCKET_REGION_NAME" \
-backend-config "region=$AWS_REGION" \
-backend-config "key=$ADF_PROJECT_NAME/$ACCOUNT_ID.tfstate" \
-backend-config "dynamodb_table=adf-tflocktable"

echo "Bucket: $S3_BUCKET_REGION_NAME"
echo "Region: $AWS_REGION"
echo "Key: $ADF_PROJECT_NAME/$ACCOUNT_ID.tfstate"
echo "DynamoDB table: adf-tflocktable"
echo "Bucket: $S3_BUCKET_REGION_NAME"
echo "Region: $AWS_REGION"
echo "Key: $ADF_PROJECT_NAME/$ACCOUNT_ID.tfstate"
echo "DynamoDB table: adf-tflocktable"
}
tfplan(){
DATE=$(date +%Y-%m-%d)
TS=$(date +%Y%m%d%H%M%S)
bash "${CURRENT}/adf-build/helpers/sts.sh" "${TF_VAR_TARGET_ACCOUNT_ID}" "${TF_VAR_TARGET_ACCOUNT_ROLE}"
set -o pipefail
terraform plan -out "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}" 2>&1 | tee -a "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-${TS}.log"
set +o pipefail
# Save Terraform plan results to the S3 bucket
aws s3 cp "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-${TS}.log" "s3://${S3_BUCKET_REGION_NAME}/${ADF_PROJECT_NAME}/tf-plan/${DATE}/${TF_VAR_TARGET_ACCOUNT_ID}/${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-${TS}.log"
echo "Path to terraform plan s3://$S3_BUCKET_REGION_NAME/$ADF_PROJECT_NAME/tf-plan/$DATE/$TF_VAR_TARGET_ACCOUNT_ID/$ADF_PROJECT_NAME-$TF_VAR_TARGET_ACCOUNT_ID-$TS.log"
tfplan() {
DATE=$(date +%Y-%m-%d)
TS=$(date +%Y%m%d%H%M%S)
bash "${CURRENT}/adf-build/helpers/sts.sh" "${TF_VAR_TARGET_ACCOUNT_ID}" "${TF_VAR_TARGET_ACCOUNT_ROLE}"
set -o pipefail
terraform plan -out "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}" 2>&1 | tee -a "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-${TS}.log"
set +o pipefail
# Save Terraform plan results to the S3 bucket
aws s3 cp "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-${TS}.log" "s3://${S3_BUCKET_REGION_NAME}/${ADF_PROJECT_NAME}/tf-plan/${DATE}/${TF_VAR_TARGET_ACCOUNT_ID}/${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-${TS}.log"
echo "Path to terraform plan s3://$S3_BUCKET_REGION_NAME/$ADF_PROJECT_NAME/tf-plan/$DATE/$TF_VAR_TARGET_ACCOUNT_ID/$ADF_PROJECT_NAME-$TF_VAR_TARGET_ACCOUNT_ID-$TS.log"
}
tfapply(){
terraform apply "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}"
tfapply() {
terraform apply "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}"
}
tfplandestroy(){
terraform plan -destroy -out "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-destroy"
tfplandestroy() {
terraform plan -destroy -out "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-destroy"
}
tfdestroy(){
terraform apply "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-destroy"
tfdestroy() {
terraform apply "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-destroy"
}
tfrun(){
export TF_VAR_TARGET_ACCOUNT_ID=$ACCOUNT_ID
echo "Running terraform $TF_STAGE on account $ACCOUNT_ID and region $REGION"
if [[ "$TF_STAGE" = "init" ]]
then
set -e
tfinit
set +e
elif [[ "$TF_STAGE" = "plan" ]]
then
set -e
tfinit
tfplan
set +e
elif [[ "$TF_STAGE" = "apply" ]]
then
set -e
tfinit
tfplan
tfapply
set +e
elif [[ "$TF_STAGE" = "destroy" ]]
then
set -e
tfinit
tfplandestroy
tfdestroy
set +e
else
echo "Invalid Terraform stage: TF_STAGE = $TF_STAGE"
exit 1
fi
tfrun() {
export TF_VAR_TARGET_ACCOUNT_ID=$ACCOUNT_ID
echo "Running terraform $TF_STAGE on account $ACCOUNT_ID and region $REGION"
if [[ "$TF_STAGE" = "init" ]]; then
set -e
tfinit
set +e
elif [[ "$TF_STAGE" = "plan" ]]; then
set -e
tfinit
tfplan
set +e
elif [[ "$TF_STAGE" = "apply" ]]; then
set -e
tfinit
tfplan
tfapply
set +e
elif [[ "$TF_STAGE" = "destroy" ]]; then
set -e
tfinit
tfplandestroy
tfdestroy
set +e
else
echo "Invalid Terraform stage: TF_STAGE = $TF_STAGE"
exit 1
fi
}

# if REGIONS is not defined as pipeline parameters use default region
if [[ -z "$REGIONS" ]]
then
REGIONS=$AWS_DEFAULT_REGION
if [[ -z "$REGIONS" ]]; then
REGIONS=$AWS_DEFAULT_REGION
fi
echo "List of target regions: $REGIONS"
for REGION in $(echo "$REGIONS" | sed "s/,/ /g")
do
AWS_REGION=$(echo -n "$REGION" | sed 's/^[ \t]*//;s/[ \t]*$//') # sed trims whitespaces
export TF_VAR_TARGET_REGION=$AWS_REGION
# if TARGET_ACCOUNTS and TARGET_OUS are not defined apply to all accounts
if [[ -z "$TARGET_ACCOUNTS" ]] && [[ -z "$TARGET_OUS" ]]
then
echo "Apply to all accounts"
for ACCOUNT_ID in $(jq '.[].AccountId' "${CURRENT}/accounts.json" | sed 's/"//g' )
do
tfrun
done
fi
for REGION in $(echo "$REGIONS" | sed "s/,/ /g"); do
AWS_REGION=$(echo -n "$REGION" | sed 's/^[ \t]*//;s/[ \t]*$//') # sed trims whitespaces
export TF_VAR_TARGET_REGION=$AWS_REGION
# if TARGET_ACCOUNTS and TARGET_OUS are not defined apply to all accounts
if [[ -z "$TARGET_ACCOUNTS" ]] && [[ -z "$TARGET_OUS" ]]; then
echo "Apply to all accounts"
for ACCOUNT_ID in $(jq '.[].AccountId' "${CURRENT}/accounts.json" | sed 's/"//g'); do
tfrun
done
fi

if ! [[ -z "$TARGET_ACCOUNTS" ]]
then
# apply only on a subset of accounts (TARGET_ACCOUNTS)
echo "List of target account: $TARGET_ACCOUNTS"
for ACCOUNT_ID in $(echo "$TARGET_ACCOUNTS" | sed "s/,/ /g")
do
tfrun
done
fi
if ! [[ -z "$TARGET_ACCOUNTS" ]]; then
# apply only on a subset of accounts (TARGET_ACCOUNTS)
echo "List of target account: $TARGET_ACCOUNTS"
for ACCOUNT_ID in $(echo "$TARGET_ACCOUNTS" | sed "s/,/ /g"); do
tfrun
done
fi

if ! [[ -z "$TARGET_OUS" ]]
then
echo "List target OUs: $TARGET_OUS"
for ACCOUNT_ID in $(jq '.[].AccountId' "${CURRENT}/accounts_from_ous.json" | sed 's/"//g' )
do
tfrun
done
fi
if ! [[ -z "$TARGET_OUS" ]]; then
echo "List target OUs: $TARGET_OUS"
for ACCOUNT_ID in $(jq '.[].AccountId' "${CURRENT}/accounts_from_ous.json" | sed 's/"//g'); do
tfrun
done
fi
done