diff --git a/bucket.tf b/bucket.tf
index 35e55e5c..37958ee9 100644
--- a/bucket.tf
+++ b/bucket.tf
@@ -5,8 +5,8 @@
locals {
use_external_bucket = var.use_external_audit_log_bucket
- audit_log_bucket_id = local.use_external_bucket ? data.aws_s3_bucket.external[0].id : module.audit_log_bucket.this_bucket.id
- audit_log_bucket_arn = local.use_external_bucket ? data.aws_s3_bucket.external[0].arn : module.audit_log_bucket.this_bucket.arn
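+ # The audit_log_bucket module now uses count, so its single instance is addressed as module.audit_log_bucket[0].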
+ audit_log_bucket_id = local.use_external_bucket ? data.aws_s3_bucket.external[0].id : module.audit_log_bucket[0].this_bucket.id
+ audit_log_bucket_arn = local.use_external_bucket ? data.aws_s3_bucket.external[0].arn : module.audit_log_bucket[0].this_bucket.arn
audit_log_cloudtrail_destination = join("/", [local.audit_log_bucket_arn, trim(var.cloudtrail_s3_key_prefix, "/")])
audit_log_config_destination = join("/", [local.audit_log_bucket_arn, trim(var.config_s3_bucket_key_prefix, "/")])
@@ -31,6 +31,7 @@ data "aws_s3_bucket" "external" {
# --------------------------------------------------------------------------------------------------
module "audit_log_bucket" {
+ count = local.use_external_bucket ? 0 : 1
source = "./modules/secure-bucket"
bucket_name = var.audit_log_bucket_name
@@ -38,7 +39,6 @@ module "audit_log_bucket" {
log_bucket_name = "${var.audit_log_bucket_name}-access-logs"
lifecycle_glacier_transition_days = var.audit_log_lifecycle_glacier_transition_days
force_destroy = var.audit_log_bucket_force_destroy
- enabled = !local.use_external_bucket
tags = var.tags
@@ -58,8 +58,8 @@ data "aws_iam_policy_document" "audit_log_base" {
actions = ["s3:*"]
effect = "Deny"
resources = [
- module.audit_log_bucket.this_bucket.arn,
- "${module.audit_log_bucket.this_bucket.arn}/*"
+ module.audit_log_bucket[0].this_bucket.arn,
+ "${module.audit_log_bucket[0].this_bucket.arn}/*"
]
condition {
test = "Bool"
@@ -87,7 +87,7 @@ data "aws_iam_policy_document" "audit_log_cloud_trail" {
type = "Service"
identifiers = ["cloudtrail.amazonaws.com"]
}
- resources = [module.audit_log_bucket.this_bucket.arn]
+ resources = [module.audit_log_bucket[0].this_bucket.arn]
}
statement {
@@ -123,7 +123,7 @@ data "aws_iam_policy_document" "audit_log_config" {
type = "Service"
identifiers = ["config.amazonaws.com"]
}
- resources = [module.audit_log_bucket.this_bucket.arn]
+ resources = [module.audit_log_bucket[0].this_bucket.arn]
}
statement {
@@ -133,7 +133,7 @@ data "aws_iam_policy_document" "audit_log_config" {
type = "Service"
identifiers = ["config.amazonaws.com"]
}
- resources = [module.audit_log_bucket.this_bucket.arn]
+ resources = [module.audit_log_bucket[0].this_bucket.arn]
}
statement {
@@ -164,7 +164,7 @@ data "aws_iam_policy_document" "audit_log_config" {
identifiers = [for account in statement.value : "arn:aws:iam::${account.account_id}:root"]
}
actions = ["s3:GetBucketAcl"]
- resources = [module.audit_log_bucket.this_bucket.arn]
+ resources = [module.audit_log_bucket[0].this_bucket.arn]
}
}
@@ -178,7 +178,7 @@ data "aws_iam_policy_document" "audit_log_config" {
identifiers = [for account in statement.value : "arn:aws:iam::${account.account_id}:root"]
}
actions = ["s3:ListBucket", "s3:GetBucketLocation"]
- resources = [module.audit_log_bucket.this_bucket.arn]
+ resources = [module.audit_log_bucket[0].this_bucket.arn]
}
}
@@ -217,7 +217,7 @@ data "aws_iam_policy_document" "audit_log_flow_logs" {
type = "Service"
identifiers = ["delivery.logs.amazonaws.com"]
}
- resources = [module.audit_log_bucket.this_bucket.arn]
+ resources = [module.audit_log_bucket[0].this_bucket.arn]
}
statement {
@@ -250,6 +250,6 @@ data "aws_iam_policy_document" "audit_log" {
resource "aws_s3_bucket_policy" "audit_log" {
count = local.use_external_bucket ? 0 : 1
- bucket = module.audit_log_bucket.this_bucket.id
+ bucket = module.audit_log_bucket[0].this_bucket.id
policy = data.aws_iam_policy_document.audit_log[0].json
}
diff --git a/docs/upgrade-1.0.md b/docs/upgrade-1.0.md
index 387c4bde..acf8981b 100644
--- a/docs/upgrade-1.0.md
+++ b/docs/upgrade-1.0.md
@@ -10,56 +10,56 @@ See [the upgrade guide for AWS provider] for more detail.
## Audit log bucket migrations
-Following configurations from `module.audit_log_bucket.aws_s3_bucket.content[0]` were extracted to separated resources.
+The following configurations were extracted from `module.audit_log_bucket[0].aws_s3_bucket.content` into separate resources.
-- `module.audit_log_bucket.aws_s3_bucket_acl.content[0]`
-- `module.audit_log_bucket.aws_s3_bucket_lifecycle_configuration.content[0]`
-- `module.audit_log_bucket.aws_s3_bucket_logging.content[0]`
-- `module.audit_log_bucket.aws_s3_bucket_server_side_encryption_configuration.content[0]`
-- `module.audit_log_bucket.aws_s3_bucket_versioning.content[0]`
+- `module.audit_log_bucket[0].aws_s3_bucket_acl.content`
+- `module.audit_log_bucket[0].aws_s3_bucket_lifecycle_configuration.content`
+- `module.audit_log_bucket[0].aws_s3_bucket_logging.content`
+- `module.audit_log_bucket[0].aws_s3_bucket_server_side_encryption_configuration.content`
+- `module.audit_log_bucket[0].aws_s3_bucket_versioning.content`
To import the current configuration into these resources, use the `terraform import` command as follows.
```sh
-$ terraform import "$MODULE_PATH.module.audit_log_bucket.aws_s3_bucket_acl.content[0]" "$AUDIT_LOG_BUCKET"
+$ terraform import "$MODULE_PATH.module.audit_log_bucket[0].aws_s3_bucket_acl.content" "$AUDIT_LOG_BUCKET"
-$ terraform import "$MODULE_PATH.module.audit_log_bucket.aws_s3_bucket_lifecycle_configuration.content[0]" "$AUDIT_LOG_BUCKET"
+$ terraform import "$MODULE_PATH.module.audit_log_bucket[0].aws_s3_bucket_lifecycle_configuration.content" "$AUDIT_LOG_BUCKET"
-$ terraform import "$MODULE_PATH.module.audit_log_bucket.aws_s3_bucket_logging.content[0]" "$AUDIT_LOG_BUCKET"
+$ terraform import "$MODULE_PATH.module.audit_log_bucket[0].aws_s3_bucket_logging.content" "$AUDIT_LOG_BUCKET"
-$ terraform import "$MODULE_PATH.module.audit_log_bucket.aws_s3_bucket_server_side_encryption_configuration.content[0]" "$AUDIT_LOG_BUCKET"
+$ terraform import "$MODULE_PATH.module.audit_log_bucket[0].aws_s3_bucket_server_side_encryption_configuration.content" "$AUDIT_LOG_BUCKET"
-$ terraform import "$MODULE_PATH.module.audit_log_bucket.aws_s3_bucket_versioning.content[0]" "$AUDIT_LOG_BUCKET"
+$ terraform import "$MODULE_PATH.module.audit_log_bucket[0].aws_s3_bucket_versioning.content" "$AUDIT_LOG_BUCKET"
```
### Notes
- `$MODULE_PATH` should be replaced with the actual path of this module in your project, e.g. `module.secure_baseline`.
-- `$AUDIT_LOG_BUCKET` should be replaced with the state bucket name. The actual value in your state file as `module.audit_log_bucket.aws_s3_bucket.content[0].id`.
+- `$AUDIT_LOG_BUCKET` should be replaced with the name of the audit log bucket. The actual value can be found in your state file as `module.audit_log_bucket[0].aws_s3_bucket.content.id` (see the example below).
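+If you are unsure of the exact addresses or bucket names, listing the relevant state entries first can help. This is a sketch; the addresses in your state depend on your configuration:
+```sh
+# Show the audit log bucket entries currently tracked in state.
+$ terraform state list | grep audit_log_bucket
+```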
## Access logging bucket migrations
-Following configurations from `module.audit_log_bucket.aws_s3_bucket.access_log[0]` were extracted to separated resources.
+The following configurations were extracted from `module.audit_log_bucket[0].aws_s3_bucket.access_log` into separate resources.
-- `module.audit_log_bucket.aws_s3_bucket_acl.access_log[0]`
-- `module.audit_log_bucket.aws_s3_bucket_lifecycle_configuration.access_log[0]`
-- `module.audit_log_bucket.aws_s3_bucket_server_side_encryption_configuration.access_log[0]`
+- `module.audit_log_bucket[0].aws_s3_bucket_acl.access_log`
+- `module.audit_log_bucket[0].aws_s3_bucket_lifecycle_configuration.access_log`
+- `module.audit_log_bucket[0].aws_s3_bucket_server_side_encryption_configuration.access_log`
These resources can be imported with the `terraform import` command as well.
```sh
-$ terraform import "$MODULE_PATH.aws_s3_bucket_acl.access_log[0]" "$ACCESS_LOG_BUCKET"
+$ terraform import "$MODULE_PATH.module.audit_log_bucket[0].aws_s3_bucket_acl.access_log" "$ACCESS_LOG_BUCKET"
-$ terraform import "$MODULE_PATH.aws_s3_bucket_lifecycle_configuration.access_log[0]" "$ACCESS_LOG_BUCKET"
+$ terraform import "$MODULE_PATH.module.audit_log_bucket[0].aws_s3_bucket_lifecycle_configuration.access_log" "$ACCESS_LOG_BUCKET"
-$ terraform import "$MODULE_PATH.aws_s3_bucket_server_side_encryption_configuration.access_log[0]" "$ACCESS_LOG_BUCKET"
+$ terraform import "$MODULE_PATH.module.audit_log_bucket[0].aws_s3_bucket_server_side_encryption_configuration.access_log" "$ACCESS_LOG_BUCKET"
```
### Notes
- `$MODULE_PATH` should be replaced with the actual path of this module in your project, e.g. `module.secure_baseline`.
-- `$ACCESS_LOG_BUCKET` should be replaced with the state bucket name. The actual value in your state file as `module.audit_log_bucket.aws_s3_bucket.access_log[0].id`.
+- `$ACCESS_LOG_BUCKET` should be replaced with the name of the access logging bucket. The actual value can be found in your state file as `module.audit_log_bucket[0].aws_s3_bucket.access_log.id`.
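+After both sets of imports, `terraform plan` is a reasonable sanity check; it should not propose destroying or recreating either bucket:
+```sh
+# Review the plan after importing; only the changes expected for this upgrade should remain.
+$ terraform plan
+```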
[aws provider]: https://github.com/hashicorp/terraform-provider-aws
[the upgrade guide for aws provider]: https://registry.terraform.io/providers/hashicorp/aws/latest/docs/guides/version-4-upgrade
diff --git a/migrations.tf b/migrations.tf
index 8ba8fc14..b011346b 100644
--- a/migrations.tf
+++ b/migrations.tf
@@ -1,3 +1,13 @@
+# --------------------------------------------------------------------------------------------------
+# Migrations to 1.0.0
+# Replacing `enabled` argument in secure-bucket module with `count` meta-argument
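+# The moved block below maps the old address `module.audit_log_bucket` to `module.audit_log_bucket[0]`, so the existing bucket is kept in state instead of being destroyed and recreated.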
+# --------------------------------------------------------------------------------------------------
+
+moved {
+ from = module.audit_log_bucket
+ to = module.audit_log_bucket[0]
+}
+
# --------------------------------------------------------------------------------------------------
# Migrations to 0.31.0
# Replacing `enabled` argument in each sub-module with `count` meta-argument.
diff --git a/modules/secure-bucket/README.md b/modules/secure-bucket/README.md
index 845cf0a8..d60efa8f 100644
--- a/modules/secure-bucket/README.md
+++ b/modules/secure-bucket/README.md
@@ -23,7 +23,6 @@ Creates a S3 bucket with access logging enabled.
| [bucket\_name](#input\_bucket\_name) | The name of the S3 bucket to create. | `string` | yes |
| [log\_bucket\_name](#input\_log\_bucket\_name) | The name of the S3 bucket to store access logs to the main bucket. | `string` | yes |
| [bucket\_key\_enabled](#input\_bucket\_key\_enabled) | Whether or not to use Amazon S3 Bucket Keys for this bucket. | `bool` | no |
-| [enabled](#input\_enabled) | A boolean that indicates this module is enabled. Resources are not created if it is set to false. | `bool` | no |
| [force\_destroy](#input\_force\_destroy) | A boolean that indicates all objects should be deleted from the bucket so that the bucket can be destroyed without error. These objects are not recoverable. | `bool` | no |
| [lifecycle\_glacier\_transition\_days](#input\_lifecycle\_glacier\_transition\_days) | The number of days after object creation when the object is archived into Glacier. | `number` | no |
| [tags](#input\_tags) | Specifies object tags key and value. This applies to all resources created by this module. | `map(string)` | no |
diff --git a/modules/secure-bucket/main.tf b/modules/secure-bucket/main.tf
index 0b3608bb..553f0b59 100644
--- a/modules/secure-bucket/main.tf
+++ b/modules/secure-bucket/main.tf
@@ -1,12 +1,10 @@
data "aws_iam_policy_document" "access_log_policy" {
- count = var.enabled ? 1 : 0
-
statement {
actions = ["s3:*"]
effect = "Deny"
resources = [
- aws_s3_bucket.access_log[0].arn,
- "${aws_s3_bucket.access_log[0].arn}/*"
+ aws_s3_bucket.access_log.arn,
+ "${aws_s3_bucket.access_log.arn}/*"
]
condition {
test = "Bool"
@@ -21,8 +19,6 @@ data "aws_iam_policy_document" "access_log_policy" {
}
resource "aws_s3_bucket" "access_log" {
- count = var.enabled ? 1 : 0
-
bucket = var.log_bucket_name
force_destroy = var.force_destroy
@@ -30,16 +26,12 @@ resource "aws_s3_bucket" "access_log" {
}
resource "aws_s3_bucket_acl" "access_log" {
- count = var.enabled ? 1 : 0
-
- bucket = aws_s3_bucket.access_log[0].id
+ bucket = aws_s3_bucket.access_log.id
acl = "log-delivery-write"
}
resource "aws_s3_bucket_server_side_encryption_configuration" "access_log" {
- count = var.enabled ? 1 : 0
-
- bucket = aws_s3_bucket.access_log[0].id
+ bucket = aws_s3_bucket.access_log.id
rule {
apply_server_side_encryption_by_default {
@@ -49,9 +41,7 @@ resource "aws_s3_bucket_server_side_encryption_configuration" "access_log" {
}
resource "aws_s3_bucket_lifecycle_configuration" "access_log" {
- count = var.enabled ? 1 : 0
-
- bucket = aws_s3_bucket.access_log[0].id
+ bucket = aws_s3_bucket.access_log.id
rule {
id = "auto-archive"
@@ -67,10 +57,8 @@ resource "aws_s3_bucket_lifecycle_configuration" "access_log" {
}
resource "aws_s3_bucket_policy" "access_log_policy" {
- count = var.enabled ? 1 : 0
-
- bucket = aws_s3_bucket.access_log[0].id
- policy = data.aws_iam_policy_document.access_log_policy[0].json
+ bucket = aws_s3_bucket.access_log.id
+ policy = data.aws_iam_policy_document.access_log_policy.json
depends_on = [
aws_s3_bucket_public_access_block.access_log,
@@ -78,9 +66,7 @@ resource "aws_s3_bucket_policy" "access_log_policy" {
}
resource "aws_s3_bucket_public_access_block" "access_log" {
- count = var.enabled ? 1 : 0
-
- bucket = aws_s3_bucket.access_log[0].id
+ bucket = aws_s3_bucket.access_log.id
block_public_acls = true
block_public_policy = true
ignore_public_acls = true
@@ -88,8 +74,6 @@ resource "aws_s3_bucket_public_access_block" "access_log" {
}
resource "aws_s3_bucket" "content" {
- count = var.enabled ? 1 : 0
-
bucket = var.bucket_name
force_destroy = var.force_destroy
@@ -101,16 +85,12 @@ resource "aws_s3_bucket" "content" {
}
resource "aws_s3_bucket_acl" "content" {
- count = var.enabled ? 1 : 0
-
- bucket = aws_s3_bucket.content[0].id
+ bucket = aws_s3_bucket.content.id
acl = "private"
}
resource "aws_s3_bucket_server_side_encryption_configuration" "content" {
- count = var.enabled ? 1 : 0
-
- bucket = aws_s3_bucket.content[0].id
+ bucket = aws_s3_bucket.content.id
rule {
apply_server_side_encryption_by_default {
@@ -121,18 +101,14 @@ resource "aws_s3_bucket_server_side_encryption_configuration" "content" {
}
resource "aws_s3_bucket_logging" "content" {
- count = var.enabled ? 1 : 0
-
- bucket = aws_s3_bucket.content[0].id
+ bucket = aws_s3_bucket.content.id
- target_bucket = aws_s3_bucket.access_log[0].id
+ target_bucket = aws_s3_bucket.access_log.id
target_prefix = ""
}
resource "aws_s3_bucket_lifecycle_configuration" "content" {
- count = var.enabled ? 1 : 0
-
- bucket = aws_s3_bucket.content[0].id
+ bucket = aws_s3_bucket.content.id
rule {
id = "auto-archive"
@@ -153,9 +129,7 @@ resource "aws_s3_bucket_lifecycle_configuration" "content" {
}
resource "aws_s3_bucket_versioning" "content" {
- count = var.enabled ? 1 : 0
-
- bucket = aws_s3_bucket.content[0].id
+ bucket = aws_s3_bucket.content.id
versioning_configuration {
status = "Enabled"
@@ -166,9 +140,7 @@ resource "aws_s3_bucket_versioning" "content" {
}
resource "aws_s3_bucket_public_access_block" "content" {
- count = var.enabled ? 1 : 0
-
- bucket = aws_s3_bucket.content[0].id
+ bucket = aws_s3_bucket.content.id
block_public_acls = true
block_public_policy = true
ignore_public_acls = true
diff --git a/modules/secure-bucket/migrations.tf b/modules/secure-bucket/migrations.tf
new file mode 100644
index 00000000..6cf058aa
--- /dev/null
+++ b/modules/secure-bucket/migrations.tf
@@ -0,0 +1,30 @@
+# --------------------------------------------------------------------------------------------------
+# Migrations to 1.0.0
+# Replacing `enabled` argument in secure-bucket module with `count` meta-argument
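+# These blocks map the previously count-indexed `[0]` addresses to the new un-indexed addresses so the existing buckets are retained.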
+# --------------------------------------------------------------------------------------------------
+
+moved {
+ from = aws_s3_bucket.access_log[0]
+ to = aws_s3_bucket.access_log
+}
+
+moved {
+ from = aws_s3_bucket_policy.access_log_policy[0]
+ to = aws_s3_bucket_policy.access_log_policy
+}
+
+moved {
+ from = aws_s3_bucket_public_access_block.access_log[0]
+ to = aws_s3_bucket_public_access_block.access_log
+}
+
+moved {
+ from = aws_s3_bucket.content[0]
+ to = aws_s3_bucket.content
+}
+
+moved {
+ from = aws_s3_bucket_public_access_block.content[0]
+ to = aws_s3_bucket_public_access_block.content
+}
+
diff --git a/modules/secure-bucket/outputs.tf b/modules/secure-bucket/outputs.tf
index 5aa51670..0d9f5924 100644
--- a/modules/secure-bucket/outputs.tf
+++ b/modules/secure-bucket/outputs.tf
@@ -1,9 +1,9 @@
output "this_bucket" {
description = "This S3 bucket."
- value = var.enabled ? aws_s3_bucket.content[0] : null
+ value = aws_s3_bucket.content
}
output "log_bucket" {
description = "The S3 bucket used for storing access logs of this bucket."
- value = var.enabled ? aws_s3_bucket.access_log[0] : null
+ value = aws_s3_bucket.access_log
}
diff --git a/modules/secure-bucket/variables.tf b/modules/secure-bucket/variables.tf
index 3812ef08..532df17b 100644
--- a/modules/secure-bucket/variables.tf
+++ b/modules/secure-bucket/variables.tf
@@ -20,12 +20,6 @@ variable "force_destroy" {
default = false
}
-variable "enabled" {
- description = "A boolean that indicates this module is enabled. Resources are not created if it is set to false."
- type = bool
- default = true
-}
-
variable "tags" {
description = "Specifies object tags key and value. This applies to all resources created by this module."
type = map(string)
diff --git a/outputs.tf b/outputs.tf
index 2be54fcf..beae8b0b 100644
--- a/outputs.tf
+++ b/outputs.tf
@@ -4,7 +4,7 @@
output "audit_bucket" {
description = "The S3 bucket used for storing audit logs."
- value = module.audit_log_bucket.this_bucket
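+ # one() returns the module-managed bucket when it exists, or null when an external bucket is used.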
+ value = one(module.audit_log_bucket[*].this_bucket)
}
# --------------------------------------------------------------------------------------------------