diff --git a/README.md b/README.md
index 6dd724c..39456f7 100644
--- a/README.md
+++ b/README.md
@@ -146,25 +146,13 @@ No modules.
| Name | Type |
|------|------|
-| [aws_cloudwatch_event_rule.pipeline_event](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_event_rule) | resource |
-| [aws_cloudwatch_event_target.code_pipeline](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_event_target) | resource |
| [aws_codepipeline.this](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/codepipeline) | resource |
-| [aws_iam_policy.pipeline_event_role_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
| [aws_iam_role.codepipeline_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource |
-| [aws_iam_role.pipeline_event_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource |
| [aws_iam_role_policy.codepipeline_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy) | resource |
-| [aws_iam_role_policy_attachment.pipeline_event_role_attach_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
-| [aws_s3_bucket.codepipeline_bucket](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource |
-| [aws_s3_bucket.source](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource |
-| [aws_s3_bucket_logging.codepipeline_bucket_logging](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_logging) | resource |
-| [aws_s3_bucket_logging.source_bucket_logging](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_logging) | resource |
-| [aws_s3_bucket_server_side_encryption_configuration.sse](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_server_side_encryption_configuration) | resource |
-| [aws_s3_bucket_versioning.versioning](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_versioning) | resource |
| [random_string.random](https://registry.terraform.io/providers/hashicorp/random/latest/docs/resources/string) | resource |
| [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source |
| [aws_iam_policy_document.assume_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source |
| [aws_iam_policy_document.codepipeline_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source |
-| [aws_iam_policy_document.event_bridge_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source |
| [aws_iam_policy_document.pipeline_event_role_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source |
## Inputs
@@ -174,11 +162,7 @@ No modules.
| [name](#input\_name) | The name of the pipeline | `string` | n/a | yes |
| [artifact\_stores](#input\_artifact\_stores) | One or more artifact\_store blocks. | `list(map(string))` | <pre>[<br>  {<br>    "use_kms": false<br>  }<br>]</pre> | no |
| [stages](#input\_stages) | One or more stage blocks. | `any` | n/a | yes |
-| [create\_s3\_source](#input\_create\_s3\_source) | Create S3 source bucket for CodePipeline. | `string` | `false` | no |
-| [source\_s3\_bucket](#input\_source\_s3\_bucket) | Name of the S3 source bucket. | `string` | `null` | no |
-| [s3\_trigger\_file](#input\_s3\_trigger\_file) | S3 file that the event rule is looking for to trigger the pipeline. | `string` | `"trigger_pipeline.zip"` | no |
-| [log\_target\_bucket](#input\_log\_target\_bucket) | Target bucket for S3 Access Logs. If left blank, access logging is disabled. | `string` | `""` | no |
-| [log\_target\_prefix](#input\_log\_target\_prefix) | Prefix for S3 Access logs. Defaults to AWSLogs//s3audit if not otherwise provided. | `string` | `""` | no |
+| [artifact\_bucket\_name](#input\_artifact\_bucket\_name) | Name of the existing S3 bucket used to store CodePipeline artifacts. | `string` | `null` | no |
| [codepipeline\_iam](#input\_codepipeline\_iam) | Additional IAM actions to add to CodePipeline IAM role. | `map(list(string))` | `null` | no |
| [pipeline\_type](#input\_pipeline\_type) | The CodePipeline pipeline\_type. Valid options are V1, V2 | `string` | `"V1"` | no |
| [execution\_mode](#input\_execution\_mode) | The CodePipeline execution\_mode. Valid options are `PARALLEL`, `QUEUED`, `SUPERSEDED` (default) | `string` | `"SUPERSEDED"` | no |
@@ -190,5 +174,4 @@ No modules.
|------|-------------|
| [id](#output\_id) | The codepipeline ID |
| [arn](#output\_arn) | The codepipeline ARN |
-| [source\_s3\_bucket\_arn](#output\_source\_s3\_bucket\_arn) | The codepipeline source s3 bucket ARN |
diff --git a/examples/complete/README.md b/examples/complete/README.md
index 9cc86b2..c8b5a38 100644
--- a/examples/complete/README.md
+++ b/examples/complete/README.md
@@ -45,8 +45,6 @@ provider "random" {}
| Name | Description | Type | Default | Required |
|------|-------------|------|---------|:--------:|
| [name](#input\_name) | The name of the pipeline | `string` | n/a | yes |
-| [create\_s3\_source](#input\_create\_s3\_source) | Create S3 source bucket for CodePipeline. | `string` | `false` | no |
-| [source\_s3\_bucket\_prefix](#input\_source\_s3\_bucket\_prefix) | Prefix of the S3 source bucket. Random characters will be added to the end to ensure uniqueness. | `string` | `null` | no |
| [stages](#input\_stages) | One or more stage blocks. | `any` | n/a | yes |
| [pipeline\_type](#input\_pipeline\_type) | The CodePipeline pipeline\_type. Valid options are V1, V2 | `string` | `"V2"` | no |
| [execution\_mode](#input\_execution\_mode) | The CodePipeline execution\_mode. Valid options are `PARALLEL`, `QUEUED`, `SUPERSEDED` (default) | `string` | `"SUPERSEDED"` | no |
@@ -58,5 +56,6 @@ provider "random" {}
|------|-------------|
| [id](#output\_id) | The codepipeline ID |
| [arn](#output\_arn) | The codepipeline ARN |
-| [source\_s3\_bucket\_arn](#output\_source\_s3\_bucket\_arn) | The codepipeline source s3 bucket ARN |
diff --git a/examples/complete/locals.tf b/examples/complete/locals.tf
index 43a4f2f..9911154 100644
--- a/examples/complete/locals.tf
+++ b/examples/complete/locals.tf
@@ -10,6 +10,3 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-locals {
- source_bucket_name = "${var.source_s3_bucket_prefix}-${random_string.random.result}"
-}
diff --git a/examples/complete/main.tf b/examples/complete/main.tf
index 94e73ee..052d6ab 100644
--- a/examples/complete/main.tf
+++ b/examples/complete/main.tf
@@ -20,12 +20,11 @@ module "codepipeline" {
source = "../.."
name = var.name
-
- create_s3_source = var.create_s3_source
- source_s3_bucket = local.source_bucket_name
- stages = var.stages
- pipeline_type = var.pipeline_type
- execution_mode = var.execution_mode
+
+ artifact_bucket_name = var.artifact_bucket_name
+ stages = var.stages
+ pipeline_type = var.pipeline_type
+ execution_mode = var.execution_mode
tags = var.tags
}
diff --git a/examples/complete/outputs.tf b/examples/complete/outputs.tf
index 9c86cc6..eca2a86 100644
--- a/examples/complete/outputs.tf
+++ b/examples/complete/outputs.tf
@@ -20,7 +20,3 @@ output "arn" {
value = module.codepipeline.arn
}
-output "source_s3_bucket_arn" {
- description = "The codepipeline source s3 bucket ARN"
- value = module.codepipeline.source_s3_bucket_arn
-}
diff --git a/examples/complete/test.tfvars b/examples/complete/test.tfvars
index 5fbb30e..f0d66ac 100644
--- a/examples/complete/test.tfvars
+++ b/examples/complete/test.tfvars
@@ -1,8 +1,7 @@
-name = "tf-aws-module_primitive-codepipeline-test-pipeline"
-create_s3_source = true
-source_s3_bucket_prefix = "tf-aws-module_primitive-codepipeline-test-bucket"
-pipeline_type = "V2"
-execution_mode = "PARALLEL"
+name = "tf-aws-module_primitive-codepipeline-test-pipeline"
+pipeline_type = "V2"
+execution_mode = "PARALLEL"
+artifact_bucket_name = "osahon-test-020127659860"
stages = [
@@ -14,7 +13,7 @@ stages = [
provider = "S3"
version = "1"
configuration = {
- S3Bucket = "aaron-test-bucket-778189110199"
+ S3Bucket = "osahon-test-020127659860"
S3ObjectKey = "trigger_pipeline.zip"
PollForSourceChanges = "false"
}
diff --git a/examples/complete/variables.tf b/examples/complete/variables.tf
index 8e78b40..f230846 100644
--- a/examples/complete/variables.tf
+++ b/examples/complete/variables.tf
@@ -15,23 +15,10 @@ variable "name" {
type = string
}
-variable "create_s3_source" {
- description = "Create S3 source bucket for CodePipeline."
- type = string
- default = false
-}
-
-variable "source_s3_bucket_prefix" {
- description = "Prefix of the S3 source bucket. Random characters will be added to the end to ensure uniqueness."
- type = string
- default = null
-}
-
variable "stages" {
description = "One or more stage blocks."
type = any
}
-
variable "pipeline_type" {
description = "The CodePipeline pipeline_type. Valid options are V1, V2"
type = string
@@ -49,6 +36,11 @@ variable "execution_mode" {
}
}
+variable "artifact_bucket_name" {
+ description = "the name of the S3 bucket used for storing the artifacts in the Codepipeline"
+ type = string
+}
+
variable "tags" {
description = "An arbitrary map of tags that can be added to all resources."
type = map(string)
diff --git a/locals.tf b/locals.tf
index 8a61c13..13403b6 100644
--- a/locals.tf
+++ b/locals.tf
@@ -14,8 +14,8 @@ locals {
default_tags = {
provisioner = "Terraform"
}
- account_id = data.aws_caller_identity.current.account_id
- bucket_prefix = length(var.log_target_prefix) == 0 ? "AWSLogs/${local.account_id}/s3audit" : var.log_target_prefix
-
+
tags = merge(local.default_tags, var.tags)
}
diff --git a/main.tf b/main.tf
index e971561..68a164c 100644
--- a/main.tf
+++ b/main.tf
@@ -10,9 +10,11 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-data "aws_caller_identity" "current" {}
+data "aws_s3_bucket" "artifact_bucket" {
+ bucket = var.artifact_bucket_name
+}
-### Resources to create CodePipeline
+### Resources to create CodePipeline
resource "aws_codepipeline" "this" {
name = var.name
role_arn = aws_iam_role.codepipeline_role.arn
@@ -27,7 +29,7 @@ resource "aws_codepipeline" "this" {
}]
content {
- location = aws_s3_bucket.codepipeline_bucket.bucket
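+ # Store output artifacts in the caller-supplied bucket (var.artifact_bucket_name)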
+ location = data.aws_s3_bucket.artifact_bucket.bucket
type = "S3"
dynamic "encryption_key" {
@@ -41,7 +43,7 @@ resource "aws_codepipeline" "this" {
}
}
- # Add each stage to the pipeline from the codebuild_stages var.
+ # Add each stage to the pipeline from the var.stages input.
dynamic "stage" {
for_each = [for stage_val in var.stages : {
stage_name = try(stage_val.stage_name, "My-Stage")
@@ -79,28 +81,7 @@ resource "aws_codepipeline" "this" {
tags = local.tags
}
-# CodePipeline bucket used to store Output Artifacts
-resource "aws_s3_bucket" "codepipeline_bucket" {
- bucket = join("-", ["codepipeline", random_string.random.result])
- force_destroy = true
-}
-
-resource "aws_s3_bucket_logging" "codepipeline_bucket_logging" {
- count = length(var.log_target_bucket) > 0 ? 1 : 0
-
- bucket = aws_s3_bucket.codepipeline_bucket.id
-
- target_bucket = var.log_target_bucket
- target_prefix = local.bucket_prefix
-}
-
-resource "random_string" "random" {
- length = 16
- special = false
- upper = false
-}
-
-# CodePipeline Role
+# CodePipeline Role
data "aws_iam_policy_document" "assume_role" {
statement {
effect = "Allow"
@@ -121,8 +102,7 @@ resource "aws_iam_role" "codepipeline_role" {
data "aws_iam_policy_document" "codepipeline_policy" {
- # Eventbridge trigger
- statement {
+ statement {
effect = "Allow"
actions = [
"cloudwatch:*",
@@ -150,142 +130,19 @@ data "aws_iam_policy_document" "codepipeline_policy" {
"s3:*"
]
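+ # Scope S3 permissions to the externally managed artifact bucket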
resources = [
- aws_s3_bucket.codepipeline_bucket.arn,
- "${aws_s3_bucket.codepipeline_bucket.arn}/*",
+ data.aws_s3_bucket.artifact_bucket.arn,
+ "${data.aws_s3_bucket.artifact_bucket.arn}/*",
]
}
-
- # Allow the ability to access source bucket
- dynamic "statement" {
- for_each = var.create_s3_source ? [1] : []
- content {
- effect = "Allow"
- actions = [
- "s3:GetObject",
- "s3:GetObjectVersion",
- "s3:GetBucketVersioning",
- "s3:PutObjectAcl",
- "s3:PutObject",
- ]
- resources = [
- aws_s3_bucket.source[0].arn,
- "${aws_s3_bucket.source[0].arn}/*",
- ]
- }
- }
-
- # Add additional policy statements if any
- dynamic "statement" {
- for_each = var.codepipeline_iam != null ? [1] : []
- content {
- effect = "Allow"
- actions = var.codepipeline_iam.actions
- resources = var.codepipeline_iam.resources
- }
- }
}
-
resource "aws_iam_role_policy" "codepipeline_policy" {
name = "codepipeline_policy"
role = aws_iam_role.codepipeline_role.id
policy = data.aws_iam_policy_document.codepipeline_policy.json
}
-### Resources to create an S3 Event Bridge trigger
-resource "aws_s3_bucket" "source" {
- count = var.create_s3_source ? 1 : 0
-
- bucket = replace(var.source_s3_bucket, "_", "-")
- force_destroy = true
-}
-
-resource "aws_s3_bucket_logging" "source_bucket_logging" {
- count = length(var.log_target_bucket) > 0 && var.create_s3_source ? 1 : 0
-
- bucket = aws_s3_bucket.source[0].id
-
- target_bucket = var.log_target_bucket
- target_prefix = local.bucket_prefix
-}
-
-resource "aws_s3_bucket_versioning" "versioning" {
- bucket = aws_s3_bucket.source[0].id
- versioning_configuration {
- status = "Enabled"
- }
-}
-
-resource "aws_s3_bucket_server_side_encryption_configuration" "sse" {
- bucket = aws_s3_bucket.source[0].bucket
-
- rule {
- bucket_key_enabled = true
- apply_server_side_encryption_by_default {
- sse_algorithm = "AES256"
- }
- }
-}
-
-resource "aws_cloudwatch_event_rule" "pipeline_event" {
- count = aws_s3_bucket.source != null ? 1 : 0
- name = substr("${var.name}-event", 0, 63)
- description = "Cloud watch event when zip is uploaded to s3"
-
- event_pattern = <