Skip to content

Commit

Permalink
Terraform Upgrade in Automate HA (#8253)
Browse files Browse the repository at this point in the history
* Syntax changes related to 1.5

* Added comment

Signed-off-by: Sushil Chaudhari <[email protected]>

* Annotated the output value as sensitive

Signed-off-by: Sushil Chaudhari <[email protected]>

* Made custom changes in package

Signed-off-by: Sushil Chaudhari <[email protected]>

* Made custom changes to accommodate terraform

Signed-off-by: Sushil Chaudhari <[email protected]>

* Added a new terraform package

Signed-off-by: Sushil Chaudhari <[email protected]>

* Rewrote all Terraform configuration files to a canonical format

Signed-off-by: Sushil Chaudhari <[email protected]>

* Changed to the hab pinned version

Signed-off-by: Sushil Chaudhari <[email protected]>

* Reverted to its origin

Signed-off-by: Sushil Chaudhari <[email protected]>

* Sorted terraform variables

Signed-off-by: Sushil Chaudhari <[email protected]>

---------

Signed-off-by: Sushil Chaudhari <[email protected]>
  • Loading branch information
schaudha authored Oct 25, 2023
1 parent d0eac07 commit 46ce68a
Show file tree
Hide file tree
Showing 23 changed files with 155 additions and 170 deletions.
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#!{{pkgPathFor "core/bash"}}/bin/bash

hab pkg binlink chef/inspec -f
hab pkg binlink core/terraform -f
hab pkg binlink core/terraform1 -f
hab pkg binlink core/jq-static -f
hab pkg binlink chef/automate-ha-cluster-ctl -f
NEW_WORKSPACE="{{pkg.path}}/workspace"
Expand Down
4 changes: 2 additions & 2 deletions components/automate-backend-deployment/habitat/plan.sh
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@ pkg_deps=(
core/make
core/curl
core/rsync
core/terraform/0.14.8/20210826165930
core/busybox-static
core/terraform1
core/busybox-static
chef/automate-ha-cluster-ctl
)

Expand Down
2 changes: 1 addition & 1 deletion components/automate-cluster-ctl/habitat/plan.sh
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ pkg_deps=(
core/make
core/curl
core/rsync
core/terraform/0.14.8/20210826165930
core/terraform1
core/hab/1.6.521/20220603154827
)

Expand Down
6 changes: 3 additions & 3 deletions terraform/a2ha-terraform/destroy/aws/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ provider "aws" {
}

module "aws_metadata" {
source = "../../modules/aws_metadata"
source = "../../modules/aws_metadata"
}

module "aws" {
Expand Down Expand Up @@ -38,7 +38,7 @@ module "aws" {
opensearch_ebs_volume_size = var.opensearch_ebs_volume_size
opensearch_ebs_volume_type = var.opensearch_ebs_volume_type
opensearch_instance_count = var.opensearch_instance_count
opensearch_listen_port = var.opensearch_listen_port
opensearch_listen_port = var.opensearch_listen_port
opensearch_server_instance_type = var.opensearch_server_instance_type
pgleaderchk_listen_port = var.pgleaderchk_listen_port
postgresql_ebs_volume_iops = var.postgresql_ebs_volume_iops
Expand Down Expand Up @@ -94,6 +94,6 @@ module "aws-output" {
postgresql_private_ips = module.aws.postgresql_private_ips
opensearch_private_ips = module.aws.opensearch_private_ips
automate_fqdn = module.aws.automate_fqdn
automate_frontend_url = module.aws.automate_frontend_url
automate_frontend_url = module.aws.automate_frontend_url
bucket_name = var.backup_config_s3 == "true" ? module.s3[0].bucket_name : ""
}
2 changes: 1 addition & 1 deletion terraform/a2ha-terraform/modules/airgap_bundle/inputs.tf
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ variable "archive_disk_info" {
}

variable "bundle_files" {
default = []
default = []
description = "Array of hashs for bundle files, hash should have a source and destination key"
}

Expand Down
16 changes: 8 additions & 8 deletions terraform/a2ha-terraform/modules/airgap_bundle/main.tf
Original file line number Diff line number Diff line change
@@ -1,21 +1,21 @@
locals {
checksum_info = [
for bundle in var.bundle_files:
for bundle in var.bundle_files :
format("%s %s",
element(split(" ", file("transfer_files/${bundle.source}.md5")), 0),
bundle.destination
)
]
rsync_files = [
for bundle in var.bundle_files:
for bundle in var.bundle_files :
format("%s,%s", bundle.source, bundle.destination)
]
airgap_info = templatefile("${path.module}/templates/airgap.info.tpl", {
archive_disk_info = var.archive_disk_info,
files = join(",", local.rsync_files),
instance_count = var.instance_count,
tmp_path = var.tmp_path,
checksums = join("\n", local.checksum_info)
archive_disk_info = var.archive_disk_info,
files = join(",", local.rsync_files),
instance_count = var.instance_count,
tmp_path = var.tmp_path,
checksums = join("\n", local.checksum_info)
})
}

Expand All @@ -30,7 +30,7 @@ resource "null_resource" "rsync" {
}

triggers = {
template = local.airgap_info
template = local.airgap_info
always_run = timestamp()
}

Expand Down
109 changes: 44 additions & 65 deletions terraform/a2ha-terraform/modules/aws/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -9,23 +9,23 @@ data "aws_vpc" "default" {
id = var.aws_vpc_id
}

locals {
locals {
private_subnet_ids_string = join(",", var.private_custom_subnets)
private_subnet_ids_list = split(",", local.private_subnet_ids_string)
private_subnet_ids_list = split(",", local.private_subnet_ids_string)
}

data "aws_subnet" "default" {
count = length(var.private_custom_subnets) > 0 ? 3 : 0
data "aws_subnet" "default" {
count = length(var.private_custom_subnets) > 0 ? 3 : 0
id = local.private_subnet_ids_list[count.index]
}

locals {
locals {
public_subnet_ids_string = join(",", var.public_custom_subnets)
public_subnet_ids_list = split(",", local.public_subnet_ids_string)
public_subnet_ids_list = split(",", local.public_subnet_ids_string)
}

data "aws_subnet" "public" {
count = length(var.public_custom_subnets) > 0 ? 3 : 0
data "aws_subnet" "public" {
count = length(var.public_custom_subnets) > 0 ? 3 : 0
id = local.public_subnet_ids_list[count.index]
}

Expand All @@ -42,7 +42,7 @@ resource "aws_subnet" "default" {
cidr_block = cidrsubnet("${var.aws_cidr_block_addr}/18", 8, count.index + 1)
availability_zone = data.aws_availability_zones.available.names[count.index]

tags = merge(var.tags, map("Name", "${var.tag_name}_${random_id.random.hex}_${data.aws_availability_zones.available.names[count.index]}_private"))
tags = merge(var.tags, tomap({ "Name" = "${var.tag_name}_${random_id.random.hex}_${data.aws_availability_zones.available.names[count.index]}_private" }))
}

resource "aws_subnet" "public" {
Expand All @@ -52,96 +52,97 @@ resource "aws_subnet" "public" {
availability_zone = data.aws_availability_zones.available.names[count.index]
map_public_ip_on_launch = true

tags = merge(var.tags, map("Name", "${var.tag_name}_${random_id.random.hex}_${data.aws_availability_zones.available.names[count.index]}_public"))
tags = merge(var.tags, tomap({ "Name" = "${var.tag_name}_${random_id.random.hex}_${data.aws_availability_zones.available.names[count.index]}_public" }))
}


resource "aws_eip" "eip1" {
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
vpc = true
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
domain = "vpc" # changed in new version
public_ipv4_pool = "amazon"

tags = merge(var.tags, map("Name", "${var.tag_name}_${random_id.random.hex}_eip"))
tags = merge(var.tags, tomap({ "Name" = "${var.tag_name}_${random_id.random.hex}_eip" }))
}

resource "aws_eip" "eip2" {
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
vpc = true
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
domain = "vpc"
public_ipv4_pool = "amazon"

tags = merge(var.tags, map("Name", "${var.tag_name}_${random_id.random.hex}_eip"))
tags = merge(var.tags, tomap({ "Name" = "${var.tag_name}_${random_id.random.hex}_eip" }))
}

resource "aws_eip" "eip3" {
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
vpc = true
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
domain = "vpc"
public_ipv4_pool = "amazon"

tags = merge(var.tags, map("Name", "${var.tag_name}_${random_id.random.hex}_eip"))
tags = merge(var.tags, tomap({ "Name" = "${var.tag_name}_${random_id.random.hex}_eip" }))
}

resource "aws_nat_gateway" "nat1" {
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
allocation_id = aws_eip.eip1[0].id
subnet_id = aws_subnet.public[0].id

tags = merge(var.tags, map("Name", "${var.tag_name}_${random_id.random.hex}_nat_gw"))
tags = merge(var.tags, tomap({ "Name" = "${var.tag_name}_${random_id.random.hex}_nat_gw" }))

depends_on = [data.aws_internet_gateway.default]
}

resource "aws_nat_gateway" "nat2" {
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
allocation_id = aws_eip.eip2[0].id
subnet_id = aws_subnet.public[1].id

tags = merge(var.tags, map("Name", "${var.tag_name}_${random_id.random.hex}_nat_gw"))
tags = merge(var.tags, tomap({ "Name" = "${var.tag_name}_${random_id.random.hex}_nat_gw" }))

depends_on = [data.aws_internet_gateway.default]
}

resource "aws_nat_gateway" "nat3" {
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
allocation_id = aws_eip.eip3[0].id
subnet_id = aws_subnet.public[2].id

tags = merge(var.tags, map("Name", "${var.tag_name}_${random_id.random.hex}_nat_gw"))
tags = merge(var.tags, tomap({ "Name" = "${var.tag_name}_${random_id.random.hex}_nat_gw" }))

depends_on = [data.aws_internet_gateway.default]
}

resource "aws_route_table" "route1" {
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
vpc_id = data.aws_vpc.default.id
route {
cidr_block = "0.0.0.0/0"
nat_gateway_id = aws_nat_gateway.nat1[0].id
}

tags = merge(var.tags, map("Name", "${var.tag_name}_${random_id.random.hex}_route_table"))
tags = merge(var.tags, tomap({ "Name" = "${var.tag_name}_${random_id.random.hex}_route_table" }))

}

resource "aws_route_table" "route2" {
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
vpc_id = data.aws_vpc.default.id
route {
cidr_block = "0.0.0.0/0"
nat_gateway_id = aws_nat_gateway.nat2[0].id
}

tags = merge(var.tags, map("Name", "${var.tag_name}_${random_id.random.hex}_route_table"))
tags = merge(var.tags, tomap({ "Name" = "${var.tag_name}_${random_id.random.hex}_route_table" }))

}

resource "aws_route_table" "route3" {
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
count = (length(var.public_custom_subnets) == 0 && var.aws_cidr_block_addr != "") ? 1 : 0
vpc_id = data.aws_vpc.default.id
route {
cidr_block = "0.0.0.0/0"
nat_gateway_id = aws_nat_gateway.nat3[0].id
}

tags = merge(var.tags, map("Name", "${var.tag_name}_${random_id.random.hex}_route_table"))
tags = merge(var.tags, tomap({ "Name" = "${var.tag_name}_${random_id.random.hex}_route_table" }))

}

Expand Down Expand Up @@ -193,17 +194,10 @@ resource "aws_instance" "chef_automate_postgresql" {
iops = var.postgresql_ebs_volume_type == "io1" ? var.postgresql_ebs_volume_iops : 0
volume_size = var.postgresql_ebs_volume_size
volume_type = var.postgresql_ebs_volume_type
tags = merge(var.tags,map("Name",format("${var.tag_name}_${random_id.random.hex}_chef_automate_postgresql_%02d", count.index + 1)))
tags = merge(var.tags, tomap({ "Name" = format("${var.tag_name}_${random_id.random.hex}_chef_automate_postgresql_%02d", count.index + 1) }))
}

tags = merge(var.tags,
map("Name",
format(
"${var.tag_name}_${random_id.random.hex}_chef_automate_postgresql_%02d",
count.index + 1
)
)
)
tags = merge(var.tags, tomap({ "Name" = format("${var.tag_name}_${random_id.random.hex}_chef_automate_postgresql_%02d", count.index + 1) }))
lifecycle {
ignore_changes = [
tags,
Expand All @@ -217,7 +211,7 @@ resource "aws_instance" "chef_automate_postgresql" {
http_tokens = "required"
instance_metadata_tags = "enabled"
}
depends_on = [aws_route_table.route1,aws_route_table.route2,aws_route_table.route3]
depends_on = [aws_route_table.route1, aws_route_table.route2, aws_route_table.route3]

}
resource "aws_instance" "chef_automate_opensearch" {
Expand All @@ -237,15 +231,10 @@ resource "aws_instance" "chef_automate_opensearch" {
iops = var.opensearch_ebs_volume_type == "io1" ? var.opensearch_ebs_volume_iops : 0
volume_size = var.opensearch_ebs_volume_size
volume_type = var.opensearch_ebs_volume_type
tags = merge(var.tags,map("Name",format("${var.tag_name}_${random_id.random.hex}_chef_automate_opensearch_%02d", count.index + 1)))
tags = merge(var.tags, tomap({ "Name" = format("${var.tag_name}_${random_id.random.hex}_chef_automate_opensearch_%02d", count.index + 1) }))
}

tags = merge(
var.tags,
map("Name",
format("${var.tag_name}_${random_id.random.hex}_chef_automate_opensearch_%02d", count.index + 1)
)
)
tags = merge(var.tags, tomap({ "Name" = format("${var.tag_name}_${random_id.random.hex}_chef_automate_opensearch_%02d", count.index + 1) }))
lifecycle {
ignore_changes = [
tags,
Expand All @@ -259,7 +248,7 @@ resource "aws_instance" "chef_automate_opensearch" {
http_tokens = "required"
instance_metadata_tags = "enabled"
}
depends_on = [aws_route_table.route1,aws_route_table.route2,aws_route_table.route3]
depends_on = [aws_route_table.route1, aws_route_table.route2, aws_route_table.route3]

}

Expand All @@ -280,15 +269,10 @@ resource "aws_instance" "chef_automate" {
iops = var.automate_ebs_volume_type == "io1" ? var.automate_ebs_volume_iops : 0
volume_size = var.automate_ebs_volume_size
volume_type = var.automate_ebs_volume_type
tags = merge(var.tags,map("Name",format("${var.tag_name}_${random_id.random.hex}_chef_automate_%02d", count.index + 1)))
tags = merge(var.tags, tomap({ "Name" = format("${var.tag_name}_${random_id.random.hex}_chef_automate_%02d", count.index + 1) }))
}

tags = merge(
var.tags,
map("Name",
format("${var.tag_name}_${random_id.random.hex}_chef_automate_%02d", count.index + 1)
)
)
tags = merge(var.tags, tomap({ "Name" = format("${var.tag_name}_${random_id.random.hex}_chef_automate_%02d", count.index + 1) }))

lifecycle {
ignore_changes = [
Expand All @@ -303,7 +287,7 @@ resource "aws_instance" "chef_automate" {
http_tokens = "required"
instance_metadata_tags = "enabled"
}
depends_on = [aws_route_table.route1,aws_route_table.route2,aws_route_table.route3]
depends_on = [aws_route_table.route1, aws_route_table.route2, aws_route_table.route3]

}

Expand All @@ -325,15 +309,10 @@ resource "aws_instance" "chef_server" {
iops = var.chef_ebs_volume_type == "io1" ? var.chef_ebs_volume_iops : 0
volume_size = var.chef_ebs_volume_size
volume_type = var.chef_ebs_volume_type
tags = merge(var.tags,map("Name",format("${var.tag_name}_${random_id.random.hex}_chef_server_%02d", count.index + 1)))
tags = merge(var.tags, tomap({ "Name" = format("${var.tag_name}_${random_id.random.hex}_chef_server_%02d", count.index + 1) }))
}

tags = merge(
var.tags,
map("Name",
format("${var.tag_name}_${random_id.random.hex}_chef_server_%02d", count.index + 1)
)
)
tags = merge(var.tags, tomap({ "Name" = format("${var.tag_name}_${random_id.random.hex}_chef_server_%02d", count.index + 1) }))

lifecycle {
ignore_changes = [
Expand All @@ -348,6 +327,6 @@ resource "aws_instance" "chef_server" {
http_tokens = "required"
instance_metadata_tags = "enabled"
}
depends_on = [aws_route_table.route1,aws_route_table.route2,aws_route_table.route3]
depends_on = [aws_route_table.route1, aws_route_table.route2, aws_route_table.route3]

}
Loading

0 comments on commit 46ce68a

Please sign in to comment.