Not sure if this is a bug or if I'm doing something wrong. When I introduce a depenency between two modules, the vars in a template_file data of the depending module acts up and terraform complains it can't find it.
I've got two modules, saltmaster and cassandra. The goal is for saltmaster to expose its IP address via an output variable, and cassandra to consume it.
If I sever the dependency between the modules, by hardcoding an ip address as the input into the cassandra module, then all resources are created correctly. But when I introduce the dependency, terraform complains with:
- module.cassandra.aws_instance.momentum_cassandra[2]: Resource 'data.template_file.momentum_cassandra' does not have attribute 'vars.hostname' for variable 'data.template_file.momentum_cassandra.*.vars.hostname'
Note that I am defining the cassandra instance's hostname in the vars section of a template_file, and then using it in the tags block of the aws_instance. And it works fine in isolation, but breaks when the cassandra module depends on the saltmaster module.
cassandra.tf
data "template_file" "momentum_cassandra" {
count = "${var.instance_count}"
template = "${file("${path.module}/userdata.sh")}"
vars {
hostname = "${format("%s%02d", var.name, count.index + 1)}"
saltmaster = "${var.saltmaster}"
salt_installtype = "${var.salt_installtype}"
seed = "${count.index == 0 ? "true" : "false"}"
datacenter = "${var.datacenter}"
rack = "${var.rack}"
}
}
resource "aws_instance" "momentum_cassandra" {
count = 3
provider = "aws.us_dev"
ami = "${var.ami}"
instance_type = "${var.instance_type}"
key_name = "${var.instance_key_name}"
subnet_id = "${element(var.vpc_subnet_ids, count.index)}"
iam_instance_profile = "${var.iam_instance_profile}"
vpc_security_group_ids = ["${aws_security_group.momentum_cassandra.id}"]
user_data = "${data.template_file.momentum_cassandra.*.rendered[count.index]}"
lifecycle {
ignore_changes = ["ami", "user_data"]
}
tags {
Name = "${data.template_file.momentum_cassandra.*.vars.hostname[count.index]}"
main.tf
module "cassandra" {
source = "./tf-aws-momentum-cassandra"
saltmaster = "${module.saltmaster.private_ip}"
}
saltmaster.tf
# create the userdata for bootstrapping
data "template_file" "saltmaster_userdata" {
count = "${var.instance_count}"
template = "${file("${path.module}/userdata.sh")}"
vars {
hostname = "${format("%s%02d", var.name, count.index + 1)}"
saltmaster = "localhost"
environment = "${var.environment}"
installtype = "${var.installtype}"
}
}
# create the salt master
resource "aws_instance" "momentum_salt_master" {
provider = "aws.us_dev"
ami = "${var.ami}"
instance_type = "${var.instance_type}"
key_name = "${var.instance_key_name}"
vpc_security_group_ids = ["${aws_security_group.momentum_salt_master.id}"]
subnet_id = "${element(var.vpc_subnet_ids, count.index)}"
iam_instance_profile = "${var.iam_instance_profile}"
user_data = "${element(data.template_file.saltmaster_userdata.*.rendered, count.index)}"
tags {
Name = "${element(data.template_file.saltmaster_userdata.*.vars.hostname, count.index)}"
Environment = "${var.environment}"
Application = "${var.application}"
Role = "SaltMaster"
}
root_block_device {
volume_type = "gp2"
volume_size = 40
delete_on_termination = true
}
}
output "private_ip" {
value = "${aws_instance.momentum_salt_master.private_ip}"
}
Depending on terraform version you are using this may have been fixed here, however, try replacing
in
cassandra.tf
withelement
lookupswhich you actually use in
saltmaster.tf