Skip to content

Instantly share code, notes, and snippets.

@pmatsinopoulos
Created February 3, 2023 08:39
Show Gist options
  • Save pmatsinopoulos/3a7166e616d09c3ec0f57b0407836d69 to your computer and use it in GitHub Desktop.
Save pmatsinopoulos/3a7166e616d09c3ec0f57b0407836d69 to your computer and use it in GitHub Desktop.
Practical Terraform & AWS - Part 8 - Client Machine
# Look up the most recent public Amazon Linux 2 AMI (kernel 5.10, x86_64,
# EBS-backed, HVM) published by Amazon, to use for the client machine.
data "aws_ami" "client" {
  owners      = ["137112412989"] # Amazon's official AMI owner account
  most_recent = true

  # Amazon Linux 2 image name pattern (kernel 5.10 line).
  filter {
    name   = "name"
    values = ["amzn2-ami-kernel-5.10-hvm-2.0*"]
  }

  filter {
    name   = "architecture"
    values = ["x86_64"]
  }

  filter {
    name   = "virtualization-type"
    values = ["hvm"]
  }

  filter {
    name   = "image-type"
    values = ["machine"]
  }

  filter {
    name   = "root-device-type"
    values = ["ebs"]
  }

  # Root volume should be deleted when the instance terminates.
  filter {
    name   = "block-device-mapping.delete-on-termination"
    values = ["true"]
  }

  filter {
    name   = "is-public"
    values = ["true"]
  }
}
# Look up an existing EC2 key pair by name (supplied via variables).
# NOTE(review): the key pair must already exist in this region; the matching
# private key is expected at ~/.ssh/<key_pair>.pem for the SSH provisioners.
data "aws_key_pair" "client" {
key_name = var.aws_ec2_client.key_pair
}
# Security group for the client machine: SSH in from anywhere, all traffic out.
# NOTE(review): 0.0.0.0/0 on port 22 exposes SSH to the whole internet — fine
# for a demo, but consider restricting cidr_blocks to your own IP in practice.
resource "aws_security_group" "client" {
name = "${var.project}-security-group-client"
description = "Allow SSH traffic from anywhere to anywhere"
vpc_id = aws_vpc.msk_demo.id
# Inbound: SSH only.
ingress {
description = "SSH from anywhere"
from_port = 22
to_port = 22
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
# Outbound: everything (protocol "-1" = all protocols, all ports).
egress {
description = "Outgoing from anywhere"
from_port = 0
to_port = 0
protocol = "-1"
cidr_blocks = ["0.0.0.0/0"]
}
tags = {
"environment" = var.environment
"Name" = "${var.project}-security-group-client"
"project" = var.project
}
}
# Client EC2 machine used to interact with the MSK cluster: it installs Kafka
# CLI tooling, creates the demo topic, installs the psql client, and runs the
# database initialization script against the analytics RDS instance.
resource "aws_instance" "client" {
ami = data.aws_ami.client.id
instance_type = var.aws_ec2_client.instance_type
# Pinned to the first AZ of the region; must match the subnet chosen below.
availability_zone = "${var.region}a"
subnet_id = aws_subnet.msk_demo[1].id
vpc_security_group_ids = [
aws_vpc.msk_demo.default_security_group_id,
aws_security_group.client.id
]
key_name = data.aws_key_pair.client.key_name
# Public IP is required so the SSH provisioners below can reach the instance.
associate_public_ip_address = true
tags = {
"environment" = var.environment
"Name" = "${var.project}-client-machine"
"project" = var.project
}
# Cloud-init script: idempotently export the Kafka broker list and topic name
# into ec2-user's shell environment. `grep -q` suppresses match output and its
# exit status drives the guard directly (replaces the old `$?` check).
user_data = <<EOF
#!/bin/bash
if ! grep -q 'export KAFKA_BROKERS' /home/ec2-user/.bashrc; then
echo "export KAFKA_BROKERS='${aws_msk_cluster.msk_cluster.bootstrap_brokers}'" >> /home/ec2-user/.bashrc
fi
if ! grep -q 'export TOPIC_NAME' /home/ec2-user/.bashrc; then
echo "export TOPIC_NAME='${var.topic_name}'" >> /home/ec2-user/.bashrc
fi
EOF
# SSH connection used by both provisioners below.
connection {
type = "ssh"
user = "ec2-user"
host = self.public_ip
# Assumes the private key for the key pair lives at this path — TODO confirm.
private_key = file("~/.ssh/${var.aws_ec2_client.key_pair}.pem")
}
# Copy the DB bootstrap script onto the instance.
provisioner "file" {
source = "live_listening_event_consumer/initialize_db.sh"
destination = "/home/ec2-user/initialize_db.sh"
}
# Install Java + Kafka CLI, create the topic, install psql, initialize the DB.
provisioner "remote-exec" {
inline = [
"sudo yum -y install java-1.8.0",
"wget https://archive.apache.org/dist/kafka/${var.kafka_version}/${local.kafka_tar_archive}.tgz",
"tar -xvf ${local.kafka_tar_archive}.tgz",
"echo 'security.protocol=PLAINTEXT' > ./${local.kafka_tar_archive}/bin/client.properties",
"(cd ${local.kafka_tar_archive} && ./bin/kafka-topics.sh --create --topic ${var.topic_name} --bootstrap-server ${aws_msk_cluster.msk_cluster.bootstrap_brokers} --replication-factor 3 --partitions 1)",
"sudo amazon-linux-extras enable postgresql14",
"sudo yum clean metadata",
"sudo yum -y install postgresql",
"chmod u+x /home/ec2-user/initialize_db.sh",
# SECURITY(review): PGPASSWORD is passed on the command line, so it is
# visible in provisioner logs and the remote process list. Consider a
# ~/.pgpass file or fetching the secret from SSM/Secrets Manager instead.
"DB_HOST=${aws_db_instance.analytics.address} DB_PORT=${aws_db_instance.analytics.port} DB_USERNAME=${aws_db_instance.analytics.username} DB_DATABASE=${var.db_analytics.name} PGPASSWORD=${var.db_analytics.password} /home/ec2-user/initialize_db.sh"
]
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment