Fix Terraform setup #276

Merged: 9 commits, Sep 7, 2020
32 changes: 32 additions & 0 deletions deploy/terraform/hardware_data.tpl
@@ -0,0 +1,32 @@
{
"id": "${id}",
"metadata": {
"facility": {
"facility_code": "${facility_code}",
"plan_slug": "${plan_slug}",
"plan_version_slug": ""
},
"instance": {},
"state": ""
},
"network": {
"interfaces": [
{
"dhcp": {
"arch": "x86_64",
"ip": {
"address": "${address}",
"gateway": "192.168.1.1",
"netmask": "255.255.255.248"
},
"mac": "${mac}",
"uefi": false
},
"netboot": {
"allow_pxe": true,
"allow_workflow": true
}
}
]
}
}
17 changes: 0 additions & 17 deletions deploy/terraform/input.tf

This file was deleted.

4 changes: 2 additions & 2 deletions deploy/terraform/install_package.sh
@@ -13,9 +13,10 @@ declare -a YUM_LIST=("https://download.docker.com/linux/centos/7/x86_64/stable/P
"docker-ce"
"docker-ce-cli"
"epel-release"
"pass"
"python3")
declare -a APT_LIST=("docker"
"docker-compose")
"docker-compose" "pass")

add_yum_repo() (
$YUM_CONFIG_MGR --add-repo https://download.docker.com/linux/centos/docker-ce.repo
@@ -34,7 +35,6 @@ update_apt() (
restart_docker_service() (
service docker restart
)

install_yum_packages() (
$YUM_INSTALL "${YUM_LIST[@]}" -y
)
120 changes: 99 additions & 21 deletions deploy/terraform/main.tf
@@ -1,63 +1,141 @@
# Configure the Packet Provider.
terraform {
required_providers {
packet = {
source = "packethost/packet"
version = "~> 3.0.1"
}
null = {
source = "hashicorp/null"
}
}
}

provider "packet" {
auth_token = var.packet_api_token
version = "~> 2.9"
}

# Create a new VLAN in datacenter "ewr1"
resource "packet_vlan" "provisioning-vlan" {
description = "provisioning-vlan"
resource "packet_vlan" "provisioning_vlan" {
description = "provisioning_vlan"
facility = var.facility
project_id = var.project_id
}

# Create a device and add it to tf_project_1
resource "packet_device" "tink-provisioner" {
resource "packet_device" "tink_provisioner" {
hostname = "tink-provisioner"
plan = var.device_type
facilities = [var.facility]
operating_system = "ubuntu_18_04"
billing_cycle = "hourly"
project_id = var.project_id
network_type = "hybrid"
user_data = "${file("install_package.sh")}"
user_data = file("install_package.sh")
}

resource "null_resource" "tink_directory" {
Review comment (Member): @gianarb by moving the provisioning to a null_resource, provisioning failures (like a missing ssh key) no longer require the tink_provisioner host to be recreated. Changes to the tink/ directory can be reapplied (rsync'd) independently with terraform taint null_resource.tink_directory; terraform apply (a usage sketch follows this resource block).

connection {
type = "ssh"
user = var.ssh_user
host = packet_device.tink_provisioner.network[0].address
}

provisioner "remote-exec" {
inline = [
"mkdir -p /root/tink/deploy"
]
}

provisioner "file" {
source = "../../setup.sh"
destination = "/root/tink/setup.sh"
}

provisioner "file" {
source = "../../generate-envrc.sh"
destination = "/root/tink/generate-envrc.sh"
}

provisioner "file" {
source = "../../deploy"
destination = "/root/tink"
}

provisioner "remote-exec" {
inline = [
"chmod +x /root/tink/*.sh /root/tink/deploy/tls/*.sh"
]
}
}
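
The re-provisioning flow mentioned in the review comment above, as a minimal sketch. It assumes the state already contains null_resource.tink_directory and that the required variables (packet_api_token, project_id) are supplied via a tfvars file or environment variables:

# Mark only the file-sync resource as tainted; the next apply re-runs its
# provisioners without recreating the tink_provisioner device.
terraform taint null_resource.tink_directory

# Re-apply: only the tainted null_resource is replaced, so the tink/
# directory is copied to the provisioner again.
terraform apply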

resource "packet_device_network_type" "tink_provisioner_network_type" {
device_id = packet_device.tink_provisioner.id
type = "hybrid"
}

# Create a device and add it to tf_project_1
resource "packet_device" "tink-worker" {
hostname = "tink-worker"
resource "packet_device" "tink_worker" {
count = var.worker_count

hostname = "tink-worker-${count.index}"
plan = var.device_type
facilities = [var.facility]
operating_system = "custom_ipxe"
ipxe_script_url = "https://boot.netboot.xyz"
always_pxe = "true"
billing_cycle = "hourly"
project_id = var.project_id
network_type = "layer2-individual"
}

resource "packet_device_network_type" "tink_worker_network_type" {
count = var.worker_count

device_id = packet_device.tink_worker[count.index].id
type = "layer2-individual"
}

# Attach VLAN to provisioner
resource "packet_port_vlan_attachment" "provisioner" {
device_id = packet_device.tink-provisioner.id
port_name = "eth1"
vlan_vnid = packet_vlan.provisioning-vlan.vxlan
depends_on = [packet_device_network_type.tink_provisioner_network_type]
device_id = packet_device.tink_provisioner.id
port_name = "eth1"
vlan_vnid = packet_vlan.provisioning_vlan.vxlan
}

# Attach VLAN to worker
resource "packet_port_vlan_attachment" "worker" {
device_id = packet_device.tink-worker.id
count = var.worker_count
depends_on = [packet_device_network_type.tink_worker_network_type]

device_id = packet_device.tink_worker[count.index].id
port_name = "eth0"
vlan_vnid = packet_vlan.provisioning-vlan.vxlan
vlan_vnid = packet_vlan.provisioning_vlan.vxlan
}

output "provisioner_dns_name" {
value = "${split("-", packet_device.tink-provisioner.id)[0]}.packethost.net"
data "template_file" "worker_hardware_data" {
count = var.worker_count
template = file("${path.module}/hardware_data.tpl")
vars = {
id = packet_device.tink_worker[count.index].id
facility_code = packet_device.tink_worker[count.index].deployed_facility
plan_slug = packet_device.tink_worker[count.index].plan
address = "192.168.1.${count.index + 5}"
mac = packet_device.tink_worker[count.index].ports[1].mac
}
}

output "provisioner_ip" {
value = "${packet_device.tink-provisioner.network[0].address}"
}
resource "null_resource" "hardware_data" {
count = var.worker_count
depends_on = [null_resource.tink_directory]

connection {
type = "ssh"
user = var.ssh_user
host = packet_device.tink_provisioner.network[0].address
}

output "worker_mac_addr" {
value = "${packet_device.tink-worker.ports[1].mac}"
provisioner "file" {
content = data.template_file.worker_hardware_data[count.index].rendered
destination = "/root/tink/deploy/hardware-data-${count.index}.json"
}
}
15 changes: 15 additions & 0 deletions deploy/terraform/outputs.tf
@@ -0,0 +1,15 @@
output "provisioner_dns_name" {
value = "${split("-", packet_device.tink_provisioner.id)[0]}.packethost.net"
}

output "provisioner_ip" {
value = packet_device.tink_provisioner.network[0].address
}

output "worker_mac_addr" {
value = formatlist("%s", packet_device.tink_worker[*].ports[1].mac)
}

output "worker_sos" {
value = formatlist("%s@sos.%s.packet.net", packet_device.tink_worker[*].id, packet_device.tink_worker[*].deployed_facility)
}
32 changes: 32 additions & 0 deletions deploy/terraform/variables.tf
@@ -0,0 +1,32 @@
variable "packet_api_token" {
description = "Packet user api token"
type = string
}

variable "project_id" {
description = "Project ID"
type = string
}

variable "worker_count" {
Review comment (Member): I haven't checked tinkerbell/tinkerbell.org#129 to see if multiple workers will require any additional doc changes.

Reply (Contributor Author): No, they will just work! Nice! (A usage sketch follows this variable block.)

description = "Number of Workers"
type = number
default = 1
}
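
A hedged usage sketch for the multi-worker case discussed in the comment thread above; the value 2 is only illustrative, and the other required variables are assumed to come from a terraform.tfvars file or the environment:

# Provision two workers instead of the default one; each worker gets its own
# hardware-data-<index>.json rendered from hardware_data.tpl on the provisioner.
terraform apply -var "worker_count=2"
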
variable "facility" {
description = "Packet facility to provision in"
type = string
default = "sjc1"
}

variable "device_type" {
type = string
description = "Type of device to provision"
default = "c3.small.x86"
}

variable "ssh_user" {
description = "Username that will be used to transfer file from your local environment to the provisioner"
type = string
default = "root"
}
3 changes: 3 additions & 0 deletions deploy/terraform/versions.tf
@@ -0,0 +1,3 @@
terraform {
required_version = ">= 0.13"
}