// load secrets from file
// @see https://stackoverflow.com/a/67653301/1004931
// @see https://grep.app/search?q=for+tuple+in+regexall%28
// @see https://github.com/lrmendess/open-source-datalake/blob/main/minio.tf
locals {
  envs = { for tuple in regexall("(.*)=(.*)", file("../.env.production")) : tuple[0] => sensitive(tuple[1]) }
}

variable "ipfs_hosts" {
  description = "List of IP addresses for IPFS nodes"
  type        = list(string)
  default     = ["161.97.186.203", "38.242.193.246"]
}


variable "bright_port" {
  default = "4000"
}

variable "database_host" {
  default = "10.2.128.4"
}

variable "public_s3_endpoint" {
  default = "https://futureporn-b2.b-cdn.net"
}

variable "patreon_redirect_uri" {
  default = "https://bright.futureporn.net/auth/patreon/callback"
}

variable "site_url" {
  default = "https://bright.futureporn.net"
}

variable "phx_host" {
  default = "bright.futureporn.net"
}

variable "aws_bucket" {
  default = "futureporn"
}

variable "aws_region" {
  default = "us-west-000"
}

variable "aws_host" {
  default = "s3.us-west-000.backblazeb2.com"
}

variable "vps_user_data" {
  # most packages are installed using ansible, but we do use cloud-config
  # to install python3, an ansible dependency
  default = <<-EOT
      #cloud-config
      package_update: true
      packages:
        - python3
        - fail2ban

      # @see https://gist.github.com/NatElkins/20880368b797470f3bc6926e3563cb26 for more hardening ideas

    EOT
}


terraform {
  required_providers {
    vultr = {
      source  = "vultr/vultr"
      version = "2.23.1"
    }
    ansible = {
      source  = "ansible/ansible"
      version = "1.2.0"
    }
  }
}


provider "vultr" {
  api_key = local.envs.VULTR_API_KEY
}


# reserved IP lets us spin down the system and spin up without losing the IP reservation
resource "vultr_reserved_ip" "futureporn_v2_ip" {
  label   = "futureporn-v2"
  region  = "ord"
  ip_type = "v4"
}

resource "vultr_reserved_ip" "futureporn_tracker_ip" {
  label   = "futureporn-tracker"
  region  = "ord"
  ip_type = "v4"
}


# Virtual Private Cloud for connecting many VPS together on a private network
# We use this network connection for app<->db comms.
resource "vultr_vpc2" "futureporn_vpc2" {
  region      = "ord"
  description = "Futureporn V2 VPC2"
}

# load balancing instance
resource "vultr_instance" "load_balancer" {
  count           = 1
  hostname        = "fp-lb-${count.index}"
  plan            = "vc2-1c-2gb"
  region          = "ord"
  backups         = "disabled"
  ddos_protection = "false"
  os_id           = 1743
  enable_ipv6     = true
  label           = "fp lb ${count.index}"
  tags            = ["futureporn", "load_balancer", "bright"]
  ssh_key_ids     = [local.envs.VULTR_SSH_KEY_ID]
  user_data       = base64encode(var.vps_user_data)
  vpc2_ids = [
    vultr_vpc2.futureporn_vpc2.id
  ]
  reserved_ip_id = vultr_reserved_ip.futureporn_v2_ip.id
}

# vultr instance for running bright app
resource "vultr_instance" "bright" {
  count           = 1
  hostname        = "fp-bright-${count.index}"
  plan            = "vc2-2c-4gb"
  region          = "ord"
  backups         = "disabled"
  ddos_protection = "false"
  os_id           = 1743
  enable_ipv6     = true
  label           = "fp bright ${count.index}"
  tags            = ["futureporn", "phoenix", "bright"]
  ssh_key_ids     = [local.envs.VULTR_SSH_KEY_ID]
  vpc2_ids = [
    vultr_vpc2.futureporn_vpc2.id
  ]
  user_data = base64encode(var.vps_user_data)
}


# vultr instance meant for capturing VODs
resource "vultr_instance" "capture_vps" {
  count           = 0
  hostname        = "fp-cap-${count.index}"
  plan            = "vc2-2c-2gb"
  region          = "ord"
  backups         = "disabled"
  ddos_protection = "false"
  os_id           = 1743
  enable_ipv6     = true
  vpc2_ids        = [vultr_vpc2.futureporn_vpc2.id]
  label           = "fp capture ${count.index}"
  tags            = ["futureporn", "capture"]
  ssh_key_ids     = [local.envs.VULTR_SSH_KEY_ID]
  user_data       = base64encode(var.vps_user_data)
}


# vultr instance with a GPU. experimental.
# resource "vultr_instance" "capture_vps" {
#   count           = 0
#   hostname        = "fp-cap-${count.index}"
#   plan            = "vcg-a16-2c-8g-2vram"
#   region          = "ord"
#   backups         = "disabled"
#   ddos_protection = "false"
#   # os_id           = 1743
#   image_id = "ubuntu-xfce"
#   app_variables = {
#     desktopuser = "cj_clippy"
#   }
#   enable_ipv6 = true
#   vpc2_ids    = [vultr_vpc2.futureporn_vpc2.id]
#   label       = "fp capture ${count.index}"
#   tags        = ["futureporn", "capture"]
#   ssh_key_ids = [local.envs.VULTR_SSH_KEY_ID]
#   user_data   = base64encode(var.vps_user_data)
# }


resource "vultr_instance" "database" {
  count    = 1
  hostname = "fp-db-${count.index}"
  plan     = "vc2-1c-2gb"
  region   = "ord"
  backups  = "enabled"
  backups_schedule {
    hour = "2"
    type = "daily"
  }
  ddos_protection = "false"
  os_id           = 1743
  enable_ipv6     = true
  vpc2_ids        = [vultr_vpc2.futureporn_vpc2.id]
  label           = "fp database ${count.index}"
  tags            = ["futureporn", "database"]
  ssh_key_ids     = [local.envs.VULTR_SSH_KEY_ID]
  user_data       = base64encode(var.vps_user_data)
}

resource "vultr_instance" "tracker" {
  count           = 0
  hostname        = "fp-tracker-${count.index}"
  plan            = "vc2-1c-2gb"
  region          = "ord"
  backups         = "disabled"
  ddos_protection = "false"
  os_id           = 1743
  enable_ipv6     = true
  vpc2_ids        = [vultr_vpc2.futureporn_vpc2.id]
  label           = "fp tracker ${count.index}"
  tags            = ["futureporn", "tracker"]
  ssh_key_ids     = [local.envs.VULTR_SSH_KEY_ID]
  user_data       = base64encode(var.vps_user_data)
  reserved_ip_id  = vultr_reserved_ip.futureporn_tracker_ip.id
}

resource "ansible_host" "ipfs_vps" {
  for_each = { for idx, host in var.ipfs_hosts : idx => host }
  name     = each.value
  groups   = ["ipfs"]

  variables = {
    ansible_user = "root"
    ansible_host = each.value
  }
}




resource "ansible_host" "capture_vps" {
  for_each = { for idx, host in vultr_instance.capture_vps : idx => host }
  name     = each.value.hostname
  groups   = ["capture"] # Groups this host is part of.

  variables = {
    # Connection vars.
    ansible_user = "root"
    ansible_host = each.value.main_ip

    # Custom vars that we might use in roles/tasks.
    # hostname = "web1"
    # fqdn     = "web1.example.com"
  }
}

resource "ansible_host" "load_balancer" {
  for_each = { for idx, host in vultr_instance.load_balancer : idx => host }
  name     = each.value.hostname
  groups   = ["load_balancer"]
  variables = {
    ansible_host = each.value.main_ip
    internal_ip  = each.value.internal_ip
  }
}

resource "ansible_host" "database" {
  for_each = { for idx, host in vultr_instance.database : idx => host }
  name     = each.value.hostname
  groups   = ["database"]
  variables = {
    ansible_host = each.value.main_ip
    internal_ip  = each.value.internal_ip
  }
}

resource "ansible_host" "bright" {
  for_each = { for idx, host in vultr_instance.bright : idx => host }
  name     = each.value.hostname
  groups   = ["bright"]
  variables = {
    ansible_host      = each.value.main_ip
    internal_ip       = each.value.internal_ip
    vultr_instance_id = each.value.id
  }
}

resource "ansible_host" "tracker" {
  for_each = { for idx, host in vultr_instance.tracker : idx => host }
  name     = each.value.hostname
  groups   = ["tracker"]
  variables = {
    ansible_host      = each.value.main_ip
    internal_ip       = each.value.internal_ip
    vultr_instance_id = each.value.id
  }
}

resource "ansible_group" "capture" {
  name = "capture"
}


resource "ansible_group" "bright" {
  name = "bright"
}

resource "ansible_group" "tracker" {
  name = "tracker"
}

resource "ansible_group" "load_balancer" {
  name = "load_balancer"
}

resource "ansible_group" "database" {
  name = "database"
}

resource "ansible_group" "futureporn" {
  name = "futureporn"
  children = [
    "load_balancer",
    "database",
    "capture",
    "bright",
    "tracker"
  ]
}




# user_data = base64encode(<<-EOT
#       #cloud-config

#       package_update: true
#       packages:
#         - git
#         - mosh
#         - mg
#         - screen
#         - tree
#         - ncdu
#         - pipx
#         - ffmpeg
#         - fd-find
#         - npm

#       runcmd:
#         - git clone https://github.com/insanity54/dotfiles /root/dotfiles
#         - cp /root/dotfiles/.screenrc /root/
#         - curl -fsSL https://getcroc.schollz.com | bash
#         - curl -fsSL get.docker.com | bash
#         - ufw allow 60000:61000/udp
#         - pipx install yt-dlp
#         - pipx ensurepath
#         - git clone https://github.com/insanity54/voddo /root/voddo
#         - curl -fsSL https://gitea.futureporn.net/futureporn/fp/raw/branch/main/packages/scripts/thumbnail-generator.sh > ~/.local/bin/thumbnail-generator.sh
#         - chmod +x ~/.local/bin/thumbnail-generator.sh
#         - curl -fsSL https://github.com/Backblaze/B2_Command_Line_Tool/releases/download/v4.3.1/b2-linux > ~/.local/bin/b2
#         - chmod +x ~/.local/bin/b2
#         - export DIR=/usr/local/bin; curl https://raw.githubusercontent.com/jesseduffield/lazydocker/master/scripts/install_update_linux.sh | bash
#         - curl -fsSL https://dist.ipfs.tech/kubo/v0.33.2/kubo_v0.33.2_linux-amd64.tar.gz > ~/kubo_v0.33.2_linux-amd64.tar.gz
#         - tar xvzf ~/kubo_v0.33.2_linux-amd64.tar.gz
#         - ~/kubo/install.sh
#         - ufw allow 8080
#         - ufw allow 8081
#         - ufw allow 4001

#     EOT
#     )