# Load secrets from a local env file so they never get committed.
# @see https://stackoverflow.com/a/67653301/1004931
# @see https://grep.app/search?q=for+tuple+in+regexall%28
# @see https://github.com/lrmendess/open-source-datalake/blob/main/minio.tf
locals {
  # Parse KEY=VALUE pairs out of .env.production.
  # FIX: the previous pattern "(.*)=(.*)" was greedy, so a value containing
  # '=' (base64 secrets, connection strings, ...) was split at the LAST '=',
  # corrupting both key and value. "([^=]+)=(.*)" splits at the FIRST '='
  # and keeps the remainder of the line intact as the value.
  envs = {
    for tuple in regexall("([^=]+)=(.*)", file("../.env.production")) :
    tuple[0] => sensitive(tuple[1])
  }
}

variable "ipfs_hosts" {
  description = "List of IP addresses for IPFS nodes"
  type        = list(string)
  default     = ["161.97.186.203", "38.242.193.246"]
}

variable "our_port" {
  description = "Port the future.porn app listens on"
  default     = "5000"
}

variable "database_host" {
  description = "Private (VPC) address of the database host"
  default     = "10.2.128.4"
}

variable "public_s3_endpoint" {
  description = "Public CDN endpoint fronting the S3 bucket"
  default     = "https://fp-usc.b-cdn.net"
}

variable "site_url" {
  description = "Canonical public site URL"
  default     = "https://future.porn"
}

variable "aws_bucket" {
  description = "S3-compatible bucket name (Backblaze B2)"
  default     = "futureporn"
}

variable "aws_region" {
  description = "Backblaze B2 region"
  default     = "us-west-000"
}

variable "aws_host" {
  description = "Backblaze B2 S3 endpoint host"
  default     = "s3.us-west-000.backblazeb2.com"
}

variable "vps_user_data" {
  # Most packages are installed using ansible, but we do use cloud-config
  # to install python3, an ansible dependency.
  description = "cloud-init user data shared by all VPS instances"
  default     = <<-EOT
  #cloud-config
  package_update: true
  packages:
    - python3
    - fail2ban
  # @see https://gist.github.com/NatElkins/20880368b797470f3bc6926e3563cb26 for more hardening ideas
  EOT
}

terraform {
  required_providers {
    vultr = {
      source  = "vultr/vultr"
      version = "2.26.0"
    }
    ansible = {
      source  = "ansible/ansible"
      version = "1.2.0"
    }
    # NOTE(review): unpinned — consider adding a version constraint so
    # `terraform init` is reproducible.
    bunnynet = {
      source = "BunnyWay/bunnynet"
    }
  }
}

provider "vultr" {
  api_key = local.envs.VULTR_API_KEY
}

provider "bunnynet" {
  api_key = local.envs.BUNNY_API_KEY
}

# Reserved IP lets us spin down the system and spin up without losing the IP reservation.
resource "vultr_reserved_ip" "futureporn_v2_ip" {
  label   = "futureporn-v2"
  region  = "ord"
  ip_type = "v4"
}

resource "vultr_reserved_ip" "futureporn_tracker_ip" {
  label   = "futureporn-tracker"
  region  = "ord"
  ip_type = "v4"
}

# Virtual Private Cloud for connecting many VPS together on a private network
# We use this network connection for loadbalancer<->server<->worker<->db comms.
resource "vultr_vpc" "futureporn_vpc" {
  description = "Futureporn V2 VPC"
  region      = "ord"
}

# Wildcard A record, one per app instance (round-robin when count > 1).
resource "bunnynet_dns_record" "future_porn_a" {
  for_each = zipmap(
    range(length(vultr_instance.our_vps)),
    vultr_instance.our_vps
  )
  zone  = bunnynet_dns_zone.future_porn.id
  name  = "*"
  type  = "A"
  value = each.value.main_ip
}

resource "bunnynet_dns_zone" "future_porn" {
  domain = "future.porn"
}

# load balancing instance
# resource "vultr_instance" "load_balancer" {
#   count           = 1
#   hostname        = "fp-lb-${count.index}"
#   plan            = "vc2-1c-2gb"
#   region          = "ord"
#   backups         = "disabled"
#   ddos_protection = "false"
#   os_id           = 1743
#   enable_ipv6     = true
#   label           = "fp lb ${count.index}"
#   tags            = ["futureporn", "load_balancer", "our"]
#   ssh_key_ids     = [local.envs.VULTR_SSH_KEY_ID]
#   user_data       = base64encode(var.vps_user_data)
#   vpc_ids = [
#     vultr_vpc.futureporn_vpc.id
#   ]
#   reserved_ip_id = vultr_reserved_ip.futureporn_v2_ip.id
# }

# Apex (bare domain) record pointing at the first app instance.
resource "bunnynet_dns_record" "future_porn_apex" {
  zone  = bunnynet_dns_zone.future_porn.id
  name  = ""
  type  = "A"
  value = vultr_instance.our_vps[0].main_ip
  ttl   = 3600
}

resource "bunnynet_dns_record" "www_future_porn" {
  zone  = bunnynet_dns_zone.future_porn.id
  name  = "www"
  type  = "CNAME"
  value = "future.porn"
  ttl   = 3600
}

# vultr instance for running our app
# resource "vultr_instance" "our_server" {
#   count           = 1
#   hostname        = "fp-our-server-${count.index}"
#   plan            = "vc2-2c-4gb"
#   region          = "ord"
#   backups         = "disabled"
#   ddos_protection = "false"
#   os_id           = 1743
#   enable_ipv6     = true
#   label           = "fp our server ${count.index}"
#   tags            = ["futureporn", "our", "server"]
#   ssh_key_ids     = [local.envs.VULTR_SSH_KEY_ID]
#   vpc_ids = [
#     vultr_vpc.futureporn_vpc.id
#   ]
#   user_data = base64encode(var.vps_user_data)
# }

# vultr instance for running our app's background task runners
# resource "vultr_instance" "our_worker" {
#   count           = 1
#   hostname        = "fp-our-worker-${count.index}"
#   plan            = "vc2-2c-4gb"
#   region          = "ord"
#   backups         = "disabled"
#   ddos_protection = "false"
#   os_id           = 1743
#   enable_ipv6     = true
#   label           = "fp our worker ${count.index}"
#   tags            = ["futureporn", "our", "worker"]
#   ssh_key_ids     = [local.envs.VULTR_SSH_KEY_ID]
#   vpc_ids = [
#     vultr_vpc.futureporn_vpc.id
#   ]
#   user_data = base64encode(var.vps_user_data)
# }

# vultr instance meant for capturing VODs
resource "vultr_instance" "capture_vps" {
  count           = 1
  hostname        = "fp-cap-${count.index}"
  plan            = "vc2-2c-2gb"
  region          = "ord"
  backups         = "disabled"
  ddos_protection = "false"
  os_id           = 1743
  enable_ipv6     = true
  vpc_ids         = [vultr_vpc.futureporn_vpc.id]
  label           = "fp capture ${count.index}"
  tags            = ["futureporn", "capture"]
  ssh_key_ids     = [local.envs.VULTR_SSH_KEY_ID]
  user_data       = base64encode(var.vps_user_data)
}

# vultr instance meant for running our future.porn app
resource "vultr_instance" "our_vps" {
  count           = 1
  hostname        = "fp-our-${count.index}"
  plan            = "vc2-2c-2gb"
  region          = "ord"
  backups         = "disabled"
  ddos_protection = "false"
  os_id           = 1743
  enable_ipv6     = true
  vpc_ids         = [vultr_vpc.futureporn_vpc.id]
  label           = "fp our ${count.index}"
  # FIX: tags previously read ["futureporn", "capture"] — a copy-paste from
  # capture_vps above. This is the app ("our") instance, matching its
  # hostname/label and the "our" ansible group elsewhere in this file.
  tags            = ["futureporn", "our"]
  ssh_key_ids     = [local.envs.VULTR_SSH_KEY_ID]
  user_data       = base64encode(var.vps_user_data)
}

# vultr instance with a GPU. experimental.
# resource "vultr_instance" "capture_vps" {
#   count           = 0
#   hostname        = "fp-cap-${count.index}"
#   plan            = "vcg-a16-2c-8g-2vram"
#   region          = "ord"
#   backups         = "disabled"
#   ddos_protection = "false"
#   # os_id = 1743
#   image_id = "ubuntu-xfce"
#   app_variables = {
#     desktopuser = "cj_clippy"
#   }
#   enable_ipv6 = true
#   vpc_ids     = [vultr_vpc.futureporn_vpc.id]
#   label       = "fp capture ${count.index}"
#   tags        = ["futureporn", "capture"]
#   ssh_key_ids = [local.envs.VULTR_SSH_KEY_ID]
#   user_data   = base64encode(var.vps_user_data)
# }

# resource "vultr_instance" "database" {
#   count    = 1
#   hostname = "fp-db-${count.index}"
#   plan     = "vc2-1c-2gb"
#   region   = "ord"
#   backups  = "enabled"
#   backups_schedule {
#     hour = "2"
#     type = "daily"
#   }
#   ddos_protection = "false"
#   os_id           = 1743
#   enable_ipv6     = true
#   vpc_ids         = [vultr_vpc.futureporn_vpc.id]
#   label           = "fp database ${count.index}"
#   tags            = ["futureporn", "database"]
#   ssh_key_ids     = [local.envs.VULTR_SSH_KEY_ID]
#   user_data       = base64encode(var.vps_user_data)
# }

# BitTorrent tracker instance. count = 0 keeps it defined but not deployed;
# bump the count to bring it up behind its reserved IP.
resource "vultr_instance" "tracker" {
  count           = 0
  hostname        = "fp-tracker-${count.index}"
  plan            = "vc2-1c-2gb"
  region          = "ord"
  backups         = "disabled"
  ddos_protection = "false"
  os_id           = 1743
  enable_ipv6     = true
  vpc_ids         = [vultr_vpc.futureporn_vpc.id]
  label           = "fp tracker ${count.index}"
  tags            = ["futureporn", "tracker"]
  ssh_key_ids     = [local.envs.VULTR_SSH_KEY_ID]
  user_data       = base64encode(var.vps_user_data)
  reserved_ip_id  = vultr_reserved_ip.futureporn_tracker_ip.id
}

# resource "ansible_host" "ipfs_vps" {
#   for_each = { for idx, host in var.ipfs_hosts : idx => host }
#   name     = each.value
#   groups   = ["ipfs"]
#   variables = {
#     ansible_user = "root"
#     ansible_host = each.value
#   }
# }

# resource "ansible_host" "capture_vps" {
#   for_each = { for idx, host in vultr_instance.capture_vps : idx => host }
#   name     = each.value.hostname
#   groups   = ["capture"] # Groups this host is part of.
#   variables = {
#     # Connection vars.
#     ansible_user = "root"
#     ansible_host = each.value.main_ip
#     # Custom vars that we might use in roles/tasks.
#     # hostname = "web1"
#     # fqdn     = "web1.example.com"
#   }
# }

# resource "ansible_host" "load_balancer" {
#   for_each = { for idx, host in vultr_instance.load_balancer : idx => host }
#   name     = each.value.hostname
#   groups   = ["load_balancer"]
#   variables = {
#     ansible_host = each.value.main_ip
#     internal_ip  = each.value.internal_ip
#   }
# }

# resource "ansible_host" "database" {
#   for_each = { for idx, host in vultr_instance.database : idx => host }
#   name     = each.value.hostname
#   groups   = ["database"]
#   variables = {
#     ansible_host = each.value.main_ip
#     internal_ip  = each.value.internal_ip
#   }
# }

# resource "ansible_host" "our_server" {
#   for_each = { for idx, host in vultr_instance.our_server : idx => host }
#   name     = each.value.hostname
#   groups   = ["our-server"]
#   variables = {
#     ansible_host      = each.value.main_ip
#     internal_ip       = each.value.internal_ip
#     vultr_instance_id = each.value.id
#   }
# }

# resource "ansible_host" "our_worker" {
#   for_each = { for idx, host in vultr_instance.our_worker : idx => host }
#   name     = each.value.hostname
#   groups   = ["our-worker"]
#   variables = {
#     ansible_host      = each.value.main_ip
#     internal_ip       = each.value.internal_ip
#     vultr_instance_id = each.value.id
#   }
# }

# resource "ansible_host" "tracker" {
#   for_each = { for idx, host in vultr_instance.tracker : idx => host }
#   name     = each.value.hostname
#   groups   = ["tracker"]
#   variables = {
#     ansible_host      = each.value.main_ip
#     internal_ip       = each.value.internal_ip
#     vultr_instance_id = each.value.id
#   }
# }

# Register each app instance in the Ansible inventory, exposing the
# connection address plus the ids Ansible roles need (instance + VFS cache).
resource "ansible_host" "our" {
  for_each = { for idx, host in vultr_instance.our_vps : idx => host }
  name     = each.value.hostname
  groups   = ["our"]
  variables = {
    ansible_host         = each.value.main_ip
    internal_ip          = each.value.internal_ip
    vultr_instance_id    = each.value.id
    vultr_vfs_storage_id = vultr_virtual_file_system_storage.vfs.id
  }
}

# Shared virtual-file-system storage used as a cache by the app instances.
resource "vultr_virtual_file_system_storage" "vfs" {
  label   = "fp-vfs-cache"
  size_gb = 200
  region  = "ord"
  tags    = ["our", "vfs"]
}

# resource "ansible_host" "periphery" {
#   for_each = { for idx, host in vultr_instance.our_vps : idx => host }
#   name     = each.value.hostname
#   groups   = ["periphery"]
#   variables = {
#     ansible_host      = each.value.main_ip
#     internal_ip       = each.value.internal_ip
#     vultr_instance_id = each.value.id
#   }
# }

# resource "ansible_group" "capture" {
#   name = "capture"
# }

# resource "ansible_group" "our-server" {
#   name = "our-server"
# }

# resource "ansible_group" "our-worker" {
#   name = "our-worker"
# }

# resource "ansible_group" "tracker" {
#   name = "tracker"
# }

resource "ansible_group" "our" {
  name = "our"
}

# resource "ansible_group" "periphery" {
#   name = "periphery"
# }

# resource "ansible_group" "load_balancer" {
#   name = "load_balancer"
# }

# resource "ansible_group" "database" {
#   name = "database"
# }

# Umbrella group; re-enable children here as their resources come back.
resource "ansible_group" "futureporn" {
  name = "futureporn"
  children = [
    # "load_balancer",
    # "database",
    # "capture",
    # "our-server",
    # "our-worker",
    # "periphery",
    # "tracker",
    "our"
  ]
}

# user_data = base64encode(<<-EOT
# #cloud-config
# package_update: true
# packages:
#   - git
#   - mosh
#   - mg
#   - screen
#   - tree
#   - ncdu
#   - pipx
#   - ffmpeg
#   - fd-find
#   - npm
# runcmd:
#   - git clone https://github.com/insanity54/dotfiles /root/dotfiles
#   - cp /root/dotfiles/.screenrc /root/
#   - curl -fsSL https://getcroc.schollz.com | bash
#   - curl -fsSL get.docker.com | bash
#   - ufw allow 60000:61000/udp
#   - pipx install yt-dlp
#   - pipx ensurepath
#   - git clone https://github.com/insanity54/voddo /root/voddo
#   - curl -fsSL https://gitea.futureporn.net/futureporn/fp/raw/branch/main/packages/scripts/thumbnail-generator.sh > ~/.local/bin/thumbnail-generator.sh
#   - chmod +x ~/.local/bin/thumbnail-generator.sh
#   - curl -fsSL https://github.com/Backblaze/B2_Command_Line_Tool/releases/download/v4.3.1/b2-linux > ~/.local/bin/b2
#   - chmod +x ~/.local/bin/b2
#   - export DIR=/usr/local/bin; curl https://raw.githubusercontent.com/jesseduffield/lazydocker/master/scripts/install_update_linux.sh | bash
#   - curl -fsSL https://dist.ipfs.tech/kubo/v0.33.2/kubo_v0.33.2_linux-amd64.tar.gz > ~/kubo_v0.33.2_linux-amd64.tar.gz
#   - tar xvzf ~/kubo_v0.33.2_linux-amd64.tar.gz
#   - ~/kubo/install.sh
#   - ufw allow 8080
#   - ufw allow 8081
#   - ufw allow 4001
# EOT
# )