auto set admin
Some checks failed
ci / build (push) Failing after 0s
ci / Tests & Checks (push) Failing after 1s

CJ_Clippy 2025-06-17 10:44:05 -08:00
parent ce83d79e3c
commit 5f3902d1e2
5 changed files with 117 additions and 40 deletions

@@ -97,4 +97,4 @@ https://github.com/sosedoff/pgweb/wiki/SSH-Gateway
### pgadmin
dotenvx run -f ./.env.production -- docker run -p 5050:80 --rm --init -it -e PGADMIN_DEFAULT_EMAIL -e PGADMIN_DEFAULT_PASSWORD -e PGADMIN_DISABLE_POSTFIX=1 dpage/pgadmin4
dotenvx run -f ./.env.production -- docker run -p 5050:80 --rm --init -it -e PGADMIN_DEFAULT_EMAIL -e PGADMIN_DEFAULT_PASSWORD -e PGADMIN_DISABLE_POSTFIX=1 dpage/pgadmin4

@@ -70,6 +70,7 @@ export function buildApp() {
return new Handlebars.SafeString(text);
});
Handlebars.registerHelper('getCdnUrl', function (s3Key) {
console.log(`getCdnUrl called with CDN_ORIGIN=${env.CDN_ORIGIN} and CDN_TOKEN_SECRET=${env.CDN_TOKEN_SECRET}`)
return signUrl(`${env.CDN_ORIGIN}/${s3Key}`, {
securityKey: env.CDN_TOKEN_SECRET,
expirationTime: constants.timeUnits.sevenDaysInSeconds,

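For context on what this helper produces, here is a rough usage sketch, not part of this commit: the template string, the s3Key value, and the URL shape in the comment are illustrative assumptions, and it presumes buildApp() has already registered getCdnUrl on the shared Handlebars instance.

```typescript
// Illustration only: render a template that uses the getCdnUrl helper
// registered above. Assumes buildApp() has already run.
import Handlebars from 'handlebars';

const render = Handlebars.compile('<img src="{{getCdnUrl s3Key}}">');
const html = render({ s3Key: 'thumbnails/example.webp' }); // made-up example key
// html should contain a tokenized URL rooted at CDN_ORIGIN, roughly:
// https://<CDN_ORIGIN>/thumbnails/example.webp?<signature params from signUrl>
console.log(html);
```
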
@@ -15,6 +15,10 @@ export const PatreonTiers = [
{ name: 'LuberPlusPlus', id: '8686022', role: 'supporterTier6' }
];
const admins = [
'20828619' // CJ_Clippy
]
const TierIdToRoleMap = new Map(
PatreonTiers.map(tier => [tier.id, tier.role])
);
@@ -22,6 +26,12 @@ const TierIdToRoleMap = new Map(
export function getRoles(data: PatreonUserResponse): string[] {
const roles = new Set<string>(['user']);
// Add admin role if the user's Patreon ID is in the admins list
if (admins.includes(data.data.id)) {
roles.add('admin');
}
const entitledTierIds = data.included
?.filter((item): item is PatreonIncluded => item.type === 'member')
.flatMap(member =>

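To make the new admin path concrete, a rough usage sketch (not from the commit): the response literal below is pared down to just the fields getRoles reads here, data.data.id and data.included, so it is cast loosely rather than typed as a full PatreonUserResponse, and the import path is hypothetical.

```typescript
// Illustration only: an id matching the admins list should yield the admin role.
import { getRoles } from './patreon'; // hypothetical import path for this module

const response = {
  data: { id: '20828619' },   // same id as the admins entry above
  included: []                // no member entitlements in this minimal example
} as any;                     // loose cast; the real PatreonUserResponse has more fields

const roles = getRoles(response);
// expected to contain 'user' and 'admin'; tier roles would come from `included`
console.log(roles);
```
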
@@ -1,5 +1,3 @@
import { User } from "../../generated/prisma";
import { Prisma } from "../../generated/prisma";
type UserWithRoles = { roles: { name: string }[] };

@@ -13,8 +13,8 @@ variable "ipfs_hosts" {
}
variable "bright_port" {
default = "4000"
variable "our_port" {
default = "5000"
}
variable "database_host" {
@@ -22,19 +22,12 @@ variable "database_host" {
}
variable "public_s3_endpoint" {
default = "https://futureporn-b2.b-cdn.net"
default = "https://fp-usc.b-cdn.net"
}
variable "patreon_redirect_uri" {
default = "https://bright.futureporn.net/auth/patreon/callback"
}
variable "site_url" {
default = "https://bright.futureporn.net"
}
variable "phx_host" {
default = "bright.futureporn.net"
default = "https://future.porn"
}
variable "aws_bucket" {
@@ -75,6 +68,9 @@ terraform {
source = "ansible/ansible"
version = "1.2.0"
}
bunnynet = {
source = "BunnyWay/bunnynet"
}
}
}
@@ -83,6 +79,9 @@ provider "vultr" {
api_key = local.envs.VULTR_API_KEY
}
provider "bunnynet" {
api_key = local.envs.BUNNY_API_KEY
}
# reserved IP lets us spin down the system and spin up without losing the IP reservation
resource "vultr_reserved_ip" "futureporn_v2_ip" {
@@ -99,12 +98,31 @@ resource "vultr_reserved_ip" "futureporn_tracker_ip" {
# Virtual Private Cloud for connecting many VPS together on a private network
# We use this network connection for app<->db comms.
resource "vultr_vpc2" "futureporn_vpc2" {
# We use this network connection for loadbalancer<->server<->worker<->db comms.
resource "vultr_vpc" "futureporn_vpc" {
description = "Futureporn V2 VPC"
region = "ord"
description = "Futureporn V2 VPC2"
}
resource "bunnynet_dns_record" "future_porn_a" {
for_each = zipmap(
range(length(vultr_instance.load_balancer)),
vultr_instance.load_balancer
)
zone = bunnynet_dns_zone.future_porn.id
name = "*"
type = "A"
value = each.value.main_ip
}
resource "bunnynet_dns_zone" "future_porn" {
domain = "future.porn"
}
# load balancing instance
resource "vultr_instance" "load_balancer" {
count = 1
@@ -116,30 +134,69 @@ resource "vultr_instance" "load_balancer" {
os_id = 1743
enable_ipv6 = true
label = "fp lb ${count.index}"
tags = ["futureporn", "load_balancer", "bright"]
tags = ["futureporn", "load_balancer", "our"]
ssh_key_ids = [local.envs.VULTR_SSH_KEY_ID]
user_data = base64encode(var.vps_user_data)
vpc2_ids = [
vultr_vpc2.futureporn_vpc2.id
vpc_ids = [
vultr_vpc.futureporn_vpc.id
]
reserved_ip_id = vultr_reserved_ip.futureporn_v2_ip.id
}
# vultr instance for running bright app
resource "vultr_instance" "bright" {
resource "bunnynet_dns_record" "future_porn_apex" {
zone = bunnynet_dns_zone.future_porn.id
name = ""
type = "A"
value = vultr_reserved_ip.futureporn_v2_ip.subnet
ttl = 3600
}
resource "bunnynet_dns_record" "www_future_porn" {
zone = bunnynet_dns_zone.future_porn.id
name = "www"
type = "CNAME"
value = "future.porn"
ttl = 3600
}
# vultr instance for running our app
resource "vultr_instance" "our_server" {
count = 1
hostname = "fp-bright-${count.index}"
hostname = "fp-our-server-${count.index}"
plan = "vc2-2c-4gb"
region = "ord"
backups = "disabled"
ddos_protection = "false"
os_id = 1743
enable_ipv6 = true
label = "fp bright ${count.index}"
tags = ["futureporn", "phoenix", "bright"]
label = "fp our server ${count.index}"
tags = ["futureporn", "our", "server"]
ssh_key_ids = [local.envs.VULTR_SSH_KEY_ID]
vpc2_ids = [
vultr_vpc2.futureporn_vpc2.id
vpc_ids = [
vultr_vpc.futureporn_vpc.id
]
user_data = base64encode(var.vps_user_data)
}
# vultr instance for running our app's background task runners
resource "vultr_instance" "our_worker" {
count = 1
hostname = "fp-our-worker-${count.index}"
plan = "vc2-2c-4gb"
region = "ord"
backups = "disabled"
ddos_protection = "false"
os_id = 1743
enable_ipv6 = true
label = "fp our worker ${count.index}"
tags = ["futureporn", "our", "worker"]
ssh_key_ids = [local.envs.VULTR_SSH_KEY_ID]
vpc_ids = [
vultr_vpc.futureporn_vpc.id
]
user_data = base64encode(var.vps_user_data)
}
@@ -147,7 +204,7 @@ resource "vultr_instance" "bright" {
# vultr instance meant for capturing VODs
resource "vultr_instance" "capture_vps" {
count = 0
count = 1
hostname = "fp-cap-${count.index}"
plan = "vc2-2c-2gb"
region = "ord"
@@ -155,7 +212,7 @@ resource "vultr_instance" "capture_vps" {
ddos_protection = "false"
os_id = 1743
enable_ipv6 = true
vpc2_ids = [vultr_vpc2.futureporn_vpc2.id]
vpc_ids = [vultr_vpc.futureporn_vpc.id]
label = "fp capture ${count.index}"
tags = ["futureporn", "capture"]
ssh_key_ids = [local.envs.VULTR_SSH_KEY_ID]
@@ -177,7 +234,7 @@ resource "vultr_instance" "capture_vps" {
# desktopuser = "cj_clippy"
# }
# enable_ipv6 = true
# vpc2_ids = [vultr_vpc2.futureporn_vpc2.id]
# vpc_ids = [vultr_vpc.futureporn_vpc.id]
# label = "fp capture ${count.index}"
# tags = ["futureporn", "capture"]
# ssh_key_ids = [local.envs.VULTR_SSH_KEY_ID]
@@ -198,7 +255,7 @@ resource "vultr_instance" "database" {
ddos_protection = "false"
os_id = 1743
enable_ipv6 = true
vpc2_ids = [vultr_vpc2.futureporn_vpc2.id]
vpc_ids = [vultr_vpc.futureporn_vpc.id]
label = "fp database ${count.index}"
tags = ["futureporn", "database"]
ssh_key_ids = [local.envs.VULTR_SSH_KEY_ID]
@@ -214,7 +271,7 @@ resource "vultr_instance" "tracker" {
ddos_protection = "false"
os_id = 1743
enable_ipv6 = true
vpc2_ids = [vultr_vpc2.futureporn_vpc2.id]
vpc_ids = [vultr_vpc.futureporn_vpc.id]
label = "fp tracker ${count.index}"
tags = ["futureporn", "tracker"]
ssh_key_ids = [local.envs.VULTR_SSH_KEY_ID]
@@ -234,8 +291,6 @@ resource "ansible_host" "ipfs_vps" {
}
resource "ansible_host" "capture_vps" {
for_each = { for idx, host in vultr_instance.capture_vps : idx => host }
name = each.value.hostname
@@ -272,10 +327,11 @@ resource "ansible_host" "database" {
}
}
resource "ansible_host" "bright" {
for_each = { for idx, host in vultr_instance.bright : idx => host }
resource "ansible_host" "our_server" {
for_each = { for idx, host in vultr_instance.our_server : idx => host }
name = each.value.hostname
groups = ["bright"]
groups = ["our"]
variables = {
ansible_host = each.value.main_ip
internal_ip = each.value.internal_ip
@@ -283,6 +339,18 @@ resource "ansible_host" "bright" {
}
}
resource "ansible_host" "our_worker" {
for_each = { for idx, host in vultr_instance.our_worker : idx => host }
name = each.value.hostname
groups = ["our"]
variables = {
ansible_host = each.value.main_ip
internal_ip = each.value.internal_ip
vultr_instance_id = each.value.id
}
}
resource "ansible_host" "tracker" {
for_each = { for idx, host in vultr_instance.tracker : idx => host }
name = each.value.hostname
@@ -299,8 +367,8 @@ resource "ansible_group" "capture" {
}
resource "ansible_group" "bright" {
name = "bright"
resource "ansible_group" "our" {
name = "our"
}
resource "ansible_group" "tracker" {
@@ -321,7 +389,7 @@ resource "ansible_group" "futureporn" {
"load_balancer",
"database",
"capture",
"bright",
"our",
"tracker"
]
}