Compare commits
No commits in common. "8522586745904d8d3f1dd84bf3ae7166c959370e" and "317dfad5545379c3c5ab1cfaecea201042e57c77" have entirely different histories.
8522586745 ... 317dfad554

Makefile (11 changes)
@@ -7,17 +7,12 @@ namespaces:
 secrets:
 	./scripts/k8s-secrets.sh
 
-flux-prod:
-	./scripts/flux-bootstrap-prod.sh
+flux:
+	./scripts/flux-bootstrap.sh
 
-flux-staging:
-	./scripts/flux-bootstrap-staging.sh
-
 dev: kind namespaces secrets chisel velero
 
-prod: namespaces secrets velero flux-prod
+prod: namespaces secrets velero flux
 
-staging: namespaces secrets velero flux-staging
-
 velero:
 	./scripts/velero-create.sh
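Note (not part of the diff): with this consolidation the environment bring-up reduces to two targets, roughly:

# assumes kubectl/flux are already pointed at the intended cluster
make dev    # kind namespaces secrets chisel velero
make prod   # namespaces secrets velero flux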
(file name not captured)

@@ -42,6 +42,9 @@ spec:
       url: https://strapi.piko.sbtp.xyz
       certIssuer: letsencrypt-staging
       hostname: strapi.futureporn.svc.cluster.local
+      ingressClassName: ngrok
+    ngrok:
+      hostname: grateful-engaging-cicada.ngrok-free.app
     realtime:
       imageName: gitea.futureporn.net/futureporn/realtime:latest
       adminEmail: cj@futureporn.net
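A quick way to confirm the rendered Ingress picked up the ngrok class and the reserved hostname (a sketch; the Ingress object name depends on how the chart names it):

kubectl -n futureporn get ingress -o wide
kubectl -n futureporn get ingress -o jsonpath='{range .items[*]}{.metadata.name}{"\t"}{.spec.ingressClassName}{"\n"}{end}'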
apps/base/windmill/.gitkeep (new empty file)
(file name not captured; file deleted, 17 lines)

@@ -1,17 +0,0 @@
-apiVersion: helm.toolkit.fluxcd.io/v2
-kind: HelmRelease
-metadata:
-  name: fp
-  namespace: futureporn
-spec:
-  values:
-    next:
-      certIssuer: letsencrypt-prod
-      hostname: next.futureporn.net
-    scout:
-      certIssuer: letsencrypt-prod
-      cdnBucketUrl: https://futureporn-b2.b-cdn.net
-      s3BucketName: futureporn-b2
-    strapi:
-      url: https://portal.futureporn.net
-      hostname: portal.futureporn.net
(file name not captured)

@@ -1,11 +1,11 @@
 apiVersion: kustomize.config.k8s.io/v1beta1
 kind: Kustomization
-namespace: futureporn
 resources:
+  - ../base/podinfo
   - ../base/temporal
   - ../base/fp
 patches:
-  - path: fp-values.yaml
+  - path: podinfo-values.yaml
     target:
       kind: HelmRelease
-      name: fp
+      name: podinfo
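A minimal check for this overlay change, assuming it lives at apps/production/ (the file header was not captured): render it locally and confirm the podinfo patch applies.

# kubectl's built-in kustomize; run from the repo root
kubectl kustomize apps/production | grep -A2 'name: podinfo'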
apps/production/podinfo-values.yaml (new file, 21 lines)
@@ -0,0 +1,21 @@
+apiVersion: helm.toolkit.fluxcd.io/v2
+kind: HelmRelease
+metadata:
+  name: podinfo
+  namespace: futureporn
+spec:
+  chart:
+    spec:
+      version: ">=1.0.0"
+  values:
+    ingress:
+      hosts:
+        - host: podinfo.sbtp.xyz
+          paths:
+            - path: /
+              pathType: Prefix
+              backend:
+                service:
+                  name: podinfo
+                  port:
+                    number: 9898
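Once Flux reconciles this, a hedged way to verify the patched HelmRelease and its ingress:

flux get helmreleases podinfo -n futureporn
kubectl -n futureporn get ingress -l app.kubernetes.io/name=podinfo   # label selector is an assumption from common chart conventions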
apps/staging/chisel/chisel.yaml (new file, 25 lines)
@@ -0,0 +1,25 @@
+
+apiVersion: source.toolkit.fluxcd.io/v1beta2
+kind: GitRepository
+metadata:
+  name: chisel-operator
+  namespace: futureporn
+spec:
+  interval: 5m
+  url: https://github.com/FyraLabs/chisel-operator
+  ref:
+    branch: master
+---
+apiVersion: kustomize.toolkit.fluxcd.io/v1beta2
+kind: Kustomization
+metadata:
+  name: chisel-operator
+  namespace: futureporn
+spec:
+  interval: 10m
+  targetNamespace: futureporn
+  sourceRef:
+    kind: GitRepository
+    name: chisel-operator
+  path: "./kustomize"
+  prune: true
apps/staging/chisel/kustomization.yaml (new file, 5 lines)
@@ -0,0 +1,5 @@
+apiVersion: kustomize.config.k8s.io/v1beta1
+kind: Kustomization
+namespace: futureporn
+resources:
+  - chisel.yaml
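A sketch for checking that the chisel-operator source and Kustomization reconcile (names and namespace taken from the files above):

flux get sources git chisel-operator -n futureporn
flux get kustomizations chisel-operator -n futureporn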
(file name not captured; file deleted, 17 lines)

@@ -1,17 +0,0 @@
-apiVersion: helm.toolkit.fluxcd.io/v2
-kind: HelmRelease
-metadata:
-  name: fp
-  namespace: futureporn
-spec:
-  values:
-    next:
-      certIssuer: letsencrypt-staging
-      hostname: next.sbtp.xyz
-    scout:
-      certIssuer: letsencrypt-staging
-      cdnBucketUrl: https://fp-dev.b-cdn.net
-      s3BucketName: fp-dev
-    strapi:
-      url: https://strapi.sbtp.xyz
-      hostname: strapi.sbtp.xyz
(file name not captured)

@@ -3,14 +3,7 @@ kind: Kustomization
 namespace: futureporn
 resources:
   - ../base/podinfo
-  - ../base/temporal
-  - ../base/fp
 patches:
   - path: podinfo-values.yaml
     target:
       kind: HelmRelease
-      name: podinfo
-  - path: fp-values.yaml
-    target:
-      kind: HelmRelease
-      name: fp
(file name not captured)

@@ -12,7 +12,7 @@ spec:
   values:
     ingress:
       hosts:
-        - host: podinfo.sbtp.xyz
+        - host: podinfo.staging
           paths:
             - path: /
               pathType: ImplementationSpecific
(file name not captured)

@@ -4,7 +4,7 @@ metadata:
   name: apps
   namespace: flux-system
 spec:
-  interval: 1m0s
+  interval: 10m0s
   dependsOn:
     - name: infra-configs
   sourceRef:
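To confirm the longer reconcile interval landed on the apps Kustomization (a sketch; requires the Flux CRDs and CLI):

flux get kustomizations apps -n flux-system
kubectl -n flux-system get kustomization apps -o jsonpath='{.spec.interval}{"\n"}'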
(file name not captured)

@@ -14,7 +14,6 @@ spec:
   path: ./infrastructure/controllers
   prune: true
   wait: true
-
 ---
 apiVersion: kustomize.toolkit.fluxcd.io/v1
 kind: Kustomization
(file name not captured)

@@ -6,7 +6,7 @@ metadata:
 spec:
   acme:
     # Replace the email address with your own contact email
-    email: cj@futureporn.net
+    email: fluxcdbot@users.noreply.github.com
     # The server is replaced in /clusters/production/infrastructure.yaml
     server: https://acme-staging-v02.api.letsencrypt.org/directory
     privateKeySecretRef:
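cert-manager should update the ACME account registration when the issuer's email changes; a hedged spot-check after it reconciles:

kubectl get clusterissuer letsencrypt-staging -o jsonpath='{.spec.acme.email}{"\n"}'
kubectl get clusterissuer letsencrypt-staging -o jsonpath='{.status.conditions[?(@.type=="Ready")].status}{"\n"}'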
(file name not captured)

@@ -3,7 +3,6 @@ apiVersion: v1
 kind: Namespace
 metadata:
   name: cert-manager
-
 ---
 apiVersion: source.toolkit.fluxcd.io/v1
 kind: HelmRepository
@@ -13,7 +12,6 @@ metadata:
 spec:
   interval: 24h
   url: https://charts.jetstack.io
-
 ---
 apiVersion: helm.toolkit.fluxcd.io/v2
 kind: HelmRelease
(file name not captured; file deleted, 12 lines)

@@ -1,12 +0,0 @@
-{
-  "name": "scripts",
-  "type": "module",
-  "version": "1.0.0",
-  "description": "",
-  "keywords": [],
-  "author": "",
-  "license": "Unlicense",
-  "dependencies": {
-    "dotenv": "^16.4.5"
-  }
-}
packages/infra/pnpm-lock.yaml (generated; deleted, 23 lines)
@@ -1,23 +0,0 @@
-lockfileVersion: '9.0'
-
-settings:
-  autoInstallPeers: true
-  excludeLinksFromLockfile: false
-
-importers:
-
-  .:
-    dependencies:
-      dotenv:
-        specifier: ^16.4.5
-        version: 16.4.5
-
-packages:
-
-  dotenv@16.4.5:
-    resolution: {integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==}
-    engines: {node: '>=12'}
-
-snapshots:
-
-  dotenv@16.4.5: {}
(file name not captured; file deleted, 84 lines)

@@ -1,84 +0,0 @@
-#!/usr/bin/env node
-
-
-import dotenv from 'dotenv'
-dotenv.config({ path: '../../.env' })
-
-const apiV2Base = 'https://api.vultr.com/v2'
-
-
-if (!process.env.VULTR_API_KEY) throw new Error('VULTR_API_KEY is missing in env');
-
-
-
-async function deleteOrphanedLoadBalancers() {
-  console.log('getting load balancers')
-  const loadBalancersRes = await fetch(`${apiV2Base}/load-balancers`, {
-    method: 'GET',
-    headers: {
-      'authorization': `Bearer ${process.env.VULTR_API_KEY}`
-    }
-  })
-  const loadBalancerJson = await loadBalancersRes.json()
-  const orphanedLoadBalancers = loadBalancerJson.load_balancers.filter((lb) => (lb.instances.length === 0))
-  console.log(`found ${orphanedLoadBalancers.length} orphaned load balancers.`)
-  console.log('waiting 1 second')
-  await new Promise((resolve) => { setTimeout(resolve, 1000) })
-
-  for (const lb of orphanedLoadBalancers) {
-    console.log(`deleting load balancer ${lb.id}`)
-
-    const deleteLoadBalancerRes = await fetch(`https://api.vultr.com/v2/load-balancers/${lb.id}`, {
-      method: 'DELETE',
-      headers: {
-        'Authorization': `Bearer ${process.env.VULTR_API_KEY}`
-      }
-    })
-
-    console.log('waiting 1 second')
-    await new Promise((resolve) => { setTimeout(resolve, 1000) })
-  }
-}
-
-
-async function deleteOrphanedBlockStorage() {
-  console.log('getting block storage')
-
-  const blocksRes = await fetch(`${apiV2Base}/blocks`, {
-    method: 'GET',
-    headers: {
-      'authorization': `Bearer ${process.env.VULTR_API_KEY}`
-    }
-  })
-  const blocksJson = await blocksRes.json()
-
-  const orphanedBlocks = blocksJson.blocks.filter((b) => b.attached_to_instance === '')
-  console.log(`found ${orphanedBlocks.length} orphaned block storages`)
-
-  console.log('waiting 1 second')
-  await new Promise((resolve) => { setTimeout(resolve, 1000) })
-  for (const block of orphanedBlocks) {
-    console.log(`deleting block ${block.id}`)
-
-    const deleteBlocksRes = await fetch(`${apiV2Base}/blocks/${block.id}`, {
-      method: 'DELETE',
-      headers: {
-        'Authorization': `Bearer ${process.env.VULTR_API_KEY}`
-      }
-    })
-
-    console.log('waiting 1 second')
-    await new Promise((resolve) => { setTimeout(resolve, 1000) })
-
-  }
-
-
-}
-
-
-async function main() {
-  await deleteOrphanedLoadBalancers()
-  await deleteOrphanedBlockStorage()
-}
-
-main()
(file name not captured; file deleted, 17 lines)

@@ -1,17 +0,0 @@
-#!/bin/bash
-
-## this way is annoying because deployment asks for git password
-# flux bootstrap git \
-# --kubeconfig /home/cj/.kube/vke.yaml \
-# --url=https://gitea.futureporn.net/futureporn/fp.git \
-# --branch=main \
-# --username=cj_clippy \
-# --token-auth=true \
-# --path=clusters/staging
-
-## this way is more automatic although it does ask for yes/no confirmation that the ssh key has repo access
-flux bootstrap git \
-  --url="ssh://git@gitea.futureporn.net:2222/futureporn/fp" \
-  --branch=main \
-  --path="clusters/staging" \
-  --private-key-file=/home/cj/.ssh/fp-flux
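With the staging script gone and flux-bootstrap-prod.sh renamed to flux-bootstrap.sh (see below), the remaining bootstrap presumably looks something like this sketch (flags reused from the deleted script; the clusters/production path is an assumption):

flux bootstrap git \
  --url="ssh://git@gitea.futureporn.net:2222/futureporn/fp" \
  --branch=main \
  --path="clusters/production" \
  --private-key-file=/home/cj/.ssh/fp-flux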
scripts/flux-bootstrap-prod.sh → scripts/flux-bootstrap.sh (renamed; Normal file → Executable file; 0 changes)
scripts/postgres-restore.sh (5 changes; Executable file → Normal file)
@@ -1,6 +1,5 @@
 #!/bin/bash
 
-dbname=2024-06-19_22-24-03-futureporn-db.psql
 
 ## drop futureporn_db
 kubectl -n futureporn exec postgres -- psql -U postgres --command "DROP DATABASE futureporn_db;"
@@ -27,6 +26,6 @@ kubectl -n futureporn exec postgres -- psql -U postgres --command "\
 # kubectl exec -i POD_NAME -- pg_restore -U USERNAME -C -d DATABASE < dump.sql
 
 
-kubectl -n futureporn cp /home/cj/Documents/futureporn-meta/backups/$dbname postgres:/tmp/db.psql
+kubectl -n futureporn cp /home/cj/Documents/futureporn-meta/backups/2024-06-18_20-35-38-futureporn-db.psql postgres:/tmp/db.psql
 kubectl -n futureporn exec -i postgres -- pg_restore -U postgres -d futureporn_db /tmp/db.psql
 # kubectl -n futureporn exec -ti db-postgresql-0 -- rm /tmp/db.psql
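A hedged spot-check after the restore completes (table names depend on the dump contents):

kubectl -n futureporn exec postgres -- psql -U postgres -d futureporn_db -c '\dt'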