begin refactor for monorepo build

CJ_Clippy 2024-07-09 18:34:23 -08:00
parent a60d0f0821
commit 358e484a12
458 changed files with 11511 additions and 34015 deletions


@@ -6,7 +6,7 @@ namespaces:
 	./scripts/k8s-namespaces.sh
 secrets:
-	dotenvx run -f .env.$(ENV) -- ./scripts/k8s-secrets.sh
+	dotenvx run -f .env.${ENV} -- ./scripts/k8s-secrets.sh
 flux:
 	./scripts/flux-bootstrap.sh
@@ -31,6 +31,9 @@ exoscale:
 kind:
 	./scripts/kind-with-local-registry.sh
+kindload:
+	./scripts/kind-load.sh
 chisel:
 	./scripts/k8s-chisel-operator.sh

Tiltfile

@@ -1,4 +1,9 @@
-# Tiltfile for working with Next and Strapi locally
+## Tiltfile for working with Futureporn cluster locally
+
+secret_settings(
+  disable_scrub=True
+)
 
 ## cert-manager slows down Tilt updates so I prefer to keep it commented unless I specifically need to test certs
 load('ext://cert_manager', 'deploy_cert_manager')
@@ -7,13 +12,10 @@ deploy_cert_manager(
   version='v1.15.1',
 )
 
-default_registry('localhost:5001')
-
 load('ext://helm_remote', 'helm_remote')
-# load('ext://dotenv', 'dotenv')
-# dotenv(fn='.env')
-# allow_k8s_contexts('vke-e41885d3-7f93-4f01-bfaa-426f20bf9f3f')
+load('ext://dotenv', 'dotenv')
+dotenv(fn='.env.development')
 
 # helm_remote(
@@ -113,55 +115,34 @@ helm_remote(
 k8s_yaml(helm(
   './charts/fp',
-  values=['./charts/fp/values-dev.yaml'],
+  values=['./charts/fp/values.yaml'],
 ))
 
-# k8s_yaml(helm(
-#   './charts/trigger',
-#   set=[
-#     'trigger.name=trigger',
-#     'trigger.replicaCount=2',
-#     'trigger.image.tag=self-host-rc.2',
-#     'trigger.image.pullPolicy=IfNotPresent',
-#     'trigger.env.ENCRYPTION_KEY=%s' % os.getenv('TRIGGER_ENCRYPTION_KEY'),
-#     'trigger.env.MAGIC_LINK_SECRET=%s' % os.getenv('TRIGGER_MAGIC_LINK_SECRET'),
-#     'trigger.env.DATABASE_URL=%s' % os.getenv('TRIGGER_DATABASE_URL'),
-#     'trigger.env.LOGIN_ORIGIN=%s' % os.getenv('TRIGGER_LOGIN_ORIGIN'),
-#     'trigger.env.APP_ORIGIN=%s' % os.getenv('TRIGGER_APP_ORIGIN'),
-#     'trigger.env.PORT=%s' % os.getenv('TRIGGER_PORT'),
-#     'trigger.env.REMIX_APP_PORT=%s' % os.getenv('TRIGGER_REMIX_APP_PORT'),
-#     'trigger.env.REDIS_HOST=redis-master.futureporn.svc.cluster.local',
-#     'trigger.env.REDIS_PORT=6379',
-#     'trigger.ingress.nginx.enabled=false',
-#     'trigger.ingress.enabled=false',
-#     'postgres.enabled=false'
-#   ]
-# ))
-# k8s_resource(
-#   workload='trigger',
-#   port_forwards=['3030'],
-# )
-
 # docker_build('fp/link2cid', './packages/link2cid')
 docker_build(
   'fp/strapi',
   '.',
-  only=['./packages/strapi'],
+  only=['./packages/strapi', './packages/types'],
   dockerfile='./d.strapi.dev.dockerfile',
   live_update=[
     sync('./packages/strapi', '/app')
   ]
 )
-# docker_build(
-#   'fp/bot',
-#   '.',
-#   only=['./packages/bot'],
-#   dockerfile='./d.bot.dockerfile',
-#   live_update=[
-#     sync('./packages/bot', '/app')
-#   ]
-# )
+docker_build(
+  'fp/bot',
+  '.',
+  only=['./pnpm-lock.yaml', './package.json', './packages/types', './packages/bot'],
+  dockerfile='./d.packages.dockerfile',
+  target='bot-dev',
+  live_update=[
+    sync('./packages/bot', '/app'),
+    run('cd /app && pnpm i', trigger=['./packages/bot/package.json', './packages/bot/pnpm-lock.yaml'])
+  ]
+)
@@ -170,7 +151,7 @@ docker_build(
 load('ext://uibutton', 'cmd_button')
 cmd_button('postgres:create',
-  argv=['dotenvx', 'run', '-f', '.env.development', '--', 'sh', './scripts/postgres-create.sh'],
+  argv=['./scripts/postgres-create.sh'],
   resource='postgres',
   icon_name='dataset',
   text='create (empty) databases',
@@ -212,9 +193,9 @@ cmd_button('temporal-web:namespace',
 docker_build(
   'fp/next',
   '.',
-  only=['./pnpm-lock.yaml', './package.json', './packages/next', './ca/letsencrypt-stg-root-x1.pem'],
+  # only=['./pnpm-lock.yaml', './package.json', './packages/next', './packages/types', './ca/letsencrypt-stg-root-x1.pem'],
   dockerfile='d.next.dockerfile',
-  target='dev',
+  target='next',
   build_args={
     'NEXT_PUBLIC_STRAPI_URL': 'https://strapi.fp.sbtp.xyz'
   },
@@ -227,7 +208,7 @@ docker_build(
 docker_build(
   'fp/scout-manager',
   '.',
-  only=['./pnpm-lock.yaml', './package.json', './packages/scout', './packages/next', './ca/letsencrypt-stg-root-x1.pem'],
+  only=['./pnpm-lock.yaml', './package.json', './packages/scout', './packages/types', './ca/letsencrypt-stg-root-x1.pem'],
   dockerfile='d.packages.dockerfile',
   target='scout-manager',
   live_update=[
@@ -238,13 +219,30 @@ docker_build(
   # entrypoint='pnpm tsx watch ./src/index.ts'
 )
 
+docker_build(
+  'fp/boop',
+  '.',
+  # only=['./pnpm-lock.yaml', './package.json', './packages/scout', './packages/types', './ca/letsencrypt-stg-root-x1.pem'],
+  dockerfile='d.packages.dockerfile',
+  target='boop',
+  live_update=[
+    sync('./packages/boop', '/app'),
+    # run('cd /app && pnpm i', trigger=['./packages/boop/package.json', './packages/boop/pnpm-lock.yaml']),
+  ],
+  # entrypoint='pnpm nodemon --ext js,ts,json,yaml --exec node --no-warnings=ExperimentalWarning --loader ts-node/esm ./src/index.ts'
+  # entrypoint='pnpm tsx watch ./src/index.ts'
+)
+
 docker_build(
   'fp/scout-worker',
   '.',
-  only=['./pnpm-lock.yaml', './package.json', './packages/scout', './packages/next', './ca/letsencrypt-stg-root-x1.pem'],
+  only=['./pnpm-lock.yaml', './package.json', './packages/scout', './packages/types', './ca/letsencrypt-stg-root-x1.pem'],
   # ignore=['./packages/next'], # I wish I could use this ignore to ignore file changes in this dir, but that's not how it works
   dockerfile='d.packages.dockerfile',
-  target='scout:worker',
+  target='scout-worker',
   live_update=[
     # idk if this run() is effective
     # run('cd /app && pnpm i', trigger=['./packages/scout/package.json', './packages/scout/pnpm-lock.yaml']),
@@ -259,6 +257,10 @@ docker_build(
   # this entrypoint is a godsend. It lets me restart the node app (fast) without having to rebuild the docker container (slow)
   entrypoint='pnpm nodemon --ext js,ts,json,yaml --exec node --no-warnings=ExperimentalWarning --loader ts-node/esm ./src/temporal/worker.ts'
 )
 
 # k8s_resource(
 #   workload='kubernetes-ingress-controller-manager',
 #   links=[
@@ -336,6 +338,11 @@ k8s_resource(
   labels=['backend'],
 )
 
+# k8s_resource(
+#   workload='',
+# )
+
 # k8s_resource(
 #   workload='pgadmin',
 #   port_forwards=['5050'],
@@ -387,8 +394,8 @@ helm_remote(
   namespace='futureporn',
   version='0.37.0',
   set=[
-    'admintools.image.tag=1.24.1-tctl-1.18.1-cli-0.12.0',
-    'web.image.tag=2.27.2',
+    'admintools.image.tag=1.24.2-tctl-1.18.1-cli-0.13.0',
+    'web.image.tag=2.28.0',
     'prometheus.enabled=false',
     'grafana.enabled=false',
     'elasticsearch.enabled=false',
@@ -421,7 +428,7 @@ k8s_resource(
 ])
 k8s_resource(
   workload='temporal-frontend',
-  labels='temporal',
+  port_forwards=['7233'], labels='temporal',
   resource_deps=[
     'postgres',
     'strapi'
@@ -485,24 +492,18 @@ k8s_resource(
   labels=['backend']
 )
 
-# k8s_resource(
-#   workload='bot',
-#   labels=['backend']
-# )
+k8s_resource(
+  workload='bot',
+  labels=['backend'],
+  resource_deps=['strapi', 'temporal-web'],
+)
 
-# k8s_resource(
-#   workload='cert-manager',
-#   labels='cert-manager'
-# )
-# k8s_resource(
-#   workload='cert-manager-webhook',
-#   labels='cert-manager'
-# )
-# k8s_resource(
-#   workload='cert-manager-cainjector',
-#   labels='cert-manager'
-# )
-# k8s_resource(
-#   workload='cert-manager-startupapicheck',
-#   labels='cert-manager'
-# )
+# k8s_resource(
+#   workload='trigger',
+#   labels=['backend'],
+#   port_forwards=['3030:3000'],
+#   resource_deps=['postgres', 'redis-master'],
+#   links=[
+#     link('http://localhost:3030')
+#   ],
+# )


@ -0,0 +1,15 @@
---
apiVersion: v1
kind: Pod
metadata:
name: boop
namespace: futureporn
labels:
app.kubernetes.io/name: boop
spec:
containers:
- name: boop
image: fp/boop
resources: {}
restartPolicy: OnFailure


@ -0,0 +1,40 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: bot
namespace: futureporn
labels:
app: bot
spec:
replicas: {{ .Values.scout.worker.replicas }}
selector:
matchLabels:
app: bot
template:
metadata:
labels:
app: bot
spec:
containers:
- name: bot
image: "{{ .Values.bot.imageName }}"
imagePullPolicy: Always
ports:
- containerPort: 8080
env:
- name: DISCORD_CHANNEL_ID
value: "{{ .Values.bot.discordChannelId }}"
- name: DISCORD_TOKEN
valueFrom:
secretKeyRef:
name: discord
key: token
resources:
limits:
cpu: "500m"
memory: "512Mi"
requests:
cpu: "250m"
memory: "256Mi"


@@ -14,7 +14,7 @@ spec:
             - name: HOSTNAME
              value: 0.0.0.0
            - name: NEXT_PUBLIC_UPPY_COMPANION_URL
-             value: "{{ .Values.uppy.hostname }}"
+             value: "{{ .Values.uppy.url }}"
           ports:
             - name: web
               containerPort: 3000


@@ -98,8 +98,10 @@ spec:
               value: "{{ .Values.uppy.s3.bucket }}"
             - name: COMPANION_AWS_REGION
               value: "{{ .Values.uppy.s3.region }}"
-            - name: COMPANION_AWS_PREFIX
-              value: "{{ .Values.uppy.s3.prefix }}"
+            - name: COMPANION_AWS_ENDPOINT
+              value: "{{ .Values.uppy.s3.endpoint }}"
+            # - name: COMPANION_AWS_PREFIX
+            #   value: "{{ .Values.uppy.s3.prefix }}"
           ## COMPANION_OAUTH_DOMAIN is only necessary if using a different domain per each uppy pod.
           ## We don't need this because we are load balancing the pods so they all use the same domain name.


@ -1,31 +0,0 @@
environment: production
storageClassName: vultr-block-storage-hdd
link2cid:
imageName: gitea.futureporn.net/futureporn/link2cid:latest
scout:
manager:
imageName: gitea.futureporn.net/futureporn/scout-manager:latest
worker:
imageName: gitea.futureporn.net/futureporn/scout-worker:latest
replicas: 2
pubsubServerUrl: https://realtime.futureporn.net/faye
cdnBucketUrl: https://futureporn-b2.b-cdn.net
s3BucketName: futureporn
next:
imageName: gitea.futureporn.net/futureporn/next:latest
hostname: next.sbtp.xyz
capture:
imageName: gitea.futureporn.net/futureporn/capture:latest
strapi:
imageName: sjc.vultrcr.com/fpcontainers/strapi
port: 1339
url: https://portal.futureporn.net
hostname: strapi.sbtp.xyz
ingressClassName: traefik
managedBy: Helm
adminEmail: cj@futureporn.net
extraArgs:
- --dns01-recursive-nameservers-only
- --dns01-recursive-nameservers=8.8.8.8:53,1.1.1.1:53
certManager:
issuer: letsencrypt-production


@@ -38,12 +38,22 @@ uppy:
   hostname: uppy.fp.sbtp.xyz
   imageName: fp/uppy
   s3:
-    endpoint: s3.us-west-000.backblazeb2.com
-    bucket: futureporn-usc
+    endpoint: https://s3.us-west-000.backblazeb2.com
+    bucket: fp-usc-dev
     region: us-west-000
     prefix: s3
   clientOrigins: next.fp.sbtp.xyz
   domain: uppy.fp.sbtp.xyz
   uploadUrls: https://uppy.fp.sbtp.xyz/files
+  url: https://uppy.fp.sbtp.xyz
 certManager:
   issuer: letsencrypt-staging
+bot:
+  discordChannelId: "1185024773231759481"
+  imageName: fp/bot
+trigger:
+  imageName: ghcr.io/triggerdotdev/trigger.dev:self-host-rc.3
+  worker:
+    replicas: 2
+  webapp:
+    replicas: 1

charts/trigger/.gitignore

@ -0,0 +1 @@
charts/


@ -0,0 +1,24 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/
node_modules/


@ -0,0 +1,2 @@
digest: sha256:e439e4b30ba18357defec97ba080973743a4724c423b78913990409f78f1ebd8
generated: "2023-10-20T14:22:57.044126+05:30"

charts/trigger/Chart.yaml

@ -0,0 +1,24 @@
apiVersion: v2
name: trigger
description: A Helm chart for a full Trigger application stack
# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"

charts/trigger/README.md

@ -0,0 +1,3 @@
# Trigger.dev Helm Chart
@see https://github.com/triggerdotdev/trigger.dev/tree/main/helm-charts

charts/trigger/package-lock.json

@ -0,0 +1,203 @@
{
"name": "helm-charts",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "helm-charts",
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"@bitnami/readme-generator-for-helm": "^2.6.0"
}
},
"node_modules/@bitnami/readme-generator-for-helm": {
"version": "2.6.0",
"resolved": "https://registry.npmjs.org/@bitnami/readme-generator-for-helm/-/readme-generator-for-helm-2.6.0.tgz",
"integrity": "sha512-LcByNCryaC2OJExL9rnhyFJ18+vrZu1gVoN2Z7j/HI42EjV4kLgT4G1KEPNnrKbls9HvozBqMG+sKZIDh0McFg==",
"dependencies": {
"commander": "^7.1.0",
"dot-object": "^2.1.4",
"lodash": "^4.17.21",
"markdown-table": "^2.0.0",
"yaml": "^2.0.0-3"
},
"bin": {
"readme-generator": "bin/index.js"
}
},
"node_modules/balanced-match": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
},
"node_modules/brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"node_modules/commander": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz",
"integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==",
"engines": {
"node": ">= 10"
}
},
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
},
"node_modules/dot-object": {
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/dot-object/-/dot-object-2.1.4.tgz",
"integrity": "sha512-7FXnyyCLFawNYJ+NhkqyP9Wd2yzuo+7n9pGiYpkmXCTYa8Ci2U0eUNDVg5OuO5Pm6aFXI2SWN8/N/w7SJWu1WA==",
"dependencies": {
"commander": "^4.0.0",
"glob": "^7.1.5"
},
"bin": {
"dot-object": "bin/dot-object"
}
},
"node_modules/dot-object/node_modules/commander": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz",
"integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==",
"engines": {
"node": ">= 6"
}
},
"node_modules/fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="
},
"node_modules/glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
"dependencies": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"node_modules/inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"node_modules/markdown-table": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-2.0.0.tgz",
"integrity": "sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==",
"dependencies": {
"repeat-string": "^1.0.0"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/wooorm"
}
},
"node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
"dependencies": {
"wrappy": "1"
}
},
"node_modules/path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/repeat-string": {
"version": "1.6.1",
"resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz",
"integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==",
"engines": {
"node": ">=0.10"
}
},
"node_modules/wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
},
"node_modules/yaml": {
"version": "2.3.4",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz",
"integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==",
"engines": {
"node": ">= 14"
}
},
"readme-generator-for-helm": {
"version": "2.6.1",
"extraneous": true,
"license": "Apache-2.0",
"dependencies": {
"commander": "^7.1.0",
"dot-object": "^2.1.4",
"lodash": "^4.17.21",
"markdown-table": "^2.0.0",
"yaml": "^2.0.0-3"
},
"bin": {
"readme-generator": "bin/index.js"
},
"devDependencies": {
"eslint": "^7.24.0",
"eslint-config-airbnb-base": "^14.2.1",
"eslint-plugin-import": "^2.22.1",
"jest": "^29.2.1",
"temp": "^0.9.4"
}
}
}
}


@ -0,0 +1,15 @@
{
"name": "helm-charts",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"generate-docs": "readme-generator --readme README.md --values values.yaml"
},
"keywords": [],
"author": "",
"license": "ISC",
"devDependencies": {
"@bitnami/readme-generator-for-helm": "^2.6.0"
}
}


@ -0,0 +1,71 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "trigger.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "trigger.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create unified labels for trigger components
*/}}
{{- define "trigger.common.matchLabels" -}}
app: {{ template "trigger.name" . }}
release: {{ .Release.Name }}
{{- end -}}
{{- define "trigger.common.metaLabels" -}}
chart: {{ template "trigger.chart" . }}
heritage: {{ .Release.Service }}
{{- end -}}
{{- define "trigger.common.labels" -}}
{{ include "trigger.common.matchLabels" . }}
{{ include "trigger.common.metaLabels" . }}
{{- end -}}
{{- define "trigger.labels" -}}
{{ include "trigger.matchLabels" . }}
{{ include "trigger.common.metaLabels" . }}
{{- end -}}
{{- define "trigger.matchLabels" -}}
component: {{ .Values.trigger.name | quote }}
{{ include "trigger.common.matchLabels" . }}
{{- end -}}
{{/*
Create a fully qualified postgresql name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "trigger.postgresql.hostname" -}}
{{- if .Values.postgresql.fullnameOverride -}}
{{- .Values.postgresql.fullnameOverride | trunc 63 | trimSuffix "-" -}}
{{- else -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- if contains $name .Release.Name -}}
{{- printf "%s-%s" .Release.Name .Values.postgresql.name | trunc 63 | trimSuffix "-" -}}
{{- else -}}
{{- printf "%s-%s-%s" .Release.Name $name .Values.postgresql.name | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{- end -}}
{{- end -}}
{{/*
Create the postgresql connection string.
*/}}
{{- define "trigger.postgresql.connectionString" -}}
{{- $host := include "trigger.postgresql.hostname" . -}}
{{- $port := 5432 -}}
{{- $username := .Values.postgresql.global.postgresql.postgresqlUsername | default "postgres" -}}
{{- $password := .Values.postgresql.global.postgresql.postgresqlPassword | default "password" -}}
{{- $database := .Values.postgresql.global.postgresql.postgresqlDatabase | default "trigger" -}}
{{- $connectionString := printf "postgresql://%s:%s@%s:%d/%s" $username $password $host $port $database -}}
{{- printf "%s" $connectionString -}}
{{- end -}}
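For reference, with this chart's default values (username `postgres`, password `password`, database `trigger`, and `postgresql.fullnameOverride: "postgresql"`), the helper above renders `postgresql://postgres:password@postgresql:5432/trigger`. A minimal TypeScript sketch of the same assembly logic, added only as an illustration of what the template produces:

```ts
// Illustration only: mirrors the "trigger.postgresql.connectionString" helper above.
type PostgresValues = {
  host: string;       // result of "trigger.postgresql.hostname" (fullnameOverride: "postgresql")
  username?: string;  // template default: "postgres"
  password?: string;  // template default: "password"
  database?: string;  // template default: "trigger"
};

function connectionString({ host, username = 'postgres', password = 'password', database = 'trigger' }: PostgresValues): string {
  const port = 5432; // hard-coded in the helper
  return `postgresql://${username}:${password}@${host}:${port}/${database}`;
}

console.log(connectionString({ host: 'postgresql' }));
// -> postgresql://postgres:password@postgresql:5432/trigger
```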


@ -0,0 +1,43 @@
{{ if .Values.ingress.enabled }}
{{- $ingress := .Values.ingress }}
{{- if and $ingress.ingressClassName (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }}
{{- if not (hasKey $ingress.annotations "kubernetes.io/ingress.class") }}
{{- $_ := set $ingress.annotations "kubernetes.io/ingress.class" $ingress.ingressClassName}}
{{- end }}
{{- end }}
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: trigger-ingress
{{- with $ingress.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
{{- if and $ingress.ingressClassName (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }}
ingressClassName: {{ $ingress.ingressClassName | default "nginx" }}
{{- end }}
{{- if $ingress.tls }}
tls:
{{- range $ingress.tls }}
- hosts:
{{- range .hosts }}
- {{ . | quote }}
{{- end }}
secretName: {{ .secretName }}
{{- end }}
{{- end }}
rules:
- http:
paths:
- path: {{ $ingress.trigger.path }}
pathType: {{ $ingress.trigger.pathType }}
backend:
service:
name: {{ include "trigger.name" . }}
port:
number: 3000
{{- if $ingress.hostName }}
host: {{ $ingress.hostName }}
{{- end }}
{{ end }}


@ -0,0 +1,95 @@
{{- $trigger := .Values.trigger -}}
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "trigger.name" . }}
namespace: futureporn
annotations:
updatedAt: {{ now | date "2006-01-01 MST 15:04:05" | quote }}
{{- with $trigger.deploymentAnnotations }}
{{- toYaml . | nindent 4 }}
{{- end }}
labels:
{{- include "trigger.labels" . | nindent 4 }}
spec:
replicas: {{ $trigger.replicaCount }}
selector:
matchLabels:
{{- include "trigger.matchLabels" . | nindent 6 }}
template:
metadata:
labels:
{{- include "trigger.matchLabels" . | nindent 8 }}
annotations:
updatedAt: {{ now | date "2006-01-01 MST 15:04:05" | quote }}
{{- with $trigger.podAnnotations }}
{{- toYaml . | nindent 8 }}
{{- end }}
spec:
{{- with $trigger.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
{{- end }}
containers:
- name: {{ $trigger.name }}
image: "{{ $trigger.image.repository }}:{{ $trigger.image.tag | default "latest" }}"
imagePullPolicy: {{ $trigger.image.pullPolicy }}
ports:
- name: http
containerPort: 3000
protocol: TCP
readinessProbe:
httpGet:
path: /
port: 3000
envFrom:
- secretRef:
name: {{ $trigger.kubeSecretRef | default (include "trigger.name" .) }}
{{- if $trigger.resources }}
resources: {{- toYaml $trigger.resources | nindent 12 }}
{{- end }}
---
apiVersion: v1
kind: Service
metadata:
name: trigger
labels:
annotations:
spec:
type: {{ $trigger.service.type }}
selector:
{{- include "trigger.matchLabels" . | nindent 8 }}
ports:
- port: 3000
targetPort: 3000
protocol: TCP
{{- if eq $trigger.service.type "NodePort" }}
nodePort: {{ $trigger.service.nodePort }}
{{- end }}
---
{{ if not $trigger.kubeSecretRef }}
apiVersion: v1
kind: Secret
metadata:
name: {{ include "trigger.name" . }}
annotations:
"helm.sh/resource-policy": "keep"
type: Opaque
stringData:
{{- $requiredVars := dict "MAGIC_LINK_SECRET" (randAlphaNum 32 | lower)
"SESSION_SECRET" (randAlphaNum 32 | lower)
"ENCRYPTION_KEY" (randAlphaNum 32 | lower)
"DIRECT_URL" (include "trigger.postgresql.connectionString" .)
"DATABASE_URL" (include "trigger.postgresql.connectionString" .) }}
{{- $secretObj := (lookup "v1" "Secret" .Release.Namespace (include "trigger.name" .)) | default dict }}
{{- $secretData := (get $secretObj "data") | default dict }}
{{ range $key, $value := .Values.trigger.env }}
{{- $default := get $requiredVars $key -}}
{{- $current := get $secretData $key | b64dec -}}
{{- $v := $value | default ($current | default $default) -}}
{{ $key }}: {{ $v | quote }}
{{ end -}}
{{- end }}
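The `stringData` block above resolves each key with a three-way precedence: an explicitly set `.Values.trigger.env` value wins, otherwise the value already stored in the existing Secret (fetched via `lookup`) is kept, otherwise the generated default (e.g. `randAlphaNum 32`) is used. A hedged TypeScript sketch of that precedence, for illustration only:

```ts
// Illustration of the precedence in the Secret template above:
// values entry -> current Secret value -> generated default.
function resolveSecretValue(
  valueFromValues: string | undefined,    // .Values.trigger.env[KEY]
  currentSecretValue: string | undefined, // lookup()'d and base64-decoded value
  generatedDefault: string | undefined    // e.g. a random 32-char string for ENCRYPTION_KEY
): string | undefined {
  // Helm's `default` treats "" as unset, which || reproduces for strings.
  return valueFromValues || currentSecretValue || generatedDefault;
}

// Effect: leaving ENCRYPTION_KEY empty in values.yaml keeps the previously
// generated key across `helm upgrade` instead of rotating it every deploy.
```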

charts/trigger/values.yaml

@ -0,0 +1,276 @@
# Default values for helm-charts.
# This is a YAML-formatted file.
## @section Common parameters
##
## @param nameOverride Override release name
##
nameOverride: ""
## @param fullnameOverride Override release fullname
##
fullnameOverride: ""
## @section Trigger.dev parameters
##
trigger:
## @param trigger.name
name: trigger
## @param trigger.fullnameOverride trigger fullnameOverride
##
fullnameOverride: ""
## @param trigger.podAnnotations trigger pod annotations
##
podAnnotations: {}
## @param trigger.deploymentAnnotations trigger deployment annotations
##
deploymentAnnotations: {}
## @param trigger.replicaCount trigger replica count
##
replicaCount: 2
## trigger image parameters
##
image:
## @param trigger.image.repository trigger image repository
##
repository: ghcr.io/triggerdotdev/trigger.dev
## @param trigger.image.tag trigger image tag
##
tag: "latest"
## @param trigger.image.pullPolicy trigger image pullPolicy
##
pullPolicy: Always
## @param trigger.resources.limits.memory container memory limit [(docs)](https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/)
## @param trigger.resources.requests.cpu container CPU requests [(docs)](https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/)
##
resources:
limits:
memory: 800Mi
requests:
cpu: 250m
## @param trigger.affinity Backend pod affinity
##
affinity: {}
## @param trigger.kubeSecretRef trigger secret resource reference name
##
kubeSecretRef: ""
## trigger service
##
service:
## @param trigger.service.annotations trigger service annotations
##
annotations: {}
## @param trigger.service.type trigger service type
##
type: ClusterIP
## @param trigger.service.nodePort trigger service nodePort (used if above type is `NodePort`)
##
nodePort: ""
## @skip trigger.env
##
env:
ENCRYPTION_KEY: ""
MAGIC_LINK_SECRET: ""
SESSION_SECRET: ""
LOGIN_ORIGIN: ""
APP_ORIGIN: ""
DIRECT_URL: ""
DATABASE_URL: ""
FROM_EMAIL: ""
REPLY_TO_EMAIL: ""
RESEND_API_KEY: ""
AUTH_GITHUB_CLIENT_ID: ""
AUTH_GITHUB_CLIENT_SECRET: ""
## @section Postgres parameters
## Documentation: https://github.com/bitnami/charts/tree/main/bitnami/postgresql-ha
##
postgresql:
## @param postgresql.enabled Enable Postgres
##
enabled: true
## @param postgresql.name Name used to build variables (deprecated)
##
name: "postgresql"
## @param postgresql.nameOverride Name override
##
nameOverride: "postgresql"
## @param postgresql.fullnameOverride Fullname override
##
fullnameOverride: "postgresql"
global:
postgresql:
## @param postgresql.global.postgresql.auth.postgresPassword Password for the "postgres" admin user (overrides `auth.postgresPassword`)
## @param postgresql.global.postgresql.auth.username Name for a custom user to create (overrides `auth.username`)
## @param postgresql.global.postgresql.auth.password Password for the custom user to create (overrides `auth.password`)
## @param postgresql.global.postgresql.auth.database Name for a custom database to create (overrides `auth.database`)
## @param postgresql.global.postgresql.auth.existingSecret Name of existing secret to use for PostgreSQL credentials (overrides `auth.existingSecret`).
## @param postgresql.global.postgresql.auth.secretKeys.adminPasswordKey Name of key in existing secret to use for PostgreSQL credentials (overrides `auth.secretKeys.adminPasswordKey`). Only used when `postgresql.global.postgresql.auth.existingSecret` is set.
## @param postgresql.global.postgresql.auth.secretKeys.userPasswordKey Name of key in existing secret to use for PostgreSQL credentials (overrides `auth.secretKeys.userPasswordKey`). Only used when `postgresql.global.postgresql.auth.existingSecret` is set.
## @param postgresql.global.postgresql.auth.secretKeys.replicationPasswordKey Name of key in existing secret to use for PostgreSQL credentials (overrides `auth.secretKeys.replicationPasswordKey`). Only used when `postgresql.global.postgresql.auth.existingSecret` is set.
##
auth:
postgresPassword: "password"
username: "postgres"
password: "password"
database: "trigger"
existingSecret: ""
secretKeys:
adminPasswordKey: ""
userPasswordKey: ""
replicationPasswordKey: ""
## @param postgresql.global.postgresql.service.ports.postgresql PostgreSQL service port (overrides `service.ports.postgresql`)
##
service:
ports:
postgresql: "5432"
## Bitnami PostgreSQL image version
## ref: https://hub.docker.com/r/bitnami/postgresql/tags/
## @param postgresql.image.registry PostgreSQL image registry
## @param postgresql.image.repository PostgreSQL image repository
## @param postgresql.image.tag PostgreSQL image tag (immutable tags are recommended)
## @param postgresql.image.digest PostgreSQL image digest in the way sha256:aa.... Please note this parameter, if set, will override the tag
## @param postgresql.image.pullPolicy PostgreSQL image pull policy
## @param postgresql.image.pullSecrets Specify image pull secrets
## @param postgresql.image.debug Specify if debug values should be set
##
image:
registry: docker.io
repository: bitnami/postgresql
tag: 14.10.0-debian-11-r21
digest: ""
## Specify a imagePullPolicy
## Defaults to 'Always' if image tag is 'latest', else set to 'IfNotPresent'
## ref: https://kubernetes.io/docs/user-guide/images/#pre-pulling-images
##
pullPolicy: IfNotPresent
## Optionally specify an array of imagePullSecrets.
## Secrets must be manually created in the namespace.
## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/
## Example:
## pullSecrets:
## - myRegistryKeySecretName
##
pullSecrets: []
## Set to true if you would like to see extra information on logs
##
debug: false
## @param postgresql.architecture PostgreSQL architecture (`standalone` or `replication`)
##
architecture: standalone
## Replication configuration
## Ignored if `postgresql.architecture` is `standalone`
##
## @param postgresql.containerPorts.postgresql PostgreSQL container port
##
containerPorts:
postgresql: 5432
## @param postgresql.postgresqlDataDir PostgreSQL data dir
##
postgresqlDataDir: /bitnami/postgresql/data
## @param postgresql.postgresqlSharedPreloadLibraries Shared preload libraries (comma-separated list)
##
postgresqlSharedPreloadLibraries: "pgaudit"
## @section PostgreSQL Primary parameters
##
primary:
## Configure extra options for PostgreSQL Primary containers' liveness, readiness and startup probes
## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/#configure-probes
## @param postgresql.primary.livenessProbe.enabled Enable livenessProbe on PostgreSQL Primary containers
## @param postgresql.primary.livenessProbe.initialDelaySeconds Initial delay seconds for livenessProbe
## @param postgresql.primary.livenessProbe.periodSeconds Period seconds for livenessProbe
## @param postgresql.primary.livenessProbe.timeoutSeconds Timeout seconds for livenessProbe
## @param postgresql.primary.livenessProbe.failureThreshold Failure threshold for livenessProbe
## @param postgresql.primary.livenessProbe.successThreshold Success threshold for livenessProbe
##
livenessProbe:
enabled: true
initialDelaySeconds: 30
periodSeconds: 10
timeoutSeconds: 5
failureThreshold: 6
successThreshold: 1
## @param postgresql.primary.readinessProbe.enabled Enable readinessProbe on PostgreSQL Primary containers
## @param postgresql.primary.readinessProbe.initialDelaySeconds Initial delay seconds for readinessProbe
## @param postgresql.primary.readinessProbe.periodSeconds Period seconds for readinessProbe
## @param postgresql.primary.readinessProbe.timeoutSeconds Timeout seconds for readinessProbe
## @param postgresql.primary.readinessProbe.failureThreshold Failure threshold for readinessProbe
## @param postgresql.primary.readinessProbe.successThreshold Success threshold for readinessProbe
##
readinessProbe:
enabled: true
initialDelaySeconds: 5
periodSeconds: 10
timeoutSeconds: 5
failureThreshold: 6
successThreshold: 1
## @param postgresql.primary.startupProbe.enabled Enable startupProbe on PostgreSQL Primary containers
## @param postgresql.primary.startupProbe.initialDelaySeconds Initial delay seconds for startupProbe
## @param postgresql.primary.startupProbe.periodSeconds Period seconds for startupProbe
## @param postgresql.primary.startupProbe.timeoutSeconds Timeout seconds for startupProbe
## @param postgresql.primary.startupProbe.failureThreshold Failure threshold for startupProbe
## @param postgresql.primary.startupProbe.successThreshold Success threshold for startupProbe
##
startupProbe:
enabled: false
initialDelaySeconds: 30
periodSeconds: 10
timeoutSeconds: 1
failureThreshold: 15
successThreshold: 1
persistence:
## @param postgresql.primary.persistence.enabled Enable PostgreSQL Primary data persistence using PVC
##
enabled: true
## @param postgresql.primary.persistence.existingClaim Name of an existing PVC to use
##
existingClaim: ""
## @param postgresql.primary.persistence.accessModes PVC Access Mode for PostgreSQL volume
##
accessModes:
- ReadWriteOnce
## @param postgresql.primary.persistence.size PVC Storage Request for PostgreSQL volume
##
size: 8Gi
## @section Ingress parameters
## Documentation: https://kubernetes.io/docs/concepts/services-networking/ingress/
##
ingress:
## @param ingress.enabled Enable ingress
##
enabled: true
## @param ingress.ingressClassName Ingress class name
##
ingressClassName: nginx
## @param ingress.nginx.enabled Ingress controller
##
nginx:
enabled: false
## @param ingress.annotations Ingress annotations
##
annotations:
{}
# kubernetes.io/ingress.class: "nginx"
# cert-manager.io/issuer: letsencrypt-nginx
## @param ingress.hostName Ingress hostname (your custom domain name, e.g. `infisical.example.org`)
## Replace with your own domain
##
hostName: ""
## @param ingress.tls Ingress TLS hosts (matching above hostName)
## Replace with your own domain
##
tls:
[]
# - secretName: letsencrypt-nginx
# hosts:
# - infisical.local
## @param ingress.trigger.path Trigger.dev ingress path
## @param ingress.trigger.pathType Trigger.dev ingress path type
##
trigger:
path: /
pathType: Prefix


@ -1,20 +0,0 @@
FROM node:20 AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
FROM base AS build
ENV NODE_ENV=production
COPY ./packages/bot /usr/src/app
WORKDIR /usr/src/app
RUN mkdir -p /prod/scout
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
RUN pnpm deploy --filter=bot --prod /prod/scout
FROM base AS bot
COPY --from=build /prod/bot /app
WORKDIR /app
ENTRYPOINT ["pnpm"]
CMD ["run", "start"]


@@ -28,29 +28,8 @@ RUN --mount=type=cache,id=pnpm-store,target=/pnpm/store pnpm install
 FROM install AS dev
 CMD ["pnpm", "run", "dev"]
 
 FROM install AS build
 RUN pnpm run build
-# COPY --chown=node:node --from=install /app/package.json /app/pnpm-lock.yaml ./
-# RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --prod --frozen-lockfile
-# COPY --from=install /app /app # i think this is duplicate
-# can't get these to work because errors like "/prod/next/.next/standalone": not found
-# as if pnpm is not copying the build artifacts.
-# also this makes the build REALLY slow (adds ~10mins to build time)
-# RUN pnpm deploy --filter=@futureporn/next --prod /prod/next
-# RUN pnpm deploy --filter=@futureporn/link2cid --prod /prod/link2cid
-# FROM deps as release
-# # ENV NEXT_SHARP_PATH=/app/node_modules/sharp
-# ENV NODE_ENV=production
-# WORKDIR /app
-# COPY --from=build /app/public ./public
-# COPY --from=build /app/.next/standalone ./
-# COPY --from=build /app/.next/static ./.next/static
-# CMD [ "dumb-init", "node", "server.js" ]
 
 FROM deps AS next
 RUN apt-get update && apt-get install -y -qq --no-install-recommends dumb-init


@@ -2,7 +2,7 @@
 ## Because we are using monorepo with pnpm workspaces, we have many npm packages in this single git repo.
 ## Some of these packages in the monorepo depend on other packages in the monorepo.
 ## In order to build these individual packages which inter-depend on eachother,
-## all of the dependent code must be present in the build.
+## all of the dependent code must be present in the build context.
 ##
 ## Below, COPY . /usr/src/app copies all the app code into the build context.
 ## Because we use Tilt, only specific path directories are visible to docker. This helps with build performance.
@@ -16,27 +16,107 @@ FROM node:20 AS base
 ENV PNPM_HOME="/pnpm"
 ENV PATH="$PNPM_HOME:$PATH"
 RUN corepack enable
+WORKDIR /app
+ENTRYPOINT ["pnpm"]
 
 FROM base AS build
 ENV NODE_ENV=production
 COPY . /usr/src/app
 WORKDIR /usr/src/app
 RUN mkdir -p /prod/scout
-RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
-RUN pnpm deploy --filter=scout --prod /prod/scout
+RUN pnpm fetch
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile --offline
+RUN pnpm --recursive build
+RUN pnpm deploy --filter=boop /prod/boop
+# RUN pnpm deploy --filter=scout --prod /prod/scout
 # RUN pnpm deploy --filter=bot --prod /prod/bot
+# RUN pnpm deploy --filter=temporal-worker --prod /prod/temporal-worker
+# RUN pnpm deploy --filter=next /prod/next
+# RUN pnpm deploy --filter=next /prod/next-dev
 
-FROM base AS scout-manager
-COPY --from=build /prod/scout /app
-WORKDIR /app
-ENTRYPOINT ["pnpm"]
-CMD ["run", "start:manager"]
+FROM base AS boop
+COPY --from=build /prod/boop /app
+CMD ["start"]
 
-FROM base AS scout-worker
-COPY --from=build /prod/scout /app
-COPY --from=build /usr/src/app/certs/letsencrypt-stg-root-x1.pem
-ENV NODE_EXTRA_CA_CERTS "/app/certs/letsencrypt-stg-root-x1.pem"
-WORKDIR /app
-ENTRYPOINT ["pnpm"]
-CMD ["run", "start:worker"]
+# COPY pnpm-lock.yaml ./
+# RUN pnpm fetch
+# COPY ./packages/next /app
+# RUN --mount=type=cache,id=pnpm-store,target=/pnpm/store pnpm install
+
+# FROM base AS next-prep
+# ARG NEXT_PUBLIC_SITE_URL=https://futureporn.net
+# ARG NEXT_PUBLIC_STRAPI_URL=https://portal.futureporn.net
+# ARG NEXT_PUBLIC_UPPY_COMPANION_URL=https://uppy.futureporn.net
+# ENV NEXT_PUBLIC_SITE_URL ${NEXT_PUBLIC_SITE_URL}
+# ENV NEXT_PUBLIC_STRAPI_URL ${NEXT_PUBLIC_STRAPI_URL}
+# ENV NEXT_PUBLIC_UPPY_COMPANION_URL ${NEXT_PUBLIC_UPPY_COMPANION_URL}
+# ENV NEXT_TELEMETRY_DISABLED 1
+# COPY pnpm-lock.yaml ./
+# COPY ./packages/next /app
+# RUN pnpm fetch
+# RUN --mount=type=cache,id=pnpm-store,target=/pnpm/store pnpm install
+
+FROM base AS next-build
+COPY --from=build /prod/next /app
+# RUN --mount=type=cache,id=pnpm-store,target=/pnpm/store pnpm install
+# RUN ls -la
+RUN pnpm run build
+
+FROM base as next
+COPY --from=next-build /app /app
+ENV TZ=UTC
+ENV NODE_ENV=production
+ENV HOSTNAME="0.0.0.0"
+ENTRYPOINT [ "dumb-init", "node", "server.js" ]
+
+# FROM base AS next-pre
+# COPY --from=build /prod/next /app
+# ENV NODE_EXTRA_CA_CERTS "/app/letsencrypt-stg-root-x1.pem"
+
+# FROM next-pre AS next-dev
+# CMD ["pnpm", "run", "dev"]
+
+# FROM next-pre AS next-build
+# RUN pnpm run build
+
+# FROM base AS next
+# RUN apt-get update && apt-get install -y -qq --no-install-recommends dumb-init
+# # COPY --chown=node:node --from=build /prod/next .
+# COPY --chown=node:node --from=next-build /app/package.json /app/pnpm-lock.yaml ./
+# RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --prod --frozen-lockfile
+# COPY --chown=node:node --from=next-build /app/public ./public
+# COPY --chown=node:node --from=next-build /app/.next/standalone ./
+# COPY --chown=node:node --from=next-build /app/.next/static ./.next/static
+# RUN ls -la .
+# ENV TZ=UTC
+# ENV NODE_ENV=production
+# ENV HOSTNAME="0.0.0.0"
+# ENTRYPOINT [ "dumb-init", "node", "server.js" ]
+
+# FROM base AS scout-manager
+# COPY --from=build /prod/scout /app
+# CMD ["run", "start:manager"]
+
+# FROM base AS scout-worker
+# COPY --from=build /prod/scout /app
+# COPY --from=build /usr/src/app/certs/letsencrypt-stg-root-x1.pem /app
+# ENV NODE_EXTRA_CA_CERTS "/app/certs/letsencrypt-stg-root-x1.pem"
+# CMD ["run", "start:worker"]
+
+# FROM base AS temporal-worker
+# COPY --from=build /prod/temporal-worker /app
+# CMD ["run", "start"]
+
+# FROM base AS bot-prep
+# COPY --from=build /prod/bot /app
+
+# FROM bot-prep AS bot
+# CMD ["run", "start"]
+
+# FROM bot-prep AS bot-dev
+# CMD ["run", "dev"]

packages/archive/.npmrc

@ -0,0 +1,4 @@
engine-strict=true
package-manager-strict=true
use-node-version=20.13.1
node-version=20.13.1


@ -0,0 +1,4 @@
# archive
This module does vod processing on the backend.

packages/archive/node.d.ts

@ -0,0 +1,4 @@
interface ImportMeta {
dirname: string;
url: string;
}


@ -0,0 +1,16 @@
{
"name": "archive",
"version": "0.0.1",
"description": "",
"main": "index.ts",
"scripts": {
"test": "mocha"
},
"keywords": [],
"author": "@CJ_Clippy",
"license": "Unlicense",
"dependencies": {
"@aws-sdk/client-s3": "^3.583.0",
"prevvy": "^7.0.1"
}
}

File diff suppressed because it is too large


@ -0,0 +1,240 @@
import Prevvy from 'prevvy';
import path from 'node:path';
import os from 'node:os';
import { promisify } from 'util';
import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
type CVod = {
id: number,
attributes: {
date: string,
note: null,
date2: string,
title: string,
vtuber: {
data: {
id: number,
attributes: {
slug: string,
image: string,
imageBlur: string,
displayName: string
}
}
},
chatLog: null,
muxAsset: {
data: {
assetId: string,
playbackId: string
}
},
spoilers: null,
thinHash: null,
createdAt: null,
thiccHash: null,
thumbnail: {
data: {
id: number,
attributes: {
url: string,
cdnUrl: string
}
}
},
updatedAt: string,
videoSrcB2: {
data: {
id: number,
attributes: {
key: string,
url: string,
cdnUrl: string,
uploadId: string
}
}
},
announceUrl: string,
publishedAt: string,
video240Hash: string,
video360Hash: null,
video480Hash: null,
video720Hash: null,
videoSrcHash: string,
announceTitle: string,
archiveStatus: null,
tagVodRelations: {
data: any[]
}
}
}
type CStrapi = {
strapiUrl: string,
strapiApiKey: string
}
type S3 = {
port: number,
bucket: string,
region: string,
useSSL: boolean,
endPoint: string,
accessKey: string,
pathStyle: boolean,
secretKey: string
}
type CBunnyPullZone = {
cdnHostname: string
}
interface IUploadData {
uploadId: string;
key: string;
url: string;
}
export async function __generateThumbnail(vod: CVod): Promise<string> {
const fileName = `vod-${vod?.id}-thumb.png`;
const thumbnailFilePath = path.join(os.tmpdir(), fileName);
const videoInputUrl = vod.attributes.videoSrcB2?.data?.attributes?.cdnUrl;
if (!videoInputUrl) {
console.error(vod?.attributes?.videoSrcB2);
throw new Error(`videoInputUrl in __generateThumbnail was undefined`);
}
console.log(`🫰 Creating thumbnail from ${videoInputUrl} ---> ${thumbnailFilePath}`);
const thumb = new Prevvy({
input: videoInputUrl,
output: thumbnailFilePath,
throttleTimeout: 2000,
width: 128,
cols: 5,
rows: 5,
});
thumb.on('progress', async (data: { percentage: number }) => {
console.log(`Thumbnail generation ${data.percentage}%`);
});
await thumb.generate();
return thumbnailFilePath;
}
function createId(): string {
const timestamp: number = new Date().getTime();
const randomPart: number = Math.floor(Math.random() * 10000);
return `${timestamp}-${randomPart}`;
}
export async function uploadToB2 (s3Resource: S3, filePath: string): Promise<IUploadData> {
const { bucket, endPoint, region, accessKey, secretKey } = s3Resource;
const keyName = `${createId()}-${path.basename(filePath)}`
console.log(`uploadToB2 begin. bucket:${bucket} endpoint:${endPoint}`)
const urlPrefix = 'https://f000.backblazeb2.com/b2api/v1/b2_download_file_by_id?fileId='
const s3 = new S3Client({
endpoint: `https://${endPoint}`,
region: region,
credentials: {
accessKeyId: accessKey,
secretAccessKey: secretKey,
}
});
const file = Bun.file(filePath);
const fileStream = await file.arrayBuffer();
var params = {Bucket: bucket, Key: keyName, Body: fileStream};
const res = await s3.send(new PutObjectCommand(params));
if (!res.VersionId) {
const msg = 'res was missing VersionId'
throw new Error(msg)
}
const url = `${urlPrefix}${res.VersionId}`;
const blah: IUploadData = {
uploadId: res.VersionId,
key: keyName,
url: url
}
console.log(url)
console.log(blah)
return blah;
}
export async function associateB2WithVod(vod: CVod, strapi: CStrapi, uploadData: IUploadData, zone: CBunnyPullZone) {
if (!vod) throw new Error('vod argument was missing');
if (!strapi) throw new Error('strapi argument was missing');
if (!uploadData) throw new Error('uploadData argument was missing');
const { cdnHostname } = zone;
const { strapiApiKey, strapiUrl } = strapi;
console.log(`🥤 lets create b2-file in Strapi`);
// Create the B2 file
const thumbResponse = await fetch(`${strapiUrl}/api/b2-files`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${strapiApiKey}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({
data: {
key: uploadData.key,
uploadId: uploadData.uploadId,
url: uploadData.url,
cdnUrl: `https://${cdnHostname}/${uploadData.key}`
},
}),
});
if (!thumbResponse.ok) {
const msg = `🟠 Failed to create B2 file: ${thumbResponse.statusText}`
console.error(msg)
throw new Error(msg);
}
const thumbData = await thumbResponse.json() as IB2File;
console.log(`📀 B2 file creation complete for B2 file id: ${thumbData.data.id}`);
console.log(`🪇 lets associate B2-file with VOD ${vod.id} in Strapi`);
// Associate B2 file with VOD
const associateResponse = await fetch(`${strapiUrl}/api/vods/${vod.id}`, {
method: 'PUT',
headers: {
'Authorization': `Bearer ${strapiApiKey}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({
data: {
thumbnail: thumbData.data.id,
},
}),
});
if (!associateResponse.ok) {
const msg = `💀 Failed to associate B2 file with VOD: ${associateResponse.statusText}`;
console.error(msg)
throw new Error(msg)
}
console.log(`🫚 Association complete`);
const json = await associateResponse.json()
return json;
}
export async function main(vod: CVod, strapi: CStrapi, s3: S3, zone: CBunnyPullZone): Promise<CVod> {
if (!vod) throw new Error('vod param is missing, and it is required.');
if (!strapi) throw new Error('strapi param is missing, and it is required.');
if (!s3) throw new Error(`s3 param is missing, and it is required.`);
if (!zone) throw new Error(`zone param is missing, and it is required.`);
const thumbnailFilePath = await __generateThumbnail(vod);
const b2Record = await uploadToB2(s3, thumbnailFilePath);
return associateB2WithVod(vod, strapi, b2Record, zone);
}
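For orientation, a hedged usage sketch for the module above. The import path, env var names, bucket, endpoint, and CDN hostname are illustrative placeholders (a few mirror values that appear elsewhere in this commit); they are not configuration taken from the repo.

```ts
// Hypothetical caller for packages/archive/index.ts; all values are placeholders.
import { main } from './index.js';

async function run(vod: any) { // `vod` would be fetched from Strapi and match the CVod shape above
  const strapi = {
    strapiUrl: 'https://portal.futureporn.net',
    strapiApiKey: process.env.STRAPI_API_KEY ?? '',
  };
  const s3 = {
    port: 443,
    bucket: 'futureporn',                        // placeholder bucket
    region: 'us-west-000',
    useSSL: true,
    endPoint: 's3.us-west-000.backblazeb2.com',
    accessKey: process.env.S3_ACCESS_KEY ?? '',
    pathStyle: true,
    secretKey: process.env.S3_SECRET_KEY ?? '',
  };
  const zone = { cdnHostname: 'futureporn-b2.b-cdn.net' };

  // Generates a thumbnail, uploads it to B2, then associates it with the VOD in Strapi.
  const updated = await main(vod, strapi, s3, zone);
  console.log(updated);
}
```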


@ -0,0 +1,31 @@
{
"compilerOptions": {
// Base Options recommended for all projects
"esModuleInterop": true,
"skipLibCheck": true,
"target": "es2022",
"allowJs": false,
"resolveJsonModule": true,
"moduleDetection": "force",
"isolatedModules": true,
"verbatimModuleSyntax": true,
// Enable strict type checking so you can catch bugs early
"strict": true,
"noUncheckedIndexedAccess": true,
"noImplicitOverride": true,
// Transpile our TypeScript code to JavaScript
"module": "NodeNext",
"outDir": "lib",
"lib": [
"es2022"
]
},
// Include the necessary files for your project
"include": [
"**/*.ts",
"**/*.tsx"
],
"exclude": [
"node_modules"
]
}

packages/boop/index.ts

@ -0,0 +1,18 @@
import { bell } from 'taco'
import { IPagination } from 'types'
function main() {
const page: IPagination = {
page: 5,
pageCount: 20,
pageSize: 50,
total: 365
}
console.log(bell()+' '+page)
setTimeout(() => {
return main()
}, 2000)
}
main()
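`boop` exists to exercise pnpm workspace linking: both `taco` and `types` resolve to sibling packages via `workspace:*`, as shown in the package.json and pnpm-lock.yaml below. As a hedged illustration, the imported modules would only need to export something like the following; the actual contents of `packages/taco` and `packages/types` are not part of this commit:

```ts
// Hypothetical shapes of the workspace packages boop imports; not shown in this diff.

// packages/types/index.ts (assumed)
export interface IPagination {
  page: number;
  pageCount: number;
  pageSize: number;
  total: number;
}

// packages/taco/index.ts (assumed)
export function bell(): string {
  return 'ding';
}
```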


@ -0,0 +1,18 @@
{
"name": "boop",
"type": "module",
"version": "1.0.1",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "node index.js"
},
"dependencies": {
"taco": "workspace:*",
"types": "workspace:*"
},
"keywords": [],
"author": "",
"license": "ISC"
}


@ -0,0 +1,16 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
taco:
specifier: workspace:*
version: link:../taco
types:
specifier: workspace:*
version: link:../types


@ -1 +0,0 @@
node_modules


@ -1 +0,0 @@
use-node-version=>=20.0.0


@ -1 +0,0 @@
lts/iron


@ -1,19 +0,0 @@
FROM node:20-alpine AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR /app
RUN corepack enable
FROM base AS build
COPY ./packages/bot/package.json ./
COPY ./packages/bot/src ./
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install
FROM build AS dev
ENTRYPOINT ["pnpm"]
CMD ["run", "dev"]
FROM build AS run
ENTRYPOINT ["pnpm"]
CMD ["start"]


@@ -1,3 +1,26 @@
 # bot
 
-A.K.A. FutureButt, the discord bot that integrates into FP backend.
+A.K.A. FutureButt, the discord bot that integrates into Futureporn backend.
+
+## Features
+
+* [ ] User submitted content (USC) notifications
+* [ ] Embedded video
+* [ ] Prevvy storyboard
+* [ ] USC publishing
+* [ ] USC rejection
+
+## Usage
+
+bot is a node.js app which uses ENV variables to ingest secrets. The following ENV vars are required.
+
+```
+DISCORD_TOKEN
+DISCORD_CHANNEL_ID
+```
+
+Example invocation as follows.
+
+DISCORD_TOKEN=your-token-goes-here DISCORD_CHANNEL_ID=1185024773231759481 node index.js
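For context, a minimal sketch of a bot entrypoint that consumes those two variables, based on the oceanic.js calls visible in the removed example files further down. This is not the committed packages/bot code; the startup message is purely illustrative.

```ts
// Minimal sketch only; the real packages/bot entrypoint is not shown in this diff.
import { Client } from 'oceanic.js';

const token = process.env.DISCORD_TOKEN;
const channelID = process.env.DISCORD_CHANNEL_ID;
if (!token || !channelID) throw new Error('DISCORD_TOKEN and DISCORD_CHANNEL_ID are required');

const client = new Client({
  auth: `Bot ${token}`,
  gateway: { intents: ['GUILD_MESSAGES'] },
});

client.on('ready', async () => {
  console.log('Ready as', client.user.tag);
  // Announce in the configured channel that the bot came online.
  await client.rest.channels.createMessage(channelID, { content: 'bot is online' });
});

client.on('error', (error) => console.error('Something went wrong:', error));

client.connect();
```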


@ -1,106 +0,0 @@
const { ButtonStyles, Client, ComponentTypes, ChannelTypes } = require("oceanic.js");
const client = new Client({
auth: `Bot ${process.env.DISCORD_TOKEN}`,
gateway: {
intents: ["GUILD_MESSAGES"] // If the message does not start with a mention to or somehow relate to your client, you will need the MESSAGE_CONTENT intent as well
}
});
client.on("ready", () => console.log("Ready as", client.user.tag));
client.on("messageCreate", async (msg) => {
if(msg.content.includes("!component")) {
await client.rest.channels.createMessage(msg.channelID, {
content: `Here's some buttons for you, ${msg.author.mention}.`,
components: [
{
// The top level component must always be an action row.
// Full list of types: https://docs.oceanic.ws/latest/enums/Constants.ComponentTypes.html
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.MessageActionRow.html
type: ComponentTypes.ACTION_ROW,
components: [
{
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.TextButton.html
type: ComponentTypes.BUTTON,
style: ButtonStyles.PRIMARY, // The style of button - full list: https://docs.oceanic.ws/latest/enums/Constants.ButtonStyles.html
customID: "some-string-you-will-see-later",
label: "Click!",
disabled: false, // If the button is disabled, false by default.
},
{
type: ComponentTypes.BUTTON,
style: ButtonStyles.PRIMARY,
customID: "some-other-string",
label: "This Is Disabled",
disabled: true
},
{
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.URLButton.html
type: ComponentTypes.BUTTON,
style: ButtonStyles.LINK,
label: "Open Link",
url: "https://docs.oceanic.ws"
}
]
},
{
// The top level component must always be an action row.
// Full list of types: https://docs.oceanic.ws/latest/enums/Constants.ComponentTypes.html
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.MessageActionRow.html
type: ComponentTypes.ACTION_ROW,
components: [
{
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.SelectMenu.html
type: ComponentTypes.STRING_SELECT,
customID: "string-select",
disabled: false,
maxValues: 1, // The maximum number of values that can be selected (default 1)
minValues: 1, // The minimum number of values that can be selected (default 1)
options: [
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.SelectOption.html
{
default: true, // If this option is selected by default
description: "The description of the option", // Optional description
label: "Option One",
value: "value-1"
},
{
label: "Option Two",
value: "option-2"
}
],
placeholder: "Some Placeholder Text"
}
]
},
{
// The top level component must always be an action row.
// Full list of types: https://docs.oceanic.ws/latest/enums/Constants.ComponentTypes.html
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.MessageActionRow.html
type: ComponentTypes.ACTION_ROW,
components: [
{
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.SelectMenu.html
type: ComponentTypes.CHANNEL_SELECT,
channelTypes: [ChannelTypes.GUILD_TEXT, ChannelTypes.GUILD_VOICE], // The types of channels that can be selected
customID: "channel-select",
disabled: false,
maxValues: 1, // The maximum number of values that can be selected (default 1)
minValues: 1, // The minimum number of values that can be selected (default 1)
placeholder: "Some Placeholder Text"
}
]
}
]
});
}
});
// An error handler
client.on("error", (error) => {
console.error("Something went wrong:", error);
});
// Connect to Discord
client.connect();

View File

@ -1,94 +0,0 @@
const { Client } = require("oceanic.js");
const { readFileSync } = require("fs");
const client = new Client({
auth: `Bot ${process.env.DISCORD_TOKEN}`,
gateway: {
intents: ["GUILD_MESSAGES"] // If the message does not start with a mention to or somehow relate to your client, you will need the MESSAGE_CONTENT intent as well
}
});
client.on("ready", () => console.log("Ready as", client.user.tag));
client.on("messageCreate", async (msg) => {
if(msg.content.includes("!embed")) {
console.log(`'!embeds' was seen in chat!`)
console.log(msg)
await client.rest.channels.createMessage(msg.channelID, {
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.EmbedOptions.html
// Up to 10 in one message
embeds: [
{
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.EmbedAuthorOptions.html
author: {
name: "Author Name",
// An image url, or attachment://filename.ext
iconURL: "https://i.furry.cool/DonPride.png", // Optional
url: "https://docs.oceanic.ws" // Optional
},
// Array of https://docs.oceanic.ws/latest/interfaces/Types_Channels.EmbedField.html
// Up to 25 in one message
fields: [
{
name: "Field One",
value: "Field One Value",
inline: true // If this field should be displayed inline (default: false)
},
{
name: "Field Two",
value: "Field Two Value",
inline: false
}
],
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.EmbedFooterOptions.html
footer: {
text: "Footer Text",
// An image url, or attachment://filename.ext
iconURL: "https://i.furry.cool/DonPride.png" // Optional
},
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.EmbedImageOptions.html
image: {
// An image url, or attachment://filename.ext
url: "https://i.furry.cool/DonPride.png"
},
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.EmbedThumbnailOptions.html
thumbnail: {
// An image url, or attachment://filename.ext
url: "https://i.furry.cool/DonPride.png"
},
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.EmbedOptions.html
color: 0xFFA500, // Base-10 color (0x prefix can be used for hex codes)
description: "My Cool Embed",
timestamp: new Date().toISOString(), // The current time - ISO 8601 format
title: "My Amazing Embed",
url: "https://docs.oceanic.ws"
}
]
});
} else if(msg.content.includes("!file")) {
await client.rest.channels.createMessage(msg.channelID, {
embeds: [
{
image: {
// This can also be used for author & footer images
url: "attachment://image.png"
}
}
],
files: [
{
name: "image.png",
contents: readFileSync(`${__dirname}/image.png`)
}
]
});
}
});
// An error handler
client.on("error", (error) => {
console.error("Something went wrong:", error);
});
// Connect to Discord
client.connect();

View File

@ -1,261 +0,0 @@
const { ApplicationCommandTypes, ButtonStyles, Client, ComponentTypes, ChannelTypes, InteractionTypes } = require("oceanic.js");
const client = new Client({
auth: `Bot ${process.env.DISCORD_TOKEN}`,
gateway: {
intents: ["GUILD_MESSAGES"] // If the message does not start with a mention to or somehow relate to your client, you will need the MESSAGE_CONTENT intent as well
}
});
client.on("ready", () => console.log("Ready as", client.user.tag));
client.on("messageCreate", async (msg) => {
console.log(msg.content)
if(msg.content.includes("!test")) {
await client.rest.channels.createMessage(msg.channelID, {
content: `HGERE IZ BUTTN'z 5 u, ${msg.author.mention}.`,
components: [
{
// The top level component must always be an action row.
// Full list of types: https://docs.oceanic.ws/latest/enums/Constants.ComponentTypes.html
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.MessageActionRow.html
type: ComponentTypes.ACTION_ROW,
components: [
{
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.TextButton.html
type: ComponentTypes.BUTTON,
style: ButtonStyles.PRIMARY, // The style of button - full list: https://docs.oceanic.ws/latest/enums/Constants.ButtonStyles.html
customID: "some-string-you-will-see-later",
label: "Click!",
disabled: false, // If the button is disabled, false by default.
},
{
type: ComponentTypes.BUTTON,
style: ButtonStyles.PRIMARY,
customID: "some-other-string",
label: "This Is Disabled",
disabled: true
},
{
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.URLButton.html
type: ComponentTypes.BUTTON,
style: ButtonStyles.LINK,
label: "Open Link",
url: "https://docs.oceanic.ws"
}
]
},
{
// The top level component must always be an action row.
// Full list of types: https://docs.oceanic.ws/latest/enums/Constants.ComponentTypes.html
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.MessageActionRow.html
type: ComponentTypes.ACTION_ROW,
components: [
{
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.SelectMenu.html
type: ComponentTypes.STRING_SELECT,
customID: "string-select",
disabled: false,
maxValues: 1, // The maximum number of values that can be selected (default 1)
minValues: 1, // The minimum number of values that can be selected (default 1)
options: [
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.SelectOption.html
{
default: true, // If this option is selected by default
description: "The description of the option", // Optional description
label: "Option One",
value: "value-1"
},
{
label: "Option Two",
value: "option-2"
}
],
placeholder: "Some Placeholder Text"
}
]
},
{
// The top level component must always be an action row.
// Full list of types: https://docs.oceanic.ws/latest/enums/Constants.ComponentTypes.html
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.MessageActionRow.html
type: ComponentTypes.ACTION_ROW,
components: [
{
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.SelectMenu.html
type: ComponentTypes.CHANNEL_SELECT,
channelTypes: [ChannelTypes.GUILD_TEXT, ChannelTypes.GUILD_VOICE], // The types of channels that can be selected
customID: "channel-select",
disabled: false,
maxValues: 1, // The maximum number of values that can be selected (default 1)
minValues: 1, // The minimum number of values that can be selected (default 1)
placeholder: "Some Placeholder Text"
}
]
}
]
});
}
});
client.on("interactionCreate", async(interaction) => {
console.log(`interaction!@`)
console.log(interaction)
switch(interaction.type) {
// https://docs.oceanic.ws/latest/classes/CommandInteraction.CommandInteraction.html
case InteractionTypes.APPLICATION_COMMAND: {
// defer interactions as soon as possible, you have three seconds to send any initial response
// if you wait too long, the interaction may be invalidated
await interaction.defer();
// If you want the response to be ephemeral, you can provide the flag to the defer function, like so:
// await interaction.defer(MessageFlags.EPHEMERAL);
// data = https://docs.oceanic.ws/latest/interfaces/Types_Interactions.ApplicationCommandInteractionData.html
switch(interaction.data.type) {
// Chat Input commands are what you use in the chat, i.e. slash commands
case ApplicationCommandTypes.CHAT_INPUT: {
if(interaction.data.name === "greet") {
// assume we have two options, user (called user) then string (called greeting) - first is required, second is not
// Get an option named `user` with the type USER - https://docs.oceanic.ws/dev/classes/InteractionOptionsWrapper.InteractionOptionsWrapper.html#getUser
// Setting the second parameter to true will throw an error if the option is not present
const user = interaction.data.options.getUser("user", true);
const greeting = interaction.data.options.getString("greeting", false) || "Hello, ";
// since we've already deferred the interaction, we cannot use createMessage (this is an initial response)
// we can only have one initial response, so we use createFollowup
await interaction.createFollowup({
content: `${greeting} ${user.mention}!`,
allowedMentions: {
users: [user.id]
}
});
}
// Chat Input application command interactions also have a set of resolved data, which is structured as so:
// https://docs.oceanic.ws/latest/interfaces/Types_Interactions.ApplicationCommandInteractionResolvedData.html
// the options wrapper pulls values out of resolved automatically, if you use the right method
break;
}
// User application commands are shown in the context menu when right-clicking on users
// `data` will have a target (and targetID) property with the user that the command was executed on
// These don't have options
case ApplicationCommandTypes.USER: {
if(interaction.data.name === "ping") {
await interaction.createFollowup({
content: `Pong! ${interaction.data.target.mention}`,
allowedMentions: {
users: [interaction.data.target.id]
}
});
}
break;
}
// Message application commands are shown in the context menu when right-clicking on messages
// `data` will have a target (and targetID) property with the message that the command was executed on
// Same as user commands, these don't have options
case ApplicationCommandTypes.MESSAGE: {
if(interaction.data.name === "author") {
await interaction.createFollowup({
content: `${interaction.data.target.author.mention} is the author of that message!`,
allowedMentions: {
users: [interaction.data.target.author.id]
}
});
}
break;
}
}
break;
}
// https://docs.oceanic.ws/latest/classes/ComponentInteraction.ComponentInteraction.html
case InteractionTypes.MESSAGE_COMPONENT: {
// same spiel as above
await interaction.defer();
// when you create a message with components, this will correspond with what you provided as the customID there
if(interaction.data.componentType === ComponentTypes.BUTTON) {
if(interaction.data.customID === "edit-message") {
// Edits the original message. This has an initial response variant: editParent
await interaction.editOriginal({
content: `This message was edited by ${interaction.user.mention}!`,
allowedMentions: {
users: [interaction.user.id]
}
});
} else if(interaction.data.customID === "my-amazing-button") {
await interaction.createFollowup({
content: "You clicked an amazing button!"
});
}
} else if(interaction.data.componentType === ComponentTypes.SELECT_MENU) {
// The `values` property under data contains all the selected values
await interaction.createFollowup({
content: `You selected: **${interaction.data.values.join("**, **")}**`
});
}
break;
}
// https://docs.oceanic.ws/latest/classes/AutocompleteInteraction.AutocompleteInteraction.html
case InteractionTypes.APPLICATION_COMMAND_AUTOCOMPLETE: {
// Autocomplete Interactions cannot be deferred
switch(interaction.data.name) {
case "test-autocomplete": {
// Autocomplete interactions data has a partial `options` property, which is the tree of options that are currently being filled in
// along with one at the end, which will have focused
// Setting the first parameter to true will throw an error if no focused option is present
const option = interaction.data.options.getFocused(true);
switch(option.name) {
case "test-option": {
return interaction.result([
{
name: "Choice 1",
nameLocalizations: {
"es-ES": "Opción 1"
},
value: "choice-1"
},
{
name: "Choice 2",
nameLocalizations: {
"es-ES": "Opción 2"
},
value: "choice-2"
}
]);
break;
}
}
}
}
break;
}
// https://docs.oceanic.ws/latest/classes/ModalSubmitInteraction.ModalSubmitInteraction.html
case InteractionTypes.MODAL_SUBMIT: {
// this will correspond with the customID you provided when creating the modal
switch(interaction.data.customID) {
case "test-modal": {
// the `components` property under data contains all the components that were submitted
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.ModalActionRow.html
console.log(interaction.data.components);
break;
}
}
break;
}
}
});
// An error handler
client.on("error", (error) => {
console.error("Something went wrong:", error);
});
// Connect to Discord
client.connect();

70
packages/bot/index.js Normal file
View File

@ -0,0 +1,70 @@
import 'dotenv/config';
import { Client, Events, GatewayIntentBits, Partials } from 'discord.js';
if (!process.env.DISCORD_TOKEN) throw new Error("DISCORD_TOKEN was missing from env");
if (!process.env.DISCORD_CHANNEL_ID) throw new Error("DISCORD_CHANNEL_ID was missing from env");
const channelId = String(process.env.DISCORD_CHANNEL_ID);
console.log(`channelId is ${channelId}`)
// Create a new client instance
const client = new Client({
intents: [
GatewayIntentBits.Guilds,
GatewayIntentBits.GuildMessages,
GatewayIntentBits.GuildMessageReactions,
],
partials: [
Partials.Message,
Partials.Channel,
Partials.Reaction,
]
});
// When the client is ready, run this code (only once).
// The distinction between `client: Client<boolean>` and `readyClient: Client<true>` is important for TypeScript developers.
// It makes some properties non-nullable.
client.once(Events.ClientReady, readyClient => {
console.log(`Ready! Logged in as ${readyClient.user.tag} yuhu`);
// client.channels.cache.get(process.env.DISCORD_CHANNEL_ID).send('testing 123');
readyClient.channels.fetch(channelId).then(channel => {
channel.send('generic welcome message!')
});
// console.log(readyClient.channels)
// const channel = readyClient.channels.cache.get(process.env.DISCORD_CHANNEL_ID);
// channel.send('testing 135');
});
client.on(Events.InteractionCreate, async interaction => {
if (!interaction.isChatInputCommand()) return;
const { commandName } = interaction;
if (commandName === 'react') {
const message = await interaction.reply({ content: 'You can react with Unicode emojis!', fetchReply: true });
message.react('😄');
}
})
client.on(Events.MessageReactionAdd, async (reaction, user) => {
// When a reaction is received, check if the structure is partial
if (reaction.partial) {
// If the message this reaction belongs to was removed, the fetching might result in an API error which should be handled
try {
await reaction.fetch();
} catch (error) {
console.error('Something went wrong when fetching the message:', error);
// Return as `reaction.message.author` may be undefined/null
return;
}
}
// Now the message has been cached and is fully available
console.log(`${reaction.message.author}'s message "${reaction.message.content}" gained a reaction!`);
// The reaction is now also fully available and the properties will be reflected accurately:
console.log(`${reaction.count} user(s) have given the same reaction to this message!`);
});
// Log in to Discord with your client's token
client.login(process.env.DISCORD_TOKEN);

View File

@ -1,171 +0,0 @@
const { Client, InteractionTypes, MessageFlags, ComponentTypes, ApplicationCommandTypes } = require("oceanic.js");
const client = new Client({
auth: `Bot ${process.env.DISCORD_TOKEN}`,
gateway: {
intents: 0 // No intents are needed if you are only using interactions
}
});
client.on("ready", async() => {
console.log("Ready as", client.user.tag);
});
client.on("interactionCreate", async(interaction) => {
switch(interaction.type) {
// https://docs.oceanic.ws/latest/classes/CommandInteraction.CommandInteraction.html
case InteractionTypes.APPLICATION_COMMAND: {
// defer interactions as soon as possible, you have three seconds to send any initial response
// if you wait too long, the interaction may be invalidated
await interaction.defer();
// If you want the response to be ephemeral, you can provide the flag to the defer function, like so:
// await interaction.defer(MessageFlags.EPHEMERAL);
// data = https://docs.oceanic.ws/latest/interfaces/Types_Interactions.ApplicationCommandInteractionData.html
switch(interaction.data.type) {
// Chat Input commands are what you use in the chat, i.e. slash commands
case ApplicationCommandTypes.CHAT_INPUT: {
if(interaction.data.name === "greet") {
// assume we have two options, user (called user) then string (called greeting) - first is required, second is not
// Get an option named `user` with the type USER - https://docs.oceanic.ws/dev/classes/InteractionOptionsWrapper.InteractionOptionsWrapper.html#getUser
// Setting the second parameter to true will throw an error if the option is not present
const user = interaction.data.options.getUser("user", true);
const greeting = interaction.data.options.getString("greeting", false) || "Hello, ";
// since we've already deferred the interaction, we cannot use createMessage (this is an initial response)
// we can only have one initial response, so we use createFollowup
await interaction.createFollowup({
content: `${greeting} ${user.mention}!`,
allowedMentions: {
users: [user.id]
}
});
}
// Chat Input application command interactions also have a set of resolved data, which is structured as so:
// https://docs.oceanic.ws/latest/interfaces/Types_Interactions.ApplicationCommandInteractionResolvedData.html
// the options wrapper pulls values out of resolved automatically, if you use the right method
break;
}
// User application commands are shown in the context menu when right-clicking on users
// `data` will have a target (and targetID) property with the user that the command was executed on
// These don't have options
case ApplicationCommandTypes.USER: {
if(interaction.data.name === "ping") {
await interaction.createFollowup({
content: `Pong! ${interaction.data.target.mention}`,
allowedMentions: {
users: [interaction.data.target.id]
}
});
}
break;
}
// Message application commands are shown in the context menu when right-clicking on messages
// `data` will have a target (and targetID) property with the message that the command was executed on
// Same as user commands, these don't have options
case ApplicationCommandTypes.MESSAGE: {
if(interaction.data.name === "author") {
await interaction.createFollowup({
content: `${interaction.data.target.author.mention} is the author of that message!`,
allowedMentions: {
users: [interaction.data.target.author.id]
}
});
}
break;
}
}
break;
}
// https://docs.oceanic.ws/latest/classes/ComponentInteraction.ComponentInteraction.html
case InteractionTypes.MESSAGE_COMPONENT: {
// same spiel as above
await interaction.defer();
// when you create a message with components, this will correspond with what you provided as the customID there
if(interaction.data.componentType === ComponentTypes.BUTTON) {
if(interaction.data.customID === "edit-message") {
// Edits the original message. This has an initial response variant: editParent
await interaction.editOriginal({
content: `This message was edited by ${interaction.user.mention}!`,
allowedMentions: {
users: [interaction.user.id]
}
});
} else if(interaction.data.customID === "my-amazing-button") {
await interaction.createFollowup({
content: "You clicked an amazing button!"
});
}
} else if(interaction.data.componentType === ComponentTypes.SELECT_MENU) {
// The `values` property under data contains all the selected values
await interaction.createFollowup({
content: `You selected: **${interaction.data.values.join("**, **")}**`
});
}
break;
}
// https://docs.oceanic.ws/latest/classes/AutocompleteInteraction.AutocompleteInteraction.html
case InteractionTypes.APPLICATION_COMMAND_AUTOCOMPLETE: {
// Autocomplete Interactions cannot be deferred
switch(interaction.data.name) {
case "test-autocomplete": {
// Autocomplete interactions data has a partial `options` property, which is the tree of options that are currently being filled in
// along with one at the end, which will have focused
// Setting the first parameter to true will throw an error if no focused option is present
const option = interaction.data.options.getFocused(true);
switch(option.name) {
case "test-option": {
return interaction.result([
{
name: "Choice 1",
nameLocalizations: {
"es-ES": "Opción 1"
},
value: "choice-1"
},
{
name: "Choice 2",
nameLocalizations: {
"es-ES": "Opción 2"
},
value: "choice-2"
}
]);
break;
}
}
}
}
break;
}
// https://docs.oceanic.ws/latest/classes/ModalSubmitInteraction.ModalSubmitInteraction.html
case InteractionTypes.MODAL_SUBMIT: {
// this will correspond with the customID you provided when creating the modal
switch(interaction.data.customID) {
case "test-modal": {
// the `components` property under data contains all the components that were submitted
// https://docs.oceanic.ws/latest/interfaces/Types_Channels.ModalActionRow.html
console.log(interaction.data.components);
break;
}
}
break;
}
}
});
// An error handler
client.on("error", (error) => {
console.error("Something went wrong:", error);
});
// Connect to Discord
client.connect();

View File

@ -1,22 +1,20 @@
{ {
"name": "fp-bot", "name": "bot",
"type": "module",
"version": "1.0.0", "version": "1.0.0",
"description": "", "description": "",
"main": "index.ts", "main": "index.js",
"scripts": { "scripts": {
"dev": "node --import=tsx --watch ./src/index.ts" "test": "echo \"Error: no test specified\" && exit 1",
"start": "node index",
"dev": "nodemon index"
}, },
"keywords": [], "keywords": [],
"author": "", "author": "",
"license": "CC0-1.0", "license": "Unlicense",
"devDependencies": {
"tsx": "^4.7.2"
},
"dependencies": { "dependencies": {
"@types/express": "^4.17.21", "discord.js": "^14.15.3",
"@types/node": "^20.12.6", "dotenv": "^16.4.5",
"discordeno": "^18.0.1", "nodemon": "^3.1.4"
"express": "^4.19.2",
"oceanic.js": "^1.10.0"
} }
} }

File diff suppressed because it is too large.

View File

@ -1,28 +0,0 @@
import { getBotIdFromToken, Intents } from 'discordeno';
/** The bot id, derived from the bot token. */
export const BOT_ID = getBotIdFromToken(process.env.DISCORD_TOKEN as string);
export const EVENT_HANDLER_URL = `http://${process.env.EVENT_HANDLER_HOST}:${process.env.EVENT_HANDLER_PORT}`;
export const REST_URL = `http://${process.env.REST_HOST}:${process.env.REST_PORT}`;
export const GATEWAY_URL = `http://${process.env.GATEWAY_HOST}:${process.env.GATEWAY_PORT}`;
// Gateway Proxy Configurations
/** The gateway intents you would like to use. */
export const INTENTS: Intents =
// SETUP-DD-TEMP: Add the intents you want enabled here. Or Delete the intents you don't want in your bot.
Intents.DirectMessageReactions |
Intents.DirectMessageTyping |
Intents.DirectMessages |
Intents.GuildBans |
Intents.GuildEmojis |
Intents.GuildIntegrations |
Intents.GuildInvites |
Intents.GuildMembers |
Intents.GuildMessageReactions |
Intents.GuildMessageTyping |
Intents.GuildMessages |
Intents.GuildPresences |
Intents.GuildVoiceStates |
Intents.GuildWebhooks |
Intents.Guilds;

View File

@ -1,58 +0,0 @@
import { BASE_URL, createRestManager } from 'discordeno';
import express, { Request, Response } from 'express';
// import { setupAnalyticsHooks } from '../analytics.js';
import { REST_URL } from './configs.js';
const DISCORD_TOKEN = process.env.DISCORD_TOKEN as string;
const REST_AUTHORIZATION = process.env.REST_AUTHORIZATION as string;
const REST_PORT = process.env.REST_PORT as string;
const rest = createRestManager({
token: DISCORD_TOKEN,
secretKey: REST_AUTHORIZATION,
customUrl: REST_URL,
debug: console.log,
});
// Add send fetching analytics hook to rest
// setupAnalyticsHooks(rest);
// @ts-expect-error
rest.convertRestError = (errorStack, data) => {
if (!data) return { message: errorStack.message };
return { ...data, message: errorStack.message };
};
const app = express();
app.use(
express.urlencoded({
extended: true,
}),
);
app.use(express.json());
app.all('/*', async (req: Request, res: Response) => {
if (!REST_AUTHORIZATION || REST_AUTHORIZATION !== req.headers.authorization) {
return res.status(401).json({ error: 'Invalid authorization key.' });
}
try {
const result = await rest.runMethod(rest, req.method, `${BASE_URL}${req.url}`, req.body);
if (result) {
res.status(200).json(result);
} else {
res.status(204).json();
}
} catch (error: any) {
console.log(error);
res.status(500).json(error);
}
});
app.listen(REST_PORT, () => {
console.log(`REST listening at ${REST_URL}`);
});

View File

@ -1,9 +0,0 @@
# REST Proxy
This folder will contain the code for our REST proxy. This is going to become the single source that all of our bots will use to communicate with the Discord API.
## Further Steps
- We use the Express framework to create the listener; however, you can replace it with anything you like. Express is a fairly heavyweight framework, so feel free to swap in a leaner one.
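As a rough illustration of the proxy's role (a hypothetical caller-side sketch, not code from this repo): a worker forwards its Discord REST calls to the proxy, which validates the shared REST_AUTHORIZATION secret and relays the method, path, and body to the Discord API. This sketch reuses the REST_HOST, REST_PORT, and REST_AUTHORIZATION variables the proxy code above reads.
```
// Hypothetical helper: send a Discord REST call via the proxy instead of discord.com.
// The proxy returns 204 with no body when Discord's response is empty.
const REST_URL = `http://${process.env.REST_HOST}:${process.env.REST_PORT}`;

async function getChannel(channelId) {
  const res = await fetch(`${REST_URL}/channels/${channelId}`, {
    headers: { authorization: process.env.REST_AUTHORIZATION },
  });
  if (!res.ok) throw new Error(`proxy request failed with status ${res.status}`);
  return res.status === 204 ? null : await res.json();
}
```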

View File

@ -1,30 +0,0 @@
{
"compilerOptions": {
"target": "es2022",
"module": "es2022",
"experimentalDecorators": true,
"emitDecoratorMetadata": true,
"outDir": "./dist",
"rootDir": "./src",
"esModuleInterop": true,
"importHelpers": true,
"allowUnusedLabels": false,
"noImplicitOverride": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"noUncheckedIndexedAccess": true,
"strict": true,
"stripInternal": true,
"noFallthroughCasesInSwitch": true,
"useUnknownInCatchVariables": false,
"allowUnreachableCode": false,
"skipLibCheck": true,
"moduleResolution": "node"
},
"include": ["./src/**/*", ".env"],
"ts-node": {
"esm": true,
"experimentalSpecifierResolution": "node",
"swc": true
}
}

View File

@ -0,0 +1,3 @@
{
"loader": "ts-node/esm"
}

View File

@ -1,5 +1,5 @@
import { getProminentColor, rgbToHex, getStoryboard } from './image.js' import { getProminentColor, rgbToHex, getStoryboard } from './index.js'
import { expect } from 'chai' import { expect } from 'chai'
import { describe } from 'mocha' import { describe } from 'mocha'
import path from 'node:path' import path from 'node:path'
@ -14,9 +14,9 @@ describe('image', function () {
}) })
describe('rgbToHex', function () { describe('rgbToHex', function () {
it('should convert color to hex {String} hexidecimal code', function () { it('should convert color to hex {String} hexidecimal code', function () {
const mulberry = [255, 87, 51] const mulberry = [255, 87, 51] as const
const screaminGreen = [77, 255, 106] const screaminGreen = [77, 255, 106] as const
const amaranth = [227, 64, 81] const amaranth = [227, 64, 81] as const
expect(rgbToHex(...mulberry)).to.equal('#ff5733') expect(rgbToHex(...mulberry)).to.equal('#ff5733')
expect(rgbToHex(...screaminGreen)).to.equal('#4dff6a') expect(rgbToHex(...screaminGreen)).to.equal('#4dff6a')
expect(rgbToHex(...amaranth)).to.equal('#e34051') expect(rgbToHex(...amaranth)).to.equal('#e34051')

30
packages/image/index.ts Normal file
View File

@ -0,0 +1,30 @@
// import ColorThief from 'colorthief';
import sharp from 'sharp';
import Prevvy from 'prevvy';
import path from 'path';
import { getTmpFile } from 'utils';
export async function getProminentColor(imageFile: string): Promise<string> {
const { dominant } = await sharp(imageFile).stats();
const { r, g, b } = dominant;
return rgbToHex(r, g, b);
}
export function rgbToHex(r: number, g: number, b: number): string {
return "#" + (1 << 24 | r << 16 | g << 8 | b).toString(16).slice(1);
}
export async function getStoryboard(imageFileOrUrl: string): Promise<string> {
let base = path.basename(imageFileOrUrl);
let outputImagePath = getTmpFile(base);
let options = {
input: imageFileOrUrl,
output: outputImagePath,
width: 265,
cols: 5,
rows: 5,
};
let prevvy = new Prevvy(options);
await prevvy.generate();
return outputImagePath;
}

View File

@ -0,0 +1,26 @@
{
"name": "image",
"type": "module",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "Unlicense",
"dependencies": {
"@types/chai": "^4.3.16",
"@types/mocha": "^10.0.7",
"prevvy": "^7.0.1",
"sharp": "^0.33.4",
"utils": "workspace:^"
},
"devDependencies": {
"chai": "^5.1.1",
"mocha": "^10.6.0",
"ts-node": "^10.9.2",
"typescript": "^5.5.3"
}
}

File diff suppressed because it is too large.

View File

@ -0,0 +1,30 @@
{
"compilerOptions": {
// Base Options recommended for all projects
"esModuleInterop": true,
"skipLibCheck": true,
"target": "es2022",
"allowJs": true,
"resolveJsonModule": true,
"moduleDetection": "force",
"isolatedModules": true,
// Enable strict type checking so you can catch bugs early
"strict": true,
"noUncheckedIndexedAccess": true,
"noImplicitOverride": true,
// Transpile our TypeScript code to JavaScript
"module": "NodeNext",
"outDir": "lib",
"lib": [
"es2022"
]
},
// Include the necessary files for your project
"include": [
"**/*.ts",
"**/*.tsx"
],
"exclude": [
"node_modules"
]
}

View File

@ -13,7 +13,7 @@ export default async function Page() {
<section className="hero"> <section className="hero">
<div className="hero-body"> <div className="hero-body">
<p className="title">About</p> <p className="title">About</p>
<p>It's the worst feeling when a VOD disappears from the internet. It means you missed out, it's gone, and you may never experience what your peers got to take part in.</p> <p>It&apos;s the worst feeling when a VOD disappears from the internet. It means you missed out, it&apos;s gone, and you may never experience what your peers got to take part in.</p>
<p>Futureporn is created by fans, for fans. Missed a stream? We got you, bro.</p> <p>Futureporn is created by fans, for fans. Missed a stream? We got you, bro.</p>
<p>Together we can end 404s and create an everlasting archive of lewdtuber livestreams.</p> <p>Together we can end 404s and create an everlasting archive of lewdtuber livestreams.</p>

View File

@ -5,6 +5,8 @@ import { IStream } from "@/lib/streams";
import { useSearchParams } from 'next/navigation'; import { useSearchParams } from 'next/navigation';
import React, { useContext, useState, useEffect } from 'react'; import React, { useContext, useState, useEffect } from 'react';
import { UppyContext } from 'app/uppy'; import { UppyContext } from 'app/uppy';
import AwsS3 from '@uppy/aws-s3';
import RemoteSources from '@uppy/remote-sources';
import { LoginButton, useAuth } from '@/components/auth'; import { LoginButton, useAuth } from '@/components/auth';
import { Dashboard } from '@uppy/react'; import { Dashboard } from '@uppy/react';
import styles from '@/assets/styles/fp.module.css' import styles from '@/assets/styles/fp.module.css'
@ -19,6 +21,10 @@ import * as Yup from 'yup';
import qs from 'qs'; import qs from 'qs';
import { toast } from "react-toastify"; import { toast } from "react-toastify";
import { ErrorMessage } from "@hookform/error-message" import { ErrorMessage } from "@hookform/error-message"
import Uppy from '@uppy/core';
import { companionUrl } from '@/lib/constants';
interface IUploadFormProps { interface IUploadFormProps {
vtubers: IVtuber[]; vtubers: IVtuber[];
@ -58,9 +64,38 @@ const validationSchema = Yup.object().shape({
export default function UploadForm({ vtubers }: IUploadFormProps) { export default function UploadForm({ vtubers }: IUploadFormProps) {
const searchParams = useSearchParams(); const searchParams = useSearchParams();
const cuid = searchParams.get('cuid'); const cuid = searchParams.get('cuid');
const uppy = useContext(UppyContext);
const { authData } = useAuth(); const { authData } = useAuth();
const uppy = new Uppy(
{
autoProceed: true,
debug: true,
logger: {
debug: console.info,
warn: console.log,
error: console.error
},
}
)
.use(RemoteSources, {
companionUrl,
sources: [
'GoogleDrive',
'Dropbox',
'Url'
]
})
.use(AwsS3, {
companionUrl,
shouldUseMultipart: true,
abortMultipartUpload: () => {}, // @see https://github.com/transloadit/uppy/issues/1197#issuecomment-491756118
companionHeaders: {
'authorization': `Bearer ${authData?.accessToken}`
}
})
const formOptions = { const formOptions = {
resolver: yupResolver(validationSchema), resolver: yupResolver(validationSchema),
@ -159,23 +194,24 @@ export default function UploadForm({ vtubers }: IUploadFormProps) {
uppy.on('complete', async (result: any) => { uppy.on('complete', async (result: any) => {
console.log('uppy complete! ')
console.log(result)
for (const s of result.successful) { for (const s of result.successful) {
if (!s?.s3Multipart) { if (!s?.s3Multipart) {
const m = 'file was missing s3Multipart'
toast.error(`${m}`, { theme: 'dark' });
setError('root.serverError', { setError('root.serverError', {
type: 'remote', type: 'remote',
message: 'file was missing s3Multipart' message: m
}) })
// throw new Error('file was missing s3Multipart') throw new Error(m)
} }
} }
console.log('uppy complete! ')
console.log(result)
toast.success(`upload complete`);
let files = result.successful.map((f: any) => ({ key: f.s3Multipart.key, uploadId: f.s3Multipart.uploadId })); let files = result.successful.map((f: any) => ({ key: f.s3Multipart.key, uploadId: f.s3Multipart.uploadId }));
setValue('files', files); setValue('files', files);
}); });
return ( return (
<> <>
@ -215,19 +251,30 @@ export default function UploadForm({ vtubers }: IUploadFormProps) {
uppy={uppy} uppy={uppy}
theme='dark' theme='dark'
proudlyDisplayPoweredByUppy={true} proudlyDisplayPoweredByUppy={true}
showProgressDetails={true} showProgressDetails={true}
/> />
{/* This form is hidden. Why? */} {/*
Here is how we upload the files to the server.
From uppy, we get a list of files.
we add the files to a hidden input box.
the input box is part of the form which gets POSTed.
*/}
<input <input
required required
hidden={false} hidden={true}
style={{ display: 'block' }} style={{ display: 'block' }}
className="input" type="text" className="input" type="text"
{...register('files')} {...register('files')}
></input> ></input>
<button className="button" onClick={() => { setValue('files', [
{
"key": "4b4063a2-6b57-48f1-8565-a12ddce473e9-E1tB0KoUcAYJTni.jpg",
"uploadId": "4_z7d53875ff1c32a1983d30b18_f2129582707239923_d20240708_m003328_c000_v0001086_t0006_u01720398808368"
}
]); }}>(Debug) Add a list of files</button>
{errors.files && <p className="help is-danger">{errors.files.message?.toString()}</p>} {errors.files && <p className="help is-danger">{errors.files.message?.toString()}</p>}
@ -363,7 +410,7 @@ export default function UploadForm({ vtubers }: IUploadFormProps) {
)} )}
{isSubmitSuccessful && ( {isSubmitSuccessful && (
<> <>
<aside className="notification mt-5 is-success">Thank you for uploading! </aside> <aside className="notification mt-5 is-success">Thank you for uploading! A moderator will review the VOD before being published.</aside>
<button onClick={() => { <button onClick={() => {
reset(); // reset form reset(); // reset form
const files = uppy.getFiles() const files = uppy.getFiles()

View File

@ -1,14 +1,8 @@
import Link from "next/link"; import Link from "next/link";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { getSafeDate } from '@/lib/dates';
import { faPatreon } from "@fortawesome/free-brands-svg-icons";
import { faVideo } from "@fortawesome/free-solid-svg-icons";
import { getSafeDate, getDateFromSafeDate } from '@/lib/dates';
import { IVtuber } from '@/lib/vtubers'; import { IVtuber } from '@/lib/vtubers';
import Image from "next/legacy/image" import Image from "next/legacy/image"
import { LocalizedDate } from '@/components/localized-date' import { LocalizedDate } from '@/components/localized-date'
import { IMuxAsset, IMuxAssetResponse } from "@/lib/types";
import { IB2File } from "@/lib/b2File";
import VtuberButton from "./vtuber-button";
interface IVodCardProps { interface IVodCardProps {
id: number; id: number;

View File

@ -83,7 +83,7 @@ export default function Page() {
useEffect(() => { useEffect(() => {
initAuth() initAuth()
}, []) })

View File

@ -1,4 +1,4 @@
import { IMeta } from "./types"; import { IMeta } from "types";
export interface IB2File { export interface IB2File {
id: number; id: number;

View File

@ -1,37 +1,10 @@
import { siteUrl, strapiUrl } from './constants'; import { siteUrl, strapiUrl } from './constants';
import { getSafeDate } from './dates'; import { getSafeDate } from './dates';
import { IVodsResponse } from './vods';
import { IVtuber, IVtuberResponse } from './vtubers';
import { ITweetResponse } from './tweets';
import { IMeta } from './types';
import qs from 'qs'; import qs from 'qs';
export interface IStream {
id: number;
attributes: {
date: string;
date2: string;
archiveStatus: 'good' | 'issue' | 'missing';
vods: IVodsResponse;
cuid: string;
vtuber: IVtuberResponse;
tweet: ITweetResponse;
isChaturbateStream: boolean;
isFanslyStream: boolean;
}
}
export interface IStreamResponse {
data: IStream;
meta: IMeta;
}
export interface IStreamsResponse {
data: IStream[];
meta: IMeta;
}
const fetchStreamsOptions = { const fetchStreamsOptions = {

View File

@ -11,7 +11,7 @@ import { strapiUrl } from './constants'
import { ITagResponse, IToyTagResponse } from './tags'; import { ITagResponse, IToyTagResponse } from './tags';
import { IVod, IVodResponse } from './vods'; import { IVod, IVodResponse } from './vods';
import { IAuthData } from '@/components/auth'; import { IAuthData } from '@/components/auth';
import { IMeta } from './types'; import { IMeta } from 'types';
export interface ITagVodRelation { export interface ITagVodRelation {
id: number; id: number;

View File

@ -5,7 +5,7 @@ import slugify from 'slugify';
import { IToy } from './toys'; import { IToy } from './toys';
import { IAuthData } from '@/components/auth'; import { IAuthData } from '@/components/auth';
import qs from 'qs'; import qs from 'qs';
import { IMeta } from './types'; import { IMeta } from 'types';
export interface ITag { export interface ITag {

View File

@ -4,7 +4,7 @@ import qs from 'qs';
import { strapiUrl } from './constants' import { strapiUrl } from './constants'
import { IAuthData } from '@/components/auth'; import { IAuthData } from '@/components/auth';
import { ITagsResponse, ITag, ITagResponse } from './tags'; import { ITagsResponse, ITag, ITagResponse } from './tags';
import { IMeta } from './types'; import { IMeta } from 'types';
export interface ITimestamp { export interface ITimestamp {
id: number; id: number;

View File

@ -1,6 +1,6 @@
import { ITag, ITagResponse, ITagsResponse } from '@/lib/tags' import { ITag, ITagResponse, ITagsResponse } from '@/lib/tags'
import { IMeta } from './types'; import { IMeta } from 'types';
export interface IToysResponse { export interface IToysResponse {

View File

@ -1,5 +1,5 @@
import { IVtuberResponse } from "./vtubers"; import { IVtuberResponse } from "./vtubers";
import { IMeta } from "./types"; import { IMeta } from "types";
export interface ITweet { export interface ITweet {
id: number; id: number;

View File

@ -1,46 +0,0 @@
export interface IMuxAsset {
id: number;
attributes: {
playbackId: string;
assetId: string;
}
}
export interface IPagination {
page: number;
pageSize: number;
pageCount: number;
total: number;
}
export interface IMuxAssetResponse {
data: IMuxAsset;
meta: IMeta;
}
export interface IMeta {
pagination: IPagination;
}
export interface IPlatformNotification {
id: number;
attributes: {
source: string;
platform: string;
date: string;
date2: string;
vtuber: number;
}
}
export interface IPlatformNotificationResponse {
data: IPlatformNotification;
meta: IMeta;
}

View File

@ -1,4 +1,4 @@
import { IMeta } from "./types"; import { IMeta } from "types";
export interface IUser { export interface IUser {

View File

@ -6,7 +6,7 @@ import { IStream, IStreamResponse } from './streams';
import qs from 'qs'; import qs from 'qs';
import { ITagVodRelationsResponse } from './tag-vod-relations'; import { ITagVodRelationsResponse } from './tag-vod-relations';
import { ITimestampsResponse } from './timestamps'; import { ITimestampsResponse } from './timestamps';
import { IMeta, IMuxAsset, IMuxAssetResponse } from './types'; import { IMeta, IMuxAsset, IMuxAssetResponse } from 'types';
import { IB2File, IB2FileResponse } from '@/lib/b2File'; import { IB2File, IB2FileResponse } from '@/lib/b2File';
import fetchAPI from './fetch-api'; import fetchAPI from './fetch-api';
import { IUserResponse } from './users'; import { IUserResponse } from './users';

View File

@ -3,7 +3,7 @@
import { IVod } from './vods' import { IVod } from './vods'
import { strapiUrl, siteUrl } from './constants'; import { strapiUrl, siteUrl } from './constants';
import qs from 'qs'; import qs from 'qs';
import { IMeta } from './types'; import { IMeta } from 'types';
const fetchVtubersOptions = { const fetchVtubersOptions = {
@ -13,49 +13,6 @@ const fetchVtubersOptions = {
} }
export interface IVtuber {
id: number;
attributes: {
slug: string;
displayName: string;
chaturbate?: string;
twitter?: string;
patreon?: string;
twitch?: string;
tiktok?: string;
onlyfans?: string;
youtube?: string;
linktree?: string;
carrd?: string;
fansly?: string;
pornhub?: string;
discord?: string;
reddit?: string;
throne?: string;
instagram?: string;
facebook?: string;
merch?: string;
vods: IVod[];
description1: string;
description2?: string;
image: string;
imageBlur?: string;
themeColor: string;
fanslyId?: string;
chaturbateId?: string;
twitterId?: string;
}
}
export interface IVtuberResponse {
data: IVtuber;
meta: IMeta;
}
export interface IVtubersResponse {
data: IVtuber[];
meta: IMeta;
}
export function getUrl(slug: string): string { export function getUrl(slug: string): string {

View File

@ -20,19 +20,22 @@ export default function UppyProvider({
children: React.ReactNode children: React.ReactNode
}) { }) {
const { authData } = useAuth(); const { authData } = useAuth();
const [uppy] = useState(() => new Uppy( const uppy = new Uppy(
{ // const [uppy] = useState(() => new Uppy(
autoProceed: false, {
debug: true autoProceed: true,
debug: true,
logger: {
debug: console.info,
warn: console.log,
error: console.error
},
} }
) )
.use(RemoteSources, { .use(RemoteSources, {
companionUrl, companionUrl,
sources: [ title: 'testing 123',
'GoogleDrive',
'Dropbox',
'Url'
]
}) })
.use(AwsS3, { .use(AwsS3, {
companionUrl, companionUrl,
@ -42,7 +45,7 @@ export default function UppyProvider({
'authorization': `Bearer ${authData?.accessToken}` 'authorization': `Bearer ${authData?.accessToken}`
} }
}) })
); // );

View File

@ -62,7 +62,8 @@
"sharp": "^0.33.4", "sharp": "^0.33.4",
"slugify": "^1.6.6", "slugify": "^1.6.6",
"styled-components": "5.3.3", "styled-components": "5.3.3",
"yup": "^1.4.0" "yup": "^1.4.0",
"types": "workspace:*"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "^20.14.9", "@types/node": "^20.14.9",

View File

@ -164,6 +164,9 @@ importers:
styled-components: styled-components:
specifier: 5.3.3 specifier: 5.3.3
version: 5.3.3(@babel/core@7.24.5)(react-dom@18.3.1(react@18.3.1))(react-is@16.13.1)(react@18.3.1) version: 5.3.3(@babel/core@7.24.5)(react-dom@18.3.1(react@18.3.1))(react-is@16.13.1)(react@18.3.1)
types:
specifier: workspace:*
version: link:../types
yup: yup:
specifier: ^1.4.0 specifier: ^1.4.0
version: 1.4.0 version: 1.4.0
@ -3689,7 +3692,7 @@ snapshots:
eslint: 8.57.0 eslint: 8.57.0
eslint-import-resolver-node: 0.3.9 eslint-import-resolver-node: 0.3.9
eslint-import-resolver-typescript: 3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0) eslint-import-resolver-typescript: 3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0)
eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0)
eslint-plugin-jsx-a11y: 6.9.0(eslint@8.57.0) eslint-plugin-jsx-a11y: 6.9.0(eslint@8.57.0)
eslint-plugin-react: 7.34.3(eslint@8.57.0) eslint-plugin-react: 7.34.3(eslint@8.57.0)
eslint-plugin-react-hooks: 4.6.2(eslint@8.57.0) eslint-plugin-react-hooks: 4.6.2(eslint@8.57.0)
@ -3713,7 +3716,7 @@ snapshots:
enhanced-resolve: 5.17.0 enhanced-resolve: 5.17.0
eslint: 8.57.0 eslint: 8.57.0
eslint-module-utils: 2.8.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0) eslint-module-utils: 2.8.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0)
eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0)
fast-glob: 3.3.2 fast-glob: 3.3.2
get-tsconfig: 4.7.5 get-tsconfig: 4.7.5
is-core-module: 2.14.0 is-core-module: 2.14.0
@ -3735,7 +3738,7 @@ snapshots:
transitivePeerDependencies: transitivePeerDependencies:
- supports-color - supports-color
eslint-plugin-import@2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0): eslint-plugin-import@2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0):
dependencies: dependencies:
array-includes: 3.1.8 array-includes: 3.1.8
array.prototype.findlastindex: 1.2.5 array.prototype.findlastindex: 1.2.5

View File

@ -4,6 +4,9 @@
"version": "3.3.0", "version": "3.3.0",
"description": "vtuber data acquisition", "description": "vtuber data acquisition",
"main": "src/index.js", "main": "src/index.js",
"exports": {
"./*.js": "./src/*.js"
},
"scripts": { "scripts": {
"test": "mocha", "test": "mocha",
"build:worker": "tsc --build ./tsconfig.json", "build:worker": "tsc --build ./tsconfig.json",

View File

@ -1,4 +1,4 @@
import cheerio from 'cheerio' import * as cheerio from 'cheerio'
/** /**
* *

View File

@ -1,33 +0,0 @@
// import ColorThief from 'colorthief'
import sharp from 'sharp'
import Prevvy from 'prevvy'
import path from 'path'
import { getTmpFile } from './utils.js';
export async function getProminentColor(imageFile) {
const { dominant } = await sharp(imageFile).stats();
const { r, g, b } = dominant;
return rgbToHex(r, g, b)
}
export function rgbToHex(r, g, b) {
return "#" + (1 << 24 | r << 16 | g << 8 | b).toString(16).slice(1);
}
export async function getStoryboard(imageFileOrUrl) {
let base = path.basename(imageFileOrUrl)
let outputImagePath = getTmpFile(base)
let options = {
input: imageFileOrUrl,
output: outputImagePath,
width: 265,
cols: 5,
rows: 5
};
let prevvy = new Prevvy(options)
await prevvy.generate()
return outputImagePath
}

View File

@ -1,61 +0,0 @@
import dotenv from 'dotenv'
dotenv.config({
path: '../../.env'
})
import { S3Client } from "@aws-sdk/client-s3"
import { Upload } from "@aws-sdk/lib-storage"
// import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
import { createId } from '@paralleldrive/cuid2'
import { basename } from 'node:path'
import fs from 'node:fs'
if (!process.env.S3_BUCKET_NAME) throw new Error('S3_BUCKET_NAME was undefined in env');
if (!process.env.SCOUT_NITTER_URL) throw new Error('SCOUT_NITTER_URL was undefined in env');
if (!process.env.S3_BUCKET_KEY_ID) throw new Error('S3_BUCKET_KEY_ID was undefined in env');
if (!process.env.S3_BUCKET_APPLICATION_KEY) throw new Error('S3_BUCKET_APPLICATION_KEY was undefined in env');
export async function uploadFile(filePath) {
if (!filePath) throw new Error("first argument, 'filePath' is undefined");
const client = new S3Client({
endpoint: 'https://s3.us-west-000.backblazeb2.com',
region:'us-west-000',
credentials:{
accessKeyId: process.env.S3_BUCKET_KEY_ID,
secretAccessKey: process.env.S3_BUCKET_APPLICATION_KEY
}
});
const target = {
Bucket: process.env.S3_BUCKET_NAME,
Key: `${createId()}-${basename(filePath)}`,
Body: fs.createReadStream(filePath)
}
// greets https://stackoverflow.com/a/70159394/1004931
try {
const parallelUploads3 = new Upload({
client: client,
//tags: [...], // optional tags
queueSize: 4, // optional concurrency configuration
leavePartsOnError: false, // optional manually handle dropped parts
params: target,
});
// parallelUploads3.on("httpUploadProgress", (progress) => {
// console.log(progress);
// });
const res = await parallelUploads3.done();
return res
} catch (e) {
console.error(`while uploading a file to s3, we encountered an error`)
throw new Error(e);
}
}

View File

@ -1,69 +0,0 @@
import slugify from 'slugify'
import path, { basename } from 'node:path'
import os from 'node:os'
import fs from 'node:fs'
import { createId } from '@paralleldrive/cuid2'
import { ua0 } from './ua.js'
import { Readable } from 'stream'
import { finished } from 'stream/promises'
import pRetry from 'p-retry'
export function fpSlugify(str) {
return slugify(str, {
lower: true,
strict: true,
locale: 'en',
trim: true
})
}
export function getTmpFile(str) {
return path.join(os.tmpdir(), `${createId()}_${basename(str)}`)
}
/**
*
* @param {String} url
* @returns {String} filePath
*
* greetz https://stackoverflow.com/a/74722818/1004931
*/
export async function download({ url, filePath }) {
if (!url) throw new Error(`second arg passed to download() must be a {string} url`);
const fileBaseName = basename(url)
filePath = filePath || path.join(os.tmpdir(), `${createId()}_${fileBaseName}`)
const stream = fs.createWriteStream(filePath)
const requestData = async () => {
const response = await fetch(url, {
headers: {
'user-agent': ua0
}
})
const { body } = response
await finished(Readable.fromWeb(body).pipe(stream))
// Abort retrying if the resource doesn't exist
if (response.status === 404) {
throw new AbortError(response.statusText);
}
return
}
try {
await pRetry(requestData, { retries: 3 })
} catch (e) {
console.error(`utils.download failed to download ${url}`)
console.error(e)
return null
}
return filePath
}
export const tmpFileRegex = /^\/tmp\/.*\.jpg$/

View File

@ -1,25 +0,0 @@
import { fpSlugify, getTmpFile, download } from './utils.js'
import { expect } from 'chai'
import { describe } from 'mocha'
describe('utils', function () {
describe('fpSlugify', function () {
it('should remove all capitalization and uppercase and spaces and special characters', function () {
expect(fpSlugify('ProjektMelody')).to.equal('projektmelody')
expect(fpSlugify('CJ_Clippy')).to.equal('cjclippy')
})
})
describe('getTmpFile', function () {
it('should give a /tmp/<random>_<basename> path', function () {
expect(getTmpFile('my-cool-image.webp')).to.match(/\/tmp\/.*_my-cool-image\.webp/)
expect(getTmpFile('video.mp4')).to.match(/\/tmp\/.*_video\.mp4/)
})
}),
describe('download', function () {
it('should get the file', async function () {
const file = await download({ url: 'https://futureporn-b2.b-cdn.net/sample.webp' })
expect(file).to.match(/\/tmp\/.*sample\.webp$/)
})
})
})

View File

@ -40,5 +40,4 @@ export async function getImage(vtuber) {
throw new Error(msg) throw new Error(msg)
} }
return img return img
} }

View File

@ -0,0 +1,3 @@
{
"loader": "ts-node/esm"
}

View File

@ -0,0 +1,22 @@
{
"name": "storage",
"version": "1.0.0",
"description": "",
"main": "index.js",
"exports": {
"./*.js": "./src/*.js"
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "@CJ_Clippy",
"license": "Unlicense",
"dependencies": {
"@aws-sdk/client-s3": "^3.583.0",
"@aws-sdk/lib-storage": "^3.588.0",
"@paralleldrive/cuid2": "^2.2.2",
"@types/node": "^20.14.9",
"dotenv": "^16.4.5"
}
}

File diff suppressed because it is too large.

View File

@ -0,0 +1,51 @@
import dotenv from 'dotenv';
dotenv.config({
path: '../../.env',
});
import { S3Client, S3ClientConfig } from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";
import { createId } from '@paralleldrive/cuid2';
import { basename } from 'node:path';
import fs from 'node:fs';
if (!process.env.S3_BUCKET_NAME) throw new Error('S3_BUCKET_NAME was undefined in env');
if (!process.env.SCOUT_NITTER_URL) throw new Error('SCOUT_NITTER_URL was undefined in env');
if (!process.env.S3_BUCKET_KEY_ID) throw new Error('S3_BUCKET_KEY_ID was undefined in env');
if (!process.env.S3_BUCKET_APPLICATION_KEY) throw new Error('S3_BUCKET_APPLICATION_KEY was undefined in env');
export async function uploadFile(filePath: string): Promise<any> {
if (!filePath) throw new Error("first argument, 'filePath' is undefined");
const options: S3ClientConfig = {
endpoint: 'https://s3.us-west-000.backblazeb2.com',
region: 'us-west-000',
credentials: {
accessKeyId: process.env.S3_BUCKET_KEY_ID!,
secretAccessKey: process.env.S3_BUCKET_APPLICATION_KEY!,
},
}
const client = new S3Client(options);
const target = {
Bucket: process.env.S3_BUCKET_NAME,
Key: `${createId()}-${basename(filePath)}`,
Body: fs.createReadStream(filePath),
};
// greets https://stackoverflow.com/a/70159394/1004931
try {
const parallelUploads3 = new Upload({
client: client,
queueSize: 4, // optional concurrency configuration
leavePartsOnError: false, // optional manually handle dropped parts
params: target,
});
const res = await parallelUploads3.done();
return res;
} catch (e) {
console.error(`while uploading a file to s3, we encountered an error`);
if (e instanceof Error) {
throw new Error(e.message);
}
throw e;
}
}

View File

@ -0,0 +1,30 @@
{
"compilerOptions": {
// Base Options recommended for all projects
"esModuleInterop": true,
"skipLibCheck": true,
"target": "es2022",
"allowJs": true,
"resolveJsonModule": true,
"moduleDetection": "force",
"isolatedModules": true,
// Enable strict type checking so you can catch bugs early
"strict": true,
"noUncheckedIndexedAccess": true,
"noImplicitOverride": true,
// Transpile our TypeScript code to JavaScript
"module": "NodeNext",
"outDir": "lib",
"lib": [
"es2022"
]
},
// Include the necessary files for your project
"include": [
"**/*.ts",
"**/*.tsx"
],
"exclude": [
"node_modules"
]
}

View File

@ -1,4 +0,0 @@
# STOP! This .dockerignore is probably not the .dockerignore you are looking for.
# The dockerignore in the ROOT of the Docker context is the .dockerignore that docker uses.
# We are using a monorepo and the docker build context is the root of this git repo.
# thus, see ../../.dockerignore

View File

@ -1,118 +0,0 @@
.env*
tunnel.conf
############################
# OS X
############################
.DS_Store
.AppleDouble
.LSOverride
Icon
.Spotlight-V100
.Trashes
._*
############################
# Linux
############################
*~
############################
# Windows
############################
Thumbs.db
ehthumbs.db
Desktop.ini
$RECYCLE.BIN/
*.cab
*.msi
*.msm
*.msp
############################
# Packages
############################
*.7z
*.csv
*.dat
*.dmg
*.gz
*.iso
*.jar
*.rar
*.tar
*.zip
*.com
*.class
*.dll
*.exe
*.o
*.seed
*.so
*.swo
*.swp
*.swn
*.swm
*.out
*.pid
############################
# Logs and databases
############################
.tmp
*.log
*.sql
*.sqlite
*.sqlite3
############################
# Misc.
############################
*#
ssl
.idea
nbproject
public/uploads/*
!public/uploads/.gitkeep
############################
# Node.js
############################
lib-cov
lcov.info
pids
logs
results
node_modules
.node_history
############################
# Tests
############################
testApp
coverage
############################
# Strapi
############################
.env
license.txt
exports
*.cache
dist
build
.strapi-updater.json

View File

@ -1,5 +0,0 @@
shamefully-hoist=true
engine-strict=true
package-manager-strict=true
use-node-version=18.20.3
node-version=18.20.3

View File

@ -1,14 +0,0 @@
/**
* This file was automatically generated by Strapi.
* Any modifications made will be discarded.
*/
import i18N from "@strapi/plugin-i18n/strapi-admin";
import usersPermissions from "@strapi/plugin-users-permissions/strapi-admin";
import { renderAdmin } from "@strapi/strapi/admin";
renderAdmin(document.getElementById("strapi"), {
plugins: {
i18n: i18N,
"users-permissions": usersPermissions,
},
});

View File

@ -1,62 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<!--
This file was automatically generated by Strapi.
Any modifications made will be discarded.
-->
<head>
<meta charset="utf-8" />
<meta
name="viewport"
content="width=device-width, initial-scale=1, viewport-fit=cover"
/>
<meta name="robots" content="noindex" />
<meta name="referrer" content="same-origin" />
<title>Strapi Admin</title>
<style>
html,
body,
#strapi {
height: 100%;
}
body {
margin: 0;
-webkit-font-smoothing: antialiased;
}
</style>
</head>
<body>
<div id="strapi"></div>
<noscript
><div class="strapi--root">
<div class="strapi--no-js">
<style type="text/css">
.strapi--root {
position: absolute;
top: 0;
right: 0;
left: 0;
bottom: 0;
background: #fff;
}
.strapi--no-js {
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
text-align: center;
font-family: helvetica, arial, sans-serif;
}
</style>
<h1>JavaScript disabled</h1>
<p>
Please
<a href="https://www.enable-javascript.com/">enable JavaScript</a>
in your browser and reload the page to proceed.
</p>
</div>
</div></noscript
>
</body>
</html>

View File

@ -1,13 +0,0 @@
## dev notes
### patreon campaign benefit ids
* ironmouse "Thank you" (for testing): 4760169
* cj_clippy "Full library access" (for production): 9380584
* cj_clippy "Your URL displayed on Futureporn.net": 10663202
### Content-Type Builder (Docker caveat)
Don't use the web UI to create or update Content-Types! The changes will be lost. This is a side-effect of our hacked-together solution for running Strapi with pnpm in Docker.
Instead, content-type schemas must be hand-edited in ./src/api/(...). For the changes to take effect, trigger a Strapi resource update in Tilt.
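For reference, a hand-edited Strapi v4 content-type schema is a `schema.json` that normally lives under a `content-types` folder inside `./src/api/`. The sketch below, written as a TypeScript object purely for illustration, is hypothetical and only shows the general shape; the real attribute names belong to the repo's own schemas.

```typescript
// Hypothetical example of the shape Strapi v4 expects in a content-type schema.json
// (conventionally src/api/<name>/content-types/<name>/schema.json).
const exampleSchema = {
  kind: 'collectionType',
  collectionName: 'tags',
  info: {
    singularName: 'tag',
    pluralName: 'tags',
    displayName: 'Tag',
  },
  options: {
    draftAndPublish: true,
  },
  attributes: {
    name: { type: 'string', required: true, unique: true },
  },
};
```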

View File

@ -1,49 +0,0 @@
# FROM node:16-alpine as build
# # Installing libvips-dev for sharp Compatibility
# RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev vips-dev > /dev/null 2>&1
# ARG NODE_ENV=production
# ENV NODE_ENV=${NODE_ENV}
# WORKDIR /opt/
# COPY ./package.json ./yarn.lock ./
# ENV PATH /opt/node_modules/.bin:$PATH
# RUN yarn config set network-timeout 600000 -g && yarn install --production
# WORKDIR /opt/app
# COPY ./ .
# RUN yarn build
# FROM node:16-alpine
# RUN apk add --no-cache vips-dev
# FROM node:16-alpine
# RUN apk add --no-cache vips-dev
# ARG NODE_ENV=production
# ENV NODE_ENV=${NODE_ENV}
# WORKDIR /opt/app
# COPY --from=build /opt/node_modules ./node_modules
# ENV PATH /opt/node_modules/.bin:$PATH
# COPY --from=build /opt/app ./
# EXPOSE 5555
# CMD ["yarn", "start"]
# # following is from the strapi website
FROM node:18-alpine
# Installing libvips-dev for sharp Compatibility
RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev nasm bash vips-dev git
ARG NODE_ENV=development
ENV NODE_ENV=${NODE_ENV}
WORKDIR /opt/
COPY package.json yarn.lock ./
RUN yarn global add node-gyp
RUN yarn config set network-timeout 600000 -g && yarn install
ENV PATH /opt/node_modules/.bin:$PATH
WORKDIR /opt/app
COPY . .
RUN chown -R node:node /opt/app
USER node
RUN ["yarn", "build"]
EXPOSE 1337
CMD ["yarn", "dev"]

View File

@ -1,13 +0,0 @@
module.exports = ({ env }) => ({
auth: {
secret: env('ADMIN_JWT_SECRET'),
},
apiToken: {
salt: env('API_TOKEN_SALT'),
},
transfer: {
token: {
salt: env('TRANSFER_TOKEN_SALT'),
},
},
});

View File

@ -1,7 +0,0 @@
module.exports = {
rest: {
defaultLimit: 25,
maxLimit: 100,
withCount: true,
},
};
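With these values, list endpoints return 25 entries by default, clamp `pagination[pageSize]` at 100, and (because of `withCount`) expose totals under `meta.pagination`. A rough client-side sketch, assuming a `/api/tags` collection endpoint exists:

```typescript
// Hypothetical paging loop against a Strapi v4 collection endpoint.
// With maxLimit: 100, asking for a larger pageSize is clamped to 100 by the server.
async function fetchAllTags(baseUrl: string) {
  const results: unknown[] = [];
  let page = 1;
  let pageCount = 1;
  do {
    const res = await fetch(
      `${baseUrl}/api/tags?pagination[page]=${page}&pagination[pageSize]=100`
    );
    const body = await res.json();
    results.push(...body.data);
    pageCount = body.meta.pagination.pageCount; // present because withCount: true
    page += 1;
  } while (page <= pageCount);
  return results;
}
```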

View File

@ -1,49 +0,0 @@
const path = require('path');
module.exports = ({ env }) => {
const client = env('DATABASE_CLIENT', 'postgres');
const connections = {
postgres: {
connection: {
connectionString: env('DATABASE_URL'),
host: env('DATABASE_HOST', 'localhost'),
port: env.int('DATABASE_PORT', 5432),
database: env('DATABASE_NAME', 'strapi'),
user: env('DATABASE_USERNAME', 'strapi'),
password: env('DATABASE_PASSWORD', 'strapi'),
ssl: env.bool('DATABASE_SSL', false) && {
key: env('DATABASE_SSL_KEY', undefined),
cert: env('DATABASE_SSL_CERT', undefined),
ca: env('DATABASE_SSL_CA', undefined),
capath: env('DATABASE_SSL_CAPATH', undefined),
cipher: env('DATABASE_SSL_CIPHER', undefined),
rejectUnauthorized: env.bool(
'DATABASE_SSL_REJECT_UNAUTHORIZED',
true
),
},
schema: env('DATABASE_SCHEMA', 'public'),
},
pool: { min: env.int('DATABASE_POOL_MIN', 2), max: env.int('DATABASE_POOL_MAX', 10) },
},
sqlite: {
connection: {
filename: path.join(
__dirname,
'..',
env('DATABASE_FILENAME', 'data.db')
),
},
useNullAsDefault: true,
},
};
return {
connection: {
client,
...connections[client],
acquireConnectionTimeout: env.int('DATABASE_CONNECTION_TIMEOUT', 60000),
},
};
};

View File

@ -1,26 +0,0 @@
module.exports = [
'strapi::logger',
'strapi::errors',
{
name: 'strapi::security',
config: {
contentSecurityPolicy: {
useDefaults: true,
directives: {
'connect-src': ["'self'", 'https:'],
'img-src': ["'self'", 'data:', 'blob:', 'dl.airtable.com', 'res.cloudinary.com'],
'media-src': ["'self'", 'data:', 'blob:', 'dl.airtable.com', 'res.cloudinary.com'],
upgradeInsecureRequests: null,
},
},
},
},
'strapi::cors',
'strapi::poweredBy',
'strapi::query',
'strapi::body',
'strapi::session',
'strapi::favicon',
'strapi::public',
];

View File

@ -1,75 +0,0 @@
module.exports = ({
env
}) => ({
'fuzzy-search': {
enabled: true,
config: {
contentTypes: [{
uid: 'api::tag.tag',
modelName: 'tag',
transliterate: false,
queryConstraints: {
where: {
'$and': [
{
publishedAt: {
'$notNull': true
}
},
]
}
},
fuzzysortOptions: {
characterLimit: 32,
threshold: -600,
limit: 10,
keys: [{
name: 'name',
weight: 100
}]
}
}]
}
},
upload: {
config: {
provider: 'cloudinary',
providerOptions: {
cloud_name: env('CLOUDINARY_NAME'),
api_key: env('CLOUDINARY_KEY'),
api_secret: env('CLOUDINARY_SECRET'),
},
actionOptions: {
upload: {},
uploadStream: {},
delete: {},
},
}
},
email: {
config: {
provider: 'sendgrid',
providerOptions: {
apiKey: env('SENDGRID_API_KEY'),
},
settings: {
defaultFrom: 'welcome@futureporn.net',
defaultReplyTo: 'cj@futureporn.net',
testAddress: 'grimtech@fastmail.com',
},
},
},
"users-permissions": {
config: {
register: {
allowedFields: [
"isNamePublic",
"isLinkPublic",
"avatar",
"vanityLink",
"patreonBenefits"
]
}
}
}
});
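The `allowedFields` list above is what lets the users-permissions register endpoint accept those extra profile fields at signup. A hedged sketch of such a request (the base URL and field values are purely illustrative):

```typescript
// Hypothetical signup request; only fields listed in allowedFields
// (plus the standard username/email/password) are accepted by Strapi.
async function registerUser(baseUrl: string) {
  const res = await fetch(`${baseUrl}/api/auth/local/register`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      username: 'example',
      email: 'example@example.com',
      password: 'a-strong-password',
      isNamePublic: true,
      isLinkPublic: false,
      vanityLink: 'https://example.com',
    }),
  });
  if (!res.ok) throw new Error(`register failed: ${res.status}`);
  return res.json(); // { jwt, user } on success
}
```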

Some files were not shown because too many files have changed in this diff.