Compare commits

...

3 Commits

Author SHA1 Message Date
CJ_Clippy a60d0f0821 assert a vod CUID when updated
ci / build (push) Has been cancelled Details
2024-07-06 00:49:51 -08:00
CJ_Clippy 3a89a076d9 more cleanup 2024-07-04 13:39:06 -08:00
CJ_Clippy ffe9ca2bb5 cleanup 2024-07-04 13:20:29 -08:00
456 changed files with 34904 additions and 13874 deletions

View File

@ -1,3 +0,0 @@
https://dokku.com/docs/advanced-usage/deployment-tasks/?h=monorepo#changing-the-appjson-location
https://dokku.com/docs/deployment/builders/dockerfiles/

View File

@ -1,8 +0,0 @@
{
"scripts": {
"dokku": {
"predeploy": "echo hello-world-predeploy",
"postdeploy": "echo hello-world-postdeploy"
}
}
}

View File

@ -4,23 +4,18 @@ pnpm for workspaces.
Kubernetes for Development using Tiltfile
kubefwd and entr for DNS in dev cluster
dokku for Production, deployed with `git push`.
(dokku is slowly being replaced by Kubernetes)
Kubernetes for Production, deployed using FluxCD
Tested on VKE v1.30.0+1 (PVCs on other versions may not be fulfilled)
direnv for loading .envrc
Temporal for work queue
Temporal for work queue, cron
Postgres for data storage
S3 for media storage
Domain Driven Development
Test Driven Development
Tested on VKE v1.30.0+1 (PVCs on other versions may not be fulfilled)
Test Driven Development

View File

@ -23,7 +23,7 @@ velero:
tilt:
kind get kubeconfig > ~/.kube/kind.yaml
KUBECONFIG=~/.kube/kind.yaml tilt up -f ./t.wip.tiltfile
KUBECONFIG=~/.kube/kind.yaml tilt up -f ./Tiltfile
exoscale:
kubectl apply -f https://raw.githubusercontent.com/exoscale/cert-manager-webhook-exoscale/master/deploy/exoscale-webhook-kustomize/deploy.yaml

View File

@ -4,10 +4,35 @@ Source Code for https://futureporn.net
See ./ARCHITECTURE.md for overview
## Metrics Notes
## Jun update todo list
Keeping track of metrics we want to scrape using Prometheus
* [x] external-dns gameplan
* [ ] traefik well understood
* [ ] staging test with *.futureporn.net domains
### Uppy
https://uppy.fp.sbtp.xyz/metrics
## Development Mantras
### Move fast and break things
### Make it work, make it right, make it fast (in that order)
### Done is better than perfect
### If it looks like a duck and quacks like a duck, it is a duck.
### If the way is long, the way is wrong
### Good, Fast, Cheap. Pick two but not all three.
### Organizations are fractals
### Focus on what moves the needle
### Alligator energy (move slow and preserve things)
### Code is run more than it is read
### The computer doesn't care

View File

@ -110,6 +110,7 @@ helm_remote(
# values=['./charts/nitter/values.yaml'],
# ))
k8s_yaml(helm(
'./charts/fp',
values=['./charts/fp/values-dev.yaml'],
@ -141,35 +142,35 @@ k8s_yaml(helm(
# )
# docker_build('fp/link2cid', './packages/link2cid')
docker_build(
'fp/strapi',
'.',
build_args={
'NODE_ENV': 'development',
},
only=['./packages/strapi'],
dockerfile='./d.strapi.dockerfile',
dockerfile='./d.strapi.dev.dockerfile',
live_update=[
sync('./packages/strapi', '/app')
]
)
# docker_build(
# 'fp/strapi-app',
# 'fp/bot',
# '.',
# only=["./packages/strapi-app"],
# dockerfile='d.strapi-app.dockerfile',
# only=['./packages/bot'],
# dockerfile='./d.bot.dockerfile',
# live_update=[
# sync('./packages/strapi-app', '/app')
# sync('./packages/bot', '/app')
# ]
# )
load('ext://uibutton', 'cmd_button')
cmd_button('postgres:create',
argv=['sh', './scripts/postgres-create.sh'],
argv=['dotenvx', 'run', '-f', '.env.development', '--', 'sh', './scripts/postgres-create.sh'],
resource='postgres',
icon_name='dataset',
text='create (empty) databases',
@ -211,7 +212,7 @@ cmd_button('temporal-web:namespace',
docker_build(
'fp/next',
'.',
only=['./pnpm-lock.yaml', './package.json', './packages/next'],
only=['./pnpm-lock.yaml', './package.json', './packages/next', './ca/letsencrypt-stg-root-x1.pem'],
dockerfile='d.next.dockerfile',
target='dev',
build_args={
@ -226,9 +227,9 @@ docker_build(
docker_build(
'fp/scout-manager',
'.',
only=['./pnpm-lock.yaml', './package.json', './packages/scout', './packages/next'],
dockerfile='d.scout.dockerfile',
target='manager',
only=['./pnpm-lock.yaml', './package.json', './packages/scout', './packages/next', './ca/letsencrypt-stg-root-x1.pem'],
dockerfile='d.packages.dockerfile',
target='scout-manager',
live_update=[
sync('./packages/scout', '/app'),
run('cd /app && pnpm i', trigger=['./packages/scout/package.json', './packages/scout/pnpm-lock.yaml']),
@ -240,10 +241,10 @@ docker_build(
docker_build(
'fp/scout-worker',
'.',
only=['./pnpm-lock.yaml', './package.json', './packages/scout', './packages/next'],
only=['./pnpm-lock.yaml', './package.json', './packages/scout', './packages/next', './ca/letsencrypt-stg-root-x1.pem'],
# ignore=['./packages/next'], # I wish I could use this ignore to ignore file changes in this dir, but that's not how it works
dockerfile='d.scout.dockerfile',
target='worker',
dockerfile='d.packages.dockerfile',
target='scout:worker',
live_update=[
# idk if this run() is effective
# run('cd /app && pnpm i', trigger=['./packages/scout/package.json', './packages/scout/pnpm-lock.yaml']),
@ -269,38 +270,48 @@ docker_build(
# workload='frp-operator-controller-manager',
# labels='tunnel'
# )
# k8s_resource(
# workload='echo',
# links=[
# link('https://echo.fp.sbtp.xyz'),
# link('http://echo.futureporn.svc.cluster.local:8001')
# ],
# labels='debug'
# )
k8s_resource(
workload='echo',
port_forwards=['8001'],
workload='uppy',
links=[
link('https://echo.fp.sbtp.xyz'),
link('http://echo.futureporn.svc.cluster.local:8001')
link('https://uppy.fp.sbtp.xyz'),
],
labels='debug'
resource_deps=['redis-master'],
labels=['backend'],
)
k8s_resource(
workload='next',
port_forwards=['3000'],
links=[
link('https://next.fp.sbtp.xyz'),
],
resource_deps=['strapi', 'postgres']
resource_deps=['strapi', 'postgres'],
labels=['frontend'],
)
k8s_resource(
workload='strapi',
port_forwards=['1339'],
links=[
link('http://localhost:1339/admin'),
link('https://strapi.fp.sbtp.xyz/admin'),
link('https://strapi.fp.sbtp.xyz'),
],
resource_deps=['postgres']
resource_deps=['postgres'],
labels=['backend'],
)
k8s_resource(
workload='postgres',
port_forwards=['5432'],
labels=['backend'],
)
k8s_resource(
@ -309,18 +320,21 @@ k8s_resource(
links=[
link('http://localhost:9000/dashboard')
],
labels=['networking'],
)
# k8s_resource(
# workload='scout-worker',
# resource_deps=['postgres', 'strapi', 'temporal-frontend', 'scout-manager']
# )
k8s_resource(
workload='scout-worker',
resource_deps=['postgres', 'strapi', 'temporal-frontend'],
labels=['backend'],
)
# k8s_resource(
# workload='scout-manager',
# resource_deps=['postgres', 'strapi', 'temporal-frontend']
# )
k8s_resource(
workload='scout-manager',
resource_deps=['postgres', 'strapi', 'temporal-frontend', 'scout-worker'],
labels=['backend'],
)
# k8s_resource(
# workload='pgadmin',
@ -333,91 +347,148 @@ k8s_resource(
# port_forwards=['6060:10606'],
# )
# temporarily disabled to save CPU resources
# helm_remote(
# 'temporal',
# repo_name='temporal',
# repo_url='https://charts.lemontech.engineering',
# 'kube-prometheus-stack',
# repo_name='kube-prometheus-stack',
# repo_url='https://prometheus-community.github.io/helm-charts',
# namespace='futureporn',
# version='0.37.0',
# version='61.1.1',
# set=[
# 'admintools.image.tag=1.24.1-tctl-1.18.1-cli-0.12.0',
# 'web.image.tag=2.27.2',
# 'prometheus.enabled=false',
# 'grafana.enabled=false',
# 'elasticsearch.enabled=false',
# 'web.config.auth.enabled=true',
# 'cassandra.enabled=false',
# 'server.config.persistence.default.driver=sql',
# 'server.config.persistence.default.sql.driver=postgres12',
# 'server.config.persistence.default.sql.host=%s' % os.getenv('POSTGRES_HOST'),
# 'server.config.persistence.default.sql.port=5432',
# 'server.config.persistence.default.sql.user=%s' % os.getenv('POSTGRES_USER'),
# 'server.config.persistence.default.sql.password=%s' % os.getenv('POSTGRES_PASSWORD'),
# 'server.config.persistence.visibility.driver=sql',
# 'server.config.persistence.visibility.sql.driver=postgres12',
# 'server.config.persistence.visibility.sql.host=%s' % os.getenv('POSTGRES_HOST'),
# 'server.config.persistence.visibility.sql.port=5432',
# 'server.config.persistence.visibility.sql.user=%s' % os.getenv('POSTGRES_USER'),
# 'server.config.persistence.visibility.sql.password=%s' % os.getenv('POSTGRES_PASSWORD'),
# 'prometheus.prometheusSpec.storageSpec.volumeClaimTemplate.spec.storageClassName=vultr-block-storage',
# 'admin.existingSecret=grafana',
# 'sidecar.dashboards.enabled=true',
# 'grafana.admin.existingSecret=grafana',
# 'grafana.sidecar.dashboards.enabled=true',
# 'grafana.sidecar.dashboards.defaultFolderName=balls',
# 'grafana.sidecar.dashboards.label=grafana_dashboard',
# 'grafana.sidecar.dashboards.provider.foldersFromFileStructure=true'
# ]
# )
helm_remote(
'redis',
repo_name='redis',
repo_url='https://charts.bitnami.com/bitnami',
namespace='futureporn',
version='19.6.1',
set=[
'auth.existingSecret=redis',
'auth.existingSecretPasswordKey=password',
'replica.persistence.enabled=false',
'architecture=standalone'
]
)
helm_remote(
'temporal',
repo_name='temporal',
repo_url='https://charts.lemontech.engineering',
namespace='futureporn',
version='0.37.0',
set=[
'admintools.image.tag=1.24.1-tctl-1.18.1-cli-0.12.0',
'web.image.tag=2.27.2',
'prometheus.enabled=false',
'grafana.enabled=false',
'elasticsearch.enabled=false',
'web.config.auth.enabled=true',
'cassandra.enabled=false',
'server.config.persistence.default.driver=sql',
'server.config.persistence.default.sql.driver=postgres12',
'server.config.persistence.default.sql.existingSecret=postgres',
'server.config.persistence.default.sql.secretName=postgres',
'server.config.persistence.default.sql.secretKey=password',
'server.config.persistence.default.sql.host=postgres.futureporn.svc.cluster.local',
'server.config.persistence.default.sql.port=5432',
'server.config.persistence.default.sql.user=postgres',
'server.config.persistence.visibility.driver=sql',
'server.config.persistence.visibility.sql.driver=postgres12',
'server.config.persistence.visibility.sql.host=postgres.futureporn.svc.cluster.local',
'server.config.persistence.visibility.sql.port=5432',
'server.config.persistence.visibility.sql.user=postgres',
'server.config.persistence.visibility.sql.existingSecret=postgres',
'server.config.persistence.visibility.sql.secretName=postgres',
'server.config.persistence.visibility.sql.secretKey=password',
]
)
k8s_resource(
workload='temporal-admintools',
labels='temporal',
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='temporal-frontend',
labels='temporal', port_forwards=['7233'],
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='temporal-history',
labels='temporal',
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='temporal-worker',
labels='temporal',
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='temporal-web',
labels='temporal', port_forwards=['8080'],
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='temporal-schema-setup',
labels='temporal',
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='temporal-schema-update',
labels='temporal',
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='temporal-matching',
labels='temporal',
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='external-dns',
labels=['networking'],
)
k8s_resource(
workload='cert-manager-webhook-exoscale',
labels=['networking'],
)
k8s_resource(
workload='redis-master',
labels=['backend']
)
# k8s_resource(
# workload='temporal-admintools',
# labels='temporal',
# resource_deps=[
# 'postgres',
# 'strapi'
# ])
# k8s_resource(
# workload='temporal-frontend',
# labels='temporal', port_forwards=['7233'],
# resource_deps=[
# 'postgres',
# 'strapi'
# ])
# k8s_resource(
# workload='temporal-history',
# labels='temporal',
# resource_deps=[
# 'postgres',
# 'strapi'
# ])
# k8s_resource(
# workload='temporal-worker',
# labels='temporal',
# resource_deps=[
# 'postgres',
# 'strapi'
# ])
# k8s_resource(
# workload='temporal-web',
# labels='temporal', port_forwards=['8080'],
# resource_deps=[
# 'postgres',
# 'strapi'
# ])
# k8s_resource(
# workload='temporal-schema-setup',
# labels='temporal',
# resource_deps=[
# 'postgres',
# 'strapi'
# ])
# k8s_resource(
# workload='temporal-schema-update',
# labels='temporal',
# resource_deps=[
# 'postgres',
# 'strapi'
# ])
# k8s_resource(
# workload='temporal-matching',
# labels='temporal',
# resource_deps=[
# 'postgres',
# 'strapi'
# ])
# workload='bot',
# labels=['backend']
# )
# k8s_resource(
# workload='cert-manager',
@ -434,4 +505,4 @@ k8s_resource(
# k8s_resource(
# workload='cert-manager-startupapicheck',
# labels='cert-manager'
# )
# )

View File

@ -0,0 +1,32 @@
-----BEGIN CERTIFICATE-----
MIIFmDCCA4CgAwIBAgIQU9C87nMpOIFKYpfvOHFHFDANBgkqhkiG9w0BAQsFADBm
MQswCQYDVQQGEwJVUzEzMDEGA1UEChMqKFNUQUdJTkcpIEludGVybmV0IFNlY3Vy
aXR5IFJlc2VhcmNoIEdyb3VwMSIwIAYDVQQDExkoU1RBR0lORykgUHJldGVuZCBQ
ZWFyIFgxMB4XDTE1MDYwNDExMDQzOFoXDTM1MDYwNDExMDQzOFowZjELMAkGA1UE
BhMCVVMxMzAxBgNVBAoTKihTVEFHSU5HKSBJbnRlcm5ldCBTZWN1cml0eSBSZXNl
YXJjaCBHcm91cDEiMCAGA1UEAxMZKFNUQUdJTkcpIFByZXRlbmQgUGVhciBYMTCC
AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALbagEdDTa1QgGBWSYkyMhsc
ZXENOBaVRTMX1hceJENgsL0Ma49D3MilI4KS38mtkmdF6cPWnL++fgehT0FbRHZg
jOEr8UAN4jH6omjrbTD++VZneTsMVaGamQmDdFl5g1gYaigkkmx8OiCO68a4QXg4
wSyn6iDipKP8utsE+x1E28SA75HOYqpdrk4HGxuULvlr03wZGTIf/oRt2/c+dYmD
oaJhge+GOrLAEQByO7+8+vzOwpNAPEx6LW+crEEZ7eBXih6VP19sTGy3yfqK5tPt
TdXXCOQMKAp+gCj/VByhmIr+0iNDC540gtvV303WpcbwnkkLYC0Ft2cYUyHtkstO
fRcRO+K2cZozoSwVPyB8/J9RpcRK3jgnX9lujfwA/pAbP0J2UPQFxmWFRQnFjaq6
rkqbNEBgLy+kFL1NEsRbvFbKrRi5bYy2lNms2NJPZvdNQbT/2dBZKmJqxHkxCuOQ
FjhJQNeO+Njm1Z1iATS/3rts2yZlqXKsxQUzN6vNbD8KnXRMEeOXUYvbV4lqfCf8
mS14WEbSiMy87GB5S9ucSV1XUrlTG5UGcMSZOBcEUpisRPEmQWUOTWIoDQ5FOia/
GI+Ki523r2ruEmbmG37EBSBXdxIdndqrjy+QVAmCebyDx9eVEGOIpn26bW5LKeru
mJxa/CFBaKi4bRvmdJRLAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB
Af8EBTADAQH/MB0GA1UdDgQWBBS182Xy/rAKkh/7PH3zRKCsYyXDFDANBgkqhkiG
9w0BAQsFAAOCAgEAncDZNytDbrrVe68UT6py1lfF2h6Tm2p8ro42i87WWyP2LK8Y
nLHC0hvNfWeWmjZQYBQfGC5c7aQRezak+tHLdmrNKHkn5kn+9E9LCjCaEsyIIn2j
qdHlAkepu/C3KnNtVx5tW07e5bvIjJScwkCDbP3akWQixPpRFAsnP+ULx7k0aO1x
qAeaAhQ2rgo1F58hcflgqKTXnpPM02intVfiVVkX5GXpJjK5EoQtLceyGOrkxlM/
sTPq4UrnypmsqSagWV3HcUlYtDinc+nukFk6eR4XkzXBbwKajl0YjztfrCIHOn5Q
CJL6TERVDbM/aAPly8kJ1sWGLuvvWYzMYgLzDul//rUF10gEMWaXVZV51KpS9DY/
5CunuvCXmEQJHo7kGcViT7sETn6Jz9KOhvYcXkJ7po6d93A/jy4GKPIPnsKKNEmR
xUuXY4xRdh45tMJnLTUDdC9FIU0flTeO9/vNpVA8OPU1i14vCz+MU8KX1bV3GXm/
fxlB7VBBjX9v5oUep0o/j68R/iDlCOM4VVfRa8gX6T2FU7fNdatvGro7uQzIvWof
gN9WUwCbEMBy/YhBSrXycKA8crgGg3x1mIsopn88JKwmMBa68oS7EHM9w7C4y71M
7DiA+/9Qdp9RBWJpTS9i/mDnJg1xvo8Xz49mrrgfmcAXTCJqXi24NatI3Oc=
-----END CERTIFICATE-----

View File

@ -0,0 +1,15 @@
-----BEGIN CERTIFICATE-----
MIICTjCCAdSgAwIBAgIRAIPgc3k5LlLVLtUUvs4K/QcwCgYIKoZIzj0EAwMwaDEL
MAkGA1UEBhMCVVMxMzAxBgNVBAoTKihTVEFHSU5HKSBJbnRlcm5ldCBTZWN1cml0
eSBSZXNlYXJjaCBHcm91cDEkMCIGA1UEAxMbKFNUQUdJTkcpIEJvZ3VzIEJyb2Nj
b2xpIFgyMB4XDTIwMDkwNDAwMDAwMFoXDTQwMDkxNzE2MDAwMFowaDELMAkGA1UE
BhMCVVMxMzAxBgNVBAoTKihTVEFHSU5HKSBJbnRlcm5ldCBTZWN1cml0eSBSZXNl
YXJjaCBHcm91cDEkMCIGA1UEAxMbKFNUQUdJTkcpIEJvZ3VzIEJyb2Njb2xpIFgy
MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEOvS+w1kCzAxYOJbA06Aw0HFP2tLBLKPo
FQqR9AMskl1nC2975eQqycR+ACvYelA8rfwFXObMHYXJ23XLB+dAjPJVOJ2OcsjT
VqO4dcDWu+rQ2VILdnJRYypnV1MMThVxo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU3tGjWWQOwZo2o0busBB2766XlWYwCgYI
KoZIzj0EAwMDaAAwZQIwRcp4ZKBsq9XkUuN8wfX+GEbY1N5nmCRc8e80kUkuAefo
uc2j3cICeXo1cOybQ1iWAjEA3Ooawl8eQyR4wrjCofUE8h44p0j7Yl/kBlJZT8+9
vbtH7QiVzeKCOTQPINyRql6P
-----END CERTIFICATE-----

View File

@ -1,9 +1,5 @@
This chart was originally created by Kompose.
# Futureporn helm chart
Then I realized I don't understand kubernetes.
This is the chart with templates that define Futureporn kubernetes cluster
It was too complex.
I needed to start over, understand each piece before moving on.
so we're starting small, incrementally migrating services to the cluster.
https://helm.sh/docs/topics/charts/

View File

@ -1,138 +0,0 @@
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: echo
namespace: futureporn
spec:
selector:
matchLabels:
app: echo
template:
metadata:
labels:
app: echo
spec:
containers:
- image: hashicorp/http-echo
name: echo
ports:
- containerPort: 5678
args:
- -text="Hello, choom!"
{{ if eq .Values.environment "development" }}
---
apiVersion: chisel-operator.io/v1
kind: ExitNode
metadata:
name: echo-exit-node
namespace: futureporn
spec:
host: "{{ .Values.chisel.exitNodeIp }}"
port: 9090
auth: chisel
{{ end }}
# ---
# apiVersion: traefik.io/v1alpha1
# kind: IngressRoute
# metadata:
# name: echo
# namespace: futureporn
# spec:
# entryPoints:
# - web
# routes:
# - match: Host(`echo.fp.sbtp.xyz`) || PathPrefix(`/extra/echo`)
# kind: Rule
# services:
# - name: echo
# port: 8001
# # tls:
# # secretName: echo-cert
---
apiVersion: v1
kind: Service
metadata:
name: echo
namespace: futureporn
annotations:
external-dns.alpha.kubernetes.io/hostname: "{{ .Values.echo.hostname }}"
{{ if eq .Values.environment "development" }}
chisel-operator.io/exit-node-name: "echo-exit-node"
{{ end }}
spec:
type: LoadBalancer
selector:
app: echo
ports:
- name: web
protocol: TCP
port: 5678
targetPort: 5678
- name: websecure
protocol: TCP
port: 4443
targetPort: 5678
---
apiVersion: cert-manager.io/v1
kind: Certificate
metadata:
name: echo
namespace: futureporn
spec:
secretName: echo-tls
issuerRef:
name: "{{ .Values.certManager.issuer }}"
kind: ClusterIssuer
dnsNames:
- "{{ .Values.echo.hostname }}"
---
apiVersion: traefik.io/v1alpha1
kind: IngressRoute
metadata:
name: echo-http
namespace: futureporn
spec:
entryPoints:
- web
routes:
- match: Host(`echo.fp.sbtp.xyz`)
kind: Rule
services:
- name: echo
namespace: futureporn
port: 5678
---
apiVersion: traefik.io/v1alpha1
kind: IngressRoute
metadata:
name: echo-https
namespace: futureporn
annotations:
cert-manager.io/cluster-issuer: "{{ .Values.certManager.issuer }}"
spec:
entryPoints:
- websecure
routes:
- match: Host(`echo.fp.sbtp.xyz`)
kind: Rule
middlewares:
- name: redirect
namespace: futureporn
services:
- name: echo
namespace: futureporn
port: 5678
tls:
secretName: echo-tls

View File

@ -13,6 +13,8 @@ spec:
env:
- name: HOSTNAME
value: 0.0.0.0
- name: NEXT_PUBLIC_UPPY_COMPANION_URL
value: "{{ .Values.uppy.hostname }}"
ports:
- name: web
containerPort: 3000

View File

@ -33,7 +33,7 @@ spec:
- name: CDN_BUCKET_URL
value: "{{ .Values.scout.cdnBucketUrl }}"
- name: STRAPI_URL
value: https://strapi.piko.sbtp.xyz
value: https://strapi.fp.sbtp.xyz
- name: S3_BUCKET_APPLICATION_KEY
valueFrom:
secretKeyRef:
@ -156,7 +156,7 @@ spec:
# - name: CDN_BUCKET_URL
# value: "{{ .Values.scout.cdnBucketUrl }}"
# - name: STRAPI_URL
# value: https://strapi.piko.sbtp.xyz
# value: https://strapi.fp.sbtp.xyz
# - name: SCOUT_NITTER_ACCESS_KEY
# valueFrom:
# secretKeyRef:

View File

@ -116,13 +116,13 @@ spec:
secretKeyRef:
name: strapi
key: sendgridApiKey
- name: STRAPI_URL
value: "{{ .Values.strapi.url }}"
- name: TRANSFER_TOKEN_SALT
valueFrom:
secretKeyRef:
name: strapi
key: transferTokenSalt
- name: STRAPI_URL
value: "{{ .Values.strapi.url }}"
- name: PORT
value: "{{ .Values.strapi.port }}"
resources:

View File

@ -0,0 +1,219 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: uppy
namespace: futureporn
spec:
replicas: 2
minReadySeconds: 5
strategy:
type: RollingUpdate
rollingUpdate:
maxSurge: 2
maxUnavailable: 1
selector:
matchLabels:
app: uppy
template:
metadata:
labels:
app: uppy
spec:
containers:
- name: uppy
image: docker.io/transloadit/companion:latest
imagePullPolicy: IfNotPresent
resources:
limits:
memory: 150Mi
requests:
memory: 100Mi
env:
- name: COMPANION_STREAMING_UPLOAD
value: "true"
- name: COMPANION_CLIENT_ORIGINS
value: "{{ .Values.uppy.clientOrigins }}"
- name: COMPANION_DATADIR
value: /tmp/
- name: COMPANION_DOMAIN
value: "{{ .Values.uppy.domain }}"
- name: COMPANION_PROTOCOL
value: https
- name: COMPANION_REDIS_URL
valueFrom:
secretKeyRef:
name: uppy
key: redisUrl
- name: COMPANION_SECRET
valueFrom:
secretKeyRef:
name: uppy
key: secret
- name: COMPANION_PREAUTH_SECRET
valueFrom:
secretKeyRef:
name: uppy
key: preAuthSecret
- name: COMPANION_DROPBOX_KEY
valueFrom:
secretKeyRef:
name: uppy
key: dropboxKey
- name: COMPANION_DROPBOX_SECRET
valueFrom:
secretKeyRef:
name: uppy
key: dropboxSecret
- name: COMPANION_BOX_KEY
valueFrom:
secretKeyRef:
name: uppy
key: boxKey
- name: COMPANION_BOX_SECRET
valueFrom:
secretKeyRef:
name: uppy
key: boxSecret
- name: COMPANION_GOOGLE_KEY
valueFrom:
secretKeyRef:
name: uppy
key: googleKey
- name: COMPANION_GOOGLE_SECRET
valueFrom:
secretKeyRef:
name: uppy
key: googleSecret
- name: COMPANION_AWS_KEY
valueFrom:
secretKeyRef:
name: uppy
key: awsKey
- name: COMPANION_AWS_SECRET
valueFrom:
secretKeyRef:
name: uppy
key: awsSecret
- name: COMPANION_AWS_BUCKET
value: "{{ .Values.uppy.s3.bucket }}"
- name: COMPANION_AWS_REGION
value: "{{ .Values.uppy.s3.region }}"
- name: COMPANION_AWS_PREFIX
value: "{{ .Values.uppy.s3.prefix }}"
## COMPANION_OAUTH_DOMAIN is only necessary if using a different domain per each uppy pod.
## We don't need this because we are load balancing the pods so they all use the same domain name.
## @see https://github.com/transloadit/uppy/blob/f4dd3d534ff4378f3a2f73fe327358bcbde74059/docs/companion.md#server
- name: COMPANION_OAUTH_DOMAIN
value: ''
- name: COMPANION_PATH
value: ''
- name: COMPANION_IMPLICIT_PATH
value: ''
- name: COMPANION_DOMAINS
value: ''
## https://uppy.io/docs/companion/#uploadurls-companion_upload_urls
- name: COMPANION_UPLOAD_URLS
value: "{{ .Values.uppy.uploadUrls }}"
ports:
- containerPort: 3020
volumeMounts:
- name: uppy-data
mountPath: /mnt/uppy-data
volumes:
- name: uppy-data
emptyDir: {}
---
apiVersion: v1
kind: Service
metadata:
name: uppy
namespace: futureporn
annotations:
external-dns.alpha.kubernetes.io/hostname: "{{ .Values.uppy.hostname }}"
chisel-operator.io/exit-node-name: "uppy-exit-node"
spec:
type: LoadBalancer
ports:
- port: 3020
targetPort: 3020
protocol: TCP
selector:
app: uppy
{{ if eq .Values.environment "development" }}
---
apiVersion: chisel-operator.io/v1
kind: ExitNode
metadata:
name: uppy-exit-node
namespace: futureporn
spec:
host: "{{ .Values.chisel.exitNodeIp }}"
port: 9090
auth: chisel
{{ end }}
---
apiVersion: traefik.io/v1alpha1
kind: IngressRoute
metadata:
name: uppy-http
namespace: futureporn
spec:
entryPoints:
- web
routes:
- match: Host(`uppy.fp.sbtp.xyz`)
kind: Rule
middlewares:
- name: redirect
namespace: futureporn
services:
- name: uppy
namespace: futureporn
port: 3020
---
apiVersion: traefik.io/v1alpha1
kind: IngressRoute
metadata:
name: uppy-https
namespace: futureporn
annotations:
cert-manager.io/cluster-issuer: "{{ .Values.certManager.issuer }}"
spec:
entryPoints:
- websecure
routes:
- match: Host(`uppy.fp.sbtp.xyz`)
kind: Rule
services:
- name: uppy
namespace: futureporn
port: 3020
tls:
secretName: uppy-tls
# Welcome to Companion v4.15.1
# ===================================
# Congratulations on setting up Companion! Thanks for joining our cause, you have taken
# the first step towards the future of file uploading! We
# hope you are as excited about this as we are!
# While you did an awesome job on getting Companion running, this is just the welcome
# message, so let's talk about the places that really matter:
# - Be sure to add the following URLs as your Oauth redirect uris on their corresponding developer interfaces:
# https://uppy.fp.sbtp.xyz/drive/redirect, https://uppy.fp.sbtp.xyz/googlephotos/redirect, https://uppy.fp.sbtp.xyz/dropbox/redirect, https://uppy.fp.sbtp.xyz/box/redirect, https://uppy.fp.sbtp.xyz/instagram/redirect, https://uppy.fp.sbtp.xyz/facebook/redirect, https://uppy.fp.sbtp.xyz/onedrive/redirect, https://uppy.fp.sbtp.xyz/zoom/redirect, https://uppy.fp.sbtp.xyz/unsplash/redirect
# - The URL https://uppy.fp.sbtp.xyz/metrics is available for statistics to keep Companion running smoothly
# - https://github.com/transloadit/uppy/issues - report your bugs here
# So quit lollygagging, start uploading and experience the future!

View File

@ -34,5 +34,16 @@ chisel:
exitNodeIp: "155.138.254.201"
echo:
hostname: echo.fp.sbtp.xyz
uppy:
hostname: uppy.fp.sbtp.xyz
imageName: fp/uppy
s3:
endpoint: s3.us-west-000.backblazeb2.com
bucket: futureporn-usc
region: us-west-000
prefix: s3
clientOrigins: next.fp.sbtp.xyz
domain: uppy.fp.sbtp.xyz
uploadUrls: https://uppy.fp.sbtp.xyz/files
certManager:
issuer: letsencrypt-staging

View File

@ -5,22 +5,16 @@ RUN corepack enable
FROM base AS build
ENV NODE_ENV=production
COPY . /usr/src/app
COPY ./packages/bot /usr/src/app
WORKDIR /usr/src/app
RUN mkdir -p /prod/scout
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
RUN pnpm deploy --filter=scout --prod /prod/scout
RUN pnpm deploy --filter=bot --prod /prod/scout
FROM base AS manager
COPY --from=build /prod/scout /app
FROM base AS bot
COPY --from=build /prod/bot /app
WORKDIR /app
ENTRYPOINT ["pnpm"]
CMD ["run", "start:manager"]
FROM base AS worker
COPY --from=build /prod/scout /app
WORKDIR /app
ENTRYPOINT ["pnpm"]
CMD ["run", "start:worker"]
CMD ["run", "start"]

View File

@ -18,6 +18,7 @@ ENV NEXT_PUBLIC_SITE_URL ${NEXT_PUBLIC_SITE_URL}
ENV NEXT_PUBLIC_STRAPI_URL ${NEXT_PUBLIC_STRAPI_URL}
ENV NEXT_PUBLIC_UPPY_COMPANION_URL ${NEXT_PUBLIC_UPPY_COMPANION_URL}
ENV NEXT_TELEMETRY_DISABLED 1
ENV NODE_EXTRA_CA_CERTS "/app/letsencrypt-stg-root-x1.pem"
COPY pnpm-lock.yaml ./
RUN pnpm fetch
COPY ./packages/next /app

42
d.packages.dockerfile Normal file
View File

@ -0,0 +1,42 @@
## This dockerfile creates multiple docker images.
## Because we are using monorepo with pnpm workspaces, we have many npm packages in this single git repo.
## Some of these packages in the monorepo depend on other packages in the monorepo.
## In order to build these individual packages which inter-depend on each other,
## all of the dependent code must be present in the build.
##
## Below, COPY . /usr/src/app copies all the app code into the build context.
## Because we use Tilt, only specific path directories are visible to docker. This helps with build performance.
## When a new package becomes a dependency, we need to update our Tiltfile to include the package directory.
## Tiltfile example of docker_build() args which include `scout` and `next` packages.
## `only=['./pnpm-lock.yaml', './package.json', './packages/scout', './packages/next'],`
##
##

## Shared base: node 20 with pnpm enabled via corepack and on the PATH.
FROM node:20 AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable

## Build stage: install the whole workspace once (cached pnpm store), then
## `pnpm deploy` produces a pruned, production-only bundle per package.
FROM base AS build
ENV NODE_ENV=production
COPY . /usr/src/app
WORKDIR /usr/src/app
RUN mkdir -p /prod/scout
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
RUN pnpm deploy --filter=scout --prod /prod/scout
# RUN pnpm deploy --filter=bot --prod /prod/bot

## scout-manager image (Tiltfile target='scout-manager').
FROM base AS scout-manager
COPY --from=build /prod/scout /app
WORKDIR /app
ENTRYPOINT ["pnpm"]
CMD ["run", "start:worker:manager"]

## scout-worker image. The worker talks to strapi over https signed by the
## Let's Encrypt *staging* CA, so that root certificate is baked into the
## image and handed to node via NODE_EXTRA_CA_CERTS.
FROM base AS scout-worker
COPY --from=build /prod/scout /app
## BUGFIX: COPY was missing its destination argument, which fails the build.
## NOTE(review): the Tiltfile's only= list ships the pem as
## './ca/letsencrypt-stg-root-x1.pem', so the source under /usr/src/app is
## ./ca/, not ./certs/ — confirm against the repo layout.
COPY --from=build /usr/src/app/ca/letsencrypt-stg-root-x1.pem /app/certs/letsencrypt-stg-root-x1.pem
ENV NODE_EXTRA_CA_CERTS="/app/certs/letsencrypt-stg-root-x1.pem"
WORKDIR /app
ENTRYPOINT ["pnpm"]
CMD ["run", "start:worker"]

View File

@ -1,14 +0,0 @@
FROM node:20-alpine
WORKDIR /app
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
RUN apk update
ENV NODE_ENV=production
COPY pnpm-lock.yaml ./
RUN pnpm fetch
COPY ./packages/realtime /app
ENTRYPOINT ["pnpm"]
CMD ["run", "start"]

View File

@ -1,19 +0,0 @@
FROM node:18-alpine3.18
# Installing libvips-dev for sharp Compatibility
RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev nasm bash vips-dev git
ARG NODE_ENV=development
ENV NODE_ENV=${NODE_ENV}
WORKDIR /opt/
COPY ./packages/strapi-app/package.json ./packages/strapi-app/yarn.lock ./
RUN yarn global add node-gyp
RUN yarn config set network-timeout 600000 -g && yarn install
ENV PATH /opt/node_modules/.bin:$PATH
WORKDIR /opt/app
COPY ./packages/strapi-app/ .
RUN chown -R node:node /opt/app
USER node
RUN ["yarn", "build"]
EXPOSE 1338
CMD ["yarn", "develop", "--debug"]

22
d.strapi.dev.dockerfile Normal file
View File

@ -0,0 +1,22 @@
## Development image for the Strapi CMS package (built by the Tiltfile's
## 'fp/strapi' docker_build() with NODE_ENV=development and live_update
## syncing ./packages/strapi into the container).
FROM node:18
# Installing libvips-dev for sharp Compatibility
# RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev nasm bash vips-dev git
## Enable pnpm via corepack and put its global bin dir on the PATH.
RUN corepack enable
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
## NODE_ENV defaults to development; the Tiltfile passes it as a build arg.
ARG NODE_ENV=development
ENV NODE_ENV=${NODE_ENV}
## Install dependencies one level above the app dir so /opt/node_modules is
## not clobbered when the app source is later copied into /opt/app; the
## PATH line below exposes its .bin directory.
WORKDIR /opt/
COPY ./packages/strapi/package.json ./packages/strapi/pnpm-lock.yaml ./
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install -g node-gyp
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
ENV PATH /opt/node_modules/.bin:$PATH
WORKDIR /opt/app
COPY ./packages/strapi/. .
## Run as the unprivileged node user.
RUN chown -R node:node /opt/app
USER node
RUN ["pnpm", "run", "build"]
## Strapi listens on 1339 (matches the Tiltfile's port_forwards=['1339']).
EXPOSE 1339
CMD ["pnpm", "run", "develop"]

View File

@ -7,4 +7,4 @@ COPY ./packages/strapi/package.json ./packages/strapi/pnpm-lock.yaml .
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
COPY ./packages/strapi/ .
RUN ["pnpm", "run", "build"]
CMD ["pnpm", "run", "dev"]
CMD ["pnpm", "run", "develop"]

View File

@ -1,24 +0,0 @@
FROM node:20-alpine as base
WORKDIR /app
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev vips-dev libc6-compat git nasm bash gcompat
FROM base AS install
COPY ./packages/strapi/pnpm-lock.yaml ./packages/strapi/package.json ./
RUN pnpm install --prod --shamefully-hoist && pnpm run build
COPY ./packages/strapi .
RUN chown -R node:node /app
USER node
FROM install AS dev
ENV NODE_ENV=development
ENTRYPOINT ["pnpm"]
CMD ["run", "dev"]
FROM install AS release
ENV NODE_ENV=production
ENTRYPOINT ["pnpm"]
CMD ["run", "start"]

View File

@ -1,20 +0,0 @@
FROM node:18-alpine3.18
RUN echo "do a rebuild, yuou fucking shit!"
# Installing libvips-dev for sharp Compatibility
RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev nasm bash vips-dev git
ARG NODE_ENV=development
ENV NODE_ENV=${NODE_ENV}
WORKDIR /opt/
COPY package.json yarn.lock ./
RUN yarn global add node-gyp
RUN yarn config set network-timeout 600000 -g && yarn install
ENV PATH /opt/node_modules/.bin:$PATH
WORKDIR /opt/app
COPY . .
RUN chown -R node:node /opt/app
USER node
RUN ["yarn", "build"]
EXPOSE 1339
CMD ["yarn", "start"]

View File

@ -5,9 +5,9 @@ WORKDIR /app
RUN corepack enable
FROM base as build
COPY ./packages/uppy/package.json ./
COPY ./packages/uppy/index.js ./
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install
COPY ./packages/uppy/package.json ./packages/uppy/pnpm-lock.yaml /app
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
COPY ./packages/uppy/ .
FROM build as run
ENTRYPOINT ["pnpm"]

3
flux/README.md Normal file
View File

@ -0,0 +1,3 @@
# Futureporn flux
GitOps: https://fluxcd.io/flux/get-started/

5
packages/README.md Normal file
View File

@ -0,0 +1,5 @@
# Futureporn node packages
Each folder here is an individual node package; packages can reference one another. One reason we do this is to share utility functions between packages.
See https://pnpm.io/workspaces

3
packages/bot/README.md Normal file
View File

@ -0,0 +1,3 @@
# bot
A.K.A. FutureButt, the Discord bot that integrates with the FP backend.

3
packages/infra/README.md Normal file
View File

@ -0,0 +1,3 @@
# infra
This module contains scripts that help with miscellaneous infrastructure tasks like cleaning up unused resources on Vultr Kubernetes Engine.

View File

@ -1,5 +1,5 @@
{
"name": "scripts",
"name": "infra",
"type": "module",
"version": "1.0.0",
"description": "",

View File

@ -1,3 +0,0 @@
node_modules
.env
*~

View File

@ -1,3 +0,0 @@
PORT=3030
IPFS_URL=http://localhost:5001
API_KEY=changeme

View File

@ -1,144 +0,0 @@
# Created by https://www.toptal.com/developers/gitignore/api/node
# Edit at https://www.toptal.com/developers/gitignore?templates=node
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
### Node Patch ###
# Serverless Webpack directories
.webpack/
# Optional stylelint cache
# SvelteKit build / generate output
.svelte-kit
# End of https://www.toptal.com/developers/gitignore/api/node

View File

@ -1,3 +0,0 @@
engine-strict=true
use-node-version=20.13.1
node-version=20.13.1

View File

@ -1,20 +0,0 @@
# Reference: https://pnpm.io/docker
FROM node:20-alpine AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
WORKDIR /app
COPY ./package.json /app
EXPOSE 3939

# Development image: full dependency install; dev server with reload.
# NOTE(review): no sources are copied here — dev presumably bind-mounts the project dir.
FROM base AS dev
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install
CMD ["pnpm", "run", "dev"]

# Production image: prod-only dependencies.
FROM base
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --prod
COPY ./index.js /app
# Fix: index.js requires ./src/app.js, so src/ must ship in the image too.
COPY ./src /app/src
ENTRYPOINT ["pnpm"]
CMD ["start"]

View File

@ -1,90 +0,0 @@
# link2cid
## Motivation
I wish I could give [kubo](https://github.com/ipfs/kubo) or [IPFS cluster](https://ipfscluster.io/) a URI to a file and then they would download the file and add to ipfs, returning me a [CID](https://docs.ipfs.tech/concepts/glossary/#cid).
However, neither kubo nor IPFS cluster can do this.
link2cid solves this issue with a REST API for adding a file at `url` to IPFS.
## Usage
Configure environment
Create a `.env` file. See `.env.example` for an example. Important environment variables are `API_KEY`, `PORT`, and `IPFS_URL`.
Install and run
```bash
pnpm install
pnpm start
```
Make a GET REST request to `/add` with `url` as a query parameter. Expect a [SSE](https://wikipedia.org/wiki/Server-sent_events) response.
## dokku
dokku builder-dockerfile:set link2cid dockerfile-path link2cid.Dockerfile
### Examples
#### [HTTPIE](https://httpie.io)
```bash
http -A bearer -a $API_KEY --stream 'http://localhost:3939/add?url=https://upload.wikimedia.org/wikipedia/commons/7/70/Example.png' Accept:text/event-stream
HTTP/1.1 200 OK
Access-Control-Allow-Origin: *
Cache-Control: no-cache
Connection: keep-alive
Content-Type: text/event-stream; charset=utf-8
Date: Thu, 21 Dec 2023 11:20:24 GMT
Transfer-Encoding: identity
X-Powered-By: Express
:ok
event: dlProgress
data: {
"percent": 100
}
event: addProgress
data: {
"percent": 100
}
event: end
data: {
"cid": "bafkreidj3jo7efguloaixz6vgivljlmowagagjtqv4yanyqgty2hrvg6km"
}
```
#### Javascript
@todo this is incomplete/untested
```js
await fetch('http://localhost:3939/add?url=https://upload.wikimedia.org/wikipedia/commons/7/70/Example.png', {
headers: {
'accept': 'text/event-stream',
'authorization': `Bearer ${API_KEY}`
}
});
```
## Dev notes
### Generate API_KEY
```js
require('crypto').randomBytes(64).toString('hex')
```
### `TypeError: data.split is not a function`
If you see this error, make sure data in SSE event payload is a string, not a number.

View File

@ -1,14 +0,0 @@
{
"healthchecks": {
"web": [
{
"type": "startup",
"name": "web check",
"description": "Checking for expecting string at /health",
"path": "/health",
"content": "link2cid",
"attempts": 3
}
]
}
}

View File

@ -1,9 +0,0 @@
// Entry point: load environment variables, then boot the Express app.
require('dotenv').config()

const app = require('./src/app.js')
const { version } = require('./package.json')

// PORT comes from the environment; default to 3000 for local runs.
const port = process.env.PORT || 3000

app.listen(port, function () {
  console.log(`link2cid ${version} listening on port ${port}`)
})

View File

@ -1,34 +0,0 @@
{
"name": "@futureporn/link2cid",
"version": "4.3.0",
"description": "REST API for adding files via URL to IPFS",
"main": "index.js",
"scripts": {
"test": "mocha \"./src/**/*.spec.js\"",
"dev": "pnpm nodemon ./index.js",
"start": "node index.js"
},
"keywords": [
"IPFS",
"CID",
"HTTP",
"REST"
],
"author": "@CJ_Clippy",
"license": "Unlicense",
"dependencies": {
"@paralleldrive/cuid2": "^2.2.2",
"@types/express": "^4.17.21",
"better-queue": "^3.8.12",
"body-parser": "^1.20.2",
"cors": "^2.8.5",
"dotenv": "^16.3.1",
"express": "^4.18.2"
},
"devDependencies": {
"chai": "^5.1.0",
"mocha": "^10.4.0",
"nodemon": "^3.0.3",
"supertest": "^6.3.4"
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,33 +0,0 @@
'use strict';

// Express application wiring for link2cid.
// Routes: task CRUD (create/delete require a bearer token) and a public
// health check. Fix: removed requires that were never used in this file
// (fs, fs/promises, openAsBlob, rm/stat, os, path).
require('dotenv').config();
const express = require('express');
const bodyParser = require('body-parser');
const cors = require('cors');

const { authenticate } = require('./middleware/auth.js')
const { createTask, readTask, deleteTask } = require('./models/task.js')
const readHeath = require('./models/health.js')
const store = require('./middleware/store.js');
const queue = require('./middleware/queue.js');

const app = express();

// Attach the shared in-memory task store and work queue to every request.
app.use(store);
app.use(queue);
app.use(cors());
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));

// Task lifecycle. Reads are public; writes require authentication.
app.post('/task', authenticate, createTask)
app.get('/task', readTask)
app.delete('/task', authenticate, deleteTask)

// Public liveness probe.
app.get('/health', readHeath)

module.exports = app

View File

@ -1,109 +0,0 @@
const app = require('./app.js')
const request = require('supertest')
const qs = require('querystring')
require('dotenv').config()
describe('app', function () {
it('should exist', function (done) {
if (!app?.mountpath) throw new Error('app doesnt exist');
done()
})
describe('/health', function () {
it('should be publicly readable', function (done) {
request(app)
.get('/health')
.set('Accept', 'text/html')
.expect('Content-Type', /text/)
.expect(/piss/)
.expect(200, done)
})
})
describe('/task', function () {
describe('POST', function () {
it('should create a task', function (done) {
request(app)
.post('/task')
.set('Authorization', `Bearer ${process.env.API_KEY}`)
.set('Accept', 'application/json')
.send({
url: 'https://futureporn-b2.b-cdn.net/projekt-melody.jpg'
})
.expect('Content-Type', /json/)
.expect((res) => {
if (!res.body?.data) throw new Error('response body was missing data')
if (!res.body?.data?.id) throw new Error('response body was missing id')
return true
})
.expect(200, done)
})
})
describe('GET', function () {
it('should show all tasks specifications', async function () {
await request(app).post('/task').set('Authorization', `Bearer ${process.env.API_KEY}`).send({ url: 'https://example.com/my.jpg' })
await request(app).post('/task').set('Authorization', `Bearer ${process.env.API_KEY}`).send({ url: 'https://example.com/your.png' })
return request(app)
.get(`/task`)
.set('Authorization', `Bearer ${process.env.API_KEY}`)
.set('Accept', 'application/json')
.expect('Content-Type', /json/)
.expect((res) => {
if (!res?.body?.data) throw new Error('there was no data in response')
})
.expect(200)
})
it('should accept task id as query param and return task specification', function (done) {
const seed = request(app).post('/task').set('Authorization', `Bearer ${process.env.API_KEY}`).send({ url: 'https://example.com/z.jpg' })
seed.then((res) => {
const query = qs.stringify({
id: res.body.data.id
})
request(app)
.get(`/task?${query}`)
.set('Accept', 'application/json')
.expect('Content-Type', /json/)
.expect((res) => {
if (res?.body?.error) throw new Error('there was an error in the response: '+res.body?.message)
if (!res?.body?.data?.url) throw new Error('data.url was missing')
if (!res?.body?.data?.createdAt) throw new Error('data.createdAt was missing')
return true
})
.expect(200, done)
})
})
it('should show all tasks by default', function (done) {
request(app)
.get('/task')
.set('Accept', 'application/json')
.expect('Content-Type', /json/)
.expect((res) => {
if (res.body?.error) throw new Error('there was an error in the response'+res.error)
if (!res.body?.data) throw new Error('data was missing')
return true
})
.expect(200, done)
})
})
describe('DELETE', function () {
const query = qs.stringify({
id: 'awejf9wiejf9we'
})
it('should delete a single task', function (done) {
request(app)
.delete(`/task?${query}`)
.set('Authorization', `Bearer ${process.env.API_KEY}`)
.set('Accept', 'application/json')
.expect('Content-Type', /json/)
.expect(200, done);
});
})
})
})

View File

@ -1 +0,0 @@
hello worlds

View File

@ -1,16 +0,0 @@
/**
 * Express middleware enforcing a static bearer token.
 * Expects `Authorization: Bearer <API_KEY>` where the token equals
 * process.env.API_KEY. Responds 401 when the header/token is absent,
 * 403 when the token is wrong; otherwise passes control to next().
 */
module.exports.authenticate = function authenticate(req, res, next) {
  // Fix: optional-chain the whole path. Previously a request with no
  // Authorization header made `.split` throw a TypeError (a 500)
  // instead of falling through to the intended 401 response.
  const bearerToken = req.headers?.authorization?.split(' ').at(1);
  if (!bearerToken) {
    const msg = `authorization bearer token was missing from request headers`;
    console.error(msg);
    return res.status(401).json({ error: true, message: msg });
  }
  if (bearerToken !== process.env.API_KEY) {
    const msg = 'INCORRECT API_KEY (wrong token)';
    console.error(msg);
    return res.status(403).json({ error: true, message: msg });
  } else {
    next();
  }
}

View File

@ -1,21 +0,0 @@
const Queue = require('better-queue');
const taskProcess = require('../utils/taskProcess.js');

// Single shared work queue: tasks are keyed by their `id` field,
// processed one at a time, and retried up to three times on failure.
// @todo better-queue has batching and concurrency. might be useful to implement in the future
// @see https://github.com/diamondio/better-queue?tab=readme-ov-file#queue-management
const sharedQueue = new Queue(taskProcess, {
  id: 'id',
  maxRetries: 3,
  concurrent: 1
});

// Express middleware: expose the shared queue to handlers as req.queue.
function queueMiddleware(req, res, next) {
  req.queue = sharedQueue;
  next();
}

module.exports = queueMiddleware

View File

@ -1,10 +0,0 @@
const store = {
tasks: {}
}
const storeMiddleware = (req, res, next) => {
req.store = store
next();
};
module.exports = storeMiddleware;

View File

@ -1,3 +0,0 @@
module.exports = function readHealth (req, res) {
return res.send('**link2cid pisses on the floor**')
}

View File

@ -1,59 +0,0 @@
const { createId } = require('@paralleldrive/cuid2');
const { getTmpFilePath } = require('../utils/paths.js');
const fsp = require('fs/promises');
/**
 * POST /task — create a download+pin task for the given URL.
 * Validates input, records the task spec in the in-memory store, and
 * enqueues it for background processing. Responds with the task spec.
 */
module.exports.createTask = function createTask (req, res) {
  // Fix: validate before use. Previously the url was passed to
  // getTmpFilePath() before this check, so a request with no url crashed
  // the handler (path.basename(undefined) throws) instead of returning 400.
  const url = req?.body?.url
  if (!url) return res.status(400).json({ error: true, message: 'request body was missing a url' });
  const task = {
    id: createId(),
    url: url,
    filePath: getTmpFilePath(url),
    fileSize: null,
    createdAt: new Date().toISOString(),
    cid: null,
    downloadProgress: null,
    addProgress: null
  }
  req.store.tasks[task.id] = task;
  req.queue.push(task, function (err, result) {
    // NOTE(review): throwing inside better-queue's completion callback
    // surfaces as an unhandled exception; logging may be preferable — confirm.
    if (err) throw err;
    console.log('the following is the result of the queued task being complete')
    console.log(result)
  })
  return res.json({ error: false, data: task })
}
/**
 * GET /task — read one task (when ?id= is supplied) or every task.
 */
module.exports.readTask = function readTask (req, res) {
  const id = req?.query?.id
  // No id in the query means the caller wants the full task listing.
  if (!id) {
    return res.json({ error: false, data: req.store.tasks })
  }
  const task = req.store.tasks[id]
  if (!task) return res.json({ error: true, message: 'there was no task in the store with that id' });
  return res.json({ error: false, data: task })
}
/**
 * DELETE /task?id=… — remove a task from the store and best-effort
 * delete its temp file from disk. Idempotent: unknown ids still succeed.
 */
module.exports.deleteTask = async function deleteTask (req, res) {
  const id = req?.query?.id;
  const task = req.store.tasks[id];
  try {
    // Best-effort cleanup; a missing file is not an error.
    if (task?.filePath) await fsp.unlink(task.filePath);
  } catch (err) {}
  delete req.store.tasks[id];
  // Fix: dropped the dead statement that followed this return — an
  // unreachable `if (err) …` referencing an undefined variable.
  return res.json({ error: false, message: 'task deleted' });
}

View File

@ -1,20 +0,0 @@
const fs = require("fs");
const { Readable } = require('stream');
const { finished } = require('stream/promises');
const path = require("path");
/**
 * Download a file at url to local disk filePath.
 * @param {String} url
 * @param {String} filePath
 * @throws when the HTTP response is not OK or has no body
 *
 * greetz https://stackoverflow.com/a/51302466/1004931
 */
const download = (async (url, filePath) => {
  const res = await fetch(url);
  // Fix: bail out on HTTP errors instead of saving an error page to disk.
  if (!res.ok || !res.body) {
    throw new Error(`failed to download ${url} (status ${res.status})`);
  }
  // 'wx' refuses to clobber an existing file.
  const fileStream = fs.createWriteStream(filePath, { flags: 'wx' });
  await finished(Readable.fromWeb(res.body).pipe(fileStream));
});

module.exports = download;

View File

@ -1,12 +0,0 @@
// Mocha test for the download util. Hits the network (B2 CDN), so it
// requires connectivity; not a pure unit test.
const download = require('./download.js');
const fsp = require('fs/promises');
describe('download', function () {
it('should download a file from url', async function () {
const testFilePath = '/tmp/pmel.jpg'
// Remove any leftover file first: download opens with flag 'wx',
// which fails if the destination already exists.
try {
await fsp.unlink(testFilePath)
} catch (e) {}
await download('https://futureporn-b2.b-cdn.net/projekt-melody.jpg', testFilePath)
})
})

View File

@ -1,100 +0,0 @@
require('dotenv').config();
const { openAsBlob } = require('node:fs');
// NOTE(review): `rm` appears unused in this module.
const { rm, stat } = require('fs/promises');
const path = require('path');
// Fail fast: this module is useless without a kubo RPC endpoint.
if (!process.env.IPFS_URL) throw new Error('IPFS_URL was missing in env');
/**
 * POST formData to the kubo /api/v0/add endpoint and stream the
 * newline-delimited JSON response, resolving with the final progress
 * object (the one carrying Name/Hash/Size) for `basename`.
 * @param {String} url
 * @param {FormData} formData
 * @param {String} basename  name of the file entry to watch for
 * @param {Number} filesize  unused; kept for call-site compatibility
 * @throws on HTTP error, unreadable body, or stream end without a CID
 */
async function streamingPostFetch(
  url,
  formData,
  basename,
  filesize
) {
  try {
    const res = await fetch(url, {
      method: 'POST',
      body: formData
    });
    if (!res.ok) {
      // Fix: de-garbled message ("Status--" -> "Status:").
      throw new Error(`HTTP error! Status: ${res.status}`);
    }
    const reader = res.body?.getReader();
    if (!reader) {
      throw new Error('Failed to get reader from response body');
    }
    while (true) {
      const { done, value } = await reader.read();
      // On the final read `value` may be undefined; decode() yields ''.
      const chunk = new TextDecoder().decode(value);
      const lines = chunk.split('\n');
      for (const line of lines) {
        const trimmedLine = line.trim()
        if (!!trimmedLine) {
          const json = JSON.parse(trimmedLine);
          if (json.Name === basename && json.Hash && json.Size) {
            // this is the last chunk
            return json;
          }
        }
      }
      if (done) {
        // Fix: typo in the original message ("failiure").
        throw new Error('Response reader finished before receiving a CID which indicates a failure.');
      }
    }
  } catch (error) {
    console.error('An error occurred:', error);
    throw error;
  }
}
async function getFormStuff(filePath) {
const url = `${process.env.IPFS_URL}/api/v0/add?progress=false&cid-version=1&pin=true`;
const blob = await openAsBlob(filePath);
const basename = path.basename(filePath);
const filesize = (await stat(filePath)).size;
const formData = new FormData();
return {
url,
blob,
basename,
filesize,
formData
}
}
/**
 * Add the file at filePath to the remote IPFS node (pinned, CIDv1).
 * @param {String} filePath
 * @returns {String} CID of the added file
 * @throws when the node's response never yields a CID
 */
const ipfsAdd = async function (filePath) {
  const stuff = await getFormStuff(filePath);
  stuff.formData.append('file', stuff.blob, stuff.basename);
  const output = await streamingPostFetch(stuff.url, stuff.formData, stuff.basename, stuff.filesize);
  if (!output?.Hash) throw new Error('No CID was received from remote IPFS node.');
  return output.Hash
}

module.exports = ipfsAdd;

View File

@ -1,13 +0,0 @@
// Mocha test for ipfsAdd. Requires a reachable kubo node (IPFS_URL in env);
// asserts against the known CIDv1 of the fixtures/hello-worlds.txt file.
const ipfsAdd = require('./ipfsAdd.js')
const path = require('path');
describe('ipfs', function () {
describe('ipfsAdd', function () {
it('should add a file from disk to ipfs and return a {string} CID', async function () {
const expectedCid = 'bafkreibxh3ly47pr3emvrqtax6ieq2ybom4ywyil3yurxnlwirtcvb5pfi'
const file = path.join(__dirname, '..', 'fixtures', 'hello-worlds.txt')
// NOTE(review): ipfsAdd takes a single argument; the options object
// passed here is ignored by the implementation — confirm intent.
const cid = await ipfsAdd(file, { cidVersion: 1 })
if (cid !== expectedCid) throw new Error(`expected ${cid} to match ${expectedCid}`)
})
})
})

View File

@ -1,12 +0,0 @@
const os = require('os');
const path = require('path');
const getTmpFilePath = function (url) {
const timestamp = new Date().valueOf()
return path.join(os.tmpdir(), timestamp+'-'+path.basename(url))
}
module.exports = {
getTmpFilePath
}

View File

@ -1,11 +0,0 @@
// Mocha test for the paths util; checks the tmp-file naming scheme.
const paths = require('./paths.js')
describe('paths', function () {
describe('getTmpFilePath', function () {
it('should accept a url and receive a /tmp/<datestamp><basename> path on disk', function () {
const url = 'https://example.com/my.jpg'
const p = paths.getTmpFilePath(url)
// NOTE(review): the actual format is /tmp/<datestamp>-<basename>
// (hyphen-separated), as the regex below shows.
if (!/\/tmp\/\d+-my\.jpg/.test(p)) throw new Error(`expected ${p} to use format /tmp/<datestamp><basename>`)
})
})
})

View File

@ -1,25 +0,0 @@
// NOTE(review): `os` and `path` appear unused in this module.
const os = require('os');
const path = require('path');
const downloadFile = require('./download');
const ipfsAdd = require('./ipfsAdd');
// better-queue worker: downloads the task's URL to its temp filePath,
// adds the file to IPFS, records the resulting CID on the task spec,
// then invokes the node-style callback with the finished spec.
// NOTE(review): the use of `this.progressTask` implies better-queue binds
// `this` to the ticket — keep this a `function` expression, not an arrow.
const taskProcess = async function (taskSpec, cb) {
console.log('downloading')
// Report phase 1 of 3 to the queue's progress tracker.
this.progressTask(1, 3, "downloading")
await downloadFile(taskSpec.url, taskSpec.filePath)
console.log('adding')
// Phase 2 of 3: pin the downloaded file to IPFS.
this.progressTask(2, 3, "adding")
const cid = await ipfsAdd(taskSpec.filePath)
taskSpec.cid = cid
cb(null, taskSpec)
}
module.exports = taskProcess;

View File

@ -1,27 +1,2 @@
# futureporn-next
## Dev notes
When adding a new module via pnpm, docker compose needs to be restarted or something. I'm not sure the exact steps just yet, but I think it's something like the following.
```
pnpm add @uppy/react
docker compose build next
```
> fp-next | Module not found: Can't resolve '@uppy/react'
hmm... It looks like I'm missing something. Is the new package not getting into the container? Maybe it's something to do with the pnpm cache?
Must we build without cache?
docker compose build --no-cache next; docker compose up
YES. that solved the issue.
However, it's really slow to purge cache and download all packages once again. Is there a way we can speed this up?
* make it work
* make it right
* make it fast
# next

View File

@ -8,47 +8,19 @@ export default async function Page() {
return (
<>
<div className="content">
<div className="content">
<section className="hero">
<div className="hero-body">
<p className="title">About</p>
<p>Futureporn is a fanmade public archive of NSFW R18 vtuber livestreams.</p>
<p>It's the worst feeling when a VOD disappears from the internet. It means you missed out, it's gone, and you may never experience what your peers got to take part in.</p>
<p>Futureporn is created by fans, for fans. Missed a stream? We got you, bro.</p>
<p>Together we can end 404s and create an everlasting archive of lewdtuber livestreams.</p>
</div>
</section>
<div className="section">
<h1>Mission</h1>
<p>It&apos;s a lofty goal, but Futureporn aims to become <b>the Galaxy&apos;s best VTuber hentai site.</b></p>
</div>
<div className="section">
<h2>How do we get there?</h2>
<h3>1. Solve the viewer&apos;s common problems</h3>
<p>Viewers want to watch livestream VODs on their own time. Futureporn collects vods from public streams, and caches them for later viewing.</p>
<p>Viewers want to find content that interests them. Futureporn enables vod tagging for easy browsing.</p>
</div>
<div className="section">
<h3>2. Solve the streamer&apos;s common problems</h3>
<p>Platforms like PH are not rising to the needs of VTubers. Instead of offering support and resources, they restrict and ban top talent.</p>
<p>Futureporn is different, embracing the medium and leveraging emerging technologies to amplify VTuber success.</p>
</div>
<div className="section">
<h3>3. Scale beyond Earth</h3>
<p>Piggybacking on <Link href="/faq#ipfs">IPFS</Link>&apos; content-addressable capabilities and potential to end 404s, VODs preserved here can withstand the test of time, and eventually persist <Link href="/goals">off-world</Link>.</p>
</div>
<div className="section">

View File

@ -6,7 +6,7 @@ export default function NotFound() {
<h2 className='title is-2'>404 Not Found</h2>
<p>Could not find that stream archive.</p>
<Link href="/s">Return to archive list</Link>
<Link href="/archive">Return to archive list</Link>
</div>
)
}

View File

@ -14,7 +14,7 @@ export default async function ArchiveProgress ({ vtuber }: IArchiveProgressProps
const issueStreams = await getStreamCountForVtuber(vtuber.id, ['issue'])
const totalStreams = await getStreamCountForVtuber(vtuber.id)
const eligibleStreams = goodStreams+issueStreams
const completedPercentage = (eligibleStreams / totalStreams) * 100
const completedPercentage = Math.floor((eligibleStreams / totalStreams) * 100)
return (
<div>
{/* <p>

View File

@ -7,7 +7,9 @@ import { faPatreon } from '@fortawesome/free-brands-svg-icons';
import { useLocalStorageValue } from '@react-hookz/web';
import { faRightFromBracket } from '@fortawesome/free-solid-svg-icons';
import Skeleton from 'react-loading-skeleton';
import { strapiUrl } from '@/lib/constants';
import { strapiUrl } from '@/lib/constants';
// import NextAuth from 'next-auth'; // this is (pipedream) wishlist
// import Providers from 'next-auth/providers';
export interface IJWT {
jwt: string;

View File

@ -63,7 +63,7 @@ export default function Navbar() {
</div>
{/* <div className="navbar-item">
<div className="navbar-item">
<Link className="button " href="/upload">
<span className="mr-1">Upload</span>
<FontAwesomeIcon
@ -71,7 +71,7 @@ export default function Navbar() {
className="fas fa-upload"
></FontAwesomeIcon>
</Link>
</div> */}
</div>
<div className="navbar-item fp-profile-button">
{/* show the login button if user is anon */}

View File

@ -10,11 +10,13 @@ interface PatronsListProps {
export default async function PatronsList({ displayStyle }: PatronsListProps) {
const patrons = await getPatrons()
if (!patrons) return (
<SkeletonTheme baseColor="#000" highlightColor="#000">
<Skeleton count={3} enableAnimation={false} />
</SkeletonTheme>
);
if (displayStyle === 'box') {
return (
<div className="columns is-multiline">

View File

@ -11,6 +11,7 @@ import { faTriangleExclamation, faCircleInfo, faThumbsUp, IconDefinition, faO, f
import { Hemisphere, Moon } from "lunarphase-js";
import { useEffect, useState } from "react";
import { faXTwitter } from "@fortawesome/free-brands-svg-icons";
import { notFound } from "next/navigation";
export interface IStreamProps {
stream: IStream;
@ -47,6 +48,7 @@ function determineStatus(stream: IStream): Status {
export default function StreamPage({ stream }: IStreamProps) {
console.log('StreamPage function has been invoked! stream as follows')
console.log(stream)
if (!stream) notFound()
const displayName = stream.attributes.vtuber.data.attributes.displayName;
const date = new Date(stream.attributes.date);
const [hemisphere, setHemisphere] = useState(Hemisphere.NORTHERN);
@ -169,8 +171,7 @@ export default function StreamPage({ stream }: IStreamProps) {
<span className="title is-1"><FontAwesomeIcon icon={icon}></FontAwesomeIcon></span>
<p className="mt-3">{desc1}</p>
<p className="mt-5">{desc2}<br />
{/* <Link href={`/upload?cuid=${stream.attributes.cuid}`}>Upload it here.</Link></p> */}
<Link style={{ cursor: 'not-allowed' }} href={`/upload?cuid=${stream.attributes.cuid}`}><i>Uploads coming soon.</i></Link></p>
<Link href={`/upload?cuid=${stream.attributes.cuid}`}>Upload it here.</Link></p>
</div>
</article>
</div>

View File

@ -16,7 +16,8 @@ import {
ColumnDef,
flexRender,
} from '@tanstack/react-table'
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
import { faSpinner } from '@fortawesome/free-solid-svg-icons'
import { fetchStreamData, IStream } from '@/lib/streams'
@ -93,7 +94,7 @@ export default function StreamsTable() {
accessorFn: d => [
(d.attributes.isChaturbateStream && 'CB'),
(d.attributes.isFanslyStream && 'Fansly')
].filter(Boolean).join(' ') || '???'
].filter(Boolean).join(', ') || '???'
},
{
header: 'Status',
@ -129,20 +130,20 @@ export default function StreamsTable() {
pageSize: 50,
})
const dataQuery = useQuery({
const { data, error, isPending } = useQuery({
queryKey: ['streams', pagination.pageIndex, pagination.pageSize],
queryFn: () => fetchStreamData(pagination),
placeholderData: keepPreviousData, // don't have 0 rows flash while changing pages/loading next page,
staleTime: 1000
staleTime: 1000,
}, queryClient)
const defaultData = React.useMemo(() => [], [])
const table = useReactTable({
data: dataQuery?.data?.rows ?? defaultData,
data: data?.rows ?? defaultData,
columns,
// pageCount: dataQuery.data?.pageCount ?? -1, //you can now pass in `rowCount` instead of pageCount and `pageCount` will be calculated internally (new in v8.13.0)
rowCount: dataQuery.data?.rowCount, // new in v8.13.0 - alternatively, just pass in `pageCount` directly
rowCount: data?.rowCount, // new in v8.13.0 - alternatively, just pass in `pageCount` directly
state: {
pagination,
},
@ -156,49 +157,53 @@ export default function StreamsTable() {
return (
<div className="p-2">
<div className="h-2" />
<table className='table is-hoverable is-fullwidth'>
<thead>
{table.getHeaderGroups().map(headerGroup => (
<tr key={headerGroup.id}>
{headerGroup.headers.map(header => {
return (
<th key={header.id} colSpan={header.colSpan}>
{header.isPlaceholder ? null : (
<div>
{flexRender(
header.column.columnDef.header,
header.getContext()
)}
</div>
)}
</th>
)
})}
</tr>
))}
</thead>
<tbody>
{table.getRowModel().rows.map(row => {
return (
<tr key={row.id}>
{row.getVisibleCells().map(cell => {
{isPending && <FontAwesomeIcon className="mt-5 fa-spin-pulse" icon={faSpinner} ></FontAwesomeIcon> }
{!isPending && <>
<table className='table is-hoverable is-fullwidth'>
<thead>
{table.getHeaderGroups().map(headerGroup => (
<tr key={headerGroup.id}>
{headerGroup.headers.map(header => {
return (
<td
className={getStatusClass(cell.getValue() as string)}
key={cell.id}
>
{flexRender(
cell.column.columnDef.cell,
cell.getContext()
<th key={header.id} colSpan={header.colSpan}>
{header.isPlaceholder ? null : (
<div>
{flexRender(
header.column.columnDef.header,
header.getContext()
)}
</div>
)}
</td>
</th>
)
})}
</tr>
)
})}
</tbody>
</table>
))}
</thead>
<tbody>
{table.getRowModel().rows.map(row => {
return (
<tr key={row.id}>
{row.getVisibleCells().map(cell => {
return (
<td
className={getStatusClass(cell.getValue() as string)}
key={cell.id}
>
{flexRender(
cell.column.columnDef.cell,
cell.getContext()
)}
</td>
)
})}
</tr>
)
})}
</tbody>
</table>
<div className="columns is-mobile is-vcentered">
<div className='column is-half'>
@ -273,6 +278,7 @@ export default function StreamsTable() {
</div>
</div>
</div>
</>}

View File

@ -159,235 +159,241 @@ export default function UploadForm({ vtubers }: IUploadFormProps) {
uppy.on('complete', async (result: any) => {
console.log('uppy complete! ')
console.log(result)
for (const s of result.successful) {
if (!s?.s3Multipart) {
setError('root.serverError', {
type: 'remote',
message: 'file was missing s3Multipart'
})
// throw new Error('file was missing s3Multipart')
}
}
let files = result.successful.map((f: any) => ({ key: f.s3Multipart.key, uploadId: f.s3Multipart.uploadId }));
setValue('files', files);
});
return (<div className="notification is-secondary">
<h1 className="title">VOD uploads</h1>
<p>
<i>coming soon!!</i>
</p>
<hr ></hr>
<p>Track progress on the <a href="/goals">Goals Page</a></p>
</div>)
// return (
// <>
return (
<>
// <div className='section'>
// <h2 className='title is-2'>Upload VOD</h2>
<div className='section'>
<h2 className='title is-2'>Upload VOD</h2>
// <p className="mb-5"><i>Together we can archive all lewdtuber livestreams!</i></p>
<p className="mb-5"><i>Together we can archive all lewdtuber livestreams!</i></p>
// {(!authData?.accessToken)
// ?
// <>
// <aside className='notification is-danger'><p>Please log in to upload VODs</p></aside>
// <LoginButton />
// </>
// : (
{(!authData?.accessToken)
?
<>
<aside className='notification is-danger'><p>Please log in to upload VODs</p></aside>
<LoginButton />
</>
: (
// <div className='columns is-multiline'>
// <form id="vod-details" onSubmit={handleSubmit(createUSC)}>
<div className='columns is-multiline'>
<form id="vod-details" onSubmit={handleSubmit(createUSC)}>
// {(!isSubmitSuccessful) && <div className='column is-full'>
// <section className="hero is-info mb-3">
// <div className="hero-body">
// <p className="title">
// Step 1
// </p>
// <p className="subtitle">
// Upload the file
// </p>
// </div>
// </section>
// <section className="section mb-5">
// <Dashboard
// uppy={uppy}
// theme='dark'
// proudlyDisplayPoweredByUppy={false}
// />
{(!isSubmitSuccessful) && <div className='column is-full'>
<section className="hero is-info mb-3">
<div className="hero-body">
<p className="title">
Step 1
</p>
<p className="subtitle">
Upload the file
</p>
</div>
</section>
<section className="section mb-5">
<Dashboard
uppy={uppy}
theme='dark'
proudlyDisplayPoweredByUppy={true}
showProgressDetails={true}
/>
// <input
// required
// hidden={true}
// style={{ display: 'none' }}
// className="input" type="text"
// {...register('files')}
// ></input>
{/* This form is hidden. Why? */}
<input
required
hidden={false}
style={{ display: 'block' }}
className="input" type="text"
{...register('files')}
></input>
// {errors.files && <p className="help is-danger">{errors.files.message?.toString()}</p>}
{errors.files && <p className="help is-danger">{errors.files.message?.toString()}</p>}
// </section>
// </div>}
</section>
</div>}
// {(!isSubmitSuccessful) && <div className='column is-full '>
// {/* {(!cuid) && <aside className='notification is-info'>Hint: Some of these fields are filled out automatically when uploading from a <Link href="/streams">stream</Link> page.</aside>} */}
{(!isSubmitSuccessful) && <div className='column is-full '>
{/* {(!cuid) && <aside className='notification is-info'>Hint: Some of these fields are filled out automatically when uploading from a <Link href="/streams">stream</Link> page.</aside>} */}
// <section className="hero is-info mb-3">
// <div className="hero-body">
// <p className="title">
// Step 2
// </p>
// <p className="subtitle">
// Tell us about the VOD
// </p>
// </div>
// </section>
<section className="hero is-info mb-3">
<div className="hero-body">
<p className="title">
Step 2
</p>
<p className="subtitle">
Tell us about the VOD
</p>
</div>
</section>
// <section className="section">
<section className="section">
// {/* <input
// required
// // hidden={false}
// // style={{ display: 'none' }}
// className="input" type="text"
// {...register('streamCuid')}
// ></input> */}
{/* <input
required
// hidden={false}
// style={{ display: 'none' }}
className="input" type="text"
{...register('streamCuid')}
></input> */}
// <div className="field">
// <label className="label">VTuber</label>
// <div className="select">
// <select
// required
// // value={vtuber}
// // onChange={(evt) => setVtuber(parseInt(evt.target.value))}
// {...register('vtuber')}
// >
// {vtubers.map((vtuber: IVtuber) => (
// <option key={vtuber.id} value={vtuber.id}>{vtuber.attributes.displayName}</option>
// ))}
// </select>
// </div>
// <p className="help is-info">Choose the VTuber this VOD belongs to. (More VTubers will be added when storage/bandwidth funding is secured.)</p>
// {errors.vtuber && <p className="help is-danger">vtuber error</p>}
<div className="field">
<label className="label">VTuber</label>
<div className="select">
<select
required
// value={vtuber}
// onChange={(evt) => setVtuber(parseInt(evt.target.value))}
{...register('vtuber')}
>
{vtubers.map((vtuber: IVtuber) => (
<option key={vtuber.id} value={vtuber.id}>{vtuber.attributes.displayName}</option>
))}
</select>
</div>
<p className="help is-info">Choose the VTuber this VOD belongs to. (More VTubers will be added when storage/bandwidth funding is secured.)</p>
{errors.vtuber && <p className="help is-danger">vtuber error</p>}
// </div>
</div>
// <div className="field">
// <label className="label">Stream Date</label>
// <input
// required
// className="input" type="date"
// {...register('date')}
// // onChange={(evt) => setDate(evt.target.value)}
// ></input>
// <p className="help is-info">The date when the VOD was originally streamed.</p>
// {errors.date && <p className="help is-danger">{errors.date.message?.toString()}</p>}
<div className="field">
<label className="label">Stream Date</label>
<input
required
className="input" type="date"
{...register('date')}
// onChange={(evt) => setDate(evt.target.value)}
></input>
<p className="help is-info">The date when the VOD was originally streamed.</p>
{errors.date && <p className="help is-danger">{errors.date.message?.toString()}</p>}
// </div>
</div>
// <div className="field">
// <label className="label">Notes</label>
// <textarea
// className="textarea"
// placeholder="e.g. Missing first 10 minutes of stream"
// // onChange={(evt) => setNote(evt.target.value)}
// {...register('notes')}
// ></textarea>
// <p className="help is-info">If there are any issues with the VOD, put a note here. If there are no VOD issues, leave this field blank.</p>
// </div>
<div className="field">
<label className="label">Notes</label>
<textarea
className="textarea"
placeholder="e.g. Missing first 10 minutes of stream"
// onChange={(evt) => setNote(evt.target.value)}
{...register('notes')}
></textarea>
<p className="help is-info">If there are any issues with the VOD, put a note here. If there are no VOD issues, leave this field blank.</p>
</div>
// <div className="field">
// <label className="label">Attribution</label>
// <label className="checkbox">
// <input
// type="checkbox"
// // onChange={(evt) => setAttribution(evt.target.checked)}
// {...register('attribution')}
// />
// <span className={`ml-2 ${styles.noselect}`}>Credit {authData.user?.username} for the upload.</span>
// <p className="help is-info">Check this box if you want your username displayed on the website. Thank you for uploading!</p>
// </label>
// </div>
<div className="field">
<label className="label">Attribution</label>
<label className="checkbox">
<input
type="checkbox"
// onChange={(evt) => setAttribution(evt.target.checked)}
{...register('attribution')}
/>
<span className={`ml-2 ${styles.noselect}`}>Credit {authData.user?.username} for the upload.</span>
<p className="help is-info">Check this box if you want your username displayed on the website. Thank you for uploading!</p>
</label>
</div>
// </section>
</section>
// </div>}
</div>}
// <div className="column is-full">
// <section className="hero is-info">
// <div className="hero-body">
// <p className="title">
// Step 3
// </p>
// <p className="subtitle">
// Send the form
// </p>
// </div>
// </section>
// <section className="section">
<div className="column is-full">
<section className="hero is-info">
<div className="hero-body">
<p className="title">
Step 3
</p>
<p className="subtitle">
Send the form
</p>
</div>
</section>
<section className="section">
// {errors.root?.serverError && (
// <div className="notification">
// <button className="delete" onClick={() => clearErrors()}></button>
// <ErrorMessage name="root" errors={errors} ></ErrorMessage>
// </div>
// )}
{errors.root?.serverError && (
<div className="notification">
<button className="delete" onClick={() => clearErrors()}></button>
<ErrorMessage name="root" errors={errors} ></ErrorMessage>
</div>
)}
// {!isSubmitSuccessful && (
// <button className="button is-primary is-large mt-5">
// <span className="icon is-small">
// <FontAwesomeIcon icon={faPaperPlane}></FontAwesomeIcon>
// </span>
// <span>Send</span>
// </button>
// )}
{!isSubmitSuccessful && (
<button className="button is-primary is-large mt-5">
<span className="icon is-small">
<FontAwesomeIcon icon={faPaperPlane}></FontAwesomeIcon>
</span>
<span>Send</span>
</button>
)}
// {isSubmitting && (
// <p>
// <FontAwesomeIcon className="mt-5 fa-spin-pulse" icon={faSpinner} ></FontAwesomeIcon>
// </p>
// )}
// {isSubmitSuccessful && (
// <>
// <aside className="notification mt-5 is-success">Thank you for uploading! </aside>
// <button onClick={() => {
// reset(); // reset form
// const files = uppy.getFiles()
// for (const file of files) {
// uppy.removeFile(file.id); // reset uppy
// }
// }} className="button is-primary">
// <span className="icon is-small">
// <FontAwesomeIcon icon={faEraser}></FontAwesomeIcon>
// </span>
// <span>Reset form</span>
// </button>
// </>
// )}
{isSubmitting && (
<p>
<FontAwesomeIcon className="mt-5 fa-spin-pulse" icon={faSpinner} ></FontAwesomeIcon>
</p>
)}
{isSubmitSuccessful && (
<>
<aside className="notification mt-5 is-success">Thank you for uploading! </aside>
<button onClick={() => {
reset(); // reset form
const files = uppy.getFiles()
for (const file of files) {
uppy.removeFile(file.id); // reset uppy
}
}} className="button is-primary">
<span className="icon is-small">
<FontAwesomeIcon icon={faEraser}></FontAwesomeIcon>
</span>
<span>Reset form</span>
</button>
</>
)}
// </section>
// </div>
</section>
</div>
// </form>
// </div>
</form>
</div>
// )
// }
)
}
// </div>
</div>
// </>
// )
</>
)
}

View File

@ -277,7 +277,7 @@ export async function getAllStreamsForVtuber(vtuberId: number, archiveStatuses =
},
});
console.log(`strapiUrl=${strapiUrl}`)
// console.log(`strapiUrl=${strapiUrl}`)
const response = await fetch(`${strapiUrl}/api/streams?${query}`, fetchStreamsOptions)
if (response.status !== 200) {
@ -340,14 +340,14 @@ export async function fetchStreamData({ pageIndex, pageSize }: { pageIndex: numb
})
const response = await fetch(
`${strapiUrl}/api/streams?${query}`
);
const json = await response.json();
console.log(json)
const d = {
rows: json.data,
pageCount: Math.ceil(json.meta.pagination.total / pageSize),
rowCount: json.meta.pagination.total,
}
);
const json = await response.json();
console.log(json)
const d = {
rows: json.data,
pageCount: Math.ceil(json.meta.pagination.total / pageSize),
rowCount: json.meta.pagination.total,
}
// console.log(`fetchStreamData with pageIndex=${pageIndex}, pageSize=${pageSize}\n\n${JSON.stringify(d, null, 2)}`)
return d;
}
@ -371,13 +371,13 @@ export async function getStreamCountForVtuber(vtuberId: number, archiveStatuses
)
const res = await fetch(`${strapiUrl}/api/streams?${query}`, fetchStreamsOptions)
const data = await res.json()
console.log(`getStreamCountForVtuber with archiveStatuses=${archiveStatuses}`)
console.log(JSON.stringify(data, null, 2))
// console.log(`getStreamCountForVtuber with archiveStatuses=${archiveStatuses}`)
// console.log(JSON.stringify(data, null, 2))
return data.meta.pagination.total
}
export async function getStreamsForVtuber(vtuberId: number, page: number = 1, pageSize: number = 25, sortDesc = true): Promise<IStreamsResponse> {
console.log(`getStreamsForVtuber() with strapiUrl=${strapiUrl}`)
// console.log(`getStreamsForVtuber() with strapiUrl=${strapiUrl}`)
const query = qs.stringify(
{
populate: {
@ -405,7 +405,7 @@ export async function getStreamsForVtuber(vtuberId: number, page: number = 1, pa
)
const res = await fetch(`${strapiUrl}/api/streams?${query}`, fetchStreamsOptions)
const data = await res.json()
console.log(data)
// console.log(data)
return data
}

View File

@ -11,8 +11,16 @@ import { notFound } from "next/navigation";
export default async function Page() {
const vods = await getVods(1, 9, true);
console.log('vods as follows')
console.log(JSON.stringify(vods, null, 2))
const vtubers = await getVtubers();
if (!vtubers) notFound();
console.log(`vtubers as follows`)
console.log(JSON.stringify(vtubers, null, 2))
// return (
// <pre>

Some files were not shown because too many files have changed in this diff Show More