Compare commits
No commits in common. "a60d0f0821e44b071db398a1303512fa685cd6f8" and "2c8e41be61778883f8bd68fe0719673d62482cb9" have entirely different histories.
a60d0f0821...2c8e41be61
.dokku/README.md (new file, 3 lines)
@@ -0,0 +1,3 @@
https://dokku.com/docs/advanced-usage/deployment-tasks/?h=monorepo#changing-the-appjson-location

https://dokku.com/docs/deployment/builders/dockerfiles/
.dokku/next.app.json (new file, 8 lines)
@@ -0,0 +1,8 @@
{
  "scripts": {
    "dokku": {
      "predeploy": "echo hello-world-predeploy",
      "postdeploy": "echo hello-world-postdeploy"
    }
  }
}
@@ -4,18 +4,23 @@ pnpm for workspaces.

Kubernetes for Development using Tiltfile

Kubernetes for Production, deployed using FluxCD
kubefwd and entr for DNS in dev cluster

Tested on VKE v1.30.0+1 (PVCs on other versions may not be fulfilled)
dokku for Production, deployed with `git push`.

(dokku is slowly being replaced by Kubernetes)

Kubernetes for Production, deployed using FluxCD

direnv for loading .envrc

Temporal for work queue, cron
Temporal for work queue

Postgres for data storage

S3 for media storage

Domain Driven Development

Test Driven Development

Tested on VKE v1.30.0+1 (PVCs on other versions may not be fulfilled)
Makefile (2 changed lines)
@@ -23,7 +23,7 @@ velero:

tilt:
    kind get kubeconfig > ~/.kube/kind.yaml
-   KUBECONFIG=~/.kube/kind.yaml tilt up -f ./Tiltfile
+   KUBECONFIG=~/.kube/kind.yaml tilt up -f ./t.wip.tiltfile

exoscale:
    kubectl apply -f https://raw.githubusercontent.com/exoscale/cert-manager-webhook-exoscale/master/deploy/exoscale-webhook-kustomize/deploy.yaml
README.md (33 changed lines)
@@ -4,35 +4,10 @@ Source Code for https://futureporn.net

See ./ARCHITECTURE.md for overview

## Metrics Notes

Keeping track of metrics we want to scrape using Prometheus
## Jun update todo list

### Uppy
* [x] external-dns gameplan
* [ ] traefik well understood
* [ ] staging test with *.futureporn.net domains

https://uppy.fp.sbtp.xyz/metrics


## Development Mantras

### Move fast and break things

### Make it work, make it right, make it fast (in that order)

### Done is better than perfect

### If it looks like a duck and quacks like a duck, it is a duck.

### If the way is long, the way is wrong

### Good, Fast, Cheap. Pick two but not all three.

### Organizations are fractals

### Focus on what moves the needle

### Alligator energy (move slow and preserve things)

### Code is run more than it is read

### The computer doesn't care
@ -1,32 +0,0 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFmDCCA4CgAwIBAgIQU9C87nMpOIFKYpfvOHFHFDANBgkqhkiG9w0BAQsFADBm
|
||||
MQswCQYDVQQGEwJVUzEzMDEGA1UEChMqKFNUQUdJTkcpIEludGVybmV0IFNlY3Vy
|
||||
aXR5IFJlc2VhcmNoIEdyb3VwMSIwIAYDVQQDExkoU1RBR0lORykgUHJldGVuZCBQ
|
||||
ZWFyIFgxMB4XDTE1MDYwNDExMDQzOFoXDTM1MDYwNDExMDQzOFowZjELMAkGA1UE
|
||||
BhMCVVMxMzAxBgNVBAoTKihTVEFHSU5HKSBJbnRlcm5ldCBTZWN1cml0eSBSZXNl
|
||||
YXJjaCBHcm91cDEiMCAGA1UEAxMZKFNUQUdJTkcpIFByZXRlbmQgUGVhciBYMTCC
|
||||
AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALbagEdDTa1QgGBWSYkyMhsc
|
||||
ZXENOBaVRTMX1hceJENgsL0Ma49D3MilI4KS38mtkmdF6cPWnL++fgehT0FbRHZg
|
||||
jOEr8UAN4jH6omjrbTD++VZneTsMVaGamQmDdFl5g1gYaigkkmx8OiCO68a4QXg4
|
||||
wSyn6iDipKP8utsE+x1E28SA75HOYqpdrk4HGxuULvlr03wZGTIf/oRt2/c+dYmD
|
||||
oaJhge+GOrLAEQByO7+8+vzOwpNAPEx6LW+crEEZ7eBXih6VP19sTGy3yfqK5tPt
|
||||
TdXXCOQMKAp+gCj/VByhmIr+0iNDC540gtvV303WpcbwnkkLYC0Ft2cYUyHtkstO
|
||||
fRcRO+K2cZozoSwVPyB8/J9RpcRK3jgnX9lujfwA/pAbP0J2UPQFxmWFRQnFjaq6
|
||||
rkqbNEBgLy+kFL1NEsRbvFbKrRi5bYy2lNms2NJPZvdNQbT/2dBZKmJqxHkxCuOQ
|
||||
FjhJQNeO+Njm1Z1iATS/3rts2yZlqXKsxQUzN6vNbD8KnXRMEeOXUYvbV4lqfCf8
|
||||
mS14WEbSiMy87GB5S9ucSV1XUrlTG5UGcMSZOBcEUpisRPEmQWUOTWIoDQ5FOia/
|
||||
GI+Ki523r2ruEmbmG37EBSBXdxIdndqrjy+QVAmCebyDx9eVEGOIpn26bW5LKeru
|
||||
mJxa/CFBaKi4bRvmdJRLAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB
|
||||
Af8EBTADAQH/MB0GA1UdDgQWBBS182Xy/rAKkh/7PH3zRKCsYyXDFDANBgkqhkiG
|
||||
9w0BAQsFAAOCAgEAncDZNytDbrrVe68UT6py1lfF2h6Tm2p8ro42i87WWyP2LK8Y
|
||||
nLHC0hvNfWeWmjZQYBQfGC5c7aQRezak+tHLdmrNKHkn5kn+9E9LCjCaEsyIIn2j
|
||||
qdHlAkepu/C3KnNtVx5tW07e5bvIjJScwkCDbP3akWQixPpRFAsnP+ULx7k0aO1x
|
||||
qAeaAhQ2rgo1F58hcflgqKTXnpPM02intVfiVVkX5GXpJjK5EoQtLceyGOrkxlM/
|
||||
sTPq4UrnypmsqSagWV3HcUlYtDinc+nukFk6eR4XkzXBbwKajl0YjztfrCIHOn5Q
|
||||
CJL6TERVDbM/aAPly8kJ1sWGLuvvWYzMYgLzDul//rUF10gEMWaXVZV51KpS9DY/
|
||||
5CunuvCXmEQJHo7kGcViT7sETn6Jz9KOhvYcXkJ7po6d93A/jy4GKPIPnsKKNEmR
|
||||
xUuXY4xRdh45tMJnLTUDdC9FIU0flTeO9/vNpVA8OPU1i14vCz+MU8KX1bV3GXm/
|
||||
fxlB7VBBjX9v5oUep0o/j68R/iDlCOM4VVfRa8gX6T2FU7fNdatvGro7uQzIvWof
|
||||
gN9WUwCbEMBy/YhBSrXycKA8crgGg3x1mIsopn88JKwmMBa68oS7EHM9w7C4y71M
|
||||
7DiA+/9Qdp9RBWJpTS9i/mDnJg1xvo8Xz49mrrgfmcAXTCJqXi24NatI3Oc=
|
||||
-----END CERTIFICATE-----
|
@ -1,15 +0,0 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIICTjCCAdSgAwIBAgIRAIPgc3k5LlLVLtUUvs4K/QcwCgYIKoZIzj0EAwMwaDEL
|
||||
MAkGA1UEBhMCVVMxMzAxBgNVBAoTKihTVEFHSU5HKSBJbnRlcm5ldCBTZWN1cml0
|
||||
eSBSZXNlYXJjaCBHcm91cDEkMCIGA1UEAxMbKFNUQUdJTkcpIEJvZ3VzIEJyb2Nj
|
||||
b2xpIFgyMB4XDTIwMDkwNDAwMDAwMFoXDTQwMDkxNzE2MDAwMFowaDELMAkGA1UE
|
||||
BhMCVVMxMzAxBgNVBAoTKihTVEFHSU5HKSBJbnRlcm5ldCBTZWN1cml0eSBSZXNl
|
||||
YXJjaCBHcm91cDEkMCIGA1UEAxMbKFNUQUdJTkcpIEJvZ3VzIEJyb2Njb2xpIFgy
|
||||
MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEOvS+w1kCzAxYOJbA06Aw0HFP2tLBLKPo
|
||||
FQqR9AMskl1nC2975eQqycR+ACvYelA8rfwFXObMHYXJ23XLB+dAjPJVOJ2OcsjT
|
||||
VqO4dcDWu+rQ2VILdnJRYypnV1MMThVxo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
|
||||
VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU3tGjWWQOwZo2o0busBB2766XlWYwCgYI
|
||||
KoZIzj0EAwMDaAAwZQIwRcp4ZKBsq9XkUuN8wfX+GEbY1N5nmCRc8e80kUkuAefo
|
||||
uc2j3cICeXo1cOybQ1iWAjEA3Ooawl8eQyR4wrjCofUE8h44p0j7Yl/kBlJZT8+9
|
||||
vbtH7QiVzeKCOTQPINyRql6P
|
||||
-----END CERTIFICATE-----
|
@@ -1,5 +1,9 @@
# Futureporn helm chart
This chart was originally created by Kompose.

This is the chart with templates that define Futureporn kubernetes cluster
Then I realized I don't understand kubernetes.

https://helm.sh/docs/topics/charts/
It was too complex.

I needed to start over, understand each piece before moving on.

so we're starting small, incrementally migrating services to the cluster.
138
charts/fp/templates/echo.yaml
Normal file
138
charts/fp/templates/echo.yaml
Normal file
@ -0,0 +1,138 @@
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: echo
|
||||
namespace: futureporn
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
app: echo
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: echo
|
||||
spec:
|
||||
containers:
|
||||
- image: hashicorp/http-echo
|
||||
name: echo
|
||||
ports:
|
||||
- containerPort: 5678
|
||||
args:
|
||||
- -text="Hello, choom!"
|
||||
|
||||
{{ if eq .Values.environment "development" }}
|
||||
---
|
||||
apiVersion: chisel-operator.io/v1
|
||||
kind: ExitNode
|
||||
metadata:
|
||||
name: echo-exit-node
|
||||
namespace: futureporn
|
||||
spec:
|
||||
host: "{{ .Values.chisel.exitNodeIp }}"
|
||||
port: 9090
|
||||
auth: chisel
|
||||
{{ end }}
|
||||
|
||||
|
||||
|
||||
# ---
|
||||
# apiVersion: traefik.io/v1alpha1
|
||||
# kind: IngressRoute
|
||||
# metadata:
|
||||
# name: echo
|
||||
# namespace: futureporn
|
||||
# spec:
|
||||
# entryPoints:
|
||||
# - web
|
||||
# routes:
|
||||
# - match: Host(`echo.fp.sbtp.xyz`) || PathPrefix(`/extra/echo`)
|
||||
# kind: Rule
|
||||
# services:
|
||||
# - name: echo
|
||||
# port: 8001
|
||||
# # tls:
|
||||
# # secretName: echo-cert
|
||||
|
||||
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: echo
|
||||
namespace: futureporn
|
||||
annotations:
|
||||
external-dns.alpha.kubernetes.io/hostname: "{{ .Values.echo.hostname }}"
|
||||
{{ if eq .Values.environment "development" }}
|
||||
chisel-operator.io/exit-node-name: "echo-exit-node"
|
||||
{{ end }}
|
||||
spec:
|
||||
type: LoadBalancer
|
||||
selector:
|
||||
app: echo
|
||||
ports:
|
||||
- name: web
|
||||
protocol: TCP
|
||||
port: 5678
|
||||
targetPort: 5678
|
||||
- name: websecure
|
||||
protocol: TCP
|
||||
port: 4443
|
||||
targetPort: 5678
|
||||
|
||||
---
|
||||
apiVersion: cert-manager.io/v1
|
||||
kind: Certificate
|
||||
metadata:
|
||||
name: echo
|
||||
namespace: futureporn
|
||||
spec:
|
||||
secretName: echo-tls
|
||||
issuerRef:
|
||||
name: "{{ .Values.certManager.issuer }}"
|
||||
kind: ClusterIssuer
|
||||
dnsNames:
|
||||
- "{{ .Values.echo.hostname }}"
|
||||
|
||||
---
|
||||
apiVersion: traefik.io/v1alpha1
|
||||
kind: IngressRoute
|
||||
metadata:
|
||||
name: echo-http
|
||||
namespace: futureporn
|
||||
spec:
|
||||
entryPoints:
|
||||
- web
|
||||
routes:
|
||||
- match: Host(`echo.fp.sbtp.xyz`)
|
||||
kind: Rule
|
||||
services:
|
||||
- name: echo
|
||||
namespace: futureporn
|
||||
port: 5678
|
||||
|
||||
---
|
||||
apiVersion: traefik.io/v1alpha1
|
||||
kind: IngressRoute
|
||||
metadata:
|
||||
name: echo-https
|
||||
namespace: futureporn
|
||||
annotations:
|
||||
cert-manager.io/cluster-issuer: "{{ .Values.certManager.issuer }}"
|
||||
spec:
|
||||
entryPoints:
|
||||
- websecure
|
||||
routes:
|
||||
- match: Host(`echo.fp.sbtp.xyz`)
|
||||
kind: Rule
|
||||
middlewares:
|
||||
- name: redirect
|
||||
namespace: futureporn
|
||||
services:
|
||||
- name: echo
|
||||
namespace: futureporn
|
||||
port: 5678
|
||||
tls:
|
||||
secretName: echo-tls
|
@@ -13,8 +13,6 @@ spec:
          env:
            - name: HOSTNAME
              value: 0.0.0.0
-           - name: NEXT_PUBLIC_UPPY_COMPANION_URL
-             value: "{{ .Values.uppy.hostname }}"
          ports:
            - name: web
              containerPort: 3000
@@ -33,7 +33,7 @@ spec:
            - name: CDN_BUCKET_URL
              value: "{{ .Values.scout.cdnBucketUrl }}"
            - name: STRAPI_URL
-             value: https://strapi.fp.sbtp.xyz
+             value: https://strapi.piko.sbtp.xyz
            - name: S3_BUCKET_APPLICATION_KEY
              valueFrom:
                secretKeyRef:
|
||||
# - name: CDN_BUCKET_URL
|
||||
# value: "{{ .Values.scout.cdnBucketUrl }}"
|
||||
# - name: STRAPI_URL
|
||||
# value: https://strapi.fp.sbtp.xyz
|
||||
# value: https://strapi.piko.sbtp.xyz
|
||||
# - name: SCOUT_NITTER_ACCESS_KEY
|
||||
# valueFrom:
|
||||
# secretKeyRef:
|
||||
|
@@ -116,13 +116,13 @@ spec:
              secretKeyRef:
                name: strapi
                key: sendgridApiKey
-           - name: STRAPI_URL
-             value: "{{ .Values.strapi.url }}"
            - name: TRANSFER_TOKEN_SALT
              valueFrom:
                secretKeyRef:
                  name: strapi
                  key: transferTokenSalt
+           - name: STRAPI_URL
+             value: "{{ .Values.strapi.url }}"
            - name: PORT
              value: "{{ .Values.strapi.port }}"
          resources:
@ -1,219 +0,0 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: uppy
|
||||
namespace: futureporn
|
||||
spec:
|
||||
replicas: 2
|
||||
minReadySeconds: 5
|
||||
strategy:
|
||||
type: RollingUpdate
|
||||
rollingUpdate:
|
||||
maxSurge: 2
|
||||
maxUnavailable: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: uppy
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: uppy
|
||||
spec:
|
||||
containers:
|
||||
- name: uppy
|
||||
image: docker.io/transloadit/companion:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
resources:
|
||||
limits:
|
||||
memory: 150Mi
|
||||
requests:
|
||||
memory: 100Mi
|
||||
env:
|
||||
- name: COMPANION_STREAMING_UPLOAD
|
||||
value: "true"
|
||||
- name: COMPANION_CLIENT_ORIGINS
|
||||
value: "{{ .Values.uppy.clientOrigins }}"
|
||||
- name: COMPANION_DATADIR
|
||||
value: /tmp/
|
||||
- name: COMPANION_DOMAIN
|
||||
value: "{{ .Values.uppy.domain }}"
|
||||
- name: COMPANION_PROTOCOL
|
||||
value: https
|
||||
- name: COMPANION_REDIS_URL
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: uppy
|
||||
key: redisUrl
|
||||
- name: COMPANION_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: uppy
|
||||
key: secret
|
||||
- name: COMPANION_PREAUTH_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: uppy
|
||||
key: preAuthSecret
|
||||
- name: COMPANION_DROPBOX_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: uppy
|
||||
key: dropboxKey
|
||||
- name: COMPANION_DROPBOX_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: uppy
|
||||
key: dropboxSecret
|
||||
- name: COMPANION_BOX_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: uppy
|
||||
key: boxKey
|
||||
- name: COMPANION_BOX_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: uppy
|
||||
key: boxSecret
|
||||
- name: COMPANION_GOOGLE_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: uppy
|
||||
key: googleKey
|
||||
- name: COMPANION_GOOGLE_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: uppy
|
||||
key: googleSecret
|
||||
- name: COMPANION_AWS_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: uppy
|
||||
key: awsKey
|
||||
- name: COMPANION_AWS_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: uppy
|
||||
key: awsSecret
|
||||
- name: COMPANION_AWS_BUCKET
|
||||
value: "{{ .Values.uppy.s3.bucket }}"
|
||||
- name: COMPANION_AWS_REGION
|
||||
value: "{{ .Values.uppy.s3.region }}"
|
||||
- name: COMPANION_AWS_PREFIX
|
||||
value: "{{ .Values.uppy.s3.prefix }}"
|
||||
|
||||
## COMPANION_OAUTH_DOMAIN is only necessary if using a different domain per each uppy pod.
|
||||
## We don't need this because we are load balancing the pods so they all use the same domain name.
|
||||
## @see https://github.com/transloadit/uppy/blob/f4dd3d534ff4378f3a2f73fe327358bcbde74059/docs/companion.md#server
|
||||
- name: COMPANION_OAUTH_DOMAIN
|
||||
value: ''
|
||||
- name: COMPANION_PATH
|
||||
value: ''
|
||||
- name: COMPANION_IMPLICIT_PATH
|
||||
value: ''
|
||||
- name: COMPANION_DOMAINS
|
||||
value: ''
|
||||
## https://uppy.io/docs/companion/#uploadurls-companion_upload_urls
|
||||
- name: COMPANION_UPLOAD_URLS
|
||||
value: "{{ .Values.uppy.uploadUrls }}"
|
||||
ports:
|
||||
- containerPort: 3020
|
||||
volumeMounts:
|
||||
- name: uppy-data
|
||||
mountPath: /mnt/uppy-data
|
||||
volumes:
|
||||
- name: uppy-data
|
||||
emptyDir: {}
|
||||
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: uppy
|
||||
namespace: futureporn
|
||||
annotations:
|
||||
external-dns.alpha.kubernetes.io/hostname: "{{ .Values.uppy.hostname }}"
|
||||
chisel-operator.io/exit-node-name: "uppy-exit-node"
|
||||
spec:
|
||||
type: LoadBalancer
|
||||
ports:
|
||||
- port: 3020
|
||||
targetPort: 3020
|
||||
protocol: TCP
|
||||
selector:
|
||||
app: uppy
|
||||
|
||||
|
||||
{{ if eq .Values.environment "development" }}
|
||||
---
|
||||
apiVersion: chisel-operator.io/v1
|
||||
kind: ExitNode
|
||||
metadata:
|
||||
name: uppy-exit-node
|
||||
namespace: futureporn
|
||||
spec:
|
||||
host: "{{ .Values.chisel.exitNodeIp }}"
|
||||
port: 9090
|
||||
auth: chisel
|
||||
{{ end }}
|
||||
|
||||
|
||||
|
||||
---
|
||||
apiVersion: traefik.io/v1alpha1
|
||||
kind: IngressRoute
|
||||
metadata:
|
||||
name: uppy-http
|
||||
namespace: futureporn
|
||||
spec:
|
||||
entryPoints:
|
||||
- web
|
||||
routes:
|
||||
- match: Host(`uppy.fp.sbtp.xyz`)
|
||||
kind: Rule
|
||||
middlewares:
|
||||
- name: redirect
|
||||
namespace: futureporn
|
||||
services:
|
||||
- name: uppy
|
||||
namespace: futureporn
|
||||
port: 3020
|
||||
|
||||
---
|
||||
apiVersion: traefik.io/v1alpha1
|
||||
kind: IngressRoute
|
||||
metadata:
|
||||
name: uppy-https
|
||||
namespace: futureporn
|
||||
annotations:
|
||||
cert-manager.io/cluster-issuer: "{{ .Values.certManager.issuer }}"
|
||||
spec:
|
||||
entryPoints:
|
||||
- websecure
|
||||
routes:
|
||||
- match: Host(`uppy.fp.sbtp.xyz`)
|
||||
kind: Rule
|
||||
services:
|
||||
- name: uppy
|
||||
namespace: futureporn
|
||||
port: 3020
|
||||
tls:
|
||||
secretName: uppy-tls
|
||||
|
||||
|
||||
# Welcome to Companion v4.15.1
|
||||
# ===================================
|
||||
|
||||
# Congratulations on setting up Companion! Thanks for joining our cause, you have taken
|
||||
# the first step towards the future of file uploading! We
|
||||
# hope you are as excited about this as we are!
|
||||
|
||||
# While you did an awesome job on getting Companion running, this is just the welcome
|
||||
# message, so let's talk about the places that really matter:
|
||||
|
||||
# - Be sure to add the following URLs as your Oauth redirect uris on their corresponding developer interfaces:
|
||||
# https://uppy.fp.sbtp.xyz/drive/redirect, https://uppy.fp.sbtp.xyz/googlephotos/redirect, https://uppy.fp.sbtp.xyz/dropbox/redirect, https://uppy.fp.sbtp.xyz/box/redirect, https://uppy.fp.sbtp.xyz/instagram/redirect, https://uppy.fp.sbtp.xyz/facebook/redirect, https://uppy.fp.sbtp.xyz/onedrive/redirect, https://uppy.fp.sbtp.xyz/zoom/redirect, https://uppy.fp.sbtp.xyz/unsplash/redirect
|
||||
# - The URL https://uppy.fp.sbtp.xyz/metrics is available for statistics to keep Companion running smoothly
|
||||
# - https://github.com/transloadit/uppy/issues - report your bugs here
|
||||
|
||||
# So quit lollygagging, start uploading and experience the future!
|
@@ -34,16 +34,5 @@ chisel:
  exitNodeIp: "155.138.254.201"
echo:
  hostname: echo.fp.sbtp.xyz
-uppy:
-  hostname: uppy.fp.sbtp.xyz
-  imageName: fp/uppy
-  s3:
-    endpoint: s3.us-west-000.backblazeb2.com
-    bucket: futureporn-usc
-    region: us-west-000
-    prefix: s3
-  clientOrigins: next.fp.sbtp.xyz
-  domain: uppy.fp.sbtp.xyz
-  uploadUrls: https://uppy.fp.sbtp.xyz/files
certManager:
  issuer: letsencrypt-staging
@@ -18,7 +18,6 @@ ENV NEXT_PUBLIC_SITE_URL ${NEXT_PUBLIC_SITE_URL}
ENV NEXT_PUBLIC_STRAPI_URL ${NEXT_PUBLIC_STRAPI_URL}
-ENV NEXT_PUBLIC_UPPY_COMPANION_URL ${NEXT_PUBLIC_UPPY_COMPANION_URL}
ENV NEXT_TELEMETRY_DISABLED 1
ENV NODE_EXTRA_CA_CERTS "/app/letsencrypt-stg-root-x1.pem"
COPY pnpm-lock.yaml ./
RUN pnpm fetch
COPY ./packages/next /app
@@ -1,42 +0,0 @@
## This dockerfile creates multiple docker images.
## Because we are using monorepo with pnpm workspaces, we have many npm packages in this single git repo.
## Some of these packages in the monorepo depend on other packages in the monorepo.
## In order to build these individual packages which inter-depend on eachother,
## all of the dependent code must be present in the build.
##
## Below, COPY . /usr/src/app copies all the app code into the build context.
## Because we use Tilt, only specific path directories are visible to docker. This helps with build performance.
## When a new package becomes a dependency, we need to update our Tiltfile to include the package directory.
## Tiltfile example of docker_build() args which include `scout` and `next` packages.
## `only=['./pnpm-lock.yaml', './package.json', './packages/scout', './packages/next'],`
##
##

FROM node:20 AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable

FROM base AS build
ENV NODE_ENV=production
COPY . /usr/src/app
WORKDIR /usr/src/app
RUN mkdir -p /prod/scout
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
RUN pnpm deploy --filter=scout --prod /prod/scout
# RUN pnpm deploy --filter=bot --prod /prod/bot


FROM base AS scout-manager
COPY --from=build /prod/scout /app
WORKDIR /app
ENTRYPOINT ["pnpm"]
CMD ["run", "start:manager"]

FROM base AS scout-worker
COPY --from=build /prod/scout /app
COPY --from=build /usr/src/app/certs/letsencrypt-stg-root-x1.pem
ENV NODE_EXTRA_CA_CERTS "/app/certs/letsencrypt-stg-root-x1.pem"
WORKDIR /app
ENTRYPOINT ["pnpm"]
CMD ["run", "start:worker"]
d.realtime.dockerfile (new file, 14 lines)
@@ -0,0 +1,14 @@
FROM node:20-alpine
WORKDIR /app
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
RUN apk update

ENV NODE_ENV=production
COPY pnpm-lock.yaml ./
RUN pnpm fetch
COPY ./packages/realtime /app

ENTRYPOINT ["pnpm"]
CMD ["run", "start"]
@@ -5,16 +5,22 @@ RUN corepack enable

 FROM base AS build
 ENV NODE_ENV=production
-COPY ./packages/bot /usr/src/app
+COPY . /usr/src/app
 WORKDIR /usr/src/app
 RUN mkdir -p /prod/scout
 RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
-RUN pnpm deploy --filter=bot --prod /prod/scout
+RUN pnpm deploy --filter=scout --prod /prod/scout



-FROM base AS bot
-COPY --from=build /prod/bot /app
+FROM base AS manager
+COPY --from=build /prod/scout /app
 WORKDIR /app
 ENTRYPOINT ["pnpm"]
-CMD ["run", "start"]
+CMD ["run", "start:manager"]
+
+FROM base AS worker
+COPY --from=build /prod/scout /app
+WORKDIR /app
+ENTRYPOINT ["pnpm"]
+CMD ["run", "start:worker"]
d.strapi-app.dockerfile (new file, 19 lines)
@@ -0,0 +1,19 @@
FROM node:18-alpine3.18
# Installing libvips-dev for sharp Compatibility
RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev nasm bash vips-dev git
ARG NODE_ENV=development
ENV NODE_ENV=${NODE_ENV}

WORKDIR /opt/
COPY ./packages/strapi-app/package.json ./packages/strapi-app/yarn.lock ./
RUN yarn global add node-gyp
RUN yarn config set network-timeout 600000 -g && yarn install
ENV PATH /opt/node_modules/.bin:$PATH

WORKDIR /opt/app
COPY ./packages/strapi-app/ .
RUN chown -R node:node /opt/app
USER node
RUN ["yarn", "build"]
EXPOSE 1338
CMD ["yarn", "develop", "--debug"]
@@ -1,22 +0,0 @@
FROM node:18
# Installing libvips-dev for sharp Compatibility
# RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev nasm bash vips-dev git
RUN corepack enable
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
ARG NODE_ENV=development
ENV NODE_ENV=${NODE_ENV}

WORKDIR /opt/
COPY ./packages/strapi/package.json ./packages/strapi/pnpm-lock.yaml ./
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install -g node-gyp
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
ENV PATH /opt/node_modules/.bin:$PATH

WORKDIR /opt/app
COPY ./packages/strapi/. .
RUN chown -R node:node /opt/app
USER node
RUN ["pnpm", "run", "build"]
EXPOSE 1339
CMD ["pnpm", "run", "develop"]
@@ -7,4 +7,4 @@ COPY ./packages/strapi/package.json ./packages/strapi/pnpm-lock.yaml .
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
COPY ./packages/strapi/ .
RUN ["pnpm", "run", "build"]
-CMD ["pnpm", "run", "develop"]
+CMD ["pnpm", "run", "dev"]
d.strapi.dockerfile.idk2 (new file, 24 lines)
@@ -0,0 +1,24 @@
FROM node:20-alpine as base
WORKDIR /app
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev vips-dev libc6-compat git nasm bash gcompat

FROM base AS install
COPY ./packages/strapi/pnpm-lock.yaml ./packages/strapi/package.json ./
RUN pnpm install --prod --shamefully-hoist && pnpm run build
COPY ./packages/strapi .
RUN chown -R node:node /app
USER node


FROM install AS dev
ENV NODE_ENV=development
ENTRYPOINT ["pnpm"]
CMD ["run", "dev"]

FROM install AS release
ENV NODE_ENV=production
ENTRYPOINT ["pnpm"]
CMD ["run", "start"]
d.strapi.dockerfile.yarn (new file, 20 lines)
@@ -0,0 +1,20 @@
FROM node:18-alpine3.18
RUN echo "do a rebuild, yuou fucking shit!"
# Installing libvips-dev for sharp Compatibility
RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev nasm bash vips-dev git
ARG NODE_ENV=development
ENV NODE_ENV=${NODE_ENV}

WORKDIR /opt/
COPY package.json yarn.lock ./
RUN yarn global add node-gyp
RUN yarn config set network-timeout 600000 -g && yarn install
ENV PATH /opt/node_modules/.bin:$PATH

WORKDIR /opt/app
COPY . .
RUN chown -R node:node /opt/app
USER node
RUN ["yarn", "build"]
EXPOSE 1339
CMD ["yarn", "start"]
@@ -1,3 +0,0 @@
# Futureporn flux

Gitops https://fluxcd.io/flux/get-started/
@@ -1,5 +0,0 @@
# Futureporn node packages

Each folder here is an individual node package, each of which can reference each other. One reason we do this is to share utility functions between packages.

See https://pnpm.io/workspaces
@@ -1,3 +0,0 @@
# bot

A.K.A. FutureButt, the discord bot that integrates into FP backend.
@@ -1,3 +0,0 @@
# infra

This module contains scripts that help with miscellaneous infrastructure tasks like cleaning up unused resources on Vultr Kubernetes Engine.
@@ -1,5 +1,5 @@
 {
-  "name": "infra",
+  "name": "scripts",
   "type": "module",
   "version": "1.0.0",
   "description": "",
packages/link2cid/.dockerignore (new file, 3 lines)
@@ -0,0 +1,3 @@
node_modules
.env
*~
packages/link2cid/.env.example (new file, 3 lines)
@@ -0,0 +1,3 @@
PORT=3030
IPFS_URL=http://localhost:5001
API_KEY=changeme
144
packages/link2cid/.gitignore
vendored
Normal file
144
packages/link2cid/.gitignore
vendored
Normal file
@ -0,0 +1,144 @@
|
||||
# Created by https://www.toptal.com/developers/gitignore/api/node
|
||||
# Edit at https://www.toptal.com/developers/gitignore?templates=node
|
||||
|
||||
### Node ###
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
.pnpm-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# Snowpack dependency directory (https://snowpack.dev/)
|
||||
web_modules/
|
||||
|
||||
# TypeScript cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
.eslintcache
|
||||
|
||||
# Optional stylelint cache
|
||||
.stylelintcache
|
||||
|
||||
# Microbundle cache
|
||||
.rpt2_cache/
|
||||
.rts2_cache_cjs/
|
||||
.rts2_cache_es/
|
||||
.rts2_cache_umd/
|
||||
|
||||
# Optional REPL history
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variable files
|
||||
.env
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.local
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
.cache
|
||||
.parcel-cache
|
||||
|
||||
# Next.js build output
|
||||
.next
|
||||
out
|
||||
|
||||
# Nuxt.js build / generate output
|
||||
.nuxt
|
||||
dist
|
||||
|
||||
# Gatsby files
|
||||
.cache/
|
||||
# Comment in the public line in if your project uses Gatsby and not Next.js
|
||||
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||
# public
|
||||
|
||||
# vuepress build output
|
||||
.vuepress/dist
|
||||
|
||||
# vuepress v2.x temp and cache directory
|
||||
.temp
|
||||
|
||||
# Docusaurus cache and generated files
|
||||
.docusaurus
|
||||
|
||||
# Serverless directories
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
.dynamodb/
|
||||
|
||||
# TernJS port file
|
||||
.tern-port
|
||||
|
||||
# Stores VSCode versions used for testing VSCode extensions
|
||||
.vscode-test
|
||||
|
||||
# yarn v2
|
||||
.yarn/cache
|
||||
.yarn/unplugged
|
||||
.yarn/build-state.yml
|
||||
.yarn/install-state.gz
|
||||
.pnp.*
|
||||
|
||||
### Node Patch ###
|
||||
# Serverless Webpack directories
|
||||
.webpack/
|
||||
|
||||
# Optional stylelint cache
|
||||
|
||||
# SvelteKit build / generate output
|
||||
.svelte-kit
|
||||
|
||||
# End of https://www.toptal.com/developers/gitignore/api/node
|
packages/link2cid/.npmrc (new file, 3 lines)
@@ -0,0 +1,3 @@
engine-strict=true
use-node-version=20.13.1
node-version=20.13.1
packages/link2cid/Dockerfile (new file, 20 lines)
@@ -0,0 +1,20 @@
# Reference-- https://pnpm.io/docker

FROM node:20-alpine AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
WORKDIR /app
COPY ./package.json /app
EXPOSE 3939

FROM base AS dev
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install
CMD ["pnpm", "run", "dev"]

FROM base
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --prod
COPY ./index.js /app
ENTRYPOINT ["pnpm"]
CMD ["start"]
packages/link2cid/README.md
Normal file
90
packages/link2cid/README.md
Normal file
@ -0,0 +1,90 @@
|
||||
# link2cid
|
||||
|
||||
## Motivation
|
||||
|
||||
I wish I could give [kubo](https://github.com/ipfs/kubo) or [IPFS cluster](https://ipfscluster.io/) a URI to a file and then they would download the file and add to ipfs, returning me a [CID](https://docs.ipfs.tech/concepts/glossary/#cid).
|
||||
|
||||
However, neither kubo nor IPFS cluster can do this.
|
||||
|
||||
link2cid solves this issue with a REST API for adding a file at `url` to IPFS.
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
Configure environment
|
||||
|
||||
Create a `.env` file. See `.env.example` for an example. Important environment variables are `API_KEY`, `PORT`, and `IPFS_URL`.
|
||||
|
||||
Install and run
|
||||
|
||||
```bash
|
||||
pnpm install
|
||||
pnpm start
|
||||
```
|
||||
|
||||
Make a GET REST request to `/add` with `url` as a query parameter. Expect a [SSE](https://wikipedia.org/wiki/Server-sent_events) response.
|
||||
|
||||
## dokku
|
||||
|
||||
dokku builder-dockerfile:set link2cid dockerfile-path link2cid.Dockerfile
|
||||
|
||||
|
||||
### Examples
|
||||
|
||||
#### [HTTPIE](https://httpie.io)
|
||||
|
||||
```bash
|
||||
http -A bearer -a $API_KEY --stream 'http://localhost:3939/add?url=https://upload.wikimedia.org/wikipedia/commons/7/70/Example.png' Accept:text/event-stream
|
||||
HTTP/1.1 200 OK
|
||||
Access-Control-Allow-Origin: *
|
||||
Cache-Control: no-cache
|
||||
Connection: keep-alive
|
||||
Content-Type: text/event-stream; charset=utf-8
|
||||
Date: Thu, 21 Dec 2023 11:20:24 GMT
|
||||
Transfer-Encoding: identity
|
||||
X-Powered-By: Express
|
||||
|
||||
:ok
|
||||
|
||||
event: dlProgress
|
||||
data: {
|
||||
"percent": 100
|
||||
}
|
||||
|
||||
event: addProgress
|
||||
data: {
|
||||
"percent": 100
|
||||
}
|
||||
|
||||
event: end
|
||||
data: {
|
||||
"cid": "bafkreidj3jo7efguloaixz6vgivljlmowagagjtqv4yanyqgty2hrvg6km"
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
#### Javascript
|
||||
|
||||
@todo this is incomplete/untested
|
||||
|
||||
```js
|
||||
await fetch('http://localhost:3939/add?url=https://upload.wikimedia.org/wikipedia/commons/7/70/Example.png', {
|
||||
headers: {
|
||||
'accept': 'text/event-stream',
|
||||
'authorization': `Bearer ${API_KEY}`
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
|
||||
## Dev notes
|
||||
|
||||
### Generate API_KEY
|
||||
|
||||
```js
|
||||
require('crypto').randomBytes(64).toString('hex')
|
||||
```
|
||||
|
||||
### `TypeError: data.split is not a function`
|
||||
|
||||
If you see this error, make sure data in SSE event payload is a string, not a number.
|
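The README above describes `/add` as an SSE endpoint and leaves its Javascript example marked incomplete/untested. Below is a minimal sketch of consuming that stream from Node, assuming the endpoint, event names (`dlProgress`, `addProgress`, `end`), port, and `API_KEY` shown in the README; the line scanning is illustrative rather than a full SSE parser, and the helper name is hypothetical.

```js
// Sketch: consume the /add SSE stream described in the README above.
// Endpoint, port, and API_KEY follow the README; addByUrl is a hypothetical helper.
require('dotenv').config();

async function addByUrl(url) {
  const res = await fetch(`http://localhost:3939/add?url=${encodeURIComponent(url)}`, {
    headers: {
      accept: 'text/event-stream',
      authorization: `Bearer ${process.env.API_KEY}`
    }
  });
  if (!res.ok) throw new Error(`HTTP ${res.status}`);

  const decoder = new TextDecoder();
  for await (const chunk of res.body) {
    const text = decoder.decode(chunk, { stream: true });
    process.stdout.write(text); // mirror dlProgress/addProgress events as they arrive
    const match = text.match(/"cid":\s*"([^"]+)"/); // the end event carries the CID
    if (match) return match[1];
  }
  throw new Error('stream ended without an end event');
}

// usage:
// addByUrl('https://upload.wikimedia.org/wikipedia/commons/7/70/Example.png').then(console.log)
```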
packages/link2cid/app.json (new file, 14 lines)
@@ -0,0 +1,14 @@
{
  "healthchecks": {
    "web": [
      {
        "type": "startup",
        "name": "web check",
        "description": "Checking for expecting string at /health",
        "path": "/health",
        "content": "link2cid",
        "attempts": 3
      }
    ]
  }
}
packages/link2cid/index.js (new file, 9 lines)
@@ -0,0 +1,9 @@
require('dotenv').config()
const app = require('./src/app.js')
const port = process.env.PORT || 3000
const version = require('./package.json').version


app.listen(port, () => {
  console.log(`link2cid ${version} listening on port ${port}`)
})
packages/link2cid/package.json (new file, 34 lines)
@@ -0,0 +1,34 @@
{
  "name": "@futureporn/link2cid",
  "version": "4.3.0",
  "description": "REST API for adding files via URL to IPFS",
  "main": "index.js",
  "scripts": {
    "test": "mocha \"./src/**/*.spec.js\"",
    "dev": "pnpm nodemon ./index.js",
    "start": "node index.js"
  },
  "keywords": [
    "IPFS",
    "CID",
    "HTTP",
    "REST"
  ],
  "author": "@CJ_Clippy",
  "license": "Unlicense",
  "dependencies": {
    "@paralleldrive/cuid2": "^2.2.2",
    "@types/express": "^4.17.21",
    "better-queue": "^3.8.12",
    "body-parser": "^1.20.2",
    "cors": "^2.8.5",
    "dotenv": "^16.3.1",
    "express": "^4.18.2"
  },
  "devDependencies": {
    "chai": "^5.1.0",
    "mocha": "^10.4.0",
    "nodemon": "^3.0.3",
    "supertest": "^6.3.4"
  }
}
packages/link2cid/pnpm-lock.yaml (generated, new file, 1553 lines)
File diff suppressed because it is too large.
packages/link2cid/src/app.js (new file, 33 lines)
@@ -0,0 +1,33 @@
'use strict';

require('dotenv').config();
const express = require('express');
const bodyParser = require('body-parser');
const cors = require('cors');
const fs = require('fs');
const fsp = require('fs/promises');
const { openAsBlob } = require('node:fs');
const { rm, stat } = require('fs/promises');
const os = require('os');
const path = require('path');
const { authenticate } = require('./middleware/auth.js')
const { createTask, readTask, deleteTask } = require('./models/task.js')
const readHeath = require('./models/health.js')
const store = require('./middleware/store.js');
const queue = require('./middleware/queue.js');

const app = express();
app.use(store);
app.use(queue);
app.use(cors());
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));


app.post('/task', authenticate, createTask)
app.get('/task', readTask)
app.delete('/task', authenticate, deleteTask)
app.get('/health', readHeath)


module.exports = app
109
packages/link2cid/src/app.spec.js
Normal file
109
packages/link2cid/src/app.spec.js
Normal file
@ -0,0 +1,109 @@
|
||||
|
||||
const app = require('./app.js')
|
||||
const request = require('supertest')
|
||||
const qs = require('querystring')
|
||||
require('dotenv').config()
|
||||
|
||||
describe('app', function () {
|
||||
it('should exist', function (done) {
|
||||
if (!app?.mountpath) throw new Error('app doesnt exist');
|
||||
done()
|
||||
})
|
||||
|
||||
|
||||
describe('/health', function () {
|
||||
it('should be publicly readable', function (done) {
|
||||
request(app)
|
||||
.get('/health')
|
||||
.set('Accept', 'text/html')
|
||||
.expect('Content-Type', /text/)
|
||||
.expect(/piss/)
|
||||
.expect(200, done)
|
||||
})
|
||||
})
|
||||
|
||||
describe('/task', function () {
|
||||
describe('POST', function () {
|
||||
it('should create a task', function (done) {
|
||||
request(app)
|
||||
.post('/task')
|
||||
.set('Authorization', `Bearer ${process.env.API_KEY}`)
|
||||
.set('Accept', 'application/json')
|
||||
.send({
|
||||
url: 'https://futureporn-b2.b-cdn.net/projekt-melody.jpg'
|
||||
})
|
||||
.expect('Content-Type', /json/)
|
||||
.expect((res) => {
|
||||
if (!res.body?.data) throw new Error('response body was missing data')
|
||||
if (!res.body?.data?.id) throw new Error('response body was missing id')
|
||||
return true
|
||||
})
|
||||
.expect(200, done)
|
||||
})
|
||||
})
|
||||
describe('GET', function () {
|
||||
it('should show all tasks specifications', async function () {
|
||||
await request(app).post('/task').set('Authorization', `Bearer ${process.env.API_KEY}`).send({ url: 'https://example.com/my.jpg' })
|
||||
await request(app).post('/task').set('Authorization', `Bearer ${process.env.API_KEY}`).send({ url: 'https://example.com/your.png' })
|
||||
return request(app)
|
||||
.get(`/task`)
|
||||
.set('Authorization', `Bearer ${process.env.API_KEY}`)
|
||||
.set('Accept', 'application/json')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect((res) => {
|
||||
if (!res?.body?.data) throw new Error('there was no data in response')
|
||||
})
|
||||
.expect(200)
|
||||
})
|
||||
it('should accept task id as query param and return task specification', function (done) {
|
||||
const seed = request(app).post('/task').set('Authorization', `Bearer ${process.env.API_KEY}`).send({ url: 'https://example.com/z.jpg' })
|
||||
seed.then((res) => {
|
||||
|
||||
const query = qs.stringify({
|
||||
id: res.body.data.id
|
||||
})
|
||||
request(app)
|
||||
.get(`/task?${query}`)
|
||||
.set('Accept', 'application/json')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect((res) => {
|
||||
if (res?.body?.error) throw new Error('there was an error in the response: '+res.body?.message)
|
||||
if (!res?.body?.data?.url) throw new Error('data.url was missing')
|
||||
if (!res?.body?.data?.createdAt) throw new Error('data.createdAt was missing')
|
||||
return true
|
||||
})
|
||||
.expect(200, done)
|
||||
})
|
||||
})
|
||||
it('should show all tasks by default', function (done) {
|
||||
request(app)
|
||||
.get('/task')
|
||||
.set('Accept', 'application/json')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect((res) => {
|
||||
if (res.body?.error) throw new Error('there was an error in the response'+res.error)
|
||||
if (!res.body?.data) throw new Error('data was missing')
|
||||
return true
|
||||
})
|
||||
.expect(200, done)
|
||||
|
||||
})
|
||||
})
|
||||
describe('DELETE', function () {
|
||||
const query = qs.stringify({
|
||||
id: 'awejf9wiejf9we'
|
||||
})
|
||||
it('should delete a single task', function (done) {
|
||||
request(app)
|
||||
.delete(`/task?${query}`)
|
||||
.set('Authorization', `Bearer ${process.env.API_KEY}`)
|
||||
.set('Accept', 'application/json')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200, done);
|
||||
});
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
|
||||
})
|
packages/link2cid/src/fixtures/hello-worlds.txt (new file, 1 line)
@@ -0,0 +1 @@
hello worlds
packages/link2cid/src/middleware/auth.js (new file, 16 lines)
@@ -0,0 +1,16 @@

module.exports.authenticate = function authenticate(req, res, next) {
  const bearerToken = req.headers?.authorization?.split(' ').at(1);
  if (!bearerToken) {
    const msg = `authorization bearer token was missing from request headers`;
    console.error(msg);
    return res.status(401).json({ error: true, message: msg });
  }
  if (bearerToken !== process.env.API_KEY) {
    const msg = 'INCORRECT API_KEY (wrong token)';
    console.error(msg);
    return res.status(403).json({ error: true, message: msg });
  } else {
    next();
  }
}
packages/link2cid/src/middleware/queue.js (new file, 21 lines)
@@ -0,0 +1,21 @@
const Queue = require('better-queue');
const taskProcess = require('../utils/taskProcess.js');

const options = {
  id: 'id',
  maxRetries: 3,
  concurrent: 1
  // @todo better-queue has batching and concurrency. might be useful to implement in the future
  // @see https://github.com/diamondio/better-queue?tab=readme-ov-file#queue-management
}

let q = new Queue(taskProcess, options)


// Middleware function to attach the queue to each request
const queueMiddleware = (req, res, next) => {
  req.queue = q;
  next();
};

module.exports = queueMiddleware
packages/link2cid/src/middleware/store.js (new file, 10 lines)
@@ -0,0 +1,10 @@
const store = {
  tasks: {}
}

const storeMiddleware = (req, res, next) => {
  req.store = store
  next();
};

module.exports = storeMiddleware;
packages/link2cid/src/models/health.js (new file, 3 lines)
@@ -0,0 +1,3 @@
module.exports = function readHealth (req, res) {
  return res.send('**link2cid pisses on the floor**')
}
packages/link2cid/src/models/task.js (new file, 59 lines)
@@ -0,0 +1,59 @@
const { createId } = require('@paralleldrive/cuid2');
const { getTmpFilePath } = require('../utils/paths.js');
const fsp = require('fs/promises');


module.exports.createTask = function createTask (req, res) {

  const url = req.body.url
  const task = {
    id: createId(),
    url: url,
    filePath: getTmpFilePath(url),
    fileSize: null,
    createdAt: new Date().toISOString(),
    cid: null,
    downloadProgress: null,
    addProgress: null
  }

  if (!req?.body?.url) return res.status(400).json({ error: true, message: 'request body was missing a url' });

  req.store.tasks[task.id] = task;
  req.queue.push(task, function (err, result) {
    if (err) throw err;
    console.log('the following is the result of the queued task being complete')
    console.log(result)
  })

  return res.json({ error: false, data: task })

}
module.exports.readTask = function readTask (req, res) {
  const id = req?.query?.id

  // If we get an id in the query, show the one task.
  // Otherwise, we show all tasks.
  if (!!id) {
    const task = req.store.tasks[id]
    if (!task) return res.json({ error: true, message: 'there was no task in the store with that id' });
    return res.json({ error: false, data: task })
  } else {
    const tasks = req.store.tasks
    return res.json({ error: false, data: tasks })
  }

}
module.exports.deleteTask = async function deleteTask (req, res) {
  const id = req?.query?.id;
  const task = req.store.tasks[id];

  try {
    if (task?.filePath) await fsp.unlink(task.filePath);
  } catch (err) {}
  delete req.store.tasks[id];
  return res.json({ error: false, message: 'task deleted' });
}
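Taken together with app.js and auth.js above, createTask is reached via an authenticated POST /task, and readTask can be polled until the queue worker fills in the CID. A hedged usage sketch follows; the host, port (PORT from .env.example), and the image URL (borrowed from the spec files) are assumptions, not part of the package.

```js
// Sketch: exercise the /task endpoints defined in app.js with the handlers above.
// Base URL and example image URL are assumptions; auth matches middleware/auth.js.
require('dotenv').config();

const base = 'http://localhost:3030'; // PORT from .env.example
const headers = {
  'content-type': 'application/json',
  authorization: `Bearer ${process.env.API_KEY}`
};

async function main() {
  // createTask: queue a download + IPFS add for a URL
  const created = await fetch(`${base}/task`, {
    method: 'POST',
    headers,
    body: JSON.stringify({ url: 'https://futureporn-b2.b-cdn.net/projekt-melody.jpg' })
  }).then(r => r.json());
  console.log('created task', created.data.id);

  // readTask: poll until the queue worker has filled in the CID
  const poll = async () => {
    const res = await fetch(`${base}/task?id=${created.data.id}`, { headers }).then(r => r.json());
    if (res.data?.cid) return res.data.cid;
    await new Promise(resolve => setTimeout(resolve, 1000));
    return poll();
  };
  console.log('cid', await poll());
}

main().catch(console.error);
```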
packages/link2cid/src/utils/download.js (new file, 20 lines)
@@ -0,0 +1,20 @@
const fs = require("fs");
const { Readable } = require('stream');
const { finished } = require('stream/promises');
const path = require("path");


/**
 * Download a file at url to local disk filePath
 * @param {String} url
 * @param {String} filePath
 *
 * greetz https://stackoverflow.com/a/51302466/1004931
 */
const download = (async (url, filePath) => {
  const res = await fetch(url);
  const fileStream = fs.createWriteStream(filePath, { flags: 'wx' });
  await finished(Readable.fromWeb(res.body).pipe(fileStream));
});

module.exports = download;
packages/link2cid/src/utils/download.spec.js (new file, 12 lines)
@@ -0,0 +1,12 @@
const download = require('./download.js');
const fsp = require('fs/promises');

describe('download', function () {
  it('should download a file from url', async function () {
    const testFilePath = '/tmp/pmel.jpg'
    try {
      await fsp.unlink(testFilePath)
    } catch (e) {}
    await download('https://futureporn-b2.b-cdn.net/projekt-melody.jpg', testFilePath)
  })
})
100
packages/link2cid/src/utils/ipfsAdd.js
Normal file
100
packages/link2cid/src/utils/ipfsAdd.js
Normal file
@ -0,0 +1,100 @@
|
||||
|
||||
require('dotenv').config();
|
||||
const { openAsBlob } = require('node:fs');
|
||||
const { rm, stat } = require('fs/promises');
|
||||
const path = require('path');
|
||||
|
||||
if (!process.env.IPFS_URL) throw new Error('IPFS_URL was missing in env');
|
||||
|
||||
|
||||
async function streamingPostFetch(
|
||||
url,
|
||||
formData,
|
||||
basename,
|
||||
filesize
|
||||
) {
|
||||
// console.log(`streamingPostFetch with url=${url}, formData=${formData.get('file')}, basename=${basename}, filesize=${filesize}`);
|
||||
|
||||
try {
|
||||
const res = await fetch(url, {
|
||||
method: 'POST',
|
||||
body: formData
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`HTTP error! Status-- ${res.status}`);
|
||||
}
|
||||
|
||||
const reader = res.body?.getReader();
|
||||
if (!reader) {
|
||||
throw new Error('Failed to get reader from response body');
|
||||
}
|
||||
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
|
||||
const chunk = new TextDecoder().decode(value);
|
||||
const lines = chunk.split('\n');
|
||||
for (const line of lines) {
|
||||
const trimmedLine = line.trim()
|
||||
if (!!trimmedLine) {
|
||||
// console.log(trimmedLine);
|
||||
const json = JSON.parse(trimmedLine);
|
||||
// console.log(`comparing json.Name=${json.Name} with basename=${basename}`);
|
||||
if (json.Name === basename && json.Hash && json.Size) {
|
||||
// this is the last chunk
|
||||
return json;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (done) {
|
||||
throw new Error('Response reader finished before receiving a CID which indicates a failiure.');
|
||||
}
|
||||
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('An error occurred:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async function getFormStuff(filePath) {
|
||||
const url = `${process.env.IPFS_URL}/api/v0/add?progress=false&cid-version=1&pin=true`;
|
||||
const blob = await openAsBlob(filePath);
|
||||
const basename = path.basename(filePath);
|
||||
const filesize = (await stat(filePath)).size;
|
||||
const formData = new FormData();
|
||||
return {
|
||||
url,
|
||||
blob,
|
||||
basename,
|
||||
filesize,
|
||||
formData
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @param {String} filePath
|
||||
* @returns {String} CID
|
||||
*/
|
||||
const ipfsAdd = async function (filePath) {
|
||||
|
||||
|
||||
const { url: kuboUrl, blob, basename, filesize, formData } = await getFormStuff(filePath);
|
||||
|
||||
|
||||
formData.append('file', blob, basename);
|
||||
const output = await streamingPostFetch(kuboUrl, formData, basename, filesize);
|
||||
if (!output?.Hash) throw new Error('No CID was received from remote IPFS node.');
|
||||
const cid = output.Hash;
|
||||
|
||||
|
||||
return cid
|
||||
}
|
||||
|
||||
module.exports = ipfsAdd;
|
||||
|
packages/link2cid/src/utils/ipfsAdd.spec.js (new file, 13 lines)
@@ -0,0 +1,13 @@
const ipfsAdd = require('./ipfsAdd.js')
const path = require('path');

describe('ipfs', function () {
  describe('ipfsAdd', function () {
    it('should add a file from disk to ipfs and return a {string} CID', async function () {
      const expectedCid = 'bafkreibxh3ly47pr3emvrqtax6ieq2ybom4ywyil3yurxnlwirtcvb5pfi'
      const file = path.join(__dirname, '..', 'fixtures', 'hello-worlds.txt')
      const cid = await ipfsAdd(file, { cidVersion: 1 })
      if (cid !== expectedCid) throw new Error(`expected ${cid} to match ${expectedCid}`)
    })
  })
})
packages/link2cid/src/utils/paths.js (new file, 12 lines)
@@ -0,0 +1,12 @@

const os = require('os');
const path = require('path');

const getTmpFilePath = function (url) {
  const timestamp = new Date().valueOf()
  return path.join(os.tmpdir(), timestamp+'-'+path.basename(url))
}

module.exports = {
  getTmpFilePath
}
packages/link2cid/src/utils/paths.spec.js (new file, 11 lines)
@@ -0,0 +1,11 @@
const paths = require('./paths.js')

describe('paths', function () {
  describe('getTmpFilePath', function () {
    it('should accept a url and receive a /tmp/<datestamp><basename> path on disk', function () {
      const url = 'https://example.com/my.jpg'
      const p = paths.getTmpFilePath(url)
      if (!/\/tmp\/\d+-my\.jpg/.test(p)) throw new Error(`expected ${p} to use format /tmp/<datestamp><basename>`)
    })
  })
})
packages/link2cid/src/utils/taskProcess.js (new file, 25 lines)
@@ -0,0 +1,25 @@
const os = require('os');
const path = require('path');
const downloadFile = require('./download');
const ipfsAdd = require('./ipfsAdd');


const taskProcess = async function (taskSpec, cb) {

  console.log('downloading')
  this.progressTask(1, 3, "downloading")
  await downloadFile(taskSpec.url, taskSpec.filePath)

  console.log('adding')
  this.progressTask(2, 3, "adding")

  const cid = await ipfsAdd(taskSpec.filePath)

  taskSpec.cid = cid

  cb(null, taskSpec)

}


module.exports = taskProcess;
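taskProcess is written to run under better-queue (queue.js passes it to `new Queue(...)`), which supplies `this.progressTask` and the completion callback. A standalone sketch that fakes that context can be handy when poking at the download-then-pin pipeline in isolation; the mock `this` below is an assumption for illustration, not part of better-queue's documented API.

```js
// Sketch: run the processor above outside the queue by faking the context
// better-queue normally provides. The progressTask stub here is an assumption.
const taskProcess = require('./taskProcess.js');
const { getTmpFilePath } = require('./paths.js');

const url = 'https://futureporn-b2.b-cdn.net/projekt-melody.jpg';
const task = { id: 'example', url, filePath: getTmpFilePath(url), cid: null };

taskProcess.call(
  { progressTask: (step, total, msg) => console.log(`[${step}/${total}] ${msg}`) },
  task,
  (err, result) => {
    if (err) throw err;
    console.log(`pinned ${result.url} as ${result.cid}`);
  }
).catch(console.error);
```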
@@ -1,2 +1,27 @@
# next
# futureporn-next

## Dev notes

When adding a new module via pnpm, docker compose needs to be restarted or something. I'm not sure the exact steps just yet, but I think it's something like the following.

```
pnpm add @uppy/react
docker compose build next
```

> fp-next | Module not found: Can't resolve '@uppy/react'

hmm... It looks like I'm missing something. Is the new package not getting into the container? Maybe it's something to do with the pnpm cache?

Must we build without cache?

docker compose build --no-cache next; docker compose up

YES. that solved the issue.

However, it's really slow to purge cache and download all packages once again. Is there a way we can speed this up?

* make it work
* make it right
* make it fast
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="content">
|
||||
<div className="content">
|
||||
|
||||
|
||||
<section className="hero">
|
||||
<div className="hero-body">
|
||||
<p className="title">About</p>
|
||||
<p>It's the worst feeling when a VOD disappears from the internet. It means you missed out, it's gone, and you may never experience what your peers got to take part in.</p>
|
||||
<p>Futureporn is created by fans, for fans. Missed a stream? We got you, bro.</p>
|
||||
<p>Together we can end 404s and create an everlasting archive of lewdtuber livestreams.</p>
|
||||
|
||||
<p>Futureporn is a fanmade public archive of NSFW R18 vtuber livestreams.</p>
|
||||
</div>
|
||||
|
||||
</section>
|
||||
|
||||
<div className="section">
|
||||
|
||||
<h1>Mission</h1>
|
||||
|
||||
<p>It's a lofty goal, but Futureporn aims to become <b>the Galaxy's best VTuber hentai site.</b></p>
|
||||
</div>
|
||||
|
||||
<div className="section">
|
||||
|
||||
<h2>How do we get there?</h2>
|
||||
|
||||
<h3>1. Solve the viewer's common problems</h3>
|
||||
|
||||
<p>Viewers want to watch livestream VODs on their own time. Futureporn collects vods from public streams, and caches them for later viewing.</p>
|
||||
|
||||
<p>Viewers want to find content that interests them. Futureporn enables vod tagging for easy browsing.</p>
|
||||
</div>
|
||||
|
||||
<div className="section">
|
||||
<h3>2. Solve the streamer's common problems</h3>
|
||||
|
||||
<p>Platforms like PH are not rising to the needs of VTubers. Instead of offering support and resources, they restrict and ban top talent.</p>
|
||||
|
||||
<p>Futureporn is different, embracing the medium and leveraging emerging technologies to amplify VTuber success.</p>
|
||||
</div>
|
||||
|
||||
<div className="section">
|
||||
<h3>3. Scale beyond Earth</h3>
|
||||
|
||||
<p>Piggybacking on <Link href="/faq#ipfs">IPFS</Link>' content-addressable capabilities and potential to end 404s, VODs preserved here can withstand the test of time, and eventually persist <Link href="/goals">off-world</Link>.</p>
|
||||
</div>
|
||||
|
||||
<div className="section">
|
||||
|
||||
|
@ -6,7 +6,7 @@ export default function NotFound() {
|
||||
<h2 className='title is-2'>404 Not Found</h2>
|
||||
<p>Could not find that stream archive.</p>
|
||||
|
||||
<Link href="/archive">Return to archive list</Link>
|
||||
<Link href="/s">Return to archive list</Link>
|
||||
</div>
|
||||
)
|
||||
}
|
@@ -14,7 +14,7 @@ export default async function ArchiveProgress ({ vtuber }: IArchiveProgressProps
  const issueStreams = await getStreamCountForVtuber(vtuber.id, ['issue'])
  const totalStreams = await getStreamCountForVtuber(vtuber.id)
  const eligibleStreams = goodStreams+issueStreams
-  const completedPercentage = Math.floor((eligibleStreams / totalStreams) * 100)
+  const completedPercentage = (eligibleStreams / totalStreams) * 100
  return (
    <div>
      {/* <p>
@@ -7,9 +7,7 @@ import { faPatreon } from '@fortawesome/free-brands-svg-icons';
import { useLocalStorageValue } from '@react-hookz/web';
import { faRightFromBracket } from '@fortawesome/free-solid-svg-icons';
import Skeleton from 'react-loading-skeleton';
-import { strapiUrl } from '@/lib/constants';
-// import NextAuth from 'next-auth'; // this is (pipedream) wishlist
-// import Providers from 'next-auth/providers';
+import { strapiUrl } from '@/lib/constants';

export interface IJWT {
  jwt: string;
@@ -63,7 +63,7 @@ export default function Navbar() {
        </div>


-       <div className="navbar-item">
+       {/* <div className="navbar-item">
          <Link className="button " href="/upload">
            <span className="mr-1">Upload</span>
            <FontAwesomeIcon
@ -71,7 +71,7 @@ export default function Navbar() {
|
||||
className="fas fa-upload"
|
||||
></FontAwesomeIcon>
|
||||
</Link>
|
||||
</div>
|
||||
</div> */}
|
||||
|
||||
<div className="navbar-item fp-profile-button">
|
||||
{/* show the login button if user is anon */}
|
||||
|
@ -10,13 +10,11 @@ interface PatronsListProps {
export default async function PatronsList({ displayStyle }: PatronsListProps) {
const patrons = await getPatrons()

if (!patrons) return (
<SkeletonTheme baseColor="#000" highlightColor="#000">
<Skeleton count={3} enableAnimation={false} />
</SkeletonTheme>
);

if (displayStyle === 'box') {
return (
<div className="columns is-multiline">
@ -11,7 +11,6 @@ import { faTriangleExclamation, faCircleInfo, faThumbsUp, IconDefinition, faO, f
import { Hemisphere, Moon } from "lunarphase-js";
import { useEffect, useState } from "react";
import { faXTwitter } from "@fortawesome/free-brands-svg-icons";
import { notFound } from "next/navigation";

export interface IStreamProps {
stream: IStream;

@ -48,7 +47,6 @@ function determineStatus(stream: IStream): Status {
export default function StreamPage({ stream }: IStreamProps) {
console.log('StreamPage function has been invoked! stream as follows')
console.log(stream)
if (!stream) notFound()
const displayName = stream.attributes.vtuber.data.attributes.displayName;
const date = new Date(stream.attributes.date);
const [hemisphere, setHemisphere] = useState(Hemisphere.NORTHERN);

@ -171,7 +169,8 @@ export default function StreamPage({ stream }: IStreamProps) {
<span className="title is-1"><FontAwesomeIcon icon={icon}></FontAwesomeIcon></span>
<p className="mt-3">{desc1}</p>
<p className="mt-5">{desc2}<br />
<Link href={`/upload?cuid=${stream.attributes.cuid}`}>Upload it here.</Link></p>
{/* <Link href={`/upload?cuid=${stream.attributes.cuid}`}>Upload it here.</Link></p> */}
<Link style={{ cursor: 'not-allowed' }} href={`/upload?cuid=${stream.attributes.cuid}`}><i>Uploads coming soon.</i></Link></p>
</div>
</article>
</div>
@ -16,8 +16,7 @@ import {
ColumnDef,
flexRender,
} from '@tanstack/react-table'
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
import { faSpinner } from '@fortawesome/free-solid-svg-icons'

import { fetchStreamData, IStream } from '@/lib/streams'

@ -94,7 +93,7 @@ export default function StreamsTable() {
accessorFn: d => [
(d.attributes.isChaturbateStream && 'CB'),
(d.attributes.isFanslyStream && 'Fansly')
].filter(Boolean).join(', ') || '???'
].filter(Boolean).join(' ') || '???'
},
{
header: 'Status',

@ -130,20 +129,20 @@ export default function StreamsTable() {
pageSize: 50,
})

const { data, error, isPending } = useQuery({
const dataQuery = useQuery({
queryKey: ['streams', pagination.pageIndex, pagination.pageSize],
queryFn: () => fetchStreamData(pagination),
placeholderData: keepPreviousData, // don't have 0 rows flash while changing pages/loading next page,
staleTime: 1000,
staleTime: 1000
}, queryClient)

const defaultData = React.useMemo(() => [], [])

const table = useReactTable({
data: data?.rows ?? defaultData,
data: dataQuery?.data?.rows ?? defaultData,
columns,
// pageCount: dataQuery.data?.pageCount ?? -1, //you can now pass in `rowCount` instead of pageCount and `pageCount` will be calculated internally (new in v8.13.0)
rowCount: data?.rowCount, // new in v8.13.0 - alternatively, just pass in `pageCount` directly
rowCount: dataQuery.data?.rowCount, // new in v8.13.0 - alternatively, just pass in `pageCount` directly
state: {
pagination,
},

@ -157,53 +156,49 @@ export default function StreamsTable() {
return (
<div className="p-2">
<div className="h-2" />

{isPending && <FontAwesomeIcon className="mt-5 fa-spin-pulse" icon={faSpinner} ></FontAwesomeIcon> }
{!isPending && <>
<table className='table is-hoverable is-fullwidth'>
<thead>
{table.getHeaderGroups().map(headerGroup => (
<tr key={headerGroup.id}>
{headerGroup.headers.map(header => {
<table className='table is-hoverable is-fullwidth'>
<thead>
{table.getHeaderGroups().map(headerGroup => (
<tr key={headerGroup.id}>
{headerGroup.headers.map(header => {
return (
<th key={header.id} colSpan={header.colSpan}>
{header.isPlaceholder ? null : (
<div>
{flexRender(
header.column.columnDef.header,
header.getContext()
)}
</div>
)}
</th>
)
})}
</tr>
))}
</thead>
<tbody>
{table.getRowModel().rows.map(row => {
return (
<tr key={row.id}>
{row.getVisibleCells().map(cell => {
return (
<th key={header.id} colSpan={header.colSpan}>
{header.isPlaceholder ? null : (
<div>
{flexRender(
header.column.columnDef.header,
header.getContext()
)}
</div>
<td
className={getStatusClass(cell.getValue() as string)}
key={cell.id}
>
{flexRender(
cell.column.columnDef.cell,
cell.getContext()
)}
</th>
</td>
)
})}
</tr>
))}
</thead>
<tbody>
{table.getRowModel().rows.map(row => {
return (
<tr key={row.id}>
{row.getVisibleCells().map(cell => {
return (
<td
className={getStatusClass(cell.getValue() as string)}
key={cell.id}
>
{flexRender(
cell.column.columnDef.cell,
cell.getContext()
)}
</td>
)
})}
</tr>
)
})}
</tbody>
</table>

)
})}
</tbody>
</table>

<div className="columns is-mobile is-vcentered">
<div className='column is-half'>

@ -278,7 +273,6 @@ export default function StreamsTable() {
</div>
</div>
</div>
</>}
|
@ -159,241 +159,235 @@ export default function UploadForm({ vtubers }: IUploadFormProps) {
|
||||
|
||||
|
||||
uppy.on('complete', async (result: any) => {
|
||||
console.log('uppy complete! ')
|
||||
console.log(result)
|
||||
for (const s of result.successful) {
|
||||
if (!s?.s3Multipart) {
|
||||
setError('root.serverError', {
|
||||
type: 'remote',
|
||||
message: 'file was missing s3Multipart'
|
||||
})
|
||||
// throw new Error('file was missing s3Multipart')
|
||||
}
|
||||
}
|
||||
let files = result.successful.map((f: any) => ({ key: f.s3Multipart.key, uploadId: f.s3Multipart.uploadId }));
|
||||
setValue('files', files);
|
||||
});
|
||||
|
||||
|
||||
return (<div className="notification is-secondary">
|
||||
<h1 className="title">VOD uploads</h1>
|
||||
<p>
|
||||
<i>coming soon!!</i>
|
||||
</p>
|
||||
<hr ></hr>
|
||||
<p>Track progress on the <a href="/goals">Goals Page</a></p>
|
||||
</div>)
|
||||
|
||||
|
||||
return (
|
||||
<>
|
||||
// return (
|
||||
// <>
|
||||
|
||||
<div className='section'>
|
||||
<h2 className='title is-2'>Upload VOD</h2>
|
||||
// <div className='section'>
|
||||
// <h2 className='title is-2'>Upload VOD</h2>
|
||||
|
||||
<p className="mb-5"><i>Together we can archive all lewdtuber livestreams!</i></p>
|
||||
// <p className="mb-5"><i>Together we can archive all lewdtuber livestreams!</i></p>
|
||||
|
||||
{(!authData?.accessToken)
|
||||
?
|
||||
<>
|
||||
<aside className='notification is-danger'><p>Please log in to upload VODs</p></aside>
|
||||
<LoginButton />
|
||||
</>
|
||||
: (
|
||||
// {(!authData?.accessToken)
|
||||
// ?
|
||||
// <>
|
||||
// <aside className='notification is-danger'><p>Please log in to upload VODs</p></aside>
|
||||
// <LoginButton />
|
||||
// </>
|
||||
// : (
|
||||
|
||||
|
||||
|
||||
<div className='columns is-multiline'>
|
||||
<form id="vod-details" onSubmit={handleSubmit(createUSC)}>
|
||||
// <div className='columns is-multiline'>
|
||||
// <form id="vod-details" onSubmit={handleSubmit(createUSC)}>
|
||||
|
||||
|
||||
{(!isSubmitSuccessful) && <div className='column is-full'>
|
||||
<section className="hero is-info mb-3">
|
||||
<div className="hero-body">
|
||||
<p className="title">
|
||||
Step 1
|
||||
</p>
|
||||
<p className="subtitle">
|
||||
Upload the file
|
||||
</p>
|
||||
</div>
|
||||
</section>
|
||||
<section className="section mb-5">
|
||||
<Dashboard
|
||||
uppy={uppy}
|
||||
theme='dark'
|
||||
proudlyDisplayPoweredByUppy={true}
|
||||
|
||||
showProgressDetails={true}
|
||||
/>
|
||||
// {(!isSubmitSuccessful) && <div className='column is-full'>
|
||||
// <section className="hero is-info mb-3">
|
||||
// <div className="hero-body">
|
||||
// <p className="title">
|
||||
// Step 1
|
||||
// </p>
|
||||
// <p className="subtitle">
|
||||
// Upload the file
|
||||
// </p>
|
||||
// </div>
|
||||
// </section>
|
||||
// <section className="section mb-5">
|
||||
// <Dashboard
|
||||
// uppy={uppy}
|
||||
// theme='dark'
|
||||
// proudlyDisplayPoweredByUppy={false}
|
||||
// />
|
||||
|
||||
{/* This form is hidden. Why? */}
|
||||
<input
|
||||
required
|
||||
hidden={false}
|
||||
style={{ display: 'block' }}
|
||||
className="input" type="text"
|
||||
{...register('files')}
|
||||
></input>
|
||||
// <input
|
||||
// required
|
||||
// hidden={true}
|
||||
// style={{ display: 'none' }}
|
||||
// className="input" type="text"
|
||||
// {...register('files')}
|
||||
// ></input>
|
||||
|
||||
|
||||
{errors.files && <p className="help is-danger">{errors.files.message?.toString()}</p>}
|
||||
// {errors.files && <p className="help is-danger">{errors.files.message?.toString()}</p>}
|
||||
|
||||
</section>
|
||||
</div>}
|
||||
// </section>
|
||||
// </div>}
|
||||
|
||||
{(!isSubmitSuccessful) && <div className='column is-full '>
|
||||
{/* {(!cuid) && <aside className='notification is-info'>Hint: Some of these fields are filled out automatically when uploading from a <Link href="/streams">stream</Link> page.</aside>} */}
|
||||
// {(!isSubmitSuccessful) && <div className='column is-full '>
|
||||
// {/* {(!cuid) && <aside className='notification is-info'>Hint: Some of these fields are filled out automatically when uploading from a <Link href="/streams">stream</Link> page.</aside>} */}
|
||||
|
||||
<section className="hero is-info mb-3">
|
||||
<div className="hero-body">
|
||||
<p className="title">
|
||||
Step 2
|
||||
</p>
|
||||
<p className="subtitle">
|
||||
Tell us about the VOD
|
||||
</p>
|
||||
</div>
|
||||
</section>
|
||||
// <section className="hero is-info mb-3">
|
||||
// <div className="hero-body">
|
||||
// <p className="title">
|
||||
// Step 2
|
||||
// </p>
|
||||
// <p className="subtitle">
|
||||
// Tell us about the VOD
|
||||
// </p>
|
||||
// </div>
|
||||
// </section>
|
||||
|
||||
<section className="section">
|
||||
// <section className="section">
|
||||
|
||||
|
||||
|
||||
{/* <input
|
||||
required
|
||||
// hidden={false}
|
||||
// style={{ display: 'none' }}
|
||||
className="input" type="text"
|
||||
{...register('streamCuid')}
|
||||
></input> */}
|
||||
// {/* <input
|
||||
// required
|
||||
// // hidden={false}
|
||||
// // style={{ display: 'none' }}
|
||||
// className="input" type="text"
|
||||
// {...register('streamCuid')}
|
||||
// ></input> */}
|
||||
|
||||
|
||||
<div className="field">
|
||||
<label className="label">VTuber</label>
|
||||
<div className="select">
|
||||
<select
|
||||
required
|
||||
// value={vtuber}
|
||||
// onChange={(evt) => setVtuber(parseInt(evt.target.value))}
|
||||
{...register('vtuber')}
|
||||
>
|
||||
{vtubers.map((vtuber: IVtuber) => (
|
||||
<option key={vtuber.id} value={vtuber.id}>{vtuber.attributes.displayName}</option>
|
||||
))}
|
||||
</select>
|
||||
</div>
|
||||
<p className="help is-info">Choose the VTuber this VOD belongs to. (More VTubers will be added when storage/bandwidth funding is secured.)</p>
|
||||
{errors.vtuber && <p className="help is-danger">vtuber error</p>}
|
||||
// <div className="field">
|
||||
// <label className="label">VTuber</label>
|
||||
// <div className="select">
|
||||
// <select
|
||||
// required
|
||||
// // value={vtuber}
|
||||
// // onChange={(evt) => setVtuber(parseInt(evt.target.value))}
|
||||
// {...register('vtuber')}
|
||||
// >
|
||||
// {vtubers.map((vtuber: IVtuber) => (
|
||||
// <option key={vtuber.id} value={vtuber.id}>{vtuber.attributes.displayName}</option>
|
||||
// ))}
|
||||
// </select>
|
||||
// </div>
|
||||
// <p className="help is-info">Choose the VTuber this VOD belongs to. (More VTubers will be added when storage/bandwidth funding is secured.)</p>
|
||||
// {errors.vtuber && <p className="help is-danger">vtuber error</p>}
|
||||
|
||||
</div>
|
||||
// </div>
|
||||
|
||||
<div className="field">
|
||||
<label className="label">Stream Date</label>
|
||||
<input
|
||||
required
|
||||
className="input" type="date"
|
||||
{...register('date')}
|
||||
// onChange={(evt) => setDate(evt.target.value)}
|
||||
></input>
|
||||
<p className="help is-info">The date when the VOD was originally streamed.</p>
|
||||
{errors.date && <p className="help is-danger">{errors.date.message?.toString()}</p>}
|
||||
// <div className="field">
|
||||
// <label className="label">Stream Date</label>
|
||||
// <input
|
||||
// required
|
||||
// className="input" type="date"
|
||||
// {...register('date')}
|
||||
// // onChange={(evt) => setDate(evt.target.value)}
|
||||
// ></input>
|
||||
// <p className="help is-info">The date when the VOD was originally streamed.</p>
|
||||
// {errors.date && <p className="help is-danger">{errors.date.message?.toString()}</p>}
|
||||
|
||||
</div>
|
||||
// </div>
|
||||
|
||||
<div className="field">
|
||||
<label className="label">Notes</label>
|
||||
<textarea
|
||||
className="textarea"
|
||||
placeholder="e.g. Missing first 10 minutes of stream"
|
||||
// onChange={(evt) => setNote(evt.target.value)}
|
||||
{...register('notes')}
|
||||
></textarea>
|
||||
<p className="help is-info">If there are any issues with the VOD, put a note here. If there are no VOD issues, leave this field blank.</p>
|
||||
</div>
|
||||
// <div className="field">
|
||||
// <label className="label">Notes</label>
|
||||
// <textarea
|
||||
// className="textarea"
|
||||
// placeholder="e.g. Missing first 10 minutes of stream"
|
||||
// // onChange={(evt) => setNote(evt.target.value)}
|
||||
// {...register('notes')}
|
||||
// ></textarea>
|
||||
// <p className="help is-info">If there are any issues with the VOD, put a note here. If there are no VOD issues, leave this field blank.</p>
|
||||
// </div>
|
||||
|
||||
<div className="field">
|
||||
<label className="label">Attribution</label>
|
||||
<label className="checkbox">
|
||||
<input
|
||||
type="checkbox"
|
||||
// onChange={(evt) => setAttribution(evt.target.checked)}
|
||||
{...register('attribution')}
|
||||
/>
|
||||
<span className={`ml-2 ${styles.noselect}`}>Credit {authData.user?.username} for the upload.</span>
|
||||
<p className="help is-info">Check this box if you want your username displayed on the website. Thank you for uploading!</p>
|
||||
</label>
|
||||
</div>
|
||||
// <div className="field">
|
||||
// <label className="label">Attribution</label>
|
||||
// <label className="checkbox">
|
||||
// <input
|
||||
// type="checkbox"
|
||||
// // onChange={(evt) => setAttribution(evt.target.checked)}
|
||||
// {...register('attribution')}
|
||||
// />
|
||||
// <span className={`ml-2 ${styles.noselect}`}>Credit {authData.user?.username} for the upload.</span>
|
||||
// <p className="help is-info">Check this box if you want your username displayed on the website. Thank you for uploading!</p>
|
||||
// </label>
|
||||
// </div>
|
||||
|
||||
</section>
|
||||
// </section>
|
||||
|
||||
</div>}
|
||||
// </div>}
|
||||
|
||||
|
||||
<div className="column is-full">
|
||||
<section className="hero is-info">
|
||||
<div className="hero-body">
|
||||
<p className="title">
|
||||
Step 3
|
||||
</p>
|
||||
<p className="subtitle">
|
||||
Send the form
|
||||
</p>
|
||||
</div>
|
||||
</section>
|
||||
<section className="section">
|
||||
// <div className="column is-full">
|
||||
// <section className="hero is-info">
|
||||
// <div className="hero-body">
|
||||
// <p className="title">
|
||||
// Step 3
|
||||
// </p>
|
||||
// <p className="subtitle">
|
||||
// Send the form
|
||||
// </p>
|
||||
// </div>
|
||||
// </section>
|
||||
// <section className="section">
|
||||
|
||||
|
||||
|
||||
|
||||
{errors.root?.serverError && (
|
||||
<div className="notification">
|
||||
<button className="delete" onClick={() => clearErrors()}></button>
|
||||
<ErrorMessage name="root" errors={errors} ></ErrorMessage>
|
||||
</div>
|
||||
)}
|
||||
// {errors.root?.serverError && (
|
||||
// <div className="notification">
|
||||
// <button className="delete" onClick={() => clearErrors()}></button>
|
||||
// <ErrorMessage name="root" errors={errors} ></ErrorMessage>
|
||||
// </div>
|
||||
// )}
|
||||
|
||||
|
||||
|
||||
{!isSubmitSuccessful && (
|
||||
<button className="button is-primary is-large mt-5">
|
||||
<span className="icon is-small">
|
||||
<FontAwesomeIcon icon={faPaperPlane}></FontAwesomeIcon>
|
||||
</span>
|
||||
<span>Send</span>
|
||||
</button>
|
||||
)}
|
||||
// {!isSubmitSuccessful && (
|
||||
// <button className="button is-primary is-large mt-5">
|
||||
// <span className="icon is-small">
|
||||
// <FontAwesomeIcon icon={faPaperPlane}></FontAwesomeIcon>
|
||||
// </span>
|
||||
// <span>Send</span>
|
||||
// </button>
|
||||
// )}
|
||||
|
||||
{isSubmitting && (
|
||||
<p>
|
||||
<FontAwesomeIcon className="mt-5 fa-spin-pulse" icon={faSpinner} ></FontAwesomeIcon>
|
||||
</p>
|
||||
)}
|
||||
{isSubmitSuccessful && (
|
||||
<>
|
||||
<aside className="notification mt-5 is-success">Thank you for uploading! </aside>
|
||||
<button onClick={() => {
|
||||
reset(); // reset form
|
||||
const files = uppy.getFiles()
|
||||
for (const file of files) {
|
||||
uppy.removeFile(file.id); // reset uppy
|
||||
}
|
||||
}} className="button is-primary">
|
||||
<span className="icon is-small">
|
||||
<FontAwesomeIcon icon={faEraser}></FontAwesomeIcon>
|
||||
</span>
|
||||
<span>Reset form</span>
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
// {isSubmitting && (
|
||||
// <p>
|
||||
// <FontAwesomeIcon className="mt-5 fa-spin-pulse" icon={faSpinner} ></FontAwesomeIcon>
|
||||
// </p>
|
||||
// )}
|
||||
// {isSubmitSuccessful && (
|
||||
// <>
|
||||
// <aside className="notification mt-5 is-success">Thank you for uploading! </aside>
|
||||
// <button onClick={() => {
|
||||
// reset(); // reset form
|
||||
// const files = uppy.getFiles()
|
||||
// for (const file of files) {
|
||||
// uppy.removeFile(file.id); // reset uppy
|
||||
// }
|
||||
// }} className="button is-primary">
|
||||
// <span className="icon is-small">
|
||||
// <FontAwesomeIcon icon={faEraser}></FontAwesomeIcon>
|
||||
// </span>
|
||||
// <span>Reset form</span>
|
||||
// </button>
|
||||
// </>
|
||||
// )}
|
||||
|
||||
|
||||
|
||||
</section>
|
||||
</div>
|
||||
// </section>
|
||||
// </div>
|
||||
|
||||
</form>
|
||||
</div>
|
||||
// </form>
|
||||
// </div>
|
||||
|
||||
|
||||
)
|
||||
}
|
||||
// )
|
||||
// }
|
||||
|
||||
</div>
|
||||
// </div>
|
||||
|
||||
</>
|
||||
)
|
||||
// </>
|
||||
// )
|
||||
|
||||
}
|
||||
|
@ -277,7 +277,7 @@ export async function getAllStreamsForVtuber(vtuberId: number, archiveStatuses =
},
});

// console.log(`strapiUrl=${strapiUrl}`)
console.log(`strapiUrl=${strapiUrl}`)
const response = await fetch(`${strapiUrl}/api/streams?${query}`, fetchStreamsOptions)

if (response.status !== 200) {

@ -340,14 +340,14 @@ export async function fetchStreamData({ pageIndex, pageSize }: { pageIndex: numb
})
const response = await fetch(
`${strapiUrl}/api/streams?${query}`
);
const json = await response.json();
console.log(json)
const d = {
rows: json.data,
pageCount: Math.ceil(json.meta.pagination.total / pageSize),
rowCount: json.meta.pagination.total,
}
);
const json = await response.json();
console.log(json)
const d = {
rows: json.data,
pageCount: Math.ceil(json.meta.pagination.total / pageSize),
rowCount: json.meta.pagination.total,
}
// console.log(`fetchStreamData with pageIndex=${pageIndex}, pageSize=${pageSize}\n\n${JSON.stringify(d, null, 2)}`)
return d;
}

@ -371,13 +371,13 @@ export async function getStreamCountForVtuber(vtuberId: number, archiveStatuses
)
const res = await fetch(`${strapiUrl}/api/streams?${query}`, fetchStreamsOptions)
const data = await res.json()
// console.log(`getStreamCountForVtuber with archiveStatuses=${archiveStatuses}`)
// console.log(JSON.stringify(data, null, 2))
console.log(`getStreamCountForVtuber with archiveStatuses=${archiveStatuses}`)
console.log(JSON.stringify(data, null, 2))
return data.meta.pagination.total
}

export async function getStreamsForVtuber(vtuberId: number, page: number = 1, pageSize: number = 25, sortDesc = true): Promise<IStreamsResponse> {
// console.log(`getStreamsForVtuber() with strapiUrl=${strapiUrl}`)
console.log(`getStreamsForVtuber() with strapiUrl=${strapiUrl}`)
const query = qs.stringify(
{
populate: {

@ -405,7 +405,7 @@ export async function getStreamsForVtuber(vtuberId: number, page: number = 1, pa
)
const res = await fetch(`${strapiUrl}/api/streams?${query}`, fetchStreamsOptions)
const data = await res.json()
// console.log(data)
console.log(data)
return data
}
@ -11,16 +11,8 @@ import { notFound } from "next/navigation";

export default async function Page() {
const vods = await getVods(1, 9, true);
console.log('vods as follows')
console.log(JSON.stringify(vods, null, 2))

const vtubers = await getVtubers();
if (!vtubers) notFound();
console.log(`vtubers as follows`)
console.log(JSON.stringify(vtubers, null, 2))

// return (
// <pre>
@ -8,10 +8,6 @@ import { useAuth } from './components/auth';
import { companionUrl } from '@/lib/constants';

// Uppy is a challenging react integration. Following are some references
// @see https://github.com/transloadit/uppy/issues/4727#issuecomment-1761118428

export const UppyContext = createContext(new Uppy());

export default function UppyProvider({

@ -22,17 +18,12 @@ export default function UppyProvider({
const { authData } = useAuth();
const [uppy] = useState(() => new Uppy(
{
autoProceed: false,
debug: true
autoProceed: true
}
)
.use(RemoteSources, {
companionUrl,
sources: [
'GoogleDrive',
'Dropbox',
'Url'
]
sources: ['GoogleDrive']
})
.use(AwsS3, {
companionUrl,
@ -16,47 +16,47 @@
"@fortawesome/free-solid-svg-icons": "^6.5.2",
"@fortawesome/react-fontawesome": "^0.2.2",
"@hookform/error-message": "^2.0.1",
"@hookform/resolvers": "^3.7.0",
"@hookform/resolvers": "^3.6.0",
"@mux/blurhash": "^0.1.2",
"@mux/mux-player": "^2.7.0",
"@mux/mux-player-react": "^2.7.0",
"@paralleldrive/cuid2": "^2.2.2",
"@react-hookz/web": "^24.0.4",
"@tanstack/react-query": "^5.49.2",
"@tanstack/react-table": "^8.19.2",
"@types/lodash": "^4.17.6",
"@tanstack/react-query": "^5.40.1",
"@tanstack/react-table": "^8.17.3",
"@types/lodash": "^4.17.4",
"@types/qs": "^6.9.15",
"@types/react": "^18.3.3",
"@types/react-dom": "^18.3.0",
"@uppy/aws-s3": "^3.6.2",
"@uppy/aws-s3-multipart": "^3.12.0",
"@uppy/core": "^3.13.0",
"@uppy/dashboard": "^3.9.1",
"@uppy/core": "^3.12.0",
"@uppy/dashboard": "^3.8.3",
"@uppy/drag-drop": "^3.1.0",
"@uppy/file-input": "^3.1.2",
"@uppy/progress-bar": "^3.1.1",
"@uppy/react": "^3.4.0",
"@uppy/remote-sources": "^1.3.0",
"@uppy/react": "^3.3.1",
"@uppy/remote-sources": "^1.2.0",
"bulma": "^1.0.1",
"date-fns": "^2.30.0",
"date-fns-tz": "^2.0.1",
"dayjs": "^1.11.11",
"feed": "^4.2.2",
"gray-matter": "^4.0.3",
"hls.js": "^1.5.12",
"hls.js": "^1.5.11",
"lodash": "^4.17.21",
"lunarphase-js": "^2.0.3",
"multiformats": "^13.1.3",
"multiformats": "^13.1.1",
"next": "14.0.4",
"next-goatcounter": "^1.0.5",
"nextjs-toploader": "^1.6.12",
"plyr": "^3.7.8",
"prism-react-renderer": "^2.3.1",
"qs": "^6.12.2",
"qs": "^6.12.1",
"react": "^18.3.1",
"react-data-table-component": "^7.6.2",
"react-dom": "^18.3.1",
"react-hook-form": "^7.52.1",
"react-hook-form": "^7.51.5",
"react-loading-skeleton": "^3.4.0",
"react-toastify": "^9.1.3",
"sharp": "^0.33.4",

@ -65,7 +65,7 @@
"yup": "^1.4.0"
},
"devDependencies": {
"@types/node": "^20.14.9",
"@types/node": "^20.14.2",
"eslint": "^8.57.0",
"eslint-config-next": "14.0.4",
"tsc": "^2.0.4",
Some files were not shown because too many files have changed in this diff.