figure out strapi with pnpm

This commit is contained in:
CJ_Clippy 2024-05-27 14:20:58 -08:00
parent 11032ee83c
commit f6ce2138b9
133 changed files with 25471 additions and 14074 deletions

View File

@ -17,3 +17,13 @@ compose/
docker-compose.* docker-compose.*
.vscode .vscode
charts/**/charts charts/**/charts
packages/strapi/.tmp/
packages/strapi/.cache/
packages/strapi/.git/
packages/strapi/.env
packages/strapi/build/
packages/strapi/node_modules/
packages/strapi/data/
packages/strapi/backup

3
.gitignore vendored
View File

@ -1,3 +1,6 @@
**/.env
*~
charts/**/charts charts/**/charts
.envrc .envrc
compose/ compose/

View File

@ -4,6 +4,8 @@ pnpm required for workspaces.
Kubernetes for Development using Tiltfile Kubernetes for Development using Tiltfile
kubefwd and entr for DNS in dev cluster
dokku for Production, deployed with `git push`. dokku for Production, deployed with `git push`.
(dokku is slowly being replaced by Kubernetes) (dokku is slowly being replaced by Kubernetes)

View File

@ -30,6 +30,21 @@ tilt:
tilt up tilt up
secrets: secrets:
kubectl --namespace futureporn delete secret frp --ignore-not-found
kubectl --namespace futureporn create secret generic frp \
--from-literal=token=${FRP_TOKEN}
kubectl --namespace futureporn delete secret scout --ignore-not-found
kubectl --namespace futureporn create secret generic scout \
--from-literal=recentsToken=${SCOUT_RECENTS_TOKEN} \
--from-literal=strapiApiKey=${SCOUT_STRAPI_API_KEY} \
--from-literal=imapServer=${SCOUT_IMAP_SERVER} \
--from-literal=imapPort=${SCOUT_IMAP_PORT} \
--from-literal=imapUsername=${SCOUT_IMAP_USERNAME} \
--from-literal=imapPassword=${SCOUT_IMAP_PASSWORD} \
--from-literal=imapAccessToken=${SCOUT_IMAP_ACCESS_TOKEN} \
kubectl --namespace futureporn delete secret link2cid --ignore-not-found kubectl --namespace futureporn delete secret link2cid --ignore-not-found
kubectl --namespace futureporn create secret generic link2cid \ kubectl --namespace futureporn create secret generic link2cid \
--from-literal=apiKey=${LINK2CID_API_KEY} --from-literal=apiKey=${LINK2CID_API_KEY}
@ -38,7 +53,6 @@ secrets:
kubectl --namespace cert-manager create secret generic vultr \ kubectl --namespace cert-manager create secret generic vultr \
--from-literal=apiKey=${VULTR_API_KEY} --from-literal=apiKey=${VULTR_API_KEY}
kubectl --namespace futureporn delete secret vultr --ignore-not-found kubectl --namespace futureporn delete secret vultr --ignore-not-found
kubectl --namespace futureporn create secret generic vultr \ kubectl --namespace futureporn create secret generic vultr \
--from-literal=containerRegistryUsername=${VULTR_CONTAINER_REGISTRY_USERNAME} \ --from-literal=containerRegistryUsername=${VULTR_CONTAINER_REGISTRY_USERNAME} \
@ -58,7 +72,7 @@ secrets:
--from-literal=adminJwtSecret=${STRAPI_ADMIN_JWT_SECRET} \ --from-literal=adminJwtSecret=${STRAPI_ADMIN_JWT_SECRET} \
--from-literal=apiTokenSalt=${STRAPI_API_TOKEN_SALT} \ --from-literal=apiTokenSalt=${STRAPI_API_TOKEN_SALT} \
--from-literal=appKeys=${STRAPI_APP_KEYS} \ --from-literal=appKeys=${STRAPI_APP_KEYS} \
--from-literal=databaseUrl=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} \ --from-literal=databaseUrl=postgres.futureporn.svc.cluster.local://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} \
--from-literal=jwtSecret=${STRAPI_JWT_SECRET} \ --from-literal=jwtSecret=${STRAPI_JWT_SECRET} \
--from-literal=muxPlaybackRestrictionId=${MUX_PLAYBACK_RESTRICTION_ID} \ --from-literal=muxPlaybackRestrictionId=${MUX_PLAYBACK_RESTRICTION_ID} \
--from-literal=muxSigningKeyPrivateKey=${MUX_SIGNING_KEY_PRIVATE_KEY} \ --from-literal=muxSigningKeyPrivateKey=${MUX_SIGNING_KEY_PRIVATE_KEY} \
@ -73,15 +87,9 @@ secrets:
--from-literal=cdnBucketUscUrl=${CDN_BUCKET_USC_URL} \ --from-literal=cdnBucketUscUrl=${CDN_BUCKET_USC_URL} \
--from-literal=transferTokenSalt=${TRANSFER_TOKEN_SALT} --from-literal=transferTokenSalt=${TRANSFER_TOKEN_SALT}
kubectl --namespace futureporn delete secret realtime --ignore-not-found
kubectl --namespace futureporn create secret generic realtime \
--from-literal=postgresRealtimeConnectionString=${POSTGRES_REALTIME_CONNECTION_STRING}
# --from-literal=sessionSecret=$(SESSION_SECRET) \
# --from-literal=twitchClientId=$(TWITCH_CLIENT_ID) \
# --from-literal=twitchClientSecret=$(TWITCH_CLIENT_SECRET) \
# --from-literal=gumroadClientId=$(GUMROAD_CLIENT_ID) \
# --from-literal=gumroadClientSecret=$(GUMROAD_CLIENT_SECRET)
define _script define _script
cat <<'EOF' | ctlptl apply -f - cat <<'EOF' | ctlptl apply -f -
@ -100,6 +108,19 @@ minikube:
minikube addons enable metrics-server minikube addons enable metrics-server
kind:
bash -x ./scripts/kind-with-local-registry.sh
deps:
sudo pamac install make entr nvm minikube kubectl docker helm
curl -fsSL https://raw.githubusercontent.com/tilt-dev/tilt/master/scripts/install.sh | bash
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash
echo "go to https://github.com/txn2/kubefwd/releases/latest to get kubefwd"
echo "go to https://github.com/tilt-dev/ctlptl/releases/latest to get ctlptl"
sudo systemctl enable docker
sudo systemctl start docker
usermod -aG docker cj
newgrp docker
# A gitea act runner which runs locally # A gitea act runner which runs locally
# https://docs.gitea.com/next/usage/actions/overview # https://docs.gitea.com/next/usage/actions/overview

View File

@ -24,7 +24,7 @@ spec:
kind: HelmRepository kind: HelmRepository
name: bitnami name: bitnami
values: values:
fullnameOverride: windmill-postgresql-cool fullnameOverride: windmill-postgresql-uncool
postgresql: postgresql:
auth: auth:
postgresPassword: windmill postgresPassword: windmill

View File

@ -0,0 +1,25 @@
apiVersion: source.toolkit.fluxcd.io/v1beta2
kind: GitRepository
metadata:
name: chisel-operator
namespace: futureporn
spec:
interval: 5m
url: https://github.com/FyraLabs/chisel-operator
ref:
branch: master
---
apiVersion: kustomize.toolkit.fluxcd.io/v1beta2
kind: Kustomization
metadata:
name: chisel-operator
namespace: futureporn
spec:
interval: 10m
targetNamespace: futureporn
sourceRef:
kind: GitRepository
name: chisel-operator
path: "./kustomize"
prune: true

View File

@ -0,0 +1,5 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: futureporn
resources:
- chisel.yaml

View File

@ -0,0 +1,37 @@
apiVersion: v1
kind: Service
metadata:
name: capture
namespace: futureporn
spec:
selector:
app.kubernetes.io/name: capture
ports:
- name: capture
port: 80
targetPort: 5566
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: capture
namespace: futureporn
labels:
app: capture
spec:
replicas: 1
selector:
matchLabels:
app: capture
template:
metadata:
labels:
app: capture
spec:
containers:
- name: capture
image: "{{ .Values.capture.containerName }}"
ports:
- containerPort: 5566

View File

@ -0,0 +1,122 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: echo-deployment
namespace: futureporn
labels:
app.kubernetes.io/name: echo
spec:
replicas: 1
selector:
matchLabels:
app: echo-server
template:
metadata:
labels:
app: echo-server
spec:
containers:
- name: echo-server
resources:
limits:
cpu: 500m
memory: 512Mi
image: jmalloc/echo-server
ports:
- name: http
containerPort: 8080
---
apiVersion: v1
kind: Service
metadata:
name: echo
namespace: futureporn
labels:
app.kubernetes.io/name: echo
spec:
ports:
- name: http
port: 8080
targetPort: http
protocol: TCP
selector:
app: echo-server
# ---
# apiVersion: networking.k8s.io/v1
# kind: Ingress
# metadata:
# name: ngrok
# namespace: futureporn
# annotations:
# kubernetes.io/ingress.class: ngrok
# k8s.ngrok.com/namespace: futureporn
# k8s.ngrok.com/service: ngrok
# spec:
# ingressClassName: ngrok
# tls:
# - secretName: ngrok-tls
# hosts:
# - "{{ .Values.ngrok.hostname }}"
# rules:
# - host: "{{ .Values.ngrok.hostname }}"
# http:
# paths:
# - path: /echo
# pathType: Prefix
# backend:
# service:
# name: echo
# port:
# number: 8080
# - path: /game
# pathType: Prefix
# backend:
# service:
# name: game-2048
# port:
# number: 8080
# - path: /strapi
# pathType: Prefix
# backend:
# service:
# name: strapi
# port:
# number: 1337
# - path: /next
# pathType: Prefix
# backend:
# service:
# name: next
# port:
# number: 3000
# ---
# apiVersion: networking.k8s.io/v1
# kind: Ingress
# metadata:
# name: echo-ing
# namespace: futureporn
# annotations:
# kubernetes.io/ingress.class: nginx
# cert-manager.io/cluster-issuer: letsencrypt-staging
# spec:
# backend:
# serviceName: echo-service
# servicePort: 8080
# tls:
# - secretName: next-tls
# hosts:
# - echo.test
# rules:
# - host: echo.test
# http:
# paths:
# - path: /
# pathType: Prefix
# backend:
# service:
# name: echo-service
# port:
# number: 8080

View File

@ -1,3 +1,6 @@
{{ if eq .Values.managedBy "Helm" }}
---
apiVersion: v1 apiVersion: v1
kind: ServiceAccount kind: ServiceAccount
metadata: metadata:
@ -29,7 +32,8 @@ roleRef:
subjects: subjects:
- kind: ServiceAccount - kind: ServiceAccount
name: external-dns name: external-dns
namespace: default namespace: futureporn
--- ---
apiVersion: apps/v1 apiVersion: apps/v1
kind: Deployment kind: Deployment
@ -60,3 +64,5 @@ spec:
secretKeyRef: secretKeyRef:
name: vultr name: vultr
key: apiKey key: apiKey
{{ end }}

View File

@ -0,0 +1,34 @@
{{ if eq .Values.managedBy "tilt" }}
---
apiVersion: frp.zufardhiyaulhaq.com/v1alpha1
kind: Client
metadata:
name: client-01
namespace: futureporn
spec:
server:
host: 155.138.254.201
port: 7000
authentication:
token:
secret:
name: frp
key: token
---
apiVersion: frp.zufardhiyaulhaq.com/v1alpha1
kind: Upstream
metadata:
name: echo
namespace: futureporn
spec:
client: client-01
tcp:
host: echo.futureporn.svc.cluster.local
port: 8080
server:
port: 8080
proxyProtocol: v2
{{ end }}

View File

@ -0,0 +1,34 @@
apiVersion: v1
kind: Service
metadata:
name: game-2048
namespace: futureporn
spec:
ports:
- name: http
port: 8080
targetPort: 8080
selector:
app: game-2048
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: game-2048
namespace: futureporn
spec:
replicas: 1
selector:
matchLabels:
app: game-2048
template:
metadata:
labels:
app: game-2048
spec:
containers:
- name: backend
image: mendhak/http-https-echo
ports:
- name: http
containerPort: 8080

View File

@ -0,0 +1,108 @@
---
kind: ClusterRole
apiVersion: rbac.authorization.k8s.io/v1beta1
metadata:
name: traefik-ingress-controller
rules:
- apiGroups:
- ""
resources:
- services
- endpoints
- secrets
verbs:
- get
- list
- watch
- apiGroups:
- extensions
resources:
- ingresses
verbs:
- get
- list
- watch
- apiGroups:
- extensions
resources:
- ingresses/status
verbs:
- update
---
kind: ClusterRoleBinding
apiVersion: rbac.authorization.k8s.io/v1beta1
metadata:
name: traefik-ingress-controller
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: traefik-ingress-controller
subjects:
- kind: ServiceAccount
name: traefik-ingress-controller
namespace: kube-system
---
apiVersion: v1
kind: ServiceAccount
metadata:
name: traefik-ingress-controller
namespace: kube-system
---
kind: DaemonSet
apiVersion: apps/v1
metadata:
name: traefik-ingress-controller
namespace: kube-system
labels:
k8s-app: traefik-ingress-lb
spec:
selector:
matchLabels:
k8s-app: traefik-ingress-lb
name: traefik-ingress-lb
template:
metadata:
labels:
k8s-app: traefik-ingress-lb
name: traefik-ingress-lb
spec:
serviceAccountName: traefik-ingress-controller
terminationGracePeriodSeconds: 60
containers:
- image: traefik:v1.7
name: traefik-ingress-lb
ports:
- name: http
containerPort: 80
hostPort: 80
- name: admin
containerPort: 8080
hostPort: 8080
securityContext:
capabilities:
drop:
- ALL
add:
- NET_BIND_SERVICE
args:
- --api
- --kubernetes
- --logLevel=INFO
---
kind: Service
apiVersion: v1
metadata:
name: traefik-ingress-service
namespace: kube-system
spec:
selector:
k8s-app: traefik-ingress-lb
ports:
- protocol: TCP
port: 80
name: web
- protocol: TCP
port: 8080
name: admin

View File

@ -0,0 +1,70 @@
{{ if eq .Values.managedBy "Helm" }}
apiVersion: v1
kind: Pod
metadata:
name: ipfs-pod
namespace: default
labels:
app.kubernetes.io/name: ipfs
spec:
containers:
- name: ipfs
image: ipfs/kubo
ports:
- containerPort: 5001
- containerPort: 8080
volumeMounts:
- name: ipfs-pvc
mountPath: /data/ipfs
restartPolicy: OnFailure
volumes:
- name: ipfs-pvc
persistentVolumeClaim:
claimName: ipfs-pvc
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: ipfs
namespace: default
annotations:
meta.helm.sh/release-name: fp
meta.helm.sh/release-namespace: default
labels:
app.kubernetes.io/managed-by: {{ .Values.managedBy }}
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 40Gi
storageClassName: {{ .Values.storageClassName }}
apiVersion: v1
kind: Service
metadata:
name: ipfs-service
namespace: default
annotations:
meta.helm.sh/release-name: fp
meta.helm.sh/release-namespace: default
labels:
app.kubernetes.io/managed-by: {{ .Values.managedBy }}
spec:
selector:
app.kubernetes.io/name: ipfs
ports:
- name: gateway
protocol: TCP
port: 8080
targetPort: 8080
- name: api
protocol: TCP
port: 5001
targetPort: 5001
{{ end }}

View File

@ -2,7 +2,7 @@ apiVersion: v1
kind: Service kind: Service
metadata: metadata:
name: link2cid name: link2cid
namespace: default namespace: futureporn
spec: spec:
selector: selector:
app: link2cid app: link2cid
@ -22,7 +22,7 @@ apiVersion: apps/v1
kind: Deployment kind: Deployment
metadata: metadata:
name: link2cid name: link2cid
namespace: default namespace: futureporn
spec: spec:
selector: selector:
matchLabels: matchLabels:
@ -54,10 +54,10 @@ apiVersion: v1
kind: PersistentVolumeClaim kind: PersistentVolumeClaim
metadata: metadata:
name: link2cid name: link2cid
namespace: default namespace: futureporn
annotations: annotations:
meta.helm.sh/release-name: fp meta.helm.sh/release-name: fp
meta.helm.sh/release-namespace: default meta.helm.sh/release-namespace: futureporn
labels: labels:
app.kubernetes.io/managed-by: {{ .Values.managedBy }} app.kubernetes.io/managed-by: {{ .Values.managedBy }}
spec: spec:
@ -75,7 +75,7 @@ apiVersion: networking.k8s.io/v1
kind: Ingress kind: Ingress
metadata: metadata:
name: link2cid-ingress name: link2cid-ingress
namespace: default namespace: futureporn
annotations: annotations:
kubernetes.io/ingress.class: "nginx" kubernetes.io/ingress.class: "nginx"
nginx.ingress.kubernetes.io/ssl-redirect: "true" nginx.ingress.kubernetes.io/ssl-redirect: "true"

View File

@ -0,0 +1,47 @@
{{ if eq .Values.managedBy "tilt" }}
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: ngrok
namespace: futureporn
annotations:
kubernetes.io/ingress.class: ngrok
k8s.ngrok.com/namespace: futureporn
k8s.ngrok.com/service: ngrok
spec:
ingressClassName: ngrok
rules:
- host: "{{ .Values.ngrok.hostname }}"
http:
paths:
- path: /echo
pathType: Prefix
backend:
service:
name: echo
port:
number: 8080
- path: /next
pathType: Prefix
backend:
service:
name: next
port:
number: 3000
- path: /strapi
pathType: Prefix
backend:
service:
name: strapi
port:
number: 1337
# - path: /snake
# pathType: Prefix
# backend:
# service:
# name: snake
# port:
# number: 8080
{{ end }}

View File

@ -0,0 +1,65 @@
apiVersion: v1
kind: Service
metadata:
name: realtime
namespace: futureporn
spec:
selector:
app.kubernetes.io/name: realtime
ports:
- name: realtime
port: 80
targetPort: 5535
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: realtime
namespace: futureporn
labels:
app: realtime
spec:
replicas: 1
selector:
matchLabels:
app: realtime
template:
metadata:
labels:
app: realtime
spec:
containers:
- name: realtime
image: "{{ .Values.realtime.containerName }}"
ports:
- containerPort: 5535
{{ if eq .Values.managedBy "Helm" }}
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: realtime
namespace: futureporn
annotations:
kubernetes.io/ingress.class: nginx
cert-manager.io/cluster-issuer: letsencrypt-staging
spec:
tls:
- secretName: realtime-tls
hosts:
- realtime.futureporn.net
rules:
- host: realtime.futureporn.net
http:
paths:
- path: /
pathType: Prefix
backend:
service:
name: realtime
port:
number: 5535
{{ end }}

View File

@ -0,0 +1,35 @@
apiVersion: v1
kind: Service
metadata:
name: snake
namespace: futureporn
spec:
ports:
- name: http
port: 8080
targetPort: 8080
selector:
app: snake
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: snake
namespace: futureporn
spec:
replicas: 2
selector:
matchLabels:
app: snake
template:
metadata:
labels:
app: snake
spec:
containers:
- name: snake
image: thoschu/de.schulte360.web.snake
ports:
- name: http
containerPort: 8080

1
charts/fp/templates/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
strapi-app.yaml

View File

@ -1,22 +0,0 @@
apiVersion: v1
kind: Pod
metadata:
name: ipfs-pod
namespace: default
labels:
app.kubernetes.io/name: ipfs
spec:
containers:
- name: ipfs
image: ipfs/kubo
ports:
- containerPort: 5001
- containerPort: 8080
volumeMounts:
- name: ipfs-pvc
mountPath: /data/ipfs
restartPolicy: OnFailure
volumes:
- name: ipfs-pvc
persistentVolumeClaim:
claimName: ipfs-pvc

View File

@ -1,18 +0,0 @@
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: ipfs-pvc
namespace: default
annotations:
meta.helm.sh/release-name: fp
meta.helm.sh/release-namespace: default
labels:
app.kubernetes.io/managed-by: {{ .Values.managedBy }}
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 40Gi
storageClassName: {{ .Values.storageClassName }}

View File

@ -1,23 +0,0 @@
apiVersion: v1
kind: Service
metadata:
name: ipfs-service
namespace: default
annotations:
meta.helm.sh/release-name: fp
meta.helm.sh/release-namespace: default
labels:
app.kubernetes.io/managed-by: {{ .Values.managedBy }}
spec:
selector:
app.kubernetes.io/name: ipfs
ports:
- name: gateway
protocol: TCP
port: 8080
targetPort: 8080
- name: api
protocol: TCP
port: 5001
targetPort: 5001

View File

@ -1,22 +0,0 @@
---
apiVersion: cert-manager.io/v1
kind: ClusterIssuer
metadata:
name: letsencrypt-prod
spec:
acme:
# server: https://acme-staging-v02.api.letsencrypt.org/directory
server: https://acme-v02.api.letsencrypt.org/directory
email: {{ .Values.adminEmail }}
privateKeySecretRef:
name: letsencrypt-prod
solvers:
- dns01:
webhook:
groupName: acme.vultr.com
solverName: vultr
config:
apiKeySecretRef:
key: apiKey
name: vultr

View File

@ -1,3 +1,4 @@
{{ if eq .Values.managedBy "Helm" }}
apiVersion: cert-manager.io/v1 apiVersion: cert-manager.io/v1
kind: ClusterIssuer kind: ClusterIssuer
metadata: metadata:
@ -21,3 +22,28 @@ spec:
apiKeySecretRef: apiKeySecretRef:
key: apiKey key: apiKey
name: vultr-credentials name: vultr-credentials
---
apiVersion: cert-manager.io/v1
kind: ClusterIssuer
metadata:
name: letsencrypt-prod
spec:
acme:
# server: https://acme-staging-v02.api.letsencrypt.org/directory
server: https://acme-v02.api.letsencrypt.org/directory
email: {{ .Values.adminEmail }}
privateKeySecretRef:
name: letsencrypt-prod
solvers:
- dns01:
webhook:
groupName: acme.vultr.com
solverName: vultr
config:
apiKeySecretRef:
key: apiKey
name: vultr
{{ end }}

View File

@ -0,0 +1,65 @@
apiVersion: v1
kind: Service
metadata:
name: next
namespace: futureporn
spec:
selector:
app.kubernetes.io/name: next
ports:
- name: web
port: 3000
targetPort: 3000
---
apiVersion: v1
kind: Pod
metadata:
name: next
namespace: futureporn
labels:
app.kubernetes.io/name: next
spec:
containers:
- name: next
image: "{{ .Values.next.containerName }}"
env:
- name: HOSTNAME
value: 0.0.0.0
ports:
- name: web
containerPort: 3000
resources: {}
restartPolicy: OnFailure
{{ if eq .Values.managedBy "Helm" }}
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: next
namespace: futureporn
annotations:
kubernetes.io/ingress.class: nginx
cert-manager.io/cluster-issuer: "{{ .Values.next.certIssuer }}"
spec:
backend:
serviceName: next
servicePort: 3000
tls:
- secretName: next-tls
hosts:
- "{{ .Values.next.hostname }}"
rules:
- host: "{{ .Values.next.hostname }}"
http:
paths:
- path: /
pathType: Prefix
backend:
service:
name: next
port:
number: 3000
{{ end }}

View File

@ -0,0 +1,53 @@
apiVersion: v1
kind: Service
metadata:
name: pgadmin
namespace: futureporn
spec:
selector:
app.kubernetes.io/name: pgadmin
ports:
- name: web
protocol: TCP
port: 5050
targetPort: 5050
status:
loadBalancer: {}
---
apiVersion: v1
kind: Pod
metadata:
name: pgadmin
namespace: futureporn
labels:
app.kubernetes.io/name: pgadmin
spec:
containers:
- name: pgadmin
image: dpage/pgadmin4
ports:
- containerPort: 5050
resources:
limits:
cpu: 500m
memory: 1Gi
env:
- name: PGADMIN_LISTEN_PORT
value: '5050'
- name: POSTGRES_PASSWORD
valueFrom:
secretKeyRef:
name: postgres
key: password
- name: PGADMIN_DEFAULT_PASSWORD
valueFrom:
secretKeyRef:
name: pgadmin
key: defaultPassword
- name: PGADMIN_DEFAULT_EMAIL
valueFrom:
secretKeyRef:
name: pgadmin
key: defaultEmail
restartPolicy: OnFailure

View File

@ -0,0 +1,101 @@
# In development, we need a piko agent
{{ if eq .Values.managedBy "tilt" }}
{{ end }}
# In production, we need a piko server
{{ if eq .Values.managedBy "Helm" }}
---
apiVersion: v1
kind: Service
metadata:
name: piko
namespace: futureporn
labels:
app: piko
spec:
ports:
- port: 8000
name: proxy
- port: 8001
name: upstream
- port: 8002
name: admin
- port: 8003
name: gossip
clusterIP: None
selector:
app: piko
---
apiVersion: v1
kind: ConfigMap
metadata:
name: server-config
data:
server.yaml: |
cluster:
node_id_prefix: ${POD_NAME}-
join:
- piko
---
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: piko
spec:
selector:
matchLabels:
app: piko
serviceName: "piko"
replicas: 3
template:
metadata:
labels:
app: piko
spec:
terminationGracePeriodSeconds: 60
containers:
- name: piko
image: my-repo/piko:latest
ports:
- containerPort: 8000
name: proxy
- containerPort: 8001
name: upstream
- containerPort: 8002
name: admin
- containerPort: 8003
name: gossip
args:
- server
- --config.path
- /config/server.yaml
- --config.expand-env
resources:
limits:
cpu: 250m
ephemeral-storage: 1Gi
memory: 1Gi
requests:
cpu: 250m
ephemeral-storage: 1Gi
memory: 1Gi
env:
- name: POD_NAME
valueFrom:
fieldRef:
fieldPath: metadata.name
volumeMounts:
- name: config
mountPath: "/config"
readOnly: true
volumes:
- name: config
configMap:
name: server-config
items:
- key: "server.yaml"
path: "server.yaml"
{{ end }}

View File

@ -0,0 +1,70 @@
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
namespace: futureporn
name: postgres
annotations:
tilt.dev/down-policy: keep
spec:
accessModes:
- ReadWriteOnce
persistentVolumeReclaimPolicy: Retain
resources:
requests:
storage: 40Gi
storageClassName: {{ .Values.storageClassName }}
---
apiVersion: v1
kind: Service
metadata:
namespace: futureporn
name: postgres
annotations:
tilt.dev/down-policy: keep
spec:
selector:
app.kubernetes.io/name: postgres
ports:
- name: db
protocol: TCP
port: 5432
targetPort: 5432
status:
loadBalancer: {}
---
apiVersion: v1
kind: Pod
metadata:
namespace: futureporn
name: postgres
labels:
app.kubernetes.io/name: postgres
annotations:
tilt.dev/down-policy: keep
spec:
containers:
- name: postgres
image: postgres:16.0
env:
- name: POSTGRES_PASSWORD
valueFrom:
secretKeyRef:
name: postgres
key: password
ports:
- containerPort: 5432
resources:
limits:
cpu: 500m
memory: 1Gi
volumeMounts:
- name: postgres
mountPath: /data/postgres
restartPolicy: OnFailure
volumes:
- name: postgres
persistentVolumeClaim:
claimName: postgres

View File

@ -1,3 +1,5 @@
{{ if eq .Values.managedBy "Helm" }}
apiVersion: rbac.authorization.k8s.io/v1 apiVersion: rbac.authorization.k8s.io/v1
kind: Role kind: Role
metadata: metadata:
@ -23,3 +25,4 @@ roleRef:
name: cert-manager-webhook-vultr-secret-reader name: cert-manager-webhook-vultr-secret-reader
apiGroup: rbac.authorization.k8s.io apiGroup: rbac.authorization.k8s.io
{{ end }}

View File

@ -0,0 +1,108 @@
apiVersion: v1
kind: Service
metadata:
name: scout
namespace: futureporn
spec:
selector:
app.kubernetes.io/name: scout
ports:
- name: web
port: 3000
targetPort: 3000
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: scout
namespace: futureporn
labels:
app: scout
spec:
replicas: 1
selector:
matchLabels:
app: scout
template:
metadata:
labels:
app: scout
spec:
containers:
- name: scout
image: "{{ .Values.scout.containerName }}"
ports:
- containerPort: 5000
env:
- name: POSTGRES_REALTIME_CONNECTION_STRING
valueFrom:
secretKeyRef:
name: realtime
key: postgresRealtimeConnectionString
- name: STRAPI_URL
value: https://strapi.futureporn.svc.cluster.local
- name: SCOUT_RECENTS_TOKEN
valueFrom:
secretKeyRef:
name: scout
key: recentsToken
- name: SCOUT_IMAP_SERVER
valueFrom:
secretKeyRef:
name: scout
key: imapServer
- name: SCOUT_IMAP_PORT
valueFrom:
secretKeyRef:
name: scout
key: imapPort
- name: SCOUT_IMAP_USERNAME
valueFrom:
secretKeyRef:
name: scout
key: imapUsername
- name: SCOUT_IMAP_PASSWORD
valueFrom:
secretKeyRef:
name: scout
key: imapPassword
- name: SCOUT_IMAP_ACCESS_TOKEN
valueFrom:
secretKeyRef:
name: scout
key: imapAccessToken
- name: SCOUT_STRAPI_API_KEY
valueFrom:
secretKeyRef:
name: scout
key: strapiApiKey
{{ if eq .Values.managedBy "Helm" }}
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: scout
namespace: futureporn
annotations:
kubernetes.io/ingress.class: nginx
cert-manager.io/cluster-issuer: letsencrypt-staging
spec:
tls:
- secretName: scout-tls
hosts:
- scout.sbtp.xyz
rules:
- host: scout.sbtp.xyz
http:
paths:
- path: /
pathType: Prefix
backend:
service:
name: scout
port:
number: 3000
{{ end }}

View File

@ -0,0 +1,182 @@
apiVersion: v1
kind: Service
metadata:
name: strapi
namespace: futureporn
spec:
selector:
app.kubernetes.io/name: strapi
ports:
- name: http
port: 1339
targetPort: http
protocol: TCP
---
apiVersion: v1
kind: Pod
metadata:
name: strapi
namespace: futureporn
labels:
app.kubernetes.io/name: strapi
spec:
containers:
- name: strapi
image: "{{ .Values.strapi.containerName }}"
ports:
- name: http
containerPort: 1339
env:
- name: ADMIN_JWT_SECRET
valueFrom:
secretKeyRef:
name: strapi
key: adminJwtSecret
- name: API_TOKEN_SALT
valueFrom:
secretKeyRef:
name: strapi
key: apiTokenSalt
- name: APP_KEYS
valueFrom:
secretKeyRef:
name: strapi
key: appKeys
- name: DATABASE_URL
valueFrom:
secretKeyRef:
name: strapi
key: databaseUrl
- name: CDN_BUCKET_USC_URL
valueFrom:
secretKeyRef:
name: strapi
key: cdnBucketUscUrl
- name: DATABASE_CLIENT
value: postgres
- name: DATABASE_HOST
value: postgres.futureporn.svc.cluster.local
- name: DATABASE_NAME
value: futureporn-strapi
- name: JWT_SECRET
valueFrom:
secretKeyRef:
name: strapi
key: jwtSecret
- name: MUX_PLAYBACK_RESTRICTION_ID
valueFrom:
secretKeyRef:
name: strapi
key: muxPlaybackRestrictionId
- name: MUX_SIGNING_KEY_ID
valueFrom:
secretKeyRef:
name: strapi
key: muxSigningKeyId
- name: MUX_SIGNING_KEY_PRIVATE_KEY
valueFrom:
secretKeyRef:
name: strapi
key: muxSigningKeyPrivateKey
- name: NODE_ENV
value: production
- name: S3_USC_BUCKET_APPLICATION_KEY
valueFrom:
secretKeyRef:
name: strapi
key: s3UscBucketApplicationKey
- name: S3_USC_BUCKET_ENDPOINT
valueFrom:
secretKeyRef:
name: strapi
key: s3UscBucketEndpoint
- name: S3_USC_BUCKET_KEY_ID
valueFrom:
secretKeyRef:
name: strapi
key: s3UscBucketKeyId
- name: S3_USC_BUCKET_NAME
valueFrom:
secretKeyRef:
name: strapi
key: s3UscBucketName
- name: S3_USC_BUCKET_REGION
valueFrom:
secretKeyRef:
name: strapi
key: s3UscBucketRegion
- name: SENDGRID_API_KEY
valueFrom:
secretKeyRef:
name: strapi
key: sendgridApiKey
- name: STRAPI_URL
value: "{{ .Values.strapi.url }}"
- name: TRANSFER_TOKEN_SALT
valueFrom:
secretKeyRef:
name: strapi
key: transferTokenSalt
- name: PORT
value: "{{ .Values.strapi.port }}"
resources:
limits:
cpu: 1000m
memory: 2Gi
restartPolicy: Always
# ---
# apiVersion: v1
# kind: PersistentVolumeClaim
# metadata:
# name: strapi
# namespace: futureporn
# annotations:
# meta.helm.sh/release-name: fp
# meta.helm.sh/release-namespace: futureporn
# labels:
# app.kubernetes.io/managed-by: {{ .Values.managedBy }}
# spec:
# accessModes:
# - ReadWriteOnce
# resources:
# requests:
# storage: 100Gi
# storageClassName: {{ .Values.storageClassName }}
{{ if eq .Values.managedBy "Helm" }}
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: strapi
namespace: futureporn
annotations:
sbtp.xyz/managed-by: "{{ .Values.managedBy }}"
kubernetes.io/ingress.class: nginx
cert-manager.io/cluster-issuer: "{{ .Values.strapi.certIssuer }}"
spec:
ingressClassName: "{{ .Values.strapi.ingressClassName }}"
backend:
serviceName: strapi
servicePort: 1339
tls:
- secretName: strapi-tls
hosts:
- "{{ .Values.strapi.hostname }}"
rules:
- host: "{{ .Values.strapi.hostname }}"
http:
paths:
- path: /
pathType: Prefix
backend:
service:
name: strapi
port:
number: 1339
{{ end }}

View File

@ -1,3 +1,5 @@
{{ if eq .Values.managedBy "Helm" }}
apiVersion: networking.k8s.io/v1 apiVersion: networking.k8s.io/v1
kind: Ingress kind: Ingress
metadata: metadata:
@ -26,3 +28,5 @@ spec:
- hosts: - hosts:
- windmill2.sbtp.xyz - windmill2.sbtp.xyz
secretName: windmill-tls secretName: windmill-tls
{{ end }}

View File

@ -1,12 +1,26 @@
# storageClassName: csi-hostpath-sc # used by minikube # storageClassName: csi-hostpath-sc # used by minikube
storageClassName: standard # used by Kind storageClassName: standard # used by Kind
managedBy: tilt
link2cid: link2cid:
containerName: fp/link2cid containerName: fp/link2cid
next: next:
containerName: fp/next containerName: fp/next
certIssuer: letsencrypt-staging
hostname: next.futureporn.svc.cluster.local
capture:
containerName: fp/capture
scout:
containerName: fp/scout
pubsubServerUrl: https://realtime.futureporn.svc.cluster.local/faye
strapi: strapi:
containerName: fp/strapi containerName: fp/strapi
port: 1337 port: 1339
url: http://localhost:1337 url: http://localhost:1339
managedBy: Dildo certIssuer: letsencrypt-staging
hostname: strapi.futureporn.svc.cluster.local
ingressClassName: ngrok
ngrok:
hostname: grateful-engaging-cicada.ngrok-free.app
realtime:
containerName: fp/realtime
adminEmail: cj@futureporn.net adminEmail: cj@futureporn.net

View File

@ -1,12 +1,22 @@
storageClassName: vultr-block-storage-hdd storageClassName: vultr-block-storage-hdd
link2cid: link2cid:
containerName: gitea.futureporn.net/futureporn/link2cid:latest containerName: gitea.futureporn.net/futureporn/link2cid:latest
scout:
containerName: gitea.futureporn.net/futureporn/scout:latest
pubsubServerUrl: https://realtime.futureporn.net/faye
next: next:
containerName: sjc.vultrcr.com/fpcontainers/next containerName: gitea.futureporn.net/futureporn/next:latest
certIssuer: letsencrypt-staging
hostname: next.sbtp.xyz
capture:
containerName: gitea.futureporn.net/futureporn/capture:latest
strapi: strapi:
containerName: sjc.vultrcr.com/fpcontainers/strapi containerName: sjc.vultrcr.com/fpcontainers/strapi
port: 1337 port: 1339
url: https://portal.futureporn.net url: https://portal.futureporn.net
certIssuer: letsencrypt-prod
hostname: strapi.sbtp.xyz
ingressClassName: nginx
managedBy: Helm managedBy: Helm
adminEmail: cj@futureporn.net adminEmail: cj@futureporn.net
extraArgs: extraArgs:

35
d.capture.dockerfile Normal file
View File

@ -0,0 +1,35 @@
FROM node:18-alpine
# Install dependencies only when needed
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
RUN corepack enable && corepack prepare pnpm@latest --activate
# Enable `pnpm add --global` on Alpine Linux by setting
# home location environment variable to a location already in $PATH
# https://github.com/pnpm/pnpm/issues/784#issuecomment-1518582235
ENV PNPM_HOME=/usr/local/bin
# update and install latest dependencies, add dumb-init package
# add a non root user
RUN apk update && apk upgrade && apk add dumb-init ffmpeg make gcc g++ python3
WORKDIR /app
# Copy and install the dependencies for the project
COPY ./packages/capture/package.json ./packages/capture/pnpm-lock.yaml ./
# Copy all other project files to working directory
COPY ./packages/capture .
# Run the next build process and generate the artifacts
RUN pnpm i;
# expose 3000 on container
EXPOSE 3000
# set app host ,port and node env
ENV HOSTNAME=0.0.0.0 PORT=3000 NODE_ENV=production
# start the app with dumb init to spawn the Node.js runtime process
# with signal support
CMD [ "dumb-init", "node", "index.js" ]

14
d.realtime.dockerfile Normal file
View File

@ -0,0 +1,14 @@
FROM node:20-alpine
WORKDIR /app
# pnpm via corepack; PNPM_HOME must be on PATH for global binaries
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
RUN apk update
ENV NODE_ENV=production
# prime the pnpm store from the lockfile before copying sources (layer caching)
COPY pnpm-lock.yaml ./
RUN pnpm fetch
COPY ./packages/realtime /app
ENTRYPOINT ["pnpm"]
CMD ["run", "start"]

26
d.scout.dockerfile Normal file
View File

@ -0,0 +1,26 @@
FROM node:20-alpine AS base
# pnpm via corepack; PNPM_HOME must be on PATH
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
RUN apk update

# build stage: install the whole workspace, then carve out a
# production-only deployment of the scout package with `pnpm deploy`
FROM base AS build
ENV NODE_ENV=production
COPY . /usr/src/app
WORKDIR /usr/src/app
RUN mkdir -p /prod/scout
# cache mount keeps the pnpm store between builds
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
RUN ls -la ./packages
RUN pnpm deploy --filter=@futureporn/scout --prod /prod/scout
# COPY pnpm-lock.yaml ./
# RUN pnpm fetch
# COPY ./packages/scout /app

# runtime stage: ships only the deployed scout package
FROM base AS scout
COPY --from=build /prod/scout /app
WORKDIR /app
RUN ls -la
ENTRYPOINT ["pnpm"]
CMD ["run", "start"]

19
d.strapi-app.dockerfile Normal file
View File

@ -0,0 +1,19 @@
FROM node:18-alpine3.18
# Installing libvips-dev for sharp Compatibility
RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev nasm bash vips-dev git
# overridable at build time: --build-arg NODE_ENV=production
ARG NODE_ENV=development
ENV NODE_ENV=${NODE_ENV}
WORKDIR /opt/
# install deps from the lockfile before copying sources (layer caching)
COPY ./packages/strapi-app/package.json ./packages/strapi-app/yarn.lock ./
RUN yarn global add node-gyp
RUN yarn config set network-timeout 600000 -g && yarn install
# make locally installed binaries (strapi CLI) resolvable
ENV PATH /opt/node_modules/.bin:$PATH
WORKDIR /opt/app
COPY ./packages/strapi-app/ .
RUN chown -R node:node /opt/app
USER node
RUN ["yarn", "build"]
# NOTE(review): 1338 is non-default for Strapi (1337) -- confirm the app config matches
EXPOSE 1338
CMD ["yarn", "develop", "--debug"]

24
d.strapi.dockerfile Normal file
View File

@ -0,0 +1,24 @@
# base stage: node + pnpm (via corepack) + native build deps for sharp
FROM node:18-alpine AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
# Installing libvips-dev for sharp Compatibility
RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev nasm bash vips-dev git
ARG NODE_ENV=development
ENV NODE_ENV=${NODE_ENV}

# build stage: install deps from the lockfile, then copy sources
FROM base AS build
WORKDIR /usr/src/app/
# BUGFIX: a COPY with multiple sources requires the destination to end
# with '/'; the previous bare '.' destination fails the build.
COPY ./packages/strapi/package.json ./packages/strapi/pnpm-lock.yaml ./
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
COPY ./packages/strapi/ .

# dev stage: run the strapi dev server as the unprivileged node user
FROM build AS dev
WORKDIR /app
ENV PATH /app/node_modules/.bin:$PATH
COPY --from=build /usr/src/app/ .
RUN chown -R node:node /app
USER node
RUN ["pnpm", "run", "build"]
EXPOSE 1339
CMD ["pnpm", "run", "dev"]

File diff suppressed because it is too large Load Diff

153
packages/capture/.gitignore vendored Normal file
View File

@ -0,0 +1,153 @@
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions
# Created by https://www.toptal.com/developers/gitignore/api/node
# Edit at https://www.toptal.com/developers/gitignore?templates=node
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
### Node Patch ###
# Serverless Webpack directories
.webpack/
# Optional stylelint cache
# SvelteKit build / generate output
.svelte-kit
# End of https://www.toptal.com/developers/gitignore/api/node

View File

@ -0,0 +1,25 @@
# Capture
## Dev notes
### youtube-dl end of stream output
```
[https @ 0x5646887f1580] Opening 'https://edge11-lax.live.mmcdn.com/live-hls/amlst:hotfallingdevil-sd-fdf87e5b6c880e52d38e8c94f8ebf8728c980a91d56fb4ace13748ba59012336_trns_h264/chunklist_w881713853_b5128000_t64RlBTOjMwLjA=.m3u8' for reading
[hls @ 0x564687dd0980] Skip ('#EXT-X-VERSION:4')
[hls @ 0x564687dd0980] Skip ('#EXT-X-DISCONTINUITY-SEQUENCE:0')
[hls @ 0x564687dd0980] Skip ('#EXT-X-PROGRAM-DATE-TIME:2023-01-31T17:48:45.947+00:00')
[https @ 0x5646880bf880] Opening 'https://edge11-lax.live.mmcdn.com/live-hls/amlst:hotfallingdevil-sd-fdf87e5b6c880e52d38e8c94f8ebf8728c980a91d56fb4ace13748ba59012336_trns_h264/media_w881713853_b5128000_t64RlBTOjMwLjA=_18912.ts' for reading
[https @ 0x564688097d00] Opening 'https://edge11-lax.live.mmcdn.com/live-hls/amlst:hotfallingdevil-sd-fdf87e5b6c880e52d38e8c94f8ebf8728c980a91d56fb4ace13748ba59012336_trns_h264/media_w881713853_b5128000_t64RlBTOjMwLjA=_18913.ts' for reading
[https @ 0x5646887f1580] Opening 'https://edge11-lax.live.mmcdn.com/live-hls/amlst:hotfallingdevil-sd-fdf87e5b6c880e52d38e8c94f8ebf8728c980a91d56fb4ace13748ba59012336_trns_h264/chunklist_w881713853_b5128000_t64RlBTOjMwLjA=.m3u8' for reading
[https @ 0x5646886e8580] HTTP error 403 Forbidden
[hls @ 0x564687dd0980] keepalive request failed for 'https://edge11-lax.live.mmcdn.com/live-hls/amlst:hotfallingdevil-sd-fdf87e5b6c880e52d38e8c94f8ebf8728c980a91d56fb4ace13748ba59012336_trns_h264/chunklist_w881713853_b5128000_t64RlBTOjMwLjA=.m3u8' with error: 'Server returned 403 Forbidden (access denied)' when parsing playlist
[https @ 0x5646886ccfc0] HTTP error 403 Forbidden
[hls @ 0x564687dd0980] Failed to reload playlist 0
[https @ 0x5646886bf680] HTTP error 403 Forbidden
[hls @ 0x564687dd0980] Failed to reload playlist 0
frame= 5355 fps= 31 q=-1.0 Lsize= 71404kB time=00:02:58.50 bitrate=3277.0kbits/s speed=1.02x
video:68484kB audio:2790kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.181873%
[ffmpeg] Downloaded 73117881 bytes
[download] 100% of 69.73MiB in 02:57
```

155
packages/capture/index.js Executable file
View File

@ -0,0 +1,155 @@
#!/usr/bin/env node
// import Capture from './src/Capture.js'
// import Video from './src/Video.js'
import dotenv from 'dotenv'
dotenv.config()
import { createId } from '@paralleldrive/cuid2'
import os from 'os'
import fs from 'node:fs'
import { loggerFactory } from "./src/logger.js"
import { verifyStorage } from './src/disk.js'
import faye from 'faye'
import { record, assertDependencyDirectory, checkFFmpeg } from './src/record.js'
import fastq from 'fastq'
import pRetry from 'p-retry';
import Fastify from 'fastify';
// One fastq work queue per chaturbate room, keyed by room name.
const workQueues = new Map();

/**
 * fastq worker: record a single livestream playlist to disk, retrying
 * via p-retry before giving up, then re-check disk capacity.
 *
 * @param {Object} task - { appContext, playlistUrl, roomName }
 * @param {Function} done - fastq completion callback
 */
async function captureTask (task, done) {
  const { appContext, playlistUrl, roomName } = task;
  const attemptDownload = async () => {
    const exitCode = await record(appContext, playlistUrl, roomName);
    if (exitCode !== 0) throw new Error('ffmpeg exited irregularly (return code was other than zero)');
  };
  try {
    await pRetry(attemptDownload, {
      retries: 3,
      onFailedAttempt: (error) => {
        appContext.logger.log({ level: 'error', message: `downloadStream attempt ${error.attemptNumber} failed. There are ${error.retriesLeft} retries left.` });
      },
    });
  } catch (e) {
    // All retries exhausted. The stream may simply be over, or the
    // playlist URL may have changed. Other tasks may still be queued,
    // so log and keep the worker alive.
    appContext.logger.log({ level: 'error', message: `downloadStream exhausted all retries.` });
    appContext.logger.log({ level: 'error', message: e });
  }
  verifyStorage(appContext);
  appContext.logger.log({ level: 'info', message: 'Capture task complete' });
  done(null, null);
}
/**
 * Start a tiny HTTP server whose only job is to answer Docker
 * health checks on GET /health.
 *
 * @param {Object} appContext - shared context; `fastify` is attached to it
 */
async function initFastify(appContext) {
  const server = Fastify({
    logger: true
  });
  appContext.fastify = server;
  // health endpoint probed by the container runtime
  server.get('/health', (_, reply) => {
    reply.send({ message: 'futureporn-capture sneed' });
  });
  // bind on the configured port; a failed bind is fatal
  server.listen({ port: appContext.env.PORT }, (err, address) => {
    if (err) {
      server.log.error(err);
      process.exit(1);
    }
  });
}
/**
 * Build the application context: validate required environment
 * variables (fail fast when absent), create the logger, read package
 * metadata, start the health-check HTTP server, and verify ffmpeg and
 * disk state.
 *
 * @returns {Promise<Object>} appContext shared by all tasks
 * @throws {Error} when a required env var is missing
 */
async function init () {
  // required environment variables
  const appEnv = [
    'FUTUREPORN_WORKDIR',
    'DOWNLOADER_UA',
    'PORT'
  ]
  const logger = loggerFactory({
    service: 'futureporn/capture'
  })
  const appContext = {
    env: appEnv.reduce((acc, ev) => {
      if (typeof process.env[ev] === 'undefined') throw new Error(`${ev} is undefined in env`);
      acc[ev] = process.env[ev];
      return acc;
    }, {}),
    logger,
    pkg: JSON.parse(fs.readFileSync('./package.json', { encoding: 'utf-8'})),
    // BUGFIX: os.hostname is a function; without the call parens the
    // workerId contained the function's source text, not the hostname.
    workerId: `${os.hostname()}-${createId()}`,
  };
  await initFastify(appContext);
  assertDependencyDirectory(appContext)
  await checkFFmpeg(appContext)
  verifyStorage(appContext)
  return appContext
}
/**
 * Entry point: build the app context, connect to the realtime pubsub
 * server, and enqueue a capture task whenever a 'start' signal with a
 * room and an https playlist URL arrives.
 */
async function main () {
  const appContext = await init()
  appContext.logger.log({ level: 'info', message: `capture version: ${appContext.pkg.version}` })
  appContext.logger.log({ level: 'info', message: `my capture directory is ${appContext.env.FUTUREPORN_WORKDIR}` })
  // connect to realtime server
  // BUGFIX: appContext.pubsub was never assigned anywhere, so the
  // subscribe call below threw a TypeError. faye was already imported;
  // create the client here. Default matches the deployed faye endpoint.
  appContext.pubsub = new faye.Client(process.env.PUBSUB_SERVER_URL ?? 'https://realtime.futureporn.net/faye')
  appContext.pubsub.subscribe('/signals', (message) => {
    appContext.logger.log({ level: 'debug', message: JSON.stringify(message) })
    if (
      (message?.signal === 'start') &&
      (message?.room) &&
      // BUGFIX: optional-chain url too; a signal without a url used to throw
      (message?.url?.startsWith('https://'))
    ) {
      const roomName = message.room;
      const playlistUrl = message.url;
      // Check if a work queue for the room already exists, otherwise create a new one
      if (!workQueues.has(roomName)) {
        workQueues.set(roomName, fastq(captureTask, 1));
      }
      // Push the task to the corresponding work queue
      workQueues.get(roomName).push({ appContext, playlistUrl, roomName });
    }
  })
}
main()

View File

@ -0,0 +1,46 @@
{
"name": "futureporn-capture",
"version": "0.1.12",
"main": "index.js",
"license": "Unlicense",
"private": true,
"type": "module",
"scripts": {
"start": "node --trace-warnings index",
"test": "FUTUREPORN_WORKDIR=/home/chris/Downloads mocha",
"integration": "FUTUREPORN_WORKDIR=/home/chris/Downloads mocha ./integration/**/*.test.js",
"dev": "FUTUREPORN_WORKDIR=/home/chris/Downloads nodemon index"
},
"dependencies": {
"@paralleldrive/cuid2": "^2.1.8",
"diskusage": "^1.1.3",
"dotenv": "^16.0.3",
"execa": "^6.1.0",
"fastify": "^4.12.0",
"fastq": "^1.15.0",
"faye": "^1.4.0",
"faye-websocket": "^0.11.4",
"fluent-ffmpeg": "^2.1.2",
"https": "^1.0.0",
"ioredis": "^5.2.4",
"minimatch": "^5.1.1",
"p-retry": "^5.1.2",
"postgres": "^3.3.3",
"rxjs": "^7.8.0",
"sql": "^0.78.0",
"winston": "^3.9.0",
"youtube-dl-wrap": "git+https://github.com/insanity54/youtube-dl-wrap.git"
},
"devDependencies": {
"chai": "^4.3.7",
"cheerio": "^1.0.0-rc.12",
"mocha": "^10.2.0",
"multiformats": "^11.0.1",
"node-abort-controller": "^3.0.1",
"node-fetch": "^3.3.0",
"nodemon": "^2.0.20",
"sinon": "^15.0.1",
"sinon-chai": "^3.7.0",
"sinon-test": "^3.1.5"
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,125 @@
import Voddo from './Voddo.js'
import {loggerFactory} from 'common/logger'
const logger = loggerFactory({
service: 'futureporn/capture'
})
/**
 * Capture orchestrates the recording pipeline: Voddo downloads the
 * stream, Video concatenates the segments, Ipfs uploads the result,
 * and the vod row is written to Postgres. Worker coordination happens
 * over Postgres LISTEN/NOTIFY channels.
 */
export default class Capture {

  /**
   * @param {Object} opts
   * @param {Function} opts.sql      - postgres client (tagged template + listen/notify)
   * @param {Object}   opts.ipfs     - Ipfs wrapper used for uploads
   * @param {Object}   opts.video    - Video instance used for concatenation
   * @param {Object}   opts.voddo    - Voddo instance supervising the download
   * @param {String}   opts.workerId - identity used during VOD election
   * @param {Number}   [opts.idleTimeout] - defaults to 15 minutes (not referenced within this class)
   */
  constructor(opts) {
    this.date = opts?.date
    this.sql = opts.sql
    this.ipfs = opts.ipfs
    this.idleTimeout = opts.idleTimeout || 1000*60*15
    this.video = opts.video
    this.voddo = opts.voddo
    this.workerId = opts.workerId
    return this
  }

  /**
   * upload VOD to ipfs
   *
   * @param {String} filename
   * @return {Promise}
   * @resolves {String} cid
   */
  async upload (filename) {
    const cid = await this.ipfs.upload(filename)
    return cid
  }

  /**
   * save Vod data to db
   *
   * Also records the capture date on the instance.
   *
   * @param {String} cid       - IPFS CID of the uploaded video
   * @param {Number} timestamp - capture date (epoch ms)
   * @returns {Promise} resolves with the inserted row(s)
   */
  async save (cid, timestamp) {
    logger.log({ level: 'debug', message: `saving ${cid} \n w/ captureDate ${timestamp}` })
    this.date = timestamp
    return await this.sql`INSERT INTO vod ( "videoSrcHash", "captureDate" ) values (${cid}, ${timestamp}) returning *`
  }

  /**
   * advertise the vod segment(s) we captured.
   * futureporn/commander uses this data to elect one worker to upload the VOD
   */
  async advertise () {
    const segments = await this.voddo.getRecordedSegments()
    const streams = Voddo.groupStreamSegments(segments)
    const workerId = this.workerId
    logger.log({ level: 'debug', message: `Advertising our VOD streams(s) ${JSON.stringify({segments, streams, workerId})}` })
    this.sql.notify('capture/vod/advertisement', JSON.stringify({segments, streams, workerId}))
  }

  /**
   * Subscribe to coordination channels:
   *  - scout/stream/stop:      stream ended -> advertise our segments
   *  - commander/vod/election: if we won, process/upload our segments
   *
   * @returns {Capture} this (chainable)
   */
  listen () {
    this.sql.listen('scout/stream/stop', (data) => {
      logger.log({ level: 'debug', message: 'Scout said the stream has stopped. I will advertise the vod segment(s) I have.' })
      this.advertise()
    })
    this.sql.listen('commander/vod/election', async (data) => {
      if (data.workerId === this.workerId) {
        logger.log({ level: 'debug', message: 'Commander elected me to process/upload' })
        // NOTE(review): Voddo (in this same commit) defines getRecordedSegments(),
        // not getFilenames() -- confirm this method exists on the voddo instance.
        this.process(await this.voddo.getFilenames())
      } else {
        logger.log({ level: 'debug', message: `Commander elected ${data.workerId} to process/upload their vod segment(s)` })
      }
    })
    return this
  }

  /**
   * process video(s) after end of stream
   *
   * Concatenates segments, uploads the result to IPFS, and saves the
   * row to the db. Uses the first segment's timestamp as capture date.
   *
   * @param {String[]} filenames
   * @returns {void}
   */
  async process (filenames) {
    this.date = filenames[0].timestamp
    logger.log({ level: 'debug', message: 'concatenation in progress...' })
    const file = await this.video.concat(filenames)
    logger.log({ level: 'debug', message: `uploading ${file}` })
    const cid = await this.ipfs.upload(file)
    logger.log({ level: 'debug', message: 'db save in progress' })
    await this.save(cid, this.date)
  }

  /**
   * download a livestream
   *
   * - wires Voddo events to Postgres NOTIFY
   * - starts Voddo
   *
   * @return {void}
   */
  async download () {
    this.voddo.on('start', (data) => {
      logger.log({ level: 'debug', message: 'voddo started' })
      logger.log({ level: 'debug', message: data })
      this.sql.notify('capture/file', JSON.stringify(data))
    })
    this.voddo.on('stop', (report) => {
      logger.log({ level: 'debug', message: `Got a stop event from Voddo` })
    })
    logger.log({ level: 'debug', message: 'starting voddo' })
    this.voddo.start()
  }
}

View File

@ -0,0 +1,57 @@
import {execa} from 'execa'
import {loggerFactory} from 'common/logger'
const logger = loggerFactory({
service: 'futureporn/capture'
})
/**
 * Thin wrapper around the ipfs-cluster-ctl and ipfs CLIs.
 */
export default class Ipfs {
  /**
   * @param {Object} opts
   * @param {String} [opts.IPFS_CLUSTER_HTTP_API_MULTIADDR] - cluster API endpoint
   * @param {String} [opts.IPFS_CLUSTER_HTTP_API_USERNAME]
   * @param {String} [opts.IPFS_CLUSTER_HTTP_API_PASSWORD]
   * @param {String} [opts.ctlExecutable]  - path to ipfs-cluster-ctl
   * @param {String} [opts.ipfsExecutable] - path to ipfs
   */
  constructor(opts) {
    this.multiaddr = opts?.IPFS_CLUSTER_HTTP_API_MULTIADDR
    this.username = opts?.IPFS_CLUSTER_HTTP_API_USERNAME
    this.password = opts?.IPFS_CLUSTER_HTTP_API_PASSWORD
    this.ctlExecutable = opts?.ctlExecutable || '/usr/local/bin/ipfs-cluster-ctl'
    this.ipfsExecutable = opts?.ipfsExecutable || '/usr/local/bin/ipfs'
  }
  /**
   * Common connection/auth arguments for every ipfs-cluster-ctl call.
   * @returns {String[]}
   */
  getArgs () {
    let args = [
      '--no-check-certificate',
      '--host', this.multiaddr,
      '--basic-auth', `${this.username}:${this.password}`
    ]
    return args
  }
  /**
   * Add a file to the cluster and return its CIDv1.
   *
   * @param {String} filename
   * @param {String|false} [expiryDuration] - optional --expire-in value
   * @returns {Promise<String|undefined>} cid, or undefined on error (error is logged)
   */
  async upload (filename, expiryDuration = false) {
    try {
      // BUGFIX: was a bare `getArgs()`, a ReferenceError at runtime;
      // the method lives on the instance.
      let args = this.getArgs()
      args = args.concat([
        'add',
        '--quieter',
        // BUGFIX: pass the flag value as a string; execa arguments are strings
        '--cid-version', '1'
      ])
      if (expiryDuration) {
        args = args.concat(['--expire-in', expiryDuration])
      }
      args.push(filename)
      const { stdout } = await execa(this.ctlExecutable, args)
      return stdout
    } catch (e) {
      logger.log({ level: 'error', message: 'Error while adding file to ipfs' })
      logger.log({ level: 'error', message: e })
    }
  }
  /**
   * Compute a file's CIDv1 locally (--only-hash; nothing is published).
   *
   * @param {String} filename
   * @returns {Promise<String|undefined>} cid, or undefined on error (error is logged)
   */
  async hash (filename) {
    try {
      const { stdout } = await execa(this.ipfsExecutable, ['add', '--quiet', '--cid-version=1', '--only-hash', filename])
      return stdout
    } catch (e) {
      logger.log({ level: 'error', message: 'Error while hashing file' })
      logger.log({ level: 'error', message: e })
    }
  }
}

View File

@ -0,0 +1,68 @@
import { execa } from 'execa'
import { tmpdir } from 'os'
import path from 'node:path'
import fs from 'node:fs'
import os from 'node:os'
/**
 * Raised when ffmpeg fails to join the recorded segments into one file.
 */
export class VideoConcatError extends Error {
  constructor (msg) {
    const message = msg || 'Failed to concatenate video'
    super(message)
    this.name = 'VideoConcatError'
  }
}
/**
 * Video joins downloaded stream segments into a single mp4 using
 * ffmpeg's concat demuxer.
 */
export default class Video {
  /**
   * @param {Object}   opts
   * @param {Object[]} opts.filePaths - [{ file, timestamp }] segment descriptors
   * @param {String}   opts.cwd       - working directory for files.txt and output
   * @param {String}   [opts.room]    - room name used in the output filename
   * @param {Function} [opts.execa]   - injectable process runner (for tests)
   * @throws {Error} when filePaths or cwd is missing
   */
  constructor (opts) {
    if (typeof opts.filePaths === 'undefined') throw new Error('Video must be called with opts.filePaths');
    if (typeof opts.cwd === 'undefined') throw new Error('Video must be called with opts.cwd');
    this.filePaths = opts.filePaths
    this.cwd = opts.cwd
    this.room = opts.room || 'projektmelody'
    this.execa = opts.execa || execa
  }
  /**
   * Render the ffmpeg concat-demuxer file list, oldest segment first.
   * BUGFIX: sorts a copy; the original `.sort()` mutated this.filePaths
   * in place as a side effect of a read-only operation.
   *
   * @returns {String} newline-terminated "file '<name>'" lines
   */
  getFilesTxt () {
    return [...this.filePaths]
      .sort((a, b) => a.timestamp - b.timestamp)
      .map((d) => `file '${d.file}'`)
      .join('\n')
      .concat('\n')
  }
  /**
   * Write files.txt into cwd and return its path.
   * @returns {String}
   */
  getFilesFile () {
    const p = path.join(this.cwd, 'files.txt')
    fs.writeFileSync(
      p,
      this.getFilesTxt(),
      { encoding: 'utf-8' }
    )
    return p
  }
  /**
   * Concatenate the segments into <room>-chaturbate-<epoch>.mp4 with a
   * stream copy (no re-encode).
   *
   * @returns {Promise<String>} path of the concatenated file
   * @throws {VideoConcatError} when ffmpeg exits non-zero or is killed
   */
  async concat () {
    const target = path.join(this.cwd, `${this.room}-chaturbate-${new Date().valueOf()}.mp4`)
    const { exitCode, killed, stdout, stderr } = await this.execa('ffmpeg', [
      '-y',
      '-f', 'concat',
      '-safe', '0',
      '-i', this.getFilesFile(),
      '-c', 'copy',
      target
    ], {
      cwd: this.cwd
    });
    if (exitCode !== 0 || killed !== false) {
      throw new VideoConcatError(`exitCode:${exitCode}, killed:${killed}, stdout:${stdout}, stderr:${stderr}`);
    }
    return target
  }
}

View File

@ -0,0 +1,243 @@
import 'dotenv/config'
import YoutubeDlWrap from "youtube-dl-wrap";
import { EventEmitter } from 'node:events';
import { AbortController } from "node-abort-controller";
import { readdir, stat } from 'node:fs/promises';
import { join } from 'node:path'
import ffmpeg from 'fluent-ffmpeg'
import { loggerFactory } from 'common/logger'
const logger = loggerFactory({
service: 'futureporn/capture'
})
// template for a fresh per-session stats object
// NOTE(review): Object.assign({}, defaultStats) below makes a SHALLOW copy,
// so every "reset" shares this same `segments` array -- segments accumulate
// across resets/instances. Likely a bug; verify intended behavior.
const defaultStats = {segments:[],lastUpdatedAt:null}

/**
 * Voddo supervises a youtube-dl download of a livestream: it starts the
 * download, tracks segment stats, restarts after failures with an
 * exponential "courtesy" backoff (capped at 10 minutes), and emits
 * 'start' (new destination file) and 'stop' (report) events.
 */
export default class Voddo extends EventEmitter {
  /**
   * @param {Object} opts
   * @param {String} opts.url      - stream URL to hand to youtube-dl
   * @param {String} [opts.format] - youtube-dl format selector (default 'best')
   * @param {String} opts.cwd      - directory downloads are written to
   * @param {Object} [opts.ytdl]   - injectable YoutubeDlWrap instance
   */
  constructor(opts) {
    super()
    // placeholder timer so clearTimeout() is always safe
    this.courtesyTimer = setTimeout(() => {}, 0);
    this.retryCount = 0;
    this.url = opts.url;
    this.format = opts.format || 'best';
    this.cwd = opts.cwd;
    this.ytdlee; // event emitter for ytdlwrap
    this.stats = Object.assign({}, defaultStats);
    this.abortController = new AbortController();
    this.ytdl = opts.ytdl || new YoutubeDlWrap();
    if (process.env.YOUTUBE_DL_BINARY) this.ytdl.setBinaryPath(process.env.YOUTUBE_DL_BINARY);
  }
  /**
   * Probe a video file's duration with ffprobe.
   * @param {String} filePath
   * @returns {Promise<Number>} duration in whole milliseconds
   */
  static async getVideoLength (filePath) {
    return new Promise((resolve, reject) => {
      ffmpeg.ffprobe(filePath, function(err, metadata) {
        if (err) reject(err)
        resolve(Math.floor(metadata.format.duration*1000))
      });
    })
  }
  // greets ChatGPT
  /**
   * Group time-sorted segments into streams: segments whose gap is
   * within `threshold` (default 1h) belong to the same stream.
   * Note: sorts the caller's array in place.
   *
   * @param {Object[]} segments - [{ startTime, endTime, ... }]
   * @param {Number} [threshold] - max gap in ms between segments of one stream
   * @returns {Object[][]} array of streams (each an array of segments)
   */
  static groupStreamSegments(segments, threshold = 1000*60*60) {
    segments.sort((a, b) => a.startTime - b.startTime);
    const streams = [];
    let currentStream = [];
    for (let i = 0; i < segments.length; i++) {
      const currentSegment = segments[i];
      const previousSegment = currentStream[currentStream.length - 1];
      if (!previousSegment || currentSegment.startTime - previousSegment.endTime <= threshold) {
        currentStream.push(currentSegment);
      } else {
        streams.push(currentStream);
        currentStream = [currentSegment];
      }
    }
    streams.push(currentStream);
    return streams;
  }
  /**
   * getRecordedStreams
   *
   * get the metadata of the videos captured
   * (scans cwd for *.mp4, stats each, probes duration; caches into this.stats.segments)
   */
  async getRecordedSegments() {
    let f = []
    const files = await readdir(this.cwd).then((raw) => raw.filter((f) => /\.mp4$/.test(f) ))
    for (const file of files) {
      const filePath = join(this.cwd, file)
      const s = await stat(filePath)
      const videoDuration = await Voddo.getVideoLength(filePath)
      // ctime is used as a proxy for recording start time
      const startTime = parseInt(s.ctimeMs)
      const endTime = startTime+videoDuration
      const size = s.size
      f.push({
        startTime,
        endTime,
        file,
        size
      })
    }
    this.stats.segments = f
    return this.stats.segments
  }
  isDownloading() {
    // if there are event emitter listeners for the progress event,
    // we are probably downloading.
    // NOTE(review): listeners() returns an array, so .length is always a
    // number (never undefined) once this.ytdlee exists -- this returns true
    // whenever a ytdl emitter has ever been created. Verify intent.
    return (
      this.ytdlee?.listeners('progress').length !== undefined
    )
  }
  delayedStart() {
    // only for testing
    // retryCount=500 forces the backoff into its 10-minute cap
    this.retryCount = 500
    this.courtesyTimer = this.getCourtesyTimer(() => this.download())
  }
  start() {
    // if download is in progress, do nothing
    if (this.isDownloading()) {
      logger.log({ level: 'debug', message: 'Doing nothing because a download is in progress.' })
      return;
    }
    // if download is not in progress, start download immediately
    // reset the retryCount so the backoff timer resets to 1s between attempts
    this.retryCount = 0
    clearTimeout(this.courtesyTimer)
    // create new abort controller
    //this.abortController = new AbortController() // @todo do i need this? Can't I reuse the existing this.abortController?
    this.download()
  }
  stop() {
    logger.log({ level: 'info', message: 'Received stop(). Stopping.' })
    clearTimeout(this.courtesyTimer)
    this.abortController.abort()
  }
  /** generate a report **/
  // Summarizes the finished session (stats + why it ended) and resets
  // this.stats for the next run. reason is one of 'error'|'aborted'|'close'.
  getReport(errorMessage) {
    let report = {}
    report.stats = Object.assign({}, this.stats)
    report.error = errorMessage
    report.reason = (() => {
      if (errorMessage) return 'error';
      else if (this.abortController.signal.aborted) return 'aborted';
      else return 'close';
    })()
    // clear stats to prepare for next run
    this.stats = Object.assign({}, defaultStats)
    return report
  }
  // emit the session report as a 'stop' event
  emitReport(report) {
    logger.log({ level: 'debug', message: 'EMITTING REPORT' })
    this.emit('stop', report)
  }
  /**
   * Schedule a callback after an exponential backoff (2^retryCount
   * seconds, capped at 10 minutes) and bump retryCount.
   * @returns {NodeJS.Timeout}
   */
  getCourtesyTimer(callback) {
    // 600000ms = 10m
    const waitTime = Math.min(600000, (Math.pow(2, this.retryCount) * 1000));
    this.retryCount += 1;
    logger.log({ level: 'debug', message: `courtesyWait for ${waitTime/1000} seconds. (retryCount: ${this.retryCount})` })
    return setTimeout(callback, waitTime)
  }
  /**
   * Launch youtube-dl and wire up progress/event/error/close handlers.
   * Failures and non-aborted closes schedule an automatic restart via
   * the courtesy timer, then emit a 'stop' report.
   */
  download() {
    const handleProgress = (progress) => {
      logger.log({ level: 'debug', message:` [*] progress event` })
      this.stats.lastUpdatedAt = Date.now(),
      this.stats.totalSize = progress.totalSize
    }
    const handleError = (error) => {
      if (error?.message !== undefined && error.message.includes('Room is currently offline')) {
        logger.log({ level: 'debug', message: 'Handled an expected \'Room is offline\' error' })
      } else {
        logger.log({ level: 'error', message: 'ytdl error' })
        logger.log({ level: 'error', message: error.message })
      }
      // NOTE(review): the handler was registered with .on('youtubeDlEvent', ...),
      // but is removed here under the name 'handleYtdlEvent' -- the off() call
      // removes nothing. Verify the intended event name.
      this.ytdlee.off('progress', handleProgress)
      this.ytdlee.off('handleYtdlEvent', handleYtdlEvent)
      // restart the download after the courtesyTimeout
      this.courtesyTimer = this.getCourtesyTimer(() => this.download())
      this.emitReport(this.getReport(error.message))
    }
    const handleYtdlEvent = (type, data) => {
      // (duplicate log line preserved as-is)
      logger.log({ level: 'debug', message: `handleYtdlEvent type: ${type}, data: ${data}` })
      logger.log({ level: 'debug', message: `handleYtdlEvent type: ${type}, data: ${data}` })
      if (type === 'download' && data.includes('Destination:')) {
        let filePath = /Destination:\s(.*)$/.exec(data)[1]
        logger.log({ level: 'debug', message: `Destination file detected: ${filePath}` })
        let datum = { file: filePath, timestamp: new Date().valueOf() }
        let segments = this.stats.segments
        segments.push(datum) && segments.length > 64 && segments.shift(); // limit the size of the segments array
        this.emit('start', datum)
      } else if (type === 'ffmpeg' && data.includes('bytes')) {
        const bytes = /(\d*)\sbytes/.exec(data)[1]
        logger.log({ level: 'debug', message: `ffmpeg reports ${bytes}`})
        let mostRecentFile = this.stats.segments[this.stats.segments.length-1]
        mostRecentFile['size'] = bytes
        logger.log({ level: 'debug', message: mostRecentFile })
      }
    }
    const handleClose = () => {
      logger.log({ level: 'debug', message: 'got a close event. handling!' });
      // NOTE(review): same event-name mismatch as in handleError above.
      this.ytdlee.off('progress', handleProgress)
      this.ytdlee.off('handleYtdlEvent', handleYtdlEvent)
      // restart Voddo only if the close was not due to stop()
      if (!this.abortController.signal.aborted) {
        // restart the download after the courtesyTimeout
        this.courtesyTimer = this.getCourtesyTimer(() => this.download())
      }
      this.emitReport(this.getReport())
    }
    logger.log({ level: 'debug', message: `Downloading url:${this.url} format:${this.format}` })
    logger.log({ level: 'debug', message: JSON.stringify(this.ytdl) })
    // sanity check. ensure cwd exists
    // NOTE(review): stat here comes from fs/promises; it returns a promise and
    // ignores this callback argument -- the error branch never runs. Verify.
    stat(this.cwd, (err) => {
      if (err) logger.log({ level: 'error', message: `Error while getting cwd stats of ${this.cwd} Does it exist?` })
    })
    this.ytdlee = this.ytdl.exec(
      [this.url, '-f', this.format],
      {
        cwd: this.cwd
      },
      this.abortController.signal
    );
    this.ytdlee.on('progress', handleProgress);
    this.ytdlee.on('youtubeDlEvent', handleYtdlEvent);
    this.ytdlee.once('error', handleError);
    this.ytdlee.once('close', handleClose);
  }
}

View File

@ -0,0 +1,17 @@
import cheerio from 'cheerio'
import fetch from 'node-fetch'
/**
 * Scrape the chaturbate homepage and return one currently-listed room
 * name, chosen at random.
 *
 * @returns {Promise<String>} room name (href with slashes stripped)
 */
export async function getRandomRoom () {
  const response = await fetch('https://chaturbate.com/')
  const html = await response.text()
  const $ = cheerio.load(html)
  // every room card links via an <a data-room> anchor
  const hrefs = $('a[data-room]').map((_, el) => $(el).attr('href')).get()
  // greets https://stackoverflow.com/a/4435017/1004931
  const pick = hrefs[Math.floor(Math.random() * hrefs.length)]
  return pick.replaceAll('/', '')
}

View File

@ -0,0 +1,33 @@
import disk from 'diskusage';
/**
 * Log disk usage for the capture workdir and escalate as space runs out.
 *
 * BUGFIX: the original tested `< 80 GiB` (warn) before `< 40 GiB`
 * (critical), so the critical branch was unreachable; the most severe
 * threshold is now checked first.
 *
 * @param {Object} appContext - needs env.FUTUREPORN_WORKDIR and logger
 */
export function verifyStorage (appContext) {
  const mountPath = appContext.env.FUTUREPORN_WORKDIR
  disk.check(mountPath, (err, info) => {
    if (err) {
      appContext.logger.log({ level: 'error', message: `Error retrieving disk usage for ${mountPath}: ${err}` });
      return;
    }
    const totalSize = info.total;
    const availableSize = info.available;
    const freeSize = info.free;
    appContext.logger.log({ 'level': 'info', message: `${mountPath} Disk Usage:` });
    appContext.logger.log({ 'level': 'info', message: `Total: ${bytesToSize(totalSize)}` });
    appContext.logger.log({ 'level': 'info', message: `Free: ${bytesToSize(freeSize)}` });
    appContext.logger.log({ 'level': 'info', message: `Available: ${bytesToSize(availableSize)}` });
    // 42949672960 = 40 GiB (critical), 85899345920 = 80 GiB (warning)
    if (availableSize < 42949672960) appContext.logger.log({ 'level': 'error', message: `⚠️☠️ AVAILABLE DISK IS TOO LOW! ${bytesToSize(availableSize)}` });
    else if (availableSize < 85899345920) appContext.logger.log({ 'level': 'warn', message: `⚠️ Available disk is getting low! ${bytesToSize(availableSize)}` });
  });
}
// Helper function to convert bytes to human-readable format.
// Uses Math.log2(bytes)/10 to pick the 1024-based unit index.
export function bytesToSize(bytes) {
  const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
  if (bytes === 0) return '0 Bytes';
  // BUGFIX: clamp the unit index so inputs >= 1 PiB render with the
  // largest known unit instead of "undefined"
  const i = Math.min(sizes.length - 1, Math.floor(Math.log2(bytes) / 10));
  return `${(bytes / Math.pow(1024, i)).toFixed(2)} ${sizes[i]}`;
}

View File

@ -0,0 +1,25 @@
import winston from 'winston'
export const loggerFactory = (options) => {
const mergedOptions = Object.assign({}, {
level: 'info',
defaultMeta: { service: 'futureporn' },
format: winston.format.timestamp()
}, options)
const logger = winston.createLogger(mergedOptions);
if (process.env.NODE_ENV !== 'production') {
logger.add(new winston.transports.Console({
level: 'debug',
format: winston.format.simple()
}))
} else {
logger.add(new winston.transports.Console({
level: 'info',
format: winston.format.json()
}))
}
return logger
}

View File

@ -0,0 +1,117 @@
import { join } from 'path';
import { spawn } from 'child_process';
import fs from 'node:fs';
/**
 * Build the absolute recording path for a room:
 * <FUTUREPORN_WORKDIR>/recordings/<room>_<ISO timestamp>.ts
 *
 * @param {Object} appContext - needs env.FUTUREPORN_WORKDIR
 * @param {String} roomName
 * @returns {String}
 */
export const getFilename = (appContext, roomName) => {
  const timestamp = new Date().toISOString()
  return join(appContext.env.FUTUREPORN_WORKDIR, 'recordings', `${roomName}_${timestamp}.ts`)
}
/**
 * Create directoryPath if it does not already exist (non-recursive;
 * the parent directory must exist).
 *
 * @param {String} directoryPath
 */
export const assertDirectory = (directoryPath) => {
  const stats = fs.statSync(directoryPath, { throwIfNoEntry: false })
  if (stats === undefined) fs.mkdirSync(directoryPath)
}
/**
 * Verify ffmpeg is installed and runnable by spawning `ffmpeg -version`.
 *
 * BUGFIX: the original threw from inside the 'error' event handler,
 * which surfaces as an uncaught exception and leaves the returned
 * promise pending forever; it also rejected with a bare string. Both
 * failure paths now reject the promise with an Error.
 *
 * @param {Object} appContext - needs logger
 * @returns {Promise<void>} resolves when ffmpeg exits 0
 */
export const checkFFmpeg = async (appContext) => {
  return new Promise((resolve, reject) => {
    const childProcess = spawn('ffmpeg', ['-version']);
    childProcess.on('error', (err) => {
      appContext.logger.log({
        level: 'error',
        message: `ffmpeg -version failed, which likely means ffmpeg is not installed or not on $PATH`,
      });
      reject(new Error('ffmpeg is missing'))
    });
    childProcess.on('exit', (code) => {
      if (code !== 0) reject(new Error(`'ffmpeg -version' exited with code ${code}`))
      if (code === 0) {
        appContext.logger.log({ level: 'info', message: `ffmpeg PRESENT.` });
        resolve()
      }
    });
  })
};
/**
 * Ensure <FUTUREPORN_WORKDIR>/recordings exists, creating it (and any
 * missing parents) when absent.
 *
 * @param {Object} appContext - needs env.FUTUREPORN_WORKDIR
 */
export const assertDependencyDirectory = (appContext) => {
  // Extract the directory path from the filename
  const recordingsDir = join(appContext.env.FUTUREPORN_WORKDIR, 'recordings');
  console.log(`asserting ${recordingsDir} exists`)
  // Check if the directory exists, and create it if it doesn't
  if (fs.existsSync(recordingsDir)) return;
  fs.mkdirSync(recordingsDir, { recursive: true });
  console.log(`Created directory: ${recordingsDir}`);
}
/**
 * Record a livestream playlist to an mpegts file with ffmpeg (stream
 * copy, no re-encode).
 *
 * @param {Object} appContext  - needs env.FUTUREPORN_WORKDIR and env.DOWNLOADER_UA
 * @param {String} playlistUrl - HLS playlist to read
 * @param {String} roomName    - used to name the output file
 * @returns {Promise<Number>} ffmpeg exit code (0 on success)
 * @throws {Error} when any argument is undefined
 */
export const record = async (appContext, playlistUrl, roomName) => {
  if (appContext === undefined) throw new Error('appContext undef');
  if (playlistUrl === undefined) throw new Error('playlistUrl undef');
  if (roomName === undefined) throw new Error('roomName undef');
  const filename = getFilename(appContext, roomName);
  // BUGFIX: the log template was mangled ("$(unknown)"); interpolate the filename
  console.log(`downloading to ${filename}`)
  // example: `ffmpeg -headers "User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:105.0) Gecko/20100101 Firefox/105.0"
  //            -i ${chunkPlaylist} -c:v copy -c:a copy -movflags faststart -y -f mpegts ./my-recording.ts`
  // BUGFIX: spawn() performs no shell parsing, so the literal double
  // quotes that previously wrapped the header value were passed to
  // ffmpeg as part of the header itself; pass the raw value.
  const ffmpegProcess = spawn('ffmpeg', [
    '-headers', `User-Agent: ${appContext.env.DOWNLOADER_UA}`,
    '-i', playlistUrl,
    '-c:v', 'copy',
    '-c:a', 'copy',
    '-movflags', 'faststart',
    '-y',
    '-f', 'mpegts',
    filename
  ], {
    stdio: 'inherit'
  });
  // @todo retry/backoff on failure is handled by the caller so this
  // function stays testable; we only report the exit code here.
  return new Promise((resolve, reject) => {
    ffmpegProcess.once('exit', (code) => {
      resolve(code)
    })
  })
}
// 2^attempt seconds, expressed in milliseconds
const calculateExponentialBackoffDelay = (attemptNumber) => 1000 * 2 ** attemptNumber;
/**
 * Schedule another record() attempt after an exponential backoff delay.
 *
 * @param {Object} appContext
 * @param {String} playlistUrl
 * @param {String} roomName
 * @param {Number} [attemptNumber] - 1-based attempt counter
 * @param {Number} [maxAttempts]
 */
const retryDownload = (appContext, playlistUrl, roomName, attemptNumber = 1, maxAttempts = 3) => {
  const waitMs = calculateExponentialBackoffDelay(attemptNumber);
  appContext.logger.log({ level: 'debug', message: `Retrying download in ${waitMs / 1000} seconds...` });
  setTimeout(() => {
    console.log('Retrying download...');
    record(appContext, playlistUrl, roomName, attemptNumber + 1);
  }, waitMs);
};

View File

@ -0,0 +1,147 @@
import Video from '../src/Video.js'
import Capture from '../src/Capture.js'
import Ipfs from '../src/Ipfs.js'
import chai, { expect } from 'chai'
import { dirname } from 'path';
import { fileURLToPath } from 'url';
import path from 'node:path'
import sinon from 'sinon'
import sinonChai from 'sinon-chai'
import { CID } from 'multiformats/cid'
import Voddo from '../src/Voddo.js'
import EventEmitter from 'node:events'
import postgres from 'postgres'
chai.use(sinonChai)
const Timer = setTimeout(()=>{},0).constructor
const fixtureDate = 1581117660000
const cidFixture = 'bafybeid3mg5lzrvnmpfi5ftwhiupp7i5bgkmdo7dnlwrvklbv33telrrry'
const __dirname = dirname(fileURLToPath(import.meta.url));
// Unit tests for Capture, with Voddo/Ipfs stubbed via a shared sinon sandbox.
describe('Capture', function () {
  let clock
  const sandbox = sinon.createSandbox()
  beforeEach(() => {
    // Fake only timer functions (Date stays real); shouldAdvanceTime:false
    // means each test must tick the clock itself.
    clock = sandbox.useFakeTimers({
      toFake: ["setTimeout", "setInterval"],
      shouldAdvanceTime: false
    });
    // NOTE(review): this hook previously created and stubbed a postgres()
    // client whose result no test read — each test builds its own below.
    // Removed, because postgres() opens a real connection pool per test.
  })
  afterEach(() => {
    // sandbox.restore() also uninstalls the fake timers it created, so a
    // separate clock.restore() is unnecessary.
    sandbox.restore()
  })
  describe('upload', function () {
    it('should upload a video to ipfs', async function () {
      const sqlRaw = postgres()
      const sql = sandbox.stub(sqlRaw)
      const video = sandbox.stub()
      // Stub Voddo's methods but let its EventEmitter plumbing run for real.
      const voddo = sandbox.createStubInstance(Voddo)
      voddo.on.callThrough()
      voddo.emit.callThrough()
      voddo.listeners.callThrough()
      voddo.listenerCount.callThrough()
      voddo.start.callsFake(() => {
        voddo.emit('start', { file: '/tmp/burrito.mp4', timestamp: 1 })
      })
      const ipfs = sandbox.createStubInstance(Ipfs)
      ipfs.upload.withArgs('/tmp/mycoolfile.mp4').resolves(cidFixture)
      const capture = new Capture({
        sql,
        ipfs,
        video,
        voddo
      })
      const cid = await capture.upload('/tmp/mycoolfile.mp4')
      // A parseable CID proves upload() returned what Ipfs.upload resolved.
      expect(() => CID.parse(cid), `The IPFS CID '${cid}' is invalid.`).to.not.throw()
      expect(capture.ipfs.upload).calledOnce
    })
  })
  describe('save', function () {
    it('should save to db', async function () {
      const sqlRaw = postgres()
      const sql = sandbox.stub(sqlRaw)
      const video = sandbox.stub()
      const voddo = sandbox.createStubInstance(Voddo)
      voddo.on.callThrough()
      voddo.emit.callThrough()
      voddo.listeners.callThrough()
      voddo.listenerCount.callThrough()
      voddo.start.callsFake(() => {
        voddo.emit('start', { file: '/tmp/burrito.mp4', timestamp: 1 })
      })
      const ipfs = sandbox.createStubInstance(Ipfs)
      ipfs.upload.withArgs('/tmp/mycoolfile.mp4').resolves(cidFixture)
      const capture = new Capture({
        sql,
        ipfs,
        video,
        voddo
      })
      // I can't stub sql`` because of that template string override so i'm just stubbing capture.save
      // I think this is an evergreen test ¯\_(ツ)_/¯
      sandbox.stub(capture, 'save').resolves([
        { id: 1, cid: cidFixture, captureDate: fixtureDate }
      ])
      const vod = await capture.save(cidFixture, fixtureDate)
    })
  })
})

View File

@ -0,0 +1,86 @@
import 'dotenv/config'
import Video from '../src/Video.js'
import { dirname } from 'path';
import { fileURLToPath } from 'url';
import path from 'node:path'
import os from 'node:os'
import fs from 'node:fs'
import sinon from 'sinon'
import sinonChai from 'sinon-chai'
import chai, { expect } from 'chai'
// Register sinon-chai so `expect(fake).calledOnce`-style assertions work.
chai.use(sinonChai);
// ESM has no __dirname; derive it from import.meta.url.
const __dirname = dirname(fileURLToPath(import.meta.url));
// Three tiny fixture clips (committed under test/fixtures) in playback order.
const dataFixture = [
  {
    timestamp: 1,
    file: 'mock-stream0.mp4'
  }, {
    timestamp: 2,
    file: 'mock-stream1.mp4'
  }, {
    timestamp: 3,
    file: 'mock-stream2.mp4'
  }
]
// Unit tests for Video. ffmpeg is never run: execa is a sinon fake.
describe('Video', function () {
  let subject

  before(() => {
    // Stage the fixture clips in the OS temp dir so the fixtures directory
    // stays pristine and cwd simulates process.env.FUTUREPORN_TMP.
    for (const { file } of dataFixture) {
      fs.copyFileSync(
        path.join(__dirname, 'fixtures', file),
        path.join(os.tmpdir(), file)
      )
    }
  })

  beforeEach(() => {
    subject = new Video({
      cwd: os.tmpdir(),
      filePaths: dataFixture,
      execa: sinon.fake.resolves({ exitCode: 0, killed: false, stdout: "i am so horni rn", stderr: null })
    })
  })

  afterEach(function () {
    console.log('>> sinon.restore! (afterEach)')
    sinon.restore();
  })

  describe('getFilesTxt', function () {
    it('should generate contents suitable for input to `ffmpeg -f concat`', function () {
      const expected = "file 'mock-stream0.mp4'\nfile 'mock-stream1.mp4'\nfile 'mock-stream2.mp4'\n"
      expect(subject.getFilesTxt()).to.deep.equal(expected)
    })
  })

  describe('concat', function () {
    it('should join multiple videos into one', async function () {
      const outputFile = await subject.concat()
      expect(typeof outputFile === 'string').to.be.true
      expect(subject.execa).calledOnce
      expect(outputFile).to.match(/\.mp4$/)
    })
  })

  describe('getFilesFile', function () {
    it('should create a files.txt and return the path', async function () {
      const listPath = await subject.getFilesFile()
      expect(typeof listPath === 'string').to.be.true
      expect(listPath).to.equal(path.join(os.tmpdir(), 'files.txt'))
    })
  })
})

View File

@ -0,0 +1,491 @@
import 'dotenv/config'
import Voddo from '../src/Voddo.js'
import chai, { expect } from 'chai'
import sinon from 'sinon'
import YoutubeDlWrap from 'youtube-dl-wrap'
import {
AbortController
} from "node-abort-controller";
import {
EventEmitter
} from 'events'
import debugFactory from 'debug'
import { join, dirname } from 'path';
import { fileURLToPath } from 'url';
import sinonChai from 'sinon-chai'
import sinonTest from "sinon-test";
import path from 'path'
// Register sinon-chai so `expect(stub).calledOnce`-style assertions work.
chai.use(sinonChai);
// sinon-test wrapper: each wrapped test gets its own sandbox with fake
// setTimeout/setInterval that is restored automatically.
const test = sinonTest(sinon, {
  toFake: ["setTimeout", "setInterval"],
  shouldAdvanceTime: false
});
const debug = debugFactory('voddo')
// ESM has no __dirname; derive it from import.meta.url.
const __dirname = dirname(fileURLToPath(import.meta.url));
describe('Voddo', function() {
describe('groupStreamSegments', function () {
  it('should separate two stream data objects', function () {
    // Three segments: one from an earlier stream, and two (5s apart)
    // belonging to a later stream.
    const earlier = {
      "startTime": 1675368000000,
      "file": "projektmelody 2023-02-02 12_00-projektmelody.mp4",
      "size": 6556534941,
      "endTime": 1675378800000
    }
    const laterA = {
      "startTime": 1675386000000,
      "file": "projektmelody 2023-02-02 17_00-projektmelody.mp4",
      "size": 550799038,
      "endTime": 1675391400000,
    }
    const laterB = {
      "startTime": 1675391405000,
      "file": "projektmelody 2023-02-02 18_30-projektmelody.mp4",
      "size": 6556534941,
      "endTime": 1675396800000
    }
    // Input is deliberately out of chronological order; grouping should
    // both partition by stream and sort the groups.
    const grouped = Voddo.groupStreamSegments([laterA, laterB, earlier])
    expect(grouped).to.deep.equal([
      [earlier],
      [laterA, laterB]
    ])
  })
})
// let clock;
// beforeEach(function() {
// clock = sinon.useFakeTimers({
// toFake: ["setTimeout", "setInterval"],
// shouldAdvanceTime: false
// });
// })
// afterEach(() => {
// sinon.restore()
// })
// Something faulty with Voddo or sinon or mocha, not sure.
// When running by itself, test succeeds. When running with 'should start and stop stream download',
// voddo.stats gets set to whatever that test sets it to. So bizarre, it's like the same Voddo class instance
// exists in two different tests even though they are named differently.
// Even though they are not in global scope. Even though each was called with `new Voddo(...)`
// Doesn't matter if I wrap both in sinon-test. Same leaky problem.
// Doesn't matter if I sinon.restore() afterEach. Same leaky problem.
// Doesn't matter if I manually set up a sinon sandbox. Same leaky problem.
// Fuck event emitters. I love their utility but I don't know how the fuck they are supposed to be tested.
// Solution might just call for a rewrite of Voddo, or perhaps deleting Voddo in favor of Capture
// For now, I'm moving forward because Voddo works even though this test does not.
// NOTE: both tests are xit'd (skipped) — see the long comment above about
// state appearing to leak between tests when the whole suite runs.
describe('getRecordedSegments', function() {
  // Skipped: relies on real fixture files populating Voddo's internal log.
  xit('should populate it\'s log if log is empty', async function () {
    const voddo = new Voddo({
      url: 'https://example.com',
      cwd: join(__dirname, 'fixtures')
    })
    const streams = await voddo.getRecordedSegments()
    console.log(streams)
    expect(streams.length).to.equal(3)
    expect(streams[0]).to.have.property('startTime')
    expect(streams[0]).to.have.property('file')
    expect(streams[0]).to.have.property('size')
  })
  xit('should use Voddo\'s stats history to get filenames of only the most recent stream', async function() {
    const sb = sinon.createSandbox()
    const viddo = new Voddo({
      url: 'https://example.com',
      cwd: '~/Downloads'
    })
    // Seed six segments spanning two distinct streams (3 + 3).
    sb.stub(viddo, 'stats').value({
      segments: [{
        startTime: 1674147647000,
        size: 192627,
        file: 'projektmelody 2023-01-19 17_00-projektmelody.mp4'
      }, {
        startTime: 1674151247000,
        size: 192627,
        file: 'projektmelody 2023-01-19 18_00-projektmelody.mp4'
      }, {
        startTime: 1674154847000,
        size: 192627,
        file: 'projektmelody 2023-01-19 19_00-projektmelody.mp4'
      }, {
        file: 'projektmelody 2023-01-20 20_10-projektmelody.mp4',
        size: 192627,
        startTime: 1674245400000,
      }, {
        file: 'projektmelody 2023-01-20 21_10-projektmelody.mp4',
        size: 192627,
        startTime: 1674249000000,
      }, {
        file: 'projektmelody 2023-01-20 22_10-projektmelody.mp4',
        size: 192627,
        startTime: 1674252600000,
      }]
    })
    const filenames = await viddo.getRecordedSegments()
    sb.restore()
    // Only the most recent stream's three segments should come back.
    expect(filenames).to.have.lengthOf(3)
    expect(filenames).to.deep.equal([{
      file: 'projektmelody 2023-01-20 20_10-projektmelody.mp4',
      size: 192627,
      startTime: 1674245400000,
    }, {
      file: 'projektmelody 2023-01-20 21_10-projektmelody.mp4',
      size: 192627,
      startTime: 1674249000000,
    }, {
      file: 'projektmelody 2023-01-20 22_10-projektmelody.mp4',
      size: 192627,
      startTime: 1674252600000,
    }])
  })
})
// Skipped: references `clock`, but the beforeEach that created it is
// commented out above — this would throw ReferenceError if re-enabled.
// TODO(review): restore a fake-timer setup before un-skipping.
xit('should keep a log of the files downloaded', function(done) {
  const ee = new EventEmitter()
  const ytdl = sinon.createStubInstance(YoutubeDlWrap)
  ytdl.exec.returns(ee)
  // Start/stop timestamps for three simulated back-to-back downloads.
  const times = [
    1000, // start
    1000 * 60 * 60 * 1, // stop
    1000 * 60 * 60 * 1 + 1, // start
    1000 * 60 * 60 * 2, // stop
    1000 * 60 * 60 * 3 + 1, // start
    1000 * 60 * 60 * 4 // stop
  ]
  clock.setTimeout(() => {
    ee.emit('youtubeDlEvent', 'download', ' Destination: projektmelody 2023-01-18 21_10-projektmelody.mp4')
  }, times[0])
  clock.setTimeout(() => {
    ee.emit('close')
  }, times[1])
  clock.setTimeout(() => {
    ee.emit('youtubeDlEvent', 'download', ' Destination: projektmelody 2023-01-18 22_10-projektmelody.mp4')
  }, times[2])
  clock.setTimeout(() => {
    ee.emit('close')
  }, times[3])
  clock.setTimeout(() => {
    ee.emit('youtubeDlEvent', 'download', ' Destination: projektmelody 2023-01-18 23_10-projektmelody.mp4')
  }, times[4])
  clock.setTimeout(() => {
    ee.emit('close')
  }, times[5])
  let url = `https://chaturbate.com/projektmelody`
  let cwd = process.env.FUTUREPORN_WORKDIR || '/tmp'
  const voddo = new Voddo({
    url: url,
    format: 'best',
    cwd: cwd,
    ytdl
  })
  // Nested once('start') handlers: each tick pair below drives one
  // download/close cycle, so three starts are observed in sequence.
  voddo.once('start', (data) => {
    expect(data).to.have.property('file')
    expect(data).to.have.property('timestamp')
    voddo.once('start', (data) => {
      expect(data).to.have.property('file')
      expect(data).to.have.property('timestamp')
      voddo.once('start', (data) => {
        expect(data).to.have.property('file')
        expect(data).to.have.property('timestamp')
        voddo.once('stop', function(report) {
          debug(report)
          expect(report).to.have.property('stats')
          expect(report.stats).to.have.property('files')
          expect(report.stats.files).to.have.lengthOf(3)
          debug(report.stats.files)
          expect(report.stats.files[0]).to.include({
            file: 'projektmelody 2023-01-18 21_10-projektmelody.mp4'
          })
          expect(ytdl.exec).calledThrice
          console.log('>>WE ARE DONE')
          expect(this.clock.countTimers()).to.equal(0)
          done()
        })
        clock.tick(times[5]) // stop
      })
      clock.tick(times[3]) // stop
      clock.tick(times[4]) // start
    })
    clock.tick(times[1]) // stop
    clock.tick(times[2]) // start
  })
  voddo.start()
  expect(ytdl.exec).calledOnce
  clock.tick(times[0])
})
// Skipped duplicate of the test above (same name) using per-call stub
// fakes instead of shared timeouts. Also depends on the commented-out
// `clock` — would throw if re-enabled.
xit('should keep a log of the files downloaded', function(done) {
  this.timeout(5000)
  // https://github.com/insanity54/futureporn/issues/13
  const ytdlStub = sinon.createStubInstance(YoutubeDlWrap)
  ytdlStub.exec
    .onCall(0)
    .callsFake(function(args, opts, aborter) {
      let ee = new EventEmitter()
      clock.setTimeout(() => {
        ee.emit('youtubeDlEvent', 'download', ' Destination: projektmelody 2023-01-18 21_10-projektmelody.mp4')
      }, 50)
      clock.setTimeout(() => {
        ee.emit('close')
      }, 100)
      return ee
    })
    .onCall(1)
    .callsFake(function(args, opts, aborter) {
      let ee = new EventEmitter()
      clock.setTimeout(() => {
        ee.emit('youtubeDlEvent', 'download', ' Destination: projektmelody 2023-01-18 22_10-projektmelody.mp4')
      }, 50)
      clock.setTimeout(() => {
        ee.emit('close')
      }, 100)
      return ee
    })
    .onCall(2)
    .callsFake(function(args, opts, aborter) {
      let ee = new EventEmitter()
      clock.setTimeout(() => {
        ee.emit('youtubeDlEvent', 'download', ' Destination: projektmelody 2023-01-18 23_10-projektmelody.mp4')
      }, 50)
      clock.setTimeout(() => {
        ee.emit('close')
      }, 100)
      return ee
    })
  let url = `https://chaturbate.com/projektmelody`
  let cwd = process.env.FUTUREPORN_WORKDIR || '/tmp'
  const voddo = new Voddo({
    url: url,
    format: 'best',
    cwd: cwd,
    ytdl: ytdlStub
  })
  // expect(clock.countTimers()).to.equal(0)
  voddo.once('start', function(data) {
    expect(data).to.have.property('file')
    expect(data).to.have.property('timestamp')
    clock.next()
    clock.next()
    voddo.once('start', function(data) {
      expect(data).to.have.property('file')
      expect(data).to.have.property('timestamp')
      voddo.once('start', function(data) {
        debug('fake start?')
        expect(data).to.have.property('file')
        expect(data).to.have.property('timestamp')
        voddo.once('stop', function(report) {
          debug(report)
          expect(report).to.have.property('stats')
          expect(report.stats).to.have.property('files')
          expect(report.stats.files).to.have.lengthOf(3)
          debug(report.stats.files)
          expect(report.stats.files[0]).to.include({
            file: 'projektmelody 2023-01-18 21_10-projektmelody.mp4'
          })
          sinon.assert.calledThrice(ytdlStub.exec)
          expect(this.clock.countTimers()).to.equal(0)
          done()
        })
      })
    })
  })
  voddo.start()
})
// Active test. The sinon-test wrapper (`test(...)`) makes `this` a sandbox
// with fake setTimeout/setInterval (per the sinonTest config at the top of
// the file) that auto-restores when the test finishes.
it('should start and stop stream download', test(function(done) {
  const sandbox = this
  const ee = new EventEmitter()
  const ytdl = this.createStubInstance(YoutubeDlWrap);
  ytdl.exec.returns(ee)
  const url = 'https://chaturbate.com/projektmelody'
  const format = 'best'
  const cwd = '/tmp'
  const v = new Voddo({
    url,
    format,
    cwd,
    ytdl
  })
  console.log(v.stats)
  v.once('stop', function(data) {
    console.log('ffffff')
    console.log(this)
    // Stopping must abort the in-flight download and leave no timers behind.
    expect(this.abortController.signal.aborted, 'abortController did not abort').to.be.true
    expect(sandbox.clock.countTimers()).to.equal(0)
    done()
  })
  v.once('start', function(data) {
    console.log('STARRRRRT')
    expect(data).to.have.property('file')
    expect(data).to.have.property('timestamp')
    expect(this).to.have.property('abortController')
    console.log('ey cool, voddo started')
  })
  v.start()
  // Scripted timeline: download event at 500ms, stop() at 1s, ytdl close at 2s.
  const times = [
    500,
    1000,
    2000
  ]
  this.clock.setTimeout(() => {
    ee.emit('youtubeDlEvent', 'download', ' Destination: projektmelody 2023-01-18 21_10-projektmelody.mp4')
  }, times[0])
  this.clock.setTimeout(() => {
    v.stop()
  }, times[1])
  this.clock.setTimeout(() => {
    ee.emit('close')
  }, times[2])
  this.clock.tick(times[0]) // start
  this.clock.tick(times[1]) // stop
  this.clock.tick(times[2]) // close
}))
// Skipped: like the log tests above, this references `clock` (whose
// beforeEach is commented out) and would throw if re-enabled.
xit('should retry when a stream closes', function(done) {
  const ytdlStub = sinon.createStubInstance(YoutubeDlWrap);
  ytdlStub.exec
    .onCall(0)
    .callsFake(function(args, opts, aborter) {
      debug(' [test] callsFake 0')
      let ee = new EventEmitter()
      setTimeout(() => {
        console.log('should retry when a stream closes -- emission')
        ee.emit('youtubeDlEvent', 'download', ' Destination: projektmelody 2023-01-17 19_39-projektmelody.mp4')
      }, 100)
      setTimeout(() => {
        console.log('should retry when a stream closes -- emission')
        // this simulates youtube-dl closing
        // (NOT Voddo closing)
        ee.emit('close')
      }, 550)
      return ee
    })
    .onCall(1)
    .callsFake(function(args, opts, aborter) {
      debug(' [test] callsFake 1')
      let ee = new EventEmitter()
      setTimeout(() => {
        ee.emit('youtubeDlEvent', 'download', ' Destination: projektmelody 2023-01-17 19_45-projektmelody.mp4')
      }, 100)
      return ee
    })
  let url = `https://chaturbate.com/projektmelody`
  let cwd = process.env.FUTUREPORN_WORKDIR || '/tmp'
  // NOTE(review): abortController is created but never used here.
  let abortController = new AbortController()
  const voddo = new Voddo({
    url: url,
    format: 'best',
    cwd: cwd,
    ytdl: ytdlStub
  })
  voddo.once('start', function(data) {
    debug(' [test] voddo <<<<<-----')
    expect(data).to.have.property('file')
    expect(data).to.have.property('timestamp')
    // A second 'start' means Voddo relaunched youtube-dl after the close.
    voddo.once('start', function(data) {
      debug(' [test] restarted after dl close! (expected) <<<<<-----')
      sinon.assert.calledTwice(ytdlStub.exec)
      expect(this.clock.countTimers()).to.equal(0)
      done()
    })
  })
  voddo.start()
  clock.next()
  clock.next()
  clock.next()
  clock.next()
  clock.next()
})
})

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1,123 @@
import 'dotenv/config'
import chai, { expect } from 'chai'
import sinon from 'sinon'
import sinonChai from 'sinon-chai'
import { CID } from 'multiformats/cid'
import EventEmitter from 'node:events'
import { fileURLToPath } from 'url';
import path from 'node:path'
import postgres from 'postgres'
import Capture from '../src/Capture.js'
import Voddo from '../src/Voddo.js'
import Video from '../src/Video.js'
// Register sinon-chai so `expect(mock).calledOnce`-style assertions work.
chai.use(sinonChai)
// Fail fast: this integration suite talks to a real postgres instance.
if (typeof process.env.POSTGRES_PASSWORD === 'undefined') throw new Error('missing POSTGRES_PASSWORD');
if (typeof process.env.POSTGRES_USERNAME === 'undefined') throw new Error('missing POSTGRES_USERNAME');
// Valid CIDv1 standing in for the uploaded VOD's hash.
const cidFixture = 'bafybeid3mg5lzrvnmpfi5ftwhiupp7i5bgkmdo7dnlwrvklbv33telrrry'
const inputFixture = 'projektmelody 3021-10-16 06-16.mp4'
const outputFixture = 'projektmelody-chaturbate-30211016T000000Z.mp4'
// NOTE(review): year-3021 timestamp — presumably a deliberately far-future
// value to avoid colliding with real rows; confirm.
const timestampFixture = 33191316900000
// Integration test: stubs Voddo/Video but writes through a real postgres
// connection and asserts the row landed. Timing is driven by fake timers
// plus real waits, so ordering here is deliberate and fragile.
describe('Capture integration', function () {
  let clock
  beforeEach(() => {
    // Fake only setTimeout/setInterval; Date stays real.
    clock = sinon.useFakeTimers({
      toFake: ["setTimeout", "setInterval"],
      shouldAdvanceTime: false
    });
  })
  afterEach(() => {
    sinon.restore()
    clock.restore()
  })
  it('end of stream behavior', async function() {
    // Mock enforces upload is called with exactly the concatenated output file.
    const ipfsClusterUpload = sinon.mock()
      .withExactArgs(outputFixture)
      .resolves(cidFixture)
    // Real DB connection; idle_timeout:1 lets the process exit afterwards.
    const sql = postgres({
      username: process.env.POSTGRES_USERNAME,
      password: process.env.POSTGRES_PASSWORD,
      host: process.env.POSTGRES_HOST,
      database: 'futureporn',
      idle_timeout: 1
    })
    // Stub Voddo's methods but keep its EventEmitter behavior real.
    const voddo = sinon.createStubInstance(Voddo)
    voddo.on.callThrough()
    voddo.off.callThrough()
    voddo.emit.callThrough()
    voddo.listeners.callThrough()
    voddo.listenerCount.callThrough()
    voddo.getFilenames.returns([{
      timestamp: timestampFixture,
      filename: inputFixture
    }])
    const video = sinon.createStubInstance(Video)
    video.concat.resolves(outputFixture)
    const capture = new Capture({
      voddo,
      sql,
      ipfsClusterUpload,
      video
    })
    capture.download()
    // Simulate youtube-dl closing at end of stream.
    voddo.emit('stop', {
      reason: 'close',
      stats: {
        filenames: [
          inputFixture
        ]
      }
    })
    clock.next() // actionTimer elapse
    expect(clock.countTimers()).to.equal(0)
    clock.restore()
    // gotta wait to verify otherwise verification
    // occurs before ipfsClusterUpload has a chance
    // to be invoked.
    //
    // (not sure why)
    //
    // maybe we're waiting for the
    // concat promise to resolve?
    await Promise.resolve(() => {
      expect(ipfsClusterUpload).calledOnce
    })
    // Capture.save is called as a side effect
    // of Capture.process
    // which is called as a side effect of Capture.download
    // so we have to wait for it to complete
    // this is not ideal because there is potential
    // to not wait long enough
    await new Promise((resolve) => {
      setTimeout(resolve, 1000)
    })
    const rows = await sql`SELECT "videoSrcHash" FROM vod WHERE "videoSrcHash" = ${cidFixture}`
    expect(rows[0]).to.exist
    expect(rows[0]).to.have.property('videoSrcHash', cidFixture)
  })
})

View File

@ -0,0 +1,18 @@
import Ipfs from '../src/Ipfs.js'
import { expect } from 'chai'
import path, { dirname } from 'path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
// Location of the ipfs binary. Overridable via IPFS_EXECUTABLE because the
// previous hard-coded '/home/chris/.local/bin/ipfs' only existed on one
// developer's machine; the old path remains the fallback for compatibility.
const ipfsExecutable = process.env.IPFS_EXECUTABLE || '/home/chris/.local/bin/ipfs'
describe('Ipfs', function() {
  describe('hash', function () {
    it('should hash a file and return the v1 CID', async function () {
      // A known fixture must always hash to the same CIDv1.
      const fixturePath = path.join(__dirname, '../test/fixtures/mock-stream0.mp4')
      const ipfs = new Ipfs({ ipfsExecutable })
      const cid = await ipfs.hash(fixturePath)
      expect(cid).to.equal('bafkreihfbftehabfrakhr6tmbx72inewwpayw6cypwgm6lbhbf7mxm7wni')
    })
  })
})

View File

@ -0,0 +1,62 @@
import 'dotenv/config'
import Voddo from '../src/Voddo.js'
import {
expect
} from 'chai'
import sinon from 'sinon'
import YoutubeDlWrap from 'youtube-dl-wrap'
import {
EventEmitter
} from 'events'
import { getRandomRoom } from '../src/cb.js'
import path, { dirname } from 'path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
describe('voddo', function() {
  describe('getVideoLength', function () {
    it('should return the video length in ms', async function () {
      const fixtureFile = path.join(__dirname, '..', 'test', 'fixtures', 'mock-stream0.mp4')
      const length = await Voddo.getVideoLength(fixtureFile)
      expect(length).to.equal(3819)
    })
  })
  it('aborted stream', function(done) {
    // Live integration test: resolves a random online chaturbate room and
    // records it briefly. Needs network access.
    this.timeout(10000)
    getRandomRoom().then((room) => {
      console.log(room)
      // FIX: removed an unused `new AbortController()` local — Voddo manages
      // its own abort controller internally (see unit tests above).
      const url = `https://chaturbate.com/${room}`
      const format = 'best'
      const cwd = '/tmp'
      const voddo = new Voddo({
        url,
        format,
        cwd
      })
      voddo.once('stop', function(data) {
        console.log('f in chat')
        // After stop, at least one recorded file with a size must be logged.
        expect(voddo.stats.files[0]).to.have.property('size')
        done()
      })
      voddo.start()
      // Record for ~5 seconds, then stop.
      setTimeout(() => {
        voddo.stop()
      }, 5000)
    })
  })
})

View File

@ -0,0 +1,35 @@
import { record, assertDependencyDirectory } from '../../src/record.js'
import { getRandomRoom } from '../../src/cb.js'
import path from 'node:path'
import os from 'node:os'
import { execa } from 'execa'
// Live integration test: resolves a real playlist URL via yt-dlp and records
// ~10 seconds of stream. Requires network access and yt-dlp on PATH.
describe('record', function() {
  it('should record a file to disk', async function () {
    this.timeout(1000*60)
    const roomName = await getRandomRoom()
    console.log(`roomName:${roomName}`)
    // Minimal stand-in for the app-wide context object record() expects.
    const appContext = {
      env: {
        FUTUREPORN_WORKDIR: os.tmpdir(),
        DOWNLOADER_UA: "Mozilla/5.0 (X11; Linux x86_64; rv:105.0) Gecko/20100101 Firefox/105.0"
      },
      logger: {
        log: (msg) => { console.log(JSON.stringify(msg)) }
      }
    }
    console.log(appContext)
    // `yt-dlp -g` prints the resolved HLS playlist URL without downloading.
    const { stdout } = await execa('yt-dlp', ['-g', `https://chaturbate.com/${roomName}`])
    const playlistUrl = stdout.trim()
    console.log(`playlistUrl:${playlistUrl}`)
    assertDependencyDirectory(appContext)
    const ffmpegProc = record(appContext, playlistUrl, roomName)
    // console.log(ffmpegProc)
    // Let ffmpeg run for 10 seconds, then interrupt it gracefully.
    return new Promise((resolve) => {
      setTimeout(() => {
        ffmpegProc.kill('SIGINT')
        resolve()
      }, 1000*10)
    })
  })
})

View File

@ -0,0 +1,33 @@
import 'dotenv/config'
import Video from '../src/Video.js'
import { expect } from 'chai'
import { dirname } from 'path';
import { fileURLToPath } from 'url';
import path from 'node:path'
// ESM has no __dirname; derive it from import.meta.url.
const __dirname = dirname(fileURLToPath(import.meta.url));
// Three fixture clips, in playback order, for the concat test below.
const dataFixture = [
  {
    timestamp: 1,
    file: 'mock-stream0.mp4'
  }, {
    timestamp: 2,
    file: 'mock-stream1.mp4'
  }, {
    timestamp: 3,
    file: 'mock-stream2.mp4'
  }
]
// Skipped integration test for video concatenation.
xdescribe('video', function () {
  describe('concat', function () {
    it('should combine several videos into one', async function() {
      const cwd = path.join(__dirname, './fixtures')
      // BUG FIX: `concat` was called as a bare function but it is neither
      // imported nor defined in this file — it is an instance method of the
      // imported Video class (see Video.test.js usage).
      const video = new Video({ cwd, filePaths: dataFixture })
      const outputFile = await video.concat()
      expect(outputFile).to.be.a('string')
    })
  })
})

View File

@ -1,20 +0,0 @@
# Base Image
FROM golang:latest
# Set the Current Working Directory inside the container
WORKDIR /app
# Copy everything from the current directory to the PWD(Present Working Directory) inside the container
COPY . .
# Download all the dependencies
RUN go mod download
# Build the Go app
RUN go build -o main .
# Expose port 8080 to the outside world
EXPOSE 8080
# Command to run the executable
CMD ["./main"]

View File

@ -1,13 +0,0 @@
// A minimal hello-world HTTP server.
package main

import (
	"log"
	"net/http"
)

// helloWorldHandler writes a static greeting for every request.
// (renamed from the typo'd "hellowordhandler")
func helloWorldHandler(w http.ResponseWriter, r *http.Request) {
	w.Write([]byte("Hello World"))
}

func main() {
	http.HandleFunc("/", helloWorldHandler)
	// NOTE(review): the Dockerfile EXPOSEs 8080 but this listens on 9000 —
	// confirm the intended port. ListenAndServe's error was previously
	// silently discarded; log it so startup failures are visible.
	log.Fatal(http.ListenAndServe(":9000", nil))
}

View File

@ -1,2 +0,0 @@
node_modules
README.md

View File

@ -1,36 +0,0 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
.yarn/install-state.gz
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# local env files
.env*.local
# vercel
.vercel
# typescript
*.tsbuildinfo
next-env.d.ts

View File

@ -1,12 +0,0 @@
FROM node:20
# corepack ships with node 20 and provides the pnpm shim.
RUN corepack enable
WORKDIR /app
ADD package.json .
# NOTE(review): this copies npm's package-lock.json but installs with pnpm,
# which reads pnpm-lock.yaml — so the lockfile is ignored and installs are
# not reproducible. Confirm and switch to copying pnpm-lock.yaml.
ADD package-lock.json .
RUN pnpm install
ADD . .
ENTRYPOINT [ "pnpm", "next", "dev" ]

View File

@ -1,36 +0,0 @@
This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app).
## Getting Started
First, run the development server:
```bash
npm run dev
# or
yarn dev
# or
pnpm dev
# or
bun dev
```
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
You can start editing the page by modifying `app/page.js`. The page auto-updates as you edit the file.
This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font.
## Learn More
To learn more about Next.js, take a look at the following resources:
- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.
You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome!
## Deploy on Vercel
The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.
Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 25 KiB

View File

@ -1,107 +0,0 @@
:root {
--max-width: 1100px;
--border-radius: 12px;
--font-mono: ui-monospace, Menlo, Monaco, "Cascadia Mono", "Segoe UI Mono",
"Roboto Mono", "Oxygen Mono", "Ubuntu Monospace", "Source Code Pro",
"Fira Mono", "Droid Sans Mono", "Courier New", monospace;
--foreground-rgb: 0, 0, 0;
--background-start-rgb: 214, 219, 220;
--background-end-rgb: 255, 255, 255;
--primary-glow: conic-gradient(
from 180deg at 50% 50%,
#16abff33 0deg,
#0885ff33 55deg,
#54d6ff33 120deg,
#0071ff33 160deg,
transparent 360deg
);
--secondary-glow: radial-gradient(
rgba(255, 255, 255, 1),
rgba(255, 255, 255, 0)
);
--tile-start-rgb: 239, 245, 249;
--tile-end-rgb: 228, 232, 233;
--tile-border: conic-gradient(
#00000080,
#00000040,
#00000030,
#00000020,
#00000010,
#00000010,
#00000080
);
--callout-rgb: 238, 240, 241;
--callout-border-rgb: 172, 175, 176;
--card-rgb: 180, 185, 188;
--card-border-rgb: 131, 134, 135;
}
@media (prefers-color-scheme: dark) {
:root {
--foreground-rgb: 255, 255, 255;
--background-start-rgb: 0, 0, 0;
--background-end-rgb: 0, 0, 0;
--primary-glow: radial-gradient(rgba(1, 65, 255, 0.4), rgba(1, 65, 255, 0));
--secondary-glow: linear-gradient(
to bottom right,
rgba(1, 65, 255, 0),
rgba(1, 65, 255, 0),
rgba(1, 65, 255, 0.3)
);
--tile-start-rgb: 2, 13, 46;
--tile-end-rgb: 2, 5, 19;
--tile-border: conic-gradient(
#ffffff80,
#ffffff40,
#ffffff30,
#ffffff20,
#ffffff10,
#ffffff10,
#ffffff80
);
--callout-rgb: 20, 20, 20;
--callout-border-rgb: 108, 108, 108;
--card-rgb: 100, 100, 100;
--card-border-rgb: 200, 200, 200;
}
}
* {
box-sizing: border-box;
padding: 0;
margin: 0;
}
html,
body {
max-width: 100vw;
overflow-x: hidden;
}
body {
color: rgb(var(--foreground-rgb));
background: linear-gradient(
to bottom,
transparent,
rgb(var(--background-end-rgb))
)
rgb(var(--background-start-rgb));
}
a {
color: inherit;
text-decoration: none;
}
@media (prefers-color-scheme: dark) {
html {
color-scheme: dark;
}
}

View File

@ -1,17 +0,0 @@
import { Inter } from "next/font/google";
import "./globals.css";
const inter = Inter({ subsets: ["latin"] });
export const metadata = {
title: "Create Next App",
description: "Generated by create next app",
};
export default function RootLayout({ children }) {
return (
<html lang="en">
<body className={inter.className}>{children}</body>
</html>
);
}

View File

@ -1,95 +0,0 @@
import Image from "next/image";
import styles from "./page.module.css";
export default function Home() {
return (
<main className={styles.main}>
<div className={styles.description}>
<p>
Get started by editing&nbsp;
<code className={styles.code}>app/page.js</code>
</p>
<div>
<a
href="https://vercel.com?utm_source=create-next-app&utm_medium=appdir-template&utm_campaign=create-next-app"
target="_blank"
rel="noopener noreferrer"
>
By{" "}
<Image
src="/vercel.svg"
alt="Vercel Logo"
className={styles.vercelLogo}
width={100}
height={24}
priority
/>
</a>
</div>
</div>
<div className={styles.center}>
<Image
className={styles.logo}
src="/next.svg"
alt="Next.js Logo"
width={180}
height={37}
priority
/>
</div>
<div className={styles.grid}>
<a
href="https://nextjs.org/docs?utm_source=create-next-app&utm_medium=appdir-template&utm_campaign=create-next-app"
className={styles.card}
target="_blank"
rel="noopener noreferrer"
>
<h2>
Docs <span>-&gt;</span>
</h2>
<p>Find in-depth information about Next.js features and API.</p>
</a>
<a
href="https://nextjs.org/learn?utm_source=create-next-app&utm_medium=appdir-template&utm_campaign=create-next-app"
className={styles.card}
target="_blank"
rel="noopener noreferrer"
>
<h2>
Learn <span>-&gt;</span>
</h2>
<p>Learn about Next.js in an interactive course with&nbsp;quizzes!</p>
</a>
<a
href="https://vercel.com/templates?framework=next.js&utm_source=create-next-app&utm_medium=appdir-template&utm_campaign=create-next-app"
className={styles.card}
target="_blank"
rel="noopener noreferrer"
>
<h2>
Templates <span>-&gt;</span>
</h2>
<p>Explore starter templates for Next.js.</p>
</a>
<a
href="https://vercel.com/new?utm_source=create-next-app&utm_medium=appdir-template&utm_campaign=create-next-app"
className={styles.card}
target="_blank"
rel="noopener noreferrer"
>
<h2>
Deploy <span>-&gt;</span>
</h2>
<p>
Instantly deploy your Next.js site to a shareable URL with Vercel.
</p>
</a>
</div>
</main>
);
}

View File

@ -1,230 +0,0 @@
.main {
display: flex;
flex-direction: column;
justify-content: space-between;
align-items: center;
padding: 6rem;
min-height: 100vh;
}
.description {
display: inherit;
justify-content: inherit;
align-items: inherit;
font-size: 0.85rem;
max-width: var(--max-width);
width: 100%;
z-index: 2;
font-family: var(--font-mono);
}
.description a {
display: flex;
justify-content: center;
align-items: center;
gap: 0.5rem;
}
.description p {
position: relative;
margin: 0;
padding: 1rem;
background-color: rgba(var(--callout-rgb), 0.5);
border: 1px solid rgba(var(--callout-border-rgb), 0.3);
border-radius: var(--border-radius);
}
.code {
font-weight: 700;
font-family: var(--font-mono);
}
.grid {
display: grid;
grid-template-columns: repeat(4, minmax(25%, auto));
max-width: 100%;
width: var(--max-width);
}
.card {
padding: 1rem 1.2rem;
border-radius: var(--border-radius);
background: rgba(var(--card-rgb), 0);
border: 1px solid rgba(var(--card-border-rgb), 0);
transition: background 200ms, border 200ms;
}
.card span {
display: inline-block;
transition: transform 200ms;
}
.card h2 {
font-weight: 600;
margin-bottom: 0.7rem;
}
.card p {
margin: 0;
opacity: 0.6;
font-size: 0.9rem;
line-height: 1.5;
max-width: 30ch;
text-wrap: balance;
}
.center {
display: flex;
justify-content: center;
align-items: center;
position: relative;
padding: 4rem 0;
}
.center::before {
background: var(--secondary-glow);
border-radius: 50%;
width: 480px;
height: 360px;
margin-left: -400px;
}
.center::after {
background: var(--primary-glow);
width: 240px;
height: 180px;
z-index: -1;
}
.center::before,
.center::after {
content: "";
left: 50%;
position: absolute;
filter: blur(45px);
transform: translateZ(0);
}
.logo {
position: relative;
}
/* Enable hover only on non-touch devices */
@media (hover: hover) and (pointer: fine) {
.card:hover {
background: rgba(var(--card-rgb), 0.1);
border: 1px solid rgba(var(--card-border-rgb), 0.15);
}
.card:hover span {
transform: translateX(4px);
}
}
@media (prefers-reduced-motion) {
.card:hover span {
transform: none;
}
}
/* Mobile */
@media (max-width: 700px) {
.content {
padding: 4rem;
}
.grid {
grid-template-columns: 1fr;
margin-bottom: 120px;
max-width: 320px;
text-align: center;
}
.card {
padding: 1rem 2.5rem;
}
.card h2 {
margin-bottom: 0.5rem;
}
.center {
padding: 8rem 0 6rem;
}
.center::before {
transform: none;
height: 300px;
}
.description {
font-size: 0.8rem;
}
.description a {
padding: 1rem;
}
.description p,
.description div {
display: flex;
justify-content: center;
position: fixed;
width: 100%;
}
.description p {
align-items: center;
inset: 0 0 auto;
padding: 2rem 1rem 1.4rem;
border-radius: 0;
border: none;
border-bottom: 1px solid rgba(var(--callout-border-rgb), 0.25);
background: linear-gradient(
to bottom,
rgba(var(--background-start-rgb), 1),
rgba(var(--callout-rgb), 0.5)
);
background-clip: padding-box;
backdrop-filter: blur(24px);
}
.description div {
align-items: flex-end;
pointer-events: none;
inset: auto 0 0;
padding: 2rem;
height: 200px;
background: linear-gradient(
to bottom,
transparent 0%,
rgb(var(--background-end-rgb)) 40%
);
z-index: 1;
}
}
/* Tablet and Smaller Desktop */
@media (min-width: 701px) and (max-width: 1120px) {
.grid {
grid-template-columns: repeat(2, 50%);
}
}
@media (prefers-color-scheme: dark) {
.vercelLogo {
filter: invert(1);
}
.logo {
filter: invert(1) drop-shadow(0 0 0.3rem #ffffff70);
}
}
@keyframes rotate {
from {
transform: rotate(360deg);
}
to {
transform: rotate(0deg);
}
}

View File

@ -1,7 +0,0 @@
{
"compilerOptions": {
"paths": {
"@/*": ["./*"]
}
}
}

View File

@ -1,4 +0,0 @@
/** @type {import('next').NextConfig} */
const nextConfig = {};
export default nextConfig;

View File

@ -1,389 +0,0 @@
{
"name": "helloworldy",
"version": "0.1.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "helloworldy",
"version": "0.1.0",
"dependencies": {
"next": "14.1.3",
"react": "^18",
"react-dom": "^18"
}
},
"node_modules/@next/env": {
"version": "14.1.3",
"resolved": "https://registry.npmjs.org/@next/env/-/env-14.1.3.tgz",
"integrity": "sha512-VhgXTvrgeBRxNPjyfBsDIMvgsKDxjlpw4IAUsHCX8Gjl1vtHUYRT3+xfQ/wwvLPDd/6kqfLqk9Pt4+7gysuCKQ=="
},
"node_modules/@next/swc-darwin-arm64": {
"version": "14.1.3",
"resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.3.tgz",
"integrity": "sha512-LALu0yIBPRiG9ANrD5ncB3pjpO0Gli9ZLhxdOu6ZUNf3x1r3ea1rd9Q+4xxUkGrUXLqKVK9/lDkpYIJaCJ6AHQ==",
"cpu": [
"arm64"
],
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@next/swc-darwin-x64": {
"version": "14.1.3",
"resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.3.tgz",
"integrity": "sha512-E/9WQeXxkqw2dfcn5UcjApFgUq73jqNKaE5bysDm58hEUdUGedVrnRhblhJM7HbCZNhtVl0j+6TXsK0PuzXTCg==",
"cpu": [
"x64"
],
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@next/swc-linux-arm64-gnu": {
"version": "14.1.3",
"resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.3.tgz",
"integrity": "sha512-USArX9B+3rZSXYLFvgy0NVWQgqh6LHWDmMt38O4lmiJNQcwazeI6xRvSsliDLKt+78KChVacNiwvOMbl6g6BBw==",
"cpu": [
"arm64"
],
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@next/swc-linux-arm64-musl": {
"version": "14.1.3",
"resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.3.tgz",
"integrity": "sha512-esk1RkRBLSIEp1qaQXv1+s6ZdYzuVCnDAZySpa62iFTMGTisCyNQmqyCTL9P+cLJ4N9FKCI3ojtSfsyPHJDQNw==",
"cpu": [
"arm64"
],
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@next/swc-linux-x64-gnu": {
"version": "14.1.3",
"resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.3.tgz",
"integrity": "sha512-8uOgRlYEYiKo0L8YGeS+3TudHVDWDjPVDUcST+z+dUzgBbTEwSSIaSgF/vkcC1T/iwl4QX9iuUyUdQEl0Kxalg==",
"cpu": [
"x64"
],
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@next/swc-linux-x64-musl": {
"version": "14.1.3",
"resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.3.tgz",
"integrity": "sha512-DX2zqz05ziElLoxskgHasaJBREC5Y9TJcbR2LYqu4r7naff25B4iXkfXWfcp69uD75/0URmmoSgT8JclJtrBoQ==",
"cpu": [
"x64"
],
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@next/swc-win32-arm64-msvc": {
"version": "14.1.3",
"resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.3.tgz",
"integrity": "sha512-HjssFsCdsD4GHstXSQxsi2l70F/5FsRTRQp8xNgmQs15SxUfUJRvSI9qKny/jLkY3gLgiCR3+6A7wzzK0DBlfA==",
"cpu": [
"arm64"
],
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@next/swc-win32-ia32-msvc": {
"version": "14.1.3",
"resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.3.tgz",
"integrity": "sha512-DRuxD5axfDM1/Ue4VahwSxl1O5rn61hX8/sF0HY8y0iCbpqdxw3rB3QasdHn/LJ6Wb2y5DoWzXcz3L1Cr+Thrw==",
"cpu": [
"ia32"
],
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@next/swc-win32-x64-msvc": {
"version": "14.1.3",
"resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.3.tgz",
"integrity": "sha512-uC2DaDoWH7h1P/aJ4Fok3Xiw6P0Lo4ez7NbowW2VGNXw/Xv6tOuLUcxhBYZxsSUJtpeknCi8/fvnSpyCFp4Rcg==",
"cpu": [
"x64"
],
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@swc/helpers": {
"version": "0.5.2",
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.2.tgz",
"integrity": "sha512-E4KcWTpoLHqwPHLxidpOqQbcrZVgi0rsmmZXUle1jXmJfuIf/UWpczUJ7MZZ5tlxytgJXyp0w4PGkkeLiuIdZw==",
"dependencies": {
"tslib": "^2.4.0"
}
},
"node_modules/busboy": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz",
"integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==",
"dependencies": {
"streamsearch": "^1.1.0"
},
"engines": {
"node": ">=10.16.0"
}
},
"node_modules/caniuse-lite": {
"version": "1.0.30001599",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001599.tgz",
"integrity": "sha512-LRAQHZ4yT1+f9LemSMeqdMpMxZcc4RMWdj4tiFe3G8tNkWK+E58g+/tzotb5cU6TbcVJLr4fySiAW7XmxQvZQA==",
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/browserslist"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/caniuse-lite"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
]
},
"node_modules/client-only": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz",
"integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA=="
},
"node_modules/graceful-fs": {
"version": "4.2.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="
},
"node_modules/js-tokens": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
},
"node_modules/loose-envify": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
"integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
"dependencies": {
"js-tokens": "^3.0.0 || ^4.0.0"
},
"bin": {
"loose-envify": "cli.js"
}
},
"node_modules/nanoid": {
"version": "3.3.7",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz",
"integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"bin": {
"nanoid": "bin/nanoid.cjs"
},
"engines": {
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
}
},
"node_modules/next": {
"version": "14.1.3",
"resolved": "https://registry.npmjs.org/next/-/next-14.1.3.tgz",
"integrity": "sha512-oexgMV2MapI0UIWiXKkixF8J8ORxpy64OuJ/J9oVUmIthXOUCcuVEZX+dtpgq7wIfIqtBwQsKEDXejcjTsan9g==",
"dependencies": {
"@next/env": "14.1.3",
"@swc/helpers": "0.5.2",
"busboy": "1.6.0",
"caniuse-lite": "^1.0.30001579",
"graceful-fs": "^4.2.11",
"postcss": "8.4.31",
"styled-jsx": "5.1.1"
},
"bin": {
"next": "dist/bin/next"
},
"engines": {
"node": ">=18.17.0"
},
"optionalDependencies": {
"@next/swc-darwin-arm64": "14.1.3",
"@next/swc-darwin-x64": "14.1.3",
"@next/swc-linux-arm64-gnu": "14.1.3",
"@next/swc-linux-arm64-musl": "14.1.3",
"@next/swc-linux-x64-gnu": "14.1.3",
"@next/swc-linux-x64-musl": "14.1.3",
"@next/swc-win32-arm64-msvc": "14.1.3",
"@next/swc-win32-ia32-msvc": "14.1.3",
"@next/swc-win32-x64-msvc": "14.1.3"
},
"peerDependencies": {
"@opentelemetry/api": "^1.1.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"sass": "^1.3.0"
},
"peerDependenciesMeta": {
"@opentelemetry/api": {
"optional": true
},
"sass": {
"optional": true
}
}
},
"node_modules/picocolors": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
"integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ=="
},
"node_modules/postcss": {
"version": "8.4.31",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz",
"integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==",
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/postcss/"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/postcss"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"dependencies": {
"nanoid": "^3.3.6",
"picocolors": "^1.0.0",
"source-map-js": "^1.0.2"
},
"engines": {
"node": "^10 || ^12 || >=14"
}
},
"node_modules/react": {
"version": "18.2.0",
"resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz",
"integrity": "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==",
"dependencies": {
"loose-envify": "^1.1.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/react-dom": {
"version": "18.2.0",
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz",
"integrity": "sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==",
"dependencies": {
"loose-envify": "^1.1.0",
"scheduler": "^0.23.0"
},
"peerDependencies": {
"react": "^18.2.0"
}
},
"node_modules/scheduler": {
"version": "0.23.0",
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz",
"integrity": "sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==",
"dependencies": {
"loose-envify": "^1.1.0"
}
},
"node_modules/source-map-js": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.1.0.tgz",
"integrity": "sha512-9vC2SfsJzlej6MAaMPLu8HiBSHGdRAJ9hVFYN1ibZoNkeanmDmLUcIrj6G9DGL7XMJ54AKg/G75akXl1/izTOw==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/streamsearch": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz",
"integrity": "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==",
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/styled-jsx": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.1.1.tgz",
"integrity": "sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw==",
"dependencies": {
"client-only": "0.0.1"
},
"engines": {
"node": ">= 12.0.0"
},
"peerDependencies": {
"react": ">= 16.8.0 || 17.x.x || ^18.0.0-0"
},
"peerDependenciesMeta": {
"@babel/core": {
"optional": true
},
"babel-plugin-macros": {
"optional": true
}
}
},
"node_modules/tslib": {
"version": "2.6.2",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz",
"integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q=="
}
}
}

View File

@ -1,16 +0,0 @@
{
"name": "helloworldy",
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "next lint"
},
"dependencies": {
"react": "^18",
"react-dom": "^18",
"next": "14.1.3"
}
}

View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 394 80"><path fill="#000" d="M262 0h68.5v12.7h-27.2v66.6h-13.6V12.7H262V0ZM149 0v12.7H94v20.4h44.3v12.6H94v21h55v12.6H80.5V0h68.7zm34.3 0h-17.8l63.8 79.4h17.9l-32-39.7 32-39.6h-17.9l-23 28.6-23-28.6zm18.3 56.7-9-11-27.1 33.7h17.8l18.3-22.7z"/><path fill="#000" d="M81 79.3 17 0H0v79.3h13.6V17l50.2 62.3H81Zm252.6-.4c-1 0-1.8-.4-2.5-1s-1.1-1.6-1.1-2.6.3-1.8 1-2.5 1.6-1 2.6-1 1.8.3 2.5 1a3.4 3.4 0 0 1 .6 4.3 3.7 3.7 0 0 1-3 1.8zm23.2-33.5h6v23.3c0 2.1-.4 4-1.3 5.5a9.1 9.1 0 0 1-3.8 3.5c-1.6.8-3.5 1.3-5.7 1.3-2 0-3.7-.4-5.3-1s-2.8-1.8-3.7-3.2c-.9-1.3-1.4-3-1.4-5h6c.1.8.3 1.6.7 2.2s1 1.2 1.6 1.5c.7.4 1.5.5 2.4.5 1 0 1.8-.2 2.4-.6a4 4 0 0 0 1.6-1.8c.3-.8.5-1.8.5-3V45.5zm30.9 9.1a4.4 4.4 0 0 0-2-3.3 7.5 7.5 0 0 0-4.3-1.1c-1.3 0-2.4.2-3.3.5-.9.4-1.6 1-2 1.6a3.5 3.5 0 0 0-.3 4c.3.5.7.9 1.3 1.2l1.8 1 2 .5 3.2.8c1.3.3 2.5.7 3.7 1.2a13 13 0 0 1 3.2 1.8 8.1 8.1 0 0 1 3 6.5c0 2-.5 3.7-1.5 5.1a10 10 0 0 1-4.4 3.5c-1.8.8-4.1 1.2-6.8 1.2-2.6 0-4.9-.4-6.8-1.2-2-.8-3.4-2-4.5-3.5a10 10 0 0 1-1.7-5.6h6a5 5 0 0 0 3.5 4.6c1 .4 2.2.6 3.4.6 1.3 0 2.5-.2 3.5-.6 1-.4 1.8-1 2.4-1.7a4 4 0 0 0 .8-2.4c0-.9-.2-1.6-.7-2.2a11 11 0 0 0-2.1-1.4l-3.2-1-3.8-1c-2.8-.7-5-1.7-6.6-3.2a7.2 7.2 0 0 1-2.4-5.7 8 8 0 0 1 1.7-5 10 10 0 0 1 4.3-3.5c2-.8 4-1.2 6.4-1.2 2.3 0 4.4.4 6.2 1.2 1.8.8 3.2 2 4.3 3.4 1 1.4 1.5 3 1.5 5h-5.8z"/></svg>

Before

Width:  |  Height:  |  Size: 1.3 KiB

View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 283 64"><path fill="black" d="M141 16c-11 0-19 7-19 18s9 18 20 18c7 0 13-3 16-7l-7-5c-2 3-6 4-9 4-5 0-9-3-10-7h28v-3c0-11-8-18-19-18zm-9 15c1-4 4-7 9-7s8 3 9 7h-18zm117-15c-11 0-19 7-19 18s9 18 20 18c6 0 12-3 16-7l-8-5c-2 3-5 4-8 4-5 0-9-3-11-7h28l1-3c0-11-8-18-19-18zm-10 15c2-4 5-7 10-7s8 3 9 7h-19zm-39 3c0 6 4 10 10 10 4 0 7-2 9-5l8 5c-3 5-9 8-17 8-11 0-19-7-19-18s8-18 19-18c8 0 14 3 17 8l-8 5c-2-3-5-5-9-5-6 0-10 4-10 10zm83-29v46h-9V5h9zM37 0l37 64H0L37 0zm92 5-27 48L74 5h10l18 30 17-30h10zm59 12v10l-3-1c-6 0-10 4-10 10v15h-9V17h9v9c0-5 6-9 13-9z"/></svg>

Before

Width:  |  Height:  |  Size: 629 B

File diff suppressed because it is too large Load Diff

View File

@ -4,9 +4,9 @@ export default function NotFound() {
return ( return (
<div className='section'> <div className='section'>
<h2 className='title is-2'>404 Not Found</h2> <h2 className='title is-2'>404 Not Found</h2>
<p>Could not find that stream.</p> <p>Could not find that stream archive.</p>
<Link href="/s">Return to streams list</Link> <Link href="/s">Return to archive list</Link>
</div> </div>
) )
} }

View File

@ -12,6 +12,7 @@ interface IPageParams {
export default async function Page ({ params: { cuid } }: IPageParams) { export default async function Page ({ params: { cuid } }: IPageParams) {
const stream = await getStreamByCuid(cuid); const stream = await getStreamByCuid(cuid);
console.log(`getting stream by cuid. cuid=${cuid}`)
return ( return (
<> <>
<StreamPage stream={stream} /> <StreamPage stream={stream} />

View File

@ -0,0 +1,91 @@
import Pager from "@/components/pager";
import StreamsList from "@/components/streams-list";
import StreamsTable from '@/components/streams-table';
import { getAllStreams, getStreamsForVtuber } from "@/lib/streams";
// import { getAllVtubers } from "@/lib/vtubers";
import { notFound } from "next/navigation";
export default async function Page() {
// const vtubers = await getAllVtubers();
// const streams = await getAllStreams();
// const streams = await getStreamsForVtuber(1)
// const pageSize = 100;
// const page = 1;
// export interface IStream {
// id: number;
// attributes: {
// date: string;
// archiveStatus: 'good' | 'issue' | 'missing';
// vods: IVodsResponse;
// cuid: string;
// vtuber: IVtuberResponse;
// tweet: ITweetResponse;
// isChaturbateStream: boolean;
// isFanslyStream: boolean;
// }
// }
// if (!vtubers) notFound();
// const streams = [
// {
// "firstName": "Tanner",
// "lastName": "Linsley",
// "age": 33,
// "visits": 100,
// "progress": 50,
// "status": "Married",
// "id": 5,
// "attributes": {
// date: '2023-10-10T15:18:20.003Z',
// archiveStatus: 'missing',
// isChaturbateStream: false,
// isFanslyStream: true,
// vods: {},
// cuid: '2983482932384',
// vtuber: {},
// tweet: '',
// }
// },
// {
// "firstName": "Kevin",
// "lastName": "Vandy",
// "age": 27,
// "visits": 200,
// "progress": 100,
// "status": "Single",
// "id": 3,
// "attributes": {
// date: '2023-10-10T15:18:20.003Z',
// archiveStatus: 'missing',
// isChaturbateStream: true,
// isFanslyStream: true,
// vods: {},
// cuid: '29823432384',
// vtuber: {},
// tweet: '',
// }
// }
// ]
return (
<div className="section">
{/* <pre>
<p>here are the streams object</p>
<code>
{JSON.stringify(streams, null, 2)}
</code>
</pre> */}
<h1 className="title">Stream Archive</h1>
<StreamsTable />
{/* <StreamsList vtubers={vtubers} page={page} pageSize={pageSize} />
<Pager baseUrl="/streams" page={page} pageCount={vtubers.length/pageSize}/> */}
</div>
)
}

View File

@ -7,7 +7,7 @@ export interface IArchiveProgressProps {
} }
export default async function ArchiveProgress ({ vtuber }: IArchiveProgressProps) { export default async function ArchiveProgress ({ vtuber }: IArchiveProgressProps) {
const vods = await getVodsForVtuber(vtuber.id) // const vods = await getVodsForVtuber(vtuber.id)
// const streams = await getAllStreamsForVtuber(vtuber.id); // const streams = await getAllStreamsForVtuber(vtuber.id);
// const goodStreams = await getAllStreamsForVtuber(vtuber.id, ['good']); // const goodStreams = await getAllStreamsForVtuber(vtuber.id, ['good']);
// const issueStreams = await getAllStreamsForVtuber(vtuber.id, ['issue']); // const issueStreams = await getAllStreamsForVtuber(vtuber.id, ['issue']);
@ -16,17 +16,21 @@ export default async function ArchiveProgress ({ vtuber }: IArchiveProgressProps
// // Check if totalStreams is not zero before calculating completedPercentage // // Check if totalStreams is not zero before calculating completedPercentage
// const completedPercentage = (totalStreams !== 0) ? Math.round(eligibleStreams / totalStreams * 100) : 0; // const completedPercentage = (totalStreams !== 0) ? Math.round(eligibleStreams / totalStreams * 100) : 0;
// return ( const completedPercentage = 50
// <div> const totalStreams = 500
// <p className="heading">{eligibleStreams}/{totalStreams} Streams Archived ({completedPercentage}%)</p> const eligibleStreams = 50
// <progress className="progress is-success" value={eligibleStreams} max={totalStreams}>{completedPercentage}%</progress>
// </div>
// )
// @todo
return ( return (
<div> <div>
<i><p className="">{(vods) ? vods.data.length : 0} vods</p></i> <p>@todo</p>
<p className="heading">{eligibleStreams}/{totalStreams} Streams Archived ({completedPercentage}%)</p>
<progress className="progress is-success" value={eligibleStreams} max={totalStreams}>{completedPercentage}%</progress>
</div> </div>
) )
// @todo
// return (
// <div>
// <i><p className="">{(vods) ? vods.data.length : 0} vods</p></i>
// </div>
// )
} }

View File

@ -17,7 +17,7 @@ export default function Footer() {
<ul> <ul>
<li><Link href="#top">&uarr; Top of page</Link></li> <li><Link href="#top">&uarr; Top of page</Link></li>
<li><Link href="/vt">Vtubers</Link></li> <li><Link href="/vt">Vtubers</Link></li>
{/* <li><Link href="/streams">Stream Archive</Link></li> */} <li><Link href="/archive">Archive</Link></li>
<li><Link href="/about">About</Link></li> <li><Link href="/about">About</Link></li>
<li><Link href="/faq">FAQ</Link></li> <li><Link href="/faq">FAQ</Link></li>
<li><Link href="/goals">Goals</Link></li> <li><Link href="/goals">Goals</Link></li>

View File

@ -43,7 +43,7 @@ export default function Navbar() {
<div className={`navbar-menu ${isExpanded ? 'is-active' : ''}`} id="navMenu"> <div className={`navbar-menu ${isExpanded ? 'is-active' : ''}`} id="navMenu">
<div className='navbar-start'> <div className='navbar-start'>
<Link className="navbar-item is-expanded" href="/vt">Vtubers</Link> <Link className="navbar-item is-expanded" href="/vt">Vtubers</Link>
{/* <Link className="navbar-item is-expanded" href="/streams">Stream Archive</Link> */} <Link className="navbar-item is-expanded" href="/archive">Archive</Link>
<Link className="navbar-item is-expanded" href="/about">About</Link> <Link className="navbar-item is-expanded" href="/about">About</Link>
<Link className="navbar-item is-expanded" href="/faq">FAQ</Link> <Link className="navbar-item is-expanded" href="/faq">FAQ</Link>
<Link className="navbar-item is-expanded" href="/goals">Goals</Link> <Link className="navbar-item is-expanded" href="/goals">Goals</Link>

View File

@ -8,7 +8,7 @@ export function StreamButton({ stream }: { stream: IStream }) {
return ( return (
<Link <Link
href={`/streams/${stream.attributes.cuid}`} href={`/archive/${stream.attributes.cuid}`}
className="button is-medium" className="button is-medium"
> >
<span className="mr-2"><FontAwesomeIcon icon={faCalendar} className="fas fa-calendar" /></span><span>{new Date(stream.attributes.date).toLocaleDateString()}</span> <span className="mr-2"><FontAwesomeIcon icon={faCalendar} className="fas fa-calendar" /></span><span>{new Date(stream.attributes.date).toLocaleDateString()}</span>

View File

@ -45,6 +45,8 @@ function determineStatus(stream: IStream): Status {
} }
export default function StreamPage({ stream }: IStreamProps) { export default function StreamPage({ stream }: IStreamProps) {
console.log('StreamPage function has been invoked! stream as follows')
console.log(stream)
const displayName = stream.attributes.vtuber.data.attributes.displayName; const displayName = stream.attributes.vtuber.data.attributes.displayName;
const date = new Date(stream.attributes.date); const date = new Date(stream.attributes.date);
const [hemisphere, setHemisphere] = useState(Hemisphere.NORTHERN); const [hemisphere, setHemisphere] = useState(Hemisphere.NORTHERN);
@ -89,13 +91,22 @@ export default function StreamPage({ stream }: IStreamProps) {
// </pre> // </pre>
// </p> // </p>
// const platformsList = '???'; // const platformsList = [
const { isChaturbateInvite, isFanslyInvite } = stream.attributes.tweet.data.attributes; // stream.attributes.isChaturbateStream ? 'Chaturbate' : null,
const platformsArray = [ // stream.attributes.isFanslyStream ? 'Fansly' : null
isChaturbateInvite ? 'Chaturbate' : null, // ].filter(Boolean).join(', ');
isFanslyInvite ? 'Fansly' : null // platformsList = platformsArray.length > 0 ? platformsArray.join(', ') : 'None';
].filter(Boolean);
const platformsList = platformsArray.length > 0 ? platformsArray.join(', ') : 'None'; // const platformsList = [
// (stream.attributes.isChaturbateStream && 'CB'),
// (stream.attributes.isFanslyStream && 'Fansly')
// ].filter(Boolean).join(', ')
const platformsList = [
(stream.attributes.isChaturbateStream && 'CB'),
(stream.attributes.isFanslyStream && 'Fansly')
].filter(Boolean).join(', ') || '!!!';
return ( return (
@ -110,23 +121,39 @@ export default function StreamPage({ stream }: IStreamProps) {
<div className="section columns is-multiline"> <div className="section columns is-multiline">
<div className="column is-half"> <div className="column is-half">
<div className="box"> <div className="box">
<h2 className="title is-3">Details</h2>
<div className="columns is-multiline"> <div className="columns is-multiline">
<div className="column is-full"> <div className="column is-full">
<span><b>Announcement</b>&nbsp;<span><Link target="_blank" href={stream.attributes.tweet.data.attributes.url}><FontAwesomeIcon icon={faXTwitter}></FontAwesomeIcon><FontAwesomeIcon icon={faExternalLinkAlt}></FontAwesomeIcon></Link></span></span><br></br> <table className="table">
<span><b>Platform</b>&nbsp;</span><span>{platformsList}</span><br></br> <thead>
<span><b>UTC Datetime</b>&nbsp;</span><time dateTime={date.toISOString()}>{date.toISOString()}</time><br></br> <tr>
<span><b>Local Datetime</b>&nbsp;</span><span>{date.toLocaleDateString()} {date.toLocaleTimeString()}</span><br></br> <th className="is-family-sans-serif">Description</th>
<span><b>Lunar Phase</b>&nbsp;</span><span>{Moon.lunarPhase(date)} {Moon.lunarPhaseEmoji(date, { hemisphere })}</span><br></br> <th className="is-family-sans-serif">Details</th>
<br></br> </tr>
{/* <select className="mt-5" </thead>
value={selectedStatus} <tbody>
onChange={e => setSelectedStatus(e.target.value as Status)} {/* <tr>
> <td>Announcement</td>
<option>good</option> <td><Link target="_blank" href={stream.attributes.tweet.data.attributes.url}><FontAwesomeIcon icon={faXTwitter}></FontAwesomeIcon><FontAwesomeIcon icon={faExternalLinkAlt}></FontAwesomeIcon></Link></td>
<option>issue</option> </tr> */}
<option>missing</option> <tr>
</select> */} <td>Platform</td>
<td>{platformsList}</td>
</tr>
<tr>
<td>UTC Datetime</td>
<td><time dateTime={date.toISOString()}>{date.toISOString()}</time></td>
</tr>
<tr>
<td>Local Datetime</td>
<td>{date.toLocaleDateString()} {date.toLocaleTimeString()}</td>
</tr>
<tr>
<td>Lunar Phase</td>
<td>{Moon.lunarPhase(date)} {Moon.lunarPhaseEmoji(date, { hemisphere })}</td>
</tr>
</tbody>
</table>
</div> </div>
</div> </div>
</div> </div>
@ -150,6 +177,7 @@ export default function StreamPage({ stream }: IStreamProps) {
</div> </div>
{stream.attributes.vods.data.length !== 0 &&
<div className="section"> <div className="section">
<h1 className="title">VODs</h1> <h1 className="title">VODs</h1>
<table className="table"> <table className="table">
@ -179,7 +207,7 @@ export default function StreamPage({ stream }: IStreamProps) {
))} ))}
</tbody> </tbody>
</table> </table>
</div> </div>}
</div> </div>

View File

@ -1,194 +1,129 @@
'use client' 'use client'
import React from 'react'
import ReactDOM from 'react-dom/client'
import Link from 'next/link'
import {
keepPreviousData,
QueryClient,
useQuery,
} from '@tanstack/react-query'
import { import {
Column, PaginationState,
Table as ReactTable,
useReactTable, useReactTable,
ColumnFiltersState,
getCoreRowModel, getCoreRowModel,
getFilteredRowModel,
getFacetedRowModel,
getFacetedUniqueValues,
getFacetedMinMaxValues,
getPaginationRowModel,
sortingFns,
getSortedRowModel,
FilterFn,
SortingFn,
ColumnDef, ColumnDef,
flexRender, flexRender,
FilterFns,
ColumnOrderState,
createColumnHelper,
} from '@tanstack/react-table' } from '@tanstack/react-table'
import Image from 'next/image';
import { useState } from "react";
import { IStream } from "@/lib/streams";
import { LocalizedDate } from './localized-date';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { faAngleLeft, faAngleRight, faAnglesLeft, faAnglesRight, faChevronCircleRight, faChevronRight } from '@fortawesome/free-solid-svg-icons';
import Link from 'next/link';
function Filter({
column,
table,
}: {
column: Column<any, any>
table: ReactTable<any>
}) {
const firstValue = table
.getPreFilteredRowModel()
.flatRows[0]?.getValue(column.id)
const columnFilterValue = column.getFilterValue()
import { fetchStreamData, IStream } from '@/lib/streams'
if (typeof firstValue === 'number') { const queryClient = new QueryClient()
return (
<div className="flex space-x-2"> function getStatusClass(value: string) {
<input switch (value) {
type="number" case 'issue':
value={(columnFilterValue as [number, number])?.[0] ?? ''} return 'is-warning';
onChange={e => case 'missing':
column.setFilterValue((old: [number, number]) => [ return 'is-danger';
e.target.value, case 'good':
old?.[1], return 'is-success';
]) default:
return '';
} }
placeholder={`Min`}
className="w-24 border shadow rounded"
/>
<input
type="number"
value={(columnFilterValue as [number, number])?.[1] ?? ''}
onChange={e =>
column.setFilterValue((old: [number, number]) => [
old?.[0],
e.target.value,
])
}
placeholder={`Max`}
className="w-24 border shadow rounded"
/>
</div>
)
}
if (typeof firstValue === 'boolean') {
return (
<>
<div className='select'>
<select
onChange={(evt) => {
if (evt.target.value === "any")
column?.setFilterValue(null);
if (evt.target.value === "yes")
column?.setFilterValue(true);
if (evt.target.value === "no")
column?.setFilterValue(false);
}}
>
<option>any</option>
<option>yes</option>
<option>no</option>
</select>
</div>
</>
)
}
return (
<input
type="text"
value={(columnFilterValue ?? '') as string}
onChange={e => column.setFilterValue(e.target.value)}
placeholder={`Search...`}
className="input"
/>
)
} }
const archiveStatusClassName = (archiveStatus: string): string => { export default function StreamsTable() {
if (archiveStatus === 'missing') return 'is-danger'; const rerender = React.useReducer(() => ({}), {})[1]
if (archiveStatus === 'issue') return 'is-warning';
if (archiveStatus === 'good') return 'is-success';
return 'is-info';
};
export default function StreamsTable({ streams }: { streams: IStream[] }) {
// name
// title
// platform
// date
// archiveStatus
const columns = React.useMemo<ColumnDef<IStream>[]>(
() => [
{
header: 'VTuber',
accessorFn: d => d.attributes.vtuber.data?.attributes?.displayName,
},
{
header: 'Date',
accessorFn: d => new Date(d.attributes.date2).toISOString().split('T').at(0),
cell: info => <Link href={`/archive/${info.row.original.attributes.cuid}`}>{info.getValue() as string}</Link>
},
{
header: 'Platform',
accessorFn: d => [
(d.attributes.isChaturbateStream && 'CB'),
(d.attributes.isFanslyStream && 'Fansly')
].filter(Boolean).join(', ') || '???'
},
{
header: 'Status',
accessorFn: d => {
if (!d.attributes.archiveStatus) return 'missing';
return d.attributes.archiveStatus
}
},
// {
// header: 'Name',
// footer: props => props.column.id,
// columns: [
// {
// accessorKey: 'firstName',
// cell: info => info.getValue(),
// footer: props => props.column.id,
// },
// {
// accessorFn: row => row.lastName,
// id: 'lastName',
// cell: info => info.getValue(),
// header: () => <span>Last Name</span>,
// footer: props => props.column.id,
// },
// ],
// },
],
[]
)
const columnHelper = createColumnHelper<IStream>() const [pagination, setPagination] = React.useState<PaginationState>({
pageIndex: 0,
pageSize: 50,
const columns = [
columnHelper.accessor('attributes.cuid', {
cell: info => <Link href={`/streams/${info.getValue()}`}>{info.getValue()}</Link>,
header: () => <span>ID</span>
}),
columnHelper.accessor('attributes.vtuber.data.attributes.image', {
cell: info => <figure className='image is-24x24'><Image className="is-rounded" width={24} height={24} alt="" src={info.getValue()}></Image></figure>,
header: () => <span></span>,
enableColumnFilter: false
}),
columnHelper.accessor('attributes.vtuber.data.attributes.displayName', {
cell: info => info.getValue(),
header: () => <span>VTuber</span>
}),
columnHelper.accessor('attributes.date', {
cell: info => <LocalizedDate date={new Date(info.getValue())}/>,
header: () => <span>Date</span>
}),
columnHelper.accessor('attributes.isChaturbateStream', {
id: 'isChaturbateStream',
cell: info => info.getValue() === true ? 'yes' : 'no',
header: () => <span>Chaturbate</span>
}),
columnHelper.accessor('attributes.isFanslyStream', {
id: 'isFanslyStream',
cell: info => info.getValue() === true ? 'yes' : 'no',
header: () => <span>Fansly</span>
}),
columnHelper.accessor('attributes.archiveStatus', {
cell: info => <div className={`tag ${archiveStatusClassName(info.getValue())}`} >{info.getValue()}</div>,
header: () => <span>Status</span>
}) })
]
const [columnVisibility, setColumnVisibility] = useState({}) const dataQuery = useQuery({
const [columnOrder, setColumnOrder] = useState<ColumnOrderState>([]) queryKey: ['streams', pagination.pageIndex, pagination.pageSize],
const [columnFilters, setColumnFilters] = useState<ColumnFiltersState>([]) queryFn: () => fetchStreamData(pagination),
const [data, setData] = useState(() => streams) placeholderData: keepPreviousData, // don't have 0 rows flash while changing pages/loading next page,
staleTime: 1000
}, queryClient)
const defaultData = React.useMemo(() => [], [])
const table = useReactTable({ const table = useReactTable({
data, data: dataQuery?.data?.rows ?? defaultData,
columns, columns,
getCoreRowModel: getCoreRowModel(), // pageCount: dataQuery.data?.pageCount ?? -1, //you can now pass in `rowCount` instead of pageCount and `pageCount` will be calculated internally (new in v8.13.0)
getFilteredRowModel: getFilteredRowModel(), rowCount: dataQuery.data?.rowCount, // new in v8.13.0 - alternatively, just pass in `pageCount` directly
getPaginationRowModel: getPaginationRowModel(),
state: { state: {
columnVisibility, pagination,
columnOrder,
columnFilters
}, },
onColumnOrderChange: setColumnOrder, onPaginationChange: setPagination,
onColumnFiltersChange: setColumnFilters, getCoreRowModel: getCoreRowModel(),
manualPagination: true, //we're doing manual "server-side" pagination
// getPaginationRowModel: getPaginationRowModel(), // If only doing manual pagination, you don't need this
debugTable: true,
}) })
return ( return (
<>
<div className="p-2"> <div className="p-2">
<div className="h-2" /> <div className="h-2" />
<table className='table is-hoverable is-fullwidth'>
<table className='table'>
<thead> <thead>
{table.getHeaderGroups().map(headerGroup => ( {table.getHeaderGroups().map(headerGroup => (
<tr key={headerGroup.id}> <tr key={headerGroup.id}>
@ -201,11 +136,6 @@ export default function StreamsTable({ streams }: { streams: IStream[] }) {
header.column.columnDef.header, header.column.columnDef.header,
header.getContext() header.getContext()
)} )}
{header.column.getCanFilter() ? (
<div>
<Filter column={header.column} table={table} />
</div>
) : null}
</div> </div>
)} )}
</th> </th>
@ -220,7 +150,10 @@ export default function StreamsTable({ streams }: { streams: IStream[] }) {
<tr key={row.id}> <tr key={row.id}>
{row.getVisibleCells().map(cell => { {row.getVisibleCells().map(cell => {
return ( return (
<td key={cell.id}> <td
className={getStatusClass(cell.getValue() as string)}
key={cell.id}
>
{flexRender( {flexRender(
cell.column.columnDef.cell, cell.column.columnDef.cell,
cell.getContext() cell.getContext()
@ -233,52 +166,53 @@ export default function StreamsTable({ streams }: { streams: IStream[] }) {
})} })}
</tbody> </tbody>
</table> </table>
<div className="columns is-multiline is-mobile" />
<div className="column is-12"> <div className="columns is-mobile is-vcentered">
<div className='column is-half'>
<button <button
className="button icon is-rounded is-medium p-1 m-1" className="button border rounded mx-1"
onClick={() => table.setPageIndex(0)} onClick={() => table.firstPage()}
disabled={!table.getCanPreviousPage()} disabled={!table.getCanPreviousPage()}
> >
<FontAwesomeIcon className='fa-solid fa-angles-left' icon={faAnglesLeft}></FontAwesomeIcon> {'<<'}
</button> </button>
<button <button
className="button icon is-rounded is-medium p-1 m-1" className="button border rounded mx-1"
onClick={() => table.previousPage()} onClick={() => table.previousPage()}
disabled={!table.getCanPreviousPage()} disabled={!table.getCanPreviousPage()}
> >
<FontAwesomeIcon className='fa-solid fa-angle-left' icon={faAngleLeft}></FontAwesomeIcon> {'<'}
</button> </button>
<button <button
className="button icon is-rounded is-medium p-1 m-1" className="button border rounded mx-1"
onClick={() => table.nextPage()} onClick={() => table.nextPage()}
disabled={!table.getCanNextPage()} disabled={!table.getCanNextPage()}
> >
<FontAwesomeIcon className="fa-solid fa-angle-right" icon={faAngleRight}></FontAwesomeIcon> {'>'}
</button> </button>
<button <button
className="button icon is-medium is-rounded p-1 m-1" className="button border rounded mx-1"
onClick={() => table.setPageIndex(table.getPageCount() - 1)} onClick={() => table.lastPage()}
disabled={!table.getCanNextPage()} disabled={!table.getCanNextPage()}
> >
<FontAwesomeIcon className='fa-solid fa-angles-right' icon={faAnglesRight}></FontAwesomeIcon> {'>>'}
</button> </button>
</div> </div>
<div className='column is-2'> <div className='column is-half'>
<span>Page </span>
<div className=''> <strong>
<div className=''>
<span className='mr-1'>Page</span>
{table.getState().pagination.pageIndex + 1} of{' '} {table.getState().pagination.pageIndex + 1} of{' '}
{table.getPageCount()} {table.getPageCount().toLocaleString()}
</strong>
</div> </div>
</div> </div>
<div className=''> {/* second row with page number input and pages-per-screen select */}
<label className='label'> <div className='columns is-mobile is-vcentered'>
Go to page: <div className='column is-2 '>
</label> <span className='is-text-centered'>Go to page:</span>
<div className="control is-expanded"> </div>
<div className='column is-3'>
<input <input
type="number" type="number"
defaultValue={table.getState().pagination.pageIndex + 1} defaultValue={table.getState().pagination.pageIndex + 1}
@ -286,24 +220,18 @@ export default function StreamsTable({ streams }: { streams: IStream[] }) {
const page = e.target.value ? Number(e.target.value) - 1 : 0 const page = e.target.value ? Number(e.target.value) - 1 : 0
table.setPageIndex(page) table.setPageIndex(page)
}} }}
className="input p-1" className="input"
/> />
</div> </div>
</div> <div className='column is-5'>
</div> <div className="select">
<div className='column is-2'>
<div className="m-1">
<span className='mr-1'>Page</span>
</div>
<div className='select'>
<select <select
value={table.getState().pagination.pageSize} value={table.getState().pagination.pageSize}
onChange={e => { onChange={e => {
table.setPageSize(Number(e.target.value)) table.setPageSize(Number(e.target.value))
}} }}
> >
{[10, 20, 30, 40, 50].map(pageSize => ( {[20, 50, 100].map(pageSize => (
<option key={pageSize} value={pageSize}> <option key={pageSize} value={pageSize}>
Show {pageSize} Show {pageSize}
</option> </option>
@ -313,6 +241,9 @@ export default function StreamsTable({ streams }: { streams: IStream[] }) {
</div> </div>
</div> </div>
</>
</div>
) )
} }

View File

@ -156,9 +156,9 @@ export function Tagger({ vod, setTimestamps }: ITaggerProps): React.JSX.Element
} }
} }
if (!isAuthed) { // if (!isAuthed) {
return <></> // return <></>
} else { // } else {
if (isEditor) { if (isEditor) {
return ( return (
<div className='card mt-2' style={{ width: '100%' }}> <div className='card mt-2' style={{ width: '100%' }}>
@ -234,7 +234,7 @@ export function Tagger({ vod, setTimestamps }: ITaggerProps): React.JSX.Element
</button> </button>
); );
} }
} // }
} }

View File

@ -110,7 +110,6 @@ export const VideoPlayer = forwardRef(function VideoPlayer( props: IPlayerProps,
return ( return (
<> <>
<p className='notification'>CDN1 (for Patrons only)</p>
<MuxPlayer <MuxPlayer
onCanPlay={() => { onCanPlay={() => {
setIsPlayerReady(true)} setIsPlayerReady(true)}

View File

@ -31,12 +31,16 @@ export function VideoSourceSelector({
if (isEntitledToCDN) { if (isEntitledToCDN) {
if (selectedVideoSource === 'Mux' && isMux) { if (selectedVideoSource === 'Mux' && isMux) {
return 'Mux'; return 'Mux';
} else if (selectedVideoSource === 'B2' && isB2) {
return 'B2';
} }
} }
// If the user doesn't have entitlements or their preference is not available, default to IPFS
if (isIPFSSource) { // if the user has B2 as their preference or they have no preference, use B2
if (selectedVideoSource === 'B2' || !selectedVideoSource) {
return 'B2'
}
// use IPFS only if the user has opted to use it
if (selectedVideoSource === 'IPFSSource' && isIPFSSource) {
return 'IPFSSource'; return 'IPFSSource';
} else if (isIPFS240) { } else if (isIPFS240) {
return 'IPFS240'; return 'IPFS240';
@ -53,8 +57,16 @@ export function VideoSourceSelector({
// Check if the saved preference is valid based on entitlements and available sources // Check if the saved preference is valid based on entitlements and available sources
if (savedPreference === 'Mux' && isMux && isEntitledToCDN) { if (savedPreference === 'Mux' && isMux && isEntitledToCDN) {
setSelectedVideoSource('Mux'); setSelectedVideoSource('Mux');
} else if (savedPreference === 'B2' && isB2 && isEntitledToCDN) { } else if (savedPreference === 'B2') {
setSelectedVideoSource('B2'); setSelectedVideoSource('B2');
} else if (savedPreference === 'IPFSSource') {
setSelectedVideoSource('IPFSSource');
} else if (savedPreference === 'IPFS240') {
if (isIPFS240) {
setSelectedVideoSource('IPFS240');
} else {
setSelectedVideoSource('IPFSSource');
}
} else { } else {
// Determine the best video source if the saved preference is invalid or not available // Determine the best video source if the saved preference is invalid or not available
const bestSource = determineBestVideoSource(); const bestSource = determineBestVideoSource();
@ -69,7 +81,7 @@ export function VideoSourceSelector({
const handleSourceClick = (source: string) => { const handleSourceClick = (source: string) => {
if ( if (
(source === 'Mux' && isMux && isEntitledToCDN) || (source === 'Mux' && isMux && isEntitledToCDN) ||
(source === 'B2' && isB2 && isEntitledToCDN) || (source === 'B2' && isB2) ||
(source === 'IPFSSource') || (source === 'IPFSSource') ||
(source === 'IPFS240') (source === 'IPFS240')
) { ) {
@ -100,9 +112,9 @@ export function VideoSourceSelector({
</button> </button>
</div>} </div>}
{(isB2) && <div className="nav-item"> {(isB2) && <div className="nav-item">
<button onClick={() => handleSourceClick('B2')} disabled={!isEntitledToCDN} className={`button ${selectedVideoSource === 'B2' && 'is-active'}`}> <button onClick={() => handleSourceClick('B2')} className={`button ${selectedVideoSource === 'B2' && 'is-active'}`}>
<span className="icon"> <span className="icon">
<FontAwesomeIcon icon={faPatreon} className="fab fa-patreon" /> <FontAwesomeIcon icon={faGlobe} className="fab fa-globe" />
</span> </span>
<span>CDN 2</span> <span>CDN 2</span>
</button> </button>

View File

@ -7,12 +7,17 @@ import "@fortawesome/fontawesome-svg-core/styles.css";
import { AuthProvider } from './components/auth'; import { AuthProvider } from './components/auth';
import type { Metadata } from 'next'; import type { Metadata } from 'next';
import NotificationCenter from './components/notification-center'; import NotificationCenter from './components/notification-center';
import UppyProvider from './uppy'; // import {
// QueryClientProvider,
// QueryClient
// } from '@tanstack/react-query'
// import NextTopLoader from 'nextjs-toploader'; // import NextTopLoader from 'nextjs-toploader';
// import Ipfs from './components/ipfs'; // slows down the page too much // import Ipfs from './components/ipfs'; // slows down the page too much
// const queryClient = new QueryClient()
export const metadata: Metadata = { export const metadata: Metadata = {
title: 'Futureporn.net', title: 'Futureporn.net',
description: "The Galaxy's Best VTuber Hentai Site", description: "The Galaxy's Best VTuber Hentai Site",
@ -56,14 +61,14 @@ export default function RootLayout({
shadow="0 0 10px #2299DD,0 0 5px #2299DD" shadow="0 0 10px #2299DD,0 0 5px #2299DD"
/> */} /> */}
<AuthProvider> <AuthProvider>
<UppyProvider> {/* <QueryClientProvider client={queryClient}> */}
<Navbar /> <Navbar />
<NotificationCenter /> <NotificationCenter />
<div className="container"> <div className="container">
{children} {children}
<Footer /> <Footer />
</div> </div>
</UppyProvider> {/* </QueryClientProvider> */}
</AuthProvider> </AuthProvider>
</body> </body>
</html> </html>

View File

@ -29,6 +29,6 @@ export default async function fetchAPI(
} catch (error) { } catch (error) {
console.error(error); console.error(error);
throw new Error(`Error while fetching data from API.`); throw new Error(`Error while fetching data from API. ${path}`);
} }
} }

View File

@ -1,5 +1,5 @@
import { strapiUrl, siteUrl } from './constants'; import { siteUrl, strapiUrl } from './constants';
import { getSafeDate } from './dates'; import { getSafeDate } from './dates';
import { IVodsResponse } from './vods'; import { IVodsResponse } from './vods';
import { IVtuber, IVtuberResponse } from './vtubers'; import { IVtuber, IVtuberResponse } from './vtubers';
@ -12,6 +12,7 @@ export interface IStream {
id: number; id: number;
attributes: { attributes: {
date: string; date: string;
date2: string;
archiveStatus: 'good' | 'issue' | 'missing'; archiveStatus: 'good' | 'issue' | 'missing';
vods: IVodsResponse; vods: IVodsResponse;
cuid: string; cuid: string;
@ -35,7 +36,8 @@ export interface IStreamsResponse {
const fetchStreamsOptions = { const fetchStreamsOptions = {
next: { next: {
tags: ['streams'] tags: ['streams'],
revalidation: 1
} }
} }
@ -162,6 +164,7 @@ export async function getStream(id: number): Promise<IStream> {
export async function getAllStreams(archiveStatuses = ['missing', 'issue', 'good']): Promise<IStream[]> { export async function getAllStreams(archiveStatuses = ['missing', 'issue', 'good']): Promise<IStream[]> {
throw new Error('getAllStreams function is not performant. please use something more efficient.')
const pageSize = 100; // Adjust this value as needed const pageSize = 100; // Adjust this value as needed
const sortDesc = true; // Adjust the sorting direction as needed const sortDesc = true; // Adjust the sorting direction as needed
@ -308,7 +311,50 @@ export async function getAllStreamsForVtuber(vtuberId: number, archiveStatuses =
return allStreams; return allStreams;
} }
/**
* Used as table data on /archive page.
* .pageIndex, pagination.pageSize
*/
export async function fetchStreamData({ pageIndex, pageSize }: { pageIndex: number, pageSize: number }) {
const offset = pageIndex * pageSize;
const query = qs.stringify({
populate: {
vtuber: {
fields: ['slug', 'displayName', 'publishedAt']
}
},
filters: {
vtuber: {
publishedAt: {
$notNull: true
}
}
},
pagination: {
start: offset,
limit: pageSize,
withCount: true
}
})
const response = await fetch(
`${strapiUrl}/api/streams?${query}`
);
const json = await response.json();
console.log(json)
const d = {
rows: json.data,
pageCount: Math.ceil(json.meta.pagination.total / pageSize),
rowCount: json.meta.pagination.total,
}
// console.log(`fetchStreamData with pageIndex=${pageIndex}, pageSize=${pageSize}\n\n${JSON.stringify(d, null, 2)}`)
return d;
}
export async function getStreamsForVtuber(vtuberId: number, page: number = 1, pageSize: number = 25, sortDesc = true): Promise<IStreamsResponse> { export async function getStreamsForVtuber(vtuberId: number, page: number = 1, pageSize: number = 25, sortDesc = true): Promise<IStreamsResponse> {
console.log(`getStreamsForVtuber() with strapiUrl=${strapiUrl}`)
const query = qs.stringify( const query = qs.stringify(
{ {
populate: { populate: {
@ -334,8 +380,10 @@ export async function getStreamsForVtuber(vtuberId: number, page: number = 1, pa
} }
} }
) )
return fetch(`${strapiUrl}/api/streams?${query}`, fetchStreamsOptions) const res = await fetch(`${strapiUrl}/api/streams?${query}`, fetchStreamsOptions)
.then((res) => res.json()) const data = await res.json()
console.log(data)
return data
} }

Some files were not shown because too many files have changed in this diff Show More