capture progress

CJ_Clippy 2024-07-22 18:59:41 -08:00
parent 2b2f925146
commit af8f58940a
130 changed files with 11546 additions and 7793 deletions

View File

@ -5,5 +5,6 @@
"file:///home/cj/Documents/futureporn-monorepo/charts/fp/templates/strapi.yaml"
],
"https://json.schemastore.org/kustomization.json": "file:///home/cj/Documents/futureporn-monorepo/clusters/production/infrastructure.yaml"
}
},
"editor.tabSize": 2
}

View File

@ -44,7 +44,7 @@ chisel:
clean:
kind delete cluster
node ./packages/infra/vultr-delete-orphaned-resources.js
dotenvx run -f .env.${ENV} -- node ./packages/infra/vultr-delete-orphaned-resources.js
deps:
echo "Some of the install methods for these dependencies are not cross-platform compatible. Some of the install methods are not tested. Expect this to fail. Please consult the Makefile for URLs to project sources."

Tiltfile
View File

@ -13,11 +13,11 @@ secret_settings(
## cert-manager loaded using this extension is PAINFULLY SLOW, and it must re-install and re-test every time the Tiltfile changes.
## additionally, it is SYNCHRONOUS, which means nothing else can update until cert-manager is updated. @see https://github.com/tilt-dev/tilt-extensions/pull/90#issuecomment-704381205
## TL;DR: It's much preferred & much faster to use a helm chart for working with cert-manager in every environment.
load('ext://cert_manager', 'deploy_cert_manager')
deploy_cert_manager(
load_to_kind=True,
version='v1.15.1',
)
# load('ext://cert_manager', 'deploy_cert_manager')
# deploy_cert_manager(
# load_to_kind=True,
# version='v1.15.1',
# )
load('ext://helm_remote', 'helm_remote')
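## A hedged sketch of the helm-chart route described above (chart repo, namespace, and values here are
## assumptions for illustration; this commit does not pin them down):
# helm_remote(
#   'cert-manager',
#   repo_name='jetstack',
#   repo_url='https://charts.jetstack.io',
#   namespace='futureporn',
#   version='v1.15.1',
#   set=['installCRDs=true'],
# )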
@ -135,29 +135,29 @@ docker_build(
]
)
docker_build(
'fp/bot',
'.',
only=[
'./.npmrc',
'./package.json',
'./pnpm-lock.yaml',
'./pnpm-workspace.yaml',
'./packages/bot',
'./packages/image',
'./packages/scout',
'./packages/storage',
'./packages/temporal-workflows',
'./packages/types',
'./packages/utils',
],
dockerfile='./d.bot.dockerfile',
target='dev',
live_update=[
sync('./packages/bot', '/app'),
run('cd /app && pnpm i', trigger=['./packages/bot/package.json', './packages/bot/pnpm-lock.yaml'])
]
)
# docker_build(
# 'fp/bot',
# '.',
# only=[
# './.npmrc',
# './package.json',
# './pnpm-lock.yaml',
# './pnpm-workspace.yaml',
# './packages/bot',
# './packages/image',
# './packages/scout',
# './packages/storage',
# './packages/workflows',
# './packages/types',
# './packages/utils',
# ],
# dockerfile='./d.bot.dockerfile',
# target='dev',
# live_update=[
# sync('./packages/bot', '/app'),
# run('cd /app && pnpm i', trigger=['./packages/bot/package.json', './packages/bot/pnpm-lock.yaml'])
# ]
# )
@ -167,39 +167,34 @@ docker_build(
load('ext://uibutton', 'cmd_button')
cmd_button('postgres:create',
argv=['./scripts/postgres-create.sh'],
resource='postgres',
resource='postgresql-primary',
icon_name='dataset',
text='create (empty) databases',
)
cmd_button('postgres:restore',
argv=['sh', './scripts/postgres-restore.sh'],
resource='postgres',
argv=['dotenvx', 'run', '-f', '.env.development', '--', './scripts/postgres-restore.sh'],
resource='postgresql-primary',
icon_name='upload',
text='restore db from backup',
)
cmd_button('postgres:drop',
argv=['sh', './scripts/postgres-drop.sh'],
resource='postgres',
resource='postgresql-primary',
icon_name='delete',
text='DROP futureporn_db'
)
cmd_button('postgres:drop_temporal',
argv=['sh', './scripts/postgres-drop-temporal.sh'],
resource='postgres',
icon_name='delete',
text='DROP temporal'
)
cmd_button('postgres:backup',
argv=['sh', './scripts/postgres-backup.sh'],
resource='postgres',
resource='postgresql-primary',
icon_name='download',
text='backup the database'
)
cmd_button('temporal-web:namespace',
argv=['sh', './scripts/temporal-namespaces.sh'],
resource='temporal-web',
icon_name='badge',
text='create futureporn namespace',
cmd_button('postgres:graphile',
argv=['sh', './scripts/postgres-test-graphile.sh'],
resource='postgresql-primary',
icon_name='graph',
text='create graphile test job',
)
@ -219,25 +214,24 @@ docker_build(
)
docker_build(
'fp/scout',
'.',
dockerfile='d.scout.dockerfile',
target='scout',
live_update=[
sync('./packages/scout', '/app'),
run('cd /app && pnpm i', trigger=['./packages/scout/package.json', './packages/scout/pnpm-lock.yaml']),
],
entrypoint='pnpm nodemon --ext js,ts,json,yaml --exec node --no-warnings=ExperimentalWarning --loader ts-node/esm ./src/index.ts'
# entrypoint='pnpm tsx watch ./src/index.ts'
)
# docker_build(
# 'fp/scout',
# '.',
# dockerfile='d.scout.dockerfile',
# target='scout',
# live_update=[
# sync('./packages/scout', '/app'),
# run('cd /app && pnpm i', trigger=['./packages/scout/package.json', './packages/scout/pnpm-lock.yaml']),
# ],
# entrypoint='pnpm nodemon --ext js,ts,json,yaml --exec node --no-warnings=ExperimentalWarning --loader ts-node/esm ./src/index.ts'
# # entrypoint='pnpm tsx watch ./src/index.ts'
# )
docker_build(
'fp/worker',
'fp/mailbox',
'.',
dockerfile='d.worker.dockerfile',
target='worker',
dockerfile='d.mailbox.dockerfile',
target='mailbox',
only=[
'./.npmrc',
'./package.json',
@ -245,66 +239,63 @@ docker_build(
'./pnpm-workspace.yaml',
'./packages/image',
'./packages/scout',
'./packages/temporal-workflows',
'./packages/temporal-worker',
'./packages/mailbox',
'./packages/types',
'./packages/utils',
'./packages/video',
'./packages/storage',
],
live_update=[
sync('./packages/temporal-worker', '/app'),
run('cd /app && pnpm i', trigger=['./packages/temporal-worker/package.json', './packages/temporal-worker/pnpm-lock.yaml']),
sync('./packages/mailbox', '/app'),
run('cd /app && pnpm i', trigger=['./packages/mailbox/package.json', './packages/mailbox/pnpm-lock.yaml']),
],
# entrypoint='pnpm nodemon --ext js,ts,json,yaml --exec node --no-warnings=ExperimentalWarning --loader ts-node/esm ./src/index.ts'
)
# docker_build(
# 'fp/meal',
# '.',
# dockerfile='d.meal.dockerfile',
# target='meal',
# only=[
# './.npmrc',
# './package.json',
# './pnpm-lock.yaml',
# './pnpm-workspace.yaml',
# './packages/meal',
# './packages/taco',
# './packages/types',
# ],
# live_update=[
# sync('./packages/meal', '/app'),
# # run('cd /app && pnpm i', trigger=['./packages/meal/package.json', './packages/meal/pnpm-lock.yaml']),
# ],
# )
docker_build(
'fp/boop',
'fp/capture',
'.',
dockerfile='d.boop.dockerfile',
target='boop',
dockerfile='d.capture.dockerfile',
target='capture',
only=[
'./.npmrc',
'./package.json',
'./pnpm-lock.yaml',
'./pnpm-workspace.yaml',
'./packages/boop',
'./packages/taco',
'./packages/capture',
'./packages/scout',
'./packages/types',
'./packages/utils',
],
live_update=[
sync('./packages/boop', '/app'),
# run('cd /app && pnpm i', trigger=['./packages/boop/package.json', './packages/boop/pnpm-lock.yaml']),
sync('./packages/capture', '/app'),
],
)
# docker_build(
# 'fp/scout-worker',
# '.',
# # ignore=['./packages/next'], # I wish I could use this ignore to ignore file changes in this dir, but that's not how it works
# dockerfile='d.scout.dockerfile',
# target='scout-worker',
# live_update=[
# # idk if this run() is effective
# # run('cd /app && pnpm i', trigger=['./packages/scout/package.json', './packages/scout/pnpm-lock.yaml']),
# sync('./packages/scout', '/app'),
# ## this is a hack to avoid complete scout image rebuilds when src in ./packages/next is updated
# ## ./packages/next needs to be in the build context because scout depends on types exported from next module (a feature of pnpm workspaces)
# ## instead of a full rebuild, we put ./packages/next in the live_update spec so the changed files get shoved into /ignore-me
# ## ideally, I would like to include ./packages/next in the build context, but ignore file changes there for rebuilds.
# ## however, I don't think tilt has this capability.
# sync('./packages/next', '/ignore-me'),
# ],
# # this entrypoint is a godsend. It lets me restart the node app (fast) without having to rebuild the docker container (slow)
# entrypoint='pnpm nodemon --ext js,ts,json,yaml --exec node --no-warnings=ExperimentalWarning --loader ts-node/esm ./src/temporal/worker.ts'
# )
# k8s_resource(
@ -342,7 +333,7 @@ k8s_resource(
links=[
link('https://next.fp.sbtp.xyz'),
],
resource_deps=['strapi', 'postgres'],
resource_deps=['strapi', 'postgresql-primary'],
labels=['frontend'],
)
k8s_resource(
@ -352,12 +343,12 @@ k8s_resource(
link('https://strapi.fp.sbtp.xyz/admin'),
link('https://strapi.fp.sbtp.xyz'),
],
resource_deps=['postgres'],
resource_deps=['postgresql-primary'],
labels=['backend'],
)
k8s_resource(
workload='postgres',
workload='postgresql-primary',
port_forwards=['5432'],
labels=['backend'],
)
@ -374,8 +365,8 @@ k8s_resource(
k8s_resource(
workload='scout',
resource_deps=['postgres', 'strapi', 'temporal-frontend', 'worker'],
workload='mailbox',
resource_deps=['postgresql-primary', 'strapi'],
labels=['backend'],
)
@ -387,7 +378,7 @@ k8s_resource(
# k8s_resource(
# workload='pgadmin',
# port_forwards=['5050'],
# resource_deps=['postgres']
# resource_deps=['postgresql-primary']
# )
# k8s_resource(
@ -413,6 +404,25 @@ k8s_resource(
# 'grafana.sidecar.dashboards.provider.foldersFromFileStructure=true'
# ]
# )
helm_remote(
'postgresql',
repo_name='postgresql',
repo_url='https://charts.bitnami.com/bitnami',
namespace='futureporn',
version='15.5.17',
set=[
'auth.enablePostgresUser=true',
'auth.existingSecret=postgresql',
# 'architecture=standalone',
'architecture=replication',
'readReplicas.replicaCount=3',
'replication.synchronousCommit=on',
'replication.numSynchronousReplicas=1',
'replication.applicationName=futureporn',
'image.debug=true',
'auth.usePasswordFiles=true',
]
)
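## Note (illustrative, not part of the chart values above): with architecture=replication the chart exposes
## a read/write service and a read-only service, which is where the postgresql-primary and postgresql-read
## workloads referenced elsewhere in this Tiltfile come from. Example connection strings, assuming the
## default postgres user:
##   postgresql://postgres:<password>@postgresql-primary.futureporn.svc.cluster.local:5432/futureporn_db   (writes)
##   postgresql://postgres:<password>@postgresql-read.futureporn.svc.cluster.local:5432/futureporn_db      (read-only)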
## redis is only here for uppy's usage.
## If we can engineer a way to delete redis, let's do it
@ -430,95 +440,6 @@ helm_remote(
]
)
helm_remote(
'temporal',
repo_name='temporal',
repo_url='https://charts.lemontech.engineering',
namespace='futureporn',
version='0.37.0',
set=[
'admintools.image.tag=1.24.2-tctl-1.18.1-cli-0.13.0',
'web.image.tag=2.28.0',
'prometheus.enabled=false',
'grafana.enabled=false',
'elasticsearch.enabled=false',
'web.config.auth.enabled=true',
'cassandra.enabled=false',
'server.config.persistence.default.driver=sql',
'server.config.persistence.default.sql.driver=postgres12',
'server.config.persistence.default.sql.existingSecret=postgres',
'server.config.persistence.default.sql.secretName=postgres',
'server.config.persistence.default.sql.secretKey=password',
'server.config.persistence.default.sql.host=postgresql-primary.futureporn.svc.cluster.local',
'server.config.persistence.default.sql.port=5432',
'server.config.persistence.default.sql.user=postgres',
'server.config.persistence.visibility.driver=sql',
'server.config.persistence.visibility.sql.driver=postgres12',
'server.config.persistence.visibility.sql.host=postgresql-primary.futureporn.svc.cluster.local',
'server.config.persistence.visibility.sql.port=5432',
'server.config.persistence.visibility.sql.user=postgres',
'server.config.persistence.visibility.sql.existingSecret=postgres',
'server.config.persistence.visibility.sql.secretName=postgres',
'server.config.persistence.visibility.sql.secretKey=password',
]
)
k8s_resource(
workload='temporal-admintools',
labels='temporal',
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='temporal-frontend',
labels='temporal',
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='temporal-history',
labels='temporal',
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='temporal-worker',
labels='temporal',
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='temporal-web',
labels='temporal', port_forwards=['8080'],
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='temporal-schema-setup',
labels='temporal',
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='temporal-schema-update',
labels='temporal',
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='temporal-matching',
labels='temporal',
resource_deps=[
'postgres',
'strapi'
])
k8s_resource(
workload='external-dns',
labels=['networking'],
@ -536,23 +457,21 @@ k8s_resource(
)
k8s_resource(
workload='bot',
workload='postgresql-read',
labels=['backend']
)
# k8s_resource(
# workload='bot',
# labels=['backend'],
# # resource_deps=['strapi'],
# )
k8s_resource(
workload='capture-api',
port_forwards=['5003'],
labels=['backend'],
# resource_deps=['strapi', 'temporal-web'],
)
k8s_resource(
workload='worker',
workload='capture-worker',
labels=['backend'],
resource_deps=['strapi', 'temporal-web', 'postgres' ],
)
# k8s_resource(
# workload='trigger',
# labels=['backend'],
# port_forwards=['3030:3000'],
# resource_deps=['postgres', 'redis-master'],
# links=[
# link('http://localhost:3030')
# ],
# )
)

View File

@ -0,0 +1,167 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: trigger
namespace: futureporn
spec:
replicas: {{ .Values.trigger.worker.replicas }}
minReadySeconds: 5
strategy:
type: RollingUpdate
rollingUpdate:
maxSurge: {{ .Values.trigger.worker.replicas }}
maxUnavailable: 1
selector:
matchLabels:
app: trigger
template:
metadata:
labels:
app: trigger
spec:
containers:
- name: trigger
image: {{ .Values.trigger.imageName }}
## We override the entrypoint in order to apply a database schema workaround.
## The problem is that trigger.dev uses the public postgres schema, which is a postgres default.
## We don't want trigger.dev to use this default because multiple system components might share that schema.
## To avoid accidental data loss when two components write data there, we want trigger to use a non-public schema.
## @see https://github.com/triggerdotdev/trigger.dev/issues/1026
command:
- sh
- -c
- sed -i s/public/triggerdotdev/ ./packages/database/prisma/migrations/20240130165343_add_composite_index_to_job_run_for_job_id_and_created_at/migration.sql && ./scripts/entrypoint.sh
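## Illustrative effect of the sed rewrite above (not the literal migration contents): a statement like
##   CREATE INDEX ... ON "public"."JobRun" ("jobId", "createdAt");
## becomes
##   CREATE INDEX ... ON "triggerdotdev"."JobRun" ("jobId", "createdAt");
## before entrypoint.sh applies the migrations.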
imagePullPolicy: IfNotPresent
resources:
limits:
memory: 150Mi
requests:
memory: 100Mi
env:
- name: TRIGGER_TELEMETRY_DISABLED
value: "1"
- name: REDIS_PORT
value: "6379"
- name: REDIS_HOST
value: redis-master.futureporn.svc.cluster.local
- name: DATABASE_URL
valueFrom:
secretKeyRef:
name: trigger
key: databaseUrl
- name: DIRECT_URL
valueFrom:
secretKeyRef:
name: trigger
key: databaseUrl
- name: MAGIC_LINK_SECRET
valueFrom:
secretKeyRef:
name: trigger
key: magicLinkSecret
- name: SESSION_SECRET
valueFrom:
secretKeyRef:
name: trigger
key: sessionSecret
- name: ENCRYPTION_KEY
valueFrom:
secretKeyRef:
name: trigger
key: encryptionKey
- name: PROVIDER_SECRET
valueFrom:
secretKeyRef:
name: trigger
key: providerSecret
- name: COORDINATOR_SECRET
valueFrom:
secretKeyRef:
name: trigger
key: coordinatorSecret
ports:
- containerPort: 3030
volumeMounts:
- name: trigger-data
mountPath: /mnt/trigger-data
volumes:
- name: trigger-data
emptyDir: {}
---
apiVersion: v1
kind: Service
metadata:
name: trigger
namespace: futureporn
annotations:
external-dns.alpha.kubernetes.io/hostname: "{{ .Values.trigger.hostname }}"
chisel-operator.io/exit-node-name: "trigger-exit-node"
spec:
type: LoadBalancer
ports:
- port: 3030
targetPort: 3030
protocol: TCP
selector:
app: trigger
{{ if eq .Values.environment "development" }}
---
apiVersion: chisel-operator.io/v2
kind: ExitNode
metadata:
name: trigger-exit-node
namespace: futureporn
spec:
host: "{{ .Values.chisel.exitNodeIp }}"
port: 9090
auth: chisel
{{ end }}
---
apiVersion: traefik.io/v1alpha1
kind: IngressRoute
metadata:
name: trigger-http
namespace: futureporn
spec:
entryPoints:
- web
routes:
- match: Host(`trigger.fp.sbtp.xyz`)
kind: Rule
middlewares:
- name: redirect
namespace: futureporn
services:
- name: trigger
namespace: futureporn
port: 3030
---
apiVersion: traefik.io/v1alpha1
kind: IngressRoute
metadata:
name: trigger-https
namespace: futureporn
annotations:
cert-manager.io/cluster-issuer: "{{ .Values.certManager.issuer }}"
spec:
entryPoints:
- websecure
routes:
- match: Host(`trigger.fp.sbtp.xyz`)
kind: Rule
services:
- name: trigger
namespace: futureporn
port: 3030
tls:
secretName: trigger-tls

View File

@ -0,0 +1,93 @@
---
apiVersion: v1
kind: Service
metadata:
name: capture-api
namespace: futureporn
spec:
type: ClusterIP
selector:
app.kubernetes.io/name: capture
ports:
- name: http
port: {{ .Values.capture.api.port }}
targetPort: http
protocol: TCP
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: capture-worker
namespace: futureporn
labels:
app.kubernetes.io/name: capture-worker
spec:
replicas: {{ .Values.capture.worker.replicas }}
selector:
matchLabels:
app: capture-worker
template:
metadata:
labels:
app: capture-worker
spec:
containers:
- name: capture-worker
image: "{{ .Values.capture.imageName }}"
env:
- name: FUNCTION
value: worker
- name: DATABASE_URL
valueFrom:
secretKeyRef:
name: capture
key: databaseUrl
- name: PORT
value: "{{ .Values.capture.api.port }}"
resources:
limits:
cpu: 1000m
memory: 2Gi
restartPolicy: Always
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: capture-api
namespace: futureporn
labels:
app.kubernetes.io/name: capture
spec:
replicas: {{ .Values.capture.api.replicas }}
selector:
matchLabels:
app: capture-api
template:
metadata:
labels:
app: capture-api
spec:
containers:
- name: capture
image: "{{ .Values.capture.imageName }}"
ports:
- name: http
containerPort: {{ .Values.capture.api.port }}
env:
- name: FUNCTION
value: api
- name: DATABASE_URL
valueFrom:
secretKeyRef:
name: capture
key: databaseUrl
- name: PORT
value: "{{ .Values.capture.api.port }}"
resources:
limits:
cpu: 100m
memory: 256Mi
restartPolicy: Always

View File

@ -0,0 +1,59 @@
apiVersion: apps/v1
kind: ReplicaSet
metadata:
name: mailbox
namespace: futureporn
labels:
app: mailbox
spec:
replicas: {{ .Values.mailbox.replicas }}
selector:
matchLabels:
app: mailbox
template:
metadata:
labels:
app: mailbox
spec:
containers:
- name: mailbox
image: "{{ .Values.mailbox.imageName }}"
imagePullPolicy: Always
ports:
- containerPort: 5000
env:
- name: DATABASE_URL
valueFrom:
secretKeyRef:
name: mailbox
key: databaseUrl
- name: PORT
value: "{{ .Values.mailbox.port }}"
- name: IMAP_SERVER
valueFrom:
secretKeyRef:
name: mailbox
key: imapServer
- name: IMAP_PORT
valueFrom:
secretKeyRef:
name: mailbox
key: imapPort
- name: IMAP_USERNAME
valueFrom:
secretKeyRef:
name: mailbox
key: imapUsername
- name: IMAP_PASSWORD
valueFrom:
secretKeyRef:
name: mailbox
key: imapPassword
resources:
limits:
cpu: "500m"
memory: "512Mi"
requests:
cpu: "250m"
memory: "256Mi"

View File

@ -1,67 +0,0 @@
apiVersion: apps/v1
kind: ReplicaSet
metadata:
name: worker
namespace: futureporn
labels:
app: worker
spec:
replicas: {{ .Values.worker.replicas }}
selector:
matchLabels:
app: worker
template:
metadata:
labels:
app: worker
spec:
containers:
- name: worker
image: "{{ .Values.worker.imageName }}"
imagePullPolicy: Always
ports:
- containerPort: 8080
env:
- name: TEMPORAL_SERVICE_ADDRESS
value: "temporal-frontend.futureporn.svc.cluster.local:7233"
- name: TEMPORAL_NAMESPACE
value: "futureporn"
- name: TEMPORAL_TASK_QUEUE
value: "futureporn"
# - name: STRAPI_URL
# value: https://strapi.fp.sbtp.xyz
# - name: S3_BUCKET_APPLICATION_KEY
# valueFrom:
# secretKeyRef:
# name: scout
# key: s3BucketApplicationKey
# - name: S3_BUCKET_KEY_ID
# valueFrom:
# secretKeyRef:
# name: scout
# key: s3BucketKeyId
# - name: SCOUT_NITTER_ACCESS_KEY
# valueFrom:
# secretKeyRef:
# name: scout
# key: nitterAccessKey
# - name: SCOUT_NITTER_URL
# value: https://nitter.sbtp.xyz
# - name: SCOUT_RECENTS_TOKEN
# valueFrom:
# secretKeyRef:
# name: scout
# key: recentsToken
# - name: SCOUT_STRAPI_API_KEY
# valueFrom:
# secretKeyRef:
# name: scout
# key: strapiApiKey
resources:
limits:
cpu: "500m"
memory: "512Mi"
requests:
cpu: "250m"
memory: "256Mi"

View File

@ -10,11 +10,17 @@ next:
nodeExtraCaCerts: /app/letsencrypt-stg-root-x1.pem
capture:
imageName: fp/capture
worker:
imageName: fp/worker
replicas: 2
worker:
replicas: 2
api:
port: 5003
replicas: 1
mailbox:
imageName: fp/mailbox
replicas: 1
cdnBucketUrl: https://fp-dev.b-cdn.net
s3BucketName: fp-dev
port: 5000
scout:
replicas: 1
imageName: fp/scout
@ -60,9 +66,10 @@ bot:
discordGuildId: "1084674137391374338"
imageName: fp/bot
replicas: 1
trigger:
imageName: ghcr.io/triggerdotdev/trigger.dev:self-host-rc.3
worker:
replicas: 2
webapp:
replicas: 1
# trigger:
# imageName: ghcr.io/triggerdotdev/trigger.dev:self-host-rc.3
# worker:
# replicas: 1
# webapp:
# replicas: 1
# hostname: trigger.fp.sbtp.xyz

View File

@ -1 +0,0 @@
charts/

View File

@ -1,24 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/
node_modules/

View File

@ -1,2 +0,0 @@
digest: sha256:e439e4b30ba18357defec97ba080973743a4724c423b78913990409f78f1ebd8
generated: "2023-10-20T14:22:57.044126+05:30"

View File

@ -1,24 +0,0 @@
apiVersion: v2
name: trigger
description: A Helm chart for a full Trigger application stack
# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"

View File

@ -1,3 +0,0 @@
# Trigger.dev Helm Chart
@see https://github.com/triggerdotdev/trigger.dev/tree/main/helm-charts

View File

@ -1,203 +0,0 @@
{
"name": "helm-charts",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "helm-charts",
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"@bitnami/readme-generator-for-helm": "^2.6.0"
}
},
"node_modules/@bitnami/readme-generator-for-helm": {
"version": "2.6.0",
"resolved": "https://registry.npmjs.org/@bitnami/readme-generator-for-helm/-/readme-generator-for-helm-2.6.0.tgz",
"integrity": "sha512-LcByNCryaC2OJExL9rnhyFJ18+vrZu1gVoN2Z7j/HI42EjV4kLgT4G1KEPNnrKbls9HvozBqMG+sKZIDh0McFg==",
"dependencies": {
"commander": "^7.1.0",
"dot-object": "^2.1.4",
"lodash": "^4.17.21",
"markdown-table": "^2.0.0",
"yaml": "^2.0.0-3"
},
"bin": {
"readme-generator": "bin/index.js"
}
},
"node_modules/balanced-match": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
},
"node_modules/brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"node_modules/commander": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz",
"integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==",
"engines": {
"node": ">= 10"
}
},
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
},
"node_modules/dot-object": {
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/dot-object/-/dot-object-2.1.4.tgz",
"integrity": "sha512-7FXnyyCLFawNYJ+NhkqyP9Wd2yzuo+7n9pGiYpkmXCTYa8Ci2U0eUNDVg5OuO5Pm6aFXI2SWN8/N/w7SJWu1WA==",
"dependencies": {
"commander": "^4.0.0",
"glob": "^7.1.5"
},
"bin": {
"dot-object": "bin/dot-object"
}
},
"node_modules/dot-object/node_modules/commander": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz",
"integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==",
"engines": {
"node": ">= 6"
}
},
"node_modules/fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="
},
"node_modules/glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
"dependencies": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"node_modules/inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"node_modules/markdown-table": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-2.0.0.tgz",
"integrity": "sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==",
"dependencies": {
"repeat-string": "^1.0.0"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/wooorm"
}
},
"node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
"dependencies": {
"wrappy": "1"
}
},
"node_modules/path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/repeat-string": {
"version": "1.6.1",
"resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz",
"integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==",
"engines": {
"node": ">=0.10"
}
},
"node_modules/wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
},
"node_modules/yaml": {
"version": "2.3.4",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz",
"integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==",
"engines": {
"node": ">= 14"
}
},
"readme-generator-for-helm": {
"version": "2.6.1",
"extraneous": true,
"license": "Apache-2.0",
"dependencies": {
"commander": "^7.1.0",
"dot-object": "^2.1.4",
"lodash": "^4.17.21",
"markdown-table": "^2.0.0",
"yaml": "^2.0.0-3"
},
"bin": {
"readme-generator": "bin/index.js"
},
"devDependencies": {
"eslint": "^7.24.0",
"eslint-config-airbnb-base": "^14.2.1",
"eslint-plugin-import": "^2.22.1",
"jest": "^29.2.1",
"temp": "^0.9.4"
}
}
}
}

View File

@ -1,15 +0,0 @@
{
"name": "helm-charts",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"generate-docs": "readme-generator --readme README.md --values values.yaml"
},
"keywords": [],
"author": "",
"license": "ISC",
"devDependencies": {
"@bitnami/readme-generator-for-helm": "^2.6.0"
}
}

View File

@ -1,71 +0,0 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "trigger.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "trigger.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create unified labels for trigger components
*/}}
{{- define "trigger.common.matchLabels" -}}
app: {{ template "trigger.name" . }}
release: {{ .Release.Name }}
{{- end -}}
{{- define "trigger.common.metaLabels" -}}
chart: {{ template "trigger.chart" . }}
heritage: {{ .Release.Service }}
{{- end -}}
{{- define "trigger.common.labels" -}}
{{ include "trigger.common.matchLabels" . }}
{{ include "trigger.common.metaLabels" . }}
{{- end -}}
{{- define "trigger.labels" -}}
{{ include "trigger.matchLabels" . }}
{{ include "trigger.common.metaLabels" . }}
{{- end -}}
{{- define "trigger.matchLabels" -}}
component: {{ .Values.trigger.name | quote }}
{{ include "trigger.common.matchLabels" . }}
{{- end -}}
{{/*
Create a fully qualified postgresql name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "trigger.postgresql.hostname" -}}
{{- if .Values.postgresql.fullnameOverride -}}
{{- .Values.postgresql.fullnameOverride | trunc 63 | trimSuffix "-" -}}
{{- else -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- if contains $name .Release.Name -}}
{{- printf "%s-%s" .Release.Name .Values.postgresql.name | trunc 63 | trimSuffix "-" -}}
{{- else -}}
{{- printf "%s-%s-%s" .Release.Name $name .Values.postgresql.name | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{- end -}}
{{- end -}}
{{/*
Create the postgresql connection string.
*/}}
{{- define "trigger.postgresql.connectionString" -}}
{{- $host := include "trigger.postgresql.hostname" . -}}
{{- $port := 5432 -}}
{{- $username := .Values.postgresql.global.postgresql.postgresqlUsername | default "postgres" -}}
{{- $password := .Values.postgresql.global.postgresql.postgresqlPassword | default "password" -}}
{{- $database := .Values.postgresql.global.postgresql.postgresqlDatabase | default "trigger" -}}
{{- $connectionString := printf "postgresql://%s:%s@%s:%d/%s" $username $password $host $port $database -}}
{{- printf "%s" $connectionString -}}
{{- end -}}

View File

@ -1,43 +0,0 @@
{{ if .Values.ingress.enabled }}
{{- $ingress := .Values.ingress }}
{{- if and $ingress.ingressClassName (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }}
{{- if not (hasKey $ingress.annotations "kubernetes.io/ingress.class") }}
{{- $_ := set $ingress.annotations "kubernetes.io/ingress.class" $ingress.ingressClassName}}
{{- end }}
{{- end }}
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: trigger-ingress
{{- with $ingress.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
{{- if and $ingress.ingressClassName (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }}
ingressClassName: {{ $ingress.ingressClassName | default "nginx" }}
{{- end }}
{{- if $ingress.tls }}
tls:
{{- range $ingress.tls }}
- hosts:
{{- range .hosts }}
- {{ . | quote }}
{{- end }}
secretName: {{ .secretName }}
{{- end }}
{{- end }}
rules:
- http:
paths:
- path: {{ $ingress.trigger.path }}
pathType: {{ $ingress.trigger.pathType }}
backend:
service:
name: {{ include "trigger.name" . }}
port:
number: 3000
{{- if $ingress.hostName }}
host: {{ $ingress.hostName }}
{{- end }}
{{ end }}

View File

@ -1,95 +0,0 @@
{{- $trigger := .Values.trigger -}}
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "trigger.name" . }}
namespace: futureporn
annotations:
updatedAt: {{ now | date "2006-01-01 MST 15:04:05" | quote }}
{{- with $trigger.deploymentAnnotations }}
{{- toYaml . | nindent 4 }}
{{- end }}
labels:
{{- include "trigger.labels" . | nindent 4 }}
spec:
replicas: {{ $trigger.replicaCount }}
selector:
matchLabels:
{{- include "trigger.matchLabels" . | nindent 6 }}
template:
metadata:
labels:
{{- include "trigger.matchLabels" . | nindent 8 }}
annotations:
updatedAt: {{ now | date "2006-01-01 MST 15:04:05" | quote }}
{{- with $trigger.podAnnotations }}
{{- toYaml . | nindent 8 }}
{{- end }}
spec:
{{- with $trigger.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
{{- end }}
containers:
- name: {{ $trigger.name }}
image: "{{ $trigger.image.repository }}:{{ $trigger.image.tag | default "latest" }}"
imagePullPolicy: {{ $trigger.image.pullPolicy }}
ports:
- name: http
containerPort: 3000
protocol: TCP
readinessProbe:
httpGet:
path: /
port: 3000
envFrom:
- secretRef:
name: {{ $trigger.kubeSecretRef | default (include "trigger.name" .) }}
{{- if $trigger.resources }}
resources: {{- toYaml $trigger.resources | nindent 12 }}
{{- end }}
---
apiVersion: v1
kind: Service
metadata:
name: trigger
labels:
annotations:
spec:
type: {{ $trigger.service.type }}
selector:
{{- include "trigger.matchLabels" . | nindent 8 }}
ports:
- port: 3000
targetPort: 3000
protocol: TCP
{{- if eq $trigger.service.type "NodePort" }}
nodePort: {{ $trigger.service.nodePort }}
{{- end }}
---
{{ if not $trigger.kubeSecretRef }}
apiVersion: v1
kind: Secret
metadata:
name: {{ include "trigger.name" . }}
annotations:
"helm.sh/resource-policy": "keep"
type: Opaque
stringData:
{{- $requiredVars := dict "MAGIC_LINK_SECRET" (randAlphaNum 32 | lower)
"SESSION_SECRET" (randAlphaNum 32 | lower)
"ENCRYPTION_KEY" (randAlphaNum 32 | lower)
"DIRECT_URL" (include "trigger.postgresql.connectionString" .)
"DATABASE_URL" (include "trigger.postgresql.connectionString" .) }}
{{- $secretObj := (lookup "v1" "Secret" .Release.Namespace (include "trigger.name" .)) | default dict }}
{{- $secretData := (get $secretObj "data") | default dict }}
{{ range $key, $value := .Values.trigger.env }}
{{- $default := get $requiredVars $key -}}
{{- $current := get $secretData $key | b64dec -}}
{{- $v := $value | default ($current | default $default) -}}
{{ $key }}: {{ $v | quote }}
{{ end -}}
{{- end }}

View File

@ -1,276 +0,0 @@
# Default values for helm-charts.
# This is a YAML-formatted file.
## @section Common parameters
##
## @param nameOverride Override release name
##
nameOverride: ""
## @param fullnameOverride Override release fullname
##
fullnameOverride: ""
## @section Trigger.dev parameters
##
trigger:
## @param trigger.name
name: trigger
## @param trigger.fullnameOverride trigger fullnameOverride
##
fullnameOverride: ""
## @param trigger.podAnnotations trigger pod annotations
##
podAnnotations: {}
## @param trigger.deploymentAnnotations trigger deployment annotations
##
deploymentAnnotations: {}
## @param trigger.replicaCount trigger replica count
##
replicaCount: 2
## trigger image parameters
##
image:
## @param trigger.image.repository trigger image repository
##
repository: ghcr.io/triggerdotdev/trigger.dev
## @param trigger.image.tag trigger image tag
##
tag: "latest"
## @param trigger.image.pullPolicy trigger image pullPolicy
##
pullPolicy: Always
## @param trigger.resources.limits.memory container memory limit [(docs)](https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/)
## @param trigger.resources.requests.cpu container CPU requests [(docs)](https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/)
##
resources:
limits:
memory: 800Mi
requests:
cpu: 250m
## @param trigger.affinity Backend pod affinity
##
affinity: {}
## @param trigger.kubeSecretRef trigger secret resource reference name
##
kubeSecretRef: ""
## trigger service
##
service:
## @param trigger.service.annotations trigger service annotations
##
annotations: {}
## @param trigger.service.type trigger service type
##
type: ClusterIP
## @param trigger.service.nodePort trigger service nodePort (used if above type is `NodePort`)
##
nodePort: ""
## @skip trigger.env
##
env:
ENCRYPTION_KEY: ""
MAGIC_LINK_SECRET: ""
SESSION_SECRET: ""
LOGIN_ORIGIN: ""
APP_ORIGIN: ""
DIRECT_URL: ""
DATABASE_URL: ""
FROM_EMAIL: ""
REPLY_TO_EMAIL: ""
RESEND_API_KEY: ""
AUTH_GITHUB_CLIENT_ID: ""
AUTH_GITHUB_CLIENT_SECRET: ""
## @section Postgres parameters
## Documentation: https://github.com/bitnami/charts/tree/main/bitnami/postgresql-ha
##
postgresql:
## @param postgresql.enabled Enable Postgres
##
enabled: true
## @param postgresql.name Name used to build variables (deprecated)
##
name: "postgresql"
## @param postgresql.nameOverride Name override
##
nameOverride: "postgresql"
## @param postgresql.fullnameOverride Fullname override
##
fullnameOverride: "postgresql"
global:
postgresql:
## @param postgresql.global.postgresql.auth.postgresPassword Password for the "postgres" admin user (overrides `auth.postgresPassword`)
## @param postgresql.global.postgresql.auth.username Name for a custom user to create (overrides `auth.username`)
## @param postgresql.global.postgresql.auth.password Password for the custom user to create (overrides `auth.password`)
## @param postgresql.global.postgresql.auth.database Name for a custom database to create (overrides `auth.database`)
## @param postgresql.global.postgresql.auth.existingSecret Name of existing secret to use for PostgreSQL credentials (overrides `auth.existingSecret`).
## @param postgresql.global.postgresql.auth.secretKeys.adminPasswordKey Name of key in existing secret to use for PostgreSQL credentials (overrides `auth.secretKeys.adminPasswordKey`). Only used when `postgresql.global.postgresql.auth.existingSecret` is set.
## @param postgresql.global.postgresql.auth.secretKeys.userPasswordKey Name of key in existing secret to use for PostgreSQL credentials (overrides `auth.secretKeys.userPasswordKey`). Only used when `postgresql.global.postgresql.auth.existingSecret` is set.
## @param postgresql.global.postgresql.auth.secretKeys.replicationPasswordKey Name of key in existing secret to use for PostgreSQL credentials (overrides `auth.secretKeys.replicationPasswordKey`). Only used when `postgresql.global.postgresql.auth.existingSecret` is set.
##
auth:
postgresPassword: "password"
username: "postgres"
password: "password"
database: "trigger"
existingSecret: ""
secretKeys:
adminPasswordKey: ""
userPasswordKey: ""
replicationPasswordKey: ""
## @param postgresql.global.postgresql.service.ports.postgresql PostgreSQL service port (overrides `service.ports.postgresql`)
##
service:
ports:
postgresql: "5432"
## Bitnami PostgreSQL image version
## ref: https://hub.docker.com/r/bitnami/postgresql/tags/
## @param postgresql.image.registry PostgreSQL image registry
## @param postgresql.image.repository PostgreSQL image repository
## @param postgresql.image.tag PostgreSQL image tag (immutable tags are recommended)
## @param postgresql.image.digest PostgreSQL image digest in the way sha256:aa.... Please note this parameter, if set, will override the tag
## @param postgresql.image.pullPolicy PostgreSQL image pull policy
## @param postgresql.image.pullSecrets Specify image pull secrets
## @param postgresql.image.debug Specify if debug values should be set
##
image:
registry: docker.io
repository: bitnami/postgresql
tag: 14.10.0-debian-11-r21
digest: ""
## Specify a imagePullPolicy
## Defaults to 'Always' if image tag is 'latest', else set to 'IfNotPresent'
## ref: https://kubernetes.io/docs/user-guide/images/#pre-pulling-images
##
pullPolicy: IfNotPresent
## Optionally specify an array of imagePullSecrets.
## Secrets must be manually created in the namespace.
## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/
## Example:
## pullSecrets:
## - myRegistryKeySecretName
##
pullSecrets: []
## Set to true if you would like to see extra information on logs
##
debug: false
## @param postgresql.architecture PostgreSQL architecture (`standalone` or `replication`)
##
architecture: standalone
## Replication configuration
## Ignored if `postgresql.architecture` is `standalone`
##
## @param postgresql.containerPorts.postgresql PostgreSQL container port
##
containerPorts:
postgresql: 5432
## @param postgresql.postgresqlDataDir PostgreSQL data dir
##
postgresqlDataDir: /bitnami/postgresql/data
## @param postgresql.postgresqlSharedPreloadLibraries Shared preload libraries (comma-separated list)
##
postgresqlSharedPreloadLibraries: "pgaudit"
## @section PostgreSQL Primary parameters
##
primary:
## Configure extra options for PostgreSQL Primary containers' liveness, readiness and startup probes
## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/#configure-probes
## @param postgresql.primary.livenessProbe.enabled Enable livenessProbe on PostgreSQL Primary containers
## @param postgresql.primary.livenessProbe.initialDelaySeconds Initial delay seconds for livenessProbe
## @param postgresql.primary.livenessProbe.periodSeconds Period seconds for livenessProbe
## @param postgresql.primary.livenessProbe.timeoutSeconds Timeout seconds for livenessProbe
## @param postgresql.primary.livenessProbe.failureThreshold Failure threshold for livenessProbe
## @param postgresql.primary.livenessProbe.successThreshold Success threshold for livenessProbe
##
livenessProbe:
enabled: true
initialDelaySeconds: 30
periodSeconds: 10
timeoutSeconds: 5
failureThreshold: 6
successThreshold: 1
## @param postgresql.primary.readinessProbe.enabled Enable readinessProbe on PostgreSQL Primary containers
## @param postgresql.primary.readinessProbe.initialDelaySeconds Initial delay seconds for readinessProbe
## @param postgresql.primary.readinessProbe.periodSeconds Period seconds for readinessProbe
## @param postgresql.primary.readinessProbe.timeoutSeconds Timeout seconds for readinessProbe
## @param postgresql.primary.readinessProbe.failureThreshold Failure threshold for readinessProbe
## @param postgresql.primary.readinessProbe.successThreshold Success threshold for readinessProbe
##
readinessProbe:
enabled: true
initialDelaySeconds: 5
periodSeconds: 10
timeoutSeconds: 5
failureThreshold: 6
successThreshold: 1
## @param postgresql.primary.startupProbe.enabled Enable startupProbe on PostgreSQL Primary containers
## @param postgresql.primary.startupProbe.initialDelaySeconds Initial delay seconds for startupProbe
## @param postgresql.primary.startupProbe.periodSeconds Period seconds for startupProbe
## @param postgresql.primary.startupProbe.timeoutSeconds Timeout seconds for startupProbe
## @param postgresql.primary.startupProbe.failureThreshold Failure threshold for startupProbe
## @param postgresql.primary.startupProbe.successThreshold Success threshold for startupProbe
##
startupProbe:
enabled: false
initialDelaySeconds: 30
periodSeconds: 10
timeoutSeconds: 1
failureThreshold: 15
successThreshold: 1
persistence:
## @param postgresql.primary.persistence.enabled Enable PostgreSQL Primary data persistence using PVC
##
enabled: true
## @param postgresql.primary.persistence.existingClaim Name of an existing PVC to use
##
existingClaim: ""
## @param postgresql.primary.persistence.accessModes PVC Access Mode for PostgreSQL volume
##
accessModes:
- ReadWriteOnce
## @param postgresql.primary.persistence.size PVC Storage Request for PostgreSQL volume
##
size: 8Gi
## @section Ingress parameters
## Documentation: https://kubernetes.io/docs/concepts/services-networking/ingress/
##
ingress:
## @param ingress.enabled Enable ingress
##
enabled: true
## @param ingress.ingressClassName Ingress class name
##
ingressClassName: nginx
## @param ingress.nginx.enabled Ingress controller
##
nginx:
enabled: false
## @param ingress.annotations Ingress annotations
##
annotations:
{}
# kubernetes.io/ingress.class: "nginx"
# cert-manager.io/issuer: letsencrypt-nginx
## @param ingress.hostName Ingress hostname (your custom domain name, e.g. `infisical.example.org`)
## Replace with your own domain
##
hostName: ""
## @param ingress.tls Ingress TLS hosts (matching above hostName)
## Replace with your own domain
##
tls:
[]
# - secretName: letsencrypt-nginx
# hosts:
# - infisical.local
## @param ingress.trigger.path Trigger.dev ingress path
## @param ingress.trigger.pathType Trigger.dev ingress path type
##
trigger:
path: /
pathType: Prefix

View File

@ -1,35 +1,50 @@
FROM node:18-alpine
# Install dependencies only when needed
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
FROM node:20-alpine AS base
## Install dependencies only when needed
## Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
RUN corepack enable && corepack prepare pnpm@latest --activate
RUN corepack enable && corepack prepare pnpm@9.5.0 --activate
# Enable `pnpm add --global` on Alpine Linux by setting
# home location environment variable to a location already in $PATH
# https://github.com/pnpm/pnpm/issues/784#issuecomment-1518582235
## Enable `pnpm add --global` on Alpine Linux by setting
## home location environment variable to a location already in $PATH
## https://github.com/pnpm/pnpm/issues/784#issuecomment-1518582235
ENV PNPM_HOME=/usr/local/bin
# update and install latest dependencies, add dumb-init package
# add a non root user
RUN apk update && apk upgrade && apk add dumb-init ffmpeg make gcc g++ python3
## install yt-dlp
RUN wget -O /usr/local/bin/yt-dlp https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp && chmod +x /usr/local/bin/yt-dlp
WORKDIR /app
# Copy and install the dependencies for the project
COPY ./packages/capture/package.json ./packages/capture/pnpm-lock.yaml ./
# Copy all other project files to working directory
COPY ./packages/capture .
# Run the next build process and generate the artifacts
RUN pnpm i;
FROM base AS build
## Copy the manifests and lockfiles into the build context
COPY package.json pnpm-lock.yaml pnpm-workspace.yaml .npmrc .
COPY ./packages/capture/package.json ./packages/capture/pnpm-lock.yaml ./packages/capture/
COPY ./packages/scout/package.json ./packages/scout/pnpm-lock.yaml ./packages/scout/
COPY ./packages/types/package.json ./packages/types/pnpm-lock.yaml ./packages/types/
COPY ./packages/utils/package.json ./packages/utils/pnpm-lock.yaml ./packages/utils/
## install npm packages
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm fetch
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --recursive
# expose 3000 on container
EXPOSE 3000
# set app host ,port and node env
ENV HOSTNAME=0.0.0.0 PORT=3000 NODE_ENV=production
# start the app with dumb init to spawn the Node.js runtime process
# with signal support
CMD [ "dumb-init", "node", "index.js" ]
## Copy in all other project files
COPY ./packages/capture/ ./packages/capture/
COPY ./packages/scout/ ./packages/scout/
COPY ./packages/types/ ./packages/types/
COPY ./packages/utils/ ./packages/utils/
## Run the build process and generate the artifacts
RUN pnpm run -r build
RUN mkdir -p /prod/capture
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm --filter=@futureporn/capture deploy --prod /prod/capture
RUN ls -la /prod/capture
## start the app with dumb init to spawn the Node.js runtime process
## with signal support
FROM base AS capture
ENV HOSTNAME=0.0.0.0 NODE_ENV=production
COPY --from=build /prod/capture .
CMD [ "dumb-init", "node", "dist/index.js" ]

d.mailbox.dockerfile
View File

@ -0,0 +1,65 @@
## d.mailbox.dockerfile
##
## @todo future improvement might be merging the dockerfiles for the various monorepo packages.
## this is not an easy task, so I'm not doing it right now.
## "make it work, make it right, make it fast" (in that order)
## Right now we are making things work with separate dockerfiles for each package.
## One thing to determine is build speed. If we're developing in Tilt and have to wait 20 minutes for the build to complete
## every time we change a file in any dependent package, then merging dockerfiles is not desirable.
## One of the slow parts of the docker build is copying all package directories into the build context.
## If we have a lot of packages, it takes a long time.
## I have yet to run performance benchmarks, so it's unclear whether merging dockerfiles is desirable.
##
## @todo another performance improvement would almost certainly be to move strapi, next, and similar packages from `packages/*` into `services/*`
## this way, when we're building the various @futureporn library-type packages, we don't have to filter and COPY the dependency packages one-by-one.
## instead, we add the entire `packages/*` directory and then move on to the next step.
FROM node:20 AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR /app
RUN corepack enable && corepack prepare pnpm@9.5.0 --activate
FROM base AS build
WORKDIR /app
RUN mkdir -p /app/packages/mailbox && mkdir -p /prod/mailbox
## Copy manifests, lockfiles, and configs into the docker context
COPY package.json pnpm-lock.yaml .npmrc .
COPY ./packages/image/pnpm-lock.yaml ./packages/image/package.json ./packages/image/
COPY ./packages/scout/pnpm-lock.yaml ./packages/scout/package.json ./packages/scout/
COPY ./packages/storage/pnpm-lock.yaml ./packages/storage/package.json ./packages/storage/
COPY ./packages/types/pnpm-lock.yaml ./packages/types/package.json ./packages/types/
COPY ./packages/utils/pnpm-lock.yaml ./packages/utils/package.json ./packages/utils/
COPY ./packages/mailbox/pnpm-lock.yaml ./packages/mailbox/package.json ./packages/mailbox/
## Install npm packages
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm fetch
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --recursive --frozen-lockfile --prefer-offline
## Copy package code into docker context
COPY ./packages/image/ ./packages/image/
COPY ./packages/scout/ ./packages/scout/
COPY ./packages/storage/ ./packages/storage/
COPY ./packages/types/ ./packages/types/
COPY ./packages/utils/ ./packages/utils/
COPY ./packages/mailbox/ ./packages/mailbox/
## Transpile TS into JS
## we have to build @futureporn/image first because other packages depend on its built JS files
## next we build everything else
RUN pnpm --filter=@futureporn/mailbox build
# RUN pnpm --filter=!@futureporn/image -r build
# RUN pnpm -r build
## Copy all production code into one place
## `pnpm deploy` copies all dependencies into an isolated node_modules directory inside the target dir
## @see https://pnpm.io/cli/deploy
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm deploy --filter=@futureporn/mailbox --prod /prod/mailbox
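## Roughly what the deploy step leaves in /prod/mailbox (an illustrative sketch, not an exact listing):
##   package.json   -- the @futureporn/mailbox manifest
##   node_modules/  -- a self-contained install; workspace dependencies are copied in rather than symlinked
##   ...            -- the package's own files and build output
## which is why the final stage below only needs to COPY that one directory.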
FROM base AS mailbox
COPY --from=build /prod/mailbox .
RUN ls -la .
ENTRYPOINT ["pnpm", "start"]

View File

@ -1,57 +1,10 @@
FROM node:20 AS base
## Installing libvips-dev for sharp Compatibility
## (only needed on alpine)
# RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev nasm bash vips-dev git
FROM node:20 AS strapi
WORKDIR /usr/src/app/
RUN corepack enable && corepack prepare pnpm@9.5.0 --activate
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
ARG NODE_ENV=development
ENV NODE_ENV=${NODE_ENV}
EXPOSE 1339
ENTRYPOINT ["pnpm"]
FROM base AS build
WORKDIR /app
RUN mkdir -p /prod/strapi
COPY pnpm-workspace.yaml pnpm-lock.yaml .npmrc package.json .
COPY ./packages/types ./packages/types
COPY ./packages/strapi ./packages/strapi
RUN pnpm fetch
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install -g node-gyp --prefer-offline
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --recursive --prefer-offline
RUN pnpm -r build
RUN pnpm deploy --filter=strapi /prod/strapi
RUN ls -lah ./
RUN ls -lah ./packages
RUN ls -lah ./packages/strapi
RUN ls -lah /prod/strapi
# FROM base AS build
# RUN mkdir -p /prod/strapi
# WORKDIR /opt/
# COPY ./packages/strapi/package.json ./packages/strapi/pnpm-lock.yaml ./
# RUN pnpm fetch
# RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install -g node-gyp
# RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
# ENV PATH /opt/node_modules/.bin:$PATH
# WORKDIR /opt/app
# COPY ./packages/strapi/. .
# RUN pnpm -r build
# RUN pnpm deploy --filter=strapi /prod/strapi
FROM base AS dev
COPY --from=build /prod/strapi .
CMD ["run", "develop"]
FROM base AS strapi
WORKDIR /opt/app
RUN chown -R node:node /opt/app
USER node
COPY --from=build /prod/strapi .
RUN ls -la .
ENTRYPOINT ["pnpm", "start"]
COPY ./packages/strapi/package.json ./packages/strapi/pnpm-lock.yaml .
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
COPY ./packages/strapi/ .
RUN ["pnpm", "run", "build"]
CMD ["pnpm", "run", "develop"]

View File

@ -3,11 +3,6 @@
## @futureporn/worker is the system component which runs background tasks.
## Tasks such as thumbnail generation, video encoding, file transfers, etc.
##
## 'temporal-worker' is already a pod name (the temporal helm chart creates it for its internal use)
## so our docker image is called fp/worker, not fp/temporal-worker
## not that we need to name the docker image differently, but
## the hope with this name is to keep the mental concept of the two pods separate by having different names
##
## @todo future improvement might be merging the dockerfiles for the various monorepo packages.
## this is not an easy task, so I'm not doing it right now.
## "make it work, make it right, make it fast" (in that order)
@ -30,45 +25,46 @@ RUN corepack enable && corepack prepare pnpm@9.5.0 --activate
FROM base AS build
WORKDIR /app
RUN mkdir -p /app/packages/temporal-worker && mkdir -p /prod/temporal-worker
RUN mkdir -p /app/packages/worker && mkdir -p /prod/worker
## Copy manifests, lockfiles, and configs into the docker context
COPY package.json pnpm-lock.yaml .npmrc .
COPY ./packages/image/pnpm-lock.yaml ./packages/image/package.json ./packages/image/
COPY ./packages/scout/pnpm-lock.yaml ./packages/scout/package.json ./packages/scout/
COPY ./packages/storage/pnpm-lock.yaml ./packages/storage/package.json ./packages/storage/
COPY ./packages/temporal-workflows/pnpm-lock.yaml ./packages/temporal-workflows/package.json ./packages/temporal-workflows/
COPY ./packages/temporal-worker/pnpm-lock.yaml ./packages/temporal-worker/package.json ./packages/temporal-worker/
COPY ./packages/types/pnpm-lock.yaml ./packages/types/package.json ./packages/types/
COPY ./packages/utils/pnpm-lock.yaml ./packages/utils/package.json ./packages/utils/
# COPY ./packages/image/pnpm-lock.yaml ./packages/image/package.json ./packages/image/
# COPY ./packages/scout/pnpm-lock.yaml ./packages/scout/package.json ./packages/scout/
# COPY ./packages/storage/pnpm-lock.yaml ./packages/storage/package.json ./packages/storage/
# COPY ./packages/types/pnpm-lock.yaml ./packages/types/package.json ./packages/types/
# COPY ./packages/utils/pnpm-lock.yaml ./packages/utils/package.json ./packages/utils/
COPY ./packages/worker/pnpm-lock.yaml ./packages/worker/package.json ./packages/worker/
## Install npm packages
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm fetch
## we install node-gyp explicitly in order for sharp to install properly
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install -g node-gyp --prefer-offline
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --recursive --frozen-lockfile --prefer-offline
## Copy package code into docker context
COPY ./packages/image/ ./packages/image/
COPY ./packages/scout/ ./packages/scout/
COPY ./packages/storage/ ./packages/storage/
COPY ./packages/temporal-workflows/ ./packages/temporal-workflows/
COPY ./packages/temporal-worker/ ./packages/temporal-worker/
COPY ./packages/types/ ./packages/types/
COPY ./packages/utils/ ./packages/utils/
# COPY ./packages/image/ ./packages/image/
# COPY ./packages/scout/ ./packages/scout/
# COPY ./packages/storage/ ./packages/storage/
# COPY ./packages/types/ ./packages/types/
# COPY ./packages/utils/ ./packages/utils/
COPY ./packages/worker/ ./packages/worker/
## Transpile TS into JS
## we have to build @futureporn/image first because other packages depend on its built js files
## next we build everything else
RUN pnpm --filter=@futureporn/image build
RUN pnpm --filter=!@futureporn/image -r build
# RUN pnpm --filter=@futureporn/image build
# RUN pnpm --filter=!@futureporn/image -r build
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm -r build
## Copy all production code into one place
## `pnpm deploy` copies all dependencies into an isolated node_modules directory inside the target dir
## @see https://pnpm.io/cli/deploy
RUN pnpm deploy --filter=@futureporn/temporal-worker --prod /prod/temporal-worker
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm deploy --filter=@futureporn/worker --prod /prod/worker
FROM base AS worker
COPY --from=build /prod/temporal-worker .
COPY --from=build /prod/worker .
RUN ls -la .
ENTRYPOINT ["pnpm", "start"]

View File

@ -20,7 +20,7 @@ importers:
version: 3.1.4
ts-node:
specifier: ^10.9.2
version: 10.9.2(@types/node@20.14.10)(typescript@5.5.3)
version: 10.9.2(@types/node@20.14.11)(typescript@5.5.3)
typescript:
specifier: ^5.5.3
version: 5.5.3
@ -93,8 +93,8 @@ packages:
'@tsconfig/node16@1.0.4':
resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==}
'@types/node@20.14.10':
resolution: {integrity: sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ==}
'@types/node@20.14.11':
resolution: {integrity: sha512-kprQpL8MMeszbz6ojB5/tU8PLN4kesnN8Gjzw349rDlNgsSzg90lAVj3llK99Dh7JON+t9AuscPPFW6mPbTnSA==}
'@types/ws@8.5.11':
resolution: {integrity: sha512-4+q7P5h3SpJxaBft0Dzpbr6lmMaqh0Jr2tbhJZ/luAwvD7ohSCniYkwz/pLxuT2h0EOa6QADgJj1Ko+TzRfZ+w==}
@ -244,8 +244,8 @@ packages:
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
engines: {node: '>=8.10.0'}
semver@7.6.2:
resolution: {integrity: sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==}
semver@7.6.3:
resolution: {integrity: sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==}
engines: {node: '>=10'}
hasBin: true
@ -398,13 +398,13 @@ snapshots:
'@tsconfig/node16@1.0.4': {}
'@types/node@20.14.10':
'@types/node@20.14.11':
dependencies:
undici-types: 5.26.5
'@types/ws@8.5.11':
dependencies:
'@types/node': 20.14.10
'@types/node': 20.14.11
'@vladfrangu/async_event_emitter@2.4.4': {}
@ -530,7 +530,7 @@ snapshots:
ignore-by-default: 1.0.1
minimatch: 3.1.2
pstree.remy: 1.1.8
semver: 7.6.2
semver: 7.6.3
simple-update-notifier: 2.0.0
supports-color: 5.5.0
touch: 3.1.1
@ -546,11 +546,11 @@ snapshots:
dependencies:
picomatch: 2.3.1
semver@7.6.2: {}
semver@7.6.3: {}
simple-update-notifier@2.0.0:
dependencies:
semver: 7.6.2
semver: 7.6.3
supports-color@5.5.0:
dependencies:
@ -564,14 +564,14 @@ snapshots:
ts-mixer@6.0.4: {}
ts-node@10.9.2(@types/node@20.14.10)(typescript@5.5.3):
ts-node@10.9.2(@types/node@20.14.11)(typescript@5.5.3):
dependencies:
'@cspotcode/source-map-support': 0.8.1
'@tsconfig/node10': 1.0.11
'@tsconfig/node12': 1.0.11
'@tsconfig/node14': 1.0.3
'@tsconfig/node16': 1.0.4
'@types/node': 20.14.10
'@types/node': 20.14.11
acorn: 8.12.1
acorn-walk: 8.3.3
arg: 4.1.3

View File

@ -1,4 +1,6 @@
{
"extension": ["js"],
"spec": "test/**/*.test.js"
"$schema": "https://json.schemastore.org/mocharc.json",
"extensions": ["ts"],
"spec": "./src/**/*.spec.ts",
"require": "tsx"
}

View File

@ -1,9 +1,24 @@
# Capture
# @futureporn/capture
## Dev notes
Cloud-based wrapper around yt-dlp.
There are two components in this package: api and worker.
## API components
The API container serves a REST API. Use this API to start/stop recordings. The API component doesn't actually record; it queues a record task which a worker then picks up. When run in k8s, this container is meant to run as a single replica.
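For example, queueing a recording is a single HTTP POST against the API. A minimal sketch (the localhost port is an assumption for a local dev setup; adjust it to wherever the API is listening):

```ts
// Minimal sketch: ask the capture API to queue a recording.
// http://localhost:5566 is a hypothetical address for local development.
const res = await fetch('http://localhost:5566/api/record', {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({ url: 'https://chaturbate.com/projektmelody' })
})
const job = await res.json() // the graphile-worker job that was queued
console.log(job)
```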
## Worker components
The Worker container runs a Graphile Worker which listens for recording-related tasks. This container is meant to run as one or more replicas.
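Tasks are plain graphile-worker task functions loaded from `src/tasks/`. A minimal sketch of one (the payload shape mirrors the `record` task in this package):

```ts
// Minimal sketch of a graphile-worker task the worker can pick up.
import { type Helpers } from 'graphile-worker'

type Payload = { url: string }

export default async function (payload: Payload, helpers: Helpers) {
  helpers.logger.info(`starting a recording for ${payload.url}`)
  // ...spawn yt-dlp / ffmpeg here...
}
```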
## Misc dev notes
### yt-dlp end-of-stream output
The end-of-stream output from yt-dlp when recording CB looks like this:
```
[https @ 0x5646887f1580] Opening 'https://edge11-lax.live.mmcdn.com/live-hls/amlst:hotfallingdevil-sd-fdf87e5b6c880e52d38e8c94f8ebf8728c980a91d56fb4ace13748ba59012336_trns_h264/chunklist_w881713853_b5128000_t64RlBTOjMwLjA=.m3u8' for reading
[hls @ 0x564687dd0980] Skip ('#EXT-X-VERSION:4')

View File

@ -0,0 +1,12 @@
import type { GraphileConfig } from "graphile-config";
import type {} from "graphile-worker";
const preset: GraphileConfig.Preset = {
worker: {
connectionString: process.env.DATABASE_URL,
concurrentJobs: 3,
fileExtensions: [".js", ".ts"],
},
};
export default preset;

View File

@ -1,48 +1,65 @@
{
"name": "capture",
"version": "0.1.12",
"main": "index.js",
"name": "@futureporn/capture",
"type": "module",
"version": "0.2.12",
"license": "Unlicense",
"private": true,
"type": "module",
"scripts": {
"start": "node --trace-warnings index",
"test": "echo '@todo -- WARN common/logger in this module does not resolve. exiting.'; exit 0; FUTUREPORN_WORKDIR=/home/chris/Downloads mocha",
"integration": "FUTUREPORN_WORKDIR=/home/chris/Downloads mocha ./integration/**/*.test.js",
"dev": "FUTUREPORN_WORKDIR=/home/chris/Downloads nodemon index",
"start": "node dist/index.js",
"build": "tsup",
"test": "mocha",
"integration": "FUTUREPORN_WORKDIR=/home/cj/Downloads mocha ./integration/**/*.test.js",
"dev": "tsx --watch ./src/index.ts",
"clean": "rm -rf dist",
"superclean": "rm -rf node_modules && rm -rf pnpm-lock.yaml && rm -rf dist"
},
"dependencies": {
"@paralleldrive/cuid2": "^2.1.8",
"diskusage": "^1.1.3",
"dotenv": "^16.0.3",
"@aws-sdk/client-s3": "^3.617.0",
"@aws-sdk/lib-storage": "^3.588.0",
"@aws-sdk/types": "^3.609.0",
"@futureporn/scout": "workspace:^",
"@futureporn/utils": "workspace:^",
"@paralleldrive/cuid2": "^2.2.2",
"@types/chai": "^4.3.16",
"@types/mocha": "^10.0.7",
"@types/node": "^20.14.11",
"diskusage": "^1.2.0",
"dotenv": "^16.4.5",
"execa": "^6.1.0",
"fastify": "^4.12.0",
"fastq": "^1.15.0",
"fastify": "^4.28.1",
"fastify-plugin": "^4.5.1",
"fastq": "^1.17.1",
"faye": "^1.4.0",
"faye-websocket": "^0.11.4",
"fluent-ffmpeg": "^2.1.2",
"fluent-ffmpeg": "^2.1.3",
"graphile-config": "0.0.1-beta.9",
"graphile-worker": "^0.16.6",
"https": "^1.0.0",
"ioredis": "^5.2.4",
"minimatch": "^5.1.1",
"ioredis": "^5.4.1",
"minimatch": "^5.1.6",
"p-retry": "^5.1.2",
"postgres": "^3.3.3",
"rxjs": "^7.8.0",
"pino-pretty": "^11.2.1",
"postgres": "^3.4.4",
"rxjs": "^7.8.1",
"sql": "^0.78.0",
"winston": "^3.9.0",
"youtube-dl-wrap": "git+https://github.com/insanity54/youtube-dl-wrap.git"
"winston": "^3.13.1",
"youtube-dl-wrap": "github:insanity54/youtube-dl-wrap"
},
"devDependencies": {
"chai": "^4.3.7",
"cheerio": "^1.0.0-rc.12",
"mocha": "^10.2.0",
"multiformats": "^11.0.1",
"node-abort-controller": "^3.0.1",
"node-fetch": "^3.3.0",
"nodemon": "^2.0.20",
"sinon": "^15.0.1",
"@types/sinon": "^17.0.3",
"@types/sinon-chai": "^3.2.12",
"chai": "^4.4.1",
"cheerio": "1.0.0-rc.12",
"mocha": "^10.7.0",
"multiformats": "^11.0.2",
"node-abort-controller": "^3.1.1",
"node-fetch": "^3.3.2",
"nodemon": "^2.0.22",
"sinon": "^15.2.0",
"sinon-chai": "^3.7.0",
"sinon-test": "^3.1.5"
"sinon-test": "^3.1.6",
"tsup": "^8.1.2",
"tsx": "^4.16.2",
"typescript": "^5.5.3"
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,10 +1,6 @@
import Voddo from './Voddo.js'
import {loggerFactory} from 'common/logger'
const logger = loggerFactory({
service: 'futureporn/capture'
})
export default class Capture {
@ -37,7 +33,7 @@ export default class Capture {
* save Vod data to db
*/
async save (cid, timestamp) {
logger.log({ level: 'debug', message: `saving ${cid} \n w/ captureDate ${timestamp}` })
console.log({ level: 'debug', message: `saving ${cid} \n w/ captureDate ${timestamp}` })
this.date = timestamp
return await this.sql`INSERT INTO vod ( "videoSrcHash", "captureDate" ) values (${cid}, ${timestamp}) returning *`
}

View File

@ -1,7 +1,7 @@
import Video from '../src/Video.js'
import Capture from '../src/Capture.js'
import Ipfs from '../src/Ipfs.js'
import Video from './Video.js'
import Capture from './Capture.js'
import Ipfs from './Ipfs.js'
import chai, { expect } from 'chai'
import { dirname } from 'path';
import { fileURLToPath } from 'url';
@ -9,7 +9,7 @@ import path from 'node:path'
import sinon from 'sinon'
import sinonChai from 'sinon-chai'
import { CID } from 'multiformats/cid'
import Voddo from '../src/Voddo.js'
import Voddo from './Voddo.js'
import EventEmitter from 'node:events'
import postgres from 'postgres'

View File

@ -0,0 +1,22 @@
import Record from "./Record.js"
import { expect } from "chai"
describe('Record', function () {
let record: Record
this.beforeEach(function () {
record = new Record({ url: 'https://example.com/my-cool-stream' })
})
describe('start()', function () {
it('should start the recording', async function () {
await record.start()
expect(record).to.have.property('id')
expect(record).to.have.property('url')
})
})
describe('stop()', function () {
it('should stop the recording', async function () {
await record.stop()
expect(record).to.have.property('cdnUrl')
})
})
})

View File

@ -0,0 +1,109 @@
import { createId } from '@paralleldrive/cuid2'
import { spawn } from 'child_process';
import { ua0 } from '@futureporn/scout/ua.js'
import { getTmpFile } from '@futureporn/utils'
export interface RecordArgs {
url: string;
filename?: string;
channel?: string;
date?: string;
}
export default class Record {
readonly id: string;
readonly url: string;
filename?: string;
channel?: string;
date?: string;
constructor({ url }: RecordArgs) {
if (!url) throw new Error('url passed to Record constructor was undefined.');
this.id = createId()
this.url = url
}
async start() {
console.log(`@TODO record start with id=${this.id}, url=${this.url}`)
const playlistUrlPromise = new Promise<string>((resolve) => {
const ytdlp = spawn('yt-dlp', [
'-g',
this.url
])
ytdlp.stdout.on('data', (data) => {
// yt-dlp -g prints the playlist URL on stdout; convert the Buffer chunk to a trimmed string
resolve(data.toString().trim())
})
})
const playlistUrl = await playlistUrlPromise
const filename = getTmpFile(`stream.ts`)
// Copy the HLS stream to a local MPEG-TS file without re-encoding.
// Note: spawn() does not use a shell, so the header value must not be wrapped in extra quotes.
const ffmpegProcess = spawn('ffmpeg', [
'-headers', `User-Agent: ${ua0}`,
'-i', playlistUrl,
'-c:v', 'copy',
'-c:a', 'copy',
'-movflags', 'faststart',
'-y',
'-f', 'mpegts',
filename
], {
stdio: 'inherit'
});
const ps = spawn('ps', ['ax']);
const grep = spawn('grep', ['ssh']);
ps.stdout.on('data', (data) => {
grep.stdin.write(data);
});
ps.stderr.on('data', (data) => {
console.error(`ps stderr: ${data}`);
});
ps.on('close', (code) => {
if (code !== 0) {
console.log(`ps process exited with code ${code}`);
}
grep.stdin.end();
});
grep.stdout.on('data', (data) => {
console.log(data.toString());
});
grep.stderr.on('data', (data) => {
console.error(`grep stderr: ${data}`);
});
grep.on('close', (code) => {
if (code !== 0) {
console.log(`grep process exited with code ${code}`);
}
});
return {
id: this.id,
url: this.url
}
}
async stop() {
throw new Error('@todo please implement')
}
}

View File

@ -1,6 +1,6 @@
import 'dotenv/config'
import Video from '../src/Video.js'
import Video from './Video.js'
import { dirname } from 'path';
import { fileURLToPath } from 'url';
import path from 'node:path'

View File

@ -1,5 +1,5 @@
import 'dotenv/config'
import Voddo from '../src/Voddo.js'
import Voddo from './Voddo.js'
import chai, { expect } from 'chai'
import sinon from 'sinon'
import YoutubeDlWrap from 'youtube-dl-wrap'
@ -14,7 +14,6 @@ import { join, dirname } from 'path';
import { fileURLToPath } from 'url';
import sinonChai from 'sinon-chai'
import sinonTest from "sinon-test";
import path from 'path'
chai.use(sinonChai);

View File

@ -0,0 +1,91 @@
'use strict'
import { build } from './app.js'
import chai, { expect } from "chai"
import sinonChai from 'sinon-chai'
import sinon from 'sinon'
import { makeWorkerUtils } from 'graphile-worker'
chai.use(sinonChai)
describe('app', function () {
const app = build({}, 'postgres://')
describe('/', function () {
it('GET', async function () {
const response = await app.inject({
method: 'GET',
url: '/'
})
expect(response.statusCode).to.equal(200)
expect(JSON.parse(response.body)).to.have.property('version')
})
})
xdescribe('/api/records', function () {
it('GET -- list the records', async function () {
const response = await app.inject({
method: 'GET',
url: '/api/records'
})
expect(response.statusCode).to.equal(200)
const body = JSON.parse(response.body)
expect(body).to.have.property('data')
expect(body.data).to.be.an.instanceof(Array);
})
it('DELETE -- delete all records', async function () {
const response = await app.inject({
method: 'DELETE',
url: '/api/records'
})
expect(response.statusCode).to.equal(200)
const body = JSON.parse(response.body)
expect(body).to.have.property('data')
expect(body.data).to.be.lengthOf(0);
})
})
describe('/api/record', function () {
describe('POST', function () {
it('should create', async function () {
let url = 'https://example.com/my-cool-stream'
const response = await app.inject({
method: 'POST',
url: '/api/record',
body: {
url
}
})
expect(response.statusCode).to.equal(200)
const body = JSON.parse(response.body)
expect(body).to.have.property('id')
expect(body).to.have.property('url', url)
})
it('should return 400 if url is missing', async function () {
const response = await app.inject({
method: 'POST',
url: '/api/record',
})
expect(response.statusCode).to.equal(400)
})
})
xit('GET -- list a record', async function () {
const response = await app.inject({
method: 'GET',
url: '/api/record'
})
expect(response.statusCode).to.equal(200)
expect(JSON.parse(response.body)).to.have.property('id')
expect(JSON.parse(response.body)).to.have.property('sourceUrl')
expect(JSON.parse(response.body)).to.have.property('fileSize')
expect(JSON.parse(response.body)).to.have.property('outputUrl')
})
xit('DELETE -- delete a record', async function () {
const response = await app.inject({
method: 'DELETE',
url: '/api/record'
})
expect(response.statusCode).to.equal(200)
expect(JSON.parse(response.body)).to.have.property('id')
})
})
})

View File

@ -0,0 +1,30 @@
'use strict'
import fastify, { type FastifyRequest } from 'fastify'
import { getPackageVersion } from '@futureporn/utils'
import graphileWorkerPlugin, { type ExtendedFastifyInstance } from './fastify-graphile-worker-plugin.js'
const version = getPackageVersion('../package.json')
interface RecordBodyType {
url: string
}
function build(opts: Record<string, any>={}, connectionString: string) {
const app: ExtendedFastifyInstance = fastify(opts)
app.register(graphileWorkerPlugin, { connectionString })
app.get('/', async function (request, reply) {
return { app: '@futureporn/capture', version }
})
app.post('/api/record', async function (request: FastifyRequest<{ Body: RecordBodyType }>, reply) {
const { url } = request.body
console.log(`POST /api/record with url=${url}`)
const job = await app.graphile.addJob('record', { url })
return job
})
return app
}
export {
build
}

View File

@ -0,0 +1,22 @@
import { type FastifyInstance } from 'fastify'
import fp from 'fastify-plugin'
import { makeWorkerUtils } from 'graphile-worker'
type Options = {
connectionString: string;
}
export interface ExtendedFastifyInstance extends FastifyInstance {
graphile?: any
}
async function graphileWorkerPlugin (fastify: ExtendedFastifyInstance, opts: Options) {
if (!fastify.graphile) {
if (!opts.connectionString) throw new Error('graphileWorkerPlugin requires connectionString passed in options argument, but it was missing');
const workerUtils = await makeWorkerUtils({ connectionString: opts.connectionString })
fastify.decorate('graphile', workerUtils)
}
}
export default fp(graphileWorkerPlugin)

View File

@ -0,0 +1,77 @@
'use strict'
import { build } from './app.js'
import 'dotenv/config'
import { run } from 'graphile-worker'
import { dirname } from 'node:path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
if (!process.env.DATABASE_URL) throw new Error('DATABASE_URL is missing in env');
if (!process.env.FUNCTION) throw new Error(`FUNCTION env var was missing. FUNCTION env var must be either 'api' or 'worker'.`);
const connectionString = process.env.DATABASE_URL!
async function api() {
if (!process.env.PORT) throw new Error('PORT is missing in env');
const PORT = parseInt(process.env.PORT!)
const fastifyOpts = {
logger: {
level: 'info',
transport: {
target: 'pino-pretty'
}
}
}
const server = build(fastifyOpts, connectionString)
server.listen({ port: PORT }, (err) => {
if (err) {
server.log.error(err)
process.exit(1)
}
})
}
async function worker() {
const concurrency = (process.env?.WORKER_CONCURRENCY) ? parseInt(process.env.WORKER_CONCURRENCY) : 1
// Run a worker to execute jobs:
const runner = await run({
connectionString,
concurrency,
// Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
noHandleSignals: false,
pollInterval: 1000,
taskDirectory: `${__dirname}/tasks`,
});
// Immediately await (or otherwise handle) the resulting promise, to avoid
// "unhandled rejection" errors causing a process crash in the event of
// something going wrong.
await runner.promise;
// If the worker exits (whether through fatal error or otherwise), the above
// promise will resolve/reject.
}
async function main() {
if (process.env.FUNCTION === 'api') {
api()
} else if (process.env.FUNCTION === 'worker') {
worker()
} else {
throw new Error('process.env.FUNCTION must be either api or worker. got '+process.env.FUNCTION)
}
}
main().catch((err) => {
console.error(err);
process.exit(1);
});

View File

@ -0,0 +1,80 @@
'use strict'
import { build } from './app.js'
import 'dotenv/config'
import { run } from 'graphile-worker'
import { dirname } from 'node:path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
if (!process.env.DATABASE_URL) throw new Error('DATABASE_URL is missing in env');
if (!process.env.FUNCTION) throw new Error(`FUNCTION env var was missing. FUNCTION env var must be either 'api' or 'worker'.`);
const connectionString = process.env.DATABASE_URL!
async function api() {
if (!process.env.PORT) throw new Error('PORT is missing in env');
const PORT = parseInt(process.env.PORT!)
const fastifyOpts = {
logger: {
level: 'info',
transport: {
target: 'pino-pretty'
}
}
}
const server = build(fastifyOpts, connectionString)
server.listen({ port: PORT }, (err) => {
if (err) {
server.log.error(err)
process.exit(1)
}
})
}
async function worker() {
if (!process.env.WORKER_CONCURRENCY) throw new Error('WORKER_CONCURRENCY is missing in env');
const concurrency = (process.env?.WORKER_CONCURRENCY) ? parseInt(process.env.WORKER_CONCURRENCY) : 1
// Run a worker to execute jobs:
const runner = await run({
connectionString: process.env.DATABASE_URL!,
concurrency,
// Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
noHandleSignals: false,
pollInterval: 1000,
taskDirectory: `${__dirname}/tasks`,
});
// Immediately await (or otherwise handle) the resulting promise, to avoid
// "unhandled rejection" errors causing a process crash in the event of
// something going wrong.
await runner.promise;
// If the worker exits (whether through fatal error or otherwise), the above
// promise will resolve/reject.
}
async function main() {
if (process.env.FUNCTION === 'worker') {
worker()
} else if (process.env.FUNCTION === 'api') {
api()
} else {
console.error(`FUNCTION environment variable must be 'worker' or 'api', but it was set to ${process.env.FUNCTION}`)
}
}
main().catch((err) => {
console.error(err);
process.exit(1);
});

View File

@ -0,0 +1,77 @@
'use strict'
import { build } from './app.js'
import 'dotenv/config'
import { run } from 'graphile-worker'
import { dirname } from 'node:path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
if (!process.env.DATABASE_URL) throw new Error('DATABASE_URL is missing in env');
if (!process.env.FUNCTION) throw new Error(`FUNCTION env var was missing. FUNCTION env var must be either 'api' or 'worker'.`);
const connectionString = process.env.DATABASE_URL!
async function api() {
if (!process.env.PORT) throw new Error('PORT is missing in env');
const PORT = parseInt(process.env.PORT!)
const fastifyOpts = {
logger: {
level: 'info',
transport: {
target: 'pino-pretty'
}
}
}
const server = build(fastifyOpts, connectionString)
server.listen({ port: PORT }, (err) => {
if (err) {
server.log.error(err)
process.exit(1)
}
})
}
async function worker() {
const concurrency = (process.env?.WORKER_CONCURRENCY) ? parseInt(process.env.WORKER_CONCURRENCY) : 1
// Run a worker to execute jobs:
const runner = await run({
connectionString,
concurrency,
// Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
noHandleSignals: false,
pollInterval: 1000,
taskDirectory: `${__dirname}/tasks`,
});
// Immediately await (or otherwise handle) the resulting promise, to avoid
// "unhandled rejection" errors causing a process crash in the event of
// something going wrong.
await runner.promise;
// If the worker exits (whether through fatal error or otherwise), the above
// promise will resolve/reject.
}
async function main() {
if (process.env.FUNCTION === 'api') {
api()
} else if (process.env.FUNCTION === 'worker') {
worker()
} else {
throw new Error('process.env.FUNCTION must be either api or worker. got '+process.env.FUNCTION)
}
}
main().catch((err) => {
console.error(err);
process.exit(1);
});

View File

@ -0,0 +1,41 @@
'use strict'
import { run } from 'graphile-worker'
import { dirname } from 'node:path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
if (!process.env.DATABASE_URL) throw new Error('DATABASE_URL is undefined in env');
const concurrency = (process.env?.WORKER_CONCURRENCY) ? parseInt(process.env.WORKER_CONCURRENCY) : 1
async function main() {
// Run a worker to execute jobs:
const runner = await run({
connectionString: process.env.DATABASE_URL!,
concurrency,
// Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
noHandleSignals: false,
pollInterval: 1000,
taskDirectory: `${__dirname}/tasks`,
});
// Immediately await (or otherwise handle) the resulting promise, to avoid
// "unhandled rejection" errors causing a process crash in the event of
// something going wrong.
await runner.promise;
// If the worker exits (whether through fatal error or otherwise), the above
// promise will resolve/reject.
}
main().catch((err) => {
console.error(err);
process.exit(1);
});

View File

@ -1,5 +1,5 @@
import { record, assertDependencyDirectory } from '../../src/record.js'
import { getRandomRoom } from '../../src/cb.js'
import { record, assertDependencyDirectory } from '../record.js'
import { getRandomRoom } from '../cb.js'
import path from 'node:path'
import os from 'node:os'
import { execa } from 'execa'

View File

@ -0,0 +1,90 @@
import { S3Client, HeadObjectCommand } from "@aws-sdk/client-s3"
import { Upload } from "@aws-sdk/lib-storage"
import { basename } from 'node:path'
import fs from 'node:fs'
if (!process.env.S3_BUCKET_NAME) throw new Error('S3_BUCKET_NAME was undefined in env');
if (!process.env.S3_BUCKET_KEY_ID) throw new Error('S3_BUCKET_KEY_ID was undefined in env');
if (!process.env.S3_BUCKET_APPLICATION_KEY) throw new Error('S3_BUCKET_APPLICATION_KEY was undefined in env');
export async function uploadFile(filePath: string) {
if (!filePath) throw new Error("first argument, 'filePath' is undefined");
const fileName = basename(filePath)
let isFileAlreadyUploaded = false
const client = new S3Client({
endpoint: 'https://s3.us-west-000.backblazeb2.com',
region: 'us-west-000',
credentials: {
accessKeyId: process.env.S3_BUCKET_KEY_ID!,
secretAccessKey: process.env.S3_BUCKET_APPLICATION_KEY!
}
});
// try {
// console.log(`> Checking to see if ${fileName} exists in the S3 bucket`)
// const input = {
// "Bucket": process.env.S3_BUCKET_NAME,
// "Key": fileName
// };
// const command = new HeadObjectCommand(input);
// const response = await client.send(command);
// if (response.ContentLength && response.ContentLength > 0) isFileAlreadyUploaded = true;
// // console.log('does this following file exist?')
// return { Key: fileName }
// } catch (e) {
// console.log(`> failed to get ${fileName} which means it probably doesnt exist in the bucket`)
// console.log(e)
// }
console.log('> Uploading file')
const target = {
Bucket: process.env.S3_BUCKET_NAME,
Key: fileName,
Body: fs.createReadStream(filePath)
}
// greets https://stackoverflow.com/a/70159394/1004931
try {
const parallelUploads3 = new Upload({
client: client,
//tags: [...], // optional tags
queueSize: 4, // optional concurrency configuration
leavePartsOnError: false, // optional manually handle dropped parts
params: target,
});
// parallelUploads3.on("httpUploadProgress", (progress) => {
// console.log(progress);
// });
const res = await parallelUploads3.done();
return res
} catch (e) {
if (e instanceof Error) {
console.error(`while uploading a file to s3, we encountered an error`)
throw new Error(e.message);
} else {
throw new Error(`error of some sort ${JSON.stringify(e, null, 2)}`)
}
}
}

View File

@ -8,9 +8,9 @@ dotenv.config()
import { createId } from '@paralleldrive/cuid2'
import os from 'os'
import fs from 'node:fs'
import { loggerFactory } from "./src/logger.js"
import { verifyStorage } from './src/disk.js'
import { record, assertDependencyDirectory, checkFFmpeg } from './src/record.js'
import { loggerFactory } from "./logger.js"
import { verifyStorage } from './disk.js'
import { record, assertDependencyDirectory, checkFFmpeg } from './record.js'
import fastq from 'fastq'
import pRetry from 'p-retry';
import Fastify from 'fastify';

View File

@ -0,0 +1,17 @@
import { type Helpers } from 'graphile-worker'
import Record from '../Record.ts'
type Payload = {
url: string
}
export default async function (payload: Payload, helpers: Helpers) {
const { url } = payload;
helpers.logger.info(`'record' task execution begin with url=${url} (@todo implement)`);
// use the url from the job payload rather than a hard-coded placeholder
const record = new Record({ url })
await record.start()
return record.id
};

View File

@ -0,0 +1,32 @@
{
"compilerOptions": {
// Base Options recommended for all projects
"allowImportingTsExtensions": true,
"noEmit": true, // tsup does the emissions
"esModuleInterop": true,
"skipLibCheck": true,
"target": "es2022",
"allowJs": true,
"moduleResolution": "Bundler",
"resolveJsonModule": true,
"moduleDetection": "force",
"isolatedModules": true,
// Enable strict type checking so you can catch bugs early
"strict": true,
"noUncheckedIndexedAccess": true,
"noImplicitOverride": true,
// Transpile our TypeScript code to JavaScript
"module": "ESNext",
"outDir": "dist",
"lib": [
"es2022"
]
},
// Include the necessary files for your project
"include": [
"**/*.ts"
],
"exclude": [
"node_modules"
]
}

View File

@ -2,22 +2,28 @@ import { defineConfig } from "tsup";
import { exec } from 'child_process';
export default defineConfig({
entry: ["src/index.ts", "src/workflows.ts"],
format: ["cjs"],
entry: [
"src/index.ts",
"src/tasks/**.ts"
],
format: ["esm"],
target: "node20",
clean: true,
sourcemap: true,
/**
* The common package is using the internal packages approach, so it needs to
* These packages are using the internal packages approach, so they need to
* be transpiled / bundled together with the deployed code.
*/
noExternal: ["@futureporn/temporal-workflows"],
noExternal: [
"@futureporn/utils",
"@futureporn/scout",
"@futureporn/types",
],
/**
* We do not use tsup for generating d.ts files because it cannot generate the type
* definition maps required for go-to-definition to work in our IDE. We
* use tsc for that.
*/
onSuccess: async () => {
exec('tsc --emitDeclarationOnly');
},

View File

@ -3,10 +3,10 @@
"type": "module",
"version": "1.0.0",
"description": "",
"main": "dist/index.js",
"main": "src/index.ts",
"types": "src/index.ts",
"scripts": {
"test": "mocha",
"build": "tsc --build",
"clean": "rm -rf dist",
"superclean": "rm -rf node_modules && rm -rf pnpm-lock.yaml && rm -rf dist"
},
@ -22,12 +22,6 @@
},
"devDependencies": {
"chai": "^5.1.1",
"mocha": "^10.6.0",
"ts-node": "^10.9.2",
"tsx": "^4.7.2",
"typescript": "^5.5.3"
},
"ts-node": {
"esm": true
"mocha": "^10.6.0"
}
}

View File

@ -29,164 +29,13 @@ importers:
version: 5.1.1
mocha:
specifier: ^10.6.0
version: 10.6.0
ts-node:
specifier: ^10.9.2
version: 10.9.2(@types/node@20.14.10)(typescript@5.5.3)
tsx:
specifier: ^4.7.2
version: 4.16.2
typescript:
specifier: ^5.5.3
version: 5.5.3
version: 10.7.0
packages:
'@cspotcode/source-map-support@0.8.1':
resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==}
engines: {node: '>=12'}
'@emnapi/runtime@1.2.0':
resolution: {integrity: sha512-bV21/9LQmcQeCPEg3BDFtvwL6cwiTMksYNWQQ4KOxCZikEGalWtenoZ0wCiukJINlGCIi2KXx01g4FoH/LxpzQ==}
'@esbuild/aix-ppc64@0.21.5':
resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==}
engines: {node: '>=12'}
cpu: [ppc64]
os: [aix]
'@esbuild/android-arm64@0.21.5':
resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==}
engines: {node: '>=12'}
cpu: [arm64]
os: [android]
'@esbuild/android-arm@0.21.5':
resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==}
engines: {node: '>=12'}
cpu: [arm]
os: [android]
'@esbuild/android-x64@0.21.5':
resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==}
engines: {node: '>=12'}
cpu: [x64]
os: [android]
'@esbuild/darwin-arm64@0.21.5':
resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==}
engines: {node: '>=12'}
cpu: [arm64]
os: [darwin]
'@esbuild/darwin-x64@0.21.5':
resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==}
engines: {node: '>=12'}
cpu: [x64]
os: [darwin]
'@esbuild/freebsd-arm64@0.21.5':
resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==}
engines: {node: '>=12'}
cpu: [arm64]
os: [freebsd]
'@esbuild/freebsd-x64@0.21.5':
resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==}
engines: {node: '>=12'}
cpu: [x64]
os: [freebsd]
'@esbuild/linux-arm64@0.21.5':
resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==}
engines: {node: '>=12'}
cpu: [arm64]
os: [linux]
'@esbuild/linux-arm@0.21.5':
resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==}
engines: {node: '>=12'}
cpu: [arm]
os: [linux]
'@esbuild/linux-ia32@0.21.5':
resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==}
engines: {node: '>=12'}
cpu: [ia32]
os: [linux]
'@esbuild/linux-loong64@0.21.5':
resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==}
engines: {node: '>=12'}
cpu: [loong64]
os: [linux]
'@esbuild/linux-mips64el@0.21.5':
resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==}
engines: {node: '>=12'}
cpu: [mips64el]
os: [linux]
'@esbuild/linux-ppc64@0.21.5':
resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==}
engines: {node: '>=12'}
cpu: [ppc64]
os: [linux]
'@esbuild/linux-riscv64@0.21.5':
resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==}
engines: {node: '>=12'}
cpu: [riscv64]
os: [linux]
'@esbuild/linux-s390x@0.21.5':
resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==}
engines: {node: '>=12'}
cpu: [s390x]
os: [linux]
'@esbuild/linux-x64@0.21.5':
resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==}
engines: {node: '>=12'}
cpu: [x64]
os: [linux]
'@esbuild/netbsd-x64@0.21.5':
resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==}
engines: {node: '>=12'}
cpu: [x64]
os: [netbsd]
'@esbuild/openbsd-x64@0.21.5':
resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==}
engines: {node: '>=12'}
cpu: [x64]
os: [openbsd]
'@esbuild/sunos-x64@0.21.5':
resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==}
engines: {node: '>=12'}
cpu: [x64]
os: [sunos]
'@esbuild/win32-arm64@0.21.5':
resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==}
engines: {node: '>=12'}
cpu: [arm64]
os: [win32]
'@esbuild/win32-ia32@0.21.5':
resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==}
engines: {node: '>=12'}
cpu: [ia32]
os: [win32]
'@esbuild/win32-x64@0.21.5':
resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==}
engines: {node: '>=12'}
cpu: [x64]
os: [win32]
'@img/sharp-darwin-arm64@0.33.4':
resolution: {integrity: sha512-p0suNqXufJs9t3RqLBO6vvrgr5OhgbWp76s5gTRvdmxmuv9E1rcaqGUsl3l4mKVmXPkTkTErXediAui4x+8PSA==}
engines: {glibc: '>=2.26', node: ^18.17.0 || ^20.3.0 || >=21.0.0, npm: '>=9.6.5', pnpm: '>=7.1.0', yarn: '>=3.2.0'}
@ -300,28 +149,6 @@ packages:
cpu: [x64]
os: [win32]
'@jridgewell/resolve-uri@3.1.2':
resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==}
engines: {node: '>=6.0.0'}
'@jridgewell/sourcemap-codec@1.5.0':
resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==}
'@jridgewell/trace-mapping@0.3.9':
resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==}
'@tsconfig/node10@1.0.11':
resolution: {integrity: sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==}
'@tsconfig/node12@1.0.11':
resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==}
'@tsconfig/node14@1.0.3':
resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==}
'@tsconfig/node16@1.0.4':
resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==}
'@types/chai@4.3.16':
resolution: {integrity: sha512-PatH4iOdyh3MyWtmHVFXLWCCIhUbopaltqddG9BzB+gMIzee2MJrvd+jouii9Z3wzQJruGWAm7WOMjgfG8hQlQ==}
@ -340,17 +167,8 @@ packages:
'@types/ms@0.7.34':
resolution: {integrity: sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==}
'@types/node@20.14.10':
resolution: {integrity: sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ==}
acorn-walk@8.3.3:
resolution: {integrity: sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==}
engines: {node: '>=0.4.0'}
acorn@8.12.1:
resolution: {integrity: sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==}
engines: {node: '>=0.4.0'}
hasBin: true
'@types/node@20.14.11':
resolution: {integrity: sha512-kprQpL8MMeszbz6ojB5/tU8PLN4kesnN8Gjzw349rDlNgsSzg90lAVj3llK99Dh7JON+t9AuscPPFW6mPbTnSA==}
ansi-colors@4.1.3:
resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==}
@ -368,9 +186,6 @@ packages:
resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
engines: {node: '>= 8'}
arg@4.1.3:
resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==}
argparse@2.0.1:
resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}
@ -435,9 +250,6 @@ packages:
resolution: {integrity: sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==}
engines: {node: '>=12.5.0'}
create-require@1.1.1:
resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==}
cross-spawn@7.0.3:
resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==}
engines: {node: '>= 8'}
@ -463,10 +275,6 @@ packages:
resolution: {integrity: sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==}
engines: {node: '>=8'}
diff@4.0.2:
resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==}
engines: {node: '>=0.3.1'}
diff@5.2.0:
resolution: {integrity: sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==}
engines: {node: '>=0.3.1'}
@ -474,11 +282,6 @@ packages:
emoji-regex@8.0.0:
resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
esbuild@0.21.5:
resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==}
engines: {node: '>=12'}
hasBin: true
escalade@3.1.2:
resolution: {integrity: sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==}
engines: {node: '>=6'}
@ -526,9 +329,6 @@ packages:
resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==}
engines: {node: '>=16'}
get-tsconfig@4.7.5:
resolution: {integrity: sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==}
glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
@ -613,9 +413,6 @@ packages:
luxon@1.28.1:
resolution: {integrity: sha512-gYHAa180mKrNIUJCbwpmD0aTu9kV0dREDrwNnuyFAsO1Wt0EVYSZelPnJlbj9HplzXX/YWXHFTL45kvZ53M0pw==}
make-error@1.3.6:
resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==}
merge-stream@2.0.0:
resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==}
@ -627,8 +424,8 @@ packages:
resolution: {integrity: sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==}
engines: {node: '>=10'}
mocha@10.6.0:
resolution: {integrity: sha512-hxjt4+EEB0SA0ZDygSS015t65lJw/I2yRCS3Ae+SJ5FrbzrXgfYwJr96f0OvIXdj7h4lv/vLCrH3rkiuizFSvw==}
mocha@10.7.0:
resolution: {integrity: sha512-v8/rBWr2VO5YkspYINnvu81inSz2y3ODJrhO175/Exzor1RcEZZkizgE2A+w/CAXXoESS8Kys5E62dOHGHzULA==}
engines: {node: '>= 14.0.0'}
hasBin: true
@ -695,14 +492,11 @@ packages:
resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==}
engines: {node: '>=0.10.0'}
resolve-pkg-maps@1.0.0:
resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==}
safe-buffer@5.2.1:
resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
semver@7.6.2:
resolution: {integrity: sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==}
semver@7.6.3:
resolution: {integrity: sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==}
engines: {node: '>=10'}
hasBin: true
@ -756,28 +550,9 @@ packages:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
ts-node@10.9.2:
resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==}
hasBin: true
peerDependencies:
'@swc/core': '>=1.2.50'
'@swc/wasm': '>=1.2.50'
'@types/node': '*'
typescript: '>=2.7'
peerDependenciesMeta:
'@swc/core':
optional: true
'@swc/wasm':
optional: true
tslib@2.6.3:
resolution: {integrity: sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==}
tsx@4.16.2:
resolution: {integrity: sha512-C1uWweJDgdtX2x600HjaFaucXTilT7tgUZHbOE4+ypskZ1OP8CRCSDkCxG6Vya9EwaFIVagWwpaVAn5wzypaqQ==}
engines: {node: '>=18.0.0'}
hasBin: true
typescript@5.5.3:
resolution: {integrity: sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==}
engines: {node: '>=14.17'}
@ -786,9 +561,6 @@ packages:
undici-types@5.26.5:
resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==}
v8-compile-cache-lib@3.0.1:
resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==}
which@1.3.1:
resolution: {integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==}
hasBin: true
@ -824,94 +596,17 @@ packages:
resolution: {integrity: sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==}
engines: {node: '>=10'}
yn@3.1.1:
resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==}
engines: {node: '>=6'}
yocto-queue@0.1.0:
resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==}
engines: {node: '>=10'}
snapshots:
'@cspotcode/source-map-support@0.8.1':
dependencies:
'@jridgewell/trace-mapping': 0.3.9
'@emnapi/runtime@1.2.0':
dependencies:
tslib: 2.6.3
optional: true
'@esbuild/aix-ppc64@0.21.5':
optional: true
'@esbuild/android-arm64@0.21.5':
optional: true
'@esbuild/android-arm@0.21.5':
optional: true
'@esbuild/android-x64@0.21.5':
optional: true
'@esbuild/darwin-arm64@0.21.5':
optional: true
'@esbuild/darwin-x64@0.21.5':
optional: true
'@esbuild/freebsd-arm64@0.21.5':
optional: true
'@esbuild/freebsd-x64@0.21.5':
optional: true
'@esbuild/linux-arm64@0.21.5':
optional: true
'@esbuild/linux-arm@0.21.5':
optional: true
'@esbuild/linux-ia32@0.21.5':
optional: true
'@esbuild/linux-loong64@0.21.5':
optional: true
'@esbuild/linux-mips64el@0.21.5':
optional: true
'@esbuild/linux-ppc64@0.21.5':
optional: true
'@esbuild/linux-riscv64@0.21.5':
optional: true
'@esbuild/linux-s390x@0.21.5':
optional: true
'@esbuild/linux-x64@0.21.5':
optional: true
'@esbuild/netbsd-x64@0.21.5':
optional: true
'@esbuild/openbsd-x64@0.21.5':
optional: true
'@esbuild/sunos-x64@0.21.5':
optional: true
'@esbuild/win32-arm64@0.21.5':
optional: true
'@esbuild/win32-ia32@0.21.5':
optional: true
'@esbuild/win32-x64@0.21.5':
optional: true
'@img/sharp-darwin-arm64@0.33.4':
optionalDependencies:
'@img/sharp-libvips-darwin-arm64': 1.0.2
@ -987,23 +682,6 @@ snapshots:
'@img/sharp-win32-x64@0.33.4':
optional: true
'@jridgewell/resolve-uri@3.1.2': {}
'@jridgewell/sourcemap-codec@1.5.0': {}
'@jridgewell/trace-mapping@0.3.9':
dependencies:
'@jridgewell/resolve-uri': 3.1.2
'@jridgewell/sourcemap-codec': 1.5.0
'@tsconfig/node10@1.0.11': {}
'@tsconfig/node12@1.0.11': {}
'@tsconfig/node14@1.0.3': {}
'@tsconfig/node16@1.0.4': {}
'@types/chai@4.3.16': {}
'@types/debug@4.1.12':
@ -1012,7 +690,7 @@ snapshots:
'@types/fluent-ffmpeg@2.1.24':
dependencies:
'@types/node': 20.14.10
'@types/node': 20.14.11
'@types/luxon@3.4.2': {}
@ -1020,16 +698,10 @@ snapshots:
'@types/ms@0.7.34': {}
'@types/node@20.14.10':
'@types/node@20.14.11':
dependencies:
undici-types: 5.26.5
acorn-walk@8.3.3:
dependencies:
acorn: 8.12.1
acorn@8.12.1: {}
ansi-colors@4.1.3: {}
ansi-regex@5.0.1: {}
@ -1043,8 +715,6 @@ snapshots:
normalize-path: 3.0.0
picomatch: 2.3.1
arg@4.1.3: {}
argparse@2.0.1: {}
assertion-error@2.0.1: {}
@ -1116,8 +786,6 @@ snapshots:
color-convert: 2.0.1
color-string: 1.9.1
create-require@1.1.1: {}
cross-spawn@7.0.3:
dependencies:
path-key: 3.1.1
@ -1136,38 +804,10 @@ snapshots:
detect-libc@2.0.3: {}
diff@4.0.2: {}
diff@5.2.0: {}
emoji-regex@8.0.0: {}
esbuild@0.21.5:
optionalDependencies:
'@esbuild/aix-ppc64': 0.21.5
'@esbuild/android-arm': 0.21.5
'@esbuild/android-arm64': 0.21.5
'@esbuild/android-x64': 0.21.5
'@esbuild/darwin-arm64': 0.21.5
'@esbuild/darwin-x64': 0.21.5
'@esbuild/freebsd-arm64': 0.21.5
'@esbuild/freebsd-x64': 0.21.5
'@esbuild/linux-arm': 0.21.5
'@esbuild/linux-arm64': 0.21.5
'@esbuild/linux-ia32': 0.21.5
'@esbuild/linux-loong64': 0.21.5
'@esbuild/linux-mips64el': 0.21.5
'@esbuild/linux-ppc64': 0.21.5
'@esbuild/linux-riscv64': 0.21.5
'@esbuild/linux-s390x': 0.21.5
'@esbuild/linux-x64': 0.21.5
'@esbuild/netbsd-x64': 0.21.5
'@esbuild/openbsd-x64': 0.21.5
'@esbuild/sunos-x64': 0.21.5
'@esbuild/win32-arm64': 0.21.5
'@esbuild/win32-ia32': 0.21.5
'@esbuild/win32-x64': 0.21.5
escalade@3.1.2: {}
escape-string-regexp@4.0.0: {}
@ -1211,10 +851,6 @@ snapshots:
get-stream@8.0.1: {}
get-tsconfig@4.7.5:
dependencies:
resolve-pkg-maps: 1.0.0
glob-parent@5.1.2:
dependencies:
is-glob: 4.0.3
@ -1283,8 +919,6 @@ snapshots:
luxon@1.28.1: {}
make-error@1.3.6: {}
merge-stream@2.0.0: {}
mimic-fn@4.0.0: {}
@ -1293,7 +927,7 @@ snapshots:
dependencies:
brace-expansion: 2.0.1
mocha@10.6.0:
mocha@10.7.0:
dependencies:
ansi-colors: 4.1.3
browser-stdout: 1.3.1
@ -1357,7 +991,7 @@ snapshots:
'@types/debug': 4.1.12
'@types/fluent-ffmpeg': 2.1.24
'@types/luxon': 3.4.2
'@types/node': 20.14.10
'@types/node': 20.14.11
debug: 4.3.5(supports-color@8.1.1)
execa: 8.0.1
fluent-ffmpeg: 2.1.3
@ -1376,11 +1010,9 @@ snapshots:
require-directory@2.1.1: {}
resolve-pkg-maps@1.0.0: {}
safe-buffer@5.2.1: {}
semver@7.6.2: {}
semver@7.6.3: {}
serialize-javascript@6.0.2:
dependencies:
@ -1390,7 +1022,7 @@ snapshots:
dependencies:
color: 4.2.3
detect-libc: 2.0.3
semver: 7.6.2
semver: 7.6.3
optionalDependencies:
'@img/sharp-darwin-arm64': 0.33.4
'@img/sharp-darwin-x64': 0.33.4
@ -1450,40 +1082,13 @@ snapshots:
dependencies:
is-number: 7.0.0
ts-node@10.9.2(@types/node@20.14.10)(typescript@5.5.3):
dependencies:
'@cspotcode/source-map-support': 0.8.1
'@tsconfig/node10': 1.0.11
'@tsconfig/node12': 1.0.11
'@tsconfig/node14': 1.0.3
'@tsconfig/node16': 1.0.4
'@types/node': 20.14.10
acorn: 8.12.1
acorn-walk: 8.3.3
arg: 4.1.3
create-require: 1.1.1
diff: 4.0.2
make-error: 1.3.6
typescript: 5.5.3
v8-compile-cache-lib: 3.0.1
yn: 3.1.1
tslib@2.6.3:
optional: true
tsx@4.16.2:
dependencies:
esbuild: 0.21.5
get-tsconfig: 4.7.5
optionalDependencies:
fsevents: 2.3.3
typescript@5.5.3: {}
undici-types@5.26.5: {}
v8-compile-cache-lib@3.0.1: {}
which@1.3.1:
dependencies:
isexe: 2.0.0
@ -1523,6 +1128,4 @@ snapshots:
y18n: 5.0.8
yargs-parser: 20.2.9
yn@3.1.1: {}
yocto-queue@0.1.0: {}

View File

@ -26,7 +26,7 @@ describe('image', function () {
this.timeout(1000*60*15)
it('should accept a URL and return a path to image on disk', async function () {
// const url = 'https://futureporn-b2.b-cdn.net/projektmelody-chaturbate-2024-06-25.mp4'
const url = 'https://futureporn-b2.b-cdn.net/projektmelody-chaturbate-2024-07-16-clyrhavkf000008l20j3v46hn.mp4'
const url = 'https://futureporn-b2.b-cdn.net/projektmelody-chaturbate-2024-07-18.mp4'
const imagePath = await getStoryboard(url)
expect(imagePath).to.match(/\.png/)
})

View File

@ -22,7 +22,7 @@ async function deleteOrphanedLoadBalancers() {
const loadBalancerJson = await loadBalancersRes.json()
const orphanedLoadBalancers = loadBalancerJson.load_balancers.filter((lb) => (lb.instances.length === 0))
console.log(`found ${orphanedLoadBalancers.length} orphaned load balancers.`)
console.log('waiting 1 second')
// console.log('waiting 1 second')
await new Promise((resolve) => { setTimeout(resolve, 1000) })
for (const lb of orphanedLoadBalancers) {
@ -35,7 +35,7 @@ async function deleteOrphanedLoadBalancers() {
}
})
console.log('waiting 1 second')
// console.log('waiting 1 second')
await new Promise((resolve) => { setTimeout(resolve, 1000) })
}
}

View File

@ -0,0 +1,17 @@
# @futureporn/mailbox
A daemon which connects to an IMAP e-mail account and listens for new e-mails.
When an e-mail is received, the daemon checks whether it is from Chaturbate or Fansly.
If the e-mail is from one of those platforms, it is parsed.
If the e-mail is a going-live notification for a streamer on one of those platforms, the daemon creates or updates the following record types in Strapi:
* Platform Notification
* VTuber
* Stream
With this data, we have a log of all the lewdtuber streams that have occurred.
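A simplified sketch of that flow (function and job names match `imap.ts`, `parsers.ts`, and the worker queue used elsewhere in this repo; error handling omitted):

```ts
// Simplified sketch: watch the inbox, parse notification e-mails, and queue jobs for a worker.
import { Email } from './imap.js'
import { checkEmail } from './parsers.js'
import { makeWorkerUtils } from 'graphile-worker'
import { type FetchMessageObject } from 'imapflow'

const workerUtils = await makeWorkerUtils({ connectionString: process.env.DATABASE_URL! })
const email = new Email()

email.on('message', async (msg: FetchMessageObject) => {
  const body = await email.loadMessage(msg.uid) as string
  const notification = await checkEmail(body)
  if (notification.isMatch) {
    // a separate worker consumes this job and writes the Strapi records
    await workerUtils.addJob('process_notif_email', notification)
  }
  await email.archiveMessage(msg.uid)
})

await email.connect()
```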

View File

@ -0,0 +1,44 @@
{
"name": "@futureporn/mailbox",
"type": "module",
"version": "1.0.0",
"description": "",
"scripts": {
"superclean": "rm -rf node_modules && rm -rf pnpm-lock.yaml && rm -rf dist",
"start": "node dist/index.cjs",
"test": "echo \"Error: no test specified\" && exit 1",
"dev": "tsx ",
"build": "tsup"
},
"keywords": [],
"author": "@CJ_Clippy",
"license": "Unlicense",
"devDependencies": {
"@esbuild-plugins/esm-externals": "^0.1.2",
"@futureporn/image": "workspace:^",
"@futureporn/scout": "workspace:^",
"@futureporn/storage": "workspace:^",
"@futureporn/types": "workspace:^",
"@futureporn/utils": "workspace:^",
"@types/chai": "^4.3.16",
"@types/imapflow": "^1.0.18",
"@types/mailparser": "^3.4.4",
"@types/mocha": "^10.0.7",
"chai": "^5.1.1",
"mocha": "^10.7.0",
"tsup": "^8.1.2",
"typescript": "^5.5.3"
},
"dependencies": {
"@types/node": "^20.14.9",
"@types/qs": "^6.9.15",
"cheerio": "1.0.0-rc.12",
"date-fns": "^3.6.0",
"dotenv": "^16.4.5",
"fastify": "^4.12.0",
"graphile-worker": "^0.16.6",
"imapflow": "^1.0.164",
"mailparser": "^3.7.1",
"qs": "^6.12.3"
}
}

File diff suppressed because it is too large Load Diff

View File

@ -2,46 +2,45 @@
import { ImapFlow } from 'imapflow';
import EventEmitter from 'node:events';
import 'dotenv/config';
import { simpleParser } from 'mailparser';
if (!process.env.SCOUT_IMAP_SERVER) throw new Error('SCOUT_IMAP_SERVER is missing from env');
if (!process.env.SCOUT_IMAP_PORT) throw new Error('SCOUT_IMAP_PORT is missing from env');
if (!process.env.SCOUT_IMAP_USERNAME) throw new Error('SCOUT_IMAP_USERNAME is missing from env');
if (!process.env.SCOUT_IMAP_PASSWORD) throw new Error('SCOUT_IMAP_PASSWORD is missing from env');
if (!process.env.IMAP_SERVER) throw new Error('IMAP_SERVER is missing from env');
if (!process.env.IMAP_PORT) throw new Error('IMAP_PORT is missing from env');
if (!process.env.IMAP_USERNAME) throw new Error('IMAP_USERNAME is missing from env');
if (!process.env.IMAP_PASSWORD) throw new Error('IMAP_PASSWORD is missing from env');
// https://stackoverflow.com/a/49428486/1004931
function streamToString(stream) {
const chunks = [];
function streamToString(stream: any) {
const chunks: any = [];
return new Promise((resolve, reject) => {
stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
stream.on('error', (err) => reject(err));
stream.on('data', (chunk: any) => chunks.push(Buffer.from(chunk)));
stream.on('error', (err: any) => reject(err));
stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
})
}
export class Email extends EventEmitter {
private client?: ImapFlow | null = null
constructor() {
super()
this.client = null
}
async archiveMessage(uid) {
async archiveMessage(uid: any) {
if (!this.client) throw new Error('archiveMessage was called before this.client was instantiated.');
await this.client.messageDelete(uid, { uid: true })
}
async connect() {
this.client = new ImapFlow({
host: process.env.SCOUT_IMAP_SERVER,
port: process.env.SCOUT_IMAP_PORT,
host: process.env.IMAP_SERVER!,
port: parseInt(process.env.IMAP_PORT!),
secure: true,
auth: {
user: process.env.SCOUT_IMAP_USERNAME,
pass: process.env.SCOUT_IMAP_PASSWORD
user: process.env.IMAP_USERNAME!,
pass: process.env.IMAP_PASSWORD!
},
logger: false
});
@ -49,7 +48,7 @@ export class Email extends EventEmitter {
this.registerEventListeners()
await this.client.connect()
const stat = await this.getStatus()
if (stat.messages > 0) {
if (stat?.messages && stat.messages > 0) {
await this.emitAllMessages()
}
}
@ -61,6 +60,7 @@ export class Email extends EventEmitter {
}
async getStatus() {
if (!this.client) throw new Error('getStatus was called before this.client was instantiated.');
let lock = await this.client.getMailboxLock('INBOX');
let status;
try {
@ -71,12 +71,13 @@ export class Email extends EventEmitter {
return status
}
async loadMessage(uid) {
async loadMessage(uid: any) {
if (!this.client) throw new Error('loadMessage was called before this.client was instantiated.');
console.log(` 💾 loading message uid=${uid}`)
let lock = await this.client.getMailboxLock('INBOX');
let lock = await this.client.getMailboxLock('INBOX')
let dl, body
try {
dl = await this.client.download(uid, null, { uid: true })
dl = await this.client.download(uid, undefined, { uid: true })
body = await streamToString(dl.content)
} finally {
lock.release()
@ -85,6 +86,7 @@ export class Email extends EventEmitter {
}
async emitAllMessages() {
if (!this.client) throw new Error('emitAllMessages was called before this.client was instantiated.');
console.log('emitAllMessages is running')
let lock = await this.client.getMailboxLock('INBOX');
try {
@ -102,9 +104,10 @@ export class Email extends EventEmitter {
}
registerEventListeners() {
if (!this.client) throw new Error('registerEventListeners was called before this.client was instantiated.');
console.log(` > REGISTERING EVENT LISTENERS <`)
this.client.once('end', () => this.reconnect())
this.client.on('exists', (evt) => {
this.client.on('exists', (evt: any) => {
console.log(`exists event! count=${evt.count} prevCount=${evt.prevCount}`)
// console.log(evt)
if (evt.path === 'INBOX') {

View File

@ -0,0 +1,70 @@
'use strict'
/**
* watches an e-mail inbox for going live notifications
*/
import { checkEmail } from './parsers.js'
import { Email } from './imap.js'
import { makeWorkerUtils, WorkerUtils } from 'graphile-worker'
import { type NotificationData } from '@futureporn/types'
import { type FetchMessageObject } from 'imapflow'
if (!process.env.DATABASE_URL) throw new Error('DATABASE_URL is undefined in env');
if (!process.env.PORT) throw new Error('PORT is undefined in env');
const connectionString = process.env.DATABASE_URL
console.log(`process.env.DATABASE_URL=${connectionString}`)
async function handleMessage({ workerUtils, email, msg }: { workerUtils: WorkerUtils, email: Email, msg: FetchMessageObject }) {
try {
console.log(' ✏️ loading message')
const body = await email.loadMessage(msg.uid) as string
console.log(' ✏️ checking e-mail')
const { isMatch, url, platform, channel, displayName, date, userId, avatar }: NotificationData = (await checkEmail(body) )
if (isMatch) {
console.log(' ✏️✏️ adding process_notif_email job to queue')
workerUtils.addJob('process_notif_email', { isMatch, url, platform, channel, displayName, date, userId, avatar })
}
console.log(' ✏️ archiving e-mail')
await email.archiveMessage(msg.uid)
} catch (e) {
console.error(`An error was encountered while handling the following e-mail message.\n${JSON.stringify(msg, null, 2)}\nError as follows.`)
console.error(e)
}
}
async function main() {
const workerUtils = await makeWorkerUtils({
connectionString
})
// demonstrate that we are connected @todo remove this
workerUtils.addJob('hello', { name: 'worker' })
workerUtils.addJob('identify_image_color', { url: 'https://futureporn-b2.b-cdn.net/ti8ht9bgwj6k783j7hglfg8j_projektmelody-chaturbate-2024-07-18.png' })
// connect to IMAP inbox and wait for new e-mails
const email = new Email()
email.on('message', (msg: FetchMessageObject) => handleMessage({ workerUtils, email, msg }))
console.log('mailbox is starting...')
await email.connect()
}
main().catch((err) => {
console.error(err);
process.exit(1);
});

View File

@ -69,7 +69,7 @@ export async function checkEmail (body: string): Promise<NotificationData> {
let res: Record<string, any> = {}
let def: any = definitions.find((def) => def.from === mail.from!.value[0]!.address)
if (!def) return { isMatch: false, channel: null, platform: undefined, url: undefined };
if (!def) return { isMatch: false, channel: undefined, platform: undefined, url: undefined };
res.isMatch = true
// Step 0, get values from e-mail metadata
@ -95,6 +95,5 @@ export async function checkEmail (body: string): Promise<NotificationData> {
res.url = res.url || render(def.template, { channel: res.channel })
return res
return res as NotificationData
}
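
Note: checkEmail() resolves to a NotificationData from @futureporn/types, whose definition is not part of this diff. A rough sketch inferred from the fields used here and in the e-mail task — an assumption, not the actual type:

export interface NotificationData {
  isMatch: boolean;
  platform?: string;      // e.g. 'chaturbate' or 'fansly'
  url?: string;           // stream URL, rendered from the matching definition's template
  channel?: string;       // channel name parsed from the e-mail
  displayName?: string;
  date?: string;          // timestamp taken from the e-mail metadata
  userId?: string | null; // platform user id (used for fansly)
  avatar?: string;
}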

View File

@ -8,7 +8,6 @@
"resolveJsonModule": true,
"moduleDetection": "force",
"isolatedModules": true,
"verbatimModuleSyntax": true,
// Enable strict type checking so you can catch bugs early
"strict": true,
"noUncheckedIndexedAccess": true,
@ -22,7 +21,7 @@
},
// Include the necessary files for your project
"include": [
"./src/**/*.ts"
"**/*.ts"
],
"exclude": [
"node_modules"

View File

@ -0,0 +1,39 @@
import { defineConfig } from "tsup"
import { exec } from 'node:child_process'
export default defineConfig({
entry: [
"src/index.ts",
"src/tasks/**.ts"
],
/**
* lots of compatibility issues outputting to ESM. So we output to CJS to get things working.
*/
format: ["cjs"],
target: "es2022",
sourcemap: true,
/**
* The common package is using the internal packages approach, so it needs to
* be transpiled / bundled together with the deployed code.
*/
noExternal: [
"@futureporn/image",
"@futureporn/utils",
"@futureporn/scout",
"@futureporn/storage",
"@futureporn/types",
],
/**
* Do not use tsup for generating d.ts files because it can not generate type
* the definition maps required for go-to-definition to work in our IDE. We
* use tsc for that.
*/
splitting: false,
treeshake: true,
clean: true,
outDir: 'dist', // Where you want your compiled files to live
shims: true,
onSuccess: async () => {
exec('tsc --emitDeclarationOnly');
}
});

View File

@ -20,7 +20,7 @@ importers:
version: 20.14.11
tsup:
specifier: ^8.1.2
version: 8.1.2(typescript@5.5.3)
version: 8.2.2(typescript@5.5.3)
typescript:
specifier: ^5.5.3
version: 5.5.3
@ -209,83 +209,83 @@ packages:
resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==}
engines: {node: '>=14'}
'@rollup/rollup-android-arm-eabi@4.18.1':
resolution: {integrity: sha512-lncuC4aHicncmbORnx+dUaAgzee9cm/PbIqgWz1PpXuwc+sa1Ct83tnqUDy/GFKleLiN7ZIeytM6KJ4cAn1SxA==}
'@rollup/rollup-android-arm-eabi@4.19.0':
resolution: {integrity: sha512-JlPfZ/C7yn5S5p0yKk7uhHTTnFlvTgLetl2VxqE518QgyM7C9bSfFTYvB/Q/ftkq0RIPY4ySxTz+/wKJ/dXC0w==}
cpu: [arm]
os: [android]
'@rollup/rollup-android-arm64@4.18.1':
resolution: {integrity: sha512-F/tkdw0WSs4ojqz5Ovrw5r9odqzFjb5LIgHdHZG65dFI1lWTWRVy32KDJLKRISHgJvqUeUhdIvy43fX41znyDg==}
'@rollup/rollup-android-arm64@4.19.0':
resolution: {integrity: sha512-RDxUSY8D1tWYfn00DDi5myxKgOk6RvWPxhmWexcICt/MEC6yEMr4HNCu1sXXYLw8iAsg0D44NuU+qNq7zVWCrw==}
cpu: [arm64]
os: [android]
'@rollup/rollup-darwin-arm64@4.18.1':
resolution: {integrity: sha512-vk+ma8iC1ebje/ahpxpnrfVQJibTMyHdWpOGZ3JpQ7Mgn/3QNHmPq7YwjZbIE7km73dH5M1e6MRRsnEBW7v5CQ==}
'@rollup/rollup-darwin-arm64@4.19.0':
resolution: {integrity: sha512-emvKHL4B15x6nlNTBMtIaC9tLPRpeA5jMvRLXVbl/W9Ie7HhkrE7KQjvgS9uxgatL1HmHWDXk5TTS4IaNJxbAA==}
cpu: [arm64]
os: [darwin]
'@rollup/rollup-darwin-x64@4.18.1':
resolution: {integrity: sha512-IgpzXKauRe1Tafcej9STjSSuG0Ghu/xGYH+qG6JwsAUxXrnkvNHcq/NL6nz1+jzvWAnQkuAJ4uIwGB48K9OCGA==}
'@rollup/rollup-darwin-x64@4.19.0':
resolution: {integrity: sha512-fO28cWA1dC57qCd+D0rfLC4VPbh6EOJXrreBmFLWPGI9dpMlER2YwSPZzSGfq11XgcEpPukPTfEVFtw2q2nYJg==}
cpu: [x64]
os: [darwin]
'@rollup/rollup-linux-arm-gnueabihf@4.18.1':
resolution: {integrity: sha512-P9bSiAUnSSM7EmyRK+e5wgpqai86QOSv8BwvkGjLwYuOpaeomiZWifEos517CwbG+aZl1T4clSE1YqqH2JRs+g==}
'@rollup/rollup-linux-arm-gnueabihf@4.19.0':
resolution: {integrity: sha512-2Rn36Ubxdv32NUcfm0wB1tgKqkQuft00PtM23VqLuCUR4N5jcNWDoV5iBC9jeGdgS38WK66ElncprqgMUOyomw==}
cpu: [arm]
os: [linux]
'@rollup/rollup-linux-arm-musleabihf@4.18.1':
resolution: {integrity: sha512-5RnjpACoxtS+aWOI1dURKno11d7krfpGDEn19jI8BuWmSBbUC4ytIADfROM1FZrFhQPSoP+KEa3NlEScznBTyQ==}
'@rollup/rollup-linux-arm-musleabihf@4.19.0':
resolution: {integrity: sha512-gJuzIVdq/X1ZA2bHeCGCISe0VWqCoNT8BvkQ+BfsixXwTOndhtLUpOg0A1Fcx/+eA6ei6rMBzlOz4JzmiDw7JQ==}
cpu: [arm]
os: [linux]
'@rollup/rollup-linux-arm64-gnu@4.18.1':
resolution: {integrity: sha512-8mwmGD668m8WaGbthrEYZ9CBmPug2QPGWxhJxh/vCgBjro5o96gL04WLlg5BA233OCWLqERy4YUzX3bJGXaJgQ==}
'@rollup/rollup-linux-arm64-gnu@4.19.0':
resolution: {integrity: sha512-0EkX2HYPkSADo9cfeGFoQ7R0/wTKb7q6DdwI4Yn/ULFE1wuRRCHybxpl2goQrx4c/yzK3I8OlgtBu4xvted0ug==}
cpu: [arm64]
os: [linux]
'@rollup/rollup-linux-arm64-musl@4.18.1':
resolution: {integrity: sha512-dJX9u4r4bqInMGOAQoGYdwDP8lQiisWb9et+T84l2WXk41yEej8v2iGKodmdKimT8cTAYt0jFb+UEBxnPkbXEQ==}
'@rollup/rollup-linux-arm64-musl@4.19.0':
resolution: {integrity: sha512-GlIQRj9px52ISomIOEUq/IojLZqzkvRpdP3cLgIE1wUWaiU5Takwlzpz002q0Nxxr1y2ZgxC2obWxjr13lvxNQ==}
cpu: [arm64]
os: [linux]
'@rollup/rollup-linux-powerpc64le-gnu@4.18.1':
resolution: {integrity: sha512-V72cXdTl4EI0x6FNmho4D502sy7ed+LuVW6Ym8aI6DRQ9hQZdp5sj0a2usYOlqvFBNKQnLQGwmYnujo2HvjCxQ==}
'@rollup/rollup-linux-powerpc64le-gnu@4.19.0':
resolution: {integrity: sha512-N6cFJzssruDLUOKfEKeovCKiHcdwVYOT1Hs6dovDQ61+Y9n3Ek4zXvtghPPelt6U0AH4aDGnDLb83uiJMkWYzQ==}
cpu: [ppc64]
os: [linux]
'@rollup/rollup-linux-riscv64-gnu@4.18.1':
resolution: {integrity: sha512-f+pJih7sxoKmbjghrM2RkWo2WHUW8UbfxIQiWo5yeCaCM0TveMEuAzKJte4QskBp1TIinpnRcxkquY+4WuY/tg==}
'@rollup/rollup-linux-riscv64-gnu@4.19.0':
resolution: {integrity: sha512-2DnD3mkS2uuam/alF+I7M84koGwvn3ZVD7uG+LEWpyzo/bq8+kKnus2EVCkcvh6PlNB8QPNFOz6fWd5N8o1CYg==}
cpu: [riscv64]
os: [linux]
'@rollup/rollup-linux-s390x-gnu@4.18.1':
resolution: {integrity: sha512-qb1hMMT3Fr/Qz1OKovCuUM11MUNLUuHeBC2DPPAWUYYUAOFWaxInaTwTQmc7Fl5La7DShTEpmYwgdt2hG+4TEg==}
'@rollup/rollup-linux-s390x-gnu@4.19.0':
resolution: {integrity: sha512-D6pkaF7OpE7lzlTOFCB2m3Ngzu2ykw40Nka9WmKGUOTS3xcIieHe82slQlNq69sVB04ch73thKYIWz/Ian8DUA==}
cpu: [s390x]
os: [linux]
'@rollup/rollup-linux-x64-gnu@4.18.1':
resolution: {integrity: sha512-7O5u/p6oKUFYjRbZkL2FLbwsyoJAjyeXHCU3O4ndvzg2OFO2GinFPSJFGbiwFDaCFc+k7gs9CF243PwdPQFh5g==}
'@rollup/rollup-linux-x64-gnu@4.19.0':
resolution: {integrity: sha512-HBndjQLP8OsdJNSxpNIN0einbDmRFg9+UQeZV1eiYupIRuZsDEoeGU43NQsS34Pp166DtwQOnpcbV/zQxM+rWA==}
cpu: [x64]
os: [linux]
'@rollup/rollup-linux-x64-musl@4.18.1':
resolution: {integrity: sha512-pDLkYITdYrH/9Cv/Vlj8HppDuLMDUBmgsM0+N+xLtFd18aXgM9Nyqupb/Uw+HeidhfYg2lD6CXvz6CjoVOaKjQ==}
'@rollup/rollup-linux-x64-musl@4.19.0':
resolution: {integrity: sha512-HxfbvfCKJe/RMYJJn0a12eiOI9OOtAUF4G6ozrFUK95BNyoJaSiBjIOHjZskTUffUrB84IPKkFG9H9nEvJGW6A==}
cpu: [x64]
os: [linux]
'@rollup/rollup-win32-arm64-msvc@4.18.1':
resolution: {integrity: sha512-W2ZNI323O/8pJdBGil1oCauuCzmVd9lDmWBBqxYZcOqWD6aWqJtVBQ1dFrF4dYpZPks6F+xCZHfzG5hYlSHZ6g==}
'@rollup/rollup-win32-arm64-msvc@4.19.0':
resolution: {integrity: sha512-HxDMKIhmcguGTiP5TsLNolwBUK3nGGUEoV/BO9ldUBoMLBssvh4J0X8pf11i1fTV7WShWItB1bKAKjX4RQeYmg==}
cpu: [arm64]
os: [win32]
'@rollup/rollup-win32-ia32-msvc@4.18.1':
resolution: {integrity: sha512-ELfEX1/+eGZYMaCIbK4jqLxO1gyTSOIlZr6pbC4SRYFaSIDVKOnZNMdoZ+ON0mrFDp4+H5MhwNC1H/AhE3zQLg==}
'@rollup/rollup-win32-ia32-msvc@4.19.0':
resolution: {integrity: sha512-xItlIAZZaiG/u0wooGzRsx11rokP4qyc/79LkAOdznGRAbOFc+SfEdfUOszG1odsHNgwippUJavag/+W/Etc6Q==}
cpu: [ia32]
os: [win32]
'@rollup/rollup-win32-x64-msvc@4.18.1':
resolution: {integrity: sha512-yjk2MAkQmoaPYCSu35RLJ62+dz358nE83VfTePJRp8CG7aMg25mEJYpXFiD+NcevhX8LxD5OP5tktPXnXN7GDw==}
'@rollup/rollup-win32-x64-msvc@4.19.0':
resolution: {integrity: sha512-xNo5fV5ycvCCKqiZcpB65VMR11NJB+StnxHz20jdqRAktfdfzhgjTiJ2doTDQE/7dqGaV5I7ZGqKpgph6lCIag==}
cpu: [x64]
os: [win32]
@ -555,6 +555,9 @@ packages:
resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==}
engines: {node: '>=8'}
picocolors@1.0.1:
resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==}
picomatch@2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
@ -600,8 +603,8 @@ packages:
resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==}
engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
rollup@4.18.1:
resolution: {integrity: sha512-Elx2UT8lzxxOXMpy5HWQGZqkrQOtrVDDa/bm9l10+U4rQnVzbL/LgZ4NOM1MPIDyHk69W4InuYDF5dzRh4Kw1A==}
rollup@4.19.0:
resolution: {integrity: sha512-5r7EYSQIowHsK4eTZ0Y81qpZuJz+MUuYeqmmYmRMl1nwhdmbiYqt5jwzf6u7wyOzJgYqtCRMtVRKOtHANBz7rA==}
engines: {node: '>=18.0.0', npm: '>=8.0.0'}
hasBin: true
@ -677,8 +680,8 @@ packages:
ts-interface-checker@0.1.13:
resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==}
tsup@8.1.2:
resolution: {integrity: sha512-Gzw/PXSX/z0aYMNmkcI54bKKFVFJQbLne+EqTJZeQ3lNT3QpumjtMU4rl+ZwTTp8oRF3ahMbEAxT2sZPJLFSrg==}
tsup@8.2.2:
resolution: {integrity: sha512-MufIuzdSt6HYPOeOtjUXLR4rqRJySi6XsRNZdwvjC2XR+xghsu2L3vSmYmX+k4S1mO6j0OlUEyVQ3Fc0H66XcA==}
engines: {node: '>=18'}
hasBin: true
peerDependencies:
@ -838,52 +841,52 @@ snapshots:
'@pkgjs/parseargs@0.11.0':
optional: true
'@rollup/rollup-android-arm-eabi@4.18.1':
'@rollup/rollup-android-arm-eabi@4.19.0':
optional: true
'@rollup/rollup-android-arm64@4.18.1':
'@rollup/rollup-android-arm64@4.19.0':
optional: true
'@rollup/rollup-darwin-arm64@4.18.1':
'@rollup/rollup-darwin-arm64@4.19.0':
optional: true
'@rollup/rollup-darwin-x64@4.18.1':
'@rollup/rollup-darwin-x64@4.19.0':
optional: true
'@rollup/rollup-linux-arm-gnueabihf@4.18.1':
'@rollup/rollup-linux-arm-gnueabihf@4.19.0':
optional: true
'@rollup/rollup-linux-arm-musleabihf@4.18.1':
'@rollup/rollup-linux-arm-musleabihf@4.19.0':
optional: true
'@rollup/rollup-linux-arm64-gnu@4.18.1':
'@rollup/rollup-linux-arm64-gnu@4.19.0':
optional: true
'@rollup/rollup-linux-arm64-musl@4.18.1':
'@rollup/rollup-linux-arm64-musl@4.19.0':
optional: true
'@rollup/rollup-linux-powerpc64le-gnu@4.18.1':
'@rollup/rollup-linux-powerpc64le-gnu@4.19.0':
optional: true
'@rollup/rollup-linux-riscv64-gnu@4.18.1':
'@rollup/rollup-linux-riscv64-gnu@4.19.0':
optional: true
'@rollup/rollup-linux-s390x-gnu@4.18.1':
'@rollup/rollup-linux-s390x-gnu@4.19.0':
optional: true
'@rollup/rollup-linux-x64-gnu@4.18.1':
'@rollup/rollup-linux-x64-gnu@4.19.0':
optional: true
'@rollup/rollup-linux-x64-musl@4.18.1':
'@rollup/rollup-linux-x64-musl@4.19.0':
optional: true
'@rollup/rollup-win32-arm64-msvc@4.18.1':
'@rollup/rollup-win32-arm64-msvc@4.19.0':
optional: true
'@rollup/rollup-win32-ia32-msvc@4.18.1':
'@rollup/rollup-win32-ia32-msvc@4.19.0':
optional: true
'@rollup/rollup-win32-x64-msvc@4.18.1':
'@rollup/rollup-win32-x64-msvc@4.19.0':
optional: true
'@types/estree@1.0.5': {}
@ -1147,6 +1150,8 @@ snapshots:
path-type@4.0.0: {}
picocolors@1.0.1: {}
picomatch@2.3.1: {}
pirates@4.0.6: {}
@ -1167,26 +1172,26 @@ snapshots:
reusify@1.0.4: {}
rollup@4.18.1:
rollup@4.19.0:
dependencies:
'@types/estree': 1.0.5
optionalDependencies:
'@rollup/rollup-android-arm-eabi': 4.18.1
'@rollup/rollup-android-arm64': 4.18.1
'@rollup/rollup-darwin-arm64': 4.18.1
'@rollup/rollup-darwin-x64': 4.18.1
'@rollup/rollup-linux-arm-gnueabihf': 4.18.1
'@rollup/rollup-linux-arm-musleabihf': 4.18.1
'@rollup/rollup-linux-arm64-gnu': 4.18.1
'@rollup/rollup-linux-arm64-musl': 4.18.1
'@rollup/rollup-linux-powerpc64le-gnu': 4.18.1
'@rollup/rollup-linux-riscv64-gnu': 4.18.1
'@rollup/rollup-linux-s390x-gnu': 4.18.1
'@rollup/rollup-linux-x64-gnu': 4.18.1
'@rollup/rollup-linux-x64-musl': 4.18.1
'@rollup/rollup-win32-arm64-msvc': 4.18.1
'@rollup/rollup-win32-ia32-msvc': 4.18.1
'@rollup/rollup-win32-x64-msvc': 4.18.1
'@rollup/rollup-android-arm-eabi': 4.19.0
'@rollup/rollup-android-arm64': 4.19.0
'@rollup/rollup-darwin-arm64': 4.19.0
'@rollup/rollup-darwin-x64': 4.19.0
'@rollup/rollup-linux-arm-gnueabihf': 4.19.0
'@rollup/rollup-linux-arm-musleabihf': 4.19.0
'@rollup/rollup-linux-arm64-gnu': 4.19.0
'@rollup/rollup-linux-arm64-musl': 4.19.0
'@rollup/rollup-linux-powerpc64le-gnu': 4.19.0
'@rollup/rollup-linux-riscv64-gnu': 4.19.0
'@rollup/rollup-linux-s390x-gnu': 4.19.0
'@rollup/rollup-linux-x64-gnu': 4.19.0
'@rollup/rollup-linux-x64-musl': 4.19.0
'@rollup/rollup-win32-arm64-msvc': 4.19.0
'@rollup/rollup-win32-ia32-msvc': 4.19.0
'@rollup/rollup-win32-x64-msvc': 4.19.0
fsevents: 2.3.3
run-parallel@1.2.0:
@ -1261,7 +1266,7 @@ snapshots:
ts-interface-checker@0.1.13: {}
tsup@8.1.2(typescript@5.5.3):
tsup@8.2.2(typescript@5.5.3):
dependencies:
bundle-require: 5.0.0(esbuild@0.23.0)
cac: 6.7.14
@ -1272,9 +1277,10 @@ snapshots:
execa: 5.1.1
globby: 11.1.0
joycon: 3.1.1
picocolors: 1.0.1
postcss-load-config: 6.0.1
resolve-from: 5.0.0
rollup: 4.18.1
rollup: 4.19.0
source-map: 0.8.0-beta.0
sucrase: 3.35.0
tree-kill: 1.2.2

View File

@ -10,19 +10,19 @@ importers:
dependencies:
'@fortawesome/fontawesome-free':
specifier: ^6.5.2
version: 6.5.2
version: 6.6.0
'@fortawesome/fontawesome-svg-core':
specifier: ^6.5.2
version: 6.5.2
version: 6.6.0
'@fortawesome/free-brands-svg-icons':
specifier: ^6.5.2
version: 6.5.2
version: 6.6.0
'@fortawesome/free-solid-svg-icons':
specifier: ^6.5.2
version: 6.5.2
version: 6.6.0
'@fortawesome/react-fontawesome':
specifier: ^0.2.2
version: 0.2.2(@fortawesome/fontawesome-svg-core@6.5.2)(react@18.3.1)
version: 0.2.2(@fortawesome/fontawesome-svg-core@6.6.0)(react@18.3.1)
'@futureporn/types':
specifier: workspace:*
version: link:../types
@ -49,7 +49,7 @@ importers:
version: 24.0.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
'@tanstack/react-query':
specifier: ^5.49.2
version: 5.51.1(react@18.3.1)
version: 5.51.11(react@18.3.1)
'@tanstack/react-table':
specifier: ^8.19.2
version: 8.19.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@ -103,7 +103,7 @@ importers:
version: 2.0.1(date-fns@2.30.0)
dayjs:
specifier: ^1.11.11
version: 1.11.11
version: 1.11.12
feed:
specifier: ^4.2.2
version: 4.2.2
@ -121,7 +121,7 @@ importers:
version: 2.0.3
multiformats:
specifier: ^13.1.3
version: 13.1.3
version: 13.2.0
next:
specifier: 14.0.4
version: 14.0.4(@babel/core@7.24.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.8)
@ -176,7 +176,7 @@ importers:
devDependencies:
'@types/node':
specifier: ^20.14.9
version: 20.14.10
version: 20.14.11
eslint:
specifier: ^8.57.0
version: 8.57.0
@ -331,24 +331,24 @@ packages:
resolution: {integrity: sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
'@fortawesome/fontawesome-common-types@6.5.2':
resolution: {integrity: sha512-gBxPg3aVO6J0kpfHNILc+NMhXnqHumFxOmjYCFfOiLZfwhnnfhtsdA2hfJlDnj+8PjAs6kKQPenOTKj3Rf7zHw==}
'@fortawesome/fontawesome-common-types@6.6.0':
resolution: {integrity: sha512-xyX0X9mc0kyz9plIyryrRbl7ngsA9jz77mCZJsUkLl+ZKs0KWObgaEBoSgQiYWAsSmjz/yjl0F++Got0Mdp4Rw==}
engines: {node: '>=6'}
'@fortawesome/fontawesome-free@6.5.2':
resolution: {integrity: sha512-hRILoInAx8GNT5IMkrtIt9blOdrqHOnPBH+k70aWUAqPZPgopb9G5EQJFpaBx/S8zp2fC+mPW349Bziuk1o28Q==}
'@fortawesome/fontawesome-free@6.6.0':
resolution: {integrity: sha512-60G28ke/sXdtS9KZCpZSHHkCbdsOGEhIUGlwq6yhY74UpTiToIh8np7A8yphhM4BWsvNFtIvLpi4co+h9Mr9Ow==}
engines: {node: '>=6'}
'@fortawesome/fontawesome-svg-core@6.5.2':
resolution: {integrity: sha512-5CdaCBGl8Rh9ohNdxeeTMxIj8oc3KNBgIeLMvJosBMdslK/UnEB8rzyDRrbKdL1kDweqBPo4GT9wvnakHWucZw==}
'@fortawesome/fontawesome-svg-core@6.6.0':
resolution: {integrity: sha512-KHwPkCk6oRT4HADE7smhfsKudt9N/9lm6EJ5BVg0tD1yPA5hht837fB87F8pn15D8JfTqQOjhKTktwmLMiD7Kg==}
engines: {node: '>=6'}
'@fortawesome/free-brands-svg-icons@6.5.2':
resolution: {integrity: sha512-zi5FNYdmKLnEc0jc0uuHH17kz/hfYTg4Uei0wMGzcoCL/4d3WM3u1VMc0iGGa31HuhV5i7ZK8ZlTCQrHqRHSGQ==}
'@fortawesome/free-brands-svg-icons@6.6.0':
resolution: {integrity: sha512-1MPD8lMNW/earme4OQi1IFHtmHUwAKgghXlNwWi9GO7QkTfD+IIaYpIai4m2YJEzqfEji3jFHX1DZI5pbY/biQ==}
engines: {node: '>=6'}
'@fortawesome/free-solid-svg-icons@6.5.2':
resolution: {integrity: sha512-QWFZYXFE7O1Gr1dTIp+D6UcFUF0qElOnZptpi7PBUMylJh+vFmIedVe1Ir6RM1t2tEQLLSV1k7bR4o92M+uqlw==}
'@fortawesome/free-solid-svg-icons@6.6.0':
resolution: {integrity: sha512-IYv/2skhEDFc2WGUcqvFJkeK39Q+HyPf5GHUrT/l2pKbtgEIv1al1TKd6qStR5OIwQdN1GZP54ci3y4mroJWjA==}
engines: {node: '>=6'}
'@fortawesome/react-fontawesome@0.2.2':
@ -637,11 +637,11 @@ packages:
'@swc/helpers@0.5.2':
resolution: {integrity: sha512-E4KcWTpoLHqwPHLxidpOqQbcrZVgi0rsmmZXUle1jXmJfuIf/UWpczUJ7MZZ5tlxytgJXyp0w4PGkkeLiuIdZw==}
'@tanstack/query-core@5.51.1':
resolution: {integrity: sha512-fJBMQMpo8/KSsWW5ratJR5+IFr7YNJ3K2kfP9l5XObYHsgfVy1w3FJUWU4FT2fj7+JMaEg33zOcNDBo0LMwHnw==}
'@tanstack/query-core@5.51.9':
resolution: {integrity: sha512-HsAwaY5J19MD18ykZDS3aVVh+bAt0i7m6uQlFC2b77DLV9djo+xEN7MWQAQQTR8IM+7r/zbozTQ7P0xr0bHuew==}
'@tanstack/react-query@5.51.1':
resolution: {integrity: sha512-s47HKFnQ4HOJAHoIiXcpna/roMMPZJPy6fJ6p4ZNVn8+/onlLBEDd1+xc8OnDuwgvecqkZD7Z2mnSRbcWefrKw==}
'@tanstack/react-query@5.51.11':
resolution: {integrity: sha512-4Kq2x0XpDlpvSnaLG+8pHNH60zEc3mBvb3B2tOMDjcPCi/o+Du3p/9qpPLwJOTliVxxPJAP27fuIhLrsRdCr7A==}
peerDependencies:
react: ^18.0.0
@ -665,8 +665,8 @@ packages:
'@types/lodash@4.17.7':
resolution: {integrity: sha512-8wTvZawATi/lsmNu10/j2hk1KEP0IvjubqPE3cu1Xz7xfXXt5oCq3SNUz4fMIP4XGF9Ky+Ue2tBA3hcS7LSBlA==}
'@types/node@20.14.10':
resolution: {integrity: sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ==}
'@types/node@20.14.11':
resolution: {integrity: sha512-kprQpL8MMeszbz6ojB5/tU8PLN4kesnN8Gjzw349rDlNgsSzg90lAVj3llK99Dh7JON+t9AuscPPFW6mPbTnSA==}
'@types/prismjs@1.26.4':
resolution: {integrity: sha512-rlAnzkW2sZOjbqZ743IHUhFcvzaGbqijwOu8QZnZCjfQzBqFE3s4lOTJEsxikImav9uzz/42I+O7YUs1mWgMlg==}
@ -932,9 +932,6 @@ packages:
resolution: {integrity: sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ==}
engines: {node: '>= 0.4'}
array.prototype.toreversed@1.1.2:
resolution: {integrity: sha512-wwDCoT4Ck4Cz7sLtgUmzR5UV3YF5mFHUlbChCzZBQZ+0m2cl/DH3tKgvphv1nKgFsJ48oCSg6p91q2Vm0I/ZMA==}
array.prototype.tosorted@1.1.4:
resolution: {integrity: sha512-p6Fx8B7b7ZhL/gmUsAy0D15WhvDccw3mnGNbZpi3pmeJdxtWsj2jEaI4Y6oo3XiHfzuSgPwKc04MYt6KgvC/wA==}
engines: {node: '>= 0.4'}
@ -1014,8 +1011,8 @@ packages:
camelize@1.0.1:
resolution: {integrity: sha512-dU+Tx2fsypxTgtLoE36npi3UqcjSSMNYfkqgmoEhtZrraP5VWq0K7FkWVTYa8eMPtnU/G2txVsfdCJTn9uzpuQ==}
caniuse-lite@1.0.30001642:
resolution: {integrity: sha512-3XQ0DoRgLijXJErLSl+bLnJ+Et4KqV1PY6JJBGAFlsNsz31zeAIncyeZfLCabHK/jtSh+671RM9YMldxjUPZtA==}
caniuse-lite@1.0.30001643:
resolution: {integrity: sha512-ERgWGNleEilSrHM6iUz/zJNSQTP8Mr21wDWpdgvRwcTXGAq6jMtOUPP4dqFPTdKqZ2wKTdtB+uucZ3MRpAUSmg==}
castable-video@1.0.10:
resolution: {integrity: sha512-tJgUv+8/zE191y8EKojvB0eKIyKA9obIttd6Wpdm6x2qBmuwZ7wDgzVCSmf5cN2v9jBiuu0s7O5poz8a8cFX/w==}
@ -1122,8 +1119,8 @@ packages:
resolution: {integrity: sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==}
engines: {node: '>=0.11'}
dayjs@1.11.11:
resolution: {integrity: sha512-okzr3f11N6WuqYtZSvm+F776mB41wRZMhKP+hc34YdW+KmtYYK9iqvHSwo2k9FEH3fhGXvOPV6yz2IcSrfRUDg==}
dayjs@1.11.12:
resolution: {integrity: sha512-Rt2g+nTbLlDWZTwwrIXjy9MeiZmSDI375FvZs72ngxx8PDC6YXOeR3q5LAuPzjZQxhiWdRKac7RKV+YyQYfYIg==}
debug@3.2.7:
resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==}
@ -1185,8 +1182,8 @@ packages:
resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==}
engines: {node: '>=6.0.0'}
electron-to-chromium@1.4.828:
resolution: {integrity: sha512-QOIJiWpQJDHAVO4P58pwb133Cwee0nbvy/MV1CwzZVGpkH1RX33N3vsaWRCpR6bF63AAq366neZrRTu7Qlsbbw==}
electron-to-chromium@1.4.832:
resolution: {integrity: sha512-cTen3SB0H2SGU7x467NRe1eVcQgcuS6jckKfWJHia2eo0cHIGOqHoAxevIYZD4eRHcWjkvFzo93bi3vJ9W+1lA==}
emoji-regex@9.2.2:
resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==}
@ -1306,11 +1303,11 @@ packages:
peerDependencies:
eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0
eslint-plugin-react@7.34.4:
resolution: {integrity: sha512-Np+jo9bUwJNxCsT12pXtrGhJgT3T44T1sHhn1Ssr42XFn8TES0267wPGo5nNrMHi8qkyimDAX2BUmkf9pSaVzA==}
eslint-plugin-react@7.35.0:
resolution: {integrity: sha512-v501SSMOWv8gerHkk+IIQBkcGRGrO2nfybfj5pLxuJNFTPxxA3PSryhXTK+9pNbtkggheDdsC0E9Q8CuPk6JKA==}
engines: {node: '>=4'}
peerDependencies:
eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8
eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7
eslint-scope@7.2.2:
resolution: {integrity: sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==}
@ -1439,8 +1436,8 @@ packages:
resolution: {integrity: sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==}
engines: {node: '>= 0.4'}
get-tsconfig@4.7.5:
resolution: {integrity: sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==}
get-tsconfig@4.7.6:
resolution: {integrity: sha512-ZAqrLlu18NbDdRaHq+AKXzAmqIUPswPWKUchfytdAjiRFnCe5ojG2bstg6mRiZabkKfCoL/e98pbBELIV/YCeA==}
github-from-package@0.0.0:
resolution: {integrity: sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==}
@ -1536,8 +1533,8 @@ packages:
resolution: {integrity: sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==}
engines: {node: '>= 4'}
immutable@4.3.6:
resolution: {integrity: sha512-Ju0+lEMyzMVZarkTn/gqRpdqd5dOPaz1mCZ0SH3JV6iFw81PldE/PEB1hWVEA288HPt4WXW8O7AWxB10M+03QQ==}
immutable@4.3.7:
resolution: {integrity: sha512-1hqclzwYwjRDFLjcFxOM5AYkkG0rpFPpr1RLPMEuGczoS7YA8gLhy8SWXYRAA/XwfEHpfo3cw5JGioS32fnMRw==}
import-fresh@3.3.0:
resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==}
@ -1591,8 +1588,8 @@ packages:
resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==}
engines: {node: '>= 0.4'}
is-core-module@2.14.0:
resolution: {integrity: sha512-a5dFJih5ZLYlRtDc0dZWP7RiKr6xIKzmn/oAYCDvdLThadVgyJwlaoQPmRtMSpz+rk0OGAgIu+TcM9HUF0fk1A==}
is-core-module@2.15.0:
resolution: {integrity: sha512-Dd+Lb2/zvk9SKy1TGCt1wFJFo/MWBPMX5x7KcvLajWTGuomczdQX61PvY5yK6SVACwpoexWo81IfFyoKY2QnTA==}
engines: {node: '>= 0.4'}
is-data-view@1.0.1:
@ -1815,8 +1812,8 @@ packages:
ms@2.1.3:
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
multiformats@13.1.3:
resolution: {integrity: sha512-CZPi9lFZCM/+7oRolWYsvalsyWQGFo+GpdaTmjxXXomC+nP/W1Rnxb9sUgjvmNmRZ5bOPqRAl4nuK+Ydw/4tGw==}
multiformats@13.2.0:
resolution: {integrity: sha512-ztpoAm2qHcdMR/RuOB0IhdYV6MocCLU2bp2Hcpwi2UHE5CT2PcCMyvwhSHMCS0gdApb3t6YzI/uQ5tmN7y/DRA==}
mux-embed@5.2.1:
resolution: {integrity: sha512-NukHw91xeEVDBeXVDBpi2BvXNix7gSuvdtyvOph5yR/ROn1hHbTlcYWoKQyCyJX9frsF00UROEul+S8wPzU3aQ==}
@ -1876,8 +1873,8 @@ packages:
node-addon-api@5.1.0:
resolution: {integrity: sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==}
node-releases@2.0.14:
resolution: {integrity: sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==}
node-releases@2.0.18:
resolution: {integrity: sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==}
normalize-path@3.0.0:
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
@ -2152,8 +2149,8 @@ packages:
resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==}
hasBin: true
semver@7.6.2:
resolution: {integrity: sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==}
semver@7.6.3:
resolution: {integrity: sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==}
engines: {node: '>=10'}
hasBin: true
@ -2635,25 +2632,25 @@ snapshots:
'@eslint/js@8.57.0': {}
'@fortawesome/fontawesome-common-types@6.5.2': {}
'@fortawesome/fontawesome-common-types@6.6.0': {}
'@fortawesome/fontawesome-free@6.5.2': {}
'@fortawesome/fontawesome-free@6.6.0': {}
'@fortawesome/fontawesome-svg-core@6.5.2':
'@fortawesome/fontawesome-svg-core@6.6.0':
dependencies:
'@fortawesome/fontawesome-common-types': 6.5.2
'@fortawesome/fontawesome-common-types': 6.6.0
'@fortawesome/free-brands-svg-icons@6.5.2':
'@fortawesome/free-brands-svg-icons@6.6.0':
dependencies:
'@fortawesome/fontawesome-common-types': 6.5.2
'@fortawesome/fontawesome-common-types': 6.6.0
'@fortawesome/free-solid-svg-icons@6.5.2':
'@fortawesome/free-solid-svg-icons@6.6.0':
dependencies:
'@fortawesome/fontawesome-common-types': 6.5.2
'@fortawesome/fontawesome-common-types': 6.6.0
'@fortawesome/react-fontawesome@0.2.2(@fortawesome/fontawesome-svg-core@6.5.2)(react@18.3.1)':
'@fortawesome/react-fontawesome@0.2.2(@fortawesome/fontawesome-svg-core@6.6.0)(react@18.3.1)':
dependencies:
'@fortawesome/fontawesome-svg-core': 6.5.2
'@fortawesome/fontawesome-svg-core': 6.6.0
prop-types: 15.8.1
react: 18.3.1
@ -2870,11 +2867,11 @@ snapshots:
dependencies:
tslib: 2.6.3
'@tanstack/query-core@5.51.1': {}
'@tanstack/query-core@5.51.9': {}
'@tanstack/react-query@5.51.1(react@18.3.1)':
'@tanstack/react-query@5.51.11(react@18.3.1)':
dependencies:
'@tanstack/query-core': 5.51.1
'@tanstack/query-core': 5.51.9
react: 18.3.1
'@tanstack/react-table@8.19.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
@ -2891,7 +2888,7 @@ snapshots:
'@types/lodash@4.17.7': {}
'@types/node@20.14.10':
'@types/node@20.14.11':
dependencies:
undici-types: 5.26.5
@ -2940,7 +2937,7 @@ snapshots:
globby: 11.1.0
is-glob: 4.0.3
minimatch: 9.0.3
semver: 7.6.2
semver: 7.6.3
ts-api-utils: 1.3.0(typescript@5.3.3)
optionalDependencies:
typescript: 5.3.3
@ -3257,13 +3254,6 @@ snapshots:
es-abstract: 1.23.3
es-shim-unscopables: 1.0.2
array.prototype.toreversed@1.1.2:
dependencies:
call-bind: 1.0.7
define-properties: 1.2.1
es-abstract: 1.23.3
es-shim-unscopables: 1.0.2
array.prototype.tosorted@1.1.4:
dependencies:
call-bind: 1.0.7
@ -3336,9 +3326,9 @@ snapshots:
browserslist@4.23.2:
dependencies:
caniuse-lite: 1.0.30001642
electron-to-chromium: 1.4.828
node-releases: 2.0.14
caniuse-lite: 1.0.30001643
electron-to-chromium: 1.4.832
node-releases: 2.0.18
update-browserslist-db: 1.1.0(browserslist@4.23.2)
buffer@5.7.1:
@ -3364,7 +3354,7 @@ snapshots:
camelize@1.0.1: {}
caniuse-lite@1.0.30001642: {}
caniuse-lite@1.0.30001643: {}
castable-video@1.0.10:
dependencies:
@ -3479,7 +3469,7 @@ snapshots:
dependencies:
'@babel/runtime': 7.24.8
dayjs@1.11.11: {}
dayjs@1.11.12: {}
debug@3.2.7:
dependencies:
@ -3548,7 +3538,7 @@ snapshots:
dependencies:
esutils: 2.0.3
electron-to-chromium@1.4.828: {}
electron-to-chromium@1.4.832: {}
emoji-regex@9.2.2: {}
@ -3681,7 +3671,7 @@ snapshots:
eslint-import-resolver-typescript: 3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0)
eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0)
eslint-plugin-jsx-a11y: 6.9.0(eslint@8.57.0)
eslint-plugin-react: 7.34.4(eslint@8.57.0)
eslint-plugin-react: 7.35.0(eslint@8.57.0)
eslint-plugin-react-hooks: 4.6.2(eslint@8.57.0)
optionalDependencies:
typescript: 5.3.3
@ -3692,7 +3682,7 @@ snapshots:
eslint-import-resolver-node@0.3.9:
dependencies:
debug: 3.2.7
is-core-module: 2.14.0
is-core-module: 2.15.0
resolve: 1.22.8
transitivePeerDependencies:
- supports-color
@ -3705,8 +3695,8 @@ snapshots:
eslint-module-utils: 2.8.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0)
eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0)
fast-glob: 3.3.2
get-tsconfig: 4.7.5
is-core-module: 2.14.0
get-tsconfig: 4.7.6
is-core-module: 2.15.0
is-glob: 4.0.3
transitivePeerDependencies:
- '@typescript-eslint/parser'
@ -3737,7 +3727,7 @@ snapshots:
eslint-import-resolver-node: 0.3.9
eslint-module-utils: 2.8.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0)
hasown: 2.0.2
is-core-module: 2.14.0
is-core-module: 2.15.0
is-glob: 4.0.3
minimatch: 3.1.2
object.fromentries: 2.0.8
@ -3776,12 +3766,11 @@ snapshots:
dependencies:
eslint: 8.57.0
eslint-plugin-react@7.34.4(eslint@8.57.0):
eslint-plugin-react@7.35.0(eslint@8.57.0):
dependencies:
array-includes: 3.1.8
array.prototype.findlast: 1.2.5
array.prototype.flatmap: 1.3.2
array.prototype.toreversed: 1.1.2
array.prototype.tosorted: 1.1.4
doctrine: 2.1.0
es-iterator-helpers: 1.0.19
@ -3960,7 +3949,7 @@ snapshots:
es-errors: 1.3.0
get-intrinsic: 1.2.4
get-tsconfig@4.7.5:
get-tsconfig@4.7.6:
dependencies:
resolve-pkg-maps: 1.0.0
@ -4061,7 +4050,7 @@ snapshots:
ignore@5.3.1: {}
immutable@4.3.6: {}
immutable@4.3.7: {}
import-fresh@3.3.0:
dependencies:
@ -4116,7 +4105,7 @@ snapshots:
is-callable@1.2.7: {}
is-core-module@2.14.0:
is-core-module@2.15.0:
dependencies:
hasown: 2.0.2
@ -4310,7 +4299,7 @@ snapshots:
ms@2.1.3: {}
multiformats@13.1.3: {}
multiformats@13.2.0: {}
mux-embed@5.2.1: {}
@ -4335,7 +4324,7 @@ snapshots:
'@next/env': 14.0.4
'@swc/helpers': 0.5.2
busboy: 1.6.0
caniuse-lite: 1.0.30001642
caniuse-lite: 1.0.30001643
graceful-fs: 4.2.11
postcss: 8.4.31
react: 18.3.1
@ -4367,11 +4356,11 @@ snapshots:
node-abi@3.65.0:
dependencies:
semver: 7.6.2
semver: 7.6.3
node-addon-api@5.1.0: {}
node-releases@2.0.14: {}
node-releases@2.0.18: {}
normalize-path@3.0.0: {}
@ -4612,13 +4601,13 @@ snapshots:
resolve@1.22.8:
dependencies:
is-core-module: 2.14.0
is-core-module: 2.15.0
path-parse: 1.0.7
supports-preserve-symlinks-flag: 1.0.0
resolve@2.0.0-next.5:
dependencies:
is-core-module: 2.14.0
is-core-module: 2.15.0
path-parse: 1.0.7
supports-preserve-symlinks-flag: 1.0.0
@ -4652,7 +4641,7 @@ snapshots:
sass@1.77.8:
dependencies:
chokidar: 3.6.0
immutable: 4.3.6
immutable: 4.3.7
source-map-js: 1.2.0
sax@1.4.1: {}
@ -4668,7 +4657,7 @@ snapshots:
semver@6.3.1: {}
semver@7.6.2: {}
semver@7.6.3: {}
set-function-length@1.2.2:
dependencies:
@ -4694,7 +4683,7 @@ snapshots:
detect-libc: 2.0.3
node-addon-api: 5.1.0
prebuild-install: 7.1.2
semver: 7.6.2
semver: 7.6.3
simple-get: 4.0.1
tar-fs: 2.1.1
tunnel-agent: 0.6.0
@ -4703,7 +4692,7 @@ snapshots:
dependencies:
color: 4.2.3
detect-libc: 2.0.3
semver: 7.6.2
semver: 7.6.3
optionalDependencies:
'@img/sharp-darwin-arm64': 0.33.4
'@img/sharp-darwin-x64': 0.33.4

View File

@ -2,7 +2,6 @@
"name": "@futureporn/temporal-workflows",
"version": "1.0.0",
"private": true,
"type": "module",
"exports": {
"./workflows": "./src/workflows.ts",
"./activities": "./src/activities.ts"

View File

@ -28,10 +28,10 @@ importers:
version: link:../utils
'@temporalio/activity':
specifier: ^1.10.1
version: 1.10.1
version: 1.10.2
'@temporalio/workflow':
specifier: ^1.10.1
version: 1.10.1
version: 1.10.2
'@types/qs':
specifier: ^6.9.15
version: 6.9.15
@ -62,7 +62,7 @@ importers:
version: 3.1.4
ts-node:
specifier: ^10.9.2
version: 10.9.2(@types/node@20.14.10)(typescript@5.5.3)
version: 10.9.2(@types/node@20.14.11)(typescript@5.5.3)
typescript:
specifier: ^5.5.3
version: 5.5.3
@ -159,17 +159,17 @@ packages:
'@protobufjs/utf8@1.1.0':
resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==}
'@temporalio/activity@1.10.1':
resolution: {integrity: sha512-ZCUOq3pzIYuWeGCtY5cAbtjUqFr8qg6oQJ64gHqUlstk9mweeOtMhYUyBUNBeb+MlYn+YhtxyCaUc1HQlVXkHg==}
'@temporalio/activity@1.10.2':
resolution: {integrity: sha512-dKpG2igwEa433wBK0ps4J2hTZo7DeYJlM45tHBbHbrCv2I5PKm5psPDdsrkn6OOZaM4nH23M3EK+M7FRLPo2rQ==}
'@temporalio/common@1.10.1':
resolution: {integrity: sha512-0l2XS2+4NW8y7zsCAMixAK0UEIEQLwx/f32j67VGT/DjJ1ri63W5ZTbZU7DoAB3yHjlfymblLQXgK46SzHgP3Q==}
'@temporalio/common@1.10.2':
resolution: {integrity: sha512-uEZVLwfc/AhbOIJfrnqu+FsDjWZ+ZGw8mTDn/eFYobJcgLq04TLQ12RZXEO0ItjpCKWD8QEIu3jsvvr3c/lNow==}
'@temporalio/proto@1.10.1':
resolution: {integrity: sha512-kKe/B6yZU1iMmTQP161GLGHmWIhwhyn79rOFaurZ87kXswLmmss1TGO37H0GVqR1k4K8S8fewLPbayiYf+oksw==}
'@temporalio/proto@1.10.2':
resolution: {integrity: sha512-D/rWt2DWjXdTNmQk/5bxv72vhUFMcq78h2ek4m1TVLBOmWjAlf7NWYweRDZyXdq9LJ9Z7Cr4Yh8LhSXyv4Pe0Q==}
'@temporalio/workflow@1.10.1':
resolution: {integrity: sha512-EaZEXxSjqcDI1PLo1RKU5eMAXzal98/bWw1pC71fN55AhW1YsxiS8upJQESp20Y+kkMmxJfBrpvinbFtMDgBvg==}
'@temporalio/workflow@1.10.2':
resolution: {integrity: sha512-awPfqsZro43o0x17loNO3uJnlaRrb8Ht1tfqSQb+Az/yuCI/7XR+f8Js90az42qZ2Nv+JWjHAvhveLjN9f2QUg==}
'@tsconfig/node10@1.0.11':
resolution: {integrity: sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==}
@ -186,8 +186,8 @@ packages:
'@types/json-schema@7.0.15':
resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==}
'@types/node@20.14.10':
resolution: {integrity: sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ==}
'@types/node@20.14.11':
resolution: {integrity: sha512-kprQpL8MMeszbz6ojB5/tU8PLN4kesnN8Gjzw349rDlNgsSzg90lAVj3llK99Dh7JON+t9AuscPPFW6mPbTnSA==}
'@types/qs@6.9.15':
resolution: {integrity: sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg==}
@ -841,8 +841,8 @@ packages:
run-parallel@1.2.0:
resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==}
semver@7.6.2:
resolution: {integrity: sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==}
semver@7.6.3:
resolution: {integrity: sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==}
engines: {node: '>=10'}
hasBin: true
@ -1082,27 +1082,27 @@ snapshots:
'@protobufjs/utf8@1.1.0': {}
'@temporalio/activity@1.10.1':
'@temporalio/activity@1.10.2':
dependencies:
'@temporalio/common': 1.10.1
'@temporalio/common': 1.10.2
abort-controller: 3.0.0
'@temporalio/common@1.10.1':
'@temporalio/common@1.10.2':
dependencies:
'@temporalio/proto': 1.10.1
'@temporalio/proto': 1.10.2
long: 5.2.3
ms: 3.0.0-canary.1
proto3-json-serializer: 2.0.2
'@temporalio/proto@1.10.1':
'@temporalio/proto@1.10.2':
dependencies:
long: 5.2.3
protobufjs: 7.3.2
'@temporalio/workflow@1.10.1':
'@temporalio/workflow@1.10.2':
dependencies:
'@temporalio/common': 1.10.1
'@temporalio/proto': 1.10.1
'@temporalio/common': 1.10.2
'@temporalio/proto': 1.10.2
'@tsconfig/node10@1.0.11': {}
@ -1114,7 +1114,7 @@ snapshots:
'@types/json-schema@7.0.15': {}
'@types/node@20.14.10':
'@types/node@20.14.11':
dependencies:
undici-types: 5.26.5
@ -1134,7 +1134,7 @@ snapshots:
graphemer: 1.4.0
ignore: 5.3.1
natural-compare-lite: 1.4.0
semver: 7.6.2
semver: 7.6.3
tsutils: 3.21.0(typescript@5.5.3)
optionalDependencies:
typescript: 5.5.3
@ -1186,7 +1186,7 @@ snapshots:
debug: 4.3.5(supports-color@5.5.0)
globby: 11.1.0
is-glob: 4.0.3
semver: 7.6.2
semver: 7.6.3
tsutils: 3.21.0(typescript@5.5.3)
optionalDependencies:
typescript: 5.5.3
@ -1201,7 +1201,7 @@ snapshots:
globby: 11.1.0
is-glob: 4.0.3
minimatch: 9.0.3
semver: 7.6.2
semver: 7.6.3
ts-api-utils: 1.3.0(typescript@5.5.3)
optionalDependencies:
typescript: 5.5.3
@ -1218,7 +1218,7 @@ snapshots:
'@typescript-eslint/typescript-estree': 5.62.0(typescript@5.5.3)
eslint: 7.32.0
eslint-scope: 5.1.1
semver: 7.6.2
semver: 7.6.3
transitivePeerDependencies:
- supports-color
- typescript
@ -1232,7 +1232,7 @@ snapshots:
'@typescript-eslint/types': 6.21.0
'@typescript-eslint/typescript-estree': 6.21.0(typescript@5.5.3)
eslint: 7.32.0
semver: 7.6.2
semver: 7.6.3
transitivePeerDependencies:
- supports-color
- typescript
@ -1484,7 +1484,7 @@ snapshots:
optionator: 0.9.4
progress: 2.0.3
regexpp: 3.2.0
semver: 7.6.2
semver: 7.6.3
strip-ansi: 6.0.1
strip-json-comments: 3.1.1
table: 6.8.2
@ -1716,7 +1716,7 @@ snapshots:
ignore-by-default: 1.0.1
minimatch: 3.1.2
pstree.remy: 1.1.8
semver: 7.6.2
semver: 7.6.3
simple-update-notifier: 2.0.0
supports-color: 5.5.0
touch: 3.1.1
@ -1773,7 +1773,7 @@ snapshots:
'@protobufjs/path': 1.1.2
'@protobufjs/pool': 1.1.0
'@protobufjs/utf8': 1.1.0
'@types/node': 20.14.10
'@types/node': 20.14.11
long: 5.2.3
pstree.remy@1.1.8: {}
@ -1806,7 +1806,7 @@ snapshots:
dependencies:
queue-microtask: 1.2.3
semver@7.6.2: {}
semver@7.6.3: {}
set-function-length@1.2.2:
dependencies:
@ -1832,7 +1832,7 @@ snapshots:
simple-update-notifier@2.0.0:
dependencies:
semver: 7.6.2
semver: 7.6.3
slash@3.0.0: {}
@ -1884,14 +1884,14 @@ snapshots:
dependencies:
typescript: 5.5.3
ts-node@10.9.2(@types/node@20.14.10)(typescript@5.5.3):
ts-node@10.9.2(@types/node@20.14.11)(typescript@5.5.3):
dependencies:
'@cspotcode/source-map-support': 0.8.1
'@tsconfig/node10': 1.0.11
'@tsconfig/node12': 1.0.11
'@tsconfig/node14': 1.0.3
'@tsconfig/node16': 1.0.4
'@types/node': 20.14.10
'@types/node': 20.14.11
acorn: 8.12.1
acorn-walk: 8.3.3
arg: 4.1.3

View File

@ -0,0 +1,334 @@
import { type NotificationData } from '@futureporn/types'
import { type Helpers } from 'graphile-worker'
import {
type IStreamResponse,
type IStreamsResponse,
type IPlatformNotificationResponse,
type IVtubersResponse,
type IVtuberResponse,
type IVtuber,
} from '@futureporn/types'
import { subMinutes, addMinutes } from 'date-fns'
import qs from 'qs'
import { getProminentColor } from '@futureporn/image'
import { getImage } from '@futureporn/scout/vtuber.js'
import { fpSlugify } from '@futureporn/utils'
import { uploadFile } from '@futureporn/storage/s3.js'
export async function upsertStream({
date,
vtuberId,
platform,
pNotifId
}: {
date: string,
vtuberId: number,
platform: string,
pNotifId: number
}, helpers: Helpers): Promise<number> {
if (!date) throw new Error(`upsertStream requires date in the arg object, but it was undefined`);
if (!vtuberId) throw new Error(`upsertStream requires vtuberId in the arg object, but it was undefined`);
if (!platform) throw new Error(`upsertStream requires platform in the arg object, but it was undefined`);
if (!pNotifId) throw new Error(`upsertStream requires pNotifId in the arg object, but it was undefined`);
let streamId
// # Step 3.
// Finally we find or create the stream record
// The stream may already be in the db (the streamer is multi-platform streaming), so we look for that record.
// This gets a bit tricky. How do we determine one stream from another?
// For now, the rule is 30 minutes of separation.
// Anything <=30m is interpreted as the same stream. Anything >30m is interpreted as a different stream.
// If the stream is not in the db, we create the stream record
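// Example: a notification dated 2024-07-22T12:00:00Z matches an existing stream dated anywhere between 11:30:00Z and 12:30:00Z; anything outside that window becomes a new stream record.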
const dateSinceRange = subMinutes(new Date(date), 30)
const dateUntilRange = addMinutes(new Date(date), 30)
helpers.logger.info(`Find a stream within + or - 30 mins of the notif date=${new Date(date).toISOString()}. dateSinceRange=${dateSinceRange.toISOString()}, dateUntilRange=${dateUntilRange.toISOString()}`)
const findStreamQueryString = qs.stringify({
populate: 'platform-notifications',
filters: {
date: {
$gte: dateSinceRange,
$lte: dateUntilRange
},
vtuber: {
id: {
'$eq': vtuberId
}
}
}
}, { encode: false })
helpers.logger.info('>> findStream')
const findStreamRes = await fetch(`${process.env.STRAPI_URL}/api/streams?${findStreamQueryString}`, {
method: 'GET',
headers: {
'authorization': `Bearer ${process.env.SCOUT_STRAPI_API_KEY}`,
'Content-Type': 'application/json'
}
})
const findStreamData = await findStreamRes.json() as IStreamsResponse
if (findStreamData?.data && findStreamData.data.length > 0) {
helpers.logger.info('>> we found a findStreamData json. (there is an existing stream for this e-mail/notification)')
helpers.logger.info(JSON.stringify(findStreamData, null, 2))
const stream = findStreamData.data[0]
if (!stream) throw new Error('stream was undefined');
streamId = stream.id
// Before we're done here, we need to do something extra. We need to populate isChaturbateStream and/or isFanslyStream.
// We know which of these booleans to set based on the stream's related platformNotifications
// We go through each pNotif and look at it's platform
let isFanslyStream = false
let isChaturbateStream = false
if (stream.attributes.platformNotifications) {
for (const pn of stream.attributes.platformNotifications) {
if (pn.attributes.platform === 'fansly') {
isFanslyStream = true
} else if (pn.attributes.platform === 'chaturbate') {
isChaturbateStream = true
}
}
}
helpers.logger.info(`>>> updating stream ${streamId}. isFanslyStream=${isFanslyStream}, isChaturbateStream=${isChaturbateStream}`)
const updateStreamRes = await fetch(`${process.env.STRAPI_URL}/api/streams/${streamId}`, {
method: 'PUT',
headers: {
'authorization': `Bearer ${process.env.SCOUT_STRAPI_API_KEY}`,
'content-type': 'application/json'
},
body: JSON.stringify({
data: {
isFanslyStream: isFanslyStream,
isChaturbateStream: isChaturbateStream,
platformNotifications: [
pNotifId
]
}
})
})
const updateStreamJson = await updateStreamRes.json() as IStreamResponse
if (updateStreamJson?.error) throw new Error(JSON.stringify(updateStreamJson, null, 2));
helpers.logger.info(`>> assuming a successful update to the stream record. response as follows.`)
helpers.logger.info(JSON.stringify(updateStreamJson, null, 2))
}
if (!streamId) {
helpers.logger.info('>> did not find a streamId, so we go ahead and create a stream record in the db.')
const createStreamPayload = {
data: {
isFanslyStream: (platform === 'fansly') ? true : false,
isChaturbateStream: (platform === 'chaturbate') ? true : false,
archiveStatus: 'missing',
date: date,
date2: date,
date_str: date,
vtuber: vtuberId,
platformNotifications: [
pNotifId
]
}
}
helpers.logger.debug('>> createStreamPayload as follows')
helpers.logger.debug(JSON.stringify(createStreamPayload, null, 2))
const createStreamRes = await fetch(`${process.env.STRAPI_URL}/api/streams`, {
method: 'POST',
headers: {
'authorization': `Bearer ${process.env.SCOUT_STRAPI_API_KEY}`,
"Content-Type": "application/json"
},
body: JSON.stringify(createStreamPayload)
})
const createStreamJson = await createStreamRes.json() as IStreamResponse
helpers.logger.debug('>> we got the createStreamJson')
helpers.logger.debug(JSON.stringify(createStreamJson, null, 2))
if (createStreamJson.error) {
console.error(JSON.stringify(createStreamJson.error, null, 2))
throw new Error('Failed to create stream in DB due to an error. (see above)')
}
streamId = createStreamJson.data.id
}
if (!streamId) throw new Error('failed to get streamId')
return streamId
}
export async function upsertPlatformNotification({ source, date, platform, vtuberId }: { source: string, date: string, platform: string, vtuberId: number }, helpers: Helpers): Promise<number> {
helpers.logger.info('hello from upsertPlatformNotification', { source, date, platform, vtuberId });
if (!source) throw new Error(`upsertPlatformNotification requires source arg, but it was undefined`);
if (!date) throw new Error(`upsertPlatformNotification requires date arg, but it was undefined`);
if (!platform) throw new Error(`upsertPlatformNotification requires platform arg, but it was undefined`);
if (!vtuberId) throw new Error(`upsertPlatformNotification requires vtuberId arg, but it was undefined`);
let pNotifId
// # Step 2.
// Next we create the platform-notification record.
// This probably doesn't already exist, so we don't check for a pre-existing platform-notification.
const pNotifPayload = {
data: {
source: source,
date: date,
date2: date,
platform: platform,
vtuber: vtuberId,
}
}
helpers.logger.debug('pNotifPayload as follows')
helpers.logger.debug(JSON.stringify(pNotifPayload, null, 2))
const pNotifCreateRes = await fetch(`${process.env.STRAPI_URL}/api/platform-notifications`, {
method: 'POST',
headers: {
'authorization': `Bearer ${process.env.SCOUT_STRAPI_API_KEY}`,
'Content-Type': 'application/json'
},
body: JSON.stringify(pNotifPayload)
})
const pNotifData = await pNotifCreateRes.json() as IPlatformNotificationResponse
if (pNotifData.error) {
helpers.logger.error('>> we failed to create platform-notification, there was an error in the response')
helpers.logger.error(JSON.stringify(pNotifData.error, null, 2))
throw new Error(JSON.stringify(pNotifData.error, null, 2))
}
helpers.logger.debug(`>> pNotifData (json response) is as follows`)
helpers.logger.debug(JSON.stringify(pNotifData, null, 2))
if (!pNotifData.data?.id) throw new Error('failed to created pNotifData! The response was missing an id');
pNotifId = pNotifData.data.id
if (!pNotifId) throw new Error('failed to get Platform Notification ID');
return pNotifId
}
export async function upsertVtuber({ platform, userId, url, channel }: { platform: string, userId: string | null, url: string, channel: string }, helpers: Helpers): Promise<number> {
let vtuberId
helpers.logger.debug('>> # Step 1, upsertVtuber')
// # Step 1.
// First we find or create the vtuber
// The vtuber may already be in the db, so we look for that record. All we need is the Vtuber ID.
// If the vtuber is not in the db, we create the vtuber record.
// GET /api/:pluralApiId?filters[field][operator]=value
const findVtubersFilters = (() => {
if (platform === 'chaturbate') {
return { chaturbate: { $eq: url } }
} else if (platform === 'fansly') {
if (!userId) throw new Error('Fansly userId was undefined, but it is required.')
return { fanslyId: { $eq: userId } }
} else {
throw new Error(`upsertVtuber does not support platform=${platform}; without a filter the vtuber lookup would match unrelated records.`)
}
})()
helpers.logger.debug('>>>>> the following is findVtubersFilters.')
helpers.logger.debug(JSON.stringify(findVtubersFilters, null, 2))
const findVtubersQueryString = qs.stringify({
filters: findVtubersFilters
}, { encode: false })
helpers.logger.debug(`>>>>> platform=${platform}, url=${url}, userId=${userId}`)
helpers.logger.debug('>> findVtuber')
const findVtuberRes = await fetch(`${process.env.STRAPI_URL}/api/vtubers?${findVtubersQueryString}`, {
method: 'GET',
headers: {
'content-type': 'application/json'
}
})
const findVtuberJson = await findVtuberRes.json() as IVtubersResponse
helpers.logger.debug('>> here is the vtuber json')
helpers.logger.debug(JSON.stringify(findVtuberJson, null, 2))
if (findVtuberJson?.data && findVtuberJson.data.length > 0) {
helpers.logger.debug('>> a vtuber was FOUND')
if (findVtuberJson.data.length > 1) throw new Error('There were more than one vtuber matches in the response. There must only be one.');
const vtuber = findVtuberJson.data[0]
if (!vtuber) throw new Error('the matching vtuber record was undefined, but a vtuber record is required.')
helpers.logger.debug('here is the findVtuberJson (as follows)')
helpers.logger.debug(JSON.stringify(findVtuberJson, null, 2))
helpers.logger.debug(`the matching vtuber has ID=${vtuber.id} (${vtuber.attributes.displayName})`)
vtuberId = vtuber.id
}
if (!vtuberId) {
helpers.logger.info('>> vtuberId was not found so we create')
/**
* We are creating a vtuber record.
* We need a few things.
* * image URL
* * themeColor
*
* To get an image, we have to do a few things.
* * [x] download image from platform
* * [x] get themeColor from image
* * [x] upload image to b2
* * [x] get B2 cdn link to image
*
* To get themeColor, we need the image locally where we can then run
*/
// download image from platform
// vtuber.getImage expects a vtuber object, which we don't have yet, so we create a dummy one
const dummyVtuber: IVtuber = {
id: 69,
attributes: {
slug: fpSlugify(channel),
displayName: 'example',
vods: [],
description1: ' ',
image: ' ',
themeColor: ' ',
fanslyId: (platform === 'fansly') ? (userId ? userId : undefined) : undefined
}
}
const imageFile = await getImage(dummyVtuber)
// get themeColor from image
const themeColor = await getProminentColor(imageFile)
// upload image to b2
const b2FileData = await uploadFile(imageFile)
// get b2 cdn link to image
const imageCdnLink = `${process.env.CDN_BUCKET_URL}/${b2FileData.Key}`
helpers.logger.info(`>>> createVtuberRes here we go 3-2-1, POST!`)
const createVtuberRes = await fetch(`${process.env.STRAPI_URL}/api/vtubers`, {
method: 'POST',
headers: {
'authorization': `Bearer ${process.env.SCOUT_STRAPI_API_KEY}`,
'content-type': 'application/json'
},
body: JSON.stringify({
data: {
displayName: channel,
fansly: (platform === 'fansly') ? url : null,
fanslyId: (platform === 'fansly') ? userId : null,
chaturbate: (platform === 'chaturbate') ? url : null,
slug: fpSlugify(channel),
description1: ' ',
image: imageCdnLink,
themeColor: themeColor || '#dde1ec'
}
})
})
const createVtuberJson = await createVtuberRes.json() as IVtuberResponse
helpers.logger.info('>> createVtuberJson as follows')
helpers.logger.info(JSON.stringify(createVtuberJson, null, 2))
if (createVtuberJson.data) {
vtuberId = createVtuberJson.data.id
helpers.logger.info(`>>> vtuber created with id=${vtuberId}`)
}
}
if (!vtuberId) throw new Error(`upsertVtuber failed to produce a vtuberId! This should not happen under normal circumstances.`);
return vtuberId
}
export default async function (payload: NotificationData, helpers: Helpers) {
const source = 'email'
const { url, platform, channel, displayName, date, userId, avatar } = payload
helpers.logger.info(`process_notif_email task execution has begun with date=${date}, channel=${channel}, platform=${platform}, url=${url}, displayName=${displayName}, avatar=${avatar}`);
const vtuberId = await upsertVtuber({channel, platform, url, userId }, helpers);
const pNotifId = await upsertPlatformNotification({date, platform, source, vtuberId}, helpers);
const streamId = await upsertStream({date, platform, pNotifId, vtuberId}, helpers);
return `vtuberId: ${vtuberId} | pNotifId: ${pNotifId} | streamId: ${streamId}`;
}
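
Note: this task is only enqueued in this diff; the runner that executes it is not shown. A minimal sketch (an assumption, not part of this commit) of the worker-runner side, using graphile-worker's run() with a tasks directory — the directory path and concurrency are illustrative, though the tsup config's "src/tasks/**.ts" entries suggest this layout:

import { run } from 'graphile-worker'

async function main() {
  if (!process.env.DATABASE_URL) throw new Error('DATABASE_URL is undefined in env');
  const runner = await run({
    connectionString: process.env.DATABASE_URL,
    concurrency: 5,
    // graphile-worker registers each file in this directory as a task named after the file,
    // so the compiled tasks/process_notif_email.js would pick up jobs queued by the mailbox service.
    taskDirectory: `${__dirname}/tasks`,
  })
  // Keep running until the runner is stopped.
  await runner.promise
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});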

View File

@ -3,7 +3,8 @@
"type": "module",
"version": "4.0.1",
"description": "vtuber data acquisition",
"main": "src/index.js",
"main": "src/index.ts",
"types": "src/index.ts",
"exports": {
"./*.js": "./src/*.js"
},
@ -22,7 +23,6 @@
"@aws-sdk/client-s3": "^3.583.0",
"@aws-sdk/lib-storage": "^3.588.0",
"@aws-sdk/s3-request-presigner": "^3.588.0",
"@futureporn/temporal-workflows": "workspace:*",
"@futureporn/types": "workspace:*",
"@paralleldrive/cuid2": "^2.2.2",
"@temporalio/client": "^1.9.0",
@ -51,6 +51,7 @@
},
"packageManager": "pnpm@9.2.0",
"devDependencies": {
"@futureporn/utils": "workspace:^",
"@types/chai": "^4.3.16",
"@types/cheerio": "^0.22.35",
"@types/mailparser": "^3.4.4",

File diff suppressed because it is too large

View File

@ -1,5 +1,4 @@
import { download } from './utils.js';
import { getTmpFile } from './utils.js';
import { download, getTmpFile } from '@futureporn/utils';
const regex = {
username: new RegExp(/^https:\/\/fansly\.com\/(?:live\/)?([^\/]+)/)

View File

@ -11,7 +11,6 @@ import { Client, Connection } from '@temporalio/client'
// import { type NotificationData } from '@futureporn/types'
// import { type FetchMessageObject } from 'imapflow'
import { createId } from '@paralleldrive/cuid2'
import { WorkflowA } from '@futureporn/temporal-workflows/workflows'
console.log('connecting to temporal...')
const connection = await Connection.connect({ address: 'temporal-frontend.futureporn.svc.cluster.local:7233' });
@ -63,15 +62,16 @@ const client = new Client({ connection, namespace: 'futureporn' });
// const email = new Email()
// email.once('message', (msg: FetchMessageObject) => handleMessage({ email, msg }))
// await email.connect()
console.log('scout is starting')
console.log('scout is starting @todo @todo')
const wfId = `process-email-${createId()}`
const handle = await client.workflow.start(WorkflowA, {
workflowId: wfId,
taskQueue: 'futureporn',
args: [ 'CJ_Clippy' ]
});
const result = await handle.result()
console.log(result)
// @todo
// const handle = await client.workflow.start(WorkflowA, {
// workflowId: wfId,
// taskQueue: 'futureporn',
// args: [ 'CJ_Clippy' ]
// });
// const result = await handle.result()
// console.log(result)
})()

View File

@ -1,7 +1,7 @@
import * as htmlparser2 from "htmlparser2";
import { load } from 'cheerio'
import { download } from './utils.js';
import { download } from '@futureporn/utils';
import pRetry, { AbortError } from 'p-retry';
if (!process.env.SCOUT_NITTER_ACCESS_KEY) throw new Error('SCOUT_NITTER_ACCESS_KEY was undefined in env');

Some files were not shown because too many files have changed in this diff