Compare commits

...

2 Commits

Author SHA1 Message Date
CJ_Clippy f342bf9671 implement record retrying 2024-08-19 20:27:40 -08:00 (ci / build (push): failing after 1s)
CJ_Clippy 9cd9b6a53d chatops progress 2024-08-16 18:42:44 -08:00
87 changed files with 2167 additions and 2289 deletions

View File

@ -33,7 +33,7 @@ Get through the [OODA loop](https://en.wikipedia.org/wiki/OODA_loop) as many tim
### The computer doesn't care
> "There are 2 hard problems in computer science: cache invalidation, naming things, and off-by-1 errors."
-> Leon Bambrick
+> -- Leon Bambrick
In other words, pick something for a name and roll with the punches.
@ -44,3 +44,8 @@ In other words, pick something for a name and roll with the punches.
3. Simplify or optimize
4. Accelerate Cycle Time
5. Automate
### Never Settle
> "But it's also about looking at things anew and what they could be instead of what they are"
> -- Rodney Mullen

View File

@ -215,6 +215,12 @@ cmd_button('postgres:drop',
icon_name='delete',
text='DROP all databases'
)
cmd_button('postgres:refresh',
argv=['sh', './scripts/postgres-refresh.sh'],
resource='migrations',
icon_name='refresh',
text='Refresh schema cache'
)
cmd_button('capture-api:create',
argv=['http', '--ignore-stdin', 'POST', 'http://localhost:5003/api/record', "url='https://twitch.tv/ironmouse'", "channel='ironmouse'"],
@ -223,9 +229,9 @@ cmd_button('capture-api:create',
text='Start Recording'
)
-cmd_button('postgrest:migrate',
-argv=['./scripts/postgrest-migrations.sh'],
-resource='postgrest',
+cmd_button('postgres:migrate',
+argv=['./scripts/postgres-migrations.sh'],
+resource='postgresql-primary',
icon_name='directions_run',
text='Run migrations',
)
@ -243,6 +249,16 @@ cmd_button('factory:test',
text='test',
)
## we ignore unused image warnings because we do actually use this image.
## instead of being invoked by helm, we start a container using this image manually via Tilt UI
# update_settings(suppress_unused_image_warnings=["fp/migrations"])
docker_build(
'fp/migrations',
'.',
dockerfile='dockerfiles/migrations.dockerfile',
target='migrations',
pull=False,
)
## Uncomment the following for fp/next in dev mode
## this is useful for changing the UI and seeing results
@ -350,7 +366,7 @@ docker_build(
'./services/capture',
],
live_update=[
-sync('./services/capture/dist', '/app/dist'),
+sync('./services/capture', '/app/services/capture'),
],
pull=False,
)
@ -416,6 +432,13 @@ k8s_resource(
link('https://game-2048.fp.sbtp.xyz/')
]
)
k8s_resource(
workload='whoami',
labels=['frontend'],
links=[
link('https://whoami.fp.sbtp.xyz/')
]
)
k8s_resource(
workload='postgresql-primary',
port_forwards=['5432'],
@ -513,7 +536,11 @@ k8s_resource(
port_forwards=['5050:80'],
labels=['database'],
)
k8s_resource(
workload='migrations',
labels=['database'],
resource_deps=['postgresql-primary'],
)
k8s_resource(
workload='cert-manager',

View File

@ -52,6 +52,11 @@ spec:
              secretKeyRef:
                name: postgrest
                key: automationUserJwt
+           - name: HTTP_PROXY
+             valueFrom:
+               secretKeyRef:
+                 name: capture
+                 key: httpProxy
            - name: POSTGREST_URL
              value: "{{ .Values.postgrest.url }}"
            - name: PORT
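Note: Kubernetes only injects HTTP_PROXY as an environment variable; Node's fetch/node-fetch does not honor it automatically, so the capture service presumably wires it up itself. A minimal sketch of that wiring, assuming the https-proxy-agent package (the package choice and helper name are illustrative, not shown in this diff):

```ts
// Hypothetical sketch: route node-fetch traffic through HTTP_PROXY when set.
// Assumes the `https-proxy-agent` package; the real capture code may differ.
import fetch, { type RequestInit } from 'node-fetch'
import { HttpsProxyAgent } from 'https-proxy-agent'

const proxyUrl = process.env.HTTP_PROXY

export async function proxiedFetch(url: string, init: RequestInit = {}) {
  // node-fetch only uses a proxy when an agent is passed explicitly
  if (proxyUrl) init.agent = new HttpsProxyAgent(proxyUrl)
  return fetch(url, init)
}
```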

View File

@ -0,0 +1,21 @@
---
apiVersion: v1
kind: Pod
metadata:
  name: migrations
  namespace: futureporn
  labels:
    app.kubernetes.io/name: migrations
spec:
  containers:
    - name: migrations
      image: "{{ .Values.migrations.imageName }}"
      resources: {}
      env:
        - name: DATABASE_PASSWORD
          valueFrom:
            secretKeyRef:
              name: postgresql
              key: password
  restartPolicy: Never

View File

@ -90,4 +90,6 @@ chisel:
game2048:
  hostname: game-2048.fp.sbtp.xyz
whoami:
-  hostname: whoami.fp.sbtp.xyz
\ No newline at end of file
+  hostname: whoami.fp.sbtp.xyz
+migrations:
+  imageName: fp/migrations

View File

@ -4,22 +4,49 @@ export as namespace Futureporn;
declare namespace Futureporn {
-interface RecordingRecord {
-  id: number;
-  recordingState: RecordingState;
-  fileSize: number;
-  discordMessageId: string;
-  isAborted: boolean;
-}
+type PlatformNotificationType = 'email' | 'manual' | 'twitter'
+type ArchiveStatus = 'good' | 'issue' | 'missing'
+type RecordingState = 'recording' | 'stalled' | 'aborted' | 'failed' | 'finished'
+type Status = Partial<'pending_recording' | RecordingState>
+interface Stream {
+  id: string;
+  url: string;
+  platform_notification_type: PlatformNotificationType;
+  discord_message_id: string;
+  date: Date;
+  created_at: Date;
+  updated_at: Date;
+  vtuber: string;
+  tweet: string;
+  archive_status: ArchiveStatus;
+  is_chaturbate_stream: Boolean;
+  is_fansly_stream: Boolean;
+  is_recording_aborted: Boolean;
+  status: Status;
+  segments?: Segment[]
+}
-interface RawRecordingRecord {
+interface RecordingRecord {
  id: number;
  recording_state: RecordingState;
  file_size: number;
  discord_message_id: string;
  is_aborted: boolean;
  is_recording_aborted: boolean;
  updated_at: Date;
  created_at: Date;
}
+interface Segment {
+  id: number;
+  s3_key: string;
+  s3_id: string;
+  bytes: number;
+  stream?: Stream[];
+  created_at: Date;
+  updated_at: Date;
+}
-type RecordingState = 'pending' | 'recording' | 'aborted' | 'ended'
interface IMuxAsset {
@ -70,7 +97,7 @@ declare namespace Futureporn {
attributes: {
date: string;
date2: string;
-archiveStatus: 'good' | 'issue' | 'missing';
+archiveStatus: ArchiveStatus;
vods: IVodsResponse;
cuid: string;
vtuber: IVtuberResponse;
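Note: the snake_case fields match PostgREST's JSON output one-to-one, so /streams rows can be typed without a mapping layer. A sketch of a typed lookup, following the fetch patterns used elsewhere in this diff (the helper name and parameters are illustrative):

```ts
import type { Stream } from '@futureporn/types'

// Sketch: fetch one Stream row by discord_message_id via PostgREST.
// postgrestUrl and jwt stand in for the service's real config values.
async function getStreamByDiscordMessageId(
  postgrestUrl: string,
  jwt: string,
  discordMessageId: string
): Promise<Stream | null> {
  const res = await fetch(`${postgrestUrl}/streams?discord_message_id=eq.${discordMessageId}`, {
    headers: { 'Authorization': `Bearer ${jwt}`, 'Accept': 'application/json' }
  })
  if (!res.ok) throw new Error(`PostgREST request failed. status=${res.status}`)
  const rows = await res.json() as Stream[]
  return rows[0] ?? null
}
```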

View File

@ -1,7 +0,0 @@
# @futureporn/worker
The system component which runs background tasks such as thumbnail generation, video encoding, file transfers, etc.
We use [Graphile Worker](https://worker.graphile.org)

View File

@ -1,26 +0,0 @@
{
"name": "@futureporn/worker",
"type": "module",
"version": "1.3.0",
"private": true,
"scripts": {
"bundle": "node ./src/create-workflow-bundle.js",
"build": "tsc --build",
"lint": "eslint .",
"dev": "nodemon --ext js,ts,json,yaml --watch ./src/index.ts --exec \"node --loader ts-node/esm --disable-warning=ExperimentalWarning ./src/index.ts\"",
"start": "node dist/index.js",
"clean": "rm -rf dist",
"superclean": "rm -rf node_modules && rm -rf pnpm-lock.yaml && rm -rf dist"
},
"dependencies": {
"date-fns": "^3.6.0",
"dotenv": "^16.4.5",
"graphile-worker": "^0.16.6",
"qs": "^6.12.3"
},
"packageManager": "pnpm@9.5.0",
"devDependencies": {
"nodemon": "^2.0.15",
"typescript": "^5.5.3"
}
}

File diff suppressed because it is too large

View File

@ -1,35 +0,0 @@
import { run } from 'graphile-worker'
import { dirname } from 'node:path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
if (!process.env.DATABASE_URL) throw new Error('DATABASE_URL is undefined in env');
const connectionString = process.env.DATABASE_URL
console.log(`process.env.DATABASE_URL=${process.env.DATABASE_URL}`)
async function main() {
// Run a worker to execute jobs:
const runner = await run({
connectionString,
concurrency: 5,
// Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
noHandleSignals: false,
pollInterval: 1000,
taskDirectory: `${__dirname}/tasks`,
});
// Immediately await (or otherwise handle) the resulting promise, to avoid
// "unhandled rejection" errors causing a process crash in the event of
// something going wrong.
await runner.promise;
// If the worker exits (whether through fatal error or otherwise), the above
// promise will resolve/reject.
}
main().catch((err) => {
console.error(err);
process.exit(1);
});

View File

@ -1,4 +0,0 @@
export default async function (payload: any, helpers: any) {
const { name } = payload;
helpers.logger.info(`Hello, ${name}`);
};

View File

@ -1,15 +0,0 @@
import { download } from "@futureporn/utils";
import { getProminentColor } from "@futureporn/image";
export default async function (payload: any, helpers: any) {
const { url } = payload;
// helpers.logger.info(`Downloading ${url}`)
// const imageFile = await download({ url, filePath: '/tmp/my-image.png' })
// helpers.logger.info(`downloaded to ${imageFile}`)
// if (!imageFile) throw new Error('imageFile was null')
// const color = await getProminentColor(imageFile)
// helpers.logger.info(`prominent color is ${color}`)
// return color
return '#0xffcc00'
}

View File

@ -1,29 +0,0 @@
{
"compilerOptions": {
// Base Options recommended for all projects
"esModuleInterop": true,
"skipLibCheck": true,
"target": "es2022",
"allowJs": true,
"resolveJsonModule": true,
"moduleDetection": "force",
"isolatedModules": true,
// Enable strict type checking so you can catch bugs early
"strict": true,
"noUncheckedIndexedAccess": true,
"noImplicitOverride": true,
// Transpile our TypeScript code to JavaScript
"module": "NodeNext",
"outDir": "dist",
"lib": [
"es2022"
]
},
// Include the necessary files for your project
"include": [
"**/*.ts"
],
"exclude": [
"node_modules"
]
}

View File

@ -71,7 +71,8 @@ kubectl --namespace futureporn delete secret capture --ignore-not-found
kubectl --namespace futureporn create secret generic capture \
--from-literal=workerConnectionString=${WORKER_CONNECTION_STRING} \
--from-literal=s3AccessKeyId=${S3_USC_BUCKET_KEY_ID} \
---from-literal=s3SecretAccessKey=${S3_USC_BUCKET_APPLICATION_KEY}
+--from-literal=s3SecretAccessKey=${S3_USC_BUCKET_APPLICATION_KEY} \
+--from-literal=httpProxy=${HTTP_PROXY}
kubectl --namespace futureporn delete secret mailbox --ignore-not-found
kubectl --namespace futureporn create secret generic mailbox \

View File

@ -61,14 +61,7 @@ kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PAS
-## Create the futureporn Postgrest database
-kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\
-CREATE DATABASE futureporn \
-WITH \
-OWNER = postgres \
-ENCODING = 'UTF8' \
-LOCALE_PROVIDER = 'libc' \
-CONNECTION LIMIT = -1 \
-IS_TEMPLATE = False;"
+## !!! Don't create the database here! Allow @services/migrations to create the database.
+# @futureporn/migrations takes care of these tasks now

View File

@ -5,5 +5,12 @@ if [ -z $POSTGRES_PASSWORD ]; then
fi
## drop futureporn_db
-kubectl -n futureporn exec postgresql-primary -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "DROP DATABASE futureporn_db WITH (FORCE);"
+kubectl -n futureporn exec postgresql-primary-0 -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "DROP DATABASE futureporn_db WITH (FORCE);"
+## drop futureporn
+kubectl -n futureporn exec postgresql-primary-0 -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "DROP DATABASE futureporn WITH (FORCE);"
+## delete postgrest roles
+kubectl -n futureporn exec postgresql-primary-0 -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "DROP ROLE authenticator;"
+kubectl -n futureporn exec postgresql-primary-0 -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "DROP ROLE automation;"
+kubectl -n futureporn exec postgresql-primary-0 -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "DROP ROLE web_anon;"

scripts/postgres-migrations.sh (new executable file, 13 lines)
View File

@ -0,0 +1,13 @@
#!/bin/bash
if [ -z $POSTGRES_PASSWORD ]; then
echo "POSTGRES_PASSWORD was missing in env. In development environment, runing this command via the UI button in Tilt is recommended as it sets the env var for you."
exit 5
fi
# kubectl -n futureporn run postgrest-migrations -i --rm=true --image=gitea.futureporn.net/futureporn/migrations:latest --env=DATABASE_PASSWORD=${POSTGRES_PASSWORD}
kubectl -n futureporn run postgres-migrations -i --rm=true --image=fp/migrations:latest --env=DATABASE_PASSWORD=${POSTGRES_PASSWORD}

View File

@ -0,0 +1,9 @@
if [ -z $POSTGRES_PASSWORD ]; then
echo "POSTGRES_PASSWORD was missing in env"
exit 5
fi
# reload the schema
# @see https://postgrest.org/en/latest/references/schema_cache.html#schema-reloading
kubectl -n futureporn exec postgresql-primary-0 -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "NOTIFY pgrst, 'reload schema'"
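Note: `NOTIFY pgrst, 'reload schema'` is PostgREST's documented schema-cache reload channel, and any client connected to the same database can send it, not just psql. A sketch of triggering the same reload from Node with the pg package (the function name and DATABASE_URL env var are assumptions):

```ts
import { Client } from 'pg'

// Sketch: ask PostgREST to reload its schema cache, e.g. after migrations.
// Assumes DATABASE_URL points at the database PostgREST is serving.
async function reloadPostgrestSchema(): Promise<void> {
  const client = new Client({ connectionString: process.env.DATABASE_URL })
  await client.connect()
  try {
    await client.query("NOTIFY pgrst, 'reload schema'")
  } finally {
    await client.end()
  }
}
```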

View File

@ -1,12 +0,0 @@
#!/bin/bash
if [ -z $POSTGRES_PASSWORD ]; then
echo "POSTGRES_PASSWORD was missing in env. In development environment, runing this command via the UI button in Tilt is recommended as it sets the env var for you."
exit 5
fi
kubectl -n futureporn run postgrest-migrations -i --rm=true --image=gitea.futureporn.net/futureporn/migrations:latest --env=DATABASE_PASSWORD=${POSTGRES_PASSWORD}

View File

@ -13,6 +13,6 @@
# * * * * * task ?opts {payload}
-## every 5 minutes, we see which /records are stale and we mark them as such.
+## every n minutes, we see which /records are stale and we mark them as such.
## this prevents stalled Record updates by marking stalled recordings as stopped
-*/5 * * * * expire_records
+* * * * * update_stream_statuses ?max=1 { stalled_minutes:1 }
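Note: in Graphile Worker's crontab format, `?max=1` is a cron option (it caps the job at one attempt) and the trailing `{ stalled_minutes:1 }` is a JSON payload passed to the task. A sketch of what the matching task handler could look like (the payload type, table shape, and SQL are illustrative; the real task isn't shown in this diff):

```ts
import type { Task } from 'graphile-worker'

// Sketch: mark recordings as stalled when they haven't been updated recently.
// The payload shape mirrors the crontab's `{ stalled_minutes:1 }` literal.
interface UpdateStreamStatusesPayload {
  stalled_minutes: number
}

export const update_stream_statuses: Task = async (payload, helpers) => {
  const { stalled_minutes } = payload as UpdateStreamStatusesPayload
  // Illustrative SQL; the actual statuses/columns come from the streams table.
  await helpers.query(
    `UPDATE streams
     SET status = 'stalled'
     WHERE status = 'recording'
       AND updated_at < now() - ($1 || ' minutes')::interval`,
    [String(stalled_minutes)]
  )
}
```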

View File

@ -7,8 +7,10 @@
"scripts": {
"test": "echo \"Warn: no test specified\" && exit 0",
"start": "node ./dist/index.js",
"dev.nodemon": "nodemon --legacy-watch --ext js,ts --watch ./src --exec \"node --loader ts-node/esm --disable-warning=ExperimentalWarning ./src/index.ts\"",
"dev": "tsx --watch ./src/index.ts",
"dev": "pnpm run dev.nodemon # yes this is crazy to have nodemon execute tsx, but it's the only way I have found to get live reloading in TS/ESM/docker with Graphile Worker's way of loading tasks",
"dev.tsx": "tsx ./src/index.ts",
"dev.nodemon": "nodemon --ext ts --exec \"pnpm run dev.tsx\"",
"dev.node": "node --no-warnings=ExperimentalWarning --loader ts-node/esm src/index.ts",
"build": "tsc --build",
"clean": "rm -rf dist",
"superclean": "rm -rf node_modules && rm -rf pnpm-lock.yaml && rm -rf dist",
@ -20,18 +22,22 @@
"license": "Unlicense",
"dependencies": {
"@discordeno/bot": "19.0.0-next.746f0a9",
"@types/node": "^22.2.0",
"@types/qs": "^6.9.15",
"date-fns": "^3.6.0",
"dd-cache-proxy": "^2.1.1",
"dotenv": "^16.4.5",
"graphile-config": "0.0.1-beta.9",
"graphile-worker": "^0.16.6",
"pretty-bytes": "^6.1.1"
"node-fetch": "^3.3.2",
"pretty-bytes": "^6.1.1",
"qs": "^6.13.0"
},
"devDependencies": {
"@futureporn/types": "workspace:^",
"nodemon": "^3.1.4",
"ts-node": "^10.9.2",
"tsx": "^4.16.2",
"typescript": "^5.5.3"
"tsx": "^4.17.0",
"typescript": "^5.5.4"
}
}

View File

@ -11,6 +11,12 @@ importers:
'@discordeno/bot':
specifier: 19.0.0-next.746f0a9
version: 19.0.0-next.746f0a9
'@types/node':
specifier: ^22.2.0
version: 22.2.0
'@types/qs':
specifier: ^6.9.15
version: 6.9.15
date-fns:
specifier: ^3.6.0
version: 3.6.0
@ -26,9 +32,15 @@ importers:
graphile-worker:
specifier: ^0.16.6
version: 0.16.6(typescript@5.5.4)
node-fetch:
specifier: ^3.3.2
version: 3.3.2
pretty-bytes:
specifier: ^6.1.1
version: 6.1.1
qs:
specifier: ^6.13.0
version: 6.13.0
devDependencies:
'@futureporn/types':
specifier: workspace:^
@ -38,12 +50,12 @@ importers:
version: 3.1.4
ts-node:
specifier: ^10.9.2
version: 10.9.2(@types/node@22.1.0)(typescript@5.5.4)
version: 10.9.2(@types/node@22.2.0)(typescript@5.5.4)
tsx:
specifier: ^4.16.2
version: 4.16.2
specifier: ^4.17.0
version: 4.17.0
typescript:
specifier: ^5.5.3
specifier: ^5.5.4
version: 5.5.4
packages:
@ -79,141 +91,147 @@ packages:
'@discordeno/utils@19.0.0-next.746f0a9':
resolution: {integrity: sha512-UY5GataakuY0yc4SN5qJLexUbTc5y293G3gNAWSaOjaZivEytcdxD4xgeqjNj9c4eN57B3Lfzus6tFZHXwXNOA==}
'@esbuild/aix-ppc64@0.21.5':
resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==}
engines: {node: '>=12'}
'@esbuild/aix-ppc64@0.23.0':
resolution: {integrity: sha512-3sG8Zwa5fMcA9bgqB8AfWPQ+HFke6uD3h1s3RIwUNK8EG7a4buxvuFTs3j1IMs2NXAk9F30C/FF4vxRgQCcmoQ==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [aix]
'@esbuild/android-arm64@0.21.5':
resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==}
engines: {node: '>=12'}
'@esbuild/android-arm64@0.23.0':
resolution: {integrity: sha512-EuHFUYkAVfU4qBdyivULuu03FhJO4IJN9PGuABGrFy4vUuzk91P2d+npxHcFdpUnfYKy0PuV+n6bKIpHOB3prQ==}
engines: {node: '>=18'}
cpu: [arm64]
os: [android]
'@esbuild/android-arm@0.21.5':
resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==}
engines: {node: '>=12'}
'@esbuild/android-arm@0.23.0':
resolution: {integrity: sha512-+KuOHTKKyIKgEEqKbGTK8W7mPp+hKinbMBeEnNzjJGyFcWsfrXjSTNluJHCY1RqhxFurdD8uNXQDei7qDlR6+g==}
engines: {node: '>=18'}
cpu: [arm]
os: [android]
'@esbuild/android-x64@0.21.5':
resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==}
engines: {node: '>=12'}
'@esbuild/android-x64@0.23.0':
resolution: {integrity: sha512-WRrmKidLoKDl56LsbBMhzTTBxrsVwTKdNbKDalbEZr0tcsBgCLbEtoNthOW6PX942YiYq8HzEnb4yWQMLQuipQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [android]
'@esbuild/darwin-arm64@0.21.5':
resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==}
engines: {node: '>=12'}
'@esbuild/darwin-arm64@0.23.0':
resolution: {integrity: sha512-YLntie/IdS31H54Ogdn+v50NuoWF5BDkEUFpiOChVa9UnKpftgwzZRrI4J132ETIi+D8n6xh9IviFV3eXdxfow==}
engines: {node: '>=18'}
cpu: [arm64]
os: [darwin]
'@esbuild/darwin-x64@0.21.5':
resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==}
engines: {node: '>=12'}
'@esbuild/darwin-x64@0.23.0':
resolution: {integrity: sha512-IMQ6eme4AfznElesHUPDZ+teuGwoRmVuuixu7sv92ZkdQcPbsNHzutd+rAfaBKo8YK3IrBEi9SLLKWJdEvJniQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [darwin]
'@esbuild/freebsd-arm64@0.21.5':
resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==}
engines: {node: '>=12'}
'@esbuild/freebsd-arm64@0.23.0':
resolution: {integrity: sha512-0muYWCng5vqaxobq6LB3YNtevDFSAZGlgtLoAc81PjUfiFz36n4KMpwhtAd4he8ToSI3TGyuhyx5xmiWNYZFyw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [freebsd]
'@esbuild/freebsd-x64@0.21.5':
resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==}
engines: {node: '>=12'}
'@esbuild/freebsd-x64@0.23.0':
resolution: {integrity: sha512-XKDVu8IsD0/q3foBzsXGt/KjD/yTKBCIwOHE1XwiXmrRwrX6Hbnd5Eqn/WvDekddK21tfszBSrE/WMaZh+1buQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [freebsd]
'@esbuild/linux-arm64@0.21.5':
resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==}
engines: {node: '>=12'}
'@esbuild/linux-arm64@0.23.0':
resolution: {integrity: sha512-j1t5iG8jE7BhonbsEg5d9qOYcVZv/Rv6tghaXM/Ug9xahM0nX/H2gfu6X6z11QRTMT6+aywOMA8TDkhPo8aCGw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [linux]
'@esbuild/linux-arm@0.21.5':
resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==}
engines: {node: '>=12'}
'@esbuild/linux-arm@0.23.0':
resolution: {integrity: sha512-SEELSTEtOFu5LPykzA395Mc+54RMg1EUgXP+iw2SJ72+ooMwVsgfuwXo5Fn0wXNgWZsTVHwY2cg4Vi/bOD88qw==}
engines: {node: '>=18'}
cpu: [arm]
os: [linux]
'@esbuild/linux-ia32@0.21.5':
resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==}
engines: {node: '>=12'}
'@esbuild/linux-ia32@0.23.0':
resolution: {integrity: sha512-P7O5Tkh2NbgIm2R6x1zGJJsnacDzTFcRWZyTTMgFdVit6E98LTxO+v8LCCLWRvPrjdzXHx9FEOA8oAZPyApWUA==}
engines: {node: '>=18'}
cpu: [ia32]
os: [linux]
'@esbuild/linux-loong64@0.21.5':
resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==}
engines: {node: '>=12'}
'@esbuild/linux-loong64@0.23.0':
resolution: {integrity: sha512-InQwepswq6urikQiIC/kkx412fqUZudBO4SYKu0N+tGhXRWUqAx+Q+341tFV6QdBifpjYgUndV1hhMq3WeJi7A==}
engines: {node: '>=18'}
cpu: [loong64]
os: [linux]
'@esbuild/linux-mips64el@0.21.5':
resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==}
engines: {node: '>=12'}
'@esbuild/linux-mips64el@0.23.0':
resolution: {integrity: sha512-J9rflLtqdYrxHv2FqXE2i1ELgNjT+JFURt/uDMoPQLcjWQA5wDKgQA4t/dTqGa88ZVECKaD0TctwsUfHbVoi4w==}
engines: {node: '>=18'}
cpu: [mips64el]
os: [linux]
'@esbuild/linux-ppc64@0.21.5':
resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==}
engines: {node: '>=12'}
'@esbuild/linux-ppc64@0.23.0':
resolution: {integrity: sha512-cShCXtEOVc5GxU0fM+dsFD10qZ5UpcQ8AM22bYj0u/yaAykWnqXJDpd77ublcX6vdDsWLuweeuSNZk4yUxZwtw==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [linux]
'@esbuild/linux-riscv64@0.21.5':
resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==}
engines: {node: '>=12'}
'@esbuild/linux-riscv64@0.23.0':
resolution: {integrity: sha512-HEtaN7Y5UB4tZPeQmgz/UhzoEyYftbMXrBCUjINGjh3uil+rB/QzzpMshz3cNUxqXN7Vr93zzVtpIDL99t9aRw==}
engines: {node: '>=18'}
cpu: [riscv64]
os: [linux]
'@esbuild/linux-s390x@0.21.5':
resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==}
engines: {node: '>=12'}
'@esbuild/linux-s390x@0.23.0':
resolution: {integrity: sha512-WDi3+NVAuyjg/Wxi+o5KPqRbZY0QhI9TjrEEm+8dmpY9Xir8+HE/HNx2JoLckhKbFopW0RdO2D72w8trZOV+Wg==}
engines: {node: '>=18'}
cpu: [s390x]
os: [linux]
'@esbuild/linux-x64@0.21.5':
resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==}
engines: {node: '>=12'}
'@esbuild/linux-x64@0.23.0':
resolution: {integrity: sha512-a3pMQhUEJkITgAw6e0bWA+F+vFtCciMjW/LPtoj99MhVt+Mfb6bbL9hu2wmTZgNd994qTAEw+U/r6k3qHWWaOQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [linux]
'@esbuild/netbsd-x64@0.21.5':
resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==}
engines: {node: '>=12'}
'@esbuild/netbsd-x64@0.23.0':
resolution: {integrity: sha512-cRK+YDem7lFTs2Q5nEv/HHc4LnrfBCbH5+JHu6wm2eP+d8OZNoSMYgPZJq78vqQ9g+9+nMuIsAO7skzphRXHyw==}
engines: {node: '>=18'}
cpu: [x64]
os: [netbsd]
'@esbuild/openbsd-x64@0.21.5':
resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==}
engines: {node: '>=12'}
'@esbuild/openbsd-arm64@0.23.0':
resolution: {integrity: sha512-suXjq53gERueVWu0OKxzWqk7NxiUWSUlrxoZK7usiF50C6ipColGR5qie2496iKGYNLhDZkPxBI3erbnYkU0rQ==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openbsd]
'@esbuild/openbsd-x64@0.23.0':
resolution: {integrity: sha512-6p3nHpby0DM/v15IFKMjAaayFhqnXV52aEmv1whZHX56pdkK+MEaLoQWj+H42ssFarP1PcomVhbsR4pkz09qBg==}
engines: {node: '>=18'}
cpu: [x64]
os: [openbsd]
'@esbuild/sunos-x64@0.21.5':
resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==}
engines: {node: '>=12'}
'@esbuild/sunos-x64@0.23.0':
resolution: {integrity: sha512-BFelBGfrBwk6LVrmFzCq1u1dZbG4zy/Kp93w2+y83Q5UGYF1d8sCzeLI9NXjKyujjBBniQa8R8PzLFAUrSM9OA==}
engines: {node: '>=18'}
cpu: [x64]
os: [sunos]
'@esbuild/win32-arm64@0.21.5':
resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==}
engines: {node: '>=12'}
'@esbuild/win32-arm64@0.23.0':
resolution: {integrity: sha512-lY6AC8p4Cnb7xYHuIxQ6iYPe6MfO2CC43XXKo9nBXDb35krYt7KGhQnOkRGar5psxYkircpCqfbNDB4uJbS2jQ==}
engines: {node: '>=18'}
cpu: [arm64]
os: [win32]
'@esbuild/win32-ia32@0.21.5':
resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==}
engines: {node: '>=12'}
'@esbuild/win32-ia32@0.23.0':
resolution: {integrity: sha512-7L1bHlOTcO4ByvI7OXVI5pNN6HSu6pUQq9yodga8izeuB1KcT2UkHaH6118QJwopExPn0rMHIseCTx1CRo/uNA==}
engines: {node: '>=18'}
cpu: [ia32]
os: [win32]
'@esbuild/win32-x64@0.21.5':
resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==}
engines: {node: '>=12'}
'@esbuild/win32-x64@0.23.0':
resolution: {integrity: sha512-Arm+WgUFLUATuoxCJcahGuk6Yj9Pzxd6l11Zb/2aAuv5kWWvvfhLFo2fni4uSK5vzlUdCGZ/BdV5tH8klj8p8g==}
engines: {node: '>=18'}
cpu: [x64]
os: [win32]
@ -251,18 +269,18 @@ packages:
'@types/ms@0.7.34':
resolution: {integrity: sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==}
'@types/node@20.14.13':
resolution: {integrity: sha512-+bHoGiZb8UiQ0+WEtmph2IWQCjIqg8MDZMAV+ppRRhUZnquF5mQkP/9vpSwJClEiSM/C7fZZExPzfU0vJTyp8w==}
'@types/node@20.14.15':
resolution: {integrity: sha512-Fz1xDMCF/B00/tYSVMlmK7hVeLh7jE5f3B7X1/hmV0MJBwE27KlS7EvD/Yp+z1lm8mVhwV5w+n8jOZG8AfTlKw==}
'@types/node@22.0.0':
resolution: {integrity: sha512-VT7KSYudcPOzP5Q0wfbowyNLaVR8QWUdw+088uFWwfvpY6uCWaXpqV6ieLAu9WBcnTa7H4Z5RLK8I5t2FuOcqw==}
'@types/node@22.1.0':
resolution: {integrity: sha512-AOmuRF0R2/5j1knA3c6G3HOk523Ga+l+ZXltX8SF1+5oqcXijjfTd8fY3XRZqSihEu9XhtQnKYLmkFaoxgsJHw==}
'@types/node@22.2.0':
resolution: {integrity: sha512-bm6EG6/pCpkxDf/0gDNDdtDILMOHgaQBVOJGdwsqClnxA3xL6jtMv76rLBc006RVMWbmaf0xbmom4Z/5o2nRkQ==}
'@types/pg@8.11.6':
resolution: {integrity: sha512-/2WmmBXHLsfRqzfHW7BNZ8SbYzE8OSk7i3WjFYvfgRHj7S1xj+16Je5fUKv3lVdVzk/zn9TXOqf+avFCFIE0yQ==}
'@types/qs@6.9.15':
resolution: {integrity: sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg==}
'@types/semver@7.5.8':
resolution: {integrity: sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==}
@ -311,6 +329,10 @@ packages:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
call-bind@1.0.7:
resolution: {integrity: sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==}
engines: {node: '>= 0.4'}
callsites@3.1.0:
resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
engines: {node: '>=6'}
@ -359,6 +381,10 @@ packages:
create-require@1.1.1:
resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==}
data-uri-to-buffer@4.0.1:
resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==}
engines: {node: '>= 12'}
date-fns@3.6.0:
resolution: {integrity: sha512-fRHTG8g/Gif+kSh50gaGEdToemgfj74aRX3swtiouboip5JDLAyDE9F11nHMIcvOaXeOC6D7SpNhi7uFyB7Uww==}
@ -376,6 +402,10 @@ packages:
supports-color:
optional: true
define-data-property@1.1.4:
resolution: {integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==}
engines: {node: '>= 0.4'}
diff@4.0.2:
resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==}
engines: {node: '>=0.3.1'}
@ -390,9 +420,17 @@ packages:
error-ex@1.3.2:
resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==}
esbuild@0.21.5:
resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==}
engines: {node: '>=12'}
es-define-property@1.0.0:
resolution: {integrity: sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==}
engines: {node: '>= 0.4'}
es-errors@1.3.0:
resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==}
engines: {node: '>= 0.4'}
esbuild@0.23.0:
resolution: {integrity: sha512-1lvV17H2bMYda/WaFb2jLPeHU3zml2k4/yagNMG8Q/YtfMjCwEUZa2eXXMgZTVSL5q1n4H7sQ0X6CdJDqqeCFA==}
engines: {node: '>=18'}
hasBin: true
escalade@3.1.2:
@ -403,19 +441,34 @@ packages:
resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==}
engines: {node: '>=0.8.0'}
fetch-blob@3.2.0:
resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==}
engines: {node: ^12.20 || >= 14.13}
fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'}
formdata-polyfill@4.0.10:
resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==}
engines: {node: '>=12.20.0'}
fsevents@2.3.3:
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
function-bind@1.1.2:
resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==}
get-caller-file@2.0.5:
resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==}
engines: {node: 6.* || 8.* || >= 10.*}
get-intrinsic@1.2.4:
resolution: {integrity: sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==}
engines: {node: '>= 0.4'}
get-tsconfig@4.7.6:
resolution: {integrity: sha512-ZAqrLlu18NbDdRaHq+AKXzAmqIUPswPWKUchfytdAjiRFnCe5ojG2bstg6mRiZabkKfCoL/e98pbBELIV/YCeA==}
@ -423,6 +476,9 @@ packages:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
gopd@1.0.1:
resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==}
graphile-config@0.0.1-beta.9:
resolution: {integrity: sha512-7vNxXZ24OAgXxDKXYi9JtgWPMuNbBL3057Yf32Ux+/rVP4+EePgySCc+NNnn0tORi8qwqVreN8bdWqGIcSwNXg==}
engines: {node: '>=16'}
@ -440,6 +496,21 @@ packages:
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
engines: {node: '>=8'}
has-property-descriptors@1.0.2:
resolution: {integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==}
has-proto@1.0.3:
resolution: {integrity: sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==}
engines: {node: '>= 0.4'}
has-symbols@1.0.3:
resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==}
engines: {node: '>= 0.4'}
hasown@2.0.2:
resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==}
engines: {node: '>= 0.4'}
ignore-by-default@1.0.1:
resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==}
@ -501,6 +572,14 @@ packages:
ms@2.1.2:
resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==}
node-domexception@1.0.0:
resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==}
engines: {node: '>=10.5.0'}
node-fetch@3.3.2:
resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==}
engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
nodemon@3.1.4:
resolution: {integrity: sha512-wjPBbFhtpJwmIeY2yP7QF+UKzPfltVGtfce1g/bB15/8vCGZj8uxD62b/b9M9/WVgme0NZudpownKN+c0plXlQ==}
engines: {node: '>=10'}
@ -510,6 +589,10 @@ packages:
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
engines: {node: '>=0.10.0'}
object-inspect@1.13.2:
resolution: {integrity: sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==}
engines: {node: '>= 0.4'}
obuf@1.1.2:
resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==}
@ -616,6 +699,10 @@ packages:
pstree.remy@1.1.8:
resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==}
qs@6.13.0:
resolution: {integrity: sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==}
engines: {node: '>=0.6'}
readdirp@3.6.0:
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
engines: {node: '>=8.10.0'}
@ -636,6 +723,14 @@ packages:
engines: {node: '>=10'}
hasBin: true
set-function-length@1.2.2:
resolution: {integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==}
engines: {node: '>= 0.4'}
side-channel@1.0.6:
resolution: {integrity: sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==}
engines: {node: '>= 0.4'}
simple-update-notifier@2.0.0:
resolution: {integrity: sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==}
engines: {node: '>=10'}
@ -682,11 +777,11 @@ packages:
'@swc/wasm':
optional: true
tslib@2.6.2:
resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==}
tslib@2.6.3:
resolution: {integrity: sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==}
tsx@4.16.2:
resolution: {integrity: sha512-C1uWweJDgdtX2x600HjaFaucXTilT7tgUZHbOE4+ypskZ1OP8CRCSDkCxG6Vya9EwaFIVagWwpaVAn5wzypaqQ==}
tsx@4.17.0:
resolution: {integrity: sha512-eN4mnDA5UMKDt4YZixo9tBioibaMBpoxBkD+rIPAjVmYERSG0/dWEY1CEFuV89CgASlKL499q8AhmkMnnjtOJg==}
engines: {node: '>=18.0.0'}
hasBin: true
@ -701,15 +796,16 @@ packages:
undici-types@5.26.5:
resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==}
undici-types@6.11.1:
resolution: {integrity: sha512-mIDEX2ek50x0OlRgxryxsenE5XaQD4on5U2inY7RApK3SOJpofyw7uW2AyfMKkhAxXIceo2DeWGVGwyvng1GNQ==}
undici-types@6.13.0:
resolution: {integrity: sha512-xtFJHudx8S2DSoujjMd1WeWvn7KKWFRESZTMeL1RptAYERu29D6jphMjjY+vn96jvN3kVPDNxU/E13VTaXj6jg==}
v8-compile-cache-lib@3.0.1:
resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==}
web-streams-polyfill@3.3.3:
resolution: {integrity: sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==}
engines: {node: '>= 8'}
wrap-ansi@7.0.0:
resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==}
engines: {node: '>=10'}
@ -796,73 +892,76 @@ snapshots:
dependencies:
'@discordeno/types': 19.0.0-next.746f0a9
'@esbuild/aix-ppc64@0.21.5':
'@esbuild/aix-ppc64@0.23.0':
optional: true
'@esbuild/android-arm64@0.21.5':
'@esbuild/android-arm64@0.23.0':
optional: true
'@esbuild/android-arm@0.21.5':
'@esbuild/android-arm@0.23.0':
optional: true
'@esbuild/android-x64@0.21.5':
'@esbuild/android-x64@0.23.0':
optional: true
'@esbuild/darwin-arm64@0.21.5':
'@esbuild/darwin-arm64@0.23.0':
optional: true
'@esbuild/darwin-x64@0.21.5':
'@esbuild/darwin-x64@0.23.0':
optional: true
'@esbuild/freebsd-arm64@0.21.5':
'@esbuild/freebsd-arm64@0.23.0':
optional: true
'@esbuild/freebsd-x64@0.21.5':
'@esbuild/freebsd-x64@0.23.0':
optional: true
'@esbuild/linux-arm64@0.21.5':
'@esbuild/linux-arm64@0.23.0':
optional: true
'@esbuild/linux-arm@0.21.5':
'@esbuild/linux-arm@0.23.0':
optional: true
'@esbuild/linux-ia32@0.21.5':
'@esbuild/linux-ia32@0.23.0':
optional: true
'@esbuild/linux-loong64@0.21.5':
'@esbuild/linux-loong64@0.23.0':
optional: true
'@esbuild/linux-mips64el@0.21.5':
'@esbuild/linux-mips64el@0.23.0':
optional: true
'@esbuild/linux-ppc64@0.21.5':
'@esbuild/linux-ppc64@0.23.0':
optional: true
'@esbuild/linux-riscv64@0.21.5':
'@esbuild/linux-riscv64@0.23.0':
optional: true
'@esbuild/linux-s390x@0.21.5':
'@esbuild/linux-s390x@0.23.0':
optional: true
'@esbuild/linux-x64@0.21.5':
'@esbuild/linux-x64@0.23.0':
optional: true
'@esbuild/netbsd-x64@0.21.5':
'@esbuild/netbsd-x64@0.23.0':
optional: true
'@esbuild/openbsd-x64@0.21.5':
'@esbuild/openbsd-arm64@0.23.0':
optional: true
'@esbuild/sunos-x64@0.21.5':
'@esbuild/openbsd-x64@0.23.0':
optional: true
'@esbuild/win32-arm64@0.21.5':
'@esbuild/sunos-x64@0.23.0':
optional: true
'@esbuild/win32-ia32@0.21.5':
'@esbuild/win32-arm64@0.23.0':
optional: true
'@esbuild/win32-x64@0.21.5':
'@esbuild/win32-ia32@0.23.0':
optional: true
'@esbuild/win32-x64@0.23.0':
optional: true
'@graphile/logger@0.2.0': {}
@ -890,28 +989,26 @@ snapshots:
'@types/interpret@1.1.3':
dependencies:
'@types/node': 22.0.0
'@types/node': 22.2.0
'@types/ms@0.7.34': {}
'@types/node@20.14.13':
'@types/node@20.14.15':
dependencies:
undici-types: 5.26.5
'@types/node@22.0.0':
dependencies:
undici-types: 6.11.1
'@types/node@22.1.0':
'@types/node@22.2.0':
dependencies:
undici-types: 6.13.0
'@types/pg@8.11.6':
dependencies:
'@types/node': 22.0.0
'@types/node': 22.2.0
pg-protocol: 1.6.1
pg-types: 4.0.2
'@types/qs@6.9.15': {}
'@types/semver@7.5.8': {}
acorn-walk@8.3.3:
@ -952,6 +1049,14 @@ snapshots:
dependencies:
fill-range: 7.1.1
call-bind@1.0.7:
dependencies:
es-define-property: 1.0.0
es-errors: 1.3.0
function-bind: 1.1.2
get-intrinsic: 1.2.4
set-function-length: 1.2.2
callsites@3.1.0: {}
chalk@2.4.2:
@ -1008,6 +1113,8 @@ snapshots:
create-require@1.1.1: {}
data-uri-to-buffer@4.0.1: {}
date-fns@3.6.0: {}
dd-cache-proxy@2.1.1(@discordeno/bot@19.0.0-next.746f0a9):
@ -1020,6 +1127,12 @@ snapshots:
optionalDependencies:
supports-color: 5.5.0
define-data-property@1.1.4:
dependencies:
es-define-property: 1.0.0
es-errors: 1.3.0
gopd: 1.0.1
diff@4.0.2: {}
dotenv@16.4.5: {}
@ -1030,45 +1143,71 @@ snapshots:
dependencies:
is-arrayish: 0.2.1
esbuild@0.21.5:
es-define-property@1.0.0:
dependencies:
get-intrinsic: 1.2.4
es-errors@1.3.0: {}
esbuild@0.23.0:
optionalDependencies:
'@esbuild/aix-ppc64': 0.21.5
'@esbuild/android-arm': 0.21.5
'@esbuild/android-arm64': 0.21.5
'@esbuild/android-x64': 0.21.5
'@esbuild/darwin-arm64': 0.21.5
'@esbuild/darwin-x64': 0.21.5
'@esbuild/freebsd-arm64': 0.21.5
'@esbuild/freebsd-x64': 0.21.5
'@esbuild/linux-arm': 0.21.5
'@esbuild/linux-arm64': 0.21.5
'@esbuild/linux-ia32': 0.21.5
'@esbuild/linux-loong64': 0.21.5
'@esbuild/linux-mips64el': 0.21.5
'@esbuild/linux-ppc64': 0.21.5
'@esbuild/linux-riscv64': 0.21.5
'@esbuild/linux-s390x': 0.21.5
'@esbuild/linux-x64': 0.21.5
'@esbuild/netbsd-x64': 0.21.5
'@esbuild/openbsd-x64': 0.21.5
'@esbuild/sunos-x64': 0.21.5
'@esbuild/win32-arm64': 0.21.5
'@esbuild/win32-ia32': 0.21.5
'@esbuild/win32-x64': 0.21.5
'@esbuild/aix-ppc64': 0.23.0
'@esbuild/android-arm': 0.23.0
'@esbuild/android-arm64': 0.23.0
'@esbuild/android-x64': 0.23.0
'@esbuild/darwin-arm64': 0.23.0
'@esbuild/darwin-x64': 0.23.0
'@esbuild/freebsd-arm64': 0.23.0
'@esbuild/freebsd-x64': 0.23.0
'@esbuild/linux-arm': 0.23.0
'@esbuild/linux-arm64': 0.23.0
'@esbuild/linux-ia32': 0.23.0
'@esbuild/linux-loong64': 0.23.0
'@esbuild/linux-mips64el': 0.23.0
'@esbuild/linux-ppc64': 0.23.0
'@esbuild/linux-riscv64': 0.23.0
'@esbuild/linux-s390x': 0.23.0
'@esbuild/linux-x64': 0.23.0
'@esbuild/netbsd-x64': 0.23.0
'@esbuild/openbsd-arm64': 0.23.0
'@esbuild/openbsd-x64': 0.23.0
'@esbuild/sunos-x64': 0.23.0
'@esbuild/win32-arm64': 0.23.0
'@esbuild/win32-ia32': 0.23.0
'@esbuild/win32-x64': 0.23.0
escalade@3.1.2: {}
escape-string-regexp@1.0.5: {}
fetch-blob@3.2.0:
dependencies:
node-domexception: 1.0.0
web-streams-polyfill: 3.3.3
fill-range@7.1.1:
dependencies:
to-regex-range: 5.0.1
formdata-polyfill@4.0.10:
dependencies:
fetch-blob: 3.2.0
fsevents@2.3.3:
optional: true
function-bind@1.1.2: {}
get-caller-file@2.0.5: {}
get-intrinsic@1.2.4:
dependencies:
es-errors: 1.3.0
function-bind: 1.1.2
has-proto: 1.0.3
has-symbols: 1.0.3
hasown: 2.0.2
get-tsconfig@4.7.6:
dependencies:
resolve-pkg-maps: 1.0.0
@ -1077,16 +1216,20 @@ snapshots:
dependencies:
is-glob: 4.0.3
gopd@1.0.1:
dependencies:
get-intrinsic: 1.2.4
graphile-config@0.0.1-beta.9:
dependencies:
'@types/interpret': 1.1.3
'@types/node': 20.14.13
'@types/node': 20.14.15
'@types/semver': 7.5.8
chalk: 4.1.2
debug: 4.3.6(supports-color@5.5.0)
interpret: 3.1.1
semver: 7.6.3
tslib: 2.6.2
tslib: 2.6.3
yargs: 17.7.2
transitivePeerDependencies:
- supports-color
@ -1100,7 +1243,7 @@ snapshots:
graphile-config: 0.0.1-beta.9
json5: 2.2.3
pg: 8.12.0
tslib: 2.6.2
tslib: 2.6.3
yargs: 17.7.2
transitivePeerDependencies:
- pg-native
@ -1111,6 +1254,18 @@ snapshots:
has-flag@4.0.0: {}
has-property-descriptors@1.0.2:
dependencies:
es-define-property: 1.0.0
has-proto@1.0.3: {}
has-symbols@1.0.3: {}
hasown@2.0.2:
dependencies:
function-bind: 1.1.2
ignore-by-default@1.0.1: {}
import-fresh@3.3.0:
@ -1156,6 +1311,14 @@ snapshots:
ms@2.1.2: {}
node-domexception@1.0.0: {}
node-fetch@3.3.2:
dependencies:
data-uri-to-buffer: 4.0.1
fetch-blob: 3.2.0
formdata-polyfill: 4.0.10
nodemon@3.1.4:
dependencies:
chokidar: 3.6.0
@ -1171,6 +1334,8 @@ snapshots:
normalize-path@3.0.0: {}
object-inspect@1.13.2: {}
obuf@1.1.2: {}
parent-module@1.0.1:
@ -1263,6 +1428,10 @@ snapshots:
pstree.remy@1.1.8: {}
qs@6.13.0:
dependencies:
side-channel: 1.0.6
readdirp@3.6.0:
dependencies:
picomatch: 2.3.1
@ -1275,6 +1444,22 @@ snapshots:
semver@7.6.3: {}
set-function-length@1.2.2:
dependencies:
define-data-property: 1.1.4
es-errors: 1.3.0
function-bind: 1.1.2
get-intrinsic: 1.2.4
gopd: 1.0.1
has-property-descriptors: 1.0.2
side-channel@1.0.6:
dependencies:
call-bind: 1.0.7
es-errors: 1.3.0
get-intrinsic: 1.2.4
object-inspect: 1.13.2
simple-update-notifier@2.0.0:
dependencies:
semver: 7.6.3
@ -1305,14 +1490,14 @@ snapshots:
touch@3.1.1: {}
ts-node@10.9.2(@types/node@22.1.0)(typescript@5.5.4):
ts-node@10.9.2(@types/node@22.2.0)(typescript@5.5.4):
dependencies:
'@cspotcode/source-map-support': 0.8.1
'@tsconfig/node10': 1.0.11
'@tsconfig/node12': 1.0.11
'@tsconfig/node14': 1.0.3
'@tsconfig/node16': 1.0.4
'@types/node': 22.1.0
'@types/node': 22.2.0
acorn: 8.12.1
acorn-walk: 8.3.3
arg: 4.1.3
@ -1323,11 +1508,11 @@ snapshots:
v8-compile-cache-lib: 3.0.1
yn: 3.1.1
tslib@2.6.2: {}
tslib@2.6.3: {}
tsx@4.16.2:
tsx@4.17.0:
dependencies:
esbuild: 0.21.5
esbuild: 0.23.0
get-tsconfig: 4.7.6
optionalDependencies:
fsevents: 2.3.3
@ -1338,12 +1523,12 @@ snapshots:
undici-types@5.26.5: {}
undici-types@6.11.1: {}
undici-types@6.13.0: {}
v8-compile-cache-lib@3.0.1: {}
web-streams-polyfill@3.3.3: {}
wrap-ansi@7.0.0:
dependencies:
ansi-styles: 4.3.0

View File

@ -1,12 +1,19 @@
-import { createBot, Intents, type Bot } from '@discordeno/bot'
+import { createBot, createGatewayManager, createRestManager, Intents, type Bot } from '@discordeno/bot'
import { createProxyCache, } from 'dd-cache-proxy';
import { configs } from './config.ts'
// not sure I need this.
// @see https://github.com/discordeno/discordeno/blob/352887c215cc9d93d7f1fa9c8589e66f47ffb3ea/packages/bot/src/bot.ts#L74
// const getSessionInfoHandler = async () => {
// return await bot.rest.getGatewayBot()
// }
export const bot = createProxyCache(
  createBot({
    token: configs.token,
-    intents: Intents.Guilds | Intents.GuildMessages
+    intents: Intents.Guilds | Intents.GuildMessages,
+    rest: createRestManager({ token: configs.token, applicationId: configs.discordApplicationId }),
+    gateway: createGatewayManager({ token: configs.token })
  }),
  {
    desiredProps: {
@ -21,9 +28,6 @@ export const bot = createProxyCache(
},
)
-// @todo figure out where this code belongs
-// gateway.resharding.getSessionInfo = async () => { // insert code here to fetch getSessionInfo from rest process. }
// Setup desired properties
bot.transformers.desiredProperties.interaction.id = true
bot.transformers.desiredProperties.interaction.type = true
@ -32,6 +36,7 @@ bot.transformers.desiredProperties.interaction.token = true
bot.transformers.desiredProperties.interaction.guildId = true
bot.transformers.desiredProperties.interaction.member = true
bot.transformers.desiredProperties.interaction.message = true
bot.transformers.desiredProperties.interaction.user = true
bot.transformers.desiredProperties.message.activity = true
bot.transformers.desiredProperties.message.id = true

View File

@ -0,0 +1,14 @@
import { EventEmitter } from 'node:events'
import type { Interaction } from '@discordeno/bot'
export class ItemCollector extends EventEmitter {
  onItem(callback: (item: Interaction) => unknown): void {
    this.on('item', callback)
  }
  collect(item: Interaction): void {
    this.emit('item', item)
  }
}
export default ItemCollector
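Note: nothing in this diff consumes ItemCollector yet, so this is only a usage sketch based on its EventEmitter surface:

```ts
import type { Interaction } from '@discordeno/bot'
import ItemCollector from './collector.ts'

// Sketch: subscribe first, then feed interactions in as they arrive.
const collector = new ItemCollector()
collector.onItem((interaction: Interaction) => {
  console.log(`collected interaction id=${interaction.id}`)
})
// elsewhere, e.g. inside an event handler:
// collector.collect(interaction)
```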

View File

@ -0,0 +1,51 @@
import { ApplicationCommandTypes, type Interaction } from '@discordeno/bot'
import type { Status } from '@futureporn/types'
import { createCommand } from '../commands.ts'
import { bot } from '../bot.ts'
import { configs } from '../config.ts'
createCommand({
  name: 'cancel',
  description: 'Cancel a recording',
  type: ApplicationCommandTypes.ChatInput,
  async execute(interaction: Interaction) {
    bot.logger.info(`cancel command is executing now.`)
    const message = interaction.message
    if (!message) return bot.logger.error('interaction.message was missing');
    if (!message.id) return bot.logger.error(`interaction.message.id was missing`);
    const url = `${configs.postgrestUrl}/streams?discord_message_id=eq.${message.id}`;
    const options = {
      method: 'PATCH',
      headers: {
        'Content-Type': 'application/json',
        'Accept': 'application/json',
        'Prefer': 'return=representation',
        'Authorization': `Bearer ${configs.automationUserJwt}`
      },
      body: JSON.stringify({
        is_recording_aborted: true,
        status: 'aborted' as Status
      })
    };
    let streamId: string;
    try {
      const response = await fetch(url, options);
      bot.logger.info(`response.ok=${response.ok}`)
      const data: any = await response.json();
      streamId = data?.at(0)?.id
      bot.logger.info(interaction.user);
      interaction.respond(`<@${interaction.user.id}> cancelled recording on Stream ${streamId}`, { isPrivate: false })
      bot.logger.info(`Cancel command successfully ran on message.id=${message.id}`)
    } catch (error) {
      bot.logger.error('error encountered while cancelling job')
      bot.logger.error(error);
    }
  },
})

View File

@ -4,31 +4,33 @@ import {
type Interaction,
EmbedsBuilder,
type InteractionCallbackData,
+logger,
} from '@discordeno/bot'
import { createCommand } from '../commands.ts'
import { configs } from '../config.ts'
+import type { Stream } from '@futureporn/types'
-async function createRecordInDatabase(url: string, discordMessageId: string) {
-  const record = {
+async function createStreamInDatabase(url: string, discordMessageId: string) {
+  const streamPayload = {
    url,
-    recording_state: 'pending',
-    discord_message_id: discordMessageId,
-    file_size: 0
+    status: 'pending_recording',
+    discord_message_id: discordMessageId
  }
-  const res = await fetch(`${configs.postgrestUrl}/records`, {
+  const res = await fetch(`${configs.postgrestUrl}/streams`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
+      'Prefer': 'return=headers-only',
      'Authorization': `Bearer ${configs.automationUserJwt}`,
-      'Prefer': 'return=headers-only'
    },
-    body: JSON.stringify(record)
+    body: JSON.stringify(streamPayload)
  })
  if (!res.ok) {
    const status = res.status
    const statusText = res.statusText
-    const msg = `fetch failed to create recording record in database. status=${status}, statusText=${statusText}`
+    const body = await res.text()
+    const msg = `failed to create stream in database. status=${status}, statusText=${statusText}, body=${body}`
    console.error(msg)
    throw new Error(msg)
  }
@ -37,6 +39,47 @@ async function createRecordInDatabase(url: string, discordMessageId: string) {
return parseInt(id)
}
async function getUrlFromMessage(interaction: Interaction): Promise<string|null> {
  const messageId = interaction.message?.id
  const pgRequestUrl = `${configs.postgrestUrl}/streams?discord_message_id=eq.${messageId}`
  logger.info(`pgRequestUrl=${pgRequestUrl}`)
  const requestOptions = {
    method: 'GET',
    headers: {
      'Authorization': `Bearer ${configs.automationUserJwt}`,
      'Content-Type': 'application/json',
      'Prefer': 'return=representation'
    }
  }
  try {
    const res = await fetch(pgRequestUrl, requestOptions)
    if (!res.ok) {
      const body = await res.json()
      logger.error(body)
      throw new Error(`Problem during getOptionsMessage. res.status=${res.status}, res.statusText=${res.statusText}`)
    }
    const json = await res.json() as Stream[]
    const stream = json[0]
    const url = stream?.url
    if (!url) return null
    else return url
  } catch (e) {
    logger.error(e)
    throw e
  }
}

async function getUrlFromData(interaction: Interaction): Promise<string|null> {
  if (!interaction) throw new Error('interaction arg passed to getOptions was missing');
  const url = (interaction.data?.options?.find(o => o.name === 'url'))?.value
  if (!url) return null;
  return String(url)
}
createCommand({
name: 'record',
description: 'Record a livestream.',
@ -49,43 +92,50 @@ createCommand({
},
],
async execute(interaction: Interaction) {
+    logger.info('logger.info hello? record command is running now`)')
    await interaction.defer()
-    // console.log('interation.data as follows')
-    // console.log(interaction.data)
-    const options = interaction.data?.options
-    if (!options) throw new Error(`interaction options was undefined. it's expected to be an array of options.`);
-    const urlOption = options.find((o) => o.name === 'url')
-    if (!urlOption) throw new Error(`url option was missing from interaction data`);
-    const url = ''+urlOption.value
-    if (!url) throw new Error(`url was missing from interaction data options`);
+    try {
+      // The url can come from one of two places.
+      // interaction.data.options, or interaction.message?.embeds
+      let url
+      url = await getUrlFromData(interaction)
+      logger.info(`getUrlFromData url=${url}`)
+      if (!url) {
+        url = await getUrlFromMessage(interaction)
+        logger.info(`getUrlFromMessage url=${url}`)
+      }
+      logger.info(`url=${url}`)
+      if (!url) throw new Error('Neither the interaction data nor the message embed contained a URL.');
-    // respond to the interaction and get a message ID which we will then add to the database Record
-    const embeds = new EmbedsBuilder()
-      .setTitle(`Record ⋅`)
-      .setDescription('Waiting for a worker to start the job.')
-      .setFields([
-        { name: 'Status', value: 'Pending', inline: true },
-        { name: 'Filesize', value: '0 bytes', inline: true},
-        { name: 'URL', value: url, inline: false }
-      ])
-      .setColor('#808080')
-    const response: InteractionCallbackData = { embeds }
-    const message = await interaction.edit(response)
+      // respond to the interaction and get a message ID which we will then add to the database Record
+      const embeds = new EmbedsBuilder()
+        .setTitle(`Stream ⋅`)
+        .setDescription('Waiting for a worker to start the job.')
+        .setFields([
+          { name: 'Status', value: 'Pending', inline: true },
+          { name: 'URL', value: url, inline: true }
+        ])
+        .setColor('#808080')
+      const response: InteractionCallbackData = { embeds }
+      const message = await interaction.edit(response)
    // console.log('deferred, interaction message is as follows')
    // console.log(message)
-    if (!message?.id) {
-      const msg = `message.id was empty, ruh roh raggy`
-      console.error(msg)
-      throw new Error(msg)
+      if (!message?.id) {
+        const msg = `message.id was empty, ruh roh raggy`
+        console.error(msg)
+        throw new Error(msg)
      }
+      // @todo create stream in db
+      const stream = await createStreamInDatabase(url, message.id.toString())
+      logger.info(stream)
+    } catch (e) {
+      await interaction.edit(`Record failed due to the following error.\n${e}`)
+    }
-    // @todo create record in db
-    const record = await createRecordInDatabase(url, message.id.toString())
-    // console.log(record)
}
})

View File

@ -0,0 +1,20 @@
import { ApplicationCommandTypes, type Interaction } from '@discordeno/bot'
import { createCommand } from '../commands.ts'
import { bot } from '../bot.ts'
createCommand({
  name: 'yeah',
  description: 'Yeah! a message',
  type: ApplicationCommandTypes.ChatInput,
  async execute(interaction: Interaction) {
    // interaction.message.id
    const message = interaction.message
    if (!message) return bot.logger.error('interaction.message was missing');
    if (!message.id) return bot.logger.error(`interaction.message.id was missing`);
    interaction.respond('https://futureporn-b2.b-cdn.net/yeah_nobg.png', { isPrivate: true })
    bot.logger.info(`Yeah! command successfully ran with message.id=${message.id}`)
  },
})

View File

@ -0,0 +1 @@
Handlers for Message Component interactions such as button presses

View File

@ -4,6 +4,7 @@ if (!process.env.POSTGREST_URL) throw new Error('Missing POSTGREST_URL env var')
if (!process.env.DISCORD_TOKEN) throw new Error('Missing DISCORD_TOKEN env var');
if (!process.env.DISCORD_CHANNEL_ID) throw new Error("DISCORD_CHANNEL_ID was missing from env");
if (!process.env.DISCORD_GUILD_ID) throw new Error("DISCORD_GUILD_ID was missing from env");
if (!process.env.DISCORD_APPLICATION_ID) throw new Error('DISCORD_APPLICATION_ID was missing from env');
if (!process.env.AUTOMATION_USER_JWT) throw new Error('Missing AUTOMATION_USER_JWT env var');
const token = process.env.DISCORD_TOKEN!
const postgrestUrl = process.env.POSTGREST_URL!
@ -11,8 +12,8 @@ const discordChannelId = process.env.DISCORD_CHANNEL_ID!
const discordGuildId = process.env.DISCORD_GUILD_ID!
const automationUserJwt = process.env.AUTOMATION_USER_JWT!
const connectionString = process.env.WORKER_CONNECTION_STRING!
const discordApplicationId = process.env.DISCORD_APPLICATION_ID!
console.log(`hello i am configs and configs.connectionString=${connectionString}`)
export interface Config {
@ -22,6 +23,7 @@ export interface Config {
discordGuildId: string;
discordChannelId: string;
connectionString: string;
discordApplicationId: string;
}
@ -32,4 +34,5 @@ export const configs: Config = {
discordGuildId,
discordChannelId,
connectionString,
discordApplicationId,
}

View File

@ -1,22 +1,50 @@
import { InteractionTypes, commandOptionsParser, type Interaction } from '@discordeno/bot'
import { bot } from '../bot.ts'
-import { commands } from '../commands.ts'
+import { commands, type Command } from '../commands.ts'
+import ItemCollector from '../collector.ts'
-bot.events.interactionCreate = async (interaction: Interaction) => {
-  if (!interaction.data || interaction.type !== InteractionTypes.ApplicationCommand) return
-  const command = commands.get(interaction.data.name)
-  if (!command) {
-    bot.logger.error(`Command ${interaction.data.name} not found`)
-    return
-  }
+export const collectors = new Set<ItemCollector>()
+const execCommand = async function execCommand(command: Command, interaction: Interaction) {
  const options = commandOptionsParser(interaction)
  try {
    await command.execute(interaction, options)
  } catch (error) {
    bot.logger.error(`There was an error running the ${command.name} command.`, error)
  }
}
+const handleApplicationCommand = async function handleApplicationCommand (interaction: Interaction) {
+  if (!interaction.data) return
+  const command = commands.get(interaction.data.name)
+  if (!command) {
+    bot.logger.error(`Command ${interaction.data.name} (customId=${interaction.data.customId}) not found`)
+    return
+  }
+  execCommand(command, interaction)
+}
+const handleMessageComponent = async function handleMessageComponent (interaction: Interaction) {
+  if (!interaction.data) return
+  if (!interaction.data.customId) return
+  const command = commands.get(interaction.data.customId)
+  if (!command) return bot.logger.error(`Command ${interaction.data.customId} not found`);
+  execCommand(command, interaction)
+}
+bot.events.interactionCreate = async (interaction: Interaction) => {
+  if (interaction.type === InteractionTypes.ApplicationCommand) {
+    await handleApplicationCommand(interaction)
+  } else if (interaction.type === InteractionTypes.MessageComponent) {
+    await handleMessageComponent(interaction)
+  } else {
+    bot.logger.info(`received interaction of type=${interaction.type}`)
+  }
+}
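Note: message-component routing reuses the commands map, keyed by the component's customId, so a button whose customId equals a command name is dispatched to that command's execute(). A sketch of a button that would route to the cancel command above (discordeno's component enums are assumed here, not shown in this diff):

```ts
import { ButtonStyles, MessageComponentTypes } from '@discordeno/bot'

// Sketch: a button whose customId matches the `cancel` command name,
// so handleMessageComponent routes presses to that command's execute().
const cancelRow = {
  type: MessageComponentTypes.ActionRow,
  components: [
    {
      type: MessageComponentTypes.Button,
      customId: 'cancel', // must equal a key in the commands map
      label: 'Cancel recording',
      style: ButtonStyles.Danger,
    },
  ],
}
```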

View File

@ -1,5 +1,5 @@
-import updateDiscordMessage from './tasks/update_discord_message.js'
+import update_discord_message from './tasks/update_discord_message.js'
import { type WorkerUtils, type RunnerOptions, run } from 'graphile-worker'
import { bot } from './bot.ts'
import type { Interaction } from '@discordeno/bot'
@ -26,15 +26,15 @@ async function setupGraphileWorker() {
taskDirectory: join(__dirname, 'tasks')
},
};
-console.log('worker preset as follows')
-console.log(preset)
+// console.log('worker preset as follows')
+// console.log(preset)
const runnerOptions: RunnerOptions = {
preset
// concurrency: 3,
// connectionString: configs.connectionString,
// taskDirectory: join(__dirname, 'tasks'),
// taskList: {
-// 'update_discord_message': updateDiscordMessage
+// 'update_discord_message': update_discord_message
// }
}

View File

@ -1,9 +0,0 @@
import { Client, Events, type Interaction } from 'discord.js';
export default {
name: Events.ClientReady,
once: true,
execute(client: Client) {
console.log(`Ready! Logged in as ${client?.user?.tag}`);
}
}

View File

@ -1,18 +0,0 @@
import { type CreateApplicationCommand, type CreateSlashApplicationCommand, type Interaction } from '@discordeno/bot'
import record from '../commands/record.ts'
import donger from '../commands/donger.ts'
export const commands = new Map<string, Command>(
[
record,
donger
].map(cmd => [cmd.name, cmd]),
)
export default commands
export interface Command extends CreateSlashApplicationCommand {
/** Handler that will be executed when this command is triggered */
execute(interaction: Interaction, args: Record<string, any>): Promise<any>
}

View File

@ -1,25 +0,0 @@
import 'dotenv/config';
import { REST, Routes } from 'discord.js';
if (!process.env.DISCORD_APPLICATION_ID) throw new Error('DISCORD_APPLICATION_ID was undefined in env');
if (!process.env.DISCORD_GUILD_ID) throw new Error('DISCORD_GUILD_ID was undefined in env');
if (!process.env.DISCORD_TOKEN) throw new Error('DISCORD_TOKEN was undefined in env');
// Construct and prepare an instance of the REST module
const rest = new REST({ version: '9' }).setToken(process.env.DISCORD_TOKEN);
export default async function deployCommands(commands: any[]): Promise<void> {
try {
// console.log(`Started refreshing ${commands.length} application (/) commands.`);
// and deploy your commands!
const data: any = await rest.put(
Routes.applicationGuildCommands(process.env.DISCORD_APPLICATION_ID!, process.env.DISCORD_GUILD_ID!),
{ body: commands },
);
// console.log(`Successfully reloaded ${data.length} application (/) commands.`);
} catch (error) {
// And of course, make sure you catch and log any errors!
console.error(error);
}
}

View File

@ -1,8 +0,0 @@
import type { EventHandlers } from '@discordeno/bot'
import { event as interactionCreateEvent } from './interactionCreate.ts.old'
export const events = {
interactionCreate: interactionCreateEvent,
} as Partial<EventHandlers>
export default events

View File

@ -1,71 +0,0 @@
import { Events, type Interaction, Client, Collection } from 'discord.js';
import type { WorkerUtils } from 'graphile-worker';
interface ExtendedClient extends Client {
commands: Collection<string, any>
}
export default {
name: Events.InteractionCreate,
once: false,
async execute(interaction: Interaction, workerUtils: WorkerUtils) {
// if (!interaction.isChatInputCommand()) return;
// console.log(interaction.client)
// const command = interaction.client.commands.get(interaction.commandName);
if (interaction.isButton()) {
console.log(`the interaction is a button type with customId=${interaction.customId}, message.id=${interaction.message.id}, user=${interaction.user.id} (${interaction.user.globalName})`)
if (interaction.customId === 'stop') {
interaction.reply(`Stopped by @${interaction.user.id}`)
workerUtils.addJob('stop_recording', { discordMessageId: interaction.message.id, userId: interaction.user.id }, { maxAttempts: 1 })
} else if (interaction.customId === 'retry') {
interaction.reply(`Retried by @${interaction.user.id}`)
workerUtils.addJob('start_recording', { discordMessageId: interaction.message.id, userId: interaction.user.id }, { maxAttempts: 3 })
} else {
console.error(`this button's customId=${interaction.customId} did not match one of the known customIds`)
}
} else if (interaction.isChatInputCommand()) {
console.log(`the interaction is a ChatInputCommandInteraction with commandName=${interaction.commandName}, user=${interaction.user.id} (${interaction.user.globalName})`)
const client = interaction.client as ExtendedClient
const command = client.commands.get(interaction.commandName);
if (!command) {
console.error(`No command matching ${interaction.commandName} was found.`);
return;
}
command.execute({ interaction, workerUtils })
}
},
};
// const { Events } = require('discord.js');
// module.exports = {
// name: Events.ClientReady,
// once: true,
// execute(client) {
// console.log(`Ready! Logged in as ${client.user.tag}`);
// },
// };
// client.on(Events.InteractionCreate, interaction => {
// if (interaction.isChatInputCommand()) {
// const { commandName } = interaction;
// console.log(`Received interaction with commandName=${commandName}`)
// const cmd = commands.find((c) => c.data.name === commandName)
// if (!cmd) {
// console.log(`no command handler matches commandName=${commandName}`)
// return;
// }
// cmd.execute({ interaction, workerUtils })
// } else {
// // probably a ButtonInteraction
// console.log(interaction)
// }
// });

View File

@ -1,31 +0,0 @@
import * as path from 'node:path';
import * as fs from 'node:fs';
import { dirname } from 'node:path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
export default async function loadCommands(): Promise<any[]> {
const commands: any[] = [];
// console.log('Grab all the command folders from the commands directory you created earlier')
const foldersPath = path.join(__dirname, 'commands');
const commandFolders = fs.readdirSync(foldersPath);
for (const folder of commandFolders) {
const commandsPath = path.join(foldersPath, folder);
const commandFiles = fs.readdirSync(commandsPath).filter(file => file.endsWith('.ts') || file.endsWith('.js'));
console.log(`commandFiles=${commandFiles}`);
// console.log(`Grab the SlashCommandBuilder#toJSON() output of each command's data for deployment`)
for (const file of commandFiles) {
const filePath = path.join(commandsPath, file);
const command = (await import(filePath)).default;
// console.log(command)
if (command?.data && command?.execute) {
commands.push(command);
} else {
console.log(`[WARNING] The command at ${filePath} is missing a required "data" or "execute" property.`);
}
}
}
return commands;
}

View File

@ -1,23 +0,0 @@
import * as path from 'node:path';
import * as fs from 'node:fs';
import { dirname } from 'node:path';
import { fileURLToPath } from 'url';
import type { Client } from 'discord.js';
import type { WorkerUtils } from 'graphile-worker';
const __dirname = dirname(fileURLToPath(import.meta.url));
export default async function loadEvents(client: Client, workerUtils: WorkerUtils) {
console.log(`loading events`);
const eventsPath = path.join(__dirname, 'events');
const eventFiles = fs.readdirSync(eventsPath).filter(file => file.endsWith('.ts') || file.endsWith('.js'));
console.log(`eventFiles=${eventFiles}`);
for (const file of eventFiles) {
const filePath = path.join(eventsPath, file);
const event = (await import(filePath)).default;
if (event.once) {
client.once(event.name, (...args) => event.execute(...args, workerUtils));
} else {
client.on(event.name, (...args) => event.execute(...args, workerUtils));
}
}
}

View File

@ -1,24 +0,0 @@
import { Client, Events, MessageReaction, User, type Interaction } from 'discord.js';
export default {
name: Events.MessageReactionAdd,
once: false,
async execute(reaction: MessageReaction, user: User) {
// When a reaction is received, check if the structure is partial
if (reaction.partial) {
// If the message this reaction belongs to was removed, the fetching might result in an API error which should be handled
try {
await reaction.fetch();
} catch (error) {
console.error('Something went wrong when fetching the message:', error);
// Return as `reaction.message.author` may be undefined/null
return;
}
}
// Now the message has been cached and is fully available
console.log(`${reaction.message.author}'s message "${reaction.message.content}" gained a reaction!`);
// The reaction is now also fully available and the properties will be reflected accurately:
console.log(`${reaction.count} user(s) have given the same reaction to this message!`);
}
}

View File

@ -1,12 +0,0 @@
import 'dotenv/config'
import { bot } from '../index.js'
import donger from '../commands/donger.js'
import record from '../commands/record.js'
const guildId = process.env.DISCORD_GUILD_ID!
const commands = [
donger,
record
]
await bot.rest.upsertGuildApplicationCommands(guildId, commands)

View File

@ -1,45 +0,0 @@
import { type ChatInputCommandInteraction, SlashCommandBuilder, Message } from 'discord.js';
const dongers: string[] = [
'( ͡ᵔ ͜ʖ ͡ᵔ )',
'¯\\_(ツ)_/¯',
'(๑>ᴗ<๑)',
'(̿▀̿ ̿Ĺ̯̿̿▀̿ ̿)',
'( ͡° ͜ʖ ͡°)',
'٩(͡๏̯͡๏)۶',
'ლ(´◉❥◉`ლ)',
'( ゚Д゚)',
'ԅ( ͒ ۝ ͒ )ᕤ',
'( ͡ᵔ ͜ʖ ͡°)',
'( ͠° ͟ʖ ͡°)╭∩╮',
'༼ つ ❦౪❦ ༽つ',
'( ͡↑ ͜ʖ ͡↑)',
'(ভ_ ভ) ރ / ┊ \\',
'ヽ(⌐□益□)ノ',
'༼ つ ◕‿◕ ༽つ',
'ヽ(⚆෴⚆)ノ',
'(つ .•́ _ʖ •̀.)つ',
'༼⌐■ل͟■༽',
'┬─┬ノ( ͡° ͜ʖ ͡°ノ)',
'༼⁰o⁰༽꒳ᵒ꒳ᵎᵎᵎ',
'( -_・) ▄︻̷̿┻̿═━一',
'【 º ᗜ º 】',
'ᕦ(✧╭╮✧)ᕥ',
'┗( TT )┛',
'(Φ ᆺ Φ)',
'(TдT)',
'☞(◉▽◉)☞'
];
export default {
data: new SlashCommandBuilder()
.setName('donger')
.setDescription('Replies with a free donger!'),
async execute({ interaction }: { interaction: ChatInputCommandInteraction}): Promise<void> {
await interaction.reply({
content: dongers[Math.floor(Math.random()*dongers.length)]
});
},
};

View File

@ -1,144 +0,0 @@
import type { ExecuteArguments } from '../../index.js';
if (!process.env.AUTOMATION_USER_JWT) throw new Error(`AUTOMATION_USER_JWT was missing from env`);
export default {
data: new SlashCommandBuilder()
.setName('record')
.setDescription('Record a livestream.')
.addStringOption((option) =>
option.setName('url')
.setMaxLength(1024)
.setDescription('The channel URL to record')
.setRequired(true)
),
async execute({ interaction, workerUtils }: ExecuteArguments): Promise<void> {
const url = interaction.options.getString('url')
// const row = new ActionRowBuilder<ButtonBuilder>()
// .addComponents(component);
// {
// content: `Button`,
// components: [
// new ActionRowBuilder<MessageActionRowComponentBuilder>().addComponents([
// new ButtonBuilder()
// .setCustomId('click/12345')
// .setLabel('LABEL')
// .setStyle(ButtonStyle.Primary)
// ])
// ]
// cols can be 5 high
// rows can be 5 wide
const statusEmbed = new EmbedBuilder()
.setTitle('Pending')
.setDescription('Waiting for a worker to accept the job.')
.setColor(2326507)
const buttonRow = new ActionRowBuilder<MessageActionRowComponentBuilder>()
.addComponents([
new ButtonBuilder()
.setCustomId('stop')
.setLabel('Stop Recording')
.setEmoji('🛑')
.setStyle(ButtonStyle.Danger),
]);
// const embed = new EmbedBuilder().setTitle('Attachments');
const idk = await interaction.reply({
content: `/record ${url}`,
embeds: [
statusEmbed
],
components: [
buttonRow
]
});
// console.log('the following is idk, the return value from interaction.reply')
// console.log(idk)
const message = await idk.fetch()
const discordMessageId = message.id
await workerUtils.addJob('start_recording', { url, discordMessageId }, { maxAttempts: 3 })
},
};
/**
{
"content": "https://chaturbate.com/projektmelody",
"tts": false,
"embeds": [
{
"id": 652627557,
"title": "Pending",
"description": "Waiting for a worker to accept the job.",
"color": 2326507,
"fields": []
},
{
"id": 88893690,
"title": "Recording",
"description": "The stream is being recorded.",
"color": 392960,
"fields": []
},
{
"id": 118185075,
"title": "Aborted",
"description": "The recording was stopped by the user.",
"color": 8289651,
"fields": []
},
{
"id": 954884517,
"title": "Ended",
"description": "The recording has stopped.",
"color": 10855845,
"fields": []
},
{
"id": 64407340,
"description": "",
"fields": [],
"image": {
"url": "https://futureporn-b2.b-cdn.net/ti8ht9bgwj6k783j7hglfg8j_projektmelody-chaturbate-2024-07-18.png"
}
}
],
"components": [
{
"id": 300630266,
"type": 1,
"components": [
{
"id": 320918638,
"type": 2,
"style": 4,
"label": "Stop Recording",
"action_set_id": "407606538",
"emoji": {
"name": "🛑",
"animated": false
}
}
]
}
],
"actions": {
"407606538": {
"actions": []
}
},
"username": "@futureporn/capture",
"avatar_url": "https://cdn.discordapp.com/avatars/1081818302344597506/93892e06c2f94c3ef1043732a49856db.webp?size=128"
}
*/

View File

@ -1,14 +0,0 @@
import { type ChatInputCommandInteraction, SlashCommandBuilder } from 'discord.js';
export default {
data: new SlashCommandBuilder()
.setName('sim-email')
.setDescription('Simulate an incoming platform notification e-mail'),
async execute(interaction: ChatInputCommandInteraction): Promise<void> {
await interaction.reply({
content: 'testing 123 this is sim-email (simEmail.ts)'
});
},
};

View File

@ -0,0 +1,85 @@
import type { Task, Helpers } from "graphile-worker"
import { sub } from 'date-fns'
import type { RecordingRecord } from "@futureporn/types"
import qs from 'qs'
import fetch from 'node-fetch'
import { configs } from '../config.ts'
interface Payload {
idle_minutes: number;
}
function assertPayload(payload: any): asserts payload is Payload {
if (typeof payload !== "object" || !payload) throw new Error("invalid payload");
if (!payload.idle_minutes) throw new Error('idle_minutes was missing from payload');
if (typeof payload.idle_minutes !== 'number') throw new Error('idle_minutes must be a number');
}
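// (TypeScript assertion function: once assertPayload(payload) returns, the
// compiler narrows `payload` from unknown to Payload, so the destructuring
// below type-checks without a cast.)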
export const restart_failed_recordings: Task = async function (payload: unknown, helpers: Helpers) {
assertPayload(payload)
const { idle_minutes } = payload
helpers.logger.info(`restart_failed_recordings has begun. Expiring 'recording' and 'pending' records that haven't been updated in ${idle_minutes} minutes.`)
const url = 'http://postgrest.futureporn.svc.cluster.local:9000/records'
let records: RecordingRecord[] = []
try {
// 1. identify failed /records
// Any record that was updated more than n minutes ago AND is in 'pending' or 'recording' state is marked as stalled.
const timestamp = sub(new Date(), { minutes: idle_minutes }).toISOString()
const queryOptions = {
updated_at: `lt.${timestamp}`,
or: '(recording_state.eq.pending,recording_state.eq.recording)'
}
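// With qs.stringify, the options above serialize to roughly
//   updated_at=lt.<timestamp>&or=(recording_state.eq.pending,recording_state.eq.recording)
// (qs percent-encodes the parentheses and commas by default, which PostgREST
// accepts) -- i.e. PostgREST filter syntax for "stale AND (pending OR recording)".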
const updatePayload = {
updated_at: new Date().toISOString(),
recording_state: 'stalled'
}
helpers.logger.info(JSON.stringify(updatePayload))
const query = qs.stringify(queryOptions)
const res = await fetch(`${url}?${query}`, {
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${configs.automationUserJwt}`,
'Prefer': 'return=headers-only'
},
body: JSON.stringify(updatePayload)
})
if (!res.ok) {
const body = await res.text()
helpers.logger.info(JSON.stringify(Object.fromEntries(res.headers)))
helpers.logger.error(`Response was not OK. status=${res.status}, statusText=${res.statusText}`)
helpers.logger.error(body)
return;
}
const body = await res.text()
helpers.logger.info('body as follows')
helpers.logger.info(body)
// const data = await res.json() as RecordingRecord[]
// if (data.length < 1) return;
// records = data
} catch (e: any) {
if (e instanceof Error) {
helpers.logger.error(`encountered an error while PATCH-ing /records`)
helpers.logger.error(e.message)
} else {
helpers.logger.error(e)
}
}
// // 2. identify and update
// for (const record of records) {
// const res = await fetch(`${url}?`)
// }
// // 3. done
}
export default restart_failed_recordings

View File

@ -1,7 +1,7 @@
import 'dotenv/config'
import type { RecordingState } from '@futureporn/types'
import type { Status, Stream, Segment } from '@futureporn/types'
import { type Task, type Helpers } from 'graphile-worker'
import { add } from 'date-fns'
import { intervalToDuration, formatDuration, isBefore, sub, max } from 'date-fns'
import prettyBytes from 'pretty-bytes'
import {
EmbedsBuilder,
@ -9,91 +9,51 @@ import {
type ActionRow,
MessageComponentTypes,
type ButtonComponent,
type InputTextComponent,
type EditMessage,
type Message,
type Embed
} from '@discordeno/bot'
import { bot } from '../bot.ts'
import { configs } from '../config.ts'
const yeahEmojiId = BigInt('1253191939461873756')
interface Payload {
record_id: number;
stream_id: number;
}
function assertPayload(payload: any): asserts payload is Payload {
if (typeof payload !== "object" || !payload) throw new Error("invalid payload");
if (!payload.record_id) throw new Error(`record_id was absent in the payload`);
if (!payload.stream_id) throw new Error(`stream_id was absent in the payload`);
}
async function editDiscordMessage({ helpers, recordingState, discordMessageId, url, fileSize, recordId }: { recordId: number, fileSize: number, url: string, helpers: Helpers, recordingState: RecordingState, discordMessageId: string }) {
async function editDiscordMessage({ helpers, stream }: { stream: Stream, helpers: Helpers }) {
const discordMessageId = stream.discord_message_id
if (!discordMessageId) throw new Error(`discordMessageId was missing!`);
if (typeof discordMessageId !== 'string') throw new Error(`discordMessageId was not a string!`);
// const { captureJobId } = job.data
helpers.logger.info(`editDiscordMessage has begun with discordMessageId=${discordMessageId}, state=${recordingState}`)
// const guild = await bot.cache.guilds.get(BigInt(configs.discordGuildId))
// const channel = guild?.channels.get(BigInt(configs.discordChannelId))
// // const channel = await bot.cache.channels.get()
// console.log('channel as follows')
// console.log(channel)
const channelId = BigInt(configs.discordChannelId)
const updatedMessage: EditMessage = {
embeds: getStatusEmbed({ recordingState, fileSize, recordId, url }),
embeds: getEmbeds(stream),
components: getButtonRow(stream.status)
}
bot.helpers.editMessage(channelId, discordMessageId, updatedMessage)
// channel.
// const guild = await client.guilds.fetch(process.env.DISCORD_GUILD_ID!) as Guild
// if (!guild) throw new Error('guild was undefined');
// helpers.logger.info('here is the guild as follows')
// helpers.logger.info(guild.toString())
// helpers.logger.info(`fetching discord channel id=${process.env.DISCORD_CHANNEL_ID} from discord guild`)
// const channel = await client.channels.fetch(process.env.DISCORD_CHANNEL_ID!) as TextChannel
// if (!channel) throw new Error(`discord channel was undefined`);
// const message = await channel.messages.fetch(discordMessageId)
// helpers.logger.info(`discordMessageId=${discordMessageId}`)
// helpers.logger.info(message as any)
// const statusEmbed = getStatusEmbed({ recordId, recordingState, fileSize, url })
// const buttonRow = getButtonRow(recordingState)
// // const embed = new EmbedBuilder().setTitle('Attachments');
// const updatedMessage = {
// embeds: [
// statusEmbed
// ],
// components: [
// buttonRow
// ]
// };
// message.edit(updatedMessage)
}
async function getRecordFromDatabase(recordId: number) {
const res = await fetch(`${process.env.POSTGREST_URL}/records?id=eq.${recordId}`)
async function getStreamFromDatabase(streamId: number) {
const res = await fetch(`${process.env.POSTGREST_URL}/streams?select=*,segments(*)&id=eq.${streamId}`)
if (!res.ok) {
throw new Error(`failed fetching record ${recordId}. status=${res.status}, statusText=${res.statusText}`)
throw new Error(`failed fetching stream ${streamId}. status=${res.status}, statusText=${res.statusText}`)
}
const body = await res.json() as any
return body[0];
@ -102,117 +62,131 @@ async function getRecordFromDatabase(recordId: number) {
/**
* updateDiscordMessage is the task where we edit a previously sent discord message to display
* update_discord_message is the task where we edit a previously sent discord message to display
* the most up-to-date status information from the database
*
* Sometimes the update is changing the state, one of Pending|Recording|Aborted|Ended.
* Sometimes the update is updating the Filesize of the recording in-progress
* Sometimes the update is adding a thumbnail image to the message
*/
export const updateDiscordMessage: Task = async function (payload, helpers: Helpers) {
export const update_discord_message: Task = async function (payload, helpers: Helpers) {
try {
assertPayload(payload)
const { record_id } = payload
const recordId = record_id
helpers.logger.info(`updateDiscordMessage() with recordId=${recordId}`)
const record = await getRecordFromDatabase(recordId)
const { discord_message_id, recording_state, file_size, url } = record
const recordingState = recording_state
const discordMessageId = discord_message_id
const fileSize = file_size
editDiscordMessage({ helpers, recordingState, discordMessageId, url, fileSize, recordId })
// schedule the next update 10s from now, but only if the recording is still happening
if (recordingState !== 'ended') {
const runAt = add(new Date(), { seconds: 10 })
const recordId = record.id
await helpers.addJob('updateDiscordMessage', { recordId }, { jobKey: `record_${recordId}_update_discord_message`, maxAttempts: 3, runAt })
}
const { stream_id } = payload
const streamId = stream_id
const stream = await getStreamFromDatabase(streamId)
// helpers.logger.info(`update_discord_message with streamId=${streamId}. stream=${JSON.stringify(stream)}`)
editDiscordMessage({ helpers, stream })
} catch (e) {
helpers.logger.error(`caught an error during updateDiscordMessage. e=${e}`)
helpers.logger.error(`caught an error during update_discord_message. e=${e}`)
}
}
function getStatusEmbed({
recordingState, recordId, fileSize, url
}: { fileSize: number, recordingState: RecordingState, recordId: number, url: string }) {
function getEmbeds(stream: Stream) {
const streamId = stream.id
const url = stream.url
const segments = stream?.segments
const status = stream.status
const embeds = new EmbedsBuilder()
.setTitle(`Record ${recordId}`)
.setTitle(`Stream ${streamId}`)
.setFields([
{ name: 'Status', value: recordingState.charAt(0).toUpperCase()+recordingState.slice(1), inline: true },
{ name: 'Filesize', value: prettyBytes(fileSize), inline: true },
{ name: 'Status', value: status.charAt(0).toUpperCase()+status.slice(1), inline: true },
// { name: 'Filesize', value: prettyBytes(fileSize), inline: true }, // filesize isn't on stream. filesize is on segment. keeping for reference. @todo
{ name: 'URL', value: url, inline: false },
])
if (recordingState === 'pending') {
if (status === 'pending_recording') {
embeds
.setDescription("Waiting for a worker to accept the job.")
.setColor(2326507)
} else if (recordingState === 'recording') {
} else if (status === 'recording') {
embeds
.setDescription('The stream is being recorded.')
.setColor(392960)
} else if (recordingState === 'aborted') {
} else if (status === 'aborted') {
embeds
.setDescription("The recording was stopped by the user.")
.setColor(8289651)
} else if (recordingState === 'ended') {
} else if (status === 'finished') {
embeds
.setDescription("The recording has stopped.")
.setDescription("The recording has ended nominally.")
.setColor(10855845)
} else if (status === 'failed') {
embeds
.setDescription("The recording has ended abnorminally.")
.setColor(8289651)
} else if (status === 'stalled') {
embeds
.setDescription("We have not received a progress update in the past two minutes.")
.setColor(8289651)
} else {
embeds
.setDescription('The recording is in an unknown state? (this is a bug.)')
.setDescription(`The recording is in an unknown state (streamStatus=${status}); this is a bug.`)
.setColor(10855845)
}
// Add an Embed for each segment
if (segments) {
const getDuration = (s: Segment) => formatDuration(intervalToDuration({ start: new Date(s.created_at), end: new Date(s.updated_at) }))
embeds.newEmbed()
.setTitle(`Recording Segments`)
.setFields(segments.map((s, i) => (
{
name: `Segment ${i+1}`,
value: `${getDuration(s)} (${prettyBytes(s.bytes)})`,
inline: false
}
)))
}
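// (Assumption: a segment's duration is approximated from its row timestamps --
// created_at marks when the segment started and updated_at is touched on each
// progress update, so the interval tracks how long the segment has recorded.)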
return embeds
}
function getButtonRow(state: RecordingState): ActionRow {
function getButtonRow(streamStatus: Status): ActionRow[] {
const components: ButtonComponent[] = []
if (state === 'pending' || state === 'recording') {
const stopButton: ButtonComponent = {
type: MessageComponentTypes.Button,
customId: 'stop',
label: 'Cancel',
style: ButtonStyles.Danger
}
components.push(stopButton)
} else if (state === 'aborted') {
const retryButton: ButtonComponent = {
type: MessageComponentTypes.Button,
customId: 'retry',
label: 'Retry Recording',
emoji: {
name: 'retry'
},
style: ButtonStyles.Secondary
}
const yeahButton: ButtonComponent = {
type: MessageComponentTypes.Button,
customId: 'yeah',
label: "Yeah!",
emoji: {
id: yeahEmojiId
},
style: ButtonStyles.Success
}
const processButton: ButtonComponent = {
type: MessageComponentTypes.Button,
customId: 'process',
label: 'Process Recording',
style: ButtonStyles.Success
}
const cancelButton: ButtonComponent = {
type: MessageComponentTypes.Button,
customId: 'cancel',
label: 'Cancel',
style: ButtonStyles.Danger
}
const retryButton: ButtonComponent = {
type: MessageComponentTypes.Button,
customId: 'record',
label: 'Retry Recording',
style: ButtonStyles.Secondary
}
if (streamStatus === 'pending_recording' || streamStatus === 'recording') {
components.push(cancelButton)
components.push(processButton) // @todo this is only for testing. normally the process button is hidden until recording completes.
components.push(yeahButton) // @todo this is only for testing. normally the yeah button is hidden until recording completes.
} else if (streamStatus === 'aborted') {
components.push(retryButton)
} else if (state === 'ended') {
const downloadButton: ButtonComponent = {
type: MessageComponentTypes.Button,
customId: 'download',
label: 'Download Recording',
emoji: {
id: BigInt('1253191939461873756')
},
style: ButtonStyles.Success
}
components.push(downloadButton)
} else if (streamStatus === 'finished') {
components.push(processButton)
} else {
const unknownButton: ButtonComponent = {
type: MessageComponentTypes.Button,
customId: 'unknown',
label: 'Unknown State',
emoji: {
name: 'thinking'
},
style: ButtonStyles.Primary
}
components.push(unknownButton)
components.push(retryButton)
}
@ -221,8 +195,8 @@ function getButtonRow(state: RecordingState): ActionRow {
components: components as [ButtonComponent]
}
return actionRow
return [actionRow]
}
export default updateDiscordMessage
export default update_discord_message

View File

@ -0,0 +1,123 @@
import type { Task, Helpers } from "graphile-worker"
import { sub } from 'date-fns'
import type { Status } from "@futureporn/types"
import qs from 'qs'
import fetch from 'node-fetch'
import { configs } from '../config.ts'
interface Payload {
stalled_minutes: number;
}
function assertPayload(payload: any): asserts payload is Payload {
if (typeof payload !== "object" || !payload) throw new Error("invalid payload");
if (!payload.stalled_minutes) throw new Error(`stalled_minutes was absent in the payload`);
if (typeof payload.stalled_minutes !== 'number') throw new Error(`stalled_minutes parameter was not a number`);
}
async function updateStalledStreams({
helpers,
stalled_minutes,
url
}: {
helpers: Helpers,
stalled_minutes: number,
url: string
}) {
// 1. identify and update stalled /streams
// Any stream that was updated more than n minutes ago AND is in 'pending_recording' or 'recording' state is marked as stalled.
const timestamp = sub(new Date(), { minutes: stalled_minutes }).toISOString()
const queryOptions = {
updated_at: `lt.${timestamp}`,
or: '(status.eq.pending_recording,status.eq.recording)'
}
const updatePayload = {
updated_at: new Date().toISOString(),
status: 'stalled' as Status
}
// helpers.logger.info(JSON.stringify(updatePayload))
const query = qs.stringify(queryOptions)
const res = await fetch(`${url}?${query}`, {
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${configs.automationUserJwt}`,
'Prefer': 'return=headers-only'
},
body: JSON.stringify(updatePayload)
})
if (!res.ok) {
const body = await res.text()
helpers.logger.info(JSON.stringify(Object.fromEntries(res.headers)))
helpers.logger.error(`Response was not OK. status=${res.status}, statusText=${res.statusText}`)
helpers.logger.error(body)
return;
}
}
async function updateRecordingStreams({
helpers,
url
}: {
helpers: Helpers,
url: string
}) {
// identify and update recording /streams
// Any stream that has a segment updated within the past minute is considered recording
const timestamp = sub(new Date(), { minutes: 1 }).toISOString()
const queryOptions = {
select: 'status,id,segments!inner(updated_at)',
'segments.updated_at': `gte.${timestamp}`,
or: '(status.eq.pending_recording,status.eq.recording)',
}
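// segments!inner(updated_at) requests an inner-join embed: on reads, only
// streams with at least one segment matching the segments.updated_at filter
// come back (without !inner they would still appear, with an empty segments
// array). The assumption here is that PostgREST applies the same embedded
// filter to this PATCH so that only those streams get updated.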
const updatePayload = {
status: 'recording'
}
// helpers.logger.info(JSON.stringify(updatePayload))
const query = qs.stringify(queryOptions)
const options = {
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${configs.automationUserJwt}`,
'Prefer': 'return=headers-only'
},
body: JSON.stringify(updatePayload)
}
const res = await fetch(`${url}?${query}`, options)
if (!res.ok) {
const body = await res.text()
helpers.logger.info(JSON.stringify(Object.fromEntries(res.headers)))
helpers.logger.error(`Response was not OK. status=${res.status}, statusText=${res.statusText}`)
helpers.logger.error(body)
return;
}
}
export const update_stream_statuses: Task = async function (payload: unknown, helpers: Helpers) {
assertPayload(payload)
const { stalled_minutes } = payload
// helpers.logger.info(`update_stream_statuses has begun.`)
const url = 'http://postgrest.futureporn.svc.cluster.local:9000/streams'
try {
// await updateStalledStreams({ helpers, url, stalled_minutes })
await updateRecordingStreams({ helpers, url })
} catch (e: any) {
if (e instanceof Error) {
helpers.logger.error(`encountered an error while updating /streams`)
helpers.logger.error(e.message)
} else {
helpers.logger.error(e)
}
}
}
export default update_stream_statuses
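// Sketch: a periodic task like this is typically scheduled with Graphile
// Worker's crontab rather than enqueued ad-hoc (assumption -- the actual
// schedule lives elsewhere in this repo). A crontab line running it every
// minute with a two-minute stall window would look like:
//   * * * * * update_stream_statuses ?max=1 {"stalled_minutes":2}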

View File

@ -10,9 +10,10 @@
"build": "tsup",
"test": "mocha",
"integration": "FUTUREPORN_WORKDIR=/home/cj/Downloads mocha ./integration/**/*.test.js",
"dev": "tsx --watch ./src/index.ts",
"dev.nodemon": "pnpm nodemon --ext ts,json,yaml --ignore ./dist --watch ./src --watch ./node_modules/@futureporn --exec \"pnpm run dev.build\"",
"dev.build": "pnpm run build && pnpm run start",
"dev": "pnpm run dev.nodemon # yes this is crazy to have nodemon execute tsx, but it's the only way I have found to get live reloading in TS/ESM/docker with Graphile Worker's way of loading tasks",
"dev.tsx": "tsx ./src/index.ts",
"dev.nodemon": "nodemon --ext ts --exec \"pnpm run dev.tsx\"",
"dev.node": "node --no-warnings=ExperimentalWarning --loader ts-node/esm src/index.ts",
"clean": "rm -rf dist",
"superclean": "rm -rf node_modules && rm -rf pnpm-lock.yaml && rm -rf dist"
},
@ -25,9 +26,12 @@
"@futureporn/utils": "workspace:^",
"@paralleldrive/cuid2": "^2.2.2",
"@types/chai": "^4.3.16",
"@types/chai-as-promised": "^7.1.8",
"@types/fluent-ffmpeg": "^2.1.24",
"@types/mocha": "^10.0.7",
"@types/qs": "^6.9.15",
"date-fns": "^3.6.0",
"discord.js": "^14.15.3",
"diskusage": "^1.2.0",
"dotenv": "^16.4.5",
"execa": "^6.1.0",
@ -47,6 +51,7 @@
"pg-boss": "^9.0.3",
"pino-pretty": "^11.2.1",
"postgres": "^3.4.4",
"qs": "^6.13.0",
"rxjs": "^7.8.1",
"sql": "^0.78.0",
"winston": "^3.13.1",
@ -61,6 +66,7 @@
"aws-sdk-client-mock": "^4.0.1",
"aws-sdk-mock": "^6.0.4",
"chai": "^4.4.1",
"chai-as-promised": "^8.0.0",
"cheerio": "1.0.0-rc.12",
"mocha": "^10.7.0",
"multiformats": "^11.0.2",

View File

@ -32,15 +32,24 @@ importers:
'@types/chai':
specifier: ^4.3.16
version: 4.3.16
'@types/chai-as-promised':
specifier: ^7.1.8
version: 7.1.8
'@types/fluent-ffmpeg':
specifier: ^2.1.24
version: 2.1.24
'@types/mocha':
specifier: ^10.0.7
version: 10.0.7
'@types/qs':
specifier: ^6.9.15
version: 6.9.15
date-fns:
specifier: ^3.6.0
version: 3.6.0
discord.js:
specifier: ^14.15.3
version: 14.15.3
diskusage:
specifier: ^1.2.0
version: 1.2.0
@ -98,6 +107,9 @@ importers:
postgres:
specifier: ^3.4.4
version: 3.4.4
qs:
specifier: ^6.13.0
version: 6.13.0
rxjs:
specifier: ^7.8.1
version: 7.8.1
@ -135,6 +147,9 @@ importers:
chai:
specifier: ^4.4.1
version: 4.5.0
chai-as-promised:
specifier: ^8.0.0
version: 8.0.0(chai@4.5.0)
cheerio:
specifier: 1.0.0-rc.12
version: 1.0.0-rc.12
@ -379,6 +394,34 @@ packages:
'@dabh/diagnostics@2.0.3':
resolution: {integrity: sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==}
'@discordjs/builders@1.8.2':
resolution: {integrity: sha512-6wvG3QaCjtMu0xnle4SoOIeFB4y6fKMN6WZfy3BMKJdQQtPLik8KGzDwBVL/+wTtcE/ZlFjgEk74GublyEVZ7g==}
engines: {node: '>=16.11.0'}
'@discordjs/collection@1.5.3':
resolution: {integrity: sha512-SVb428OMd3WO1paV3rm6tSjM4wC+Kecaa1EUGX7vc6/fddvw/6lg90z4QtCqm21zvVe92vMMDt9+DkIvjXImQQ==}
engines: {node: '>=16.11.0'}
'@discordjs/collection@2.1.0':
resolution: {integrity: sha512-mLcTACtXUuVgutoznkh6hS3UFqYirDYAg5Dc1m8xn6OvPjetnUlf/xjtqnnc47OwWdaoCQnHmHh9KofhD6uRqw==}
engines: {node: '>=18'}
'@discordjs/formatters@0.4.0':
resolution: {integrity: sha512-fJ06TLC1NiruF35470q3Nr1bi95BdvKFAF+T5bNfZJ4bNdqZ3VZ+Ttg6SThqTxm6qumSG3choxLBHMC69WXNXQ==}
engines: {node: '>=16.11.0'}
'@discordjs/rest@2.3.0':
resolution: {integrity: sha512-C1kAJK8aSYRv3ZwMG8cvrrW4GN0g5eMdP8AuN8ODH5DyOCbHgJspze1my3xHOAgwLJdKUbWNVyAeJ9cEdduqIg==}
engines: {node: '>=16.11.0'}
'@discordjs/util@1.1.0':
resolution: {integrity: sha512-IndcI5hzlNZ7GS96RV3Xw1R2kaDuXEp7tRIy/KlhidpN/BQ1qh1NZt3377dMLTa44xDUNKT7hnXkA/oUAzD/lg==}
engines: {node: '>=16.11.0'}
'@discordjs/ws@1.1.1':
resolution: {integrity: sha512-PZ+vLpxGCRtmr2RMkqh8Zp+BenUaJqlS6xhgWKEZcgC/vfHLEzpHtKkB0sl3nZWpwtcKk6YWy+pU3okL2I97FA==}
engines: {node: '>=16.11.0'}
'@esbuild/aix-ppc64@0.21.5':
resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==}
engines: {node: '>=12'}
@ -807,6 +850,18 @@ packages:
cpu: [x64]
os: [win32]
'@sapphire/async-queue@1.5.3':
resolution: {integrity: sha512-x7zadcfJGxFka1Q3f8gCts1F0xMwCKbZweM85xECGI0hBTeIZJGGCrHgLggihBoprlQ/hBmDR5LKfIPqnmHM3w==}
engines: {node: '>=v14.0.0', npm: '>=7.0.0'}
'@sapphire/shapeshift@3.9.7':
resolution: {integrity: sha512-4It2mxPSr4OGn4HSQWGmhFMsNFGfFVhWeRPCRwbH972Ek2pzfGRZtb0pJ4Ze6oIzcyh2jw7nUDa6qGlWofgd9g==}
engines: {node: '>=v16'}
'@sapphire/snowflake@3.5.3':
resolution: {integrity: sha512-jjmJywLAFoWeBi1W7994zZyiNWPIiqRRNAmSERxyg93xRGzNYvGjlZ0gR6x0F4gPRi2+0O6S71kOZYyr3cxaIQ==}
engines: {node: '>=v14.0.0', npm: '>=7.0.0'}
'@sinonjs/commons@2.0.0':
resolution: {integrity: sha512-uLa0j859mMrg2slwQYdO/AkrOfmH+X6LTVmNTS9CqexuE2IvVORIkSpJLqePAbEnKJ77aMmCwr1NUZ57120Xcg==}
@ -1040,6 +1095,9 @@ packages:
'@tsconfig/node16@1.0.4':
resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==}
'@types/chai-as-promised@7.1.8':
resolution: {integrity: sha512-ThlRVIJhr69FLlh6IctTXFkmhtP3NpMZ2QGq69StYLyKZFp/HOp1VdKZj7RvfNWYYcJ1xlbLGLLWj1UvP5u/Gw==}
'@types/chai@4.3.16':
resolution: {integrity: sha512-PatH4iOdyh3MyWtmHVFXLWCCIhUbopaltqddG9BzB+gMIzee2MJrvd+jouii9Z3wzQJruGWAm7WOMjgfG8hQlQ==}
@ -1067,6 +1125,9 @@ packages:
'@types/pg@8.11.6':
resolution: {integrity: sha512-/2WmmBXHLsfRqzfHW7BNZ8SbYzE8OSk7i3WjFYvfgRHj7S1xj+16Je5fUKv3lVdVzk/zn9TXOqf+avFCFIE0yQ==}
'@types/qs@6.9.15':
resolution: {integrity: sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg==}
'@types/retry@0.12.1':
resolution: {integrity: sha512-xoDlM2S4ortawSWORYqsdU+2rxdh4LRW9ytc3zmT37RIKQh6IHyKwwtKhKis9ah8ol07DCkZxPt8BBvPjC6v4g==}
@ -1088,6 +1149,13 @@ packages:
'@types/triple-beam@1.3.5':
resolution: {integrity: sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==}
'@types/ws@8.5.12':
resolution: {integrity: sha512-3tPRkv1EtkDpzlgyKyI8pGsGZAGPEaXeu0DOj5DI25Ja91bdAYddYHbADRYVrZMRbfW+1l5YwXVDKohDJNQxkQ==}
'@vladfrangu/async_event_emitter@2.4.5':
resolution: {integrity: sha512-J7T3gUr3Wz0l7Ni1f9upgBZ7+J22/Q1B7dl0X6fG+fTsD+H+31DIosMHj4Um1dWQwqbcQ3oQf+YS2foYkDc9cQ==}
engines: {node: '>=v14.0.0', npm: '>=7.0.0'}
abort-controller@3.0.0:
resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==}
engines: {node: '>=6.5'}
@ -1270,6 +1338,11 @@ packages:
resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==}
engines: {node: '>=10'}
chai-as-promised@8.0.0:
resolution: {integrity: sha512-sMsGXTrS3FunP/wbqh/KxM8Kj/aLPXQGkNtvE5wPfSToq8wkkvBpTZo1LIiEVmC4BwkKpag+l5h/20lBMk6nUg==}
peerDependencies:
chai: '>= 2.1.2 < 6'
chai@4.5.0:
resolution: {integrity: sha512-RITGBfijLkBddZvnn8jdqoTypxvqbOLYQkGGxXzeFjVHvudaPw0HNFD9x928/eUwYWd2dPCugVqspGALTZZQKw==}
engines: {node: '>=4'}
@ -1285,6 +1358,10 @@ packages:
check-error@1.0.3:
resolution: {integrity: sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==}
check-error@2.1.1:
resolution: {integrity: sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==}
engines: {node: '>= 16'}
cheerio-select@2.1.0:
resolution: {integrity: sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==}
@ -1457,6 +1534,13 @@ packages:
resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==}
engines: {node: '>=8'}
discord-api-types@0.37.83:
resolution: {integrity: sha512-urGGYeWtWNYMKnYlZnOnDHm8fVRffQs3U0SpE8RHeiuLKb/u92APS8HoQnPTFbnXmY1vVnXjXO4dOxcAn3J+DA==}
discord.js@14.15.3:
resolution: {integrity: sha512-/UJDQO10VuU6wQPglA4kz2bw2ngeeSbogiIPx/TsnctfzV/tNf+q+i1HlgtX1OGpeOBpJH9erZQNO5oRM2uAtQ==}
engines: {node: '>=16.11.0'}
diskusage@1.2.0:
resolution: {integrity: sha512-2u3OG3xuf5MFyzc4MctNRUKjjwK+UkovRYdD2ed/NZNZPrt0lqHnLKxGhlFVvAb4/oufIgQG3nWgwmeTbHOvXA==}
@ -2024,12 +2108,18 @@ packages:
lodash.isarguments@3.1.0:
resolution: {integrity: sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==}
lodash.snakecase@4.1.1:
resolution: {integrity: sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw==}
lodash.sortby@4.7.0:
resolution: {integrity: sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==}
lodash@4.1.0:
resolution: {integrity: sha512-B9sgtKUlz0xe7lkYb80BcOpwwJJw5iOiz4HkBDzF0+i5nJLiwfBnL08m7bBkCOPBfi+0aqvrJDMdZDfAvs8vYg==}
lodash@4.17.21:
resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==}
log-symbols@4.1.0:
resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==}
engines: {node: '>=10'}
@ -2048,6 +2138,9 @@ packages:
resolution: {integrity: sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==}
engines: {node: '>=12'}
magic-bytes.js@1.10.0:
resolution: {integrity: sha512-/k20Lg2q8LE5xiaaSkMXk4sfvI+9EGEykFS4b0CHHGWqDYU0bGUFSwchNOMA56D7TCs9GwVTkqe9als1/ns8UQ==}
make-error@1.3.6:
resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==}
@ -2408,6 +2501,10 @@ packages:
resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==}
engines: {node: '>=6'}
qs@6.13.0:
resolution: {integrity: sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==}
engines: {node: '>=0.6'}
querystring@0.2.0:
resolution: {integrity: sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==}
engines: {node: '>=0.4.x'}
@ -2746,6 +2843,9 @@ packages:
ts-interface-checker@0.1.13:
resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==}
ts-mixer@6.0.4:
resolution: {integrity: sha512-ufKpbmrugz5Aou4wcr5Wc1UUFWOLhq+Fm6qa6P0w0K5Qw2yhaUoiWszhCVuNQyNwrlGiscHOmqYoAox1PtvgjA==}
ts-node@10.9.2:
resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==}
hasBin: true
@ -2760,6 +2860,9 @@ packages:
'@swc/wasm':
optional: true
tslib@2.6.2:
resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==}
tslib@2.6.3:
resolution: {integrity: sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==}
@ -2836,6 +2939,10 @@ packages:
undici-types@5.26.5:
resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==}
undici@6.13.0:
resolution: {integrity: sha512-Q2rtqmZWrbP8nePMq7mOJIN98M0fYvSgV89vwl/BQRT4mDOeY2GXZngfGpcBBhtky3woM7G24wZV3Q304Bv6cw==}
engines: {node: '>=18.0'}
universalify@0.2.0:
resolution: {integrity: sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==}
engines: {node: '>= 4.0.0'}
@ -2919,6 +3026,18 @@ packages:
wrappy@1.0.2:
resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
ws@8.18.0:
resolution: {integrity: sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==}
engines: {node: '>=10.0.0'}
peerDependencies:
bufferutil: ^4.0.1
utf-8-validate: '>=5.0.2'
peerDependenciesMeta:
bufferutil:
optional: true
utf-8-validate:
optional: true
xml2js@0.6.2:
resolution: {integrity: sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==}
engines: {node: '>=4.0.0'}
@ -3502,6 +3621,53 @@ snapshots:
enabled: 2.0.0
kuler: 2.0.0
'@discordjs/builders@1.8.2':
dependencies:
'@discordjs/formatters': 0.4.0
'@discordjs/util': 1.1.0
'@sapphire/shapeshift': 3.9.7
discord-api-types: 0.37.83
fast-deep-equal: 3.1.3
ts-mixer: 6.0.4
tslib: 2.6.3
'@discordjs/collection@1.5.3': {}
'@discordjs/collection@2.1.0': {}
'@discordjs/formatters@0.4.0':
dependencies:
discord-api-types: 0.37.83
'@discordjs/rest@2.3.0':
dependencies:
'@discordjs/collection': 2.1.0
'@discordjs/util': 1.1.0
'@sapphire/async-queue': 1.5.3
'@sapphire/snowflake': 3.5.3
'@vladfrangu/async_event_emitter': 2.4.5
discord-api-types: 0.37.83
magic-bytes.js: 1.10.0
tslib: 2.6.3
undici: 6.13.0
'@discordjs/util@1.1.0': {}
'@discordjs/ws@1.1.1':
dependencies:
'@discordjs/collection': 2.1.0
'@discordjs/rest': 2.3.0
'@discordjs/util': 1.1.0
'@sapphire/async-queue': 1.5.3
'@types/ws': 8.5.12
'@vladfrangu/async_event_emitter': 2.4.5
discord-api-types: 0.37.83
tslib: 2.6.3
ws: 8.18.0
transitivePeerDependencies:
- bufferutil
- utf-8-validate
'@esbuild/aix-ppc64@0.21.5':
optional: true
@ -3763,6 +3929,15 @@ snapshots:
'@rollup/rollup-win32-x64-msvc@4.19.1':
optional: true
'@sapphire/async-queue@1.5.3': {}
'@sapphire/shapeshift@3.9.7':
dependencies:
fast-deep-equal: 3.1.3
lodash: 4.17.21
'@sapphire/snowflake@3.5.3': {}
'@sinonjs/commons@2.0.0':
dependencies:
type-detect: 4.0.8
@ -4124,6 +4299,10 @@ snapshots:
'@tsconfig/node16@1.0.4': {}
'@types/chai-as-promised@7.1.8':
dependencies:
'@types/chai': 4.3.16
'@types/chai@4.3.16': {}
'@types/debug@4.1.12':
@ -4154,6 +4333,8 @@ snapshots:
pg-protocol: 1.6.1
pg-types: 4.0.2
'@types/qs@6.9.15': {}
'@types/retry@0.12.1': {}
'@types/semver@7.5.8': {}
@ -4175,6 +4356,12 @@ snapshots:
'@types/triple-beam@1.3.5': {}
'@types/ws@8.5.12':
dependencies:
'@types/node': 20.14.13
'@vladfrangu/async_event_emitter@2.4.5': {}
abort-controller@3.0.0:
dependencies:
event-target-shim: 5.0.1
@ -4356,6 +4543,11 @@ snapshots:
camelcase@6.3.0: {}
chai-as-promised@8.0.0(chai@4.5.0):
dependencies:
chai: 4.5.0
check-error: 2.1.1
chai@4.5.0:
dependencies:
assertion-error: 1.1.0
@ -4381,6 +4573,8 @@ snapshots:
dependencies:
get-func-name: 2.0.2
check-error@2.1.1: {}
cheerio-select@2.1.0:
dependencies:
boolbase: 1.0.0
@ -4566,6 +4760,26 @@ snapshots:
dependencies:
path-type: 4.0.0
discord-api-types@0.37.83: {}
discord.js@14.15.3:
dependencies:
'@discordjs/builders': 1.8.2
'@discordjs/collection': 1.5.3
'@discordjs/formatters': 0.4.0
'@discordjs/rest': 2.3.0
'@discordjs/util': 1.1.0
'@discordjs/ws': 1.1.1
'@sapphire/snowflake': 3.5.3
discord-api-types: 0.37.83
fast-deep-equal: 3.1.3
lodash.snakecase: 4.1.1
tslib: 2.6.2
undici: 6.13.0
transitivePeerDependencies:
- bufferutil
- utf-8-validate
diskusage@1.2.0:
dependencies:
es6-promise: 4.2.8
@ -5245,10 +5459,14 @@ snapshots:
lodash.isarguments@3.1.0: {}
lodash.snakecase@4.1.1: {}
lodash.sortby@4.7.0: {}
lodash@4.1.0: {}
lodash@4.17.21: {}
log-symbols@4.1.0:
dependencies:
chalk: 4.1.2
@ -5271,6 +5489,8 @@ snapshots:
luxon@3.4.4: {}
magic-bytes.js@1.10.0: {}
make-error@1.3.6: {}
merge-stream@2.0.0: {}
@ -5642,6 +5862,10 @@ snapshots:
punycode@2.3.1: {}
qs@6.13.0:
dependencies:
side-channel: 1.0.6
querystring@0.2.0: {}
querystringify@2.2.0: {}
@ -6003,6 +6227,8 @@ snapshots:
ts-interface-checker@0.1.13: {}
ts-mixer@6.0.4: {}
ts-node@10.9.2(@types/node@20.14.13)(typescript@5.5.4):
dependencies:
'@cspotcode/source-map-support': 0.8.1
@ -6021,6 +6247,8 @@ snapshots:
v8-compile-cache-lib: 3.0.1
yn: 3.1.1
tslib@2.6.2: {}
tslib@2.6.3: {}
tsup@8.2.3(tsx@4.16.2)(typescript@5.5.4):
@ -6120,6 +6348,8 @@ snapshots:
undici-types@5.26.5: {}
undici@6.13.0: {}
universalify@0.2.0: {}
url-parse@1.5.10:
@ -6226,6 +6456,8 @@ snapshots:
wrappy@1.0.2: {}
ws@8.18.0: {}
xml2js@0.6.2:
dependencies:
sax: 1.2.1

View File

@ -1,5 +1,5 @@
import Record from "./Record.js"
import { expect } from "chai"
import Record, { UploadStreamClosedError } from "./Record.js"
import * as chai from 'chai'
import { ChildProcess, spawn } from "child_process"
import { createReadStream, readFileSync, ReadStream } from "fs"
import AWSMock from 'aws-sdk-mock'
@ -13,7 +13,9 @@ import { HeadObjectOutput } from 'aws-sdk/clients/s3';
import { Readable } from 'stream';
import { mockClient } from 'aws-sdk-client-mock';
import { sdkStreamMixin } from '@smithy/util-stream'
import chaiAsPromised from 'chai-as-promised'
chai.use(chaiAsPromised)
const expect = chai.expect
// "pay no attention to that man behind the curtain"
@ -52,7 +54,7 @@ describe('Record', function () {
expect(record).to.have.property('bucket', 'test')
})
it('should be abortable', async function () {
xit('should be abortable', async function () {
const inputStream = createReadStream(join(__dirname, './fixtures/mock-stream0.mp4')) // 192627 bytes
const s3ClientMock = mockClient(S3Client)
const s3Client = new S3Client({ region: 'us-west-000' })
@ -65,6 +67,20 @@ describe('Record', function () {
await record.abort()
})
xit('should throw if the upload stream closes before the download stream closes', async function () {
const s3Mock = mockClient(S3Client)
// const inputStream = createReadStream(join(__dirname, './fixtures/mock-stream0.mp4'))
const inputStream = createReadStream('/dev/random') // forever random
// const s3Client = new S3Client({ region: 'us-west-000' })
// s3ClientMock.on()
s3Mock.on(PutObjectCommand).resolvesOnce({}).resolvesOnce({}).rejects({})
const s3 = new S3Client({ region: 'us-west-000' })
return expect(s3.send(new PutObjectCommand({ Body: inputStream, Bucket: 'taco', Key: 'my-cool-taco.mp4' }))).to.be.rejectedWith(UploadStreamClosedError)
})
xit('should restart if a EPIPE is encountered', async function () {
// @todo IDK how to implement this.
const inputStream = createReadStream(join(__dirname, './fixtures/mock-stream0.mp4'))

View File

@ -7,6 +7,13 @@ import 'dotenv/config'
const ua0 = 'Mozilla/5.0 (X11; Linux x86_64; rv:105.0) Gecko/20100101 Firefox/105.0'
export class UploadStreamClosedError extends Error {
constructor(message: string) {
super(message)
Object.setPrototypeOf(this, UploadStreamClosedError.prototype)
}
}
export interface RecordArgs {
filename?: string;
s3Client: S3Client;
@ -131,6 +138,7 @@ export default class Record {
parallelUploads3.on("httpUploadProgress", (progress) => {
if (progress?.loaded) {
// console.log(progress)
if (this.onProgress) this.onProgress(this.counter);
// console.log(`uploaded ${progress.loaded} bytes (${prettyBytes(progress.loaded)})`);
} else {
@ -143,8 +151,14 @@ export default class Record {
console.log('parallelUploads3 is complete.')
} catch (e) {
// if we got an abort error, e.name is not AbortError as expected. Instead, e.name is Error.
// so in order to catch AbortError, we don't even look there. instead, we check if our abortcontroller was aborted.
// in other words, `(e.name === 'AbortError')` will never be true.
if (this.abortSignal.aborted) return;
if (e instanceof Error) {
console.error(`We were uploading a file to S3 but then we encountered an error! ${JSON.stringify(e, null, 2)}`)
console.error(`We were uploading a file to S3 but then we encountered an exception!`)
console.error(e)
throw e
} else {
throw new Error(`error of some sort ${JSON.stringify(e, null, 2)}`)
@ -164,7 +178,14 @@ export default class Record {
this.counter += data.length
})
this.uploadStream.on('close', () => {
console.log('[!!!] upload stream has closed')
// if uploadStream closes before inputStream, throw an error.
if (!this.inputStream.closed) {
const msg = 'upload stream closed before download stream, which suggests the S3 upload failed.'
console.error(msg)
throw new UploadStreamClosedError(msg);
} else {
console.log('upload stream has closed. In this instance it is OK since the input stream is also closed.')
}
})
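// (Note: this throw happens inside a stream 'close' handler, so it does not
// reject an awaited upload promise; it surfaces as an uncaught exception.
// The assumption is that crashing the process here is acceptable so the
// recording job gets retried by another worker.)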
this.uploadStream.on('error', (e) => {
console.error('there was an error on the uploadStream. error as follows')

View File

@ -0,0 +1,40 @@
import 'dotenv/config'
const requiredEnvVars = [
'S3_ACCESS_KEY_ID',
'S3_SECRET_ACCESS_KEY',
'S3_REGION',
'S3_ENDPOINT',
'S3_BUCKET',
'POSTGREST_URL',
'AUTOMATION_USER_JWT',
] as const;
const getEnvVar = (key: typeof requiredEnvVars[number]): string => {
const value = process.env[key];
if (!value) {
throw new Error(`Missing ${key} env var`);
}
return value;
};
export interface Config {
postgrestUrl: string;
automationUserJwt: string;
s3AccessKeyId: string;
s3SecretAccessKey: string;
s3Region: string;
s3Bucket: string;
s3Endpoint: string;
}
export const configs: Config = {
postgrestUrl: getEnvVar('POSTGREST_URL'),
automationUserJwt: getEnvVar('AUTOMATION_USER_JWT'),
s3AccessKeyId: getEnvVar('S3_ACCESS_KEY_ID'),
s3SecretAccessKey: getEnvVar('S3_SECRET_ACCESS_KEY'),
s3Region: getEnvVar('S3_REGION'),
s3Bucket: getEnvVar('S3_BUCKET'),
s3Endpoint: getEnvVar('S3_ENDPOINT'),
}
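// Usage sketch: importing this module anywhere fails fast at startup when a
// required variable is missing, e.g.
//   import { configs } from './config.ts'
//   const bucket = configs.s3Bucket // would have thrown "Missing S3_BUCKET env var" at import time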

View File

@ -10,9 +10,6 @@ import { fileURLToPath } from 'url';
import { getPackageVersion } from '@futureporn/utils';
import type { GraphileConfig } from "graphile-config";
import type {} from "graphile-worker";
import start_recording from './tasks/start_recording.ts';
import { stop_recording } from './tasks/stop_recording.ts';
import record from './tasks/record.ts'
const __dirname = dirname(fileURLToPath(import.meta.url));
const version = getPackageVersion(join(__dirname, '../package.json'))
@ -30,7 +27,9 @@ const preset: GraphileConfig.Preset = {
};
async function api() {
async function doRunApi() {
if (!process.env.PORT) throw new Error('PORT is missing in env');
console.log(`api FUNCTION listening on PORT ${process.env.PORT}`)
const PORT = parseInt(process.env.PORT!)
@ -54,20 +53,42 @@ async function api() {
})
}
async function worker(workerUtils: WorkerUtils) {
async function doRunWorker(workerUtils: WorkerUtils) {
let workerIds: string[] = []
const runnerOptions: RunnerOptions = {
preset,
concurrency,
// taskDirectory: join(__dirname, 'tasks'),
taskList: {
'record': record,
'start_recording': start_recording,
'stop_recording': stop_recording
}
taskDirectory: join(__dirname, 'tasks'),
}
const runner = await graphileRun(runnerOptions)
if (!runner) throw new Error('failed to initialize graphile worker');
/**
* This is likely only relevant during development.
* if nodemon restarts us, we need to unlock the graphile-worker job so it gets retried immediately by another worker.
*
*/
runner.events.on('worker:create', ({ worker }) => {
// There is no way to enumerate workerIds on demand when the SIGUSR2 comes in, so we collect the IDs ahead of time.
workerIds.push(worker.workerId)
})
process.on('SIGUSR2', async () => {
console.warn(`SIGUSR2 detected! unlocking ${workerIds.length} workers, workerIds=${workerIds}`)
await workerUtils.forceUnlockWorkers(workerIds)
process.kill(process.pid, 'SIGTERM');
})
runner.events.on("pool:gracefulShutdown", async ({ workerPool, message }) => {
const workerIds = workerPool._workers.map((w) => w.workerId)
console.warn(`gracefulShutdown detected. releasing job locks on ${workerIds.length} workers, workerIds=${workerIds}, message=${message}`);
await workerUtils.forceUnlockWorkers(workerIds)
});
await runner.promise
}
@ -79,9 +100,9 @@ async function main() {
console.log(`@futureporn/capture version ${version} (FUNCTION=${process.env.FUNCTION})`)
if (process.env.FUNCTION === 'api') {
api()
doRunApi()
} else if (process.env.FUNCTION === 'worker') {
worker(workerUtils)
doRunWorker(workerUtils)
} else {
throw new Error('process.env.FUNCTION must be either api or worker. got '+process.env.FUNCTION)
}
@ -90,5 +111,5 @@ async function main() {
main().catch((err) => {
console.error('there was an error!')
console.error(err);
process.exit(1);
process.exit(874);
});

View File

@ -1,9 +1,115 @@
/**
*
* # notes
*
* # creation
*
* ## api.records
*
* id: 2
* url: 'https://chaturbate.com/example'
* discord_message_id: 238492348324
* recording_state: 'pending'
* is_aborted: false
* created_at: 2024-08-15T21:36:27.796Z
* updated_at: 2024-08-15T21:36:27.796Z
*
* ## api.segments
*
* id: 5
* s3_key: example-date-cuid.mp4
* s3_id: 2342309492348324
* bytes: 0
* created_at: 2024-08-15T21:36:27.796Z
* updated_at: 2024-08-15T21:36:27.796Z
*
* ## api.records_segments_links
*
* id: 9
* stream_id: 2
* segment_id: 5
* segment_order: 0
* created_at: 2024-08-15T21:36:27.796Z
* updated_at: 2024-08-15T21:36:27.796Z
*
* # progress
*
* ## api.records
*
* id: 2
* url: 'https://chaturbate.com/example'
* discord_message_id: 238492348324
* recording_state: 'recording'
* is_aborted: false
* created_at: 2024-08-15T21:36:27.796Z
* updated_at: 2024-08-15T21:37:37.168Z
*
* ## api.segments
*
* id: 5
* s3_key: example-2024-08-15-72ff4b5ae7dae73b.mp4
* s3_id: 2342309492348324
* bytes: 8384
* created_at: 2024-08-15T21:36:27.796Z
* updated_at: 2024-08-15T21:37:37.168Z
*
*
* # new segment
*
* ## api.segments
*
* id: 6
* s3_key: example-2024-08-15-cda21be5e54621f2.mp4
* s3_id: a974eb6e194b7987
* bytes: 0
* created_at: 2024-08-15T21:38:34.878Z
* updated_at: 2024-08-15T21:38:34.878Z
*
* ## api.records_segments_links
*
* id: 10
* stream_id: 2
* segment_id: 6
* segment_order: 1
* created_at: 2024-08-15T21:38:34.878Z
* updated_at: 2024-08-15T21:38:34.878Z
*
* # progress
*
* ## api.segments
*
* id: 6
* s3_key: example-2024-08-15-cda21be5e54621f2.mp4
* s3_id: a974eb6e194b7987
* bytes: 1024
* created_at: 2024-08-15T21:38:34.878Z
* updated_at: 2024-08-15T21:39:11.437Z
*
* # completion
*
* ## api.records
*
* id: 2
* url: 'https://chaturbate.com/example'
* discord_message_id: 238492348324
* recording_state: 'finished'
* is_aborted: false
* created_at: 2024-08-15T21:36:27.796Z
* updated_at: 2024-08-15T21:39:41.692Z
*
*/
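// A rough SQL sketch of the schema implied by the notes above (column names
// taken from the examples; the types are assumptions -- the real DDL lives in
// the migrations):
//
//   create table api.segments (
//     id         bigint generated always as identity primary key,
//     s3_key     text not null,
//     s3_id      text,
//     bytes      bigint not null default 0,
//     created_at timestamptz not null default now(),
//     updated_at timestamptz not null default now()
//   );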
import querystring from 'node:querystring'
import { Helpers, type Task } from 'graphile-worker'
import Record from '../Record.ts'
import { getPlaylistUrl } from '@futureporn/scout/ytdlp.ts'
import type { RecordingState } from '@futureporn/types'
import type { RecordingState, RecordingRecord, Segment } from '@futureporn/types'
import { add } from 'date-fns'
import { backOff } from "exponential-backoff";
import { configs } from '../config.ts'
import qs from 'qs'
import { createId } from '@paralleldrive/cuid2'
/**
* url is the URL to be recorded. Ex: chaturbate.com/projektmelody
@ -12,189 +118,242 @@ import { add } from 'date-fns'
*/
interface Payload {
url: string;
record_id: number;
stream_id: string;
}
interface RecordingRecord {
id: number;
recordingState: RecordingState;
fileSize: number;
discordMessageId: string;
isAborted: boolean;
}
interface RawRecordingRecord {
id: number;
recording_state: RecordingState;
file_size: number;
discord_message_id: string;
is_aborted: boolean;
}
function assertPayload(payload: any): asserts payload is Payload {
if (typeof payload !== "object" || !payload) throw new Error("invalid payload");
if (typeof payload.url !== "string") throw new Error("invalid url");
if (typeof payload.record_id !== "number") throw new Error(`invalid record_id=${payload.record_id}`);
if (typeof payload.stream_id !== "string") throw new Error(`invalid stream_id=${payload.stream_id}`);
}
function assertEnv() {
if (!process.env.S3_ACCESS_KEY_ID) throw new Error('S3_ACCESS_KEY_ID was missing in env');
if (!process.env.S3_SECRET_ACCESS_KEY) throw new Error('S3_SECRET_ACCESS_KEY was missing in env');
if (!process.env.S3_REGION) throw new Error('S3_REGION was missing in env');
if (!process.env.S3_ENDPOINT) throw new Error('S3_ENDPOINT was missing in env');
if (!process.env.S3_BUCKET) throw new Error('S3_BUCKET was missing in env');
if (!process.env.POSTGREST_URL) throw new Error('POSTGREST_URL was missing in env');
if (!process.env.AUTOMATION_USER_JWT) throw new Error('AUTOMATION_USER_JWT was missing in env');
}
async function getRecording(url: string, recordId: number, helpers: Helpers) {
async function getRecordInstance(url: string, segment_id: number, helpers: Helpers) {
const abortController = new AbortController()
const abortSignal = abortController.signal
const accessKeyId = process.env.S3_ACCESS_KEY_ID!;
const secretAccessKey = process.env.S3_SECRET_ACCESS_KEY!;
const region = process.env.S3_REGION!;
const endpoint = process.env.S3_ENDPOINT!;
const bucket = process.env.S3_BUCKET!;
const accessKeyId = configs.s3AccessKeyId;
const secretAccessKey = configs.s3SecretAccessKey;
const region = configs.s3Region;
const endpoint = configs.s3Endpoint;
const bucket = configs.s3Bucket;
const playlistUrl = await getPlaylistUrl(url)
const s3Client = Record.makeS3Client({ accessKeyId, secretAccessKey, region, endpoint })
const inputStream = Record.getFFmpegStream({ url: playlistUrl })
const onProgress = (fileSize: number) => {
updateDatabaseRecord({ recordId, recordingState: 'recording', fileSize }).then(checkIfAborted).then((isAborted) => isAborted ? abortController.abort() : null)
updateDatabaseRecord({ segment_id, fileSize, helpers })
.then(checkIfAborted)
.then((isAborted) => {
isAborted ? abortController.abort() : null
})
.catch((e) => {
helpers.logger.error('caught error while updatingDatabaseRecord inside onProgress inside getRecordInstance')
helpers.logger.error(e)
})
}
const record = new Record({ inputStream, onProgress, bucket, s3Client, jobId: ''+recordId, abortSignal })
const record = new Record({ inputStream, onProgress, bucket, s3Client, jobId: ''+segment_id, abortSignal })
return record
}
function checkIfAborted(record: RawRecordingRecord): boolean {
return (record.is_aborted)
function checkIfAborted(segment: Partial<Segment>): boolean {
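// PostgREST embeds the related streams as an array under `stream`
// (see the `select=stream:streams(...)` clause in updateDatabaseRecord below), hence the .at(0).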
return (!!segment?.stream?.at(0)?.is_recording_aborted)
}
async function updateDatabaseRecord({
recordId,
recordingState,
fileSize
segment_id,
fileSize,
helpers
}: {
recordId: number,
recordingState: RecordingState,
fileSize: number
}): Promise<RawRecordingRecord> {
// console.log(`updating database record with recordId=${recordId}, recordingState=${recordingState}, fileSize=${fileSize}`)
segment_id: number,
fileSize: number,
helpers: Helpers
}): Promise<Segment> {
const payload: any = {
file_size: fileSize
bytes: fileSize
}
if (recordingState) payload.recording_state = recordingState;
const res = await fetch(`${process.env.POSTGREST_URL}/records?id=eq.${recordId}`, {
const res = await fetch(`${configs.postgrestUrl}/segments?id=eq.${segment_id}&select=stream:streams(is_recording_aborted)`, {
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
'Accepts': 'application/json',
'Accept': 'application/json',
'Prefer': 'return=representation',
'Authorization': `Bearer ${process.env.AUTOMATION_USER_JWT}`
'Authorization': `Bearer ${configs.automationUserJwt}`
},
body: JSON.stringify(payload)
})
if (!res.ok) {
const body = await res.text()
throw new Error(`failed to updateDatabaseRecord. status=${res.status}, statusText=${res.statusText}, body=${body}`);
const msg = `failed to updateDatabaseRecord. status=${res.status}, statusText=${res.statusText}, body=${body}`
helpers.logger.error(msg)
throw new Error(msg);
}
const body = await res.json() as RawRecordingRecord[];
if (!body[0]) throw new Error(`failed to get a record that matched recordId=${recordId}`)
// helpers.logger.info(`response was OK~`)
const body = await res.json() as Segment[];
if (!body[0]) throw new Error(`failed to get a segment that matched segment_id=${segment_id}`);
const bod = body[0]
// helpers.logger.info('the following was the response from PATCH-ing /segments')
// helpers.logger.info(JSON.stringify(bod))
return bod
}
const getSegments = async function getSegments(stream_id: string): Promise<Segment> {
if (!stream_id) throw new Error('getSegments requires {String} stream_id as first arg');
const res = await fetch(`${configs.postgrestUrl}/segments_stream_links?stream_id=eq.${stream_id}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
'Prefer': 'return=representation'
},
})
if (!res.ok) {
const body = await res.text()
throw new Error(`failed to getSegments. status=${res.status}, statusText=${res.statusText}, body=${body}`);
}
const body = await res.json() as Segment[];
if (!body[0]) throw new Error(`failed to get segments that matched stream_id=${stream_id}`)
return body[0]
}
export const record: Task = async function (payload, helpers) {
console.log(payload)
const createSegment = async function createSegment(s3_key: string, helpers: Helpers): Promise<number> {
if (!s3_key) throw new Error('createSegment requires {string} s3_key as first arg');
const segmentPayload = {
s3_key
}
helpers.logger.info(`Creating segment with s3_key=${s3_key}. payload as follows`)
helpers.logger.info(JSON.stringify(segmentPayload))
const res = await fetch(`${configs.postgrestUrl}/segments`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
'Prefer': 'return=headers-only',
'Authorization': `Bearer ${configs.automationUserJwt}`
},
body: JSON.stringify(segmentPayload)
})
if (!res.ok) {
const body = await res.text()
const msg = `failed to create Segment. status=${res.status}, statusText=${res.statusText}, body=${body}`
helpers.logger.error(msg)
throw new Error(msg);
}
const location = res.headers.get('location')
if (!location) throw new Error(`failed to get location header in response from postgrest`);
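// PostgREST's Location header looks like '/segments?id=eq.42' (illustrative id);
// querystring.parse() yields { '/segments?id': 'eq.42' }, so we split on '.' to get the id.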
const parsedQuery = querystring.parse(location)
const segmentsId = parsedQuery['/segments?id']
if (!segmentsId) throw new Error('segmentsId was undefined which is unexpected');
if (Array.isArray(segmentsId)) throw new Error('segmentsId was an array which is unexpected');
const id = segmentsId.split('.').at(-1)
if (!id) throw new Error('failed to parse id from location header');
return parseInt(id)
}
const createSegmentsStreamLink = async function createSegmentsStreamLink(stream_id: string, segment_id: number, helpers: Helpers): Promise<number> {
if (!stream_id) throw new Error('createSegmentsStreamLink requires {string} stream_id as first arg');
if (!segment_id) throw new Error('createSegmentsStreamLink requires {Number} segment_id as second arg');
const segmentStreamLinkPayload = {
stream_id,
segment_id
}
const res = await fetch(`${configs.postgrestUrl}/segments_stream_links`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
'Prefer': 'return=headers-only',
'Authorization': `Bearer ${configs.automationUserJwt}`,
},
body: JSON.stringify(segmentStreamLinkPayload)
})
if (!res.ok) {
const body = await res.text()
throw new Error(`failed to create SegmentsStreamLink. status=${res.status}, statusText=${res.statusText}, body=${body}`);
}
const location = res.headers.get('location')
if (!location) throw new Error(`failed to get location header in response from postgrest`);
const parsedQuery = querystring.parse(location)
const segmentsId = parsedQuery['/segments_stream_links?id']
if (!segmentsId) throw new Error('segments_stream_links?id was undefined which is unexpected');
if (Array.isArray(segmentsId)) throw new Error('segments_stream_links?id was an array which is unexpected');
const id = segmentsId.split('.').at(-1)
if (!id) throw new Error('failed to parse id from location header');
return parseInt(id)
}
/**
* # doRecordSegment
*
* Record a segment of a livestream using ffmpeg.
*
* Ideally, we record the entire livestream, but the universe is not so kind. Network interruptions are common, so we handle the situation as best we can.
*
* This function creates new segments and segments_stream_links entries in the db via the PostgREST REST API.
*
* This function also names the S3 file (s3_key) with a datestamp and a cuid.
*/
const doRecordSegment = async function doRecordSegment(url: string, stream_id: string, helpers: Helpers): Promise<void> {
const s3_key = `${new Date().toISOString()}-${createId()}.ts`
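// e.g. '2024-08-15T21:36:27.796Z-tz4a98xxat96iws9zmbrgj3a.ts' (hypothetical cuid2 id)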
helpers.logger.info(`let's create a segment...`)
const segment_id = await createSegment(s3_key, helpers)
helpers.logger.info(`let's create a segmentsStreamLink...`)
const segmentsStreamLinkId = await createSegmentsStreamLink(stream_id, segment_id, helpers)
helpers.logger.info(`starting the recording with segmentsStreamLinkId=${segmentsStreamLinkId}, stream_id=${stream_id}, segment_id=${segment_id}, url=${url}`)
const record = await getRecordInstance(url, segment_id, helpers)
await record.start()
}
export const record: Task = async function (payload: unknown, helpers: Helpers) {
assertPayload(payload)
assertEnv()
const { url, record_id } = payload
// let interval
const { url, stream_id } = payload
const streamId = stream_id
try {
// every 30s, we
// 1. update the db record with the filesize
// 2. poll db to see if our job has been aborted by the user
// interval = setInterval(async () => {
// try {
// helpers.logger.info(`updateDatabaseRecord()`)
// const recordingState: RecordingState = 'recording'
// const fileSize = record.counter
// const updatePayload = { recordingState, recordId, fileSize }
// const updatedRecord = await updateDatabaseRecord(updatePayload)
// if (updatedRecord.isAborted) {
// helpers.logger.info(`record ${recordId} has been aborted by a user so we stop the recording now.`)
// abortController.abort()
// }
// } catch (e) {
// helpers.logger.error(`error while updating database. For sake of the recording in progress we are ignoring the following error. ${e}`)
// }
// }, 3000)
// start recording and await the S3 upload being finished
const recordId = record_id
const record = await getRecording(url, recordId, helpers)
await record.start()
/**
* We wrap the recording in an exponential backoff timer: if the Record() instance throws an error, we try again after a delay.
* The backoff takes effect only when Record() throws an error.
* If Record() returns, as is the case when the stream ends, this backoff timer will not retry.
* This does not handle the corner case where the streamer's internet temporarily goes down and their stream drops.
*
* @todo We must implement retrying at a higher level, and retry a few times to handle this type of corner case.
*/
// await backOff(() => doRecordSegment(url, recordId, helpers))
await doRecordSegment(url, streamId, helpers)
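// A minimal sketch (hypothetical, not wired in) of the higher-level retry from
// the @todo above: retry doRecordSegment with backOff, but stop retrying once
// an admin aborts the recording.
//
// await backOff(() => doRecordSegment(url, streamId, helpers), {
//   numOfAttempts: 5,
//   startingDelay: 5000,
//   retry: (e: any) => e?.name !== 'AbortError'
// })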
} catch (e) {
helpers.logger.error(`caught an error during record(). error as follows`)
// await updateDatabaseRecord({ recordId: stream_id, recordingState: 'failed' })
helpers.logger.error(`caught an error during record Task`)
if (e instanceof Error) {
helpers.logger.error(e.message)
helpers.logger.info(`error.name=${e.name}`)
if (e.name === 'RoomOfflineError') {
// If room is offline, we want to retry until graphile-worker retries expire.
// We don't want to swallow the error so we simply log the error then let the below throw re-throw the error
// graphile-worker will retry when we re-throw the error below.
helpers.logger.info(`Room is offline.`)
} else if (e.name === 'AbortError') {
// If the recording was aborted by an admin, we want graphile-worker to stop retrying the record job.
// We swallow the error and return in order to mark the job as succeeded.
helpers.logger.info(`>>> we got an AbortError so we are ending the record job.`)
return
} else {
helpers.logger.error(e.message)
}
} else {
helpers.logger.error(JSON.stringify(e))
}
// we throw the error which fails the graphile-worker job, thus causing graphile-worker to restart/retry the job.
helpers.logger.error(`we got an error during record Task so we throw and retry`)
throw e
}
// const recordId = await createRecordingRecord(payload, helpers)
// const { url } = payload;
// console.log(`@todo simulated start_recording with url=${url}, recordId=${recordId}`)
// await helpers.addJob('record', { url, recordId })
}
/**
* Here we middleman the stream from FFmpeg --> S3,
* counting bits and creating graphile jobs to inform the UI of our progress
*/
// const transformStreamFactory = (recordId: number, helpers: Helpers): PassThrough => {
// let counter = 0
// return new PassThrough ({
// async transform(chunk, controller) {
// controller.enqueue(chunk) // we don't actually transform anything here. we're only gathering statistics.
// counter += chunk.length
// if (counter % (1 * 1024 * 1024) <= 1024) {
// helpers.logger.info(`Updating record ${recordId}`)
// try {
// await updateDatabaseRecord({ fileSize: counter, recordId, recordingState: 'recording' })
// } catch (e) {
// helpers.logger.warn(`We are ignoring the following error which occured while updating db record ${e}`)
// }
// }
// },
// flush() {
// helpers.logger.info(`transformStream has flushed.`)
// }
// })
// }
// export const recordNg: Task = async function (payload, helpers) {
// assertPayload(payload)
// const { url, recordId } = payload
// try {
// const abortController = new AbortController()
// const abortSignal = abortController.signal
// const inputStream =
// const transformStream = transformStreamFactory(recordId, helpers)
// const record = new Record({ inputStream, abortSignal, transformStream })
// await record.done()
// } catch (e) {
// console.error(`error during recording. error as follows`)
// console.error(e)
// } finally {
// helpers.addJob('updateDiscordMessage', { recordId }, { maxAttempts: 3, runAt: add(new Date(), { seconds: 5 }) })
// }
// }
export default record

View File

@ -1,77 +0,0 @@
import Record from '../Record.ts'
import { getPlaylistUrl } from '@futureporn/scout/ytdlp.ts'
import 'dotenv/config'
import { type Job } from 'pg-boss'
import { backOff } from 'exponential-backoff'
export interface RecordJob extends Job {
data: {
url: string;
}
}
async function _record (job: RecordJob, retries?: number): Promise<string> {
if (!process.env.S3_BUCKET_NAME) throw new Error('S3_BUCKET_NAME was undefined in env');
if (!process.env.S3_ENDPOINT) throw new Error('S3_ENDPOINT was undefined in env');
if (!process.env.S3_REGION) throw new Error('S3_REGION was undefined in env');
if (!process.env.S3_ACCESS_KEY_ID) throw new Error('S3_ACCESS_KEY_ID was undefined in env');
if (!process.env.S3_SECRET_ACCESS_KEY) throw new Error('S3_SECRET_ACCESS_KEY was undefined in env');
if (!job) throw new Error('Job sent to job worker execution callback was empty!!!');
const { url } = job.data;
console.log(`'record' job ${job!.id} begin with url=${url}`)
const bucket = process.env.S3_BUCKET_NAME!
const endpoint = process.env.S3_ENDPOINT!
const region = process.env.S3_REGION!
const accessKeyId = process.env.S3_ACCESS_KEY_ID!
const secretAccessKey = process.env.S3_SECRET_ACCESS_KEY!
let playlistUrl
try {
playlistUrl = await getPlaylistUrl(url)
console.log(`playlistUrl=${playlistUrl}`)
} catch (e) {
console.error('error during getPlaylistUrl()')
console.error(e)
throw e
}
const jobId = job.id
const s3Client = Record.makeS3Client({ accessKeyId, secretAccessKey, region, endpoint })
const inputStream = Record.getFFmpegStream({ url: playlistUrl })
const record = new Record({ inputStream, bucket, s3Client, jobId })
await record.start()
console.log(`record job ${job.id} complete`)
return job.id
}
export default async function main (jobs: RecordJob[]): Promise<any> {
// @todo why are we passed multiple jobs? I'm expecting only one.
const backOffOptions = {
numOfAttempts: 5,
startingDelay: 5000,
retry: (e: any, attemptNumber: number) => {
console.log(`Record Job is retrying. Attempt number ${attemptNumber}. e=${JSON.stringify(e, null, 2)}`)
return true
}
}
for (const j of jobs) {
console.log(`record job ${j.id} GO GO GO`)
try {
await backOff(() => _record(j), backOffOptions)
} catch (e) {
console.warn(`record job ${j.id} encountered the following error.`)
console.error(e)
}
console.log(`record job ${j.id} is finished.`)
}
};

View File

@ -1,67 +0,0 @@
import { Helpers, type Task } from 'graphile-worker'
import { add } from 'date-fns'
/**
* url is the URL to be recorded. Ex: chaturbate.com/projektmelody
* discordMessageId is the ID of the discord message which displays recording status.
* we use the ID to update the message later, and/or relate button press events to this record task
*/
interface Payload {
url: string;
discordMessageId: string;
isAborted: boolean;
}
function assertPayload(payload: any): asserts payload is Payload {
if (typeof payload !== "object" || !payload) throw new Error("invalid payload");
if (typeof payload.url !== "string") throw new Error("invalid url");
if (typeof payload.discordMessageId !== "string") throw new Error(`invalid discordMessageId=${payload.discordMessageId}`);
}
function assertEnv() {
if (!process.env.AUTOMATION_USER_JWT) throw new Error('AUTOMATION_USER_JWT was missing in env');
if (!process.env.POSTGREST_URL) throw new Error('POSTGREST_URL was missing in env');
}
async function createRecordingRecord(payload: Payload, helpers: Helpers): Promise<number> {
const { url, discordMessageId } = payload
const record = {
url,
discord_message_id: discordMessageId,
recording_state: 'pending',
file_size: 0
}
const res = await fetch(`${process.env.POSTGREST_URL}/records`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${process.env.AUTOMATION_USER_JWT}`,
'Prefer': 'return=headers-only'
},
body: JSON.stringify(record)
})
if (!res.ok) {
const status = res.status
const statusText = res.statusText
throw new Error(`fetch failed to create recording record in database. status=${status}, statusText=${statusText}`)
}
helpers.logger.info('res.headers.location as follows.')
helpers.logger.info(res.headers.get('location')!)
const id = res.headers.get('location')?.split('.').at(-1)
if (!id) throw new Error('id could not be parsed from location header');
return parseInt(id)
}
export const start_recording: Task = async function (payload, helpers) {
assertPayload(payload)
assertEnv()
const recordId = await createRecordingRecord(payload, helpers)
const { url } = payload;
await helpers.addJob('record', { url, recordId }, { maxAttempts: 3, jobKey: `record_${recordId}` })
const runAt = add(new Date(), { seconds: 10 })
await helpers.addJob('updateDiscordMessage', { recordId }, { jobKey: `record_${recordId}_update_discord_message`, maxAttempts: 3, runAt })
helpers.logger.info(`startRecording() with url=${url}, recordId=${recordId}, (updateDiscordMessage runAt=${runAt})`)
}
export default start_recording

View File

@ -1,18 +0,0 @@
import { type Task } from 'graphile-worker'
interface Payload {
id: string
}
function assertPayload(payload: any): asserts payload is Payload {
if (typeof payload !== "object" || !payload) throw new Error("invalid payload");
if (typeof payload.id !== "string") throw new Error("invalid id");
}
export const stop_recording: Task = async function (payload) {
assertPayload(payload)
const { id } = payload;
console.log(`@todo simulated stop_recording with id=${id}`)
}

View File

@ -25,7 +25,7 @@
// Include the necessary files for your project
"include": [
"src/**/*.ts"
],
, "../bot/src/tasks/restart_failed_recordings.ts" ],
"exclude": [
"node_modules"
]

View File

@ -4,9 +4,24 @@ Here we handle migrations for the postgrest database.
@see https://github.com/thomwright/postgres-migrations
Reminder: only write migrations that affect schema. (don't write migrations that affect data)
## K.I.S.S.
Keep It Stupidly Simple.
We are keeping this module as simple as possible. This means pure JS (no typescript!)
## troubleshooting
If you see the following error, graphile_worker likely hasn't had a chance to create its functions. Make sure a graphile_worker instance is running, so it can automatically create the necessary functions.
```json
{
"code": "42883",
"details": null,
"hint": "No function matches the given name and argument types. You might need to add explicit type casts.",
"message": "function graphile_worker.add_job(text, json, max_attempts => integer) does not exist"
}
```
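
To confirm whether the function exists yet, here's a quick sanity check (assuming you can reach the database with psql or similar):

```sql
SELECT p.proname
FROM pg_proc p
JOIN pg_namespace n ON n.oid = p.pronamespace
WHERE n.nspname = 'graphile_worker'
  AND p.proname = 'add_job';
```

If this returns no rows, start graphile-worker and give it a moment to run its own setup.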

View File

@ -2,6 +2,8 @@ import {migrate} from 'postgres-migrations'
import path, { dirname } from 'node:path'
import { fileURLToPath } from 'url';
import 'dotenv/config'
const __dirname = dirname(fileURLToPath(import.meta.url));
if (!process.env.DATABASE_PASSWORD) throw new Error('DATABASE_PASSWORD is missing in env');
@ -23,7 +25,7 @@ async function main() {
defaultDatabase: "postgres"
}
await migrate(dbConfig, path.join(__dirname, "./migrations/"))
await migrate(dbConfig, path.join(__dirname, "./migrations/"), { logger: console.log })
}

View File

@ -0,0 +1,5 @@
ALTER TABLE IF EXISTS api.records
ADD COLUMN created_at timestamp(6) without time zone;
ALTER TABLE IF EXISTS api.records
ADD COLUMN updated_at timestamp(6) without time zone;

View File

@ -0,0 +1,7 @@
ALTER TABLE IF EXISTS api.records
ADD CONSTRAINT created_at_not_null
CHECK (created_at IS NOT NULL) NOT VALID;
ALTER TABLE IF EXISTS api.records
ADD CONSTRAINT updated_at_not_null
CHECK (updated_at IS NOT NULL) NOT VALID;

View File

@ -0,0 +1,26 @@
-- In the prev. migration I added a CHECK, but I forgot to add the default
ALTER TABLE IF EXISTS api.records
ALTER COLUMN created_at SET DEFAULT now();
ALTER TABLE IF EXISTS api.records
ALTER COLUMN updated_at SET DEFAULT now();
-- create a function which updates the row's updated_at
CREATE FUNCTION public.tg__updated_at() RETURNS trigger
LANGUAGE plpgsql
SET search_path TO 'pg_catalog', 'public', 'pg_temp'
AS $$
BEGIN
NEW.updated_at = now();
RETURN NEW;
END;
$$;
-- create a trigger which runs the above function when a /record is updated
CREATE TRIGGER record_updated_at
AFTER UPDATE ON api.records
FOR EACH ROW
EXECUTE PROCEDURE public.tg__updated_at();

View File

@ -0,0 +1,140 @@
-- vtubers table
CREATE TABLE api.vtubers (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
display_name TEXT NOT NULL,
chaturbate TEXT,
twitter TEXT,
patreon TEXT,
twitch TEXT,
tiktok TEXT,
onlyfans TEXT,
youtube TEXT,
linktree TEXT,
carrd TEXT,
fansly TEXT,
pornhub TEXT,
discord TEXT,
reddit TEXT,
throne TEXT,
instagram TEXT,
facebook TEXT,
merch TEXT,
slug TEXT NOT NULL,
description1 TEXT,
description2 TEXT,
image TEXT NOT NULL,
theme_color VARCHAR(7) NOT NULL,
image_blur TEXT DEFAULT 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAABmJLR0QA/wD/AP+gvaeTAAAADUlEQVQImWMwtf//HwAEkwJzh0T9qwAAAABJRU5ErkJggg==',
fansly_id TEXT,
chaturbate_id TEXT,
twitter_id TEXT
-- F.Y.I., relations as follows
-- toys (one-to-many)
-- vods (one-to-many)
-- streams (one-to-many)
);
GRANT all ON api.vtubers TO automation;
GRANT SELECT ON api.vtubers TO web_anon;
-- streams table
CREATE TABLE api.streams (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
url TEXT NOT NULL,
platform_notification_type TEXT,
date timestamp(6) without time zone,
created_at timestamp(6) without time zone,
vtuber uuid,
FOREIGN KEY (vtuber) REFERENCES api.vtubers(id),
tweet TEXT,
archive_status TEXT,
is_chaturbate_stream BOOLEAN,
is_fansly_stream BOOLEAN
);
GRANT all ON api.streams TO automation;
GRANT SELECT ON api.streams TO web_anon;
-- toys table
CREATE TABLE api.toys (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
-- relation. one toy to many tags
-- relation. one toy to many vtubers
make TEXT NOT NULL,
model TEXT NOT NULL,
image TEXT NOT NULL DEFAULT 'https://futureporn-b2.b-cdn.net/default-thumbnail.webp'
);
GRANT all ON api.toys TO automation;
GRANT SELECT ON api.toys TO web_anon;
-- tags table
CREATE TABLE api.tags (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
name TEXT NOT NULL UNIQUE,
toy_id uuid,
FOREIGN KEY (toy_id) REFERENCES api.toys
);
GRANT all ON api.tags TO automation;
GRANT SELECT ON api.tags TO web_anon;
-- toys-tags junction table
CREATE TABLE api.toys_tags(
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
toy_id uuid,
tag_id uuid,
CONSTRAINT fk_toys FOREIGN KEY(toy_id) REFERENCES api.toys(id),
CONSTRAINT fk_tags FOREIGN KEY(tag_id) REFERENCES api.tags(id)
);
GRANT all ON api.toys_tags TO automation;
GRANT SELECT ON api.toys_tags TO web_anon;
-- tags-vods junction table
-- toys-vtubers junction table
CREATE TABLE api.toys_vtubers(
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
toy_id uuid,
vtuber_id uuid,
CONSTRAINT fk_toys FOREIGN KEY(toy_id) REFERENCES api.toys(id),
CONSTRAINT fk_vtubers FOREIGN KEY(vtuber_id) REFERENCES api.vtubers(id)
);
GRANT all ON api.toys_vtubers TO automation;
GRANT SELECT ON api.toys_vtubers TO web_anon;
-- vods table
CREATE TABLE api.vods (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
stream_id uuid NOT NULL,
FOREIGN KEY (stream_id) REFERENCES api.streams(id),
video_cid TEXT UNIQUE,
CONSTRAINT check_video_cid CHECK (video_cid ~ 'Qm[1-9A-HJ-NP-Za-km-z]{44,}|b[A-Za-z2-7]{58,}|B[A-Z2-7]{58,}|z[1-9A-HJ-NP-Za-km-z]{48,}|F[0-9A-F]{50,}'),
announce_title TEXT,
announce_url TEXT,
note TEXT,
date timestamp(6) without time zone,
spoilers TEXT,
title TEXT,
uploader uuid,
mux_asset_id TEXT,
mux_playback_id TEXT,
s3_key TEXT,
s3_id TEXT,
thumbnail TEXT
);
GRANT all ON api.vods TO automation;
GRANT SELECT ON api.vods TO web_anon;
-- tags-vods junction table
CREATE TABLE api.tags_vods(
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
tag_id uuid,
vod_id uuid,
CONSTRAINT fk_tags FOREIGN KEY(tag_id) REFERENCES api.tags(id),
CONSTRAINT fk_vods FOREIGN KEY(vod_id) REFERENCES api.vods(id)
);
GRANT all ON api.tags_vods TO automation;
GRANT SELECT ON api.tags_vods TO web_anon;

View File

@ -0,0 +1,7 @@
-- we add the concept of segments to api.records
-- implemented as a multidimensional text array, s3_segments.
-- the first value is the s3 id, the second value is the s3 key
-- [id, key]
ALTER TABLE IF EXISTS api.records
ADD COLUMN s3_segments text[][];
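-- For illustration (hypothetical values), one [id, key] pair per segment:
-- s3_segments = '{{"someS3UploadId", "2024-08-15T21:36:27.796Z.ts"}}'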

View File

@ -0,0 +1,28 @@
-- we don't need the s3_segments multidimensional array. we're moving its functionality to a new table
ALTER TABLE IF EXISTS api.records
DROP COLUMN s3_segments;
-- segments table
CREATE TABLE api.segments (
id INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
s3_key TEXT NOT NULL,
s3_id TEXT NOT NULL,
bytes bigint DEFAULT 0
);
GRANT all ON api.segments TO automation;
GRANT SELECT ON api.segments TO web_anon;
-- records-segments join table
CREATE TABLE api.records_segments(
id INT GENERATED ALWAYS AS IDENTITY,
record_id INT NOT NULL,
segment_id INT NOT NULL,
CONSTRAINT fk_record FOREIGN KEY(record_id) REFERENCES api.records(id),
CONSTRAINT fk_segment FOREIGN KEY(segment_id) REFERENCES api.segments(id),
PRIMARY KEY(id, record_id, segment_id)
);
GRANT all ON api.records_segments TO automation;
GRANT SELECT ON api.records_segments TO web_anon;

View File

@ -0,0 +1,2 @@
ALTER TABLE IF EXISTS api.records_segments
ADD COLUMN segments_order INT NOT NULL DEFAULT 0;

View File

@ -0,0 +1,5 @@
ALTER TABLE IF EXISTS api.records_segments
DROP COLUMN segments_order;
ALTER TABLE IF EXISTS api.records_segments
ADD COLUMN segment_order INT NOT NULL DEFAULT 0;

View File

@ -0,0 +1,2 @@
DROP TABLE IF EXISTS api.records CASCADE;
DROP TABLE IF EXISTS api.records_segments CASCADE;

View File

@ -0,0 +1,16 @@
-- I forgot to actually create the new table
CREATE TABLE api.segments_stream_links (
id int PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
discord_message_id text NOT NULL,
capture_job_id text NOT NULL
);
-- roles & permissions
GRANT all ON api.segments_stream_links TO automation;
GRANT SELECT ON api.segments_stream_links TO web_anon;
-- there is no s3_id in the segments run context so we don't need a column for it
ALTER TABLE IF EXISTS api.segments
DROP COLUMN s3_id;

View File

@ -0,0 +1,8 @@
-- oops. bit by unfinished copy-paste
-- there is no s3_id in the segments run context so we don't need a column for it
ALTER TABLE IF EXISTS api.segments_stream_links
DROP COLUMN discord_message_id;
ALTER TABLE IF EXISTS api.segments_stream_links
DROP COLUMN capture_job_id;

View File

@ -0,0 +1,5 @@
ALTER TABLE IF EXISTS api.streams
ADD COLUMN updated_at timestamp(6) without time zone;
ALTER TABLE IF EXISTS api.streams
ADD COLUMN status TEXT NOT NULL;

View File

@ -0,0 +1,5 @@
ALTER TABLE IF EXISTS api.streams
DROP COLUMN IF EXISTS status;
ALTER TABLE api.streams
ADD COLUMN status TEXT NOT NULL DEFAULT 'pending_recording';

View File

@ -0,0 +1 @@
DROP TABLE IF EXISTS api.discord_interactions CASCADE;

View File

@ -0,0 +1,38 @@
-- delete outdated
DROP FUNCTION IF EXISTS public.tg__add_job();
-- We create a function which lets Postgrest's automation user create jobs in Graphile Worker.
-- Normally only the database owner, in our case `postgres`, can add jobs due to RLS in graphile_worker tables.
-- Under the advice of the graphile_worker author, we can use a SECURITY DEFINER wrapper function.
-- @see https://worker.graphile.org/docs/sql-add-job#graphile_workeradd_job:~:text=graphile_worker.add_job(...),that%20are%20necessary.)
-- @see https://discord.com/channels/489127045289476126/1179293106336694333/1179605043729670306
-- @see https://discord.com/channels/489127045289476126/498852330754801666/1067707497235873822
CREATE FUNCTION public.tg__add_record_job() RETURNS trigger
LANGUAGE plpgsql SECURITY DEFINER
SET search_path TO 'pg_catalog', 'public', 'pg_temp'
AS $$
begin
PERFORM graphile_worker.add_job('record', json_build_object(
'url', NEW.url,
'stream_id', NEW.id
), max_attempts := 12);
return NEW;
end;
$$;
-- when a stream is updated, we add a job in graphile to update_discord_message
CREATE TRIGGER stream_update
AFTER UPDATE ON api.streams
FOR EACH ROW
EXECUTE PROCEDURE public.tg__update_discord_message('update_discord_message');
-- when a stream is created, we add a 'record' job in graphile-worker
CREATE TRIGGER stream_create
AFTER INSERT ON api.streams
FOR EACH ROW
EXECUTE PROCEDURE public.tg__add_record_job('record');
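-- For example (hypothetical values), inserting a stream row enqueues a graphile-worker job like:
-- ('record', '{"url": "chaturbate.com/projektmelody", "stream_id": "<stream uuid>"}')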

View File

@ -0,0 +1,9 @@
DROP TABLE api.segments_stream_links;
CREATE TABLE api.segments_stream_links (
id int GENERATED ALWAYS AS IDENTITY,
stream_id UUID NOT NULL REFERENCES api.streams(id),
segment_id INT NOT NULL REFERENCES api.segments(id),
capture_job_id text NOT NULL,
PRIMARY KEY(id, stream_id, segment_id)
);

View File

@ -0,0 +1,2 @@
GRANT all ON api.segments_stream_links TO automation;
GRANT SELECT ON api.segments_stream_links TO web_anon;

View File

@ -0,0 +1,3 @@
ALTER TABLE IF EXISTS api.segments_stream_links
DROP COLUMN IF EXISTS capture_job_id;

View File

@ -0,0 +1,39 @@
ALTER TABLE api.segments
ADD COLUMN created_at TIMESTAMP(6) WITHOUT TIME ZONE;
ALTER TABLE api.segments
ADD COLUMN updated_at TIMESTAMP(6) WITHOUT TIME ZONE;
-- in migration 8, we already created tg__updated_at() so we don't need to create that,
-- but we do need to create a function which will set the row's created_at
CREATE FUNCTION public.tg__created_at() RETURNS trigger
LANGUAGE plpgsql
SET search_path TO 'pg_catalog', 'public', 'pg_temp'
AS $$
BEGIN
NEW.created_at = now();
RETURN NEW;
END;
$$;
-- create a trigger which runs the tg__updated_at() function when a /segment is updated
CREATE TRIGGER segment_updated_at
AFTER UPDATE ON api.segments
FOR EACH ROW
EXECUTE PROCEDURE public.tg__updated_at();
-- create a trigger which runs the tg__created_at() function when a /segment is created
CREATE TRIGGER segment_created_at
AFTER INSERT ON api.segments
FOR EACH ROW
EXECUTE PROCEDURE public.tg__created_at();
-- create a trigger which runs the tg__created_at() function when a /stream is created
CREATE TRIGGER stream_created_at
AFTER INSERT ON api.streams
FOR EACH ROW
EXECUTE PROCEDURE public.tg__created_at();

View File

@ -0,0 +1,2 @@
ALTER TABLE api.streams
ADD COLUMN is_recording_aborted BOOLEAN DEFAULT FALSE;
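-- e.g. (hypothetical id) an admin aborts an in-progress recording with:
-- UPDATE api.streams SET is_recording_aborted = TRUE WHERE id = '<stream uuid>';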

View File

@ -0,0 +1,2 @@
CREATE EXTENSION moddatetime;

View File

@ -0,0 +1,49 @@
-- now we set up the triggers
-- streams created_at
ALTER TABLE api.streams
ALTER created_at SET DEFAULT now();
DROP TRIGGER stream_created_at ON api.streams;
CREATE TRIGGER stream_created_at
BEFORE INSERT ON api.streams
FOR EACH ROW
EXECUTE PROCEDURE moddatetime (created_at);
-- streams updated_at
ALTER TABLE api.streams
ALTER updated_at SET DEFAULT now();
CREATE TRIGGER stream_updated_at
BEFORE UPDATE ON api.streams
FOR EACH ROW
EXECUTE PROCEDURE moddatetime (updated_at);
-- segments created_at
ALTER TABLE api.segments
ALTER created_at SET DEFAULT now();
DROP TRIGGER segment_created_at ON api.segments;
CREATE TRIGGER segment_created_at
BEFORE INSERT ON api.segments
FOR EACH ROW
EXECUTE PROCEDURE moddatetime(created_at);
-- segments updated_at
ALTER TABLE api.segments
ALTER updated_at SET DEFAULT now();
DROP TRIGGER segment_updated_at ON api.segments;
CREATE TRIGGER segment_updated_at
BEFORE UPDATE ON api.segments
FOR EACH ROW
EXECUTE PROCEDURE moddatetime(updated_at);

View File

@ -0,0 +1,9 @@
-- A fix for the following error
-- moddatetime: cannot process INSERT events
--
-- We don't need moddatetime for INSERT events because column defaults already set the time when the row is created.
DROP TRIGGER segment_created_at ON api.segments;
DROP TRIGGER stream_created_at ON api.streams;

View File

@ -0,0 +1,5 @@
-- streams needs discord_message_id for chatops
ALTER TABLE api.streams
ADD COLUMN discord_message_id TEXT;

View File

@ -0,0 +1,14 @@
-- instead of using record_id, we need to use stream_id
DROP FUNCTION public.tg__update_discord_message CASCADE;
CREATE FUNCTION public.tg__update_discord_message() RETURNS trigger
LANGUAGE plpgsql SECURITY DEFINER
SET search_path TO 'pg_catalog', 'public', 'pg_temp'
AS $$
begin
PERFORM graphile_worker.add_job('update_discord_message', json_build_object(
'stream_id', NEW.id
), max_attempts := 3);
return NEW;
end;
$$;

View File

@ -0,0 +1,5 @@
-- when a stream is updated, we add a job in graphile to update_discord_message
CREATE TRIGGER stream_update
AFTER UPDATE ON api.streams
FOR EACH ROW
EXECUTE PROCEDURE public.tg__update_discord_message('update_discord_message');

View File

@ -0,0 +1,23 @@
-- in order for discord chatops messages to be updated when a segment is updated,
-- we need to have postgres update the related stream timestamp when a segment is updated.
CREATE OR REPLACE FUNCTION update_stream_on_segment_update()
RETURNS TRIGGER AS $$
BEGIN
UPDATE api.streams
SET updated_at = NOW()
WHERE id IN (
SELECT stream_id
FROM segments_stream_links
WHERE segment_id = NEW.id
);
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER trigger_update_stream
AFTER UPDATE ON api.segments
FOR EACH ROW
EXECUTE FUNCTION update_stream_on_segment_update();
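-- Net effect: updating a segment bumps streams.updated_at, which in turn fires
-- the stream_update trigger and enqueues an update_discord_message job.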

View File

@ -8,6 +8,7 @@
"test": "echo \"Error: no test specified\" && exit 0",
"start": "node index.js"
},
"packageManager": "pnpm@9.6.0",
"keywords": [],
"author": "@CJ_Clippy",
"license": "Unlicense",