chatops progress

This commit is contained in:
CJ_Clippy 2024-08-16 18:42:44 -08:00
parent 7793e38878
commit 9cd9b6a53d
65 changed files with 1562 additions and 1759 deletions

View File

@ -33,7 +33,7 @@ Get through the [OODA loop](https://en.wikipedia.org/wiki/OODA_loop) as many tim
### The computer doesn't care
> "There are 2 hard problems in computer science: cache invalidation, naming things, and off-by-1 errors."
> -- Leon Bambrick
In other words, pick something for a name and roll with the punches.
@ -44,3 +44,8 @@ In other words, pick something for a name and roll with the punches.
3. Simplify or optimize
4. Accelerate Cycle Time
5. Automate
### Never Settle
> "But it's also about looking at things anew and what they could be instead of what they are"
> -- Rodney Mullen

View File

@ -215,6 +215,12 @@ cmd_button('postgres:drop',
icon_name='delete', icon_name='delete',
text='DROP all databases' text='DROP all databases'
) )
cmd_button('postgres:refresh',
argv=['sh', './scripts/postgres-refresh.sh'],
resource='migrations',
icon_name='refresh',
text='Refresh schema cache'
)
cmd_button('capture-api:create', cmd_button('capture-api:create',
argv=['http', '--ignore-stdin', 'POST', 'http://localhost:5003/api/record', "url='https://twitch.tv/ironmouse'", "channel='ironmouse'"], argv=['http', '--ignore-stdin', 'POST', 'http://localhost:5003/api/record', "url='https://twitch.tv/ironmouse'", "channel='ironmouse'"],
@ -223,9 +229,9 @@ cmd_button('capture-api:create',
text='Start Recording' text='Start Recording'
) )
cmd_button('postgrest:migrate', cmd_button('postgres:migrate',
argv=['./scripts/postgrest-migrations.sh'], argv=['./scripts/postgres-migrations.sh'],
resource='postgrest', resource='postgresql-primary',
icon_name='directions_run', icon_name='directions_run',
text='Run migrations', text='Run migrations',
) )
@ -243,6 +249,16 @@ cmd_button('factory:test',
text='test', text='test',
) )
## we ignore unused image warnings because we do actually use this image.
## instead of being invoked by helm, we start a container using this image manually via Tilt UI
# update_settings(suppress_unused_image_warnings=["fp/migrations"])
docker_build(
'fp/migrations',
'.',
dockerfile='dockerfiles/migrations.dockerfile',
target='migrations',
pull=False,
)
## Uncomment the following for fp/next in dev mode ## Uncomment the following for fp/next in dev mode
## this is useful for changing the UI and seeing results ## this is useful for changing the UI and seeing results
@ -350,7 +366,7 @@ docker_build(
'./services/capture', './services/capture',
], ],
live_update=[ live_update=[
sync('./services/capture/dist', '/app/dist'), sync('./services/capture', '/app/services/capture'),
], ],
pull=False, pull=False,
) )
@ -513,7 +529,11 @@ k8s_resource(
port_forwards=['5050:80'], port_forwards=['5050:80'],
labels=['database'], labels=['database'],
) )
k8s_resource(
workload='migrations',
labels=['database'],
resource_deps=['postgresql-primary'],
)
k8s_resource( k8s_resource(
workload='cert-manager', workload='cert-manager',

View File

@ -0,0 +1,21 @@
---
apiVersion: v1
kind: Pod
metadata:
name: migrations
namespace: futureporn
labels:
app.kubernetes.io/name: migrations
spec:
containers:
- name: migrations
image: "{{ .Values.migrations.imageName }}"
resources: {}
env:
- name: DATABASE_PASSWORD
valueFrom:
secretKeyRef:
name: postgresql
key: password
restartPolicy: Never

View File

@ -91,3 +91,5 @@ game2048:
hostname: game-2048.fp.sbtp.xyz hostname: game-2048.fp.sbtp.xyz
whoami: whoami:
hostname: whoami.fp.sbtp.xyz hostname: whoami.fp.sbtp.xyz
migrations:
imageName: fp/migrations

View File

@ -4,22 +4,45 @@ export as namespace Futureporn;
declare namespace Futureporn { declare namespace Futureporn {
interface RecordingRecord { type PlatformNotificationType = 'email' | 'manual' | 'twitter'
id: number; type ArchiveStatus = 'good' | 'issue' | 'missing'
recordingState: RecordingState; type RecordingState = 'pending' | 'recording' | 'stalled' | 'aborted' | 'failed' | 'finished'
fileSize: number; type Status = Partial<RecordingState>
discordMessageId: string;
isAborted: boolean; interface Stream {
id: string;
url: string;
platform_notification_type: PlatformNotificationType;
date: Date;
created_at: Date;
updated_at: Date;
vtuber: string;
tweet: string;
archive_status: ArchiveStatus;
is_chaturbate_stream: Boolean;
is_fansly_stream: Boolean;
is_recording_aborted: Boolean;
status: Status;
} }
interface RawRecordingRecord {
interface RecordingRecord {
id: number; id: number;
recording_state: RecordingState; recording_state: RecordingState;
file_size: number; file_size: number;
discord_message_id: string; discord_message_id: string;
is_aborted: boolean; is_recording_aborted: boolean;
updated_at: Date;
created_at: Date;
}
interface Segment {
id: number;
s3_key: string;
s3_id: string;
bytes: number;
stream?: Stream[];
} }
type RecordingState = 'pending' | 'recording' | 'aborted' | 'ended'
interface IMuxAsset { interface IMuxAsset {
@ -70,7 +93,7 @@ declare namespace Futureporn {
attributes: { attributes: {
date: string; date: string;
date2: string; date2: string;
archiveStatus: 'good' | 'issue' | 'missing'; archiveStatus: ArchiveStatus;
vods: IVodsResponse; vods: IVodsResponse;
cuid: string; cuid: string;
vtuber: IVtuberResponse; vtuber: IVtuberResponse;
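The new `Stream` interface above is what the chatops bot and the worker tasks exchange with PostgREST. As a rough illustration only, here is a minimal sketch of reading active streams with that type; the helper name, URL, and JWT parameter are assumptions, and the status filter mirrors the one used by the expire task later in this commit:

```ts
import type { Stream } from '@futureporn/types'

// Hedged sketch: list streams that are pending or actively recording.
// postgrestUrl and jwt come from whatever config the caller already has.
export async function getActiveStreams(postgrestUrl: string, jwt: string): Promise<Stream[]> {
  const query = 'or=(status.eq.pending_recording,status.eq.recording)'
  const res = await fetch(`${postgrestUrl}/streams?${query}`, {
    headers: { 'Authorization': `Bearer ${jwt}` }
  })
  if (!res.ok) throw new Error(`failed to list streams. status=${res.status}, statusText=${res.statusText}`)
  return await res.json() as Stream[]
}
```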

View File

@ -1,7 +0,0 @@
# @futureporn/worker
The system component which runs background tasks such as thumbnail generation, video encoding, file transfers, etc.
We use [Graphile Worker](https://worker.graphile.org)

View File

@ -1,26 +0,0 @@
{
"name": "@futureporn/worker",
"type": "module",
"version": "1.3.0",
"private": true,
"scripts": {
"bundle": "node ./src/create-workflow-bundle.js",
"build": "tsc --build",
"lint": "eslint .",
"dev": "nodemon --ext js,ts,json,yaml --watch ./src/index.ts --exec \"node --loader ts-node/esm --disable-warning=ExperimentalWarning ./src/index.ts\"",
"start": "node dist/index.js",
"clean": "rm -rf dist",
"superclean": "rm -rf node_modules && rm -rf pnpm-lock.yaml && rm -rf dist"
},
"dependencies": {
"date-fns": "^3.6.0",
"dotenv": "^16.4.5",
"graphile-worker": "^0.16.6",
"qs": "^6.12.3"
},
"packageManager": "pnpm@9.5.0",
"devDependencies": {
"nodemon": "^2.0.15",
"typescript": "^5.5.3"
}
}

File diff suppressed because it is too large.

View File

@ -1,35 +0,0 @@
import { run } from 'graphile-worker'
import { dirname } from 'node:path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
if (!process.env.DATABASE_URL) throw new Error('DATABASE_URL is undefined in env');
const connectionString = process.env.DATABASE_URL
console.log(`process.env.DATABASE_URL=${process.env.DATABASE_URL}`)
async function main() {
// Run a worker to execute jobs:
const runner = await run({
connectionString,
concurrency: 5,
// Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
noHandleSignals: false,
pollInterval: 1000,
taskDirectory: `${__dirname}/tasks`,
});
// Immediately await (or otherwise handle) the resulting promise, to avoid
// "unhandled rejection" errors causing a process crash in the event of
// something going wrong.
await runner.promise;
// If the worker exits (whether through fatal error or otherwise), the above
// promise will resolve/reject.
}
main().catch((err) => {
console.error(err);
process.exit(1);
});

View File

@ -1,4 +0,0 @@
export default async function (payload: any, helpers: any) {
const { name } = payload;
helpers.logger.info(`Hello, ${name}`);
};

View File

@ -1,15 +0,0 @@
import { download } from "@futureporn/utils";
import { getProminentColor } from "@futureporn/image";
export default async function (payload: any, helpers: any) {
const { url } = payload;
// helpers.logger.info(`Downloading ${url}`)
// const imageFile = await download({ url, filePath: '/tmp/my-image.png' })
// helpers.logger.info(`downloaded to ${imageFile}`)
// if (!imageFile) throw new Error('imageFile was null')
// const color = await getProminentColor(imageFile)
// helpers.logger.info(`prominent color is ${color}`)
// return color
return '#0xffcc00'
}

View File

@ -1,29 +0,0 @@
{
"compilerOptions": {
// Base Options recommended for all projects
"esModuleInterop": true,
"skipLibCheck": true,
"target": "es2022",
"allowJs": true,
"resolveJsonModule": true,
"moduleDetection": "force",
"isolatedModules": true,
// Enable strict type checking so you can catch bugs early
"strict": true,
"noUncheckedIndexedAccess": true,
"noImplicitOverride": true,
// Transpile our TypeScript code to JavaScript
"module": "NodeNext",
"outDir": "dist",
"lib": [
"es2022"
]
},
// Include the necessary files for your project
"include": [
"**/*.ts"
],
"exclude": [
"node_modules"
]
}

View File

@ -61,14 +61,7 @@ kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PAS
## Create the futureporn Postgrest database
kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\ ## !!! Don't create the database here! Allow @services/migrations to create the database.
CREATE DATABASE futureporn \
WITH \
OWNER = postgres \
ENCODING = 'UTF8' \
LOCALE_PROVIDER = 'libc' \
CONNECTION LIMIT = -1 \
IS_TEMPLATE = False;"
# @futureporn/migrations takes care of these tasks now

View File

@ -5,5 +5,12 @@ if [ -z $POSTGRES_PASSWORD ]; then
fi fi
## drop futureporn_db ## drop futureporn_db
kubectl -n futureporn exec postgresql-primary -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "DROP DATABASE futureporn_db WITH (FORCE);" kubectl -n futureporn exec postgresql-primary-0 -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "DROP DATABASE futureporn_db WITH (FORCE);"
## drop futureporn
kubectl -n futureporn exec postgresql-primary-0 -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "DROP DATABASE futureporn WITH (FORCE);"
## delete postgrest roles
kubectl -n futureporn exec postgresql-primary-0 -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "DROP ROLE authenticator;"
kubectl -n futureporn exec postgresql-primary-0 -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "DROP ROLE automation;"
kubectl -n futureporn exec postgresql-primary-0 -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "DROP ROLE web_anon;"

scripts/postgres-migrations.sh (executable file)
View File

@ -0,0 +1,13 @@
#!/bin/bash
if [ -z $POSTGRES_PASSWORD ]; then
echo "POSTGRES_PASSWORD was missing in env. In development environment, runing this command via the UI button in Tilt is recommended as it sets the env var for you."
exit 5
fi
# kubectl -n futureporn run postgrest-migrations -i --rm=true --image=gitea.futureporn.net/futureporn/migrations:latest --env=DATABASE_PASSWORD=${POSTGRES_PASSWORD}
kubectl -n futureporn run postgres-migrations -i --rm=true --image=fp/migrations:latest --env=DATABASE_PASSWORD=${POSTGRES_PASSWORD}

View File

@ -0,0 +1,9 @@
if [ -z $POSTGRES_PASSWORD ]; then
echo "POSTGRES_PASSWORD was missing in env"
exit 5
fi
# reload the schema
# @see https://postgrest.org/en/latest/references/schema_cache.html#schema-reloading
kubectl -n futureporn exec postgresql-primary-0 -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "NOTIFY pgrst, 'reload schema'"

View File

@ -1,12 +0,0 @@
#!/bin/bash
if [ -z $POSTGRES_PASSWORD ]; then
echo "POSTGRES_PASSWORD was missing in env. In development environment, runing this command via the UI button in Tilt is recommended as it sets the env var for you."
exit 5
fi
kubectl -n futureporn run postgrest-migrations -i --rm=true --image=gitea.futureporn.net/futureporn/migrations:latest --env=DATABASE_PASSWORD=${POSTGRES_PASSWORD}

View File

@ -13,6 +13,6 @@
# * * * * * task ?opts {payload} # * * * * * task ?opts {payload}
## every 5 minutes, we see which /records are stale and we mark them as such. ## every n minutes, we see which /records are stale and we mark them as such.
## this prevents stalled Record updates by marking stalled recordings as stopped ## this prevents stalled Record updates by marking stalled recordings as stopped
*/5 * * * * expire_records * * * * * expire_stream_recordings ?max=1 { idle_minutes:2 }

View File

@ -7,8 +7,10 @@
"scripts": { "scripts": {
"test": "echo \"Warn: no test specified\" && exit 0", "test": "echo \"Warn: no test specified\" && exit 0",
"start": "node ./dist/index.js", "start": "node ./dist/index.js",
"dev.nodemon": "nodemon --legacy-watch --ext js,ts --watch ./src --exec \"node --loader ts-node/esm --disable-warning=ExperimentalWarning ./src/index.ts\"", "dev": "pnpm run dev.nodemon # yes this is crazy to have nodemon execute tsx, but it's the only way I have found to get live reloading in TS/ESM/docker with Graphile Worker's way of loading tasks",
"dev": "tsx --watch ./src/index.ts", "dev.tsx": "tsx ./src/index.ts",
"dev.nodemon": "nodemon --ext ts --exec \"pnpm run dev.tsx\"",
"dev.node": "node --no-warnings=ExperimentalWarning --loader ts-node/esm src/index.ts",
"build": "tsc --build", "build": "tsc --build",
"clean": "rm -rf dist", "clean": "rm -rf dist",
"superclean": "rm -rf node_modules && rm -rf pnpm-lock.yaml && rm -rf dist", "superclean": "rm -rf node_modules && rm -rf pnpm-lock.yaml && rm -rf dist",
@ -20,18 +22,22 @@
"license": "Unlicense", "license": "Unlicense",
"dependencies": { "dependencies": {
"@discordeno/bot": "19.0.0-next.746f0a9", "@discordeno/bot": "19.0.0-next.746f0a9",
"@types/node": "^22.2.0",
"@types/qs": "^6.9.15",
"date-fns": "^3.6.0", "date-fns": "^3.6.0",
"dd-cache-proxy": "^2.1.1", "dd-cache-proxy": "^2.1.1",
"dotenv": "^16.4.5", "dotenv": "^16.4.5",
"graphile-config": "0.0.1-beta.9", "graphile-config": "0.0.1-beta.9",
"graphile-worker": "^0.16.6", "graphile-worker": "^0.16.6",
"pretty-bytes": "^6.1.1" "node-fetch": "^3.3.2",
"pretty-bytes": "^6.1.1",
"qs": "^6.13.0"
}, },
"devDependencies": { "devDependencies": {
"@futureporn/types": "workspace:^", "@futureporn/types": "workspace:^",
"nodemon": "^3.1.4", "nodemon": "^3.1.4",
"ts-node": "^10.9.2", "ts-node": "^10.9.2",
"tsx": "^4.16.2", "tsx": "^4.17.0",
"typescript": "^5.5.3" "typescript": "^5.5.4"
} }
} }

View File

@ -11,6 +11,12 @@ importers:
'@discordeno/bot': '@discordeno/bot':
specifier: 19.0.0-next.746f0a9 specifier: 19.0.0-next.746f0a9
version: 19.0.0-next.746f0a9 version: 19.0.0-next.746f0a9
'@types/node':
specifier: ^22.2.0
version: 22.2.0
'@types/qs':
specifier: ^6.9.15
version: 6.9.15
date-fns: date-fns:
specifier: ^3.6.0 specifier: ^3.6.0
version: 3.6.0 version: 3.6.0
@ -26,9 +32,15 @@ importers:
graphile-worker: graphile-worker:
specifier: ^0.16.6 specifier: ^0.16.6
version: 0.16.6(typescript@5.5.4) version: 0.16.6(typescript@5.5.4)
node-fetch:
specifier: ^3.3.2
version: 3.3.2
pretty-bytes: pretty-bytes:
specifier: ^6.1.1 specifier: ^6.1.1
version: 6.1.1 version: 6.1.1
qs:
specifier: ^6.13.0
version: 6.13.0
devDependencies: devDependencies:
'@futureporn/types': '@futureporn/types':
specifier: workspace:^ specifier: workspace:^
@ -38,12 +50,12 @@ importers:
version: 3.1.4 version: 3.1.4
ts-node: ts-node:
specifier: ^10.9.2 specifier: ^10.9.2
version: 10.9.2(@types/node@22.1.0)(typescript@5.5.4) version: 10.9.2(@types/node@22.2.0)(typescript@5.5.4)
tsx: tsx:
specifier: ^4.16.2 specifier: ^4.17.0
version: 4.16.2 version: 4.17.0
typescript: typescript:
specifier: ^5.5.3 specifier: ^5.5.4
version: 5.5.4 version: 5.5.4
packages: packages:
@ -79,141 +91,147 @@ packages:
'@discordeno/utils@19.0.0-next.746f0a9': '@discordeno/utils@19.0.0-next.746f0a9':
resolution: {integrity: sha512-UY5GataakuY0yc4SN5qJLexUbTc5y293G3gNAWSaOjaZivEytcdxD4xgeqjNj9c4eN57B3Lfzus6tFZHXwXNOA==} resolution: {integrity: sha512-UY5GataakuY0yc4SN5qJLexUbTc5y293G3gNAWSaOjaZivEytcdxD4xgeqjNj9c4eN57B3Lfzus6tFZHXwXNOA==}
'@esbuild/aix-ppc64@0.21.5': '@esbuild/aix-ppc64@0.23.0':
resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==} resolution: {integrity: sha512-3sG8Zwa5fMcA9bgqB8AfWPQ+HFke6uD3h1s3RIwUNK8EG7a4buxvuFTs3j1IMs2NXAk9F30C/FF4vxRgQCcmoQ==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [ppc64] cpu: [ppc64]
os: [aix] os: [aix]
'@esbuild/android-arm64@0.21.5': '@esbuild/android-arm64@0.23.0':
resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==} resolution: {integrity: sha512-EuHFUYkAVfU4qBdyivULuu03FhJO4IJN9PGuABGrFy4vUuzk91P2d+npxHcFdpUnfYKy0PuV+n6bKIpHOB3prQ==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [arm64] cpu: [arm64]
os: [android] os: [android]
'@esbuild/android-arm@0.21.5': '@esbuild/android-arm@0.23.0':
resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==} resolution: {integrity: sha512-+KuOHTKKyIKgEEqKbGTK8W7mPp+hKinbMBeEnNzjJGyFcWsfrXjSTNluJHCY1RqhxFurdD8uNXQDei7qDlR6+g==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [arm] cpu: [arm]
os: [android] os: [android]
'@esbuild/android-x64@0.21.5': '@esbuild/android-x64@0.23.0':
resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==} resolution: {integrity: sha512-WRrmKidLoKDl56LsbBMhzTTBxrsVwTKdNbKDalbEZr0tcsBgCLbEtoNthOW6PX942YiYq8HzEnb4yWQMLQuipQ==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [android] os: [android]
'@esbuild/darwin-arm64@0.21.5': '@esbuild/darwin-arm64@0.23.0':
resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==} resolution: {integrity: sha512-YLntie/IdS31H54Ogdn+v50NuoWF5BDkEUFpiOChVa9UnKpftgwzZRrI4J132ETIi+D8n6xh9IviFV3eXdxfow==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [arm64] cpu: [arm64]
os: [darwin] os: [darwin]
'@esbuild/darwin-x64@0.21.5': '@esbuild/darwin-x64@0.23.0':
resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==} resolution: {integrity: sha512-IMQ6eme4AfznElesHUPDZ+teuGwoRmVuuixu7sv92ZkdQcPbsNHzutd+rAfaBKo8YK3IrBEi9SLLKWJdEvJniQ==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [darwin] os: [darwin]
'@esbuild/freebsd-arm64@0.21.5': '@esbuild/freebsd-arm64@0.23.0':
resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==} resolution: {integrity: sha512-0muYWCng5vqaxobq6LB3YNtevDFSAZGlgtLoAc81PjUfiFz36n4KMpwhtAd4he8ToSI3TGyuhyx5xmiWNYZFyw==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [arm64] cpu: [arm64]
os: [freebsd] os: [freebsd]
'@esbuild/freebsd-x64@0.21.5': '@esbuild/freebsd-x64@0.23.0':
resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==} resolution: {integrity: sha512-XKDVu8IsD0/q3foBzsXGt/KjD/yTKBCIwOHE1XwiXmrRwrX6Hbnd5Eqn/WvDekddK21tfszBSrE/WMaZh+1buQ==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [freebsd] os: [freebsd]
'@esbuild/linux-arm64@0.21.5': '@esbuild/linux-arm64@0.23.0':
resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==} resolution: {integrity: sha512-j1t5iG8jE7BhonbsEg5d9qOYcVZv/Rv6tghaXM/Ug9xahM0nX/H2gfu6X6z11QRTMT6+aywOMA8TDkhPo8aCGw==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [arm64] cpu: [arm64]
os: [linux] os: [linux]
'@esbuild/linux-arm@0.21.5': '@esbuild/linux-arm@0.23.0':
resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==} resolution: {integrity: sha512-SEELSTEtOFu5LPykzA395Mc+54RMg1EUgXP+iw2SJ72+ooMwVsgfuwXo5Fn0wXNgWZsTVHwY2cg4Vi/bOD88qw==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [arm] cpu: [arm]
os: [linux] os: [linux]
'@esbuild/linux-ia32@0.21.5': '@esbuild/linux-ia32@0.23.0':
resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==} resolution: {integrity: sha512-P7O5Tkh2NbgIm2R6x1zGJJsnacDzTFcRWZyTTMgFdVit6E98LTxO+v8LCCLWRvPrjdzXHx9FEOA8oAZPyApWUA==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [ia32] cpu: [ia32]
os: [linux] os: [linux]
'@esbuild/linux-loong64@0.21.5': '@esbuild/linux-loong64@0.23.0':
resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==} resolution: {integrity: sha512-InQwepswq6urikQiIC/kkx412fqUZudBO4SYKu0N+tGhXRWUqAx+Q+341tFV6QdBifpjYgUndV1hhMq3WeJi7A==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [loong64] cpu: [loong64]
os: [linux] os: [linux]
'@esbuild/linux-mips64el@0.21.5': '@esbuild/linux-mips64el@0.23.0':
resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==} resolution: {integrity: sha512-J9rflLtqdYrxHv2FqXE2i1ELgNjT+JFURt/uDMoPQLcjWQA5wDKgQA4t/dTqGa88ZVECKaD0TctwsUfHbVoi4w==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [mips64el] cpu: [mips64el]
os: [linux] os: [linux]
'@esbuild/linux-ppc64@0.21.5': '@esbuild/linux-ppc64@0.23.0':
resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==} resolution: {integrity: sha512-cShCXtEOVc5GxU0fM+dsFD10qZ5UpcQ8AM22bYj0u/yaAykWnqXJDpd77ublcX6vdDsWLuweeuSNZk4yUxZwtw==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [ppc64] cpu: [ppc64]
os: [linux] os: [linux]
'@esbuild/linux-riscv64@0.21.5': '@esbuild/linux-riscv64@0.23.0':
resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==} resolution: {integrity: sha512-HEtaN7Y5UB4tZPeQmgz/UhzoEyYftbMXrBCUjINGjh3uil+rB/QzzpMshz3cNUxqXN7Vr93zzVtpIDL99t9aRw==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [riscv64] cpu: [riscv64]
os: [linux] os: [linux]
'@esbuild/linux-s390x@0.21.5': '@esbuild/linux-s390x@0.23.0':
resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==} resolution: {integrity: sha512-WDi3+NVAuyjg/Wxi+o5KPqRbZY0QhI9TjrEEm+8dmpY9Xir8+HE/HNx2JoLckhKbFopW0RdO2D72w8trZOV+Wg==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [s390x] cpu: [s390x]
os: [linux] os: [linux]
'@esbuild/linux-x64@0.21.5': '@esbuild/linux-x64@0.23.0':
resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==} resolution: {integrity: sha512-a3pMQhUEJkITgAw6e0bWA+F+vFtCciMjW/LPtoj99MhVt+Mfb6bbL9hu2wmTZgNd994qTAEw+U/r6k3qHWWaOQ==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [linux] os: [linux]
'@esbuild/netbsd-x64@0.21.5': '@esbuild/netbsd-x64@0.23.0':
resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==} resolution: {integrity: sha512-cRK+YDem7lFTs2Q5nEv/HHc4LnrfBCbH5+JHu6wm2eP+d8OZNoSMYgPZJq78vqQ9g+9+nMuIsAO7skzphRXHyw==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [netbsd] os: [netbsd]
'@esbuild/openbsd-x64@0.21.5': '@esbuild/openbsd-arm64@0.23.0':
resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==} resolution: {integrity: sha512-suXjq53gERueVWu0OKxzWqk7NxiUWSUlrxoZK7usiF50C6ipColGR5qie2496iKGYNLhDZkPxBI3erbnYkU0rQ==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [arm64]
os: [openbsd]
'@esbuild/openbsd-x64@0.23.0':
resolution: {integrity: sha512-6p3nHpby0DM/v15IFKMjAaayFhqnXV52aEmv1whZHX56pdkK+MEaLoQWj+H42ssFarP1PcomVhbsR4pkz09qBg==}
engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [openbsd] os: [openbsd]
'@esbuild/sunos-x64@0.21.5': '@esbuild/sunos-x64@0.23.0':
resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==} resolution: {integrity: sha512-BFelBGfrBwk6LVrmFzCq1u1dZbG4zy/Kp93w2+y83Q5UGYF1d8sCzeLI9NXjKyujjBBniQa8R8PzLFAUrSM9OA==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [sunos] os: [sunos]
'@esbuild/win32-arm64@0.21.5': '@esbuild/win32-arm64@0.23.0':
resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==} resolution: {integrity: sha512-lY6AC8p4Cnb7xYHuIxQ6iYPe6MfO2CC43XXKo9nBXDb35krYt7KGhQnOkRGar5psxYkircpCqfbNDB4uJbS2jQ==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [arm64] cpu: [arm64]
os: [win32] os: [win32]
'@esbuild/win32-ia32@0.21.5': '@esbuild/win32-ia32@0.23.0':
resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==} resolution: {integrity: sha512-7L1bHlOTcO4ByvI7OXVI5pNN6HSu6pUQq9yodga8izeuB1KcT2UkHaH6118QJwopExPn0rMHIseCTx1CRo/uNA==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [ia32] cpu: [ia32]
os: [win32] os: [win32]
'@esbuild/win32-x64@0.21.5': '@esbuild/win32-x64@0.23.0':
resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==} resolution: {integrity: sha512-Arm+WgUFLUATuoxCJcahGuk6Yj9Pzxd6l11Zb/2aAuv5kWWvvfhLFo2fni4uSK5vzlUdCGZ/BdV5tH8klj8p8g==}
engines: {node: '>=12'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [win32] os: [win32]
@ -251,18 +269,18 @@ packages:
'@types/ms@0.7.34': '@types/ms@0.7.34':
resolution: {integrity: sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==} resolution: {integrity: sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==}
'@types/node@20.14.13': '@types/node@20.14.15':
resolution: {integrity: sha512-+bHoGiZb8UiQ0+WEtmph2IWQCjIqg8MDZMAV+ppRRhUZnquF5mQkP/9vpSwJClEiSM/C7fZZExPzfU0vJTyp8w==} resolution: {integrity: sha512-Fz1xDMCF/B00/tYSVMlmK7hVeLh7jE5f3B7X1/hmV0MJBwE27KlS7EvD/Yp+z1lm8mVhwV5w+n8jOZG8AfTlKw==}
'@types/node@22.0.0': '@types/node@22.2.0':
resolution: {integrity: sha512-VT7KSYudcPOzP5Q0wfbowyNLaVR8QWUdw+088uFWwfvpY6uCWaXpqV6ieLAu9WBcnTa7H4Z5RLK8I5t2FuOcqw==} resolution: {integrity: sha512-bm6EG6/pCpkxDf/0gDNDdtDILMOHgaQBVOJGdwsqClnxA3xL6jtMv76rLBc006RVMWbmaf0xbmom4Z/5o2nRkQ==}
'@types/node@22.1.0':
resolution: {integrity: sha512-AOmuRF0R2/5j1knA3c6G3HOk523Ga+l+ZXltX8SF1+5oqcXijjfTd8fY3XRZqSihEu9XhtQnKYLmkFaoxgsJHw==}
'@types/pg@8.11.6': '@types/pg@8.11.6':
resolution: {integrity: sha512-/2WmmBXHLsfRqzfHW7BNZ8SbYzE8OSk7i3WjFYvfgRHj7S1xj+16Je5fUKv3lVdVzk/zn9TXOqf+avFCFIE0yQ==} resolution: {integrity: sha512-/2WmmBXHLsfRqzfHW7BNZ8SbYzE8OSk7i3WjFYvfgRHj7S1xj+16Je5fUKv3lVdVzk/zn9TXOqf+avFCFIE0yQ==}
'@types/qs@6.9.15':
resolution: {integrity: sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg==}
'@types/semver@7.5.8': '@types/semver@7.5.8':
resolution: {integrity: sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==} resolution: {integrity: sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==}
@ -311,6 +329,10 @@ packages:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'} engines: {node: '>=8'}
call-bind@1.0.7:
resolution: {integrity: sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==}
engines: {node: '>= 0.4'}
callsites@3.1.0: callsites@3.1.0:
resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
engines: {node: '>=6'} engines: {node: '>=6'}
@ -359,6 +381,10 @@ packages:
create-require@1.1.1: create-require@1.1.1:
resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==}
data-uri-to-buffer@4.0.1:
resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==}
engines: {node: '>= 12'}
date-fns@3.6.0: date-fns@3.6.0:
resolution: {integrity: sha512-fRHTG8g/Gif+kSh50gaGEdToemgfj74aRX3swtiouboip5JDLAyDE9F11nHMIcvOaXeOC6D7SpNhi7uFyB7Uww==} resolution: {integrity: sha512-fRHTG8g/Gif+kSh50gaGEdToemgfj74aRX3swtiouboip5JDLAyDE9F11nHMIcvOaXeOC6D7SpNhi7uFyB7Uww==}
@ -376,6 +402,10 @@ packages:
supports-color: supports-color:
optional: true optional: true
define-data-property@1.1.4:
resolution: {integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==}
engines: {node: '>= 0.4'}
diff@4.0.2: diff@4.0.2:
resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==}
engines: {node: '>=0.3.1'} engines: {node: '>=0.3.1'}
@ -390,9 +420,17 @@ packages:
error-ex@1.3.2: error-ex@1.3.2:
resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==}
esbuild@0.21.5: es-define-property@1.0.0:
resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==} resolution: {integrity: sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==}
engines: {node: '>=12'} engines: {node: '>= 0.4'}
es-errors@1.3.0:
resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==}
engines: {node: '>= 0.4'}
esbuild@0.23.0:
resolution: {integrity: sha512-1lvV17H2bMYda/WaFb2jLPeHU3zml2k4/yagNMG8Q/YtfMjCwEUZa2eXXMgZTVSL5q1n4H7sQ0X6CdJDqqeCFA==}
engines: {node: '>=18'}
hasBin: true hasBin: true
escalade@3.1.2: escalade@3.1.2:
@ -403,19 +441,34 @@ packages:
resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==}
engines: {node: '>=0.8.0'} engines: {node: '>=0.8.0'}
fetch-blob@3.2.0:
resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==}
engines: {node: ^12.20 || >= 14.13}
fill-range@7.1.1: fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'} engines: {node: '>=8'}
formdata-polyfill@4.0.10:
resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==}
engines: {node: '>=12.20.0'}
fsevents@2.3.3: fsevents@2.3.3:
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin] os: [darwin]
function-bind@1.1.2:
resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==}
get-caller-file@2.0.5: get-caller-file@2.0.5:
resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==}
engines: {node: 6.* || 8.* || >= 10.*} engines: {node: 6.* || 8.* || >= 10.*}
get-intrinsic@1.2.4:
resolution: {integrity: sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==}
engines: {node: '>= 0.4'}
get-tsconfig@4.7.6: get-tsconfig@4.7.6:
resolution: {integrity: sha512-ZAqrLlu18NbDdRaHq+AKXzAmqIUPswPWKUchfytdAjiRFnCe5ojG2bstg6mRiZabkKfCoL/e98pbBELIV/YCeA==} resolution: {integrity: sha512-ZAqrLlu18NbDdRaHq+AKXzAmqIUPswPWKUchfytdAjiRFnCe5ojG2bstg6mRiZabkKfCoL/e98pbBELIV/YCeA==}
@ -423,6 +476,9 @@ packages:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'} engines: {node: '>= 6'}
gopd@1.0.1:
resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==}
graphile-config@0.0.1-beta.9: graphile-config@0.0.1-beta.9:
resolution: {integrity: sha512-7vNxXZ24OAgXxDKXYi9JtgWPMuNbBL3057Yf32Ux+/rVP4+EePgySCc+NNnn0tORi8qwqVreN8bdWqGIcSwNXg==} resolution: {integrity: sha512-7vNxXZ24OAgXxDKXYi9JtgWPMuNbBL3057Yf32Ux+/rVP4+EePgySCc+NNnn0tORi8qwqVreN8bdWqGIcSwNXg==}
engines: {node: '>=16'} engines: {node: '>=16'}
@ -440,6 +496,21 @@ packages:
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
engines: {node: '>=8'} engines: {node: '>=8'}
has-property-descriptors@1.0.2:
resolution: {integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==}
has-proto@1.0.3:
resolution: {integrity: sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==}
engines: {node: '>= 0.4'}
has-symbols@1.0.3:
resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==}
engines: {node: '>= 0.4'}
hasown@2.0.2:
resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==}
engines: {node: '>= 0.4'}
ignore-by-default@1.0.1: ignore-by-default@1.0.1:
resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==} resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==}
@ -501,6 +572,14 @@ packages:
ms@2.1.2: ms@2.1.2:
resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==}
node-domexception@1.0.0:
resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==}
engines: {node: '>=10.5.0'}
node-fetch@3.3.2:
resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==}
engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
nodemon@3.1.4: nodemon@3.1.4:
resolution: {integrity: sha512-wjPBbFhtpJwmIeY2yP7QF+UKzPfltVGtfce1g/bB15/8vCGZj8uxD62b/b9M9/WVgme0NZudpownKN+c0plXlQ==} resolution: {integrity: sha512-wjPBbFhtpJwmIeY2yP7QF+UKzPfltVGtfce1g/bB15/8vCGZj8uxD62b/b9M9/WVgme0NZudpownKN+c0plXlQ==}
engines: {node: '>=10'} engines: {node: '>=10'}
@ -510,6 +589,10 @@ packages:
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
engines: {node: '>=0.10.0'} engines: {node: '>=0.10.0'}
object-inspect@1.13.2:
resolution: {integrity: sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==}
engines: {node: '>= 0.4'}
obuf@1.1.2: obuf@1.1.2:
resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==}
@ -616,6 +699,10 @@ packages:
pstree.remy@1.1.8: pstree.remy@1.1.8:
resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==} resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==}
qs@6.13.0:
resolution: {integrity: sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==}
engines: {node: '>=0.6'}
readdirp@3.6.0: readdirp@3.6.0:
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
engines: {node: '>=8.10.0'} engines: {node: '>=8.10.0'}
@ -636,6 +723,14 @@ packages:
engines: {node: '>=10'} engines: {node: '>=10'}
hasBin: true hasBin: true
set-function-length@1.2.2:
resolution: {integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==}
engines: {node: '>= 0.4'}
side-channel@1.0.6:
resolution: {integrity: sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==}
engines: {node: '>= 0.4'}
simple-update-notifier@2.0.0: simple-update-notifier@2.0.0:
resolution: {integrity: sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==} resolution: {integrity: sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==}
engines: {node: '>=10'} engines: {node: '>=10'}
@ -682,11 +777,11 @@ packages:
'@swc/wasm': '@swc/wasm':
optional: true optional: true
tslib@2.6.2: tslib@2.6.3:
resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} resolution: {integrity: sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==}
tsx@4.16.2: tsx@4.17.0:
resolution: {integrity: sha512-C1uWweJDgdtX2x600HjaFaucXTilT7tgUZHbOE4+ypskZ1OP8CRCSDkCxG6Vya9EwaFIVagWwpaVAn5wzypaqQ==} resolution: {integrity: sha512-eN4mnDA5UMKDt4YZixo9tBioibaMBpoxBkD+rIPAjVmYERSG0/dWEY1CEFuV89CgASlKL499q8AhmkMnnjtOJg==}
engines: {node: '>=18.0.0'} engines: {node: '>=18.0.0'}
hasBin: true hasBin: true
@ -701,15 +796,16 @@ packages:
undici-types@5.26.5: undici-types@5.26.5:
resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==}
undici-types@6.11.1:
resolution: {integrity: sha512-mIDEX2ek50x0OlRgxryxsenE5XaQD4on5U2inY7RApK3SOJpofyw7uW2AyfMKkhAxXIceo2DeWGVGwyvng1GNQ==}
undici-types@6.13.0: undici-types@6.13.0:
resolution: {integrity: sha512-xtFJHudx8S2DSoujjMd1WeWvn7KKWFRESZTMeL1RptAYERu29D6jphMjjY+vn96jvN3kVPDNxU/E13VTaXj6jg==} resolution: {integrity: sha512-xtFJHudx8S2DSoujjMd1WeWvn7KKWFRESZTMeL1RptAYERu29D6jphMjjY+vn96jvN3kVPDNxU/E13VTaXj6jg==}
v8-compile-cache-lib@3.0.1: v8-compile-cache-lib@3.0.1:
resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==}
web-streams-polyfill@3.3.3:
resolution: {integrity: sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==}
engines: {node: '>= 8'}
wrap-ansi@7.0.0: wrap-ansi@7.0.0:
resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==}
engines: {node: '>=10'} engines: {node: '>=10'}
@ -796,73 +892,76 @@ snapshots:
dependencies: dependencies:
'@discordeno/types': 19.0.0-next.746f0a9 '@discordeno/types': 19.0.0-next.746f0a9
'@esbuild/aix-ppc64@0.21.5': '@esbuild/aix-ppc64@0.23.0':
optional: true optional: true
'@esbuild/android-arm64@0.21.5': '@esbuild/android-arm64@0.23.0':
optional: true optional: true
'@esbuild/android-arm@0.21.5': '@esbuild/android-arm@0.23.0':
optional: true optional: true
'@esbuild/android-x64@0.21.5': '@esbuild/android-x64@0.23.0':
optional: true optional: true
'@esbuild/darwin-arm64@0.21.5': '@esbuild/darwin-arm64@0.23.0':
optional: true optional: true
'@esbuild/darwin-x64@0.21.5': '@esbuild/darwin-x64@0.23.0':
optional: true optional: true
'@esbuild/freebsd-arm64@0.21.5': '@esbuild/freebsd-arm64@0.23.0':
optional: true optional: true
'@esbuild/freebsd-x64@0.21.5': '@esbuild/freebsd-x64@0.23.0':
optional: true optional: true
'@esbuild/linux-arm64@0.21.5': '@esbuild/linux-arm64@0.23.0':
optional: true optional: true
'@esbuild/linux-arm@0.21.5': '@esbuild/linux-arm@0.23.0':
optional: true optional: true
'@esbuild/linux-ia32@0.21.5': '@esbuild/linux-ia32@0.23.0':
optional: true optional: true
'@esbuild/linux-loong64@0.21.5': '@esbuild/linux-loong64@0.23.0':
optional: true optional: true
'@esbuild/linux-mips64el@0.21.5': '@esbuild/linux-mips64el@0.23.0':
optional: true optional: true
'@esbuild/linux-ppc64@0.21.5': '@esbuild/linux-ppc64@0.23.0':
optional: true optional: true
'@esbuild/linux-riscv64@0.21.5': '@esbuild/linux-riscv64@0.23.0':
optional: true optional: true
'@esbuild/linux-s390x@0.21.5': '@esbuild/linux-s390x@0.23.0':
optional: true optional: true
'@esbuild/linux-x64@0.21.5': '@esbuild/linux-x64@0.23.0':
optional: true optional: true
'@esbuild/netbsd-x64@0.21.5': '@esbuild/netbsd-x64@0.23.0':
optional: true optional: true
'@esbuild/openbsd-x64@0.21.5': '@esbuild/openbsd-arm64@0.23.0':
optional: true optional: true
'@esbuild/sunos-x64@0.21.5': '@esbuild/openbsd-x64@0.23.0':
optional: true optional: true
'@esbuild/win32-arm64@0.21.5': '@esbuild/sunos-x64@0.23.0':
optional: true optional: true
'@esbuild/win32-ia32@0.21.5': '@esbuild/win32-arm64@0.23.0':
optional: true optional: true
'@esbuild/win32-x64@0.21.5': '@esbuild/win32-ia32@0.23.0':
optional: true
'@esbuild/win32-x64@0.23.0':
optional: true optional: true
'@graphile/logger@0.2.0': {} '@graphile/logger@0.2.0': {}
@ -890,28 +989,26 @@ snapshots:
'@types/interpret@1.1.3': '@types/interpret@1.1.3':
dependencies: dependencies:
'@types/node': 22.0.0 '@types/node': 22.2.0
'@types/ms@0.7.34': {} '@types/ms@0.7.34': {}
'@types/node@20.14.13': '@types/node@20.14.15':
dependencies: dependencies:
undici-types: 5.26.5 undici-types: 5.26.5
'@types/node@22.0.0': '@types/node@22.2.0':
dependencies:
undici-types: 6.11.1
'@types/node@22.1.0':
dependencies: dependencies:
undici-types: 6.13.0 undici-types: 6.13.0
'@types/pg@8.11.6': '@types/pg@8.11.6':
dependencies: dependencies:
'@types/node': 22.0.0 '@types/node': 22.2.0
pg-protocol: 1.6.1 pg-protocol: 1.6.1
pg-types: 4.0.2 pg-types: 4.0.2
'@types/qs@6.9.15': {}
'@types/semver@7.5.8': {} '@types/semver@7.5.8': {}
acorn-walk@8.3.3: acorn-walk@8.3.3:
@ -952,6 +1049,14 @@ snapshots:
dependencies: dependencies:
fill-range: 7.1.1 fill-range: 7.1.1
call-bind@1.0.7:
dependencies:
es-define-property: 1.0.0
es-errors: 1.3.0
function-bind: 1.1.2
get-intrinsic: 1.2.4
set-function-length: 1.2.2
callsites@3.1.0: {} callsites@3.1.0: {}
chalk@2.4.2: chalk@2.4.2:
@ -1008,6 +1113,8 @@ snapshots:
create-require@1.1.1: {} create-require@1.1.1: {}
data-uri-to-buffer@4.0.1: {}
date-fns@3.6.0: {} date-fns@3.6.0: {}
dd-cache-proxy@2.1.1(@discordeno/bot@19.0.0-next.746f0a9): dd-cache-proxy@2.1.1(@discordeno/bot@19.0.0-next.746f0a9):
@ -1020,6 +1127,12 @@ snapshots:
optionalDependencies: optionalDependencies:
supports-color: 5.5.0 supports-color: 5.5.0
define-data-property@1.1.4:
dependencies:
es-define-property: 1.0.0
es-errors: 1.3.0
gopd: 1.0.1
diff@4.0.2: {} diff@4.0.2: {}
dotenv@16.4.5: {} dotenv@16.4.5: {}
@ -1030,45 +1143,71 @@ snapshots:
dependencies: dependencies:
is-arrayish: 0.2.1 is-arrayish: 0.2.1
esbuild@0.21.5: es-define-property@1.0.0:
dependencies:
get-intrinsic: 1.2.4
es-errors@1.3.0: {}
esbuild@0.23.0:
optionalDependencies: optionalDependencies:
'@esbuild/aix-ppc64': 0.21.5 '@esbuild/aix-ppc64': 0.23.0
'@esbuild/android-arm': 0.21.5 '@esbuild/android-arm': 0.23.0
'@esbuild/android-arm64': 0.21.5 '@esbuild/android-arm64': 0.23.0
'@esbuild/android-x64': 0.21.5 '@esbuild/android-x64': 0.23.0
'@esbuild/darwin-arm64': 0.21.5 '@esbuild/darwin-arm64': 0.23.0
'@esbuild/darwin-x64': 0.21.5 '@esbuild/darwin-x64': 0.23.0
'@esbuild/freebsd-arm64': 0.21.5 '@esbuild/freebsd-arm64': 0.23.0
'@esbuild/freebsd-x64': 0.21.5 '@esbuild/freebsd-x64': 0.23.0
'@esbuild/linux-arm': 0.21.5 '@esbuild/linux-arm': 0.23.0
'@esbuild/linux-arm64': 0.21.5 '@esbuild/linux-arm64': 0.23.0
'@esbuild/linux-ia32': 0.21.5 '@esbuild/linux-ia32': 0.23.0
'@esbuild/linux-loong64': 0.21.5 '@esbuild/linux-loong64': 0.23.0
'@esbuild/linux-mips64el': 0.21.5 '@esbuild/linux-mips64el': 0.23.0
'@esbuild/linux-ppc64': 0.21.5 '@esbuild/linux-ppc64': 0.23.0
'@esbuild/linux-riscv64': 0.21.5 '@esbuild/linux-riscv64': 0.23.0
'@esbuild/linux-s390x': 0.21.5 '@esbuild/linux-s390x': 0.23.0
'@esbuild/linux-x64': 0.21.5 '@esbuild/linux-x64': 0.23.0
'@esbuild/netbsd-x64': 0.21.5 '@esbuild/netbsd-x64': 0.23.0
'@esbuild/openbsd-x64': 0.21.5 '@esbuild/openbsd-arm64': 0.23.0
'@esbuild/sunos-x64': 0.21.5 '@esbuild/openbsd-x64': 0.23.0
'@esbuild/win32-arm64': 0.21.5 '@esbuild/sunos-x64': 0.23.0
'@esbuild/win32-ia32': 0.21.5 '@esbuild/win32-arm64': 0.23.0
'@esbuild/win32-x64': 0.21.5 '@esbuild/win32-ia32': 0.23.0
'@esbuild/win32-x64': 0.23.0
escalade@3.1.2: {} escalade@3.1.2: {}
escape-string-regexp@1.0.5: {} escape-string-regexp@1.0.5: {}
fetch-blob@3.2.0:
dependencies:
node-domexception: 1.0.0
web-streams-polyfill: 3.3.3
fill-range@7.1.1: fill-range@7.1.1:
dependencies: dependencies:
to-regex-range: 5.0.1 to-regex-range: 5.0.1
formdata-polyfill@4.0.10:
dependencies:
fetch-blob: 3.2.0
fsevents@2.3.3: fsevents@2.3.3:
optional: true optional: true
function-bind@1.1.2: {}
get-caller-file@2.0.5: {} get-caller-file@2.0.5: {}
get-intrinsic@1.2.4:
dependencies:
es-errors: 1.3.0
function-bind: 1.1.2
has-proto: 1.0.3
has-symbols: 1.0.3
hasown: 2.0.2
get-tsconfig@4.7.6: get-tsconfig@4.7.6:
dependencies: dependencies:
resolve-pkg-maps: 1.0.0 resolve-pkg-maps: 1.0.0
@ -1077,16 +1216,20 @@ snapshots:
dependencies: dependencies:
is-glob: 4.0.3 is-glob: 4.0.3
gopd@1.0.1:
dependencies:
get-intrinsic: 1.2.4
graphile-config@0.0.1-beta.9: graphile-config@0.0.1-beta.9:
dependencies: dependencies:
'@types/interpret': 1.1.3 '@types/interpret': 1.1.3
'@types/node': 20.14.13 '@types/node': 20.14.15
'@types/semver': 7.5.8 '@types/semver': 7.5.8
chalk: 4.1.2 chalk: 4.1.2
debug: 4.3.6(supports-color@5.5.0) debug: 4.3.6(supports-color@5.5.0)
interpret: 3.1.1 interpret: 3.1.1
semver: 7.6.3 semver: 7.6.3
tslib: 2.6.2 tslib: 2.6.3
yargs: 17.7.2 yargs: 17.7.2
transitivePeerDependencies: transitivePeerDependencies:
- supports-color - supports-color
@ -1100,7 +1243,7 @@ snapshots:
graphile-config: 0.0.1-beta.9 graphile-config: 0.0.1-beta.9
json5: 2.2.3 json5: 2.2.3
pg: 8.12.0 pg: 8.12.0
tslib: 2.6.2 tslib: 2.6.3
yargs: 17.7.2 yargs: 17.7.2
transitivePeerDependencies: transitivePeerDependencies:
- pg-native - pg-native
@ -1111,6 +1254,18 @@ snapshots:
has-flag@4.0.0: {} has-flag@4.0.0: {}
has-property-descriptors@1.0.2:
dependencies:
es-define-property: 1.0.0
has-proto@1.0.3: {}
has-symbols@1.0.3: {}
hasown@2.0.2:
dependencies:
function-bind: 1.1.2
ignore-by-default@1.0.1: {} ignore-by-default@1.0.1: {}
import-fresh@3.3.0: import-fresh@3.3.0:
@ -1156,6 +1311,14 @@ snapshots:
ms@2.1.2: {} ms@2.1.2: {}
node-domexception@1.0.0: {}
node-fetch@3.3.2:
dependencies:
data-uri-to-buffer: 4.0.1
fetch-blob: 3.2.0
formdata-polyfill: 4.0.10
nodemon@3.1.4: nodemon@3.1.4:
dependencies: dependencies:
chokidar: 3.6.0 chokidar: 3.6.0
@ -1171,6 +1334,8 @@ snapshots:
normalize-path@3.0.0: {} normalize-path@3.0.0: {}
object-inspect@1.13.2: {}
obuf@1.1.2: {} obuf@1.1.2: {}
parent-module@1.0.1: parent-module@1.0.1:
@ -1263,6 +1428,10 @@ snapshots:
pstree.remy@1.1.8: {} pstree.remy@1.1.8: {}
qs@6.13.0:
dependencies:
side-channel: 1.0.6
readdirp@3.6.0: readdirp@3.6.0:
dependencies: dependencies:
picomatch: 2.3.1 picomatch: 2.3.1
@ -1275,6 +1444,22 @@ snapshots:
semver@7.6.3: {} semver@7.6.3: {}
set-function-length@1.2.2:
dependencies:
define-data-property: 1.1.4
es-errors: 1.3.0
function-bind: 1.1.2
get-intrinsic: 1.2.4
gopd: 1.0.1
has-property-descriptors: 1.0.2
side-channel@1.0.6:
dependencies:
call-bind: 1.0.7
es-errors: 1.3.0
get-intrinsic: 1.2.4
object-inspect: 1.13.2
simple-update-notifier@2.0.0: simple-update-notifier@2.0.0:
dependencies: dependencies:
semver: 7.6.3 semver: 7.6.3
@ -1305,14 +1490,14 @@ snapshots:
touch@3.1.1: {} touch@3.1.1: {}
ts-node@10.9.2(@types/node@22.1.0)(typescript@5.5.4): ts-node@10.9.2(@types/node@22.2.0)(typescript@5.5.4):
dependencies: dependencies:
'@cspotcode/source-map-support': 0.8.1 '@cspotcode/source-map-support': 0.8.1
'@tsconfig/node10': 1.0.11 '@tsconfig/node10': 1.0.11
'@tsconfig/node12': 1.0.11 '@tsconfig/node12': 1.0.11
'@tsconfig/node14': 1.0.3 '@tsconfig/node14': 1.0.3
'@tsconfig/node16': 1.0.4 '@tsconfig/node16': 1.0.4
'@types/node': 22.1.0 '@types/node': 22.2.0
acorn: 8.12.1 acorn: 8.12.1
acorn-walk: 8.3.3 acorn-walk: 8.3.3
arg: 4.1.3 arg: 4.1.3
@ -1323,11 +1508,11 @@ snapshots:
v8-compile-cache-lib: 3.0.1 v8-compile-cache-lib: 3.0.1
yn: 3.1.1 yn: 3.1.1
tslib@2.6.2: {} tslib@2.6.3: {}
tsx@4.16.2: tsx@4.17.0:
dependencies: dependencies:
esbuild: 0.21.5 esbuild: 0.23.0
get-tsconfig: 4.7.6 get-tsconfig: 4.7.6
optionalDependencies: optionalDependencies:
fsevents: 2.3.3 fsevents: 2.3.3
@ -1338,12 +1523,12 @@ snapshots:
undici-types@5.26.5: {} undici-types@5.26.5: {}
undici-types@6.11.1: {}
undici-types@6.13.0: {} undici-types@6.13.0: {}
v8-compile-cache-lib@3.0.1: {} v8-compile-cache-lib@3.0.1: {}
web-streams-polyfill@3.3.3: {}
wrap-ansi@7.0.0: wrap-ansi@7.0.0:
dependencies: dependencies:
ansi-styles: 4.3.0 ansi-styles: 4.3.0

View File

@ -1,12 +1,19 @@
import { createBot, Intents, type Bot } from '@discordeno/bot' import { createBot, createGatewayManager, createRestManager, Intents, type Bot } from '@discordeno/bot'
import { createProxyCache, } from 'dd-cache-proxy'; import { createProxyCache, } from 'dd-cache-proxy';
import { configs } from './config.ts' import { configs } from './config.ts'
// not sure I need this.
// @see https://github.com/discordeno/discordeno/blob/352887c215cc9d93d7f1fa9c8589e66f47ffb3ea/packages/bot/src/bot.ts#L74
// const getSessionInfoHandler = async () => {
// return await bot.rest.getGatewayBot()
// }
export const bot = createProxyCache( export const bot = createProxyCache(
createBot({ createBot({
token: configs.token, token: configs.token,
intents: Intents.Guilds | Intents.GuildMessages intents: Intents.Guilds | Intents.GuildMessages,
rest: createRestManager({ token: configs.token, applicationId: configs.discordApplicationId }),
gateway: createGatewayManager({ token: configs.token })
}), }),
{ {
desiredProps: { desiredProps: {
@ -21,9 +28,6 @@ export const bot = createProxyCache(
}, },
) )
// @todo figure out where this code belongs
// gateway.resharding.getSessionInfo = async () => { // insert code here to fetch getSessionInfo from rest process. }
// Setup desired properties // Setup desired properties
bot.transformers.desiredProperties.interaction.id = true bot.transformers.desiredProperties.interaction.id = true
bot.transformers.desiredProperties.interaction.type = true bot.transformers.desiredProperties.interaction.type = true

View File

@ -9,26 +9,25 @@ import { createCommand } from '../commands.ts'
import { configs } from '../config.ts' import { configs } from '../config.ts'
async function createRecordInDatabase(url: string, discordMessageId: string) { async function createStreamInDatabase(url: string, discordMessageId: string) {
const record = { const streamPayload = {
url, url,
recording_state: 'pending', status: 'pending_recording',
discord_message_id: discordMessageId, discord_message_id: discordMessageId
file_size: 0
} }
const res = await fetch(`${configs.postgrestUrl}/records`, { const res = await fetch(`${configs.postgrestUrl}/streams`, {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
'Authorization': `Bearer ${configs.automationUserJwt}`, 'Authorization': `Bearer ${configs.automationUserJwt}`,
'Prefer': 'return=headers-only' 'Prefer': 'return=headers-only'
}, },
body: JSON.stringify(record) body: JSON.stringify(streamPayload)
}) })
if (!res.ok) { if (!res.ok) {
const status = res.status const status = res.status
const statusText = res.statusText const statusText = res.statusText
const msg = `fetch failed to create recording record in database. status=${status}, statusText=${statusText}` const msg = `fetch failed to create stream in database. status=${status}, statusText=${statusText}`
console.error(msg) console.error(msg)
throw new Error(msg) throw new Error(msg)
} }
@ -83,7 +82,7 @@ createCommand({
} }
// @todo create record in db // @todo create record in db
const record = await createRecordInDatabase(url, message.id.toString()) const record = await createStreamInDatabase(url, message.id.toString())
// console.log(record) // console.log(record)
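Note that the request above sends `Prefer: return=headers-only`, so PostgREST reports the new row's identity in the `Location` response header rather than in the body. A hedged sketch of recovering the id from that header (the helper is hypothetical; the header shape follows PostgREST's documented `/streams?id=eq.<id>` format):

```ts
// With 'Prefer: return=headers-only', PostgREST responds with a header like
// "Location: /streams?id=eq.42". Extract the id portion for later updates.
function getStreamIdFromLocation(res: Response): string {
  const location = res.headers.get('location')
  const id = location?.match(/id=eq\.([^&]+)/)?.[1]
  if (!id) throw new Error(`could not parse stream id from Location header: ${location}`)
  return id
}
```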

View File

@ -4,6 +4,7 @@ if (!process.env.POSTGREST_URL) throw new Error('Missing POSTGREST_URL env var')
if (!process.env.DISCORD_TOKEN) throw new Error('Missing DISCORD_TOKEN env var'); if (!process.env.DISCORD_TOKEN) throw new Error('Missing DISCORD_TOKEN env var');
if (!process.env.DISCORD_CHANNEL_ID) throw new Error("DISCORD_CHANNEL_ID was missing from env"); if (!process.env.DISCORD_CHANNEL_ID) throw new Error("DISCORD_CHANNEL_ID was missing from env");
if (!process.env.DISCORD_GUILD_ID) throw new Error("DISCORD_GUILD_ID was missing from env"); if (!process.env.DISCORD_GUILD_ID) throw new Error("DISCORD_GUILD_ID was missing from env");
if (!process.env.DISCORD_APPLICATION_ID) throw new Error('DISCORD_APPLICATION_ID was missing from env');
if (!process.env.AUTOMATION_USER_JWT) throw new Error('Missing AUTOMATION_USER_JWT env var'); if (!process.env.AUTOMATION_USER_JWT) throw new Error('Missing AUTOMATION_USER_JWT env var');
const token = process.env.DISCORD_TOKEN! const token = process.env.DISCORD_TOKEN!
const postgrestUrl = process.env.POSTGREST_URL! const postgrestUrl = process.env.POSTGREST_URL!
@ -11,8 +12,8 @@ const discordChannelId = process.env.DISCORD_CHANNEL_ID!
const discordGuildId = process.env.DISCORD_GUILD_ID! const discordGuildId = process.env.DISCORD_GUILD_ID!
const automationUserJwt = process.env.AUTOMATION_USER_JWT! const automationUserJwt = process.env.AUTOMATION_USER_JWT!
const connectionString = process.env.WORKER_CONNECTION_STRING! const connectionString = process.env.WORKER_CONNECTION_STRING!
const discordApplicationId = process.env.DISCORD_APPLICATION_ID!
console.log(`hello i am configs and configs.connectionString=${connectionString}`)
export interface Config { export interface Config {
@ -22,6 +23,7 @@ export interface Config {
discordGuildId: string; discordGuildId: string;
discordChannelId: string; discordChannelId: string;
connectionString: string; connectionString: string;
discordApplicationId: string;
} }
@ -32,4 +34,5 @@ export const configs: Config = {
discordGuildId, discordGuildId,
discordChannelId, discordChannelId,
connectionString, connectionString,
discordApplicationId,
} }

View File

@ -1,5 +1,5 @@
import updateDiscordMessage from './tasks/update_discord_message.js' import update_discord_message from './tasks/update_discord_message.js'
import { type WorkerUtils, type RunnerOptions, run } from 'graphile-worker' import { type WorkerUtils, type RunnerOptions, run } from 'graphile-worker'
import { bot } from './bot.ts' import { bot } from './bot.ts'
import type { Interaction } from '@discordeno/bot' import type { Interaction } from '@discordeno/bot'
@ -26,15 +26,15 @@ async function setupGraphileWorker() {
taskDirectory: join(__dirname, 'tasks') taskDirectory: join(__dirname, 'tasks')
}, },
}; };
console.log('worker preset as follows') // console.log('worker preset as follows')
console.log(preset) // console.log(preset)
const runnerOptions: RunnerOptions = { const runnerOptions: RunnerOptions = {
preset preset
// concurrency: 3, // concurrency: 3,
// connectionString: configs.connectionString, // connectionString: configs.connectionString,
// taskDirectory: join(__dirname, 'tasks'), // taskDirectory: join(__dirname, 'tasks'),
// taskList: { // taskList: {
// 'update_discord_message': updateDiscordMessage // 'update_discord_message': update_discord_message
// } // }
} }

View File

@ -0,0 +1,72 @@
import type { Task, Helpers } from "graphile-worker"
import { sub } from 'date-fns'
import type { RecordingRecord, Stream } from "@futureporn/types"
import qs from 'qs'
import fetch from 'node-fetch'
import { configs } from '../config.ts'
interface Payload {
idle_minutes: number;
}
function assertPayload(payload: any): asserts payload is Payload {
if (typeof payload !== "object" || !payload) throw new Error("invalid payload");
if (!payload.idle_minutes) throw new Error(`idle_minutes was absent in the payload`);
if (typeof payload.idle_minutes !== 'number') throw new Error(`idle_minutes parameter was not a number`);
}
export const expire_stream_recordings: Task = async function (payload: unknown, helpers: Helpers) {
assertPayload(payload)
const { idle_minutes } = payload
helpers.logger.info(`expire_stream_recordings has begun. Expiring 'recording' and 'pending' streams that haven't been updated in ${idle_minutes} minutes.`)
const url = 'http://postgrest.futureporn.svc.cluster.local:9000/streams'
let streams: Stream[] = []
try {
// 1. identify and update stalled /streams
// Any stream that was updated earlier than n minutes ago AND is in 'pending_recording' or 'recording' state is marked as stalled.
const timestamp = sub(new Date(), { minutes: idle_minutes }).toISOString()
const queryOptions = {
updated_at: `lt.${timestamp}`,
or: '(status.eq.pending_recording,status.eq.recording)'
}
const updatePayload = {
updated_at: new Date().toISOString(),
status: 'stalled'
}
helpers.logger.info(JSON.stringify(updatePayload))
const query = qs.stringify(queryOptions)
const res = await fetch (`${url}?${query}`, {
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${configs.automationUserJwt}`,
'Prefer': 'return=headers-only'
},
body: JSON.stringify(updatePayload)
})
if (!res.ok) {
const body = await res.text()
helpers.logger.info(JSON.stringify(res.headers))
helpers.logger.error(`Response code was not 200. status=${res.status}, statusText=${res.statusText}`)
helpers.logger.error(body)
return;
}
const body = await res.text()
helpers.logger.info('body as follows')
helpers.logger.info(body)
} catch (e: any) {
if (e instanceof Error) {
helpers.logger.error(`encountered an error while fetching /streams`)
helpers.logger.error(e.message)
} else {
helpers.logger.error(e)
}
}
}
export default expire_stream_recordings

View File

@ -0,0 +1,85 @@
import type { Task, Helpers } from "graphile-worker"
import { sub } from 'date-fns'
import type { RecordingRecord } from "@futureporn/types"
import qs from 'qs'
import fetch from 'node-fetch'
import { configs } from '../config.ts'
interface Payload {
idle_minutes: number;
}
function assertPayload(payload: any): asserts payload is Payload {
if (typeof payload !== "object" || !payload) throw new Error("invalid payload");
if (!payload.idle_minutes) throw new Error('idle_minutes was missing from payload');
if (typeof payload.idle_minutes !== 'number') throw new Error('idle_minutes must be a number');
}
export const restart_failed_recordings: Task = async function (payload: unknown, helpers: Helpers) {
assertPayload(payload)
const { idle_minutes } = payload
helpers.logger.info(`restart_failed_recordings has begun. Expiring 'recording' and 'pending' records that haven't been updated in ${idle_minutes} minutes.`)
const url = 'http://postgrest.futureporn.svc.cluster.local:9000/records'
let records: RecordingRecord[] = []
try {
// 1. identify failed /records
// Any record that was updated earlier than n minutes ago AND is in 'pending' or 'recording' state is marked as stalled.
const timestamp = sub(new Date(), { minutes: idle_minutes }).toISOString()
const queryOptions = {
updated_at: `lt.${timestamp}`,
or: '(recording_state.eq.pending,recording_state.eq.recording)'
}
const updatePayload = {
updated_at: new Date().toISOString(),
recording_state: 'stalled'
}
helpers.logger.info(JSON.stringify(updatePayload))
const query = qs.stringify(queryOptions)
const res = await fetch (`${url}?${query}`, {
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${configs.automationUserJwt}`,
'Prefer': 'return=headers-only'
},
body: JSON.stringify(updatePayload)
})
if (!res.ok) {
const body = await res.text()
helpers.logger.info(JSON.stringify(res.headers))
helpers.logger.error(`Response code was not 200. status=${res.status}, statusText=${res.statusText}`)
helpers.logger.error(body)
return;
}
const body = await res.text()
helpers.logger.info('body as follows')
helpers.logger.info(body)
// const data = await res.json() as RecordingRecord[]
// if (data.length < 1) return;
// records = data
} catch (e: any) {
if (e instanceof Error) {
helpers.logger.error(`encountered an error while fetching /records`)
helpers.logger.error(e.message)
} else {
helpers.logger.error(e)
}
}
// // 2. identify and update
// for (const record of records) {
// const res = await fetch(`${url}?`)
// }
// // 3. done
}
export default restart_failed_recordings

View File

@ -1,5 +1,5 @@
import 'dotenv/config' import 'dotenv/config'
import type { RecordingState } from '@futureporn/types' import type { Status } from '@futureporn/types'
import { type Task, type Helpers } from 'graphile-worker' import { type Task, type Helpers } from 'graphile-worker'
import { add } from 'date-fns' import { add } from 'date-fns'
import prettyBytes from 'pretty-bytes' import prettyBytes from 'pretty-bytes'
@ -18,26 +18,26 @@ import { bot } from '../bot.ts'
import { configs } from '../config.ts' import { configs } from '../config.ts'
interface Payload { interface Payload {
record_id: number; stream_id: number;
} }
function assertPayload(payload: any): asserts payload is Payload { function assertPayload(payload: any): asserts payload is Payload {
if (typeof payload !== "object" || !payload) throw new Error("invalid payload"); if (typeof payload !== "object" || !payload) throw new Error("invalid payload");
if (!payload.record_id) throw new Error(`record_id was absent in the payload`); if (!payload.stream_id) throw new Error(`stream_id was absent in the payload`);
} }
async function editDiscordMessage({ helpers, recordingState, discordMessageId, url, fileSize, recordId }: { recordId: number, fileSize: number, url: string, helpers: Helpers, recordingState: RecordingState, discordMessageId: string }) { async function editDiscordMessage({ helpers, streamStatus, discordMessageId, url, fileSize, streamId }: { streamId: number, fileSize: number, url: string, helpers: Helpers, streamStatus: Status, discordMessageId: string }) {
if (!discordMessageId) throw new Error(`discordMessageId was missing!`); if (!discordMessageId) throw new Error(`discordMessageId was missing!`);
if (typeof discordMessageId !== 'string') throw new Error(`discordMessageId was not a string!`); if (typeof discordMessageId !== 'string') throw new Error(`discordMessageId was not a string!`);
// const { captureJobId } = job.data // const { captureJobId } = job.data
helpers.logger.info(`editDiscordMessage has begun with discordMessageId=${discordMessageId}, state=${recordingState}`) helpers.logger.info(`editDiscordMessage has begun with discordMessageId=${discordMessageId}, streamStatus=${streamStatus}`)
// const guild = await bot.cache.guilds.get(BigInt(configs.discordGuildId)) // const guild = await bot.cache.guilds.get(BigInt(configs.discordGuildId))
@ -49,51 +49,19 @@ async function editDiscordMessage({ helpers, recordingState, discordMessageId, u
const channelId = BigInt(configs.discordChannelId) const channelId = BigInt(configs.discordChannelId)
const updatedMessage: EditMessage = { const updatedMessage: EditMessage = {
embeds: getStatusEmbed({ recordingState, fileSize, recordId, url }), embeds: getStatusEmbed({ streamStatus, fileSize, streamId, url }),
} }
bot.helpers.editMessage(channelId, discordMessageId, updatedMessage) bot.helpers.editMessage(channelId, discordMessageId, updatedMessage)
// channel.
// const guild = await client.guilds.fetch(process.env.DISCORD_GUILD_ID!) as Guild
// if (!guild) throw new Error('guild was undefined');
// helpers.logger.info('here is the guild as follows')
// helpers.logger.info(guild.toString())
// helpers.logger.info(`fetching discord channel id=${process.env.DISCORD_CHANNEL_ID} from discord guild`)
// const channel = await client.channels.fetch(process.env.DISCORD_CHANNEL_ID!) as TextChannel
// if (!channel) throw new Error(`discord channel was undefined`);
// const message = await channel.messages.fetch(discordMessageId)
// helpers.logger.info(`discordMessageId=${discordMessageId}`)
// helpers.logger.info(message as any)
// const statusEmbed = getStatusEmbed({ recordId, recordingState, fileSize, url })
// const buttonRow = getButtonRow(recordingState)
// // const embed = new EmbedBuilder().setTitle('Attachments');
// const updatedMessage = {
// embeds: [
// statusEmbed
// ],
// components: [
// buttonRow
// ]
// };
// message.edit(updatedMessage)
} }
async function getRecordFromDatabase(recordId: number) { async function getStreamFromDatabase(streamId: number) {
const res = await fetch(`${process.env.POSTGREST_URL}/records?id=eq.${recordId}`) const res = await fetch(`${process.env.POSTGREST_URL}/streams?select=*,segment:segments(*)&id=eq.${streamId}`)
if (!res.ok) { if (!res.ok) {
throw new Error(`failed fetching record ${recordId}. status=${res.status}, statusText=${res.statusText}`) throw new Error(`failed fetching stream ${streamId}. status=${res.status}, statusText=${res.statusText}`)
} }
const body = await res.json() as any const body = await res.json() as any
return body[0]; return body[0];
@ -102,66 +70,74 @@ async function getRecordFromDatabase(recordId: number) {
/** /**
* updateDiscordMessage is the task where we edit a previously sent discord message to display * update_discord_message is the task where we edit a previously sent discord message to display
* the most up-to-date status information from the database * the most up-to-date status information from the database
* *
* Sometimes the update is changing the state, one of Pending|Recording|Aborted|Ended. * Sometimes the update is changing the state, one of Pending|Recording|Aborted|Ended.
* Sometimes the update is updating the Filesize of the recording in-progress * Sometimes the update is updating the Filesize of the recording in-progress
* Sometimes the update is adding a thumbnail image to the message * Sometimes the update is adding a thumbnail image to the message
*/ */
export const updateDiscordMessage: Task = async function (payload, helpers: Helpers) { export const update_discord_message: Task = async function (payload, helpers: Helpers) {
try { try {
assertPayload(payload) assertPayload(payload)
const { record_id } = payload const { stream_id } = payload
const recordId = record_id const streamId = stream_id
helpers.logger.info(`updateDiscordMessage() with recordId=${recordId}`) helpers.logger.info(`update_discord_message() with streamId=${streamId}`)
const record = await getRecordFromDatabase(recordId) const stream = await getStreamFromDatabase(streamId)
const { discord_message_id, recording_state, file_size, url } = record const { discord_message_id, status, file_size, url } = stream
const recordingState = recording_state const streamStatus = status
const discordMessageId = discord_message_id const discordMessageId = discord_message_id
const fileSize = file_size const fileSize = file_size
editDiscordMessage({ helpers, recordingState, discordMessageId, url, fileSize, recordId }) editDiscordMessage({ helpers, streamStatus, discordMessageId, url, fileSize, streamId })
// schedule the next update 10s from now, but only if the recording is still happening // schedule the next update 10s from now, but only if the recording is still happening
if (recordingState !== 'ended') { if (streamStatus !== 'ended') {
const runAt = add(new Date(), { seconds: 10 }) const runAt = add(new Date(), { seconds: 10 })
const recordId = record.id const streamId = stream.id
await helpers.addJob('updateDiscordMessage', { recordId }, { jobKey: `record_${recordId}_update_discord_message`, maxAttempts: 3, runAt }) await helpers.addJob('update_discord_message', { stream_id: streamId }, { jobKey: `stream_${streamId}_update_discord_message`, maxAttempts: 3, runAt })
} }
} catch (e) { } catch (e) {
helpers.logger.error(`caught an error during updateDiscordMessage. e=${e}`) helpers.logger.error(`caught an error during update_discord_message. e=${e}`)
} }
} }
function getStatusEmbed({ function getStatusEmbed({
recordingState, recordId, fileSize, url streamStatus, streamId, fileSize, url
}: { fileSize: number, recordingState: RecordingState, recordId: number, url: string }) { }: { fileSize: number, streamStatus: Status, streamId: number, url: string }) {
const embeds = new EmbedsBuilder() const embeds = new EmbedsBuilder()
.setTitle(`Record ${recordId}`) .setTitle(`Stream ${streamId}`)
.setFields([ .setFields([
{ name: 'Status', value: recordingState.charAt(0).toUpperCase()+recordingState.slice(1), inline: true }, { name: 'Status', value: streamStatus.charAt(0).toUpperCase()+streamStatus.slice(1), inline: true },
{ name: 'Filesize', value: prettyBytes(fileSize), inline: true }, { name: 'Filesize', value: prettyBytes(fileSize), inline: true },
{ name: 'URL', value: url, inline: false }, { name: 'URL', value: url, inline: false },
]) ])
if (recordingState === 'pending') { if (streamStatus === 'pending') {
embeds embeds
.setDescription("Waiting for a worker to accept the job.") .setDescription("Waiting for a worker to accept the job.")
.setColor(2326507) .setColor(2326507)
} else if (recordingState === 'recording') { } else if (streamStatus === 'recording') {
embeds embeds
.setDescription('The stream is being recorded.') .setDescription('The stream is being recorded.')
.setColor(392960) .setColor(392960)
} else if (recordingState === 'aborted') { } else if (streamStatus === 'aborted') {
embeds embeds
.setDescription("The recording was stopped by the user.") .setDescription("The recording was stopped by the user.")
.setColor(8289651) .setColor(8289651)
} else if (recordingState === 'ended') { } else if (streamStatus === 'finished') {
embeds embeds
.setDescription("The recording has stopped.") .setDescription("The recording has ended nominally.")
.setColor(10855845) .setColor(10855845)
} else if (streamStatus === 'failed') {
embeds
.setDescription("The recording has ended abnorminally.")
.setColor(8289651)
} else if (streamStatus === 'stalled') {
embeds
.setDescription("We have not received a progress update in the past two minutes.")
.setColor(8289651)
} else { } else {
embeds embeds
.setDescription('The recording is in an unknown state? (this is a bug.)') .setDescription(`The recording is in an unknown state (streamStatus=${streamStatus}); this is a bug.`)
.setColor(10855845) .setColor(10855845)
} }
return embeds return embeds
@ -169,10 +145,10 @@ function getStatusEmbed({
function getButtonRow(state: RecordingState): ActionRow { function getButtonRow(streamStatus: Status): ActionRow {
const components: ButtonComponent[] = [] const components: ButtonComponent[] = []
if (state === 'pending' || state === 'recording') { if (streamStatus === 'pending' || streamStatus === 'recording') {
const stopButton: ButtonComponent = { const stopButton: ButtonComponent = {
type: MessageComponentTypes.Button, type: MessageComponentTypes.Button,
customId: 'stop', customId: 'stop',
@ -180,7 +156,7 @@ function getButtonRow(state: RecordingState): ActionRow {
style: ButtonStyles.Danger style: ButtonStyles.Danger
} }
components.push(stopButton) components.push(stopButton)
} else if (state === 'aborted') { } else if (streamStatus === 'aborted') {
const retryButton: ButtonComponent = { const retryButton: ButtonComponent = {
type: MessageComponentTypes.Button, type: MessageComponentTypes.Button,
customId: 'retry', customId: 'retry',
@ -191,7 +167,7 @@ function getButtonRow(state: RecordingState): ActionRow {
style: ButtonStyles.Secondary style: ButtonStyles.Secondary
} }
components.push(retryButton) components.push(retryButton)
} else if (state === 'ended') { } else if (streamStatus === 'finished') {
const downloadButton: ButtonComponent = { const downloadButton: ButtonComponent = {
type: MessageComponentTypes.Button, type: MessageComponentTypes.Button,
customId: 'download', customId: 'download',
@ -206,7 +182,7 @@ function getButtonRow(state: RecordingState): ActionRow {
const unknownButton: ButtonComponent = { const unknownButton: ButtonComponent = {
type: MessageComponentTypes.Button, type: MessageComponentTypes.Button,
customId: 'unknown', customId: 'unknown',
label: 'Unknown State', label: 'Unknown Status',
emoji: { emoji: {
name: 'thinking' name: 'thinking'
}, },
@ -225,4 +201,4 @@ function getButtonRow(state: RecordingState): ActionRow {
} }
export default updateDiscordMessage export default update_discord_message

View File

@ -10,9 +10,10 @@
"build": "tsup", "build": "tsup",
"test": "mocha", "test": "mocha",
"integration": "FUTUREPORN_WORKDIR=/home/cj/Downloads mocha ./integration/**/*.test.js", "integration": "FUTUREPORN_WORKDIR=/home/cj/Downloads mocha ./integration/**/*.test.js",
"dev": "tsx --watch ./src/index.ts", "dev": "pnpm run dev.nodemon # yes this is crazy to have nodemon execute tsx, but it's the only way I have found to get live reloading in TS/ESM/docker with Graphile Worker's way of loading tasks",
"dev.nodemon": "pnpm nodemon --ext ts,json,yaml --ignore ./dist --watch ./src --watch ./node_modules/@futureporn --exec \"pnpm run dev.build\"", "dev.tsx": "tsx ./src/index.ts",
"dev.build": "pnpm run build && pnpm run start", "dev.nodemon": "nodemon --ext ts --exec \"pnpm run dev.tsx\"",
"dev.node": "node --no-warnings=ExperimentalWarning --loader ts-node/esm src/index.ts",
"clean": "rm -rf dist", "clean": "rm -rf dist",
"superclean": "rm -rf node_modules && rm -rf pnpm-lock.yaml && rm -rf dist" "superclean": "rm -rf node_modules && rm -rf pnpm-lock.yaml && rm -rf dist"
}, },
@ -25,8 +26,10 @@
"@futureporn/utils": "workspace:^", "@futureporn/utils": "workspace:^",
"@paralleldrive/cuid2": "^2.2.2", "@paralleldrive/cuid2": "^2.2.2",
"@types/chai": "^4.3.16", "@types/chai": "^4.3.16",
"@types/chai-as-promised": "^7.1.8",
"@types/fluent-ffmpeg": "^2.1.24", "@types/fluent-ffmpeg": "^2.1.24",
"@types/mocha": "^10.0.7", "@types/mocha": "^10.0.7",
"@types/qs": "^6.9.15",
"date-fns": "^3.6.0", "date-fns": "^3.6.0",
"diskusage": "^1.2.0", "diskusage": "^1.2.0",
"dotenv": "^16.4.5", "dotenv": "^16.4.5",
@ -47,6 +50,7 @@
"pg-boss": "^9.0.3", "pg-boss": "^9.0.3",
"pino-pretty": "^11.2.1", "pino-pretty": "^11.2.1",
"postgres": "^3.4.4", "postgres": "^3.4.4",
"qs": "^6.13.0",
"rxjs": "^7.8.1", "rxjs": "^7.8.1",
"sql": "^0.78.0", "sql": "^0.78.0",
"winston": "^3.13.1", "winston": "^3.13.1",
@ -61,6 +65,7 @@
"aws-sdk-client-mock": "^4.0.1", "aws-sdk-client-mock": "^4.0.1",
"aws-sdk-mock": "^6.0.4", "aws-sdk-mock": "^6.0.4",
"chai": "^4.4.1", "chai": "^4.4.1",
"chai-as-promised": "^8.0.0",
"cheerio": "1.0.0-rc.12", "cheerio": "1.0.0-rc.12",
"mocha": "^10.7.0", "mocha": "^10.7.0",
"multiformats": "^11.0.2", "multiformats": "^11.0.2",

View File

@ -32,12 +32,18 @@ importers:
'@types/chai': '@types/chai':
specifier: ^4.3.16 specifier: ^4.3.16
version: 4.3.16 version: 4.3.16
'@types/chai-as-promised':
specifier: ^7.1.8
version: 7.1.8
'@types/fluent-ffmpeg': '@types/fluent-ffmpeg':
specifier: ^2.1.24 specifier: ^2.1.24
version: 2.1.24 version: 2.1.24
'@types/mocha': '@types/mocha':
specifier: ^10.0.7 specifier: ^10.0.7
version: 10.0.7 version: 10.0.7
'@types/qs':
specifier: ^6.9.15
version: 6.9.15
date-fns: date-fns:
specifier: ^3.6.0 specifier: ^3.6.0
version: 3.6.0 version: 3.6.0
@ -98,6 +104,9 @@ importers:
postgres: postgres:
specifier: ^3.4.4 specifier: ^3.4.4
version: 3.4.4 version: 3.4.4
qs:
specifier: ^6.13.0
version: 6.13.0
rxjs: rxjs:
specifier: ^7.8.1 specifier: ^7.8.1
version: 7.8.1 version: 7.8.1
@ -135,6 +144,9 @@ importers:
chai: chai:
specifier: ^4.4.1 specifier: ^4.4.1
version: 4.5.0 version: 4.5.0
chai-as-promised:
specifier: ^8.0.0
version: 8.0.0(chai@4.5.0)
cheerio: cheerio:
specifier: 1.0.0-rc.12 specifier: 1.0.0-rc.12
version: 1.0.0-rc.12 version: 1.0.0-rc.12
@ -1040,6 +1052,9 @@ packages:
'@tsconfig/node16@1.0.4': '@tsconfig/node16@1.0.4':
resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==} resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==}
'@types/chai-as-promised@7.1.8':
resolution: {integrity: sha512-ThlRVIJhr69FLlh6IctTXFkmhtP3NpMZ2QGq69StYLyKZFp/HOp1VdKZj7RvfNWYYcJ1xlbLGLLWj1UvP5u/Gw==}
'@types/chai@4.3.16': '@types/chai@4.3.16':
resolution: {integrity: sha512-PatH4iOdyh3MyWtmHVFXLWCCIhUbopaltqddG9BzB+gMIzee2MJrvd+jouii9Z3wzQJruGWAm7WOMjgfG8hQlQ==} resolution: {integrity: sha512-PatH4iOdyh3MyWtmHVFXLWCCIhUbopaltqddG9BzB+gMIzee2MJrvd+jouii9Z3wzQJruGWAm7WOMjgfG8hQlQ==}
@ -1067,6 +1082,9 @@ packages:
'@types/pg@8.11.6': '@types/pg@8.11.6':
resolution: {integrity: sha512-/2WmmBXHLsfRqzfHW7BNZ8SbYzE8OSk7i3WjFYvfgRHj7S1xj+16Je5fUKv3lVdVzk/zn9TXOqf+avFCFIE0yQ==} resolution: {integrity: sha512-/2WmmBXHLsfRqzfHW7BNZ8SbYzE8OSk7i3WjFYvfgRHj7S1xj+16Je5fUKv3lVdVzk/zn9TXOqf+avFCFIE0yQ==}
'@types/qs@6.9.15':
resolution: {integrity: sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg==}
'@types/retry@0.12.1': '@types/retry@0.12.1':
resolution: {integrity: sha512-xoDlM2S4ortawSWORYqsdU+2rxdh4LRW9ytc3zmT37RIKQh6IHyKwwtKhKis9ah8ol07DCkZxPt8BBvPjC6v4g==} resolution: {integrity: sha512-xoDlM2S4ortawSWORYqsdU+2rxdh4LRW9ytc3zmT37RIKQh6IHyKwwtKhKis9ah8ol07DCkZxPt8BBvPjC6v4g==}
@ -1270,6 +1288,11 @@ packages:
resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==}
engines: {node: '>=10'} engines: {node: '>=10'}
chai-as-promised@8.0.0:
resolution: {integrity: sha512-sMsGXTrS3FunP/wbqh/KxM8Kj/aLPXQGkNtvE5wPfSToq8wkkvBpTZo1LIiEVmC4BwkKpag+l5h/20lBMk6nUg==}
peerDependencies:
chai: '>= 2.1.2 < 6'
chai@4.5.0: chai@4.5.0:
resolution: {integrity: sha512-RITGBfijLkBddZvnn8jdqoTypxvqbOLYQkGGxXzeFjVHvudaPw0HNFD9x928/eUwYWd2dPCugVqspGALTZZQKw==} resolution: {integrity: sha512-RITGBfijLkBddZvnn8jdqoTypxvqbOLYQkGGxXzeFjVHvudaPw0HNFD9x928/eUwYWd2dPCugVqspGALTZZQKw==}
engines: {node: '>=4'} engines: {node: '>=4'}
@ -1285,6 +1308,10 @@ packages:
check-error@1.0.3: check-error@1.0.3:
resolution: {integrity: sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==} resolution: {integrity: sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==}
check-error@2.1.1:
resolution: {integrity: sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==}
engines: {node: '>= 16'}
cheerio-select@2.1.0: cheerio-select@2.1.0:
resolution: {integrity: sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==} resolution: {integrity: sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==}
@ -2408,6 +2435,10 @@ packages:
resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==}
engines: {node: '>=6'} engines: {node: '>=6'}
qs@6.13.0:
resolution: {integrity: sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==}
engines: {node: '>=0.6'}
querystring@0.2.0: querystring@0.2.0:
resolution: {integrity: sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==} resolution: {integrity: sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==}
engines: {node: '>=0.4.x'} engines: {node: '>=0.4.x'}
@ -4124,6 +4155,10 @@ snapshots:
'@tsconfig/node16@1.0.4': {} '@tsconfig/node16@1.0.4': {}
'@types/chai-as-promised@7.1.8':
dependencies:
'@types/chai': 4.3.16
'@types/chai@4.3.16': {} '@types/chai@4.3.16': {}
'@types/debug@4.1.12': '@types/debug@4.1.12':
@ -4154,6 +4189,8 @@ snapshots:
pg-protocol: 1.6.1 pg-protocol: 1.6.1
pg-types: 4.0.2 pg-types: 4.0.2
'@types/qs@6.9.15': {}
'@types/retry@0.12.1': {} '@types/retry@0.12.1': {}
'@types/semver@7.5.8': {} '@types/semver@7.5.8': {}
@ -4356,6 +4393,11 @@ snapshots:
camelcase@6.3.0: {} camelcase@6.3.0: {}
chai-as-promised@8.0.0(chai@4.5.0):
dependencies:
chai: 4.5.0
check-error: 2.1.1
chai@4.5.0: chai@4.5.0:
dependencies: dependencies:
assertion-error: 1.1.0 assertion-error: 1.1.0
@ -4381,6 +4423,8 @@ snapshots:
dependencies: dependencies:
get-func-name: 2.0.2 get-func-name: 2.0.2
check-error@2.1.1: {}
cheerio-select@2.1.0: cheerio-select@2.1.0:
dependencies: dependencies:
boolbase: 1.0.0 boolbase: 1.0.0
@ -5642,6 +5686,10 @@ snapshots:
punycode@2.3.1: {} punycode@2.3.1: {}
qs@6.13.0:
dependencies:
side-channel: 1.0.6
querystring@0.2.0: {} querystring@0.2.0: {}
querystringify@2.2.0: {} querystringify@2.2.0: {}

View File

@ -1,5 +1,5 @@
import Record from "./Record.js" import Record, { UploadStreamClosedError } from "./Record.js"
import { expect } from "chai" import * as chai from 'chai'
import { ChildProcess, spawn } from "child_process" import { ChildProcess, spawn } from "child_process"
import { createReadStream, readFileSync, ReadStream } from "fs" import { createReadStream, readFileSync, ReadStream } from "fs"
import AWSMock from 'aws-sdk-mock' import AWSMock from 'aws-sdk-mock'
@ -13,7 +13,9 @@ import { HeadObjectOutput } from 'aws-sdk/clients/s3';
import { Readable } from 'stream'; import { Readable } from 'stream';
import { mockClient } from 'aws-sdk-client-mock'; import { mockClient } from 'aws-sdk-client-mock';
import { sdkStreamMixin } from '@smithy/util-stream' import { sdkStreamMixin } from '@smithy/util-stream'
import chaiAsPromised from 'chai-as-promised'
chai.use(chaiAsPromised)
const expect = chai.expect
// "pay no attention to that man behind the curtain" // "pay no attention to that man behind the curtain"
@ -52,7 +54,7 @@ describe('Record', function () {
expect(record).to.have.property('bucket', 'test') expect(record).to.have.property('bucket', 'test')
}) })
it('should be abortable', async function () { xit('should be abortable', async function () {
const inputStream = createReadStream(join(__dirname, './fixtures/mock-stream0.mp4')) // 192627 bytes const inputStream = createReadStream(join(__dirname, './fixtures/mock-stream0.mp4')) // 192627 bytes
const s3ClientMock = mockClient(S3Client) const s3ClientMock = mockClient(S3Client)
const s3Client = new S3Client({ region: 'us-west-000' }) const s3Client = new S3Client({ region: 'us-west-000' })
@ -65,6 +67,20 @@ describe('Record', function () {
await record.abort() await record.abort()
}) })
xit('should throw if the upload stream closes before the download stream closes', async function () {
const s3Mock = mockClient(S3Client)
// const inputStream = createReadStream(join(__dirname, './fixtures/mock-stream0.mp4'))
const inputStream = createReadStream('/dev/random') // forever random
// const s3Client = new S3Client({ region: 'us-west-000' })
// s3ClientMock.on()
s3Mock.on(PutObjectCommand).resolvesOnce({}).resolvesOnce({}).rejects({})
const s3 = new S3Client({ region: 'us-west-000' })
return expect(s3.send(new PutObjectCommand({ Body: inputStream, Bucket: 'taco', Key: 'my-cool-taco.mp4' }))).to.be.rejectedWith(UploadStreamClosedError)
})
xit('should restart if a EPIPE is encountered', async function () { xit('should restart if a EPIPE is encountered', async function () {
// @todo IDK how to implement this. // @todo IDK how to implement this.
const inputStream = createReadStream(join(__dirname, './fixtures/mock-stream0.mp4')) const inputStream = createReadStream(join(__dirname, './fixtures/mock-stream0.mp4'))

View File

@ -7,6 +7,13 @@ import 'dotenv/config'
const ua0 = 'Mozilla/5.0 (X11; Linux x86_64; rv:105.0) Gecko/20100101 Firefox/105.0' const ua0 = 'Mozilla/5.0 (X11; Linux x86_64; rv:105.0) Gecko/20100101 Firefox/105.0'
export class UploadStreamClosedError extends Error {
constructor(message: string) {
super(message)
Object.setPrototypeOf(this, UploadStreamClosedError.prototype)
}
}
export interface RecordArgs { export interface RecordArgs {
filename?: string; filename?: string;
s3Client: S3Client; s3Client: S3Client;
@ -131,6 +138,7 @@ export default class Record {
parallelUploads3.on("httpUploadProgress", (progress) => { parallelUploads3.on("httpUploadProgress", (progress) => {
if (progress?.loaded) { if (progress?.loaded) {
// console.log(progress)
if (this.onProgress) this.onProgress(this.counter); if (this.onProgress) this.onProgress(this.counter);
// console.log(`uploaded ${progress.loaded} bytes (${prettyBytes(progress.loaded)})`); // console.log(`uploaded ${progress.loaded} bytes (${prettyBytes(progress.loaded)})`);
} else { } else {
@ -144,8 +152,13 @@ export default class Record {
} catch (e) { } catch (e) {
if (e instanceof Error) { if (e instanceof Error) {
console.error(`We were uploading a file to S3 but then we encountered an error! ${JSON.stringify(e, null, 2)}`) if (e.name === 'AbortError') {
throw e console.error(`Caught an AbortError, which we know how to handle. We will NOT throw; instead we return gracefully.`)
return
} else {
console.error(`We were uploading a file to S3 but then we encountered an error! ${JSON.stringify(e, null, 2)}`)
throw e
}
} else { } else {
throw new Error(`error of some sort ${JSON.stringify(e, null, 2)}`) throw new Error(`error of some sort ${JSON.stringify(e, null, 2)}`)
} }
@ -164,7 +177,14 @@ export default class Record {
this.counter += data.length this.counter += data.length
}) })
this.uploadStream.on('close', () => { this.uploadStream.on('close', () => {
console.log('[!!!] upload stream has closed') // if uploadStream closes before inputStream, throw an error.
if (!this.inputStream.closed) {
const msg = 'upload stream closed before the input stream, which suggests the S3 upload failed.'
console.error(msg)
throw new UploadStreamClosedError(msg);
} else {
console.log('upload stream has closed. In this instance it is OK since the input stream is also closed.')
}
}) })
this.uploadStream.on('error', (e) => { this.uploadStream.on('error', (e) => {
console.error('there was an error on the uploadStream. error as follows') console.error('there was an error on the uploadStream. error as follows')

View File

@ -0,0 +1,40 @@
import 'dotenv/config'
const requiredEnvVars = [
'S3_ACCESS_KEY_ID',
'S3_SECRET_ACCESS_KEY',
'S3_REGION',
'S3_ENDPOINT',
'S3_BUCKET',
'POSTGREST_URL',
'AUTOMATION_USER_JWT',
] as const;
const getEnvVar = (key: typeof requiredEnvVars[number]): string => {
const value = process.env[key];
if (!value) {
throw new Error(`Missing ${key} env var`);
}
return value;
};
export interface Config {
postgrestUrl: string;
automationUserJwt: string;
s3AccessKeyId: string;
s3SecretAccessKey: string;
s3Region: string;
s3Bucket: string;
s3Endpoint: string;
}
export const configs: Config = {
postgrestUrl: getEnvVar('POSTGREST_URL'),
automationUserJwt: getEnvVar('AUTOMATION_USER_JWT'),
s3AccessKeyId: getEnvVar('S3_ACCESS_KEY_ID'),
s3SecretAccessKey: getEnvVar('S3_SECRET_ACCESS_KEY'),
s3Region: getEnvVar('S3_REGION'),
s3Bucket: getEnvVar('S3_BUCKET'),
s3Endpoint: getEnvVar('S3_ENDPOINT'),
}

View File

@ -10,9 +10,6 @@ import { fileURLToPath } from 'url';
import { getPackageVersion } from '@futureporn/utils'; import { getPackageVersion } from '@futureporn/utils';
import type { GraphileConfig } from "graphile-config"; import type { GraphileConfig } from "graphile-config";
import type {} from "graphile-worker"; import type {} from "graphile-worker";
import start_recording from './tasks/start_recording.ts';
import { stop_recording } from './tasks/stop_recording.ts';
import record from './tasks/record.ts'
const __dirname = dirname(fileURLToPath(import.meta.url)); const __dirname = dirname(fileURLToPath(import.meta.url));
const version = getPackageVersion(join(__dirname, '../package.json')) const version = getPackageVersion(join(__dirname, '../package.json'))
@ -58,12 +55,10 @@ async function worker(workerUtils: WorkerUtils) {
const runnerOptions: RunnerOptions = { const runnerOptions: RunnerOptions = {
preset, preset,
concurrency, concurrency,
// taskDirectory: join(__dirname, 'tasks'), taskDirectory: join(__dirname, 'tasks'),
taskList: { // taskList: {
'record': record, // 'record': record,
'start_recording': start_recording, // }
'stop_recording': stop_recording
}
} }
const runner = await graphileRun(runnerOptions) const runner = await graphileRun(runnerOptions)

View File

@ -1,9 +1,115 @@
/**
*
* # notes
*
* # creation
*
* ## api.records
*
* id: 2
* url: 'https://chaturbate.com/example'
* discord_message_id: 238492348324
* recording_state: 'pending'
* is_aborted: false
* created_at: 2024-08-15T21:36:27.796Z
* updated_at: 2024-08-15T21:36:27.796Z
*
* ## api.segments
*
* id: 5
* s3_key: example-date-cuid.mp4
* s3_id: 2342309492348324
* bytes: 0
* created_at: 2024-08-15T21:36:27.796Z
* updated_at: 2024-08-15T21:36:27.796Z
*
* ## api.records_segments_links
*
* id: 9
* stream_id: 2
* segment_id: 5
* segment_order: 0
* created_at: 2024-08-15T21:36:27.796Z
* updated_at: 2024-08-15T21:36:27.796Z
*
* # progress
*
* ## api.records
*
* id: 2
* url: 'https://chaturbate.com/example'
* discord_message_id: 238492348324
* recording_state: 'recording'
* is_aborted: false
* created_at: 2024-08-15T21:36:27.796Z
* updated_at: 2024-08-15T21:37:37.168Z
*
* ## api.segments
*
* id: 5
* s3_key: example-2024-08-15-72ff4b5ae7dae73b.mp4
* s3_id: 2342309492348324
* bytes: 8384
* created_at: 2024-08-15T21:36:27.796Z
* updated_at: 2024-08-15T21:37:37.168Z
*
*
* # new segment
*
* ## api.segments
*
* id: 6
* s3_key: example-2024-08-15-cda21be5e54621f2.mp4
* s3_id: a974eb6e194b7987
* bytes: 0
* created_at: 2024-08-15T21:38:34.878Z
* updated_at: 2024-08-15T21:38:34.878Z
*
* ## api.records_segments_links
*
* id: 10
* stream_id: 2
* segment_id: 6
* segment_order: 1
* created_at: 2024-08-15T21:38:34.878Z
* updated_at: 2024-08-15T21:38:34.878Z
*
* # progress
*
* ## api.segments
*
* id: 6
* s3_key: example-2024-08-15-cda21be5e54621f2.mp4
* s3_id: a974eb6e194b7987
* bytes: 1024
* created_at: 2024-08-15T21:38:34.878Z
* updated_at: 2024-08-15T21:39:11.437Z
*
* # completion
*
* ## api.records
*
* id: 2
* url: 'https://chaturbate.com/example'
* discord_message_id: 238492348324
* recording_state: 'finished'
* is_aborted: false
* created_at: 2024-08-15T21:36:27.796Z
* updated_at: 2024-08-15T21:39:41.692Z
*
*/
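// For reference, the row shapes walked through in the notes above, sketched as
// TypeScript interfaces. This is only a sketch: column names not shown in the
// notes and their exact TS types are assumptions, not definitions from @futureporn/types.
//
// interface RecordRow {
//   id: number;
//   url: string;
//   discord_message_id: string;
//   recording_state: 'pending' | 'recording' | 'finished';
//   is_aborted: boolean;
//   created_at: string; // ISO 8601 timestamp
//   updated_at: string;
// }
//
// interface SegmentRow {
//   id: number;
//   s3_key: string;
//   s3_id: string;
//   bytes: number;
//   created_at: string;
//   updated_at: string;
// }
//
// interface RecordSegmentLinkRow {
//   id: number;
//   stream_id: number;
//   segment_id: number;
//   segment_order: number;
//   created_at: string;
//   updated_at: string;
// }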
import querystring from 'node:querystring'
import { Helpers, type Task } from 'graphile-worker' import { Helpers, type Task } from 'graphile-worker'
import Record from '../Record.ts' import Record from '../Record.ts'
import { getPlaylistUrl } from '@futureporn/scout/ytdlp.ts' import { getPlaylistUrl } from '@futureporn/scout/ytdlp.ts'
import type { RecordingState } from '@futureporn/types' import type { RecordingState, RecordingRecord, Segment } from '@futureporn/types'
import { add } from 'date-fns' import { add } from 'date-fns'
import { backOff } from "exponential-backoff";
import { configs } from '../config.ts'
import qs from 'qs'
import { createId } from '@paralleldrive/cuid2'
/** /**
* url is the URL to be recorded. Ex: chaturbate.com/projektmelody * url is the URL to be recorded. Ex: chaturbate.com/projektmelody
@ -12,189 +118,229 @@ import { add } from 'date-fns'
*/ */
interface Payload { interface Payload {
url: string; url: string;
record_id: number; stream_id: string;
}
interface RecordingRecord {
id: number;
recordingState: RecordingState;
fileSize: number;
discordMessageId: string;
isAborted: boolean;
}
interface RawRecordingRecord {
id: number;
recording_state: RecordingState;
file_size: number;
discord_message_id: string;
is_aborted: boolean;
} }
function assertPayload(payload: any): asserts payload is Payload { function assertPayload(payload: any): asserts payload is Payload {
if (typeof payload !== "object" || !payload) throw new Error("invalid payload"); if (typeof payload !== "object" || !payload) throw new Error("invalid payload");
if (typeof payload.url !== "string") throw new Error("invalid url"); if (typeof payload.url !== "string") throw new Error("invalid url");
if (typeof payload.record_id !== "number") throw new Error(`invalid record_id=${payload.record_id}`); if (typeof payload.stream_id !== "string") throw new Error(`invalid stream_id=${payload.stream_id}`);
}
function assertEnv() {
if (!process.env.S3_ACCESS_KEY_ID) throw new Error('S3_ACCESS_KEY_ID was missing in env');
if (!process.env.S3_SECRET_ACCESS_KEY) throw new Error('S3_SECRET_ACCESS_KEY was missing in env');
if (!process.env.S3_REGION) throw new Error('S3_REGION was missing in env');
if (!process.env.S3_ENDPOINT) throw new Error('S3_ENDPOINT was missing in env');
if (!process.env.S3_BUCKET) throw new Error('S3_BUCKET was missing in env');
if (!process.env.POSTGREST_URL) throw new Error('POSTGREST_URL was missing in env');
if (!process.env.AUTOMATION_USER_JWT) throw new Error('AUTOMATION_USER_JWT was missing in env');
} }
async function getRecording(url: string, recordId: number, helpers: Helpers) { async function getRecordInstance(url: string, segment_id: number, helpers: Helpers) {
const abortController = new AbortController() const abortController = new AbortController()
const abortSignal = abortController.signal const abortSignal = abortController.signal
const accessKeyId = process.env.S3_ACCESS_KEY_ID!; const accessKeyId = configs.s3AccessKeyId;
const secretAccessKey = process.env.S3_SECRET_ACCESS_KEY!; const secretAccessKey = configs.s3SecretAccessKey;
const region = process.env.S3_REGION!; const region = configs.s3Region;
const endpoint = process.env.S3_ENDPOINT!; const endpoint = configs.s3Endpoint;
const bucket = process.env.S3_BUCKET!; const bucket = configs.s3Bucket;
const playlistUrl = await getPlaylistUrl(url) const playlistUrl = await getPlaylistUrl(url)
const s3Client = Record.makeS3Client({ accessKeyId, secretAccessKey, region, endpoint }) const s3Client = Record.makeS3Client({ accessKeyId, secretAccessKey, region, endpoint })
const inputStream = Record.getFFmpegStream({ url: playlistUrl }) const inputStream = Record.getFFmpegStream({ url: playlistUrl })
const onProgress = (fileSize: number) => { const onProgress = (fileSize: number) => {
updateDatabaseRecord({ recordId, recordingState: 'recording', fileSize }).then(checkIfAborted).then((isAborted) => isAborted ? abortController.abort() : null) updateDatabaseRecord({ segment_id, fileSize, helpers })
.then((reee) => {
helpers.logger.info(JSON.stringify(reee))
return reee
})
.then(checkIfAborted)
.then((isAborted) => {
helpers.logger.info(`isAborted=${isAborted}`)
isAborted ? abortController.abort() : null
})
.catch((e) => {
helpers.logger.error('caught error while updatingDatabaseRecord inside onProgress inside getRecordInstance')
helpers.logger.error(e)
})
} }
const record = new Record({ inputStream, onProgress, bucket, s3Client, jobId: ''+recordId, abortSignal }) const record = new Record({ inputStream, onProgress, bucket, s3Client, jobId: ''+segment_id, abortSignal })
return record return record
} }
function checkIfAborted(record: RawRecordingRecord): boolean { function checkIfAborted(segment: Partial<Segment>): boolean {
return (record.is_aborted) return (!!segment?.stream?.at(0)?.is_recording_aborted)
} }
async function updateDatabaseRecord({ async function updateDatabaseRecord({
recordId, segment_id,
recordingState, fileSize,
fileSize helpers
}: { }: {
recordId: number, segment_id: number,
recordingState: RecordingState, fileSize: number,
fileSize: number helpers: Helpers
}): Promise<RawRecordingRecord> { }): Promise<Segment> {
// console.log(`updating database record with recordId=${recordId}, recordingState=${recordingState}, fileSize=${fileSize}`)
const payload: any = { const payload: any = {
file_size: fileSize bytes: fileSize
} }
if (recordingState) payload.recording_state = recordingState;
const res = await fetch(`${process.env.POSTGREST_URL}/records?id=eq.${recordId}`, { const res = await fetch(`${configs.postgrestUrl}/segments?id=eq.${segment_id}&select=stream:streams(is_recording_aborted)`, {
method: 'PATCH', method: 'PATCH',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
'Accepts': 'application/json', 'Accept': 'application/json',
'Prefer': 'return=representation', 'Prefer': 'return=representation',
'Authorization': `Bearer ${process.env.AUTOMATION_USER_JWT}` 'Authorization': `Bearer ${configs.automationUserJwt}`
}, },
body: JSON.stringify(payload) body: JSON.stringify(payload)
}) })
if (!res.ok) { if (!res.ok) {
const body = await res.text() const body = await res.text()
throw new Error(`failed to updateDatabaseRecord. status=${res.status}, statusText=${res.statusText}, body=${body}`); const msg = `failed to updateDatabaseRecord. status=${res.status}, statusText=${res.statusText}, body=${body}`
helpers.logger.error(msg)
throw new Error(msg);
} }
const body = await res.json() as RawRecordingRecord[]; // helpers.logger.info(`response was OK~`)
if (!body[0]) throw new Error(`failed to get a record that matched recordId=${recordId}`) const body = await res.json() as Segment[];
if (!body[0]) throw new Error(`failed to get a segment that matched segment_id=${segment_id}`);
const bod = body[0]
// helpers.logger.info('the following was the response from PATCH-ing /segments')
// helpers.logger.info(JSON.stringify(bod))
return bod
}
const getSegments = async function getSegments(stream_id: string): Promise<Segment> {
if (!stream_id) throw new Error('getSegments requires {String} stream_id as first arg');
const res = await fetch(`${configs.postgrestUrl}/segments_stream_links?stream_id=eq.${stream_id}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
'Prefer': 'return=representation'
},
})
if (!res.ok) {
const body = await res.text()
throw new Error(`failed to getSegments. status=${res.status}, statusText=${res.statusText}, body=${body}`);
}
const body = await res.json() as Segment[];
if (!body[0]) throw new Error(`failed to get segments that matched stream_id=${stream_id}`)
return body[0] return body[0]
} }
export const record: Task = async function (payload, helpers) {
console.log(payload)
const createSegment = async function createSegment(s3_key: string, helpers: Helpers): Promise<number> {
if (!s3_key) throw new Error('createSegment requires {string} s3_key as first arg');
const segmentPayload = {
s3_key
}
helpers.logger.info(`Creating segment with s3_key=${s3_key}. payload as follows`)
helpers.logger.info(JSON.stringify(segmentPayload))
const res = await fetch(`${configs.postgrestUrl}/segments`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
'Prefer': 'return=headers-only',
'Authorization': `Bearer ${configs.automationUserJwt}`
},
body: JSON.stringify(segmentPayload)
})
if (!res.ok) {
const body = await res.text()
const msg = `failed to create Segment. status=${res.status}, statusText=${res.statusText}, body=${body}`
helpers.logger.error(msg)
throw new Error(msg);
}
const location = res.headers.get('location')
if (!location) throw new Error(`failed to get location header in response from postgrest`);
const parsedQuery = querystring.parse(location)
const segmentsId = parsedQuery['/segments?id']
if (!segmentsId) throw new Error('segmentsId was undefined which is unexpected');
if (Array.isArray(segmentsId)) throw new Error('segmentsId was an array which is unexpected');
const id = segmentsId.split('.').at(-1)
if (!id) throw new Error('failed to get id ');
return parseInt(id)
}
const createSegmentsStreamLink = async function createSegmentsStreamLink(stream_id: string, segment_id: number, helpers: Helpers): Promise<number> {
if (!stream_id) throw new Error('createSegmentsStreamLink requires {string} stream_id as first arg');
if (!segment_id) throw new Error('createSegmentsStreamLink requires {Number} segment_id as second arg');
const segmentStreamLinkPayload = {
stream_id,
segment_id
}
const res = await fetch(`${configs.postgrestUrl}/segments_stream_links`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
'Prefer': 'return=headers-only',
'Authorization': `Bearer ${configs.automationUserJwt}`,
},
body: JSON.stringify(segmentStreamLinkPayload)
})
if (!res.ok) {
const body = await res.text()
throw new Error(`failed to create SegmentsStreamLink. status=${res.status}, statusText=${res.statusText}, body=${body}`);
}
const location = res.headers.get('location')
if (!location) throw new Error(`failed to get location header in response from postgrest`);
const parsedQuery = querystring.parse(location)
const segmentsId = parsedQuery['/segments_stream_links?id']
if (!segmentsId) throw new Error('segments_stream_links?id was undefined which is unexpected');
if (Array.isArray(segmentsId)) throw new Error('segments_stream_links was an array which is unexpected');
const id = segmentsId.split('.').at(-1)
if (!id) throw new Error('failed to get id ');
return parseInt(id)
}
/**
* # doRecordSegment
*
* Record a segment of a livestream using ffmpeg.
*
* Ideally, we record the entire livestream, but the universe is not so kind. Network interruptions are common, so we handle the situation as best as we can.
*
* This function creates new segments and segments_stream_links entries in the db via the PostgREST API.
*
* This function also names the S3 file (s3_key) with a datestamp and a cuid.
*/
const doRecordSegment = async function doRecordSegment(url: string, stream_id: string, helpers: Helpers): Promise<void> {
const s3_key = `${new Date().toISOString()}-${createId()}.ts`
helpers.logger.info(`let's create a segment...`)
const segment_id = await createSegment(s3_key, helpers)
helpers.logger.info(`let's create a segmentsStreamLink...`)
const segmentsStreamLinkId = await createSegmentsStreamLink(stream_id, segment_id, helpers)
helpers.logger.info(`doRecordSegment with segmentsStreamLinkId=${segmentsStreamLinkId}, stream_id=${stream_id}, segment_id=${segment_id}, url=${url}`)
const record = await getRecordInstance(url, segment_id, helpers)
await record.start()
}
export const record: Task = async function (payload: unknown, helpers: Helpers) {
assertPayload(payload) assertPayload(payload)
assertEnv() const { url, stream_id } = payload
const { url, record_id } = payload const recordId = stream_id
// let interval
try { try {
// every 30s, we /**
// 1. update the db record with the filesize * We do an exponential backoff timer when we record. If the Record() instance throws an error, we try again after a delay.
// 2. poll db to see if our job has been aborted by the user * This will take effect only when Record() throws an error.
// interval = setInterval(async () => { * If however Record() returns, as is the case when the stream ends, this backoff timer will not retry.
// try { * This does not handle the corner case where the streamer's internet temporarily goes down, and their stream drops.
// helpers.logger.info(`updateDatabaseRecord()`) *
// const recordingState: RecordingState = 'recording' * @todo We must implement retrying at a higher level, and retry a few times to handle this type of corner-case.
// const fileSize = record.counter */
// const updatePayload = { recordingState, recordId, fileSize } // await backOff(() => doRecordSegment(url, recordId, helpers))
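// A possible shape for that higher-level retry (a sketch only), reusing the
// exponential-backoff options already used elsewhere in this repo; the numbers
// below are illustrative, not decided:
// await backOff(() => doRecordSegment(url, recordId, helpers), {
//   numOfAttempts: 5,
//   startingDelay: 5000,
//   retry: (e, attemptNumber) => {
//     helpers.logger.warn(`record segment attempt ${attemptNumber} failed. e=${e}`)
//     return true // keep retrying until numOfAttempts is exhausted
//   }
// })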
// const updatedRecord = await updateDatabaseRecord(updatePayload) await doRecordSegment(url, recordId, helpers)
// if (updatedRecord.isAborted) {
// helpers.logger.info(`record ${recordId} has been aborted by a user so we stop the recording now.`)
// abortController.abort()
// }
// } catch (e) {
// helpers.logger.error(`error while updating database. For sake of the recording in progress we are ignoring the following error. ${e}`)
// }
// }, 3000)
// start recording and await the S3 upload being finished
const recordId = record_id
const record = await getRecording(url, recordId, helpers)
await record.start()
} catch (e) { } catch (e) {
helpers.logger.error(`caught an error duing record(). error as follows`) // await updateDatabaseRecord({ recordId: stream_id, recordingState: 'failed' })
helpers.logger.error(`caught an error during record Task`)
if (e instanceof Error) { if (e instanceof Error) {
helpers.logger.error(e.message) helpers.logger.error(e.message)
} else { } else {
helpers.logger.error(JSON.stringify(e)) helpers.logger.error(JSON.stringify(e))
} }
// throw e // @todo uncomment this for production
} }
// const recordId = await createRecordingRecord(payload, helpers)
// const { url } = payload;
// console.log(`@todo simulated start_recording with url=${url}, recordId=${recordId}`)
// await helpers.addJob('record', { url, recordId })
} }
/**
* Here we middleman the stream from FFmpeg --> S3,
* counting bits and creating graphile jobs to inform the UI of our progress
*/
// const transformStreamFactory = (recordId: number, helpers: Helpers): PassThrough => {
// let counter = 0
// return new PassThrough ({
// async transform(chunk, controller) {
// controller.enqueue(chunk) // we don't actually transform anything here. we're only gathering statistics.
// counter += chunk.length
// if (counter % (1 * 1024 * 1024) <= 1024) {
// helpers.logger.info(`Updating record ${recordId}`)
// try {
// await updateDatabaseRecord({ fileSize: counter, recordId, recordingState: 'recording' })
// } catch (e) {
// helpers.logger.warn(`We are ignoring the following error which occured while updating db record ${e}`)
// }
// }
// },
// flush() {
// helpers.logger.info(`transformStream has flushed.`)
// }
// })
// }
// export const recordNg: Task = async function (payload, helpers) {
// assertPayload(payload)
// const { url, recordId } = payload
// try {
// const abortController = new AbortController()
// const abortSignal = abortController.signal
// const inputStream =
// const transformStream = transformStreamFactory(recordId, helpers)
// const record = new Record({ inputStream, abortSignal, transformStream })
// await record.done()
// } catch (e) {
// console.error(`error during recording. error as follows`)
// console.error(e)
// } finally {
// helpers.addJob('updateDiscordMessage', { recordId }, { maxAttempts: 3, runAt: add(new Date(), { seconds: 5 }) })
// }
// }
export default record export default record

View File

@ -1,77 +0,0 @@
import Record from '../Record.ts'
import { getPlaylistUrl } from '@futureporn/scout/ytdlp.ts'
import 'dotenv/config'
import { type Job } from 'pg-boss'
import { backOff } from 'exponential-backoff'
export interface RecordJob extends Job {
data: {
url: string;
}
}
async function _record (job: RecordJob, retries?: number): Promise<string> {
if (!process.env.S3_BUCKET_NAME) throw new Error('S3_BUCKET_NAME was undefined in env');
if (!process.env.S3_ENDPOINT) throw new Error('S3_ENDPOINT was undefined in env');
if (!process.env.S3_REGION) throw new Error('S3_REGION was undefined in env');
if (!process.env.S3_ACCESS_KEY_ID) throw new Error('S3_ACCESS_KEY_ID was undefined in env');
if (!process.env.S3_SECRET_ACCESS_KEY) throw new Error('S3_SECRET_ACCESS_KEY was undefined in env');
if (!job) throw new Error('Job sent to job worker execution callback was empty!!!');
const { url } = job.data;
console.log(`'record' job ${job!.id} begin with url=${url}`)
const bucket = process.env.S3_BUCKET_NAME!
const endpoint = process.env.S3_ENDPOINT!
const region = process.env.S3_REGION!
const accessKeyId = process.env.S3_ACCESS_KEY_ID!
const secretAccessKey = process.env.S3_SECRET_ACCESS_KEY!
let playlistUrl
try {
playlistUrl = await getPlaylistUrl(url)
console.log(`playlistUrl=${playlistUrl}`)
} catch (e) {
console.error('error during getPlaylistUrl()')
console.error(e)
throw e
}
const jobId = job.id
const s3Client = Record.makeS3Client({ accessKeyId, secretAccessKey, region, endpoint })
const inputStream = Record.getFFmpegStream({ url: playlistUrl })
const record = new Record({ inputStream, bucket, s3Client, jobId })
await record.start()
console.log(`record job ${job.id} complete`)
return job.id
}
export default async function main (jobs: RecordJob[]): Promise<any> {
// @todo why are we passed multiple jobs? I'm expecting only one.
const backOffOptions = {
numOfAttempts: 5,
startingDelay: 5000,
retry: (e: any, attemptNumber: number) => {
console.log(`Record Job is retrying. Attempt number ${attemptNumber}. e=${JSON.stringify(e, null, 2)}`)
return true
}
}
for (const j of jobs) {
console.log(`record job ${j.id} GO GO GO`)
try {
await backOff(() => _record(j), backOffOptions)
} catch (e) {
console.warn(`record job ${j.id} encountered the following error.`)
console.error(e)
}
console.log(`record job ${j.id} is finished.`)
}
};

View File

@ -1,67 +0,0 @@
import { Helpers, type Task } from 'graphile-worker'
import { add } from 'date-fns'
/**
* url is the URL to be recorded. Ex: chaturbate.com/projektmelody
* discordMessageId is the ID of the discord messate which displays recording status.
* we use the ID to update the message later, and/or relate button press events to this record task
*/
interface Payload {
url: string;
discordMessageId: string;
isAborted: boolean;
}
function assertPayload(payload: any): asserts payload is Payload {
if (typeof payload !== "object" || !payload) throw new Error("invalid payload");
if (typeof payload.url !== "string") throw new Error("invalid url");
if (typeof payload.discordMessageId !== "string") throw new Error(`invalid discordMessageId=${payload.discordMessageId}`);
}
function assertEnv() {
if (!process.env.AUTOMATION_USER_JWT) throw new Error('AUTOMATION_USER_JWT was missing in env');
if (!process.env.POSTGREST_URL) throw new Error('POSTGREST_URL was missing in env');
}
async function createRecordingRecord(payload: Payload, helpers: Helpers): Promise<number> {
const { url, discordMessageId } = payload
const record = {
url,
discord_message_id: discordMessageId,
recording_state: 'pending',
file_size: 0
}
const res = await fetch(`${process.env.POSTGREST_URL}/records`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${process.env.AUTOMATION_USER_JWT}`,
'Prefer': 'return=headers-only'
},
body: JSON.stringify(record)
})
if (!res.ok) {
const status = res.status
const statusText = res.statusText
throw new Error(`fetch failed to create recording record in database. status=${status}, statusText=${statusText}`)
}
helpers.logger.info('res.headers.location as follows.')
helpers.logger.info(res.headers.get('location')!)
const id = res.headers.get('location')?.split('.').at(-1)
if (!id) throw new Error('id could not be parsed from location header');
return parseInt(id)
}
export const start_recording: Task = async function (payload, helpers) {
assertPayload(payload)
assertEnv()
const recordId = await createRecordingRecord(payload, helpers)
const { url } = payload;
await helpers.addJob('record', { url, recordId }, { maxAttempts: 3, jobKey: `record_${recordId}` })
const runAt = add(new Date(), { seconds: 10 })
await helpers.addJob('updateDiscordMessage', { recordId }, { jobKey: `record_${recordId}_update_discord_message`, maxAttempts: 3, runAt })
helpers.logger.info(`startRecording() with url=${url}, recordId=${recordId}, (updateDiscordMessage runAt=${runAt})`)
}
export default start_recording

View File

@ -1,18 +0,0 @@
import { type Task } from 'graphile-worker'
interface Payload {
id: string
}
function assertPayload(payload: any): asserts payload is Payload {
if (typeof payload !== "object" || !payload) throw new Error("invalid payload");
if (typeof payload.id !== "string") throw new Error("invalid id");
}
export const stop_recording: Task = async function (payload) {
assertPayload(payload)
const { id } = payload;
console.log(`@todo simulated stop_recording with id=${id}`)
}

View File

@ -25,7 +25,7 @@
// Include the necessary files for your project // Include the necessary files for your project
"include": [ "include": [
"src/**/*.ts" "src/**/*.ts"
], , "../bot/src/tasks/restart_failed_recordings.ts" ],
"exclude": [ "exclude": [
"node_modules" "node_modules"
] ]

View File

@ -4,9 +4,24 @@ Here we handle migrations for the postgrest database.
@see https://github.com/thomwright/postgres-migrations @see https://github.com/thomwright/postgres-migrations
Reminder: only write migrations that affect schema. (don't write migrations that affect data)
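For example, a migration like the following (hypothetical table and column names) is fine because it only touches schema; backfilling rows would belong in application code instead.

```sql
-- OK in a migration: schema change only
ALTER TABLE IF EXISTS api.example
    ADD COLUMN note TEXT;

-- avoid in a migration: data changes such as
-- UPDATE api.example SET note = 'backfilled';
```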
## K.I.S.S. ## K.I.S.S.
Keep It Stupidly Simple. Keep It Stupidly Simple.
We are keeping this module as simple as possible. This means pure JS (no typescript!) We are keeping this module as simple as possible. This means pure JS (no typescript!)
## troubleshooting
If you see the following error, graphile_worker likely hasn't had a chance to create its functions. Make sure a graphile_worker instance is running so it can automatically create the necessary functions.
```json
{
"code": "42883",
"details": null,
"hint": "No function matches the given name and argument types. You might need to add explicit type casts.",
"message": "function graphile_worker.add_job(text, json, max_attempts => integer) does not exist"
}
```
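
For a quick sanity check from psql (hypothetical, assuming you can connect as the database owner), you can confirm the function exists before retrying:

```sql
-- does graphile_worker.add_job exist yet?
SELECT p.proname
FROM pg_proc p
JOIN pg_namespace n ON n.oid = p.pronamespace
WHERE n.nspname = 'graphile_worker' AND p.proname = 'add_job';
```

If that returns no rows, the worker hasn't run its own migrations yet.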

View File

@ -2,6 +2,8 @@ import {migrate} from 'postgres-migrations'
import path, { dirname } from 'node:path' import path, { dirname } from 'node:path'
import { fileURLToPath } from 'url'; import { fileURLToPath } from 'url';
import 'dotenv/config' import 'dotenv/config'
const __dirname = dirname(fileURLToPath(import.meta.url)); const __dirname = dirname(fileURLToPath(import.meta.url));
if (!process.env.DATABASE_PASSWORD) throw new Error('DATABASE_PASSWORD is missing in env'); if (!process.env.DATABASE_PASSWORD) throw new Error('DATABASE_PASSWORD is missing in env');
@ -23,7 +25,7 @@ async function main() {
defaultDatabase: "postgres" defaultDatabase: "postgres"
} }
await migrate(dbConfig, path.join(__dirname, "./migrations/")) await migrate(dbConfig, path.join(__dirname, "./migrations/"), { logger: console.log })
} }

View File

@ -0,0 +1,5 @@
ALTER TABLE IF EXISTS api.records
ADD COLUMN created_at timestamp(6) without time zone;
ALTER TABLE IF EXISTS api.records
ADD COLUMN updated_at timestamp(6) without time zone;

View File

@ -0,0 +1,7 @@
ALTER TABLE IF EXISTS api.records
ADD CONSTRAINT created_at_not_null
CHECK (created_at IS NOT NULL) NOT VALID;
ALTER TABLE IF EXISTS api.records
ADD CONSTRAINT updated_at_not_null
CHECK (updated_at IS NOT NULL) NOT VALID;

View File

@ -0,0 +1,26 @@
-- In the prev. migration I added a CHECK, but I forgot to add the default
ALTER TABLE IF EXISTS api.records
ALTER COLUMN created_at SET DEFAULT now();
ALTER TABLE IF EXISTS api.records
ALTER COLUMN updated_at SET DEFAULT now();
-- create a function which updates the row's updated_at
CREATE FUNCTION public.tg__updated_at() RETURNS trigger
LANGUAGE plpgsql
SET search_path TO 'pg_catalog', 'public', 'pg_temp'
AS $$
BEGIN
NEW.updated_at = now();
RETURN NEW;
END;
$$;
-- create a trigger which runs the above function when a /record is updated
CREATE TRIGGER record_updated_at
AFTER UPDATE ON api.records
FOR EACH ROW
EXECUTE PROCEDURE public.tg__updated_at();

View File

@ -0,0 +1,140 @@
-- vtubers table
CREATE TABLE api.vtubers (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
display_name TEXT NOT NULL,
chaturbate TEXT,
twitter TEXT,
patreon TEXT,
twitch TEXT,
tiktok TEXT,
onlyfans TEXT,
youtube TEXT,
linktree TEXT,
carrd TEXT,
fansly TEXT,
pornhub TEXT,
discord TEXT,
reddit TEXT,
throne TEXT,
instagram TEXT,
facebook TEXT,
merch TEXT,
slug TEXT NOT NULL,
description1 TEXT,
description2 TEXT,
image TEXT NOT NULL,
theme_color VARCHAR(7) NOT NULL,
image_blur TEXT DEFAULT 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAABmJLR0QA/wD/AP+gvaeTAAAADUlEQVQImWMwtf//HwAEkwJzh0T9qwAAAABJRU5ErkJggg==',
fansly_id TEXT,
chaturbate_id TEXT,
twitter_id TEXT
-- F.Y.I., relations as follows
-- toys (one-to-many)
-- vods (one-to-many)
-- streams (one-to-many)
);
GRANT all ON api.vtubers TO automation;
GRANT SELECT ON api.vtubers TO web_anon;
-- streams table
CREATE TABLE api.streams (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
url TEXT NOT NULL,
platform_notification_type TEXT,
date timestamp(6) without time zone,
created_at timestamp(6) without time zone,
vtuber uuid,
FOREIGN KEY (vtuber) REFERENCES api.vtubers(id),
tweet TEXT,
archive_status TEXT,
is_chaturbate_stream BOOLEAN,
is_fansly_stream BOOLEAN
);
GRANT all ON api.streams TO automation;
GRANT SELECT ON api.streams TO web_anon;
-- toys table
CREATE TABLE api.toys (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
-- relation. one toy to many tags
-- relation. one toy to many vtubers
make TEXT NOT NULL,
model TEXT NOT NULL,
image TEXT NOT NULL DEFAULT 'https://futureporn-b2.b-cdn.net/default-thumbnail.webp'
);
GRANT all ON api.toys TO automation;
GRANT SELECT ON api.toys TO web_anon;
-- tags table
CREATE TABLE api.tags (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
name TEXT NOT NULL UNIQUE,
toy_id uuid,
FOREIGN KEY (toy_id) REFERENCES api.toys
);
GRANT all ON api.tags TO automation;
GRANT SELECT ON api.tags TO web_anon;
-- toys-tags junction table
CREATE TABLE api.toys_tags(
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
toy_id uuid,
tag_id uuid,
CONSTRAINT fk_toys FOREIGN KEY(toy_id) REFERENCES api.toys(id),
CONSTRAINT fk_tags FOREIGN KEY(tag_id) REFERENCES api.tags(id)
);
GRANT all ON api.toys_tags TO automation;
GRANT SELECT ON api.toys_tags TO web_anon;
-- tags-vods junction table
-- toys-vtubers junction table
CREATE TABLE api.toys_vtubers(
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
toy_id uuid,
vtuber_id uuid,
CONSTRAINT fk_toys FOREIGN KEY(toy_id) REFERENCES api.toys(id),
CONSTRAINT fk_vtubers FOREIGN KEY(vtuber_id) REFERENCES api.vtubers(id)
);
GRANT all ON api.toys_vtubers TO automation;
GRANT SELECT ON api.toys_vtubers TO web_anon;
-- vods table
CREATE TABLE api.vods (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
stream_id uuid NOT NULL,
FOREIGN KEY (stream_id) REFERENCES api.streams(id),
video_cid TEXT UNIQUE,
CONSTRAINT check_video_cid CHECK (video_cid ~ 'Qm[1-9A-HJ-NP-Za-km-z]{44,}|b[A-Za-z2-7]{58,}|B[A-Z2-7]{58,}|z[1-9A-HJ-NP-Za-km-z]{48,}|F[0-9A-F]{50,}'),
announce_title TEXT,
announce_url TEXT,
note TEXT,
date timestamp(6) without time zone,
spoilers TEXT,
title TEXT,
uploader uuid,
mux_asset_id TEXT,
mux_playback_id TEXT,
s3_key TEXT,
s3_id TEXT,
thumbnail TEXT
);
GRANT all ON api.vods TO automation;
GRANT SELECT ON api.vods TO web_anon;
-- tags-vods junction table
CREATE TABLE api.tags_vods(
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
tag_id uuid,
vod_id uuid,
CONSTRAINT fk_tags FOREIGN KEY(tag_id) REFERENCES api.tags(id),
CONSTRAINT fk_vods FOREIGN KEY(vod_id) REFERENCES api.vods(id)
);
GRANT all ON api.tags_vods TO automation;
GRANT SELECT ON api.tags_vods TO web_anon;

View File

@ -0,0 +1,7 @@
-- we add the concept of segments to api.records
-- implemented as a multidimensional text array, s3_segments.
-- the first value is the s3 id, the second value is the s3 key
-- [id, key]
ALTER TABLE IF EXISTS api.records
ADD COLUMN s3_segments text[][];
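-- F.Y.I., a populated row might look like this (hypothetical values, not part of the migration):
--   UPDATE api.records
--     SET s3_segments = ARRAY[['upload-id-1', 'segment-0001.ts'], ['upload-id-2', 'segment-0002.ts']]
--     WHERE id = 1;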

View File

@ -0,0 +1,28 @@
-- we don't need the s3_segments multidimensional array; we're moving its functionality to a new table
ALTER TABLE IF EXISTS api.records
DROP COLUMN s3_segments;
-- segments table
CREATE TABLE api.segments (
id INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
s3_key TEXT NOT NULL,
s3_id TEXT NOT NULL,
bytes bigint DEFAULT 0
);
GRANT all ON api.segments TO automation;
GRANT SELECT ON api.segments TO web_anon;
-- records-segments join table
CREATE TABLE api.records_segments(
id INT GENERATED ALWAYS AS IDENTITY,
record_id INT NOT NULL,
segment_id INT NOT NULL,
CONSTRAINT fk_record FOREIGN KEY(record_id) REFERENCES api.records(id),
CONSTRAINT fk_segment FOREIGN KEY(segment_id) REFERENCES api.segments(id),
PRIMARY KEY(id, record_id, segment_id)
);
GRANT all ON api.records_segments TO automation;
GRANT SELECT ON api.records_segments TO web_anon;

View File

@ -0,0 +1,2 @@
ALTER TABLE IF EXISTS api.records_segments
ADD COLUMN segments_order INT NOT NULL DEFAULT 0;

View File

@ -0,0 +1,5 @@
ALTER TABLE IF EXISTS api.records_segments
DROP COLUMN segments_order;
ALTER TABLE IF EXISTS api.records_segments
ADD COLUMN segment_order INT NOT NULL DEFAULT 0;

View File

@ -0,0 +1,2 @@
DROP TABLE IF EXISTS api.records CASCADE;
DROP TABLE IF EXISTS api.records_segments CASCADE;

View File

@ -0,0 +1,16 @@
-- I forgot to actually create the new table
CREATE TABLE api.segments_stream_links (
id int PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
discord_message_id text NOT NULL,
capture_job_id text NOT NULL
);
-- roles & permissions
GRANT all ON api.segments_stream_links TO automation;
GRANT SELECT ON api.segments_stream_links TO web_anon;
-- there is no s3_id in the segments run context so we don't need a column for it
ALTER TABLE IF EXISTS api.segments
DROP COLUMN s3_id;

View File

@ -0,0 +1,8 @@
-- oops. bitten by an unfinished copy-paste in the previous migration
-- segments_stream_links doesn't need these columns, so drop them
ALTER TABLE IF EXISTS api.segments_stream_links
DROP COLUMN discord_message_id;
ALTER TABLE IF EXISTS api.segments_stream_links
DROP COLUMN capture_job_id;

View File

@ -0,0 +1,5 @@
ALTER TABLE IF EXISTS api.streams
ADD COLUMN updated_at timestamp(6) without time zone;
ALTER TABLE IF EXISTS api.streams
ADD COLUMN status TEXT NOT NULL;

View File

@ -0,0 +1,5 @@
ALTER TABLE IF EXISTS api.streams
DROP COLUMN IF EXISTS status;
ALTER TABLE api.streams
ADD COLUMN status TEXT NOT NULL DEFAULT 'pending_recording';

View File

@ -0,0 +1 @@
DROP TABLE IF EXISTS api.discord_interactions CASCADE;

View File

@ -0,0 +1,38 @@
-- delete outdated
DROP FUNCTION IF EXISTS public.tg__add_job();
-- We create a function which lets Postgrest's automation user create jobs in Graphile Worker.
-- Normally only the database owner, in our case `postgres`, can add jobs due to RLS in graphile_worker tables.
-- Under the advice of graphile_worker author, we can use a SECURITY DEFINER wrapper function.
-- @see https://worker.graphile.org/docs/sql-add-job#graphile_workeradd_job:~:text=graphile_worker.add_job(...),that%20are%20necessary.)
-- @see https://discord.com/channels/489127045289476126/1179293106336694333/1179605043729670306
-- @see https://discord.com/channels/489127045289476126/498852330754801666/1067707497235873822
CREATE FUNCTION public.tg__add_record_job() RETURNS trigger
LANGUAGE plpgsql SECURITY DEFINER
SET search_path TO 'pg_catalog', 'public', 'pg_temp'
AS $$
begin
PERFORM graphile_worker.add_job('record', json_build_object(
'url', NEW.url,
'stream_id', NEW.id
), max_attempts := 12);
return NEW;
end;
$$;
-- when a stream is updated, we add a job in graphile to update_discord_message
CREATE TRIGGER stream_update
AFTER UPDATE ON api.streams
FOR EACH ROW
EXECUTE PROCEDURE public.tg__update_discord_message('update_discord_message');
-- when a stream is created, we add a 'record' job in graphile-worker
CREATE TRIGGER stream_create
AFTER INSERT ON api.streams
FOR EACH ROW
EXECUTE PROCEDURE public.tg__add_record_job('record');
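-- F.Y.I., a hypothetical smoke test (not part of this migration):
-- inserting a stream as the automation role should now enqueue a 'record' job, e.g.
--   INSERT INTO api.streams (url) VALUES ('https://chaturbate.com/projektmelody');
-- and, assuming your graphile_worker version exposes the jobs view, the job should show up via
--   SELECT task_identifier, payload FROM graphile_worker.jobs ORDER BY run_at DESC LIMIT 1;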

View File

@ -0,0 +1,9 @@
DROP TABLE api.segments_stream_links;
CREATE TABLE api.segments_stream_links (
id int GENERATED ALWAYS AS IDENTITY,
stream_id UUID NOT NULL REFERENCES api.streams(id),
segment_id INT NOT NULL REFERENCES api.segments(id),
capture_job_id text NOT NULL,
PRIMARY KEY(id, stream_id, segment_id)
);

View File

@ -0,0 +1,2 @@
GRANT all ON api.segments_stream_links TO automation;
GRANT SELECT ON api.segments_stream_links TO web_anon;

View File

@ -0,0 +1,3 @@
ALTER TABLE IF EXISTS api.segments_stream_links
DROP COLUMN IF EXISTS capture_job_id;

View File

@ -0,0 +1,39 @@
ALTER TABLE api.segments
ADD COLUMN created_at TIMESTAMP(6) WITHOUT TIME ZONE;
ALTER TABLE api.segments
ADD COLUMN updated_at TIMESTAMP(6) WITHOUT TIME ZONE;
-- in migration 8, we already created tg__updated_at() so we don't need to create that,
-- but we do need to create a function which will set the row's created_at
CREATE FUNCTION public.tg__created_at() RETURNS trigger
LANGUAGE plpgsql
SET search_path TO 'pg_catalog', 'public', 'pg_temp'
AS $$
BEGIN
NEW.created_at = now();
RETURN NEW;
END;
$$;
-- create a trigger which runs the tg__updated_at() function when a /segment is updated
CREATE TRIGGER segment_updated_at
AFTER UPDATE ON api.segments
FOR EACH ROW
EXECUTE PROCEDURE public.tg__updated_at();
-- create a trigger which runs the tg__created_at() function when a /segment is created
CREATE TRIGGER segment_created_at
AFTER INSERT ON api.segments
FOR EACH ROW
EXECUTE PROCEDURE public.tg__created_at();
-- create a trigger which runs the tg__created_at() function when a /stream is created
CREATE TRIGGER stream_created_at
AFTER INSERT ON api.streams
FOR EACH ROW
EXECUTE PROCEDURE public.tg__created_at();

View File

@ -0,0 +1,2 @@
ALTER TABLE api.streams
ADD COLUMN is_recording_aborted BOOLEAN DEFAULT FALSE;

View File

@ -0,0 +1,2 @@
CREATE EXTENSION moddatetime;

View File

@ -0,0 +1,49 @@
-- now we set up the triggers
-- streams created_at
ALTER TABLE api.streams
ALTER created_at SET DEFAULT now();
DROP TRIGGER stream_created_at ON api.streams;
CREATE TRIGGER stream_created_at
BEFORE INSERT ON api.streams
FOR EACH ROW
EXECUTE PROCEDURE moddatetime (created_at);
-- streams updated_at
ALTER TABLE api.streams
ALTER updated_at SET DEFAULT now();
CREATE TRIGGER stream_updated_at
BEFORE UPDATE ON api.streams
FOR EACH ROW
EXECUTE PROCEDURE moddatetime (updated_at);
-- segments created_at
ALTER TABLE api.segments
ALTER created_at SET DEFAULT now();
DROP TRIGGER segment_created_at ON api.segments;
CREATE TRIGGER segment_created_at
BEFORE INSERT ON api.segments
FOR EACH ROW
EXECUTE PROCEDURE moddatetime(created_at);
-- segments updated_at
ALTER TABLE api.segments
ALTER updated_at SET DEFAULT now();
DROP TRIGGER segment_updated_at ON api.segments;
CREATE TRIGGER segment_updated_at
BEFORE UPDATE ON api.segments
FOR EACH ROW
EXECUTE PROCEDURE moddatetime(updated_at);
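-- F.Y.I., a quick way to verify the update triggers (hypothetical uuid, not part of the migration):
--   UPDATE api.streams SET status = 'pending_recording' WHERE id = '<stream uuid>';
--   SELECT created_at, updated_at FROM api.streams WHERE id = '<stream uuid>';
-- updated_at should now be bumped by moddatetime without the client setting it.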

View File

@ -0,0 +1,9 @@
-- A fix for the following error
-- moddatetime: cannot process INSERT events
--
-- We don't need moddatetime for INSERT events because we have column defaults set the time when the row is created.
DROP TRIGGER segment_created_at ON api.segments;
DROP TRIGGER stream_created_at ON api.streams;

View File

@ -0,0 +1,5 @@
-- streams needs discord_message_id for chatops
ALTER TABLE api.streams
ADD COLUMN discord_message_id TEXT;

View File

@ -8,6 +8,7 @@
"test": "echo \"Error: no test specified\" && exit 0", "test": "echo \"Error: no test specified\" && exit 0",
"start": "node index.js" "start": "node index.js"
}, },
"packageManager": "pnpm@9.6.0",
"keywords": [], "keywords": [],
"author": "@CJ_Clippy", "author": "@CJ_Clippy",
"license": "Unlicense", "license": "Unlicense",