bot progress
ci / build (push) Failing after 9m56s Details

This commit is contained in:
CJ_Clippy 2024-08-06 17:13:58 -08:00
parent b3d9035733
commit e4b979d61c
43 changed files with 855 additions and 487 deletions

View File

@ -150,7 +150,7 @@ docker_build(
dockerfile='./d.bot.dockerfile', dockerfile='./d.bot.dockerfile',
target='dev', target='dev',
live_update=[ live_update=[
sync('./services/bot', '/app') sync('./services/bot', '/app/services/bot')
] ]
) )

View File

@ -5,7 +5,7 @@ Vtuber data acquisition. Anything having to do with external WWW data acquisitio
## Features ## Features
* [x] Ingests going live notification e-mails * [x] Ingests going live notification e-mails
* [ ] Sends `startRecording` signals to @futureporn/capture * [ ] Sends `start_recording` signals to @futureporn/capture
* [x] Fetches vtuber data from platform * [x] Fetches vtuber data from platform
* [x] image * [x] image
* [x] themeColor * [x] themeColor

View File

@ -12,7 +12,7 @@ async function main() {
// Run a worker to execute jobs: // Run a worker to execute jobs:
const runner = await run({ const runner = await run({
connectionString: process.env.DATABASE_URL, connectionString,
concurrency: 5, concurrency: 5,
// Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc // Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
noHandleSignals: false, noHandleSignals: false,

View File

@ -11,29 +11,29 @@ if [ -z $POSTGRES_PASSWORD ]; then
exit 5 exit 5
fi fi
## Enable pgcrypto (needed by pg-boss) # ## Enable pgcrypto (needed by pg-boss)
kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\ # kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\
CREATE EXTENSION pgcrypto;" # CREATE EXTENSION pgcrypto;"
## Create the temporal databases # ## Create the temporal databases
kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\ # kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\
CREATE DATABASE temporal_visibility \ # CREATE DATABASE temporal_visibility \
WITH \ # WITH \
OWNER = postgres \ # OWNER = postgres \
ENCODING = 'UTF8' \ # ENCODING = 'UTF8' \
LOCALE_PROVIDER = 'libc' \ # LOCALE_PROVIDER = 'libc' \
CONNECTION LIMIT = -1 \ # CONNECTION LIMIT = -1 \
IS_TEMPLATE = False;" # IS_TEMPLATE = False;"
kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\ # kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\
CREATE DATABASE temporal \ # CREATE DATABASE temporal \
WITH \ # WITH \
OWNER = postgres \ # OWNER = postgres \
ENCODING = 'UTF8' \ # ENCODING = 'UTF8' \
LOCALE_PROVIDER = 'libc' \ # LOCALE_PROVIDER = 'libc' \
CONNECTION LIMIT = -1 \ # CONNECTION LIMIT = -1 \
IS_TEMPLATE = False;" # IS_TEMPLATE = False;"
@ -59,48 +59,39 @@ kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PAS
CONNECTION LIMIT = -1 \ CONNECTION LIMIT = -1 \
IS_TEMPLATE = False;" IS_TEMPLATE = False;"
## Create the futureporn postgrest database
kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\ # @futureporn/migrations takes care of these tasks now
CREATE DATABASE postgrest \ # ## Create graphile_worker db (for backend tasks)
WITH \ # kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\
OWNER = postgres \ # CREATE DATABASE graphile_worker \
ENCODING = 'UTF8' \ # WITH \
LOCALE_PROVIDER = 'libc' \ # OWNER = postgres \
CONNECTION LIMIT = -1 \ # ENCODING = 'UTF8' \
IS_TEMPLATE = False;" # LOCALE_PROVIDER = 'libc' \
# CONNECTION LIMIT = -1 \
# IS_TEMPLATE = False;"
## Create graphile_worker db (for backend tasks) # ## create futureporn user
kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\ # kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\
CREATE DATABASE graphile_worker \ # CREATE ROLE futureporn \
WITH \ # WITH \
OWNER = postgres \ # LOGIN \
ENCODING = 'UTF8' \ # NOSUPERUSER \
LOCALE_PROVIDER = 'libc' \ # NOCREATEDB \
CONNECTION LIMIT = -1 \ # NOCREATEROLE \
IS_TEMPLATE = False;" # INHERIT \
# NOREPLICATION \
# NOBYPASSRLS \
## create futureporn user # CONNECTION LIMIT -1 \
kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\ # PASSWORD '$POSTGRES_REALTIME_PASSWORD';"
CREATE ROLE futureporn \
WITH \
LOGIN \
NOSUPERUSER \
NOCREATEDB \
NOCREATEROLE \
INHERIT \
NOREPLICATION \
NOBYPASSRLS \
CONNECTION LIMIT -1 \
PASSWORD '$POSTGRES_REALTIME_PASSWORD';"
## grant futureporn user all privs ## grant futureporn user all privs
kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\ # kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\
GRANT ALL PRIVILEGES ON DATABASE postgrest TO futureporn;" # GRANT ALL PRIVILEGES ON DATABASE postgrest TO futureporn;"
kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\ # kubectl -n futureporn exec ${postgres_pod_name} -- env PGPASSWORD=${POSTGRES_PASSWORD} psql -U postgres --command "\
GRANT ALL PRIVILEGES ON DATABASE graphile_worker TO futureporn;" # GRANT ALL PRIVILEGES ON DATABASE graphile_worker TO futureporn;"

View File

@ -11,18 +11,21 @@
"dev": "tsx --watch ./src/index.ts", "dev": "tsx --watch ./src/index.ts",
"build": "tsc --build", "build": "tsc --build",
"clean": "rm -rf dist", "clean": "rm -rf dist",
"superclean": "rm -rf node_modules && rm -rf pnpm-lock.yaml && rm -rf dist" "superclean": "rm -rf node_modules && rm -rf pnpm-lock.yaml && rm -rf dist",
"register": "tsx ./src/register-commands.ts"
}, },
"packageManager": "pnpm@9.6.0", "packageManager": "pnpm@9.6.0",
"keywords": [], "keywords": [],
"author": "@CJ_Clippy", "author": "@CJ_Clippy",
"license": "Unlicense", "license": "Unlicense",
"dependencies": { "dependencies": {
"@discordeno/bot": "19.0.0-next.746f0a9",
"date-fns": "^3.6.0", "date-fns": "^3.6.0",
"discord.js": "^14.15.3", "dd-cache-proxy": "^2.1.1",
"dotenv": "^16.4.5", "dotenv": "^16.4.5",
"graphile-config": "0.0.1-beta.9", "graphile-config": "0.0.1-beta.9",
"graphile-worker": "^0.16.6" "graphile-worker": "^0.16.6",
"pretty-bytes": "^6.1.1"
}, },
"devDependencies": { "devDependencies": {
"@futureporn/types": "workspace:^", "@futureporn/types": "workspace:^",

View File

@ -8,12 +8,15 @@ importers:
.: .:
dependencies: dependencies:
'@discordeno/bot':
specifier: 19.0.0-next.746f0a9
version: 19.0.0-next.746f0a9
date-fns: date-fns:
specifier: ^3.6.0 specifier: ^3.6.0
version: 3.6.0 version: 3.6.0
discord.js: dd-cache-proxy:
specifier: ^14.15.3 specifier: ^2.1.1
version: 14.15.3 version: 2.1.1(@discordeno/bot@19.0.0-next.746f0a9)
dotenv: dotenv:
specifier: ^16.4.5 specifier: ^16.4.5
version: 16.4.5 version: 16.4.5
@ -23,6 +26,9 @@ importers:
graphile-worker: graphile-worker:
specifier: ^0.16.6 specifier: ^0.16.6
version: 0.16.6(typescript@5.5.4) version: 0.16.6(typescript@5.5.4)
pretty-bytes:
specifier: ^6.1.1
version: 6.1.1
devDependencies: devDependencies:
'@futureporn/types': '@futureporn/types':
specifier: workspace:^ specifier: workspace:^
@ -94,33 +100,20 @@ packages:
resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==}
engines: {node: '>=12'} engines: {node: '>=12'}
'@discordjs/builders@1.8.2': '@discordeno/bot@19.0.0-next.746f0a9':
resolution: {integrity: sha512-6wvG3QaCjtMu0xnle4SoOIeFB4y6fKMN6WZfy3BMKJdQQtPLik8KGzDwBVL/+wTtcE/ZlFjgEk74GublyEVZ7g==} resolution: {integrity: sha512-M0BqdbGcJSHr7Nmxw/okFtkKZ9mMM0yUHBbB0XApxFxBRt68I1JhVbdFMwDkVAutargEr8BVDSt5SqUVpMnbrQ==}
engines: {node: '>=16.11.0'}
'@discordjs/collection@1.5.3': '@discordeno/gateway@19.0.0-next.746f0a9':
resolution: {integrity: sha512-SVb428OMd3WO1paV3rm6tSjM4wC+Kecaa1EUGX7vc6/fddvw/6lg90z4QtCqm21zvVe92vMMDt9+DkIvjXImQQ==} resolution: {integrity: sha512-IvXISmDVC8bGUreR/wo4hYoH4p8w5YanDDMpdO+ex6DTlsA2AgvpzzIHeshfOZNAupdr4spp4TDxziXfq1skhQ==}
engines: {node: '>=16.11.0'}
'@discordjs/collection@2.1.0': '@discordeno/rest@19.0.0-next.746f0a9':
resolution: {integrity: sha512-mLcTACtXUuVgutoznkh6hS3UFqYirDYAg5Dc1m8xn6OvPjetnUlf/xjtqnnc47OwWdaoCQnHmHh9KofhD6uRqw==} resolution: {integrity: sha512-qM0d/MFhzC2TWDclwiVL4Tt/37C26gjCUgb0x9mwnQsetJvsYmd+nzQI6SCkzKjsn/esWCtjSSHFQ7z6bdURpw==}
engines: {node: '>=18'}
'@discordjs/formatters@0.4.0': '@discordeno/types@19.0.0-next.746f0a9':
resolution: {integrity: sha512-fJ06TLC1NiruF35470q3Nr1bi95BdvKFAF+T5bNfZJ4bNdqZ3VZ+Ttg6SThqTxm6qumSG3choxLBHMC69WXNXQ==} resolution: {integrity: sha512-v/nG0vIFukJzFqAzABat2eGV3a7jTDQzbPkj1yoWaFfcB6pxlF44XJ4nsLLsvWj7oRH8eR97yMa2BT697Rs5JA==}
engines: {node: '>=16.11.0'}
'@discordjs/rest@2.3.0': '@discordeno/utils@19.0.0-next.746f0a9':
resolution: {integrity: sha512-C1kAJK8aSYRv3ZwMG8cvrrW4GN0g5eMdP8AuN8ODH5DyOCbHgJspze1my3xHOAgwLJdKUbWNVyAeJ9cEdduqIg==} resolution: {integrity: sha512-UY5GataakuY0yc4SN5qJLexUbTc5y293G3gNAWSaOjaZivEytcdxD4xgeqjNj9c4eN57B3Lfzus6tFZHXwXNOA==}
engines: {node: '>=16.11.0'}
'@discordjs/util@1.1.0':
resolution: {integrity: sha512-IndcI5hzlNZ7GS96RV3Xw1R2kaDuXEp7tRIy/KlhidpN/BQ1qh1NZt3377dMLTa44xDUNKT7hnXkA/oUAzD/lg==}
engines: {node: '>=16.11.0'}
'@discordjs/ws@1.1.1':
resolution: {integrity: sha512-PZ+vLpxGCRtmr2RMkqh8Zp+BenUaJqlS6xhgWKEZcgC/vfHLEzpHtKkB0sl3nZWpwtcKk6YWy+pU3okL2I97FA==}
engines: {node: '>=16.11.0'}
'@esbuild/aix-ppc64@0.21.5': '@esbuild/aix-ppc64@0.21.5':
resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==} resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==}
@ -273,18 +266,6 @@ packages:
'@jridgewell/trace-mapping@0.3.9': '@jridgewell/trace-mapping@0.3.9':
resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==}
'@sapphire/async-queue@1.5.3':
resolution: {integrity: sha512-x7zadcfJGxFka1Q3f8gCts1F0xMwCKbZweM85xECGI0hBTeIZJGGCrHgLggihBoprlQ/hBmDR5LKfIPqnmHM3w==}
engines: {node: '>=v14.0.0', npm: '>=7.0.0'}
'@sapphire/shapeshift@3.9.7':
resolution: {integrity: sha512-4It2mxPSr4OGn4HSQWGmhFMsNFGfFVhWeRPCRwbH972Ek2pzfGRZtb0pJ4Ze6oIzcyh2jw7nUDa6qGlWofgd9g==}
engines: {node: '>=v16'}
'@sapphire/snowflake@3.5.3':
resolution: {integrity: sha512-jjmJywLAFoWeBi1W7994zZyiNWPIiqRRNAmSERxyg93xRGzNYvGjlZ0gR6x0F4gPRi2+0O6S71kOZYyr3cxaIQ==}
engines: {node: '>=v14.0.0', npm: '>=7.0.0'}
'@tsconfig/node10@1.0.11': '@tsconfig/node10@1.0.11':
resolution: {integrity: sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==} resolution: {integrity: sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==}
@ -321,13 +302,6 @@ packages:
'@types/semver@7.5.8': '@types/semver@7.5.8':
resolution: {integrity: sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==} resolution: {integrity: sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==}
'@types/ws@8.5.12':
resolution: {integrity: sha512-3tPRkv1EtkDpzlgyKyI8pGsGZAGPEaXeu0DOj5DI25Ja91bdAYddYHbADRYVrZMRbfW+1l5YwXVDKohDJNQxkQ==}
'@vladfrangu/async_event_emitter@2.4.5':
resolution: {integrity: sha512-J7T3gUr3Wz0l7Ni1f9upgBZ7+J22/Q1B7dl0X6fG+fTsD+H+31DIosMHj4Um1dWQwqbcQ3oQf+YS2foYkDc9cQ==}
engines: {node: '>=v14.0.0', npm: '>=7.0.0'}
acorn-walk@8.3.3: acorn-walk@8.3.3:
resolution: {integrity: sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==} resolution: {integrity: sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==}
engines: {node: '>=0.4.0'} engines: {node: '>=0.4.0'}
@ -424,6 +398,11 @@ packages:
date-fns@3.6.0: date-fns@3.6.0:
resolution: {integrity: sha512-fRHTG8g/Gif+kSh50gaGEdToemgfj74aRX3swtiouboip5JDLAyDE9F11nHMIcvOaXeOC6D7SpNhi7uFyB7Uww==} resolution: {integrity: sha512-fRHTG8g/Gif+kSh50gaGEdToemgfj74aRX3swtiouboip5JDLAyDE9F11nHMIcvOaXeOC6D7SpNhi7uFyB7Uww==}
dd-cache-proxy@2.1.1:
resolution: {integrity: sha512-7/5vLchfmhtkx0M0KXFA3vukPJR7+LPN9ci2yTCS/GBrV3YPEO1vie70d1+dzEYlGH1v4BGDxapBR3SykX3lgw==}
peerDependencies:
'@discordeno/bot': 19.0.0-next.d69e537
debug@4.3.6: debug@4.3.6:
resolution: {integrity: sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==} resolution: {integrity: sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==}
engines: {node: '>=6.0'} engines: {node: '>=6.0'}
@ -437,13 +416,6 @@ packages:
resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==}
engines: {node: '>=0.3.1'} engines: {node: '>=0.3.1'}
discord-api-types@0.37.83:
resolution: {integrity: sha512-urGGYeWtWNYMKnYlZnOnDHm8fVRffQs3U0SpE8RHeiuLKb/u92APS8HoQnPTFbnXmY1vVnXjXO4dOxcAn3J+DA==}
discord.js@14.15.3:
resolution: {integrity: sha512-/UJDQO10VuU6wQPglA4kz2bw2ngeeSbogiIPx/TsnctfzV/tNf+q+i1HlgtX1OGpeOBpJH9erZQNO5oRM2uAtQ==}
engines: {node: '>=16.11.0'}
dotenv@16.4.5: dotenv@16.4.5:
resolution: {integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==} resolution: {integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==}
engines: {node: '>=12'} engines: {node: '>=12'}
@ -467,9 +439,6 @@ packages:
resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==}
engines: {node: '>=0.8.0'} engines: {node: '>=0.8.0'}
fast-deep-equal@3.1.3:
resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==}
fill-range@7.1.1: fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'} engines: {node: '>=8'}
@ -559,15 +528,6 @@ packages:
lines-and-columns@1.2.4: lines-and-columns@1.2.4:
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
lodash.snakecase@4.1.1:
resolution: {integrity: sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw==}
lodash@4.17.21:
resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==}
magic-bytes.js@1.10.0:
resolution: {integrity: sha512-/k20Lg2q8LE5xiaaSkMXk4sfvI+9EGEykFS4b0CHHGWqDYU0bGUFSwchNOMA56D7TCs9GwVTkqe9als1/ns8UQ==}
make-error@1.3.6: make-error@1.3.6:
resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==}
@ -685,6 +645,10 @@ packages:
postgres-range@1.1.4: postgres-range@1.1.4:
resolution: {integrity: sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w==} resolution: {integrity: sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w==}
pretty-bytes@6.1.1:
resolution: {integrity: sha512-mQUvGU6aUFQ+rNvTIAcZuWGRT9a6f6Yrg9bHs4ImKF+HZCEK+plBvnAZYSIQztknZF2qnzNtr6F8s0+IuptdlQ==}
engines: {node: ^14.13.1 || >=16.0.0}
pstree.remy@1.1.8: pstree.remy@1.1.8:
resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==} resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==}
@ -740,9 +704,6 @@ packages:
resolution: {integrity: sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==} resolution: {integrity: sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==}
hasBin: true hasBin: true
ts-mixer@6.0.4:
resolution: {integrity: sha512-ufKpbmrugz5Aou4wcr5Wc1UUFWOLhq+Fm6qa6P0w0K5Qw2yhaUoiWszhCVuNQyNwrlGiscHOmqYoAox1PtvgjA==}
ts-node@10.9.2: ts-node@10.9.2:
resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==} resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==}
hasBin: true hasBin: true
@ -782,10 +743,6 @@ packages:
undici-types@6.13.0: undici-types@6.13.0:
resolution: {integrity: sha512-xtFJHudx8S2DSoujjMd1WeWvn7KKWFRESZTMeL1RptAYERu29D6jphMjjY+vn96jvN3kVPDNxU/E13VTaXj6jg==} resolution: {integrity: sha512-xtFJHudx8S2DSoujjMd1WeWvn7KKWFRESZTMeL1RptAYERu29D6jphMjjY+vn96jvN3kVPDNxU/E13VTaXj6jg==}
undici@6.13.0:
resolution: {integrity: sha512-Q2rtqmZWrbP8nePMq7mOJIN98M0fYvSgV89vwl/BQRT4mDOeY2GXZngfGpcBBhtky3woM7G24wZV3Q304Bv6cw==}
engines: {node: '>=18.0'}
v8-compile-cache-lib@3.0.1: v8-compile-cache-lib@3.0.1:
resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==}
@ -845,53 +802,36 @@ snapshots:
dependencies: dependencies:
'@jridgewell/trace-mapping': 0.3.9 '@jridgewell/trace-mapping': 0.3.9
'@discordjs/builders@1.8.2': '@discordeno/bot@19.0.0-next.746f0a9':
dependencies: dependencies:
'@discordjs/formatters': 0.4.0 '@discordeno/gateway': 19.0.0-next.746f0a9
'@discordjs/util': 1.1.0 '@discordeno/rest': 19.0.0-next.746f0a9
'@sapphire/shapeshift': 3.9.7 '@discordeno/types': 19.0.0-next.746f0a9
discord-api-types: 0.37.83 '@discordeno/utils': 19.0.0-next.746f0a9
fast-deep-equal: 3.1.3 transitivePeerDependencies:
ts-mixer: 6.0.4 - bufferutil
tslib: 2.6.2 - utf-8-validate
'@discordjs/collection@1.5.3': {} '@discordeno/gateway@19.0.0-next.746f0a9':
'@discordjs/collection@2.1.0': {}
'@discordjs/formatters@0.4.0':
dependencies: dependencies:
discord-api-types: 0.37.83 '@discordeno/types': 19.0.0-next.746f0a9
'@discordeno/utils': 19.0.0-next.746f0a9
'@discordjs/rest@2.3.0':
dependencies:
'@discordjs/collection': 2.1.0
'@discordjs/util': 1.1.0
'@sapphire/async-queue': 1.5.3
'@sapphire/snowflake': 3.5.3
'@vladfrangu/async_event_emitter': 2.4.5
discord-api-types: 0.37.83
magic-bytes.js: 1.10.0
tslib: 2.6.2
undici: 6.13.0
'@discordjs/util@1.1.0': {}
'@discordjs/ws@1.1.1':
dependencies:
'@discordjs/collection': 2.1.0
'@discordjs/rest': 2.3.0
'@discordjs/util': 1.1.0
'@sapphire/async-queue': 1.5.3
'@types/ws': 8.5.12
'@vladfrangu/async_event_emitter': 2.4.5
discord-api-types: 0.37.83
tslib: 2.6.2
ws: 8.18.0 ws: 8.18.0
transitivePeerDependencies: transitivePeerDependencies:
- bufferutil - bufferutil
- utf-8-validate - utf-8-validate
'@discordeno/rest@19.0.0-next.746f0a9':
dependencies:
'@discordeno/types': 19.0.0-next.746f0a9
'@discordeno/utils': 19.0.0-next.746f0a9
'@discordeno/types@19.0.0-next.746f0a9': {}
'@discordeno/utils@19.0.0-next.746f0a9':
dependencies:
'@discordeno/types': 19.0.0-next.746f0a9
'@esbuild/aix-ppc64@0.21.5': '@esbuild/aix-ppc64@0.21.5':
optional: true optional: true
@ -972,15 +912,6 @@ snapshots:
'@jridgewell/resolve-uri': 3.1.2 '@jridgewell/resolve-uri': 3.1.2
'@jridgewell/sourcemap-codec': 1.5.0 '@jridgewell/sourcemap-codec': 1.5.0
'@sapphire/async-queue@1.5.3': {}
'@sapphire/shapeshift@3.9.7':
dependencies:
fast-deep-equal: 3.1.3
lodash: 4.17.21
'@sapphire/snowflake@3.5.3': {}
'@tsconfig/node10@1.0.11': {} '@tsconfig/node10@1.0.11': {}
'@tsconfig/node12@1.0.11': {} '@tsconfig/node12@1.0.11': {}
@ -1019,12 +950,6 @@ snapshots:
'@types/semver@7.5.8': {} '@types/semver@7.5.8': {}
'@types/ws@8.5.12':
dependencies:
'@types/node': 22.1.0
'@vladfrangu/async_event_emitter@2.4.5': {}
acorn-walk@8.3.3: acorn-walk@8.3.3:
dependencies: dependencies:
acorn: 8.12.1 acorn: 8.12.1
@ -1121,6 +1046,10 @@ snapshots:
date-fns@3.6.0: {} date-fns@3.6.0: {}
dd-cache-proxy@2.1.1(@discordeno/bot@19.0.0-next.746f0a9):
dependencies:
'@discordeno/bot': 19.0.0-next.746f0a9
debug@4.3.6(supports-color@5.5.0): debug@4.3.6(supports-color@5.5.0):
dependencies: dependencies:
ms: 2.1.2 ms: 2.1.2
@ -1129,26 +1058,6 @@ snapshots:
diff@4.0.2: {} diff@4.0.2: {}
discord-api-types@0.37.83: {}
discord.js@14.15.3:
dependencies:
'@discordjs/builders': 1.8.2
'@discordjs/collection': 1.5.3
'@discordjs/formatters': 0.4.0
'@discordjs/rest': 2.3.0
'@discordjs/util': 1.1.0
'@discordjs/ws': 1.1.1
'@sapphire/snowflake': 3.5.3
discord-api-types: 0.37.83
fast-deep-equal: 3.1.3
lodash.snakecase: 4.1.1
tslib: 2.6.2
undici: 6.13.0
transitivePeerDependencies:
- bufferutil
- utf-8-validate
dotenv@16.4.5: {} dotenv@16.4.5: {}
emoji-regex@8.0.0: {} emoji-regex@8.0.0: {}
@ -1187,8 +1096,6 @@ snapshots:
escape-string-regexp@1.0.5: {} escape-string-regexp@1.0.5: {}
fast-deep-equal@3.1.3: {}
fill-range@7.1.1: fill-range@7.1.1:
dependencies: dependencies:
to-regex-range: 5.0.1 to-regex-range: 5.0.1
@ -1277,12 +1184,6 @@ snapshots:
lines-and-columns@1.2.4: {} lines-and-columns@1.2.4: {}
lodash.snakecase@4.1.1: {}
lodash@4.17.21: {}
magic-bytes.js@1.10.0: {}
make-error@1.3.6: {} make-error@1.3.6: {}
minimatch@3.1.2: minimatch@3.1.2:
@ -1394,6 +1295,8 @@ snapshots:
postgres-range@1.1.4: {} postgres-range@1.1.4: {}
pretty-bytes@6.1.1: {}
pstree.remy@1.1.8: {} pstree.remy@1.1.8: {}
readdirp@3.6.0: readdirp@3.6.0:
@ -1438,8 +1341,6 @@ snapshots:
touch@3.1.1: {} touch@3.1.1: {}
ts-mixer@6.0.4: {}
ts-node@10.9.2(@types/node@22.1.0)(typescript@5.5.4): ts-node@10.9.2(@types/node@22.1.0)(typescript@5.5.4):
dependencies: dependencies:
'@cspotcode/source-map-support': 0.8.1 '@cspotcode/source-map-support': 0.8.1
@ -1477,8 +1378,6 @@ snapshots:
undici-types@6.13.0: {} undici-types@6.13.0: {}
undici@6.13.0: {}
v8-compile-cache-lib@3.0.1: {} v8-compile-cache-lib@3.0.1: {}
wrap-ansi@7.0.0: wrap-ansi@7.0.0:

57
services/bot/src/bot.ts Normal file
View File

@ -0,0 +1,57 @@
import { createBot, Intents, type Bot } from '@discordeno/bot'
import { createProxyCache, } from 'dd-cache-proxy';
import { configs } from './config.ts'
/**
 * The Discord bot client, wrapped in dd-cache-proxy so that guild and
 * role objects are served from an in-memory cache.
 * Only the properties listed in `desiredProps` are retained on cached objects.
 */
const rawBot = createBot({
  token: configs.token,
  intents: Intents.Guilds
})

export const bot = createProxyCache(rawBot, {
  // Properties to keep on cached entities; everything else is discarded.
  desiredProps: {
    guilds: ['id', 'name', 'roles'],
    roles: ['id', 'guildId', 'permissions'],
  },
  // Cache guilds and roles in memory; cache nothing else by default.
  cacheInMemory: {
    guilds: true,
    roles: true,
    default: false,
  },
})
// @todo figure out where this code belongs
// gateway.resharding.getSessionInfo = async () => { // insert code here to fetch getSessionInfo from rest process. }
// Setup desired properties.
// Discordeno strips every property it is not explicitly told to keep, so each
// field the command handlers read must be opted in here or it will be undefined.
// Interaction fields used for command dispatch and replies:
bot.transformers.desiredProperties.interaction.id = true
bot.transformers.desiredProperties.interaction.type = true
bot.transformers.desiredProperties.interaction.data = true
bot.transformers.desiredProperties.interaction.token = true
bot.transformers.desiredProperties.interaction.guildId = true
bot.transformers.desiredProperties.interaction.member = true
bot.transformers.desiredProperties.interaction.message = true
// Message fields (e.g. the /record reply whose id keys the database record):
bot.transformers.desiredProperties.message.activity = true
bot.transformers.desiredProperties.message.id = true
bot.transformers.desiredProperties.message.referencedMessage = true
// Guild/role fields — must match the desiredProps lists given to the cache proxy:
bot.transformers.desiredProperties.guild.id = true
bot.transformers.desiredProperties.guild.name = true
bot.transformers.desiredProperties.guild.roles = true
bot.transformers.desiredProperties.role.id = true
bot.transformers.desiredProperties.role.guildId = true
bot.transformers.desiredProperties.role.permissions = true
// Member/channel/user fields used for logging and permission checks:
bot.transformers.desiredProperties.member.id = true
bot.transformers.desiredProperties.member.roles = true
bot.transformers.desiredProperties.channel.id = true
bot.transformers.desiredProperties.user.id = true
bot.transformers.desiredProperties.user.username = true
bot.transformers.desiredProperties.user.discriminator = true

View File

@ -0,0 +1,22 @@
// @see https://github.com/discordeno/discordeno/blob/main/examples/advanced/src/commands.ts
import { type ApplicationCommandOption, type ApplicationCommandTypes, Collection, type Interaction } from '@discordeno/bot'
/** Shape of an application (slash) command this bot can register and execute. */
export interface Command {
  /** The name of this command. */
  name: string
  /** What does this command do? */
  description: string
  /** The type of command this is. */
  type: ApplicationCommandTypes
  /** The options for this command */
  options?: ApplicationCommandOption[]
  /** This will be executed when the command is run. */
  execute: (interaction: Interaction, options: Record<string, unknown>) => unknown
}

/** Registry of every known command, keyed by command name. */
export const commands = new Collection<string, Command>()

/** Add a command to the registry so the interaction handler can dispatch to it. */
export function createCommand(command: Command): void {
  commands.set(command.name, command)
}

View File

@ -0,0 +1,49 @@
import { ApplicationCommandTypes, type Interaction } from '@discordeno/bot'
import { createCommand } from '../commands.ts'
const dongers: string[] = [
'( ͡ᵔ ͜ʖ ͡ᵔ )',
'¯\_(ツ)_/¯',
'(๑>ᴗ<๑)',
'(̿▀̿ ̿Ĺ̯̿̿▀̿ ̿)',
'( ͡° ͜ʖ ͡°)',
'٩(͡๏̯͡๏)۶',
'ლ(´◉❥◉`ლ)',
'( ゚Д゚)',
'ԅ( ͒ ۝ ͒ )ᕤ',
'( ͡ᵔ ͜ʖ ͡°)',
'( ͠° ͟ʖ ͡°)╭∩╮',
'༼ つ ❦౪❦ ༽つ',
'( ͡↑ ͜ʖ ͡↑)',
'(ভ_ ভ) ރ / ┊ \',
'ヽ(⌐□益□)ノ',
'༼ つ ◕‿◕ ༽つ',
'ヽ(⚆෴⚆)ノ',
'(つ .•́ _ʖ •̀.)つ',
'༼⌐■ل͟■༽',
'┬─┬ノ( ͡° ͜ʖ ͡°ノ)',
'༼⁰o⁰༽꒳ᵒ꒳ᵎᵎᵎ',
'( -_・) ▄︻̷̿┻̿═━一',
'【 º ᗜ º 】',
'ᕦ(✧╭╮✧)ᕥ',
'┗( TT )┛',
'(Φ ᆺ Φ)',
'(TдT)',
'☞(◉▽◉)☞'
];
createCommand({
name: 'donger',
description: 'Get a free donger!',
type: ApplicationCommandTypes.ChatInput,
async execute(interaction: Interaction) {
const selectedDonger = dongers[Math.floor(Math.random()*dongers.length)]
console.log(`selectedDonger=${selectedDonger}`)
await interaction.respond({
content: selectedDonger
})
},
})

View File

@ -0,0 +1,15 @@
import { ApplicationCommandTypes, createEmbeds, snowflakeToTimestamp, type Interaction } from '@discordeno/bot'
import { createCommand } from '../commands.ts'
/**
 * /ping — reports the bot's latency, measured as the gap between now and
 * the timestamp embedded in the interaction's snowflake id.
 */
createCommand({
  name: 'ping',
  description: 'See if the bot latency is okay',
  type: ApplicationCommandTypes.ChatInput,
  async execute(interaction: Interaction) {
    const issuedAt = snowflakeToTimestamp(interaction.id)
    const latencyMs = Date.now() - issuedAt
    const embeds = createEmbeds().setTitle(`The bot ping is ${latencyMs}ms`)
    await interaction.respond({ embeds })
  },
})

View File

@ -0,0 +1,141 @@
import {
ApplicationCommandOptionTypes,
ApplicationCommandTypes,
type Interaction,
EmbedsBuilder,
type InteractionCallbackData,
} from '@discordeno/bot'
import { createCommand } from '../commands.ts'
import { configs } from '../config.ts'
/**
 * Create a recording record via PostgREST and return its numeric id.
 *
 * @param url - livestream URL to record
 * @param discordMessageId - id of the Discord status message tied to this record
 * @returns the new record's integer id
 * @throws Error when the POST fails or the id cannot be parsed from the
 *         Location response header
 */
async function createRecordInDatabase(url: string, discordMessageId: string) {
  const record = {
    url,
    recording_state: 'pending',
    discord_message_id: discordMessageId,
    file_size: 0
  }
  const res = await fetch(`${configs.postgrestUrl}/records`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${configs.automationUserJwt}`,
      // we only need the Location header back, not the row body
      'Prefer': 'return=headers-only'
    },
    body: JSON.stringify(record)
  })
  if (!res.ok) {
    const status = res.status
    const statusText = res.statusText
    const msg = `fetch failed to create recording record in database. status=${status}, statusText=${statusText}`
    console.error(msg)
    throw new Error(msg)
  }
  // PostgREST responds with e.g. `Location: /records?id=eq.42`;
  // the id is the segment after the final '.'
  const id = res.headers.get('location')?.split('.').at(-1)
  if (!id) throw new Error('id could not be parsed from location header');
  // always pass a radix — bare parseInt can misinterpret prefixed strings
  return Number.parseInt(id, 10)
}
/**
 * /record <url> — start recording a livestream.
 * Defers, replies with a status embed, then creates a `records` row in the
 * database keyed to the reply's message id so workers can update that message.
 */
createCommand({
  name: 'record',
  description: 'Record a livestream.',
  type: ApplicationCommandTypes.ChatInput,
  options: [
    {
      name: 'url',
      description: 'URL of the livestream',
      type: ApplicationCommandOptionTypes.String,
      // execute() cannot do anything without a url; let Discord enforce it
      required: true
    },
  ],
  async execute(interaction: Interaction) {
    // Acknowledge immediately: the database round-trip below could exceed
    // Discord's 3-second interaction response deadline.
    await interaction.defer()
    console.log('interaction.data as follows')
    console.log(interaction.data)
    const options = interaction.data?.options
    if (!options) throw new Error(`interaction options was undefined. it's expected to be an array of options.`);
    const urlOption = options.find((o) => o.name === 'url')
    if (!urlOption) throw new Error(`url option was missing from interaction data`);
    // Coerce defensively: a nullish value must become '' rather than the
    // truthy string 'undefined', so the emptiness check below actually fires.
    const url = urlOption.value == null ? '' : String(urlOption.value)
    if (!url) throw new Error(`url was missing from interaction data options`);

    // Respond to the interaction, obtaining the message id that keys the
    // database record.
    const embeds = new EmbedsBuilder()
      .setTitle(`Record ⋅`)
      .setDescription('Waiting for a worker to start the job.')
      .setFields([
        { name: 'Status', value: 'Pending', inline: true },
        { name: 'Filesize', value: '0 bytes', inline: true },
        { name: 'URL', value: url, inline: false }
      ])
      .setColor('#33eb23')
    const response: InteractionCallbackData = { embeds }
    const message = await interaction.edit(response)
    console.log('deferred, interaction message is as follows')
    console.log(message)
    if (!message?.id) {
      const msg = `message.id was empty, ruh roh raggy`
      console.error(msg)
      throw new Error(msg)
    }
    const record = await createRecordInDatabase(url, message.id.toString())
    console.log(record)
  },
})
// const statusEmbed = new EmbedBuilder()
// .setTitle('Pending')
// .setDescription('Waiting for a worker to accept the job.')
// .setColor(2326507)
// const buttonRow = new ActionRowBuilder<MessageActionRowComponentBuilder>()
// .addComponents([
// new ButtonBuilder()
// .setCustomId('stop')
// .setLabel('Stop Recording')
// .setEmoji('🛑')
// .setStyle(ButtonStyle.Danger),
// ]);
// const command: CreateSlashApplicationCommand = {
// name: 'record',
// description: 'Record a livestream.',
// options: [
// {
// name: 'url',
// description: 'URL of the livestream',
// type: ApplicationCommandOptionTypes.String
// },
// ],
// async execute(interaction: Interaction) {
// const ping = Date.now() - snowflakeToTimestamp(interaction.id)
// const embeds = createEmbeds().setTitle(`The bot ping is ${ping}ms`)
// await interaction.respond({ embeds })
// },
// }
// export default command

View File

@ -0,0 +1,18 @@
/** Bot configuration assembled from required environment variables. */
export interface Config {
  token: string;
  postgrestUrl: string;
  automationUserJwt: string;
}

/** Read a required env var, failing fast with the conventional message. */
function requireEnv(name: string): string {
  const value = process.env[name]
  if (!value) throw new Error(`Missing ${name} env var`);
  return value
}

// Validate in the same order as before so the first error reported is stable.
const postgrestUrl = requireEnv('POSTGREST_URL')
const token = requireEnv('DISCORD_TOKEN')
const automationUserJwt = requireEnv('AUTOMATION_USER_JWT')

export const configs: Config = {
  token,
  postgrestUrl,
  automationUserJwt,
}

View File

@ -1,71 +1,22 @@
import { Events, type Interaction, Client, Collection } from 'discord.js'; import { InteractionTypes, commandOptionsParser, type Interaction } from '@discordeno/bot'
import type { WorkerUtils } from 'graphile-worker'; import { bot } from '../bot.ts'
import { commands } from '../commands.ts'
interface ExtendedClient extends Client { bot.events.interactionCreate = async (interaction: Interaction) => {
commands: Collection<string, any> if (!interaction.data || interaction.type !== InteractionTypes.ApplicationCommand) return
const command = commands.get(interaction.data.name)
if (!command) {
bot.logger.error(`Command ${interaction.data.name} not found`)
return
}
const options = commandOptionsParser(interaction)
try {
await command.execute(interaction, options)
} catch (error) {
bot.logger.error(`There was an error running the ${command.name} command.`, error)
}
} }
export default {
name: Events.InteractionCreate,
once: false,
async execute(interaction: Interaction, workerUtils: WorkerUtils) {
// if (!interaction.isChatInputCommand()) return;
// console.log(interaction.client)
// const command = interaction.client.commands.get(interaction.commandName);
if (interaction.isButton()) {
console.log(`the interaction is a button type with customId=${interaction.customId}, message.id=${interaction.message.id}, user=${interaction.user.id} (${interaction.user.globalName})`)
if (interaction.customId === 'stop') {
interaction.reply('[stop] IDK IDK IDK ??? @todo')
workerUtils.addJob('stopRecording', { discordMessageId: interaction.message.id, userId: interaction.user.id })
} else if (interaction.customId === 'retry') {
interaction.reply('[retry] IDK IDK IDK ??? @todo')
workerUtils.addJob('startRecording', { discordMessageId: interaction.message.id, userId: interaction.user.id })
} else {
console.error(`this button's customId=${interaction.customId} did not match one of the known customIds`)
}
} else if (interaction.isChatInputCommand()) {
console.log(`the interaction is a ChatInputCommandInteraction with commandName=${interaction.commandName}, user=${interaction.user.id} (${interaction.user.globalName})`)
const client = interaction.client as ExtendedClient
const command = client.commands.get(interaction.commandName);
if (!command) {
console.error(`No command matching ${interaction.commandName} was found.`);
return;
}
command.execute({ interaction, workerUtils })
}
},
};
// const { Events } = require('discord.js');
// module.exports = {
// name: Events.ClientReady,
// once: true,
// execute(client) {
// console.log(`Ready! Logged in as ${client.user.tag}`);
// },
// };
// client.on(Events.InteractionCreate, interaction => {
// if (interaction.isChatInputCommand()) {
// const { commandName } = interaction;
// console.log(`Received interaction with commandName=${commandName}`)
// const cmd = commands.find((c) => c.data.name === commandName)
// if (!cmd) {
// console.log(`no command handler matches commandName=${commandName}`)
// return;
// }
// cmd.execute({ interaction, workerUtils })
// } else {
// // probably a ButtonInteraction
// console.log(interaction)
// }
// });

View File

@ -0,0 +1,20 @@
import { ActivityTypes } from '@discordeno/bot'
import { bot } from '../bot.ts'
/**
 * Ready handler: when a shard finishes connecting, mark it online and
 * display a "Watching chat" activity stamped with the connect time.
 */
bot.events.ready = async ({ shardId }) => {
  const watchingChat = {
    name: 'chat',
    type: ActivityTypes.Watching,
    timestamps: {
      start: Date.now(),
    },
  }
  await bot.gateway.editShardStatus(shardId, {
    status: 'online',
    activities: [watchingChat],
  })
}

View File

@ -1,90 +1,77 @@
import 'dotenv/config' import 'dotenv/config'
import { type ChatInputCommandInteraction, Client, GatewayIntentBits, Partials, Collection } from 'discord.js' // import loadCommands from './loadCommands.js'
import loadCommands from './loadCommands.js' // import deployCommands from './deployCommands.js'
import deployCommands from './deployCommands.js' // import loadEvents from './loadEvents.js'
import discordMessageUpdate from './tasks/discordMessageUpdate.js' // import updateDiscordMessage from './tasks/update_discord_message.js'
import { makeWorkerUtils, type WorkerUtils, type RunnerOptions, run } from 'graphile-worker' import { type WorkerUtils } from 'graphile-worker'
import loadEvents from './loadEvents.js' import { bot } from './bot.ts'
import type { Interaction } from '@discordeno/bot'
import { importDirectory } from './utils/loader.ts'
import { join, dirname } from 'node:path'
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
export interface ExecuteArguments { export interface ExecuteArguments {
interaction: ChatInputCommandInteraction; interaction: Interaction;
workerUtils: WorkerUtils workerUtils: WorkerUtils;
} }
if (!process.env.AUTOMATION_USER_JWT) throw new Error(`AUTOMATION_USER_JWT was missing from env`); if (!process.env.AUTOMATION_USER_JWT) throw new Error(`AUTOMATION_USER_JWT was missing from env`);
if (!process.env.DISCORD_TOKEN) throw new Error("DISCORD_TOKEN was missing from env"); if (!process.env.DISCORD_TOKEN) throw new Error("DISCORD_TOKEN was missing from env");
if (!process.env.DISCORD_CHANNEL_ID) throw new Error("DISCORD_CHANNEL_ID was missing from env"); if (!process.env.DISCORD_CHANNEL_ID) throw new Error("DISCORD_CHANNEL_ID was missing from env");
if (!process.env.DISCORD_GUILD_ID) throw new Error("DISCORD_GUILD_ID was missing from env");
if (!process.env.WORKER_CONNECTION_STRING) throw new Error("WORKER_CONNECTION_STRING was missing from env"); if (!process.env.WORKER_CONNECTION_STRING) throw new Error("WORKER_CONNECTION_STRING was missing from env");
const preset: GraphileConfig.Preset = { const preset: GraphileConfig.Preset = {
worker: { worker: {
connectionString: process.env.WORKER_CONNECTION_STRING, connectionString: process.env.WORKER_CONNECTION_STRING,
concurrentJobs: 1, concurrentJobs: 3,
fileExtensions: [".js", ".ts"] fileExtensions: [".js", ".ts"]
}, },
}; };
async function setupGraphileWorker() { // async function setupGraphileWorker() {
const runnerOptions: RunnerOptions = { // const runnerOptions: RunnerOptions = {
preset, // preset,
taskList: { // taskList: {
'discordMessageUpdate': discordMessageUpdate // 'updateDiscordMessage': updateDiscordMessage
} // }
} // }
const runner = await run(runnerOptions) // const runner = await run(runnerOptions)
if (!runner) throw new Error('failed to initialize graphile worker'); // if (!runner) throw new Error('failed to initialize graphile worker');
await runner.promise // await runner.promise
} // }
async function setupWorkerUtils() { // async function setupWorkerUtils() {
const workerUtils = await makeWorkerUtils({ // const workerUtils = await makeWorkerUtils({
preset // preset
}); // });
await workerUtils.migrate() // await workerUtils.migrate()
return workerUtils // return workerUtils
} // }
async function setupDiscordBot(commands: any[], workerUtils: WorkerUtils) {
console.log(`setup()`)
if (!commands) throw new Error('commands passed to setup() was missing');
console.log(`Create a new client instance`)
let client: any = new Client({
intents: [
GatewayIntentBits.Guilds,
GatewayIntentBits.GuildMessages,
GatewayIntentBits.GuildMessageReactions,
],
partials: [
Partials.Message,
Partials.Channel,
Partials.Reaction,
]
});
client.commands = new Collection();
commands.forEach((c) => client.commands.set(c.data.name, c))
// Log in to Discord with your client's token
client.login(process.env.DISCORD_TOKEN);
await loadEvents(client, workerUtils)
}
async function main() { async function main() {
console.log(`main()`)
const commands = await loadCommands() bot.logger.info('Starting @futureporn/bot.')
if (!commands) throw new Error('there were no commands available to be loaded.');
await deployCommands(commands.map((c) => c.data.toJSON())) bot.logger.info('Loading commands...')
console.log(`${commands.length} commands deployed: ${commands.map((c) => c.data.name).join(', ')}`) await importDirectory(join(__dirname, './commands'))
const workerUtils = await setupWorkerUtils()
setupDiscordBot(commands, workerUtils) bot.logger.info('Loading events...')
setupGraphileWorker() await importDirectory(join(__dirname, './events'))
// const commands = await loadCommands()
// if (!commands) throw new Error('there were no commands available to be loaded.');
// await deployCommands(commands.map((c) => c.data.toJSON()))
// console.log(`${commands.length} commands deployed: ${commands.map((c) => c.data.name).join(', ')}`)
// const workerUtils = await setupWorkerUtils()
// setupGraphileWorker()
await bot.start()
} }
main().catch((e) => { main().catch((e) => {

View File

@ -0,0 +1,18 @@
import { type CreateApplicationCommand, type CreateSlashApplicationCommand, type Interaction } from '@discordeno/bot'
import record from '../commands/record.ts'
import donger from '../commands/donger.ts'
/** All slash commands, keyed by command name for O(1) dispatch at interaction time. */
export const commands = new Map<string, Command>()
for (const cmd of [record, donger]) {
  commands.set(cmd.name, cmd)
}
export default commands

/** A slash-command definition paired with its runtime handler. */
export interface Command extends CreateSlashApplicationCommand {
  /** Handler that will be executed when this command is triggered */
  execute(interaction: Interaction, args: Record<string, any>): Promise<any>
}

View File

@ -0,0 +1,8 @@
import type { EventHandlers } from '@discordeno/bot'
// NOTE(review): this imports a *named* export `event` from a `.ts.old` file,
// but that file appears to only have a default export — confirm the binding
// exists, and whether the `.ts.old` extension actually resolves at build time.
import { event as interactionCreateEvent } from './interactionCreate.ts.old'

// Central registry of discordeno event handlers wired onto the bot.
export const events = {
  interactionCreate: interactionCreateEvent,
} as Partial<EventHandlers>

export default events

View File

@ -0,0 +1,71 @@
import { Events, type Interaction, Client, Collection } from 'discord.js';
import type { WorkerUtils } from 'graphile-worker';
// discord.js Client augmented with a `commands` registry, populated at startup
// so interaction handlers can look up a command by name.
interface ExtendedClient extends Client {
  commands: Collection<string, any>
}
export default {
  name: Events.InteractionCreate,
  once: false,
  /**
   * Routes incoming interactions: button presses enqueue graphile-worker jobs
   * (`stop_recording` / `start_recording`), and slash commands are dispatched
   * to the matching handler registered on the client's `commands` collection.
   *
   * @param interaction - the Discord interaction to handle
   * @param workerUtils - graphile-worker utils used to enqueue jobs
   */
  async execute(interaction: Interaction, workerUtils: WorkerUtils) {
    if (interaction.isButton()) {
      console.log(`the interaction is a button type with customId=${interaction.customId}, message.id=${interaction.message.id}, user=${interaction.user.id} (${interaction.user.globalName})`)
      if (interaction.customId === 'stop') {
        // Await both calls so failures surface here instead of as unhandled rejections.
        await interaction.reply(`Stopped by @${interaction.user.id}`)
        await workerUtils.addJob('stop_recording', { discordMessageId: interaction.message.id, userId: interaction.user.id }, { maxAttempts: 1 })
      } else if (interaction.customId === 'retry') {
        await interaction.reply(`Retried by @${interaction.user.id}`)
        await workerUtils.addJob('start_recording', { discordMessageId: interaction.message.id, userId: interaction.user.id }, { maxAttempts: 3 })
      } else {
        console.error(`this button's customId=${interaction.customId} did not match one of the known customIds`)
      }
    } else if (interaction.isChatInputCommand()) {
      console.log(`the interaction is a ChatInputCommandInteraction with commandName=${interaction.commandName}, user=${interaction.user.id} (${interaction.user.globalName})`)
      const client = interaction.client as ExtendedClient
      const command = client.commands.get(interaction.commandName);
      if (!command) {
        console.error(`No command matching ${interaction.commandName} was found.`);
        return;
      }
      // Await so errors thrown by the command propagate to the dispatcher.
      await command.execute({ interaction, workerUtils })
    }
  },
};
// const { Events } = require('discord.js');
// module.exports = {
// name: Events.ClientReady,
// once: true,
// execute(client) {
// console.log(`Ready! Logged in as ${client.user.tag}`);
// },
// };
// client.on(Events.InteractionCreate, interaction => {
// if (interaction.isChatInputCommand()) {
// const { commandName } = interaction;
// console.log(`Received interaction with commandName=${commandName}`)
// const cmd = commands.find((c) => c.data.name === commandName)
// if (!cmd) {
// console.log(`no command handler matches commandName=${commandName}`)
// return;
// }
// cmd.execute({ interaction, workerUtils })
// } else {
// // probably a ButtonInteraction
// console.log(interaction)
// }
// });

View File

@ -0,0 +1,12 @@
import 'dotenv/config'
import { bot } from '../index.js'
import donger from '../commands/donger.js'
import record from '../commands/record.js'

// Fail fast instead of sending an undefined guild id to the Discord API.
if (!process.env.DISCORD_GUILD_ID) throw new Error('DISCORD_GUILD_ID was missing from env');
const guildId = process.env.DISCORD_GUILD_ID

// Commands to register, guild-scoped so updates take effect immediately.
const commands = [
  donger,
  record
]

await bot.rest.upsertGuildApplicationCommands(guildId, commands)

View File

@ -1,11 +1,4 @@
import {
SlashCommandBuilder,
ActionRowBuilder,
ButtonBuilder,
type MessageActionRowComponentBuilder,
ButtonStyle,
EmbedBuilder
} from 'discord.js';
import type { ExecuteArguments } from '../../index.js'; import type { ExecuteArguments } from '../../index.js';
@ -59,7 +52,7 @@ export default {
const idk = await interaction.reply({ const idk = await interaction.reply({
content: `Recording ${url}`, content: `/record ${url}`,
embeds: [ embeds: [
statusEmbed statusEmbed
], ],
@ -73,7 +66,7 @@ export default {
const message = await idk.fetch() const message = await idk.fetch()
const discordMessageId = message.id const discordMessageId = message.id
await workerUtils.addJob('startRecording', { url, discordMessageId }, { maxAttempts: 3 }) await workerUtils.addJob('start_recording', { url, discordMessageId }, { maxAttempts: 3 })
}, },
}; };

View File

@ -1,8 +1,6 @@
import { type ChatInputCommandInteraction, SlashCommandBuilder } from 'discord.js'; import { type ChatInputCommandInteraction, SlashCommandBuilder } from 'discord.js';
export default { export default {
data: new SlashCommandBuilder() data: new SlashCommandBuilder()
.setName('sim-email') .setName('sim-email')

View File

@ -0,0 +1,7 @@
import 'dotenv/config'
import { bot } from './bot.js'
import { updateApplicationCommands } from './utils/update-commands.ts'

// One-shot script: push the current command definitions to Discord.
bot.logger.info('Updating commands...')
await updateApplicationCommands()

View File

@ -0,0 +1 @@
Task names use underscores because graphile_worker expects them that way: graphile_worker interfaces with PostgreSQL, which conventionally uses lowercase identifiers and underscores.

View File

@ -2,7 +2,9 @@ import 'dotenv/config'
import type { RecordingState } from '@futureporn/types' import type { RecordingState } from '@futureporn/types'
import { type Task, type Helpers } from 'graphile-worker' import { type Task, type Helpers } from 'graphile-worker'
import { add } from 'date-fns' import { add } from 'date-fns'
import prettyBytes from 'pretty-bytes'
import { import {
type APIEmbedField,
Client, Client,
GatewayIntentBits, GatewayIntentBits,
TextChannel, TextChannel,
@ -34,10 +36,13 @@ if (!process.env.DISCORD_GUILD_ID) throw new Error("DISCORD_GUILD_ID was missing
async function editDiscordMessage({ helpers, state, discordMessageId }: { helpers: Helpers, state: RecordingState, discordMessageId: string }) { async function editDiscordMessage({ helpers, recordingState, discordMessageId, url, fileSize, recordId }: { recordId: number, fileSize: number, url: string, helpers: Helpers, recordingState: RecordingState, discordMessageId: string }) {
if (!discordMessageId) throw new Error(`discordMessageId was missing!`);
if (typeof discordMessageId !== 'string') throw new Error(`discordMessageId was not a string!`);
// const { captureJobId } = job.data // const { captureJobId } = job.data
helpers.logger.info(`editDiscordMessage has begun with discordMessageId=${discordMessageId}, state=${state}`) helpers.logger.info(`editDiscordMessage has begun with discordMessageId=${discordMessageId}, state=${recordingState}`)
// create a discord.js client // create a discord.js client
@ -59,12 +64,11 @@ async function editDiscordMessage({ helpers, state, discordMessageId }: { helper
if (!channel) throw new Error(`discord channel was undefined`); if (!channel) throw new Error(`discord channel was undefined`);
const message = await channel.messages.fetch(discordMessageId) const message = await channel.messages.fetch(discordMessageId)
helpers.logger.info(`discordMessageId=${discordMessageId}`)
helpers.logger.info(message as any)
helpers.logger.info(`the following is the message taht we have fetched`) const statusEmbed = getStatusEmbed({ recordId, recordingState, fileSize, url })
helpers.logger.info(message.toString()) const buttonRow = getButtonRow(recordingState)
const statusEmbed = getStatusEmbed(state)
const buttonRow = getButtonRow(state)
// const embed = new EmbedBuilder().setTitle('Attachments'); // const embed = new EmbedBuilder().setTitle('Attachments');
@ -107,11 +111,16 @@ async function getRecordFromDatabase(recordId: number) {
export const updateDiscordMessage: Task = async function (payload, helpers: Helpers) { export const updateDiscordMessage: Task = async function (payload, helpers: Helpers) {
try { try {
assertPayload(payload) assertPayload(payload)
const record = await getRecordFromDatabase(payload.recordId) const { recordId } = payload
const { discordMessageId, state } = record helpers.logger.info(`updateDiscordMessage() with recordId=${recordId}`)
editDiscordMessage({ helpers, state, discordMessageId }) const record = await getRecordFromDatabase(recordId)
const { discord_message_id, recording_state, file_size, url } = record
const recordingState = recording_state
const discordMessageId = discord_message_id
const fileSize = file_size
editDiscordMessage({ helpers, recordingState, discordMessageId, url, fileSize, recordId })
// schedule the next update 10s from now, but only if the recording is still happening // schedule the next update 10s from now, but only if the recording is still happening
if (state !== 'ended') { if (recordingState !== 'ended') {
const runAt = add(new Date(), { seconds: 10 }) const runAt = add(new Date(), { seconds: 10 })
const recordId = record.id const recordId = record.id
await helpers.addJob('updateDiscordMessage', { recordId }, { jobKey: `record_${recordId}_update_discord_message`, maxAttempts: 3, runAt }) await helpers.addJob('updateDiscordMessage', { recordId }, { jobKey: `record_${recordId}_update_discord_message`, maxAttempts: 3, runAt })
@ -121,30 +130,33 @@ export const updateDiscordMessage: Task = async function (payload, helpers: Help
} }
} }
function getStatusEmbed(state: RecordingState) { function getStatusEmbed({
let title, description, color; recordingState, recordId, fileSize, url
if (state === 'pending') { }: { fileSize: number, recordingState: RecordingState, recordId: number, url: string }) {
title = "Pending" let title, description, color, fields;
title = `Record ${recordId}`
fields = [
{ name: 'Status', value: 'Pending', inline: true },
{ name: 'Filesize', value: `${fileSize} bytes (${prettyBytes(fileSize)})`, inline: true },
{ name: 'URL', value: url, inline: false },
] as APIEmbedField[]
if (recordingState === 'pending') {
description = "Waiting for a worker to accept the job." description = "Waiting for a worker to accept the job."
color = 2326507 color = 2326507
} else if (state === 'recording') { } else if (recordingState === 'recording') {
title = 'Recording'
description = 'The stream is being recorded.' description = 'The stream is being recorded.'
color = 392960 color = 392960
} else if (state === 'aborted') { } else if (recordingState === 'aborted') {
title = "Aborted"
description = "The recording was stopped by the user." description = "The recording was stopped by the user."
color = 8289651 color = 8289651
} else if (state === 'ended') { } else if (recordingState === 'ended') {
title = "Ended"
description = "The recording has stopped." description = "The recording has stopped."
color = 10855845 color = 10855845
} else { } else {
title = 'Unknown'
description = 'The recording is in an unknown state? (this is a bug.)' description = 'The recording is in an unknown state? (this is a bug.)'
color = 10855845 color = 10855845
} }
return new EmbedBuilder().setTitle(title).setDescription(description).setColor(color) return new EmbedBuilder().setTitle(title).setDescription(description).setColor(color).setFields(fields)
} }

View File

@ -0,0 +1,23 @@
// greetz https://github.com/discordeno/discordeno/blob/main/examples/advanced/src/utils/loader.ts
import { readdir } from 'node:fs/promises'
import { logger } from '@discordeno/bot'
import { join } from 'node:path'
/**
 * Dynamically imports every .js/.ts module found under `folder` (recursive),
 * used to auto-register command and event modules at startup.
 * An individual import failure is logged but does not abort the rest.
 *
 * @param folder - absolute path of the directory to scan
 */
export async function importDirectory(folder: string): Promise<void> {
  const files = await readdir(folder, { recursive: true })
  for (const filename of files) {
    // Only source modules; skip maps, json, etc.
    if (!filename.endsWith('.js') && !filename.endsWith('.ts')) continue
    logger.info(`loading ${filename}`)
    await import(join(folder, filename)).catch((x) =>
      logger.error(`cannot import ${filename} for reason: ${x}`)
    )
  }
}

View File

@ -0,0 +1,6 @@
import { bot } from '../bot.ts'
import { commands } from '../commands.ts'
/**
 * Pushes every registered slash command to Discord as global application commands.
 * `commands` is a plain `Map`, which has no `.array()` method (that is a
 * discord.js Collection API) — spread its values instead.
 */
export async function updateApplicationCommands(): Promise<void> {
  await bot.helpers.upsertGlobalApplicationCommands([...commands.values()])
}

View File

@ -5,6 +5,8 @@
"skipLibCheck": true, "skipLibCheck": true,
"target": "es2022", "target": "es2022",
"allowJs": true, "allowJs": true,
"noEmit": true,
"allowImportingTsExtensions": true,
"resolveJsonModule": true, "resolveJsonModule": true,
"moduleDetection": "force", "moduleDetection": "force",
"isolatedModules": true, "isolatedModules": true,
@ -22,9 +24,8 @@
}, },
// Include the necessary files for your project // Include the necessary files for your project
"include": [ "include": [
"**/*.ts", "**/*.ts"
"**/*.tsx" , "src/events/interactionCreate.ts.old" ],
],
"exclude": [ "exclude": [
"node_modules" "node_modules"
] ]

View File

@ -1,5 +1,5 @@
import { spawn } from 'child_process'; import { spawn } from 'child_process';
import { PassThrough, pipeline, Readable } from 'stream'; import { EventEmitter, PassThrough, pipeline, Readable } from 'stream';
import prettyBytes from 'pretty-bytes'; import prettyBytes from 'pretty-bytes';
import { Upload } from "@aws-sdk/lib-storage"; import { Upload } from "@aws-sdk/lib-storage";
import { S3Client } from "@aws-sdk/client-s3"; import { S3Client } from "@aws-sdk/client-s3";
@ -14,6 +14,8 @@ export interface RecordArgs {
date?: string; date?: string;
inputStream: Readable; inputStream: Readable;
jobId: string; jobId: string;
abortSignal: AbortSignal;
onProgress: (fileSize: number) => void;
} }
interface MakeS3ClientOptions { interface MakeS3ClientOptions {
@ -38,14 +40,17 @@ export default class Record {
filename?: string; filename?: string;
jobId: string; jobId: string;
date?: string; date?: string;
// saveToDiskStream: Writable; abortSignal: AbortSignal;
onProgress: Function;
constructor({ inputStream, s3Client, bucket, jobId }: RecordArgs) { constructor({ inputStream, s3Client, bucket, jobId, abortSignal, onProgress }: RecordArgs) {
if (!inputStream) throw new Error('Record constructor was missing inputStream.'); if (!inputStream) throw new Error('Record constructor was missing inputStream.');
if (!bucket) throw new Error('Record constructor was missing bucket.'); if (!bucket) throw new Error('Record constructor was missing bucket.');
if (!jobId) throw new Error('Record constructer was missing jobId!'); if (!jobId) throw new Error('Record constructer was missing jobId!');
if (!s3Client) throw new Error('Record constructer was missing s3Client'); if (!s3Client) throw new Error('Record constructer was missing s3Client');
if (!abortSignal) throw new Error('Record constructer was missing abortSignal');
this.inputStream = inputStream this.inputStream = inputStream
this.onProgress = onProgress
this.s3Client = s3Client this.s3Client = s3Client
this.bucket = bucket this.bucket = bucket
this.jobId = jobId this.jobId = jobId
@ -53,7 +58,8 @@ export default class Record {
this.datestamp = new Date().toISOString() this.datestamp = new Date().toISOString()
this.keyName = `${this.datestamp}-${jobId}.ts` this.keyName = `${this.datestamp}-${jobId}.ts`
this.uploadStream = new PassThrough() this.uploadStream = new PassThrough()
// this.saveToDiskStream = createWriteStream('/tmp/idk.ts') // @todo delete this line this.abortSignal = abortSignal
this.abortSignal.addEventListener("abort", this.abortEventListener.bind(this))
} }
@ -94,12 +100,16 @@ export default class Record {
return ffmpegProc.stdout return ffmpegProc.stdout
} }
// async saveToDisk() {
// return new Promise((resolve, reject) => {
// this.saveToDiskStream.once('exit', resolve) abortEventListener() {
// this.saveToDiskStream.once('error', reject) console.log(`abortEventListener has been invoked. this.abortSignal is as follows`)
// }) console.log(this.abortSignal)
// } console.log(JSON.stringify(this.abortSignal, null, 2))
const reason = this.abortSignal.reason
console.log(`aborted the stream download with reason=${reason}`)
this.inputStream.destroy(new Error(reason))
}
async uploadToS3() { async uploadToS3() {
const target = { const target = {
@ -121,7 +131,7 @@ export default class Record {
parallelUploads3.on("httpUploadProgress", (progress) => { parallelUploads3.on("httpUploadProgress", (progress) => {
if (progress?.loaded) { if (progress?.loaded) {
console.log(`loaded ${progress.loaded} bytes (${prettyBytes(progress.loaded)})`); console.log(`uploaded ${progress.loaded} bytes (${prettyBytes(progress.loaded)})`);
} else { } else {
console.log(`httpUploadProgress ${JSON.stringify(progress, null, 2)}`) console.log(`httpUploadProgress ${JSON.stringify(progress, null, 2)}`)
} }
@ -145,9 +155,6 @@ export default class Record {
async start() { async start() {
// @todo remove this
// @todo remove this -- this is test code to validate one stream at a time. here we are saving to disk
// @todo remove this
// streams setup // streams setup
@ -155,6 +162,7 @@ export default class Record {
this.counter += data.length this.counter += data.length
if (this.counter % (1 * 1024 * 1024) <= 1024) { if (this.counter % (1 * 1024 * 1024) <= 1024) {
console.log(`Received ${this.counter} bytes (${prettyBytes(this.counter)})`); console.log(`Received ${this.counter} bytes (${prettyBytes(this.counter)})`);
if (this.onProgress) this.onProgress(this.counter)
} }
}) })
this.uploadStream.on('close', () => { this.uploadStream.on('close', () => {
@ -181,12 +189,12 @@ export default class Record {
console.info('[vvv] drain on inputStream.') console.info('[vvv] drain on inputStream.')
}) })
// pipe the ffmpeg stream to the S3 upload stream // pipe the ffmpeg stream to the S3 upload stream
// this has the effect of uploading the stream to S3 at the same time we're recording it. // this has the effect of uploading the stream to S3 at the same time we're recording it.
pipeline( pipeline(
this.inputStream, this.inputStream,
// this.saveToDiskStream, // @todo delete this test code this.uploadStream,
this.uploadStream, // @todo restore this code
(err) => { (err) => {
if (err) { if (err) {
console.error(`pipeline errored.`) console.error(`pipeline errored.`)

View File

@ -28,7 +28,7 @@ const build = function (opts: Record<string, any>={}, connectionString: string)
app.put('/api/message', async function (request: FastifyRequest<{ Body: MessageBodyType }>, reply) { app.put('/api/message', async function (request: FastifyRequest<{ Body: MessageBodyType }>, reply) {
const { state, discordMessageId } = request.body const { state, discordMessageId } = request.body
if (app?.graphile) { if (app?.graphile) {
const jobId = await app.graphile.addJob('discordMessageUpdate', { const jobId = await app.graphile.addJob('update_discord_message', {
discordMessageId, discordMessageId,
state state
}, { maxAttempts: 3 }) }, { maxAttempts: 3 })
@ -41,7 +41,7 @@ const build = function (opts: Record<string, any>={}, connectionString: string)
console.log(`POST /api/record with url=${url}`) console.log(`POST /api/record with url=${url}`)
if (app?.graphile) { if (app?.graphile) {
const jobId = await app.graphile.addJob('startRecording', { const jobId = await app.graphile.addJob('start_recording', {
url, url,
discordMessageId discordMessageId
}, { maxAttempts: 3 }) }, { maxAttempts: 3 })

View File

@ -10,8 +10,8 @@ import { fileURLToPath } from 'url';
import { getPackageVersion } from '@futureporn/utils'; import { getPackageVersion } from '@futureporn/utils';
import type { GraphileConfig } from "graphile-config"; import type { GraphileConfig } from "graphile-config";
import type {} from "graphile-worker"; import type {} from "graphile-worker";
import startRecording from './tasks/startRecording.ts'; import start_recording from './tasks/start_recording.ts';
import { stopRecording } from './tasks/stopRecording.ts'; import { stop_recording } from './tasks/stop_recording.ts';
import record from './tasks/record.ts' import record from './tasks/record.ts'
const __dirname = dirname(fileURLToPath(import.meta.url)); const __dirname = dirname(fileURLToPath(import.meta.url));
@ -61,8 +61,8 @@ async function worker(workerUtils: WorkerUtils) {
// taskDirectory: join(__dirname, 'tasks'), // taskDirectory: join(__dirname, 'tasks'),
taskList: { taskList: {
'record': record, 'record': record,
'startRecording': startRecording, 'start_recording': start_recording,
'stopRecording': stopRecording 'stop_recording': stop_recording
} }
} }

View File

@ -3,6 +3,7 @@ import { Helpers, type Task } from 'graphile-worker'
import Record from '../Record.ts' import Record from '../Record.ts'
import { getPlaylistUrl } from '@futureporn/scout/ytdlp.ts' import { getPlaylistUrl } from '@futureporn/scout/ytdlp.ts'
import type { RecordingState } from '@futureporn/types' import type { RecordingState } from '@futureporn/types'
import { add } from 'date-fns'
/** /**
* url is the URL to be recorded. Ex: chaturbate.com/projektmelody * url is the URL to be recorded. Ex: chaturbate.com/projektmelody
@ -10,8 +11,8 @@ import type { RecordingState } from '@futureporn/types'
* we use the ID to poll the db to see if the job is aborted by the user * we use the ID to poll the db to see if the job is aborted by the user
*/ */
interface Payload { interface Payload {
url: string, url: string;
recordId: number record_id: number;
} }
interface RecordingRecord { interface RecordingRecord {
@ -19,12 +20,20 @@ interface RecordingRecord {
recordingState: RecordingState; recordingState: RecordingState;
fileSize: number; fileSize: number;
discordMessageId: string; discordMessageId: string;
isAborted: boolean;
}
interface RawRecordingRecord {
id: number;
recording_state: RecordingState;
file_size: number;
discord_message_id: string;
is_aborted: boolean;
} }
function assertPayload(payload: any): asserts payload is Payload { function assertPayload(payload: any): asserts payload is Payload {
if (typeof payload !== "object" || !payload) throw new Error("invalid payload"); if (typeof payload !== "object" || !payload) throw new Error("invalid payload");
if (typeof payload.url !== "string") throw new Error("invalid url"); if (typeof payload.url !== "string") throw new Error("invalid url");
if (typeof payload.recordId !== "number") throw new Error(`invalid recordId=${payload.recordId}`); if (typeof payload.record_id !== "number") throw new Error(`invalid record_id=${payload.record_id}`);
} }
function assertEnv() { function assertEnv() {
@ -37,7 +46,10 @@ function assertEnv() {
if (!process.env.AUTOMATION_USER_JWT) throw new Error('AUTOMATION_USER_JWT was missing in env'); if (!process.env.AUTOMATION_USER_JWT) throw new Error('AUTOMATION_USER_JWT was missing in env');
} }
async function getRecording(url: string, recordId: number, abortSignal: AbortSignal) {
async function getRecording(url: string, recordId: number, helpers: Helpers) {
const abortController = new AbortController()
const abortSignal = abortController.signal
const accessKeyId = process.env.S3_ACCESS_KEY_ID!; const accessKeyId = process.env.S3_ACCESS_KEY_ID!;
const secretAccessKey = process.env.S3_SECRET_ACCESS_KEY!; const secretAccessKey = process.env.S3_SECRET_ACCESS_KEY!;
const region = process.env.S3_REGION!; const region = process.env.S3_REGION!;
@ -46,27 +58,16 @@ async function getRecording(url: string, recordId: number, abortSignal: AbortSig
const playlistUrl = await getPlaylistUrl(url) const playlistUrl = await getPlaylistUrl(url)
const s3Client = Record.makeS3Client({ accessKeyId, secretAccessKey, region, endpoint }) const s3Client = Record.makeS3Client({ accessKeyId, secretAccessKey, region, endpoint })
const inputStream = Record.getFFmpegStream({ url: playlistUrl }) const inputStream = Record.getFFmpegStream({ url: playlistUrl })
const onProgress = (fileSize: number) => { helpers.logger.info(`onProgress() has fired~! fileSize=${fileSize}`); updateDatabaseRecord({ recordId, recordingState: 'recording', fileSize }).then(checkIfAborted).then((isAborted) => isAborted ? abortController.abort() : null) }
const record = new Record({ inputStream, bucket, s3Client, jobId: ''+recordId }) // @todo add abortsignal const record = new Record({ inputStream, onProgress, bucket, s3Client, jobId: ''+recordId, abortSignal })
return record return record
} }
// async function checkIfAborted(recordId: number): Promise<boolean> { function checkIfAborted(record: RawRecordingRecord): boolean {
// const res = await fetch(`${process.env.POSTGREST_URL}/records?id=eq.${recordId}`, { return (record.is_aborted)
// headers: { }
// 'Content-Type': 'application/json',
// 'Accepts': 'application/json'
// }
// })
// if (!res.ok) {
// throw new Error(`failed to checkIfAborted. status=${res.status}, statusText=${res.statusText}`);
// }
// const body = await res.json() as RecordingRecord[];
// if (!body[0]) throw new Error(`failed to get a record that matched recordId=${recordId}`)
// return body[0].isAborted
// }
async function updateDatabaseRecord({recordId, recordingState, fileSize}: { recordId: number, recordingState?: RecordingState, fileSize: number }): Promise<RecordingRecord> { async function updateDatabaseRecord({recordId, recordingState, fileSize}: { recordId: number, recordingState: RecordingState, fileSize: number }): Promise<RawRecordingRecord> {
const payload: any = { const payload: any = {
file_size: fileSize file_size: fileSize
} }
@ -84,7 +85,7 @@ async function updateDatabaseRecord({recordId, recordingState, fileSize}: { reco
if (!res.ok) { if (!res.ok) {
throw new Error(`failed to updateDatabaseRecord. status=${res.status}, statusText=${res.statusText}`); throw new Error(`failed to updateDatabaseRecord. status=${res.status}, statusText=${res.statusText}`);
} }
const body = await res.json() as RecordingRecord[]; const body = await res.json() as RawRecordingRecord[];
if (!body[0]) throw new Error(`failed to get a record that matched recordId=${recordId}`) if (!body[0]) throw new Error(`failed to get a record that matched recordId=${recordId}`)
return body[0] return body[0]
} }
@ -93,33 +94,40 @@ export const record: Task = async function (payload, helpers) {
console.log(payload) console.log(payload)
assertPayload(payload) assertPayload(payload)
assertEnv() assertEnv()
const { url, recordId } = payload const { url, record_id } = payload
const abortController = new AbortController() // let interval
let interval
try { try {
const record = await getRecording(url, recordId, abortController.signal)
// every 30s, we // every 30s, we
// 1. update the db record with the RecordingState and filesize // 1. update the db record with the filesize
// 2. poll db to see if our job has been aborted by the user // 2. poll db to see if our job has been aborted by the user
interval = setInterval(async () => { // interval = setInterval(async () => {
try { // try {
helpers.logger.info(`checkIfAborted()`) // helpers.logger.info(`updateDatabaseRecord()`)
const updatePayload = { recordId, fileSize: record.counter } // const recordingState: RecordingState = 'recording'
const updatedRecord = await updateDatabaseRecord(updatePayload) // const fileSize = record.counter
if (updatedRecord.recordingState === 'ended') { // const updatePayload = { recordingState, recordId, fileSize }
helpers.logger.info(`record ${recordId} has been aborted by a user so we stop the recording now.`) // const updatedRecord = await updateDatabaseRecord(updatePayload)
abortController.abort() // if (updatedRecord.isAborted) {
} // helpers.logger.info(`record ${recordId} has been aborted by a user so we stop the recording now.`)
} catch (e) { // abortController.abort()
helpers.logger.error(`error while updating database. For sake of the recording in progress we are ignoring the following error. ${e}`) // }
} // } catch (e) {
}, 3000) // helpers.logger.error(`error while updating database. For sake of the recording in progress we are ignoring the following error. ${e}`)
// }
// }, 3000)
// start recording and await the S3 upload being finished // start recording and await the S3 upload being finished
const recordId = record_id
const record = await getRecording(url, recordId, helpers)
await record.start() await record.start()
} finally { } catch (e) {
clearInterval(interval) helpers.logger.error(`caught an error duing record(). error as follows`)
if (e instanceof Error) {
helpers.logger.error(e.message)
} else {
helpers.logger.error(JSON.stringify(e))
}
} }
@ -131,6 +139,49 @@ export const record: Task = async function (payload, helpers) {
// await helpers.addJob('record', { url, recordId }) // await helpers.addJob('record', { url, recordId })
} }
/**
* Here we middleman the stream from FFmpeg --> S3,
* counting bits and creating graphile jobs to inform the UI of our progress
*/
// const transformStreamFactory = (recordId: number, helpers: Helpers): PassThrough => {
// let counter = 0
// return new PassThrough ({
// async transform(chunk, controller) {
// controller.enqueue(chunk) // we don't actually transform anything here. we're only gathering statistics.
// counter += chunk.length
// if (counter % (1 * 1024 * 1024) <= 1024) {
// helpers.logger.info(`Updating record ${recordId}`)
// try {
// await updateDatabaseRecord({ fileSize: counter, recordId, recordingState: 'recording' })
// } catch (e) {
// helpers.logger.warn(`We are ignoring the following error which occured while updating db record ${e}`)
// }
// }
// },
// flush() {
// helpers.logger.info(`transformStream has flushed.`)
// }
// })
// }
// export const recordNg: Task = async function (payload, helpers) {
// assertPayload(payload)
// const { url, recordId } = payload
// try {
// const abortController = new AbortController()
// const abortSignal = abortController.signal
// const inputStream =
// const transformStream = transformStreamFactory(recordId, helpers)
// const record = new Record({ inputStream, abortSignal, transformStream })
// await record.done()
// } catch (e) {
// console.error(`error during recording. error as follows`)
// console.error(e)
// } finally {
// helpers.addJob('updateDiscordMessage', { recordId }, { maxAttempts: 3, runAt: add(new Date(), { seconds: 5 }) })
// }
// }

View File

@ -53,15 +53,15 @@ async function createRecordingRecord(payload: Payload, helpers: Helpers): Promis
return parseInt(id) return parseInt(id)
} }
export const startRecording: Task = async function (payload, helpers) { export const start_recording: Task = async function (payload, helpers) {
assertPayload(payload) assertPayload(payload)
assertEnv() assertEnv()
const recordId = await createRecordingRecord(payload, helpers) const recordId = await createRecordingRecord(payload, helpers)
const { url } = payload; const { url } = payload;
console.log(`@todo simulated startRecording with url=${url}, recordId=${recordId}`)
await helpers.addJob('record', { url, recordId }, { maxAttempts: 3, jobKey: `record_${recordId}` }) await helpers.addJob('record', { url, recordId }, { maxAttempts: 3, jobKey: `record_${recordId}` })
const runAt = add(new Date(), { seconds: 10 }) const runAt = add(new Date(), { seconds: 10 })
await helpers.addJob('updateDiscordMessage', { recordId }, { jobKey: `record_${recordId}_update_discord_message`, maxAttempts: 3, runAt }) await helpers.addJob('updateDiscordMessage', { recordId }, { jobKey: `record_${recordId}_update_discord_message`, maxAttempts: 3, runAt })
helpers.logger.info(`startRecording() with url=${url}, recordId=${recordId}, (updateDiscordMessage runAt=${runAt})`)
} }
export default startRecording export default start_recording

View File

@ -11,7 +11,7 @@ function assertPayload(payload: any): asserts payload is Payload {
} }
export const stopRecording: Task = async function (payload) { export const stop_recording: Task = async function (payload) {
assertPayload(payload) assertPayload(payload)
const { id } = payload; const { id } = payload;
console.log(`@todo simulated stop_recording with id=${id}`) console.log(`@todo simulated stop_recording with id=${id}`)

View File

@ -26,4 +26,5 @@ async function main() {
await migrate(dbConfig, path.join(__dirname, "./migrations/")) await migrate(dbConfig, path.join(__dirname, "./migrations/"))
} }
main()
await main()

View File

@ -1,28 +0,0 @@
-- one trigger function to rule them all @see https://worker.graphile.org/docs/sql-add-job#example-one-trigger-function-to-rule-them-all
-- trigger_job(): generic AFTER-trigger body that enqueues a graphile_worker job.
-- The task name is taken from the first trigger argument (TG_ARGV[0]), so one
-- function can back many triggers. The job payload records which schema/table/
-- operation fired, plus the affected row id (OLD.id for DELETE, NEW.id otherwise).
CREATE FUNCTION trigger_job() RETURNS trigger AS $$
BEGIN
PERFORM graphile_worker.add_job(TG_ARGV[0], json_build_object(
'schema', TG_TABLE_SCHEMA,
'table', TG_TABLE_NAME,
'op', TG_OP,
-- DELETE triggers have no NEW row, so fall back to OLD.id in that case.
'id', (CASE WHEN TG_OP = 'DELETE' THEN OLD.id ELSE NEW.id END)
));
RETURN NEW;
END;
$$ LANGUAGE plpgsql VOLATILE;
-- When a record is created, add a graphile_worker job to start recording
-- Fire once per inserted api.records row, enqueueing a 'record' worker job
-- via the generic trigger_job() dispatcher defined above in this migration.
CREATE TRIGGER record
AFTER INSERT ON api.records
FOR EACH ROW
EXECUTE PROCEDURE trigger_job('record');
-- When a record is updated, add a graphile_worker job to update the discord message
-- Fire once per updated api.records row, enqueueing an 'update_discord_message'
-- worker job so the Discord status message tracks record changes.
CREATE TRIGGER update_discord_message
AFTER UPDATE ON api.records
FOR EACH ROW
EXECUTE PROCEDURE trigger_job('update_discord_message');
-- for more reference, @see https://worker.graphile.org/docs/sql-add-job#example-one-trigger-function-to-rule-them-all

View File

@ -0,0 +1,4 @@
-- Pre-create the schema graphile_worker expects, and grant access to both the
-- database owner (postgres) and the PostgREST automation role so each can
-- interact with worker tables. NOTE(review): GRANT ALL ON SCHEMA confers
-- USAGE/CREATE on the schema itself, not on tables created later — confirm
-- table-level privileges are handled elsewhere if automation reads job tables.
CREATE schema graphile_worker;
GRANT all ON SCHEMA graphile_worker TO postgres;
GRANT all ON SCHEMA graphile_worker TO automation;

View File

@ -0,0 +1,29 @@
-- We create a function which lets Postgrest's automation user create jobs in Graphile Worker.
-- Normally only the database owner, in our case `postgres`, can add jobs due to RLS in graphile_worker tables.
-- Under the advice of graphile_worker author, we can use a SECURITY DEFINER wrapper function.
-- @see https://worker.graphile.org/docs/sql-add-job#graphile_workeradd_job:~:text=graphile_worker.add_job(...),that%20are%20necessary.)
-- @see https://discord.com/channels/489127045289476126/1179293106336694333/1179605043729670306
-- @see https://discord.com/channels/489127045289476126/498852330754801666/1067707497235873822
-- tg__add_job(): SECURITY DEFINER trigger wrapper so non-owner roles (e.g. the
-- PostgREST automation user) can enqueue graphile_worker jobs despite RLS on
-- the worker tables (see the referenced discussions above). The task name comes
-- from the first trigger argument; the payload carries the new row's url and id.
-- search_path is pinned to trusted schemas — required hardening for any
-- SECURITY DEFINER function to prevent search_path hijacking.
CREATE FUNCTION public.tg__add_job() RETURNS trigger
LANGUAGE plpgsql SECURITY DEFINER
SET search_path TO 'pg_catalog', 'public', 'pg_temp'
AS $$
begin
-- Unlike the older generic trigger_job(), this payload is specific to
-- api.records rows (NEW.url, NEW.id), so it only suits that table's triggers.
PERFORM graphile_worker.add_job(tg_argv[0], json_build_object(
'url', NEW.url,
'record_id', NEW.id
), max_attempts := 12);
return NEW;
end;
$$;
-- Fire once per inserted api.records row, enqueueing a 'record' worker job via
-- the SECURITY DEFINER wrapper public.tg__add_job() defined in this migration.
CREATE TRIGGER record
AFTER INSERT ON api.records
FOR EACH ROW
EXECUTE PROCEDURE public.tg__add_job('record');