TDD on scout
ci / build (push) Has been cancelled

CJ_Clippy 2024-06-04 15:06:47 -08:00
parent 8d122573bf
commit 194377dbd9
32 changed files with 4291 additions and 726 deletions

View File

@ -1,6 +1,6 @@
git monorepo. git monorepo.
pnpm required for workspaces. pnpm for workspaces.
Kubernetes for Development using Tiltfile Kubernetes for Development using Tiltfile
@ -13,3 +13,7 @@ dokku for Production, deployed with `git push`.
Kubernetes for Production, deployed using FluxCD Kubernetes for Production, deployed using FluxCD
direnv for loading .envrc direnv for loading .envrc
Domain Driven Development
Test Driven Development

View File

@ -44,6 +44,13 @@ spec:
key: postgresRealtimeConnectionString key: postgresRealtimeConnectionString
- name: STRAPI_URL - name: STRAPI_URL
value: https://strapi.piko.sbtp.xyz value: https://strapi.piko.sbtp.xyz
- name: SCOUT_NITTER_ACCESS_KEY
valueFrom:
secretKeyRef:
name: scout
key: nitterAccessKey
- name: SCOUT_NITTER_URL
value: https://nitter.sbtp.xyz
- name: SCOUT_RECENTS_TOKEN - name: SCOUT_RECENTS_TOKEN
valueFrom: valueFrom:
secretKeyRef: secretKeyRef:

View File

@ -10,7 +10,6 @@ import os from 'os'
import fs from 'node:fs' import fs from 'node:fs'
import { loggerFactory } from "./src/logger.js" import { loggerFactory } from "./src/logger.js"
import { verifyStorage } from './src/disk.js' import { verifyStorage } from './src/disk.js'
import faye from 'faye'
import { record, assertDependencyDirectory, checkFFmpeg } from './src/record.js' import { record, assertDependencyDirectory, checkFFmpeg } from './src/record.js'
import fastq from 'fastq' import fastq from 'fastq'
import pRetry from 'p-retry'; import pRetry from 'p-retry';

View File

@ -1,4 +0,0 @@
{
"extension": ["js"],
"spec": "src/**/*.spec.js"
}

View File

@ -1,19 +0,0 @@
{
"name": "common",
"type": "module",
"version": "1.0.0",
"description": "regular expressions, constants, and helper functions which are used app-wide",
"scripts": {
"test": "mocha"
},
"exports": {
"./fansly": "./src/fansly.js"
},
"keywords": [],
"author": "@CJ_Clippy",
"license": "Unlicense",
"dependencies": {
"chai": "^5.1.1",
"mocha": "^10.4.0"
}
}

View File

@ -1,621 +0,0 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
chai:
specifier: ^5.1.1
version: 5.1.1
mocha:
specifier: ^10.4.0
version: 10.4.0
packages:
ansi-colors@4.1.1:
resolution: {integrity: sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==}
engines: {node: '>=6'}
ansi-regex@5.0.1:
resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==}
engines: {node: '>=8'}
ansi-styles@4.3.0:
resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==}
engines: {node: '>=8'}
anymatch@3.1.3:
resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
engines: {node: '>= 8'}
argparse@2.0.1:
resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}
assertion-error@2.0.1:
resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==}
engines: {node: '>=12'}
balanced-match@1.0.2:
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
binary-extensions@2.3.0:
resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==}
engines: {node: '>=8'}
brace-expansion@2.0.1:
resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==}
braces@3.0.3:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
browser-stdout@1.3.1:
resolution: {integrity: sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==}
camelcase@6.3.0:
resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==}
engines: {node: '>=10'}
chai@5.1.1:
resolution: {integrity: sha512-pT1ZgP8rPNqUgieVaEY+ryQr6Q4HXNg8Ei9UnLUrjN4IA7dvQC5JB+/kxVcPNDHyBcc/26CXPkbNzq3qwrOEKA==}
engines: {node: '>=12'}
chalk@4.1.2:
resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==}
engines: {node: '>=10'}
check-error@2.1.1:
resolution: {integrity: sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==}
engines: {node: '>= 16'}
chokidar@3.5.3:
resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==}
engines: {node: '>= 8.10.0'}
cliui@7.0.4:
resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==}
color-convert@2.0.1:
resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
engines: {node: '>=7.0.0'}
color-name@1.1.4:
resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
debug@4.3.4:
resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==}
engines: {node: '>=6.0'}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
decamelize@4.0.0:
resolution: {integrity: sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==}
engines: {node: '>=10'}
deep-eql@5.0.1:
resolution: {integrity: sha512-nwQCf6ne2gez3o1MxWifqkciwt0zhl0LO1/UwVu4uMBuPmflWM4oQ70XMqHqnBJA+nhzncaqL9HVL6KkHJ28lw==}
engines: {node: '>=6'}
diff@5.0.0:
resolution: {integrity: sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==}
engines: {node: '>=0.3.1'}
emoji-regex@8.0.0:
resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
escalade@3.1.2:
resolution: {integrity: sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==}
engines: {node: '>=6'}
escape-string-regexp@4.0.0:
resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==}
engines: {node: '>=10'}
fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'}
find-up@5.0.0:
resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==}
engines: {node: '>=10'}
flat@5.0.2:
resolution: {integrity: sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==}
hasBin: true
fs.realpath@1.0.0:
resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==}
fsevents@2.3.3:
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
get-caller-file@2.0.5:
resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==}
engines: {node: 6.* || 8.* || >= 10.*}
get-func-name@2.0.2:
resolution: {integrity: sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==}
glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
glob@8.1.0:
resolution: {integrity: sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==}
engines: {node: '>=12'}
deprecated: Glob versions prior to v9 are no longer supported
has-flag@4.0.0:
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
engines: {node: '>=8'}
he@1.2.0:
resolution: {integrity: sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==}
hasBin: true
inflight@1.0.6:
resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==}
deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.
inherits@2.0.4:
resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
is-binary-path@2.1.0:
resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
engines: {node: '>=8'}
is-extglob@2.1.1:
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
engines: {node: '>=0.10.0'}
is-fullwidth-code-point@3.0.0:
resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==}
engines: {node: '>=8'}
is-glob@4.0.3:
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
engines: {node: '>=0.10.0'}
is-number@7.0.0:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
is-plain-obj@2.1.0:
resolution: {integrity: sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==}
engines: {node: '>=8'}
is-unicode-supported@0.1.0:
resolution: {integrity: sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==}
engines: {node: '>=10'}
js-yaml@4.1.0:
resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==}
hasBin: true
locate-path@6.0.0:
resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==}
engines: {node: '>=10'}
log-symbols@4.1.0:
resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==}
engines: {node: '>=10'}
loupe@3.1.1:
resolution: {integrity: sha512-edNu/8D5MKVfGVFRhFf8aAxiTM6Wumfz5XsaatSxlD3w4R1d/WEKUTydCdPGbl9K7QG/Ca3GnDV2sIKIpXRQcw==}
minimatch@5.0.1:
resolution: {integrity: sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==}
engines: {node: '>=10'}
mocha@10.4.0:
resolution: {integrity: sha512-eqhGB8JKapEYcC4ytX/xrzKforgEc3j1pGlAXVy3eRwrtAy5/nIfT1SvgGzfN0XZZxeLq0aQWkOUAmqIJiv+bA==}
engines: {node: '>= 14.0.0'}
hasBin: true
ms@2.1.2:
resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==}
ms@2.1.3:
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
normalize-path@3.0.0:
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
engines: {node: '>=0.10.0'}
once@1.4.0:
resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==}
p-limit@3.1.0:
resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==}
engines: {node: '>=10'}
p-locate@5.0.0:
resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==}
engines: {node: '>=10'}
path-exists@4.0.0:
resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==}
engines: {node: '>=8'}
pathval@2.0.0:
resolution: {integrity: sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==}
engines: {node: '>= 14.16'}
picomatch@2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
randombytes@2.1.0:
resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==}
readdirp@3.6.0:
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
engines: {node: '>=8.10.0'}
require-directory@2.1.1:
resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==}
engines: {node: '>=0.10.0'}
safe-buffer@5.2.1:
resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
serialize-javascript@6.0.0:
resolution: {integrity: sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==}
string-width@4.2.3:
resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
engines: {node: '>=8'}
strip-ansi@6.0.1:
resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
engines: {node: '>=8'}
strip-json-comments@3.1.1:
resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==}
engines: {node: '>=8'}
supports-color@7.2.0:
resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==}
engines: {node: '>=8'}
supports-color@8.1.1:
resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==}
engines: {node: '>=10'}
to-regex-range@5.0.1:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
workerpool@6.2.1:
resolution: {integrity: sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==}
wrap-ansi@7.0.0:
resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==}
engines: {node: '>=10'}
wrappy@1.0.2:
resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
y18n@5.0.8:
resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==}
engines: {node: '>=10'}
yargs-parser@20.2.4:
resolution: {integrity: sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==}
engines: {node: '>=10'}
yargs-unparser@2.0.0:
resolution: {integrity: sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==}
engines: {node: '>=10'}
yargs@16.2.0:
resolution: {integrity: sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==}
engines: {node: '>=10'}
yocto-queue@0.1.0:
resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==}
engines: {node: '>=10'}
snapshots:
ansi-colors@4.1.1: {}
ansi-regex@5.0.1: {}
ansi-styles@4.3.0:
dependencies:
color-convert: 2.0.1
anymatch@3.1.3:
dependencies:
normalize-path: 3.0.0
picomatch: 2.3.1
argparse@2.0.1: {}
assertion-error@2.0.1: {}
balanced-match@1.0.2: {}
binary-extensions@2.3.0: {}
brace-expansion@2.0.1:
dependencies:
balanced-match: 1.0.2
braces@3.0.3:
dependencies:
fill-range: 7.1.1
browser-stdout@1.3.1: {}
camelcase@6.3.0: {}
chai@5.1.1:
dependencies:
assertion-error: 2.0.1
check-error: 2.1.1
deep-eql: 5.0.1
loupe: 3.1.1
pathval: 2.0.0
chalk@4.1.2:
dependencies:
ansi-styles: 4.3.0
supports-color: 7.2.0
check-error@2.1.1: {}
chokidar@3.5.3:
dependencies:
anymatch: 3.1.3
braces: 3.0.3
glob-parent: 5.1.2
is-binary-path: 2.1.0
is-glob: 4.0.3
normalize-path: 3.0.0
readdirp: 3.6.0
optionalDependencies:
fsevents: 2.3.3
cliui@7.0.4:
dependencies:
string-width: 4.2.3
strip-ansi: 6.0.1
wrap-ansi: 7.0.0
color-convert@2.0.1:
dependencies:
color-name: 1.1.4
color-name@1.1.4: {}
debug@4.3.4(supports-color@8.1.1):
dependencies:
ms: 2.1.2
optionalDependencies:
supports-color: 8.1.1
decamelize@4.0.0: {}
deep-eql@5.0.1: {}
diff@5.0.0: {}
emoji-regex@8.0.0: {}
escalade@3.1.2: {}
escape-string-regexp@4.0.0: {}
fill-range@7.1.1:
dependencies:
to-regex-range: 5.0.1
find-up@5.0.0:
dependencies:
locate-path: 6.0.0
path-exists: 4.0.0
flat@5.0.2: {}
fs.realpath@1.0.0: {}
fsevents@2.3.3:
optional: true
get-caller-file@2.0.5: {}
get-func-name@2.0.2: {}
glob-parent@5.1.2:
dependencies:
is-glob: 4.0.3
glob@8.1.0:
dependencies:
fs.realpath: 1.0.0
inflight: 1.0.6
inherits: 2.0.4
minimatch: 5.0.1
once: 1.4.0
has-flag@4.0.0: {}
he@1.2.0: {}
inflight@1.0.6:
dependencies:
once: 1.4.0
wrappy: 1.0.2
inherits@2.0.4: {}
is-binary-path@2.1.0:
dependencies:
binary-extensions: 2.3.0
is-extglob@2.1.1: {}
is-fullwidth-code-point@3.0.0: {}
is-glob@4.0.3:
dependencies:
is-extglob: 2.1.1
is-number@7.0.0: {}
is-plain-obj@2.1.0: {}
is-unicode-supported@0.1.0: {}
js-yaml@4.1.0:
dependencies:
argparse: 2.0.1
locate-path@6.0.0:
dependencies:
p-locate: 5.0.0
log-symbols@4.1.0:
dependencies:
chalk: 4.1.2
is-unicode-supported: 0.1.0
loupe@3.1.1:
dependencies:
get-func-name: 2.0.2
minimatch@5.0.1:
dependencies:
brace-expansion: 2.0.1
mocha@10.4.0:
dependencies:
ansi-colors: 4.1.1
browser-stdout: 1.3.1
chokidar: 3.5.3
debug: 4.3.4(supports-color@8.1.1)
diff: 5.0.0
escape-string-regexp: 4.0.0
find-up: 5.0.0
glob: 8.1.0
he: 1.2.0
js-yaml: 4.1.0
log-symbols: 4.1.0
minimatch: 5.0.1
ms: 2.1.3
serialize-javascript: 6.0.0
strip-json-comments: 3.1.1
supports-color: 8.1.1
workerpool: 6.2.1
yargs: 16.2.0
yargs-parser: 20.2.4
yargs-unparser: 2.0.0
ms@2.1.2: {}
ms@2.1.3: {}
normalize-path@3.0.0: {}
once@1.4.0:
dependencies:
wrappy: 1.0.2
p-limit@3.1.0:
dependencies:
yocto-queue: 0.1.0
p-locate@5.0.0:
dependencies:
p-limit: 3.1.0
path-exists@4.0.0: {}
pathval@2.0.0: {}
picomatch@2.3.1: {}
randombytes@2.1.0:
dependencies:
safe-buffer: 5.2.1
readdirp@3.6.0:
dependencies:
picomatch: 2.3.1
require-directory@2.1.1: {}
safe-buffer@5.2.1: {}
serialize-javascript@6.0.0:
dependencies:
randombytes: 2.1.0
string-width@4.2.3:
dependencies:
emoji-regex: 8.0.0
is-fullwidth-code-point: 3.0.0
strip-ansi: 6.0.1
strip-ansi@6.0.1:
dependencies:
ansi-regex: 5.0.1
strip-json-comments@3.1.1: {}
supports-color@7.2.0:
dependencies:
has-flag: 4.0.0
supports-color@8.1.1:
dependencies:
has-flag: 4.0.0
to-regex-range@5.0.1:
dependencies:
is-number: 7.0.0
workerpool@6.2.1: {}
wrap-ansi@7.0.0:
dependencies:
ansi-styles: 4.3.0
string-width: 4.2.3
strip-ansi: 6.0.1
wrappy@1.0.2: {}
y18n@5.0.8: {}
yargs-parser@20.2.4: {}
yargs-unparser@2.0.0:
dependencies:
camelcase: 6.3.0
decamelize: 4.0.0
flat: 5.0.2
is-plain-obj: 2.1.0
yargs@16.2.0:
dependencies:
cliui: 7.0.4
escalade: 3.1.2
get-caller-file: 2.0.5
require-directory: 2.1.1
string-width: 4.2.3
y18n: 5.0.8
yargs-parser: 20.2.4
yocto-queue@0.1.0: {}

View File

@ -1,23 +0,0 @@
const regex = {
username: new RegExp(/^https:\/\/fansly\.com\/(?:live\/)?([^\/]+)/)
}
const normalize = (url) => {
if (!url) throw new Error('normalized received a null or undefined url.');
return fromUsername(fansly.regex.username.exec(url).at(1))
}
const fromUsername = (username) => `https://fansly.com/${username}`
const url = {
normalize,
fromUsername
}
const fansly = {
regex,
url
}
export default fansly

View File

@ -1,5 +0,0 @@
import fansly from './fansly.js'
export default {
fansly
}

View File

@ -7,8 +7,8 @@ import {
QueryClient, QueryClient,
useQuery, useQuery,
} from '@tanstack/react-query' } from '@tanstack/react-query'
import { format } from 'date-fns'
import Image from 'next/image'
import { import {
PaginationState, PaginationState,
useReactTable, useReactTable,
@ -39,28 +39,57 @@ export default function StreamsTable() {
const rerender = React.useReducer(() => ({}), {})[1] const rerender = React.useReducer(() => ({}), {})[1]
// name // image & name
// title // title
// platform // platform
// date // date & time
// archiveStatus // archiveStatus
const columns = React.useMemo<ColumnDef<IStream>[]>( const columns = React.useMemo<ColumnDef<IStream>[]>(
() => [ () => [
{ {
header: 'VTuber', header: 'VTuber',
accessorFn: d => d.attributes.vtuber.data?.attributes?.displayName, accessorFn: d => ({
displayName: d.attributes.vtuber.data?.attributes?.displayName,
image: d.attributes.vtuber.data?.attributes.image,
imageBlur: d.attributes.vtuber.data?.attributes.imageBlur
}),
cell: info => {
const { displayName, image, imageBlur } = info.getValue<{ displayName: string, image: string, imageBlur: string }>();
return (
<>
<div className="columns is-mobile">
<div className="column mr-0 is-flex-grow-0">
<figure className="image is-24x24">
<Image
className="is-rounded"
src={image}
alt={displayName}
placeholder="blur"
blurDataURL={imageBlur}
width={32}
height={32}
/>
</figure>
</div>
<div className="column ml-0">
<span>{displayName}</span>
</div>
</div>
</>
)
}
}, },
{ {
header: 'Date', header: 'Date',
accessorFn: d => new Date(d.attributes.date2).toISOString().split('T').at(0), accessorFn: d => format(new Date(d.attributes.date2), 'yyyy-MM-dd HH:mm'),
cell: info => <Link href={`/archive/${info.row.original.attributes.cuid}`}>{info.getValue() as string}</Link> cell: info => (<Link href={`/archive/${info.row.original.attributes.cuid}`}>{info.getValue() as string}</Link>)
}, },
{ {
header: 'Platform', header: 'Platform',
accessorFn: d => [ accessorFn: d => [
(d.attributes.isChaturbateStream && 'CB'), (d.attributes.isChaturbateStream && 'CB'),
(d.attributes.isFanslyStream && 'Fansly') (d.attributes.isFanslyStream && 'Fansly')
].filter(Boolean).join(', ') || '???' ].filter(Boolean).join(' ') || '???'
}, },
{ {
header: 'Status', header: 'Status',

View File

@ -321,7 +321,7 @@ export async function fetchStreamData({ pageIndex, pageSize }: { pageIndex: numb
const query = qs.stringify({ const query = qs.stringify({
populate: { populate: {
vtuber: { vtuber: {
fields: ['slug', 'displayName', 'publishedAt'] fields: ['slug', 'displayName', 'publishedAt', 'image', 'imageBlur']
} }
}, },
filters: { filters: {

View File

@ -1,10 +1,19 @@
# scout # scout
Watches an e-mail inbox for going live notifications Vtuber data acquisition.
## Features
* [x] Ingests going live notification e-mails
* [ ] Sends `startRecording` signals to @futureporn/capture
* [ ] Fetches vtuber data from platform
* [ ] image
* [ ] themeColor
* [x] displayName
* [ ] Platform Support
* [ ] fansly
* [ ] chaturbate
Support for
* [ ] Chaturbate
* [ ] Fansly
## Design requirements ## Design requirements
@ -18,7 +27,7 @@ Support for
* [ ] throws errors when unable to connect * [ ] throws errors when unable to connect
* [ ] runs browser headless * [ ] runs browser headless
* [ ] runs in the cloud * [ ] runs in the cloud
* [ ] runs in k8s cluster * [x] runs in k8s cluster

View File

@ -2,7 +2,7 @@
"name": "scout", "name": "scout",
"type": "module", "type": "module",
"version": "3.3.0", "version": "3.3.0",
"description": "detect when a stream goes live", "description": "vtuber data acquisition",
"main": "src/index.email.js", "main": "src/index.email.js",
"scripts": { "scripts": {
"test": "mocha", "test": "mocha",
@ -13,18 +13,26 @@
"author": "@CJ_Clippy", "author": "@CJ_Clippy",
"license": "Unlicense", "license": "Unlicense",
"dependencies": { "dependencies": {
"@aws-sdk/client-s3": "^3.583.0",
"@aws-sdk/lib-storage": "^3.588.0",
"@aws-sdk/s3-request-presigner": "^3.588.0",
"@book000/twitterts": "^0.62.50",
"@paralleldrive/cuid2": "^2.2.2",
"cheerio": "1.0.0-rc.12", "cheerio": "1.0.0-rc.12",
"common": "workspace:*",
"concurrently": "^8.2.2", "concurrently": "^8.2.2",
"date-fns": "^3.6.0", "date-fns": "^3.6.0",
"dotenv": "^16.4.5", "dotenv": "^16.4.5",
"fastq": "^1.17.1", "fastq": "^1.17.1",
"faye": "^1.4.0", "faye": "^1.4.0",
"htmlparser2": "^9.1.0",
"imapflow": "^1.0.160", "imapflow": "^1.0.160",
"limiter": "2.0.1", "limiter": "2.0.1",
"mailparser": "^3.7.1", "mailparser": "^3.7.1",
"node-vibrant": "3.2.1-alpha.1",
"p-retry": "^5.1.2",
"pg-pubsub": "workspace:*", "pg-pubsub": "workspace:*",
"qs": "^6.12.1", "qs": "^6.12.1",
"sharp": "^0.33.4",
"slugify": "^1.6.6", "slugify": "^1.6.6",
"xpath": "^0.0.34" "xpath": "^0.0.34"
}, },

File diff suppressed because it is too large.

View File

@ -0,0 +1,38 @@
import { download } from './utils.js';
import { getTmpFile } from './utils.js';
const regex = {
username: new RegExp(/^https:\/\/fansly\.com\/(?:live\/)?([^\/]+)/)
}
const normalize = (url) => {
if (!url) throw new Error('normalize received a null or undefined url.');
return fromUsername(fansly.regex.username.exec(url).at(1))
}
const fromUsername = (username) => `https://fansly.com/${username}`
const image = async function image (limiter, fanslyUserId) {
if (!limiter) throw new Error(`first arg passed to fansly.data.image must be a node-rate-limiter instance`);
if (!fanslyUserId) throw new Error(`second arg passed to fansly.data.image must be a {string} fanslyUserId`);
const url = `https://api.fansly.com/api/v1/account/${fanslyUserId}/avatar`
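// save the avatar to a uniquely named tmp file; download() resolves with the file path (or null if every retry fails)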
const filePath = getTmpFile('avatar.jpg')
return download({ filePath, limiter, url })
}
const url = {
normalize,
fromUsername
}
const data = {
image
}
const fansly = {
regex,
url,
data,
}
export default fansly

Binary file not shown (new image, 26 KiB).

View File

@ -0,0 +1,16 @@
// import ColorThief from 'colorthief'
import sharp from 'sharp'
export async function getProminentColor(imageFile) {
const { dominant } = await sharp(imageFile).stats();
const { r, g, b } = dominant;
return rgbToHex(r, g, b)
}
export function rgbToHex(r, g, b) {
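// pack r, g, b into one 24-bit integer; the leading 1 << 24 guarantees seven hex digits, so slice(1) leaves a zero-padded six-digit hex string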
return "#" + (1 << 24 | r << 16 | g << 8 | b).toString(16).slice(1);
}

View File

@ -0,0 +1,25 @@
import { getProminentColor, rgbToHex } from './image.js'
import { expect } from 'chai'
import { describe } from 'mocha'
import path from 'node:path'
describe('image', function () {
describe('getProminentColor', function () {
it('should get a {String} hex code color', async function () {
const color = await getProminentColor(path.join(import.meta.dirname, './fixtures/sample.webp'))
expect(color).to.equal('#0878a8')
})
})
describe('rgbToHex', function () {
it('should convert an rgb color to a {String} hexadecimal code', function () {
const mulberry = [255, 87, 51]
const screaminGreen = [77, 255, 106]
const amaranth = [227, 64, 81]
expect(rgbToHex(...mulberry)).to.equal('#ff5733')
expect(rgbToHex(...screaminGreen)).to.equal('#4dff6a')
expect(rgbToHex(...amaranth)).to.equal('#e34051')
})
})
})

View File

@ -18,14 +18,14 @@ async function handleMessage({email, msg}) {
const body = await email.loadMessage(msg.uid) const body = await email.loadMessage(msg.uid)
console.log(' ✏️ checking e-mail') console.log(' ✏️ checking e-mail')
const { isMatch, url, platform, channel, displayName, date, userId } = (await checkEmail(body)) const { isMatch, url, platform, channel, displayName, date, userId, avatar } = (await checkEmail(body))
if (isMatch) { if (isMatch) {
console.log(' ✏️✏️ signalling realtime') console.log(' ✏️✏️ signalling realtime')
await signalRealtime({ url, platform, channel, displayName, date, userId }) await signalRealtime({ url, platform, channel, displayName, date, userId, avatar })
console.log(' ✏️✏️ creating stream entry in db') console.log(' ✏️✏️ creating stream entry in db')
await createStreamInDb({ source: 'email', platform, channel, date, url, userId }) await createStreamInDb({ source: 'email', platform, channel, date, url, userId, avatar })
} }
console.log(' ✏️ archiving e-mail') console.log(' ✏️ archiving e-mail')

packages/scout/src/s3.js (new file, 59 lines)
View File

@ -0,0 +1,59 @@
import dotenv from 'dotenv'
dotenv.config({
path: '../../.env'
})
import { S3Client } from "@aws-sdk/client-s3"
import { Upload } from "@aws-sdk/lib-storage"
// import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
import { createId } from '@paralleldrive/cuid2'
import { basename } from 'node:path'
import fs from 'node:fs'
if (!process.env.S3_BUCKET_NAME) throw new Error('S3_BUCKET_NAME was undefined in env');
if (!process.env.S3_BUCKET_KEY_ID) throw new Error('S3_BUCKET_KEY_ID was undefined in env');
if (!process.env.S3_BUCKET_APPLICATION_KEY) throw new Error('S3_BUCKET_APPLICATION_KEY was undefined in env');
export async function uploadFile(filePath) {
if (!filePath) throw new Error("first argument, 'filePath' is undefined");
const client = new S3Client({
endpoint: 'https://s3.us-west-000.backblazeb2.com',
region:'us-west-000',
credentials:{
accessKeyId: process.env.S3_BUCKET_KEY_ID,
secretAccessKey: process.env.S3_BUCKET_APPLICATION_KEY
}
});
const target = {
Bucket: process.env.S3_BUCKET_NAME,
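// prefix the object key with a cuid so repeat uploads of the same filename never collide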
Key: `${createId()}-${basename(filePath)}`,
Body: fs.createReadStream(filePath)
}
// greets https://stackoverflow.com/a/70159394/1004931
try {
const parallelUploads3 = new Upload({
client: client,
//tags: [...], // optional tags
queueSize: 4, // optional concurrency configuration
leavePartsOnError: false, // optional manually handle dropped parts
params: target,
});
// parallelUploads3.on("httpUploadProgress", (progress) => {
// console.log(progress);
// });
const res = await parallelUploads3.done();
return res
} catch (e) {
console.error(`while uploading a file to s3, we encountered an error`)
throw new Error(e);
}
}

View File

@ -0,0 +1,19 @@
import { describe } from 'mocha'
import { expect } from 'chai';
import { uploadFile } from './s3.js'
import path from 'node:path'
describe('s3', function () {
this.timeout(1000*30)
describe('uploadFile', function () {
it('should upload a file to specified bucket and return the storage data', async function () {
const data = await uploadFile(path.join(import.meta.dirname, './fixtures/sample.webp'))
expect(data).to.have.property('VersionId')
expect(data).to.have.property('ETag')
expect(data).to.have.property('Bucket')
expect(data).to.have.property('Key')
expect(data).to.have.property('Location')
expect(data).to.have.property('$metadata')
})
})
})

View File

@ -4,7 +4,10 @@ import 'dotenv/config'
import { PgPubSub } from 'pg-pubsub' import { PgPubSub } from 'pg-pubsub'
import qs from 'qs' import qs from 'qs'
import { subMinutes, addMinutes } from 'date-fns' import { subMinutes, addMinutes } from 'date-fns'
import slugify from 'slugify' import { fpSlugify, download } from './utils.js'
import { getProminentColor } from './image.js'
import { RateLimiter } from "limiter"
import { getImage } from './vtuber.js'
import fansly from './fansly.js'
import * as s3 from './s3.js'
// alternative js libraries for postgres notify/wait // alternative js libraries for postgres notify/wait
// * https://github.com/imqueue/pg-pubsub // * https://github.com/imqueue/pg-pubsub
@ -15,6 +18,7 @@ if (!process.env.SCOUT_STRAPI_API_KEY) throw new Error('SCOUT_STRAPI_API_KEY is
if (!process.env.STRAPI_URL) throw new Error('STRAPI_URL is missing from env'); if (!process.env.STRAPI_URL) throw new Error('STRAPI_URL is missing from env');
if (!process.env.SCOUT_RECENTS_TOKEN) throw new Error('SCOUT_RECENTS_TOKEN is undefined in env'); if (!process.env.SCOUT_RECENTS_TOKEN) throw new Error('SCOUT_RECENTS_TOKEN is undefined in env');
if (!process.env.POSTGRES_REALTIME_CONNECTION_STRING) throw new Error('POSTGRES_REALTIME_CONNECTION_STRING is undefined in env'); if (!process.env.POSTGRES_REALTIME_CONNECTION_STRING) throw new Error('POSTGRES_REALTIME_CONNECTION_STRING is undefined in env');
if (!process.env.CDN_BUCKET_URL) throw new Error('CDN_BUCKET_URL is undefined in env');
console.log(`process.env.POSTGRES_REALTIME_CONNECTION_STRING=${process.env.POSTGRES_REALTIME_CONNECTION_STRING}`) console.log(`process.env.POSTGRES_REALTIME_CONNECTION_STRING=${process.env.POSTGRES_REALTIME_CONNECTION_STRING}`)
@ -52,9 +56,10 @@ export async function signalRealtime ({ url, platform, channel, displayName, dat
*/ */
export async function createStreamInDb ({ source, platform, channel, date, url, userId }) { export async function createStreamInDb ({ source, platform, channel, date, url, userId }) {
const limiter = new RateLimiter({ tokensPerInterval: 0.3, interval: "second" });
let vtuberId, streamId let vtuberId, streamId
console.log('>># Step 1') console.log('>> # Step 1')
// # Step 1. // # Step 1.
// First we find or create the vtuber // First we find or create the vtuber
// The vtuber may already be in the db, so we look for that record. All we need is the Vtuber ID. // The vtuber may already be in the db, so we look for that record. All we need is the Vtuber ID.
@ -98,6 +103,43 @@ export async function createStreamInDb ({ source, platform, channel, date, url,
if (!vtuberId) { if (!vtuberId) {
console.log('>> vtuberId was not found so we create') console.log('>> vtuberId was not found so we create')
/**
* We are creating a vtuber record.
* We need a few things.
* * image URL
* * themeColor
*
* To get an image, we have to do a few things.
* * [x] download image from platform
* * [x] get themeColor from image
* * [x] upload image to b2
* * [x] get B2 cdn link to image
*
* To get themeColor, we need the image locally where we can then run
*/
// download image from platform
// vtuber.getImage expects a vtuber object, which we don't have yet, so we create a dummy one
const dummyVtuber = {
attributes: {
slug: fpSlugify(channel),
fansly: fansly.url.fromUsername(channel),
fanslyId: (platform === 'fansly') ? userId : null
}
}
const platformImageUrl = await getImage(limiter, dummyVtuber)
const imageFile = await download({ limiter, url: platformImageUrl })
// get themeColor from image
const themeColor = await getProminentColor(imageFile)
// upload image to b2
const b2FileData = await s3.uploadFile(imageFile)
// get b2 cdn link to image
const imageCdnLink = `https://${process.env.CDN_BUCKET_URL}/${b2FileData.Key}`
const createVtuberRes = await fetch(`${process.env.STRAPI_URL}/api/vtubers`, { const createVtuberRes = await fetch(`${process.env.STRAPI_URL}/api/vtubers`, {
method: 'POST', method: 'POST',
headers: { headers: {
@ -110,10 +152,10 @@ export async function createStreamInDb ({ source, platform, channel, date, url,
fansly: (platform === 'fansly') ? url : null, fansly: (platform === 'fansly') ? url : null,
fanslyId: (platform === 'fansly') ? userId : null, fanslyId: (platform === 'fansly') ? userId : null,
chaturbate: (platform === 'chaturbate') ? url : null, chaturbate: (platform === 'chaturbate') ? url : null,
slug: slugify(channel), slug: fpSlugify(channel),
description1: ' ', description1: ' ',
image: 'https://futureporn-b2.b-cdn.net/200x200.png', image: imageCdnLink,
themeColor: '#dde1ec' themeColor: themeColor || '#dde1ec'
} }
}) })
}) })
@ -168,22 +210,6 @@ export async function createStreamInDb ({ source, platform, channel, date, url,
// For now, the rule is 30 minutes of separation. // For now, the rule is 30 minutes of separation.
// Anything <=30m is interpreted as the same stream. Anything >30m is interpreted as a different stream. // Anything <=30m is interpreted as the same stream. Anything >30m is interpreted as a different stream.
// If the stream is not in the db, we create the stream record // If the stream is not in the db, we create the stream record
// qs.stringify({
// populate: '*',
// filters: {
// isFanslyStream: {
// "$eq": true
// },
// }
// }, {
// encode: false
// })
const dateSinceRange = subMinutes(new Date(date), 30) const dateSinceRange = subMinutes(new Date(date), 30)
const dateUntilRange = addMinutes(new Date(date), 30) const dateUntilRange = addMinutes(new Date(date), 30)
console.log(`Find a stream within + or - 30 mins of the notif date=${new Date(date).toISOString()}. dateSinceRange=${dateSinceRange.toISOString()}, dateUntilRange=${dateUntilRange.toISOString()}`) console.log(`Find a stream within + or - 30 mins of the notif date=${new Date(date).toISOString()}. dateSinceRange=${dateSinceRange.toISOString()}, dateUntilRange=${dateUntilRange.toISOString()}`)
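A minimal sketch (not part of this commit) of how that ±30-minute window could be expressed as a Strapi REST filter using the dateSinceRange and dateUntilRange values computed above. The qs usage, STRAPI_URL, and SCOUT_STRAPI_API_KEY follow the rest of this file, but the exact filter shape and the date2 field are assumptions about the query this hunk elides.

// sketch only: look for an existing stream within ±30 minutes of the notification date
const windowQuery = qs.stringify({
  filters: {
    date2: {
      $gte: dateSinceRange.toISOString(), // no earlier than 30 minutes before
      $lte: dateUntilRange.toISOString()  // no later than 30 minutes after
    }
  }
}, { encode: false })
// const findRes = await fetch(`${process.env.STRAPI_URL}/api/streams?${windowQuery}`, {
//   headers: { 'Authorization': `Bearer ${process.env.SCOUT_STRAPI_API_KEY}` }
// })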
@ -246,7 +272,6 @@ export async function createStreamInDb ({ source, platform, channel, date, url,
if (updateStreamJson?.error) throw new Error(updateStreamJson); if (updateStreamJson?.error) throw new Error(updateStreamJson);
console.log(`>> assuming a successful update to the stream record. response as follows.`) console.log(`>> assuming a successful update to the stream record. response as follows.`)
console.log(JSON.stringify(updateStreamJson, null, 2)) console.log(JSON.stringify(updateStreamJson, null, 2))
} }
if (!streamId) { if (!streamId) {

View File

@ -0,0 +1,66 @@
import * as htmlparser2 from "htmlparser2";
import { load } from 'cheerio'
import { download } from './utils.js';
import pRetry, { AbortError } from 'p-retry';
if (!process.env.SCOUT_NITTER_ACCESS_KEY) throw new Error('SCOUT_NITTER_ACCESS_KEY was undefined in env');
if (!process.env.SCOUT_NITTER_URL) throw new Error('SCOUT_NITTER_URL was undefined in env');
const regex = {
username: new RegExp(/https:\/\/(?:twitter\.com|x\.com)\/([a-zA-Z0-9_]+)/i)
}
const normalize = (url) => {
if (!url) throw new Error('normalize received a null or undefined url.');
return fromUsername(twitter.regex.username.exec(url).at(1))
}
const image = async function image (limiter, twitterUsername) {
if (!limiter) throw new Error('first arg to twitter.data.image must be an instance of node-rate-limiter');
if (!twitterUsername) throw new Error('second arg to twitter.data.image must be a twitterUsername. It was undefined.');
const requestDataFromNitter = async () => {
const url = `${process.env.SCOUT_NITTER_URL}/${twitterUsername}/rss?key=${process.env.SCOUT_NITTER_ACCESS_KEY}`
// console.log(`fetching from url=${url}`)
const response = await fetch(url);
// Abort retrying if the resource doesn't exist
if (response.status === 404) {
throw new AbortError(response.statusText);
}
return response.text();
}
const body = await pRetry(requestDataFromNitter, { retries: 5 });
try {
const dom = htmlparser2.parseDocument(body);
const $ = load(dom, { _useHtmlParser2: true })
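// the profile avatar is taken to be the first <url> element in the feed that contains 'profile_images'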
const urls = $('url:contains("profile_images")').first()
const downloadedImageFile = await download({ limiter, url: urls.text() })
return downloadedImageFile
} catch (e) {
console.error(`while fetching rss from nitter, the following error was encountered.`)
console.error(e)
}
}
const fromUsername = (username) => `https://x.com/${username}`
const url = {
normalize,
fromUsername
}
const data = {
image
}
const twitter = {
regex,
url,
data,
}
export default twitter

View File

@ -0,0 +1,30 @@
import { expect } from 'chai'
import twitter from './twitter.js'
import { describe } from 'mocha'
import { tmpFileRegex } from './utils.js'
import { RateLimiter } from 'limiter'
describe('twitter', function () {
describe('regex', function () {
describe('username', function () {
it('should get the username of the channel', function () {
expect(twitter.regex.username.exec('https://twitter.com/18Plus').at(1)).to.equal('18Plus')
expect(twitter.regex.username.exec('https://twitter.com/projektmelody').at(1)).to.equal('projektmelody')
expect(twitter.regex.username.exec('https://twitter.com/GoodKittenVR').at(1)).to.equal('GoodKittenVR')
expect(twitter.regex.username.exec('https://x.com/projektmelody').at(1)).to.equal('projektmelody')
expect(twitter.regex.username.exec('https://x.com/18Plus').at(1)).to.equal('18Plus')
expect(twitter.regex.username.exec('https://x.com/GoodKittenVR').at(1)).to.equal('GoodKittenVR')
})
})
})
describe('data', function () {
this.timeout(1000*30)
const limiter = new RateLimiter({ tokensPerInterval: 10, interval: "second" })
describe('image', function () {
it("should download the twitter users's avatar and save it to disk", async function () {
const imgFile = await twitter.data.image(limiter, 'projektmelody')
expect(imgFile).to.match(tmpFileRegex)
})
})
})
})

packages/scout/src/ua.js (new file, 1 line)
View File

@ -0,0 +1 @@
export const ua0 = 'Mozilla/5.0 (X11; Linux x86_64; rv:105.0) Gecko/20100101 Firefox/105.0'

View File

@ -0,0 +1,70 @@
import slugify from 'slugify'
import path, { basename } from 'node:path'
import os from 'node:os'
import fs from 'node:fs'
import { createId } from '@paralleldrive/cuid2'
import { ua0 } from './ua.js'
import { Readable } from 'stream'
import { finished } from 'stream/promises'
import pRetry, { AbortError } from 'p-retry'
export function fpSlugify(str) {
return slugify(str, {
lower: true,
strict: true,
locale: 'en',
trim: true
})
}
export function getTmpFile(str) {
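// returns a unique path like <tmpdir>/<cuid>_<basename> so concurrent downloads never clobber each other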
return path.join(os.tmpdir(), `${createId()}_${basename(str)}`)
}
/**
*
* @param {Object} limiter [node-rate-limiter](https://github.com/jhurliman/node-rate-limiter) instance
* @param {String} url
* @returns {String} filePath
*
* greetz https://stackoverflow.com/a/74722818/1004931
*/
export async function download({ limiter, url, filePath }) {
if (!limiter) throw new Error(`first arg passed to download() must be a node-rate-limiter instance.`);
if (!url) throw new Error(`second arg passed to download() must be a {string} url`);
const fileBaseName = basename(url)
filePath = filePath || path.join(os.tmpdir(), `${createId()}_${fileBaseName}`)
const stream = fs.createWriteStream(filePath)
await limiter.removeTokens(1);
const requestData = async () => {
const response = await fetch(url, {
headers: {
'user-agent': ua0
}
})
// Abort retrying if the resource doesn't exist (check before piping the body to disk)
if (response.status === 404) {
throw new AbortError(response.statusText);
}
const { body } = response
await finished(Readable.fromWeb(body).pipe(stream))
return
}
try {
await pRetry(requestData, { retries: 3 })
} catch (e) {
console.error(`utils.download failed to download ${url}`)
console.error(e)
return null
}
return filePath
}
export const tmpFileRegex = /^\/tmp\/.*\.jpg$/

View File

@ -0,0 +1,27 @@
import { fpSlugify, getTmpFile, download } from './utils.js'
import { expect } from 'chai'
import { describe } from 'mocha'
import { RateLimiter } from "limiter"
describe('utils', function () {
describe('fpSlugify', function () {
it('should lowercase the string and remove spaces and special characters', function () {
expect(fpSlugify('ProjektMelody')).to.equal('projektmelody')
expect(fpSlugify('CJ_Clippy')).to.equal('cjclippy')
})
})
describe('getTmpFile', function () {
it('should give a /tmp/<random>_<basename> path', function () {
expect(getTmpFile('my-cool-image.webp')).to.match(/\/tmp\/.*_my-cool-image\.webp/)
expect(getTmpFile('video.mp4')).to.match(/\/tmp\/.*_video\.mp4/)
})
})
describe('download', function () {
const limiter = new RateLimiter({ tokensPerInterval: 100, interval: "second" })
it('should get the file', async function () {
const file = await download({ limiter, url: 'https://futureporn-b2.b-cdn.net/sample.webp' })
expect(file).to.match(/\/tmp\/.*sample\.webp$/)
})
})
})

View File

@ -0,0 +1,47 @@
import dotenv from 'dotenv'
dotenv.config({
path: '../../.env'
})
import twitter from './twitter.js'
import fansly from './fansly.js'
/**
* Acquire a vtuber image from the www
*
* Sources preference
* 1. Twitter
* 2. Fansly
*
* Our task is to download an avatar image of the vtuber.
* A slug is good for pulling a record from the database. From there, we can see any social medias such as Twitter or Fansly.
* Twitter is preferred.
*
* We depend on one of these social media URLs. If there is neither Twitter or fansly listed, we throw an error.
*
* @param {Object} limiter -- instance of node-rate-limiter
* @param {Object} vtuber -- vtuber instance from strapi
* @returns {String} filePath -- path on disk where the image was saved
*/
export async function getImage(limiter, vtuber) {
if (!limiter) throw new Error('first arg must be a node-rate-limiter instance');
if (!vtuber) throw new Error('second arg must be vtuber instance');
await limiter.removeTokens(1);
const { twitter: twitterUrl, fanslyId: fanslyId } = vtuber.attributes
const twitterUsername = twitterUrl && twitter.regex.username.exec(twitterUrl).at(1)
let img;
if (twitterUrl) {
img = await twitter.data.image(limiter, twitterUsername)
} else if (fanslyId) {
img = await fansly.data.image(limiter, fanslyId)
} else {
const msg = 'while attempting to get vtuber image, there was neither twitter nor fansly listed. One of these must exist for us to download an image.'
console.error(msg)
throw new Error(msg)
}
return img
}

View File

@ -0,0 +1,38 @@
import { expect } from 'chai'
import { describe } from 'mocha'
import { RateLimiter } from 'limiter'
import { getImage } from './vtuber.js'
import { tmpFileRegex } from './utils.js'
const vtuberFixture0 = {
id: 0,
attributes: {
slug: 'projektmelody',
twitter: 'https://x.com/projektmelody',
fansly: 'https://fansly.com/projektmelody'
}
}
const vtuberFixture1 = {
id: 0,
attributes: {
slug: 'projektmelody',
twitter: undefined,
fanslyId: '284824898138812416'
}
}
describe('vtuber', function () {
this.timeout(1000*60)
describe('getImage', function () {
const limiter = new RateLimiter({ tokensPerInterval: 1, interval: "second" })
it('should download an avatar image from twitter', async function () {
const file = await getImage(limiter, vtuberFixture0)
expect(file).to.match(tmpFileRegex)
})
it('should download an avatar image from fansly', async function () {
const file = await getImage(limiter, vtuberFixture1)
expect(file).to.match(tmpFileRegex)
})
})
})

View File

@ -20,6 +20,14 @@ module.exports = {
}, },
async afterUpdate(event) { async afterUpdate(event) {
/**
* NOTE
*
* These hooks do not fire in response to API calls. They only fire in response to UI saves.
*/
console.log(`>>>>>>>>>>>>>> STREAM is afterUpdate !!!!!!!!!!!!`); console.log(`>>>>>>>>>>>>>> STREAM is afterUpdate !!!!!!!!!!!!`);
const { data, where, select, populate } = event.params; const { data, where, select, populate } = event.params;
console.log(data); console.log(data);

View File

@ -39,6 +39,7 @@ kubectl --namespace futureporn create secret generic scout \
--from-literal=imapUsername=${SCOUT_IMAP_USERNAME} \ --from-literal=imapUsername=${SCOUT_IMAP_USERNAME} \
--from-literal=imapPassword=${SCOUT_IMAP_PASSWORD} \ --from-literal=imapPassword=${SCOUT_IMAP_PASSWORD} \
--from-literal=imapAccessToken=${SCOUT_IMAP_ACCESS_TOKEN} \ --from-literal=imapAccessToken=${SCOUT_IMAP_ACCESS_TOKEN} \
--from-literal=nitterAccessKey=${SCOUT_NITTER_ACCESS_KEY}
kubectl --namespace futureporn delete secret link2cid --ignore-not-found kubectl --namespace futureporn delete secret link2cid --ignore-not-found
kubectl --namespace futureporn create secret generic link2cid \ kubectl --namespace futureporn create secret generic link2cid \
@ -85,3 +86,4 @@ kubectl --namespace futureporn create secret generic strapi \
kubectl --namespace futureporn delete secret realtime --ignore-not-found kubectl --namespace futureporn delete secret realtime --ignore-not-found
kubectl --namespace futureporn create secret generic realtime \ kubectl --namespace futureporn create secret generic realtime \
--from-literal=postgresRealtimeConnectionString=${POSTGRES_REALTIME_CONNECTION_STRING} --from-literal=postgresRealtimeConnectionString=${POSTGRES_REALTIME_CONNECTION_STRING}

View File

@ -24,7 +24,23 @@ dotenv(fn='.env')
# args=['--default_config_file=%s' % os.getenv('TILT_NGROK_DEFAULT_CONFIG_FILE')] # args=['--default_config_file=%s' % os.getenv('TILT_NGROK_DEFAULT_CONFIG_FILE')]
# load('ext://helm_remote', 'helm_remote') load('ext://helm_remote', 'helm_remote')
# helm_remote(
# 'redis',
# repo_name='redis',
# repo_url='https://charts.bitnami.com/bitnami',
# namespace='futureporn',
# version='19.5.0',
# values=['./charts/nitter/redis.values.yaml']
# )
# helm_remote(
# 'nitter',
# repo_name='truecharts',
# repo_url='https://charts.truecharts.org',
# namespace='futureporn',
# version='7.1.4',
# )
# helm_remote( # helm_remote(
# 'frp-operator', # 'frp-operator',
# repo_name='frp-operator', # repo_name='frp-operator',
@ -45,6 +61,11 @@ dotenv(fn='.env')
# ) # )
# k8s_yaml(helm(
# './charts/nitter',
# values=['./charts/nitter/values.yaml'],
# ))
k8s_yaml(helm( k8s_yaml(helm(
'./charts/fp', './charts/fp',
values=['./charts/fp/values-dev.yaml'], values=['./charts/fp/values-dev.yaml'],
@ -187,8 +208,12 @@ k8s_resource(
port_forwards=['5432'] port_forwards=['5432']
) )
k8s_resource( # k8s_resource(
workload='pgadmin', # workload='pgadmin',
port_forwards=['5050'] # port_forwards=['5050']
) # )
# k8s_resource(
# workload='nitter',
# port_forwards=['6060:10606'],
# )