Compare commits
No commits in common. "a5433e7bd5aa76fcc5c7cf861dccaac9cd03ac1d" and "35a15fcf634bbf73119eabacab55936fb253ddea" have entirely different histories.
a5433e7bd5 ... 35a15fcf63
@@ -1,6 +1,4 @@
-# futureporn codename "our"
+# futureporn
 
-**DEPRECATED. Superceded by services/pocketbase and services/worker**
-
 https://future.porn
 
@@ -47,7 +45,7 @@ Start node app in dev mode. Env vars must be available to the app-- We're using
 * [x] VTubers
 * [ ] Tags
 * [ ] Toys
-* [ ] Monetized affiliate links (https://www.viglink.com maybe?)
+* [ ] Monetized affiliate links
 
 ## Tiers & Privs
 
@@ -1,6 +1,6 @@
 {
 "name": "futureporn",
-"version": "4.0.0",
+"version": "3.5.0",
 "private": true,
 "description": "Dedication to the preservation of lewdtuber history",
 "license": "Unlicense",
@@ -98,7 +98,7 @@ onFileDownloadRequest((event) => {
 // Then serve a 302 redirect instead of serving the file proxied thru PB
 
 const path = event.servedPath;
-const expires = Math.round(Date.now() / 1000) + 7 * 24 * 3600; // 7 days
+const expires = Math.round(Date.now() / 1000) + 3600;
 const signedUrl = signUrlCool(securityKey, baseUrl, path, rawQuery, expires);
 // console.log(`rawQUery`, rawQuery, 'path', path);
 // console.log(`signedUrl=${signedUrl}`);
@@ -83,8 +83,8 @@
 <p>To watch a vod, please log in.</p>
 </div>
 <% } %>
-<div class="notification is-success">
-<p>Torrents are in the process of being added to the site.</p>
+<div class="notification is-info">
+<p>Torrent downloads are coming soon.</p>
 </div>
 <div class="notification">
 <p>Futureporn is no longer using IPFS. <a target="_blank" href="https://www.patreon.com/posts/143616210">Read more</a></p>
@@ -41,13 +41,11 @@
 <!-- Player toggle buttons -->
 <nav class="level mt-5">
 <div class="level-left">
-<% if (data?.vod?.get('muxAssetId')) { %>
 <% if (data?.user?.get('patron')) { %>
 <button class="button is-success" data-on-click="$selected = 'cdn1'">CDN1 player</button>
 <% } else { %>
 <button disabled class="button is-danger">CDN1 player (patrons only)</button>
 <% } %>
-<% } %>
 <button class="button is-success" data-on-click="$selected = 'cdn2'">CDN2 player</button>
 </div>
 </nav>
@@ -79,10 +77,6 @@
 </p>
 <% } %>
 
-<% if (data.vod?.get('torrent')) { %>
-<p><b id="torrent">Torrent:</b> <a target="_blank" href="<%= data.vod?.get('torrent')%>">download</a></p>
-<% } %>
-
 <% if (data.vod?.get('magnetLink')) { %>
 <p><b id="magnet-link">Magnet Link:</b> <a target="_blank" href="<%= data.vod?.get('magnetLink')%>"><span class="icon"><svg xmlns="http://www.w3.org/2000/svg" width="32" height="32" viewBox="0 0 32 32">
 <g fill="none">
@@ -6,8 +6,6 @@
 <th>Stream Date</th>
 <th>VTuber</th>
 <th>Thumbnail</th>
-<th>Torrent</th>
-<th>Magnet Link</th>
 </tr>
 </thead>
 <tbody>
@@ -39,30 +37,6 @@
 <span>No thumbnail</span>
 <% } %>
 </td>
-<td>
-<% if (vod?.torrent) { %>
-
-<!-- /api/files/collectionIdOrName/recordId/filename -->
-<a target="_blank" href="/api/files/vods/<%= vod?.id %>/<%= vod?.torrent %>">
-<span class="icon">
-<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24">
-<path fill="currentColor" d="M5 20h14v-2H5zM19 9h-4V3H9v6H5l7 7z" />
-</svg>
-</span></a>
-<% } %>
-</td>
-<td>
-<% if (vod?.magnetLink) { %>
-<a target="_blank" href="<%= vod?.magnetLink %>">
-<span class="icon"><svg xmlns="http://www.w3.org/2000/svg" width="32" height="32" viewBox="0 0 32 32">
-<g fill="none">
-<path fill="#d3d3d3" d="M11 23v6.06c0 .52-.42.94-.94.94H3.94c-.52 0-.94-.42-.94-.94V23l4.028-2.152zm18 0v6.06c0 .52-.42.94-.94.94h-6.12c-.52 0-.94-.42-.94-.94V23l3.99-2.152z" />
-<path fill="#f8312f" d="M11 23v-7.94c0-2.75 2.2-5.04 4.95-5.06c2.78-.03 5.05 2.23 5.05 5v8h8v-8c0-7.18-5.82-13-13-13S3 7.82 3 15v8z" />
-</g>
-</svg>
-</span></a>
-<% } %>
-</td>
 </tr>
 <% } %>
 </tbody>
@@ -1,22 +1,18 @@
 <%#
 index.ejs — RSS Feed Generator (RSS 2.0)
-Expects: data.vods = [{ id, title, notes, thumbnail, streamDate, magnetLink }]
-When in doubt, copy https://nyaa.si/?page=rss
+Expects: data.vods = [{ id, title, notes, thumbnail, streamDate }]
 %>
 
 <%
-response.header("Content-Type", "application/rss+xml");
+response.header("Content-Type", "application/rss+xml")
 
+// Build RSS XML as a string
 let rss = `<?xml version="1.0" encoding="utf-8"?>
-<rss version="2.0"
-xmlns:dc="http://purl.org/dc/elements/1.1/"
-xmlns:content="http://purl.org/rss/1.0/modules/content/"
-xmlns:torrent="http://xmlns.ezrss.it/0.1/">
-
+<rss version="2.0" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:content="http://purl.org/rss/1.0/modules/content/">
 <channel>
 <title>Futureporn.net VODs</title>
 <link>https://futureporn.net</link>
-<description>Dedication to the preservation of lewdtuber history</description>
+<description>Dedication to the preservaton of lewdtuber history</description>
 <lastBuildDate>${new Date().toUTCString()}</lastBuildDate>
 <docs>https://validator.w3.org/feed/docs/rss2.html</docs>
 <generator>pocketpages</generator>
@@ -28,40 +24,24 @@ let rss = `<?xml version="1.0" encoding="utf-8"?>
 </image>`;
 
 for (const vod of data.vods) {
-const vodId = vod.get('id');
-const title =
-(vod?.get('expand')?.vtubers?.map(v => v.get('displayName')).join(', '))
-|| vod.get('streamDate');
-
-const vodUrl = `${env('ORIGIN')}/vods/${vodId}`;
-const pubDate = new Date(vod.get('streamDate')).toUTCString();
-const notes = vod.get('notes');
-const thumbnail = vod.get('thumbnail');
-const magnetLink = vod.get('magnetLink');
-
-// @TODO the <link> must contain a .torrent, NOT a magnetLink.
+const url = `${env('ORIGIN')}/vods/${vod.id}`;
 rss += `
 <item>
-<title><![CDATA[${title}]]></title>
-<comments>${vodUrl}</comments>
-<link>${vodUrl}</link>
-<guid isPermaLink="true">${vodUrl}</guid>
-<pubDate>${pubDate}</pubDate>`;
-
-// Description
-if (notes) {
+<title><![CDATA[${
+((vod?.get('expand')?.vtubers?.map(vt => vt.get('displayName')) || []).join(', ')
+|| vod.get('streamDate'))
+}]]></title>
+<link>${url}</link>
+<guid>${url}</guid>
+<pubDate>${new Date(vod.get('streamDate')).toUTCString()}</pubDate>`;
+if (vod.get('notes')) {
 rss += `
-<description><![CDATA[<a href="${vodUrl}">${vodUrl}</a> ${notes}]]></description>`;
+<description><![CDATA[${vod.get('notes')}]]></description>`;
 }
-// Thumbnail (custom tag — allowed)
-if (thumbnail) {
-rss += `
-<image_link>${env('ORIGIN')}/api/files/vods/${vodId}/${thumbnail}</image_link>`;
+if (vod.get('thumbnail')) {
+rss += `<image_link>${env('ORIGIN')}/api/files/vods/${vod.get('id')}/${vod.get('thumbnail')}</image_link>`
 }
 
 rss += `
 </item>`;
 }
@@ -70,5 +50,6 @@ rss += `
 </channel>
 </rss>`;
 
+// Send the RSS
 response.html(200, rss);
 %>
@@ -62,5 +62,12 @@ function signUrl(url, securityKey, expirationTime = 103600, userIp, isDirectory
 }
 }
 
+const securityKey = 'd5814175-cc56-4098-ae63-1096301fb3c1';
+const sampleUrl = 'https://fppbdev.b-cdn.net/pbc_3872109612/z0bpy5cwxi1uksv/g4ot5_omb_qaei_o7_y_tuzdlcn1se.jpeg';
+// const expires = Math.round(Date.now() / 1000) + 3600;
+const expires = 3524904301;
+const signedUrl = signUrl(sampleUrl, securityKey, expires, null, false, '');
+
+console.log(`signedUrl=${signedUrl}`);
 
 module.exports = { signUrl };
@@ -1,5 +1,3 @@
-import envPaths from 'env-paths';
-const paths = envPaths('futureporn', { suffix: '' });
 
 const env = (() => {
 if (!process.env.POCKETBASE_URL) throw new Error('POCKETBASE_URL missing in env');
@@ -29,6 +27,7 @@ const env = (() => {
 if (!process.env.FANSLY_PASSWORD) throw new Error('FANSLY_PASSWORD missing in env');
 if (!process.env.APIFY_TOKEN) throw new Error('APIFY_TOKEN missing in env');
 if (!process.env.NODE_ENV) throw new Error('APIFY_TOKEN missing in env');
+if (!process.env.CACHE_ROOT) throw new Error('CACHE_ROOT missing in env');
 if (!process.env.SEEDBOX_SFTP_HOST) throw new Error('SEEDBOX_SFTP_HOST missing in env');
 if (!process.env.SEEDBOX_SFTP_PORT) throw new Error('SEEDBOX_SFTP_PORT missing in env');
 if (!process.env.SEEDBOX_SFTP_USERNAME) throw new Error('SEEDBOX_SFTP_USERNAME missing in env');
@@ -38,10 +37,6 @@ const env = (() => {
 if (!process.env.QBT_PASSWORD) throw new Error('QBT_PASSWORD missing in env');
 if (!process.env.QBT_USERNAME) throw new Error('QBT_USERNAME missing in env');
 if (!process.env.WORKER_WORKERS) throw new Error('WORKER_WORKERS missing in env');
-if (!process.env.BUNNY_ZONE_URL) throw new Error('BUNNY_ZONE_URL missing in env');
-if (!process.env.BUNNY_TOKEN_KEY) throw new Error('BUNNY_TOKEN_KEY missing in env');
 
-const CACHE_ROOT = process.env?.CACHE_ROOT || paths.cache;
-
 const {
 PORT,
@@ -70,6 +65,7 @@ const env = (() => {
 FANSLY_PASSWORD,
 APIFY_TOKEN,
 NODE_ENV,
+CACHE_ROOT,
 SEEDBOX_SFTP_HOST,
 SEEDBOX_SFTP_PORT,
 SEEDBOX_SFTP_USERNAME,
@@ -80,11 +76,8 @@ const env = (() => {
 QBT_PASSWORD,
 QBT_PORT,
 WORKER_WORKERS,
-BUNNY_ZONE_URL,
-BUNNY_TOKEN_KEY,
 } = process.env
 return {
-CACHE_ROOT,
 PORT,
 WORKER_PORT,
 POCKETBASE_URL,
@@ -111,6 +104,7 @@ const env = (() => {
 FANSLY_USERNAME,
 APIFY_TOKEN,
 NODE_ENV,
+CACHE_ROOT,
 SEEDBOX_SFTP_HOST,
 SEEDBOX_SFTP_PORT,
 SEEDBOX_SFTP_USERNAME,
@@ -121,8 +115,6 @@ const env = (() => {
 QBT_PASSWORD,
 QBT_PORT,
 WORKER_WORKERS,
-BUNNY_ZONE_URL,
-BUNNY_TOKEN_KEY,
 }
 })()
 
@@ -1,27 +1,15 @@
 # worker
 
-To install node dependencies:
+To install dependencies:
 
 ```bash
-npm install
+bun install
 ```
 
 To run:
 
 ```bash
-npm run start
+bun run index.ts
 ```
 
-
-## External Dependencies
-
-Worker needs these packages installed in order to be successful. Please make sure these are installed in the environment.
-
-* vcsi
-* whisper-cli
-* yolo
-* ffmpeg
-* qbittorrent-nox
-* b2cli
-* valkey
+This project was created using `bun init` in bun v1.3.1. [Bun](https://bun.com) is a fast all-in-one JavaScript runtime.

services/worker/package-lock.json (generated, 753 lines): file diff suppressed because it is too large.
@@ -6,7 +6,7 @@
 "peerDependencies": {
 "typescript": "^5"
 },
-"version": "0.2.0",
+"version": "0.0.1",
 "dependencies": {
 "@bull-board/express": "^6.14.0",
 "@mux/mux-node": "^12.8.0",
@@ -15,7 +15,7 @@
 "@types/node": "^24.10.1",
 "@types/semver": "^7.7.1",
 "@types/ssh2": "^1.15.5",
-"apify-client": "^2.20.0",
+"apify-client": "^2.19.0",
 "bullmq": "^5.63.0",
 "date-fns": "^4.1.0",
 "fs-extra": "^11.3.2",
@@ -24,14 +24,14 @@
 "nanoid": "^5.1.6",
 "onnxruntime-web": "^1.23.2",
 "pocketbase": "^0.26.3",
-"puppeteer": "^24.31.0",
+"puppeteer": "^24.30.0",
 "puppeteer-extra": "^3.3.6",
 "puppeteer-extra-plugin-stealth": "^2.11.2",
 "semver": "^7.7.3",
 "sharp": "^0.34.5",
 "slugify": "^1.6.6",
 "ssh2": "^1.17.0",
-"which": "^6.0.0"
+"which": "^5.0.0"
 },
 "devDependencies": {
 "tsx": "^4.20.6",
@@ -1 +0,0 @@
-*.mp4
@@ -1 +0,0 @@
-*.torrent
@@ -1,6 +1,7 @@
 import { createBullBoard } from '@bull-board/api';
 import { BullMQAdapter } from '@bull-board/api/bullMQAdapter';
 import { ExpressAdapter } from '@bull-board/express';
+import { type JobsOptions } from 'bullmq';
 import express, { type Request, type Response } from 'express';
 import { generalQueue } from './queues/generalQueue.ts';
 import { gpuQueue } from './queues/gpuQueue.ts';
@@ -9,8 +10,6 @@ import env from '../.config/env.ts';
 import { version } from '../package.json';
 import { downloadQueue } from './queues/downloadQueue.ts';
 import { cacheQueue } from './queues/cacheQueue.ts';
-import { muxQueue } from './queues/muxQueue.ts';
-import { b2Queue } from './queues/b2Queue.ts';
 
 const run = async () => {
 
@@ -29,8 +28,6 @@ const run = async () => {
 new BullMQAdapter(gpuQueue),
 new BullMQAdapter(downloadQueue),
 new BullMQAdapter(cacheQueue),
-new BullMQAdapter(muxQueue),
-new BullMQAdapter(b2Queue),
 ],
 serverAdapter,
 });
@@ -1,9 +1,9 @@
 import { Job } from "bullmq";
 import fs from "node:fs/promises";
-import { join } from "node:path";
+import path from "node:path";
 import env from "../../.config/env";
 
-const retainmentDayCount = 2;
+const retainmentDayCount = 7;
 
 /**
 * cacheCleanup
@@ -11,7 +11,7 @@ const retainmentDayCount = 2;
 * Deletes files in the cache directory that are older than retainmentDayCount days
 */
 export default async function cacheCleanup(job: Job) {
-const cacheDir = join(env.CACHE_ROOT, 'worker');
+const cacheDir = env.CACHE_ROOT;
 let cleanedCount = 0;
 
 try {
@@ -22,7 +22,7 @@ export default async function cacheCleanup(job: Job) {
 const retainMs = retainmentDayCount * 24 * 60 * 60 * 1000; // days → ms
 
 for (const file of files) {
-const filePath = join(cacheDir, file);
+const filePath = path.join(cacheDir, file);
 try {
 const stat = await fs.stat(filePath);
 // only delete files older than retainment
@@ -18,7 +18,7 @@ interface FileInfo {
 fileId: string;
 }
 
-const cacheRoot = join(env?.CACHE_ROOT, 'worker');
+const cacheRoot = env.CACHE_ROOT;
 
 function assertPayload(payload: any): asserts payload is Payload {
 if (typeof payload !== "object" || !payload) throw new Error("invalid payload-- was not an object.");
@@ -53,13 +53,13 @@ export async function getB2FileInfo(job: Job, s3Key: string): Promise<FileInfo>
 const args = ["file", "info", `b2://${env.AWS_BUCKET}/${s3Key}`];
 
 let stdout: string;
-await job.log(`Running ${cmd} ${args.join(' ')}`);
+await job.log(`Running ${cmd}, ${args.join(' ')}`);
 
 try {
 const result = await spawn(cmd, args);
 stdout = result.stdout;
 } catch (err: any) {
-throw new Error(`Failed to run 'b2 file info': stderr:${err.stderr} message:${err.message}`);
+throw new Error(`Failed to run 'b2 file info': ${err.stderr || err.message}`);
 }
 
 let data: any;
@@ -1,3 +1,6 @@
+// Copy futureporn.net s3 asset to future.porn s3 bucket.
+// ex: https://futureporn-b2.b-cdn.net/(...) -> https://fp-usc.b-cdn.net/(...)
+
 import logger from "../utils/logger";
 import { Task } from "graphile-worker";
 import { PrismaClient } from "../../generated/prisma";
@@ -78,14 +81,8 @@ async function copyFromBucketToBucket(spawn: typeof NanoSpawn, v1Url: string, v2
 return v2Url;
 }
 
-/**
-* Copy futureporn.net s3 asset to future.porn s3 bucket.
-* example: https://futureporn-b2.b-cdn.net/(...) -> https://fppbpro.b-cdn.net/(...)
-*
-* example of Futureporn v1 asset https://futureporn-b2.b-cdn.net/projektmelody-chaturbate-2023-01-01.mp4
-* example of Futureporn v2 asset https://fp-usc.b-cdn.net/projektmelody-chaturbate-2023-01-01.mp4
-* example of Futureporn v3 asset https://fppbpro.b-cdn.net/pbc_144770472/oj0lkw4it4f5mzx/projektmelody-fansly-2025-11-12.mp4
-*/
+// example v1 https://futureporn-b2.b-cdn.net/projektmelody-chaturbate-2023-01-01.mp4
+// example v2 https://fp-usc.b-cdn.net/projektmelody-chaturbate-2023-01-01.mp4
 const copyV1S3ToV2: Task = async (payload: any) => {
 
 logger.info(`copyV1S3ToV2 with vodId=${payload.vodId}`);
@@ -1,10 +1,12 @@
 import { Job } from "bullmq";
 import { getPocketBaseClient } from "../util/pocketbase";
 import Client, { RecordModel } from "pocketbase";
+import { Vod } from "../types";
 import { basename } from 'node:path';
 import env from "../../.config/env";
 import spawn from 'nano-spawn';
 
+const foo = 'bar';
 
 /**
 * barFunction
@@ -60,7 +62,7 @@ export async function copyV1VideoToV3(job: Job) {
 if (!vodId) throw new Error('vodId was missing from input data');
 
 const pb = await getPocketBaseClient();
-const vod = await pb.collection('vods').getOne(vodId)
+const vod = await pb.collection('vods').getOne(job.data.vodId)
 
 
 const sourceVideo = await copyBetweenBuckets(job, vod);
@@ -2,12 +2,7 @@ import { Job } from "bullmq";
 import { getPocketBaseClient } from "../util/pocketbase";
 import Client, { RecordModel } from "pocketbase";
 import { Vod } from "../types";
-import { signUrl } from "../util/bunnyCDN";
-import env from "../../.config/env";
-import { basename } from "path";
-import { add, getUnixTime } from 'date-fns';
-import { generalQueue } from "../queues/generalQueue";
-import { highPriorityQueue } from "../queues/highPriorityQueue";
 
 interface MuxAssetCreationResponse {
 data: {
@@ -37,22 +32,15 @@ interface MuxAssetCreationResponse {
 */
 export async function __createMuxAsset(vod: RecordModel) {
 
-const { sourceVideo, muxAssetId, muxPlaybackId } = vod;
-if (!sourceVideo) throw new Error(`vod ${vod.id} was missing sourceVideo`);
+const { videoSrcB2, muxAssetId, muxPlaybackId } = vod;
+if (!videoSrcB2) throw new Error(`vod ${vod.id} was missing videoSrcB2`);
 
 if (muxAssetId !== '') throw new Error('this vod already has a muxAssetId');
 if (muxPlaybackId !== '') throw new Error('this vod already has a muxPlaybackId');
 
-const expiry = getUnixTime(add(new Date(), { days: 30 }));
-const b2Url = signUrl(env.BUNNY_TOKEN_KEY, env.BUNNY_ZONE_URL, sourceVideo, undefined, expiry);
-
 const res = await fetch('https://api.mux.com/video/v1/assets', {
-method: 'POST',
-headers: {
-'Authorization': 'Basic ' + Buffer.from(`${env.MUX_TOKEN_ID}:${env.MUX_TOKEN_SECRET}`).toString('base64'),
-'Content-Type': 'application/json'
-},
 body: JSON.stringify({
-"input": b2Url,
+"input": videoSrcB2,
 "playback_policy": [
 "signed"
 ]
@@ -93,10 +81,6 @@ export async function createMuxAsset(job: Job) {
 const { assetId, playbackId } = (await __createMuxAsset(vod));
 job.log(`Created assetId=${assetId}, playbackId=${playbackId}`);
 pb.collection('vods').update(vodId, { muxAssetId: assetId, muxPlaybackId: playbackId });
 
-// we need to sign the mux asset so it's playable on the webpage.
-await highPriorityQueue.add('presignMuxAsset', { vodId });
-
 job.log('Vod record updated. All done.');
 
 }
@@ -20,13 +20,16 @@
 
 
 
+import env from "../../.config/env";
 import { sshClient } from "../util/sftp";
 import { qbtClient, QBTorrentInfo } from "../util/qbittorrent";
 import { Job } from "bullmq";
 import { getPocketBaseClient } from "../util/pocketbase";
-import { basename } from 'node:path';
+import spawn from "nano-spawn";
+import { join, basename } from 'node:path';
+import { tmpdir } from "node:os";
+import { nanoid } from "nanoid";
 import { cacheQueue, cacheQueueEvents } from "../queues/cacheQueue";
-import { readFile } from "node:fs/promises";
 
 
 
@@ -81,29 +84,15 @@ function assertPayload(payload: any): asserts payload is Payload {
 
 // }
 
-/**
-*
-* Create a v1/v2 hybrid torrent using bittorrent.
-* The default is to immediately delete the torrent from qBittorrent afterwards
-* Because we seed from a seedbox instead.
-*
-* @param videoFilePath
-* @param persist - whether or not to keep the torrent in qbittorrent after we have created it.
-* @returns
-*/
 async function createQBittorrentTorrent(
-videoFilePath: string,
-persist: boolean = false,
+vodId: string,
+videoFilePath: string
 ): Promise<{
 magnetLink: string,
 torrentFilePath: string,
 info: QBTorrentInfo,
 }> {
-const torrent = await qbtClient.createTorrent(videoFilePath);
-if (!persist) {
-await qbtClient.deleteTorrent(torrent.info.hash);
-}
-return torrent;
+return qbtClient.createTorrent(videoFilePath);
 }
 
 // async function createTorrentfileTorrent(
|
|||||||
async function uploadTorrentToSeedbox(job: Job, videoFilePath: string, torrentFilePath: string) {
|
async function uploadTorrentToSeedbox(job: Job, videoFilePath: string, torrentFilePath: string) {
|
||||||
|
|
||||||
job.log(`Uploading ${videoFilePath} to seedbox...`);
|
job.log(`Uploading ${videoFilePath} to seedbox...`);
|
||||||
let lastLog = 0; // timestamp in ms
|
|
||||||
|
|
||||||
await sshClient.uploadFile(videoFilePath, './data', async ({ percent }) => {
|
await sshClient.uploadFile(videoFilePath, './data', async ({ percent }) => {
|
||||||
const now = Date.now();
|
await job.log(`Video upload progress: ${percent.toFixed(1)}%`);
|
||||||
if (now - lastLog >= 1_000) { // 10 seconds
|
|
||||||
lastLog = now;
|
|
||||||
await job.updateProgress(percent.toFixed(1))
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
|
|
||||||
job.log(`Uploading ${torrentFilePath} to seedbox...`);
|
job.log(`Uploading ${torrentFilePath} to seedbox...`);
|
||||||
@@ -208,41 +191,27 @@ export async function createTorrent(job: Job) {
 const cacheGetJob = await cacheQueue.add(
 'cacheGet',
 { vodId },
-{ jobId: `cacheGet-${vodId}` }
+{ jobId: `cache-${vodId}` }
 );
 
-// 4. wait up to 5 hours for download to finish
-const results = (await cacheGetJob.waitUntilFinished(cacheQueueEvents, 1000 * 60 * 60 * 5));
+// 4. wait up to 3 hours for download to finish
+const results = (await cacheGetJob.waitUntilFinished(cacheQueueEvents, 1000 * 60 * 60 * 3));
 await job.log(`cacheGet results: ${JSON.stringify(results)}`);
 const { cachePath } = results;
 
-await job.log(`cachePath=${cachePath}. vodId=${vodId}.`);
-await job.log(`Creating v1/v2 hybrid torrent using QBittorrentTorrent...`);
-
-// 4.5 Create a torrent using the local qBittorrent
-const { magnetLink, torrentFilePath } = await createQBittorrentTorrent(cachePath);
-
+await job.log(`cachePath=${cachePath}. vodId=${vodId}. NEXT UP, create QBittorrentTorrent...`);
+const { magnetLink, torrentFilePath } = await createQBittorrentTorrent(vodId, cachePath);
 await job.log(`great! torrent created at ${torrentFilePath}. Now let's upload that torrent and the VOD to the seedbox. This will take some time...`);
 await uploadTorrentToSeedbox(job, cachePath, torrentFilePath);
 
-job.log(`Updating vod record in the db. This involes sending a multipart/form to pocketbase with the .torrent file as a buffer and the magnetLink as text...`);
-const formData = new FormData();
-const torrentBuffer = await readFile(torrentFilePath)
-formData.append('torrent', new Blob([torrentBuffer]), basename(torrentFilePath));
-formData.append('magnetLink', magnetLink);
-
-await pb.collection('vods').update(vod.id, formData);
-
-job.log(`Torrent creation complete.`);
-await job.updateProgress(100);
+job.log(`updating vod record...`);
+await pb.collection('vods').update(vod.id, {
+magnetLink
+});
 
+job.log(`🏆 torrent creation complete.`);
 return { magnetLink, cachePath, torrentFilePath, vodId };
 
 
@@ -1,73 +1,33 @@
-import env from "../../.config/env";
-import { Job } from "bullmq";
-import { getPocketBaseClient } from "../util/pocketbase";
-import spawn from 'nano-spawn';
-import { readFile } from "fs/promises";
-import { basename } from "node:path";
-import { cacheQueue, cacheQueueEvents } from '../queues/cacheQueue';
+import type { Helpers } from "graphile-worker";
+import { PrismaClient } from "../../generated/prisma";
+import { withAccelerate } from "@prisma/extension-accelerate";
+import { getOrDownloadAsset } from "../utils/cache";
+import { env } from "../config/env";
+import { getS3Client, uploadFile } from "../utils/s3";
+import { nanoid } from "nanoid";
+import { getNanoSpawn } from "../utils/nanoSpawn";
+import { generateS3Path } from "../utils/formatters";
+import logger from "../utils/logger";
+import { preparePython } from "../utils/python";
 
+const prisma = new PrismaClient().$extends(withAccelerate());
 
 interface Payload {
 vodId: string;
 }
 
-function assertPayload(payload: any): asserts payload is Payload {
-if (typeof payload !== "object" || !payload) throw new Error("invalid payload-- was not an object.");
-if (typeof payload.vodId !== "string") throw new Error("invalid payload-- was missing vodId");
-}
-
-/**
-*
-* createVideoThumbnail
-*
-* uses vcsi to create a 5x5 grid of video frames
-*/
-export async function createVideoThumbnail(job: Job) {
-
-assertPayload(job.data);
-const vodId = job.data.vodId;
-const pb = await getPocketBaseClient();
-const vod = await pb.collection('vods').getOne(vodId);
-
-job.log(`createVideoThumbnail for ${vodId} starting.`);
-
-job.log(`pulling sourceVideo from cache...`);
-
-const cacheJob = await cacheQueue.add(
-'cacheGet',
-{ vodId },
-{ jobId: `cacheGet-${vodId}` }
-);
-
-// 4. wait for cache/download to finish
-const result = await cacheJob.waitUntilFinished(cacheQueueEvents, 1000 * 60 * 60 * 3);
-const cachePath = result.cachePath;
-
-// 5. create thumbnail
-const thumbnailFilePath = await __createThumbnail(cachePath);
-
-// 6. update db record
-const formData = new FormData();
-
-const thumbnailBuffer = await readFile(thumbnailFilePath)
-formData.append('thumbnail', new Blob([thumbnailBuffer]), basename(thumbnailFilePath));
-await pb.collection('vods').update(vod.id, formData);
-
-}
-
-async function __createThumbnail(inputFilePath: string) {
+async function createThumbnail(helpers: Helpers, inputFilePath: string) {
+logger.debug(`createThumbnail with inputFilePath=${inputFilePath}`)
 if (!inputFilePath) {
 throw new Error("inputFilePath is missing");
 }
 
 const outputFilePath = inputFilePath.replace(/\.[^/.]+$/, '') + '-thumb.png';
-await spawn('vcsi', [
+const spawn = await getNanoSpawn();
+const result = await spawn('vcsi', [
 inputFilePath,
 '--metadata-position', 'hidden',
 '--metadata-margin', '0',
@@ -86,9 +46,81 @@ async function __createThumbnail(inputFilePath: string) {
 ], {
 stdout: 'inherit',
 stderr: 'inherit',
-cwd: env.CACHE_ROOT,
+cwd: env.APP_DIR,
 });
 
+logger.debug('result as follows')
+logger.debug(JSON.stringify(result, null, 2))
+
+logger.info(`✅ Thumbnail saved to: ${outputFilePath}`);
 return outputFilePath
 }
 
+function assertPayload(payload: any): asserts payload is Payload {
+if (typeof payload !== "object" || !payload) throw new Error("invalid payload-- was not an object.");
+if (typeof payload.vodId !== "string") throw new Error("invalid payload-- was missing vodId");
+}
+
+export default async function createVideoThumbnail(payload: any, helpers: Helpers) {
+assertPayload(payload)
+const { vodId } = payload
+const vod = await prisma.vod.findFirstOrThrow({
+where: {
+id: vodId
+},
+include: {
+vtubers: {
+select: {
+slug: true,
+id: true,
+}
+}
+}
+})
+// * [x] load vod
+
+// * [x] exit if video.thumbnail already defined
+if (vod.thumbnail) {
+logger.info(`Doing nothing-- vod ${vodId} already has a thumbnail.`)
+return; // Exit the function early
+}
+
+if (!vod.sourceVideo) {
+throw new Error(`Failed to create thumbnail-- vod ${vodId} is missing a sourceVideo.`);
+}
+
+logger.info('Creating Video Thumbnail')
+const s3Client = getS3Client()
+
+// * [x] download video segments from pull-thru cache
+const videoFilePath = await getOrDownloadAsset(s3Client, env.S3_BUCKET, vod.sourceVideo)
+logger.debug(`videoFilePath=${videoFilePath}`)
+
+// * [x] run vcsi
+const thumbnailPath = await createThumbnail(helpers, videoFilePath)
+logger.debug(`thumbnailPath=${thumbnailPath}`)
+
+// * [x] generate thumbnail s3 key
+const slug = vod.vtubers[0].slug
+if (!slug) throw new Error(`vtuber ${vod.vtubers[0].id} was missing slug`);
+const s3Key = generateS3Path(slug, vod.streamDate, vod.id, `thumbnail.png`);
+
+// * [x] upload thumbnail to s3
+await uploadFile(s3Client, env.S3_BUCKET, s3Key, thumbnailPath, 'image/png')
+
+// * [x] update vod record
+await prisma.vod.update({
+where: { id: vodId },
+data: { thumbnail: s3Key }
+});
+
+// * [x] done
+}
@@ -49,8 +49,8 @@ async function monitorProgress(cachePath: string, expectedSize: number, job: Job
 
 try {
 const { size } = await stat(cachePath);
-const progress = Math.floor((size / expectedSize) * 100);
-// await job.log(`size:${size}, expectedSize:${expectedSize}, progress:${progress}`);
+const progress = Math.min((size / expectedSize) * 100, 100);
+await job.log(`size:${size}, expectedSize:${expectedSize}, progress:${progress}`);
 await job.updateProgress(progress);
 } catch {
 // file might not exist yet
@@ -90,7 +90,6 @@ export async function __download(job: Job, s3Key: string, cachePath: string) {
 stopMonitor(); // always stop monitor, even on error
 }
 
-await job.updateProgress(100);
 job.log('Download complete.');
 }
 
@@ -1,185 +1,41 @@
-import { Job, Queue } from "bullmq";
+import { Job } from "bullmq";
+import { getPocketBaseClient } from "../util/pocketbase";
 import Client from "pocketbase";
-import { getPocketBaseClient } from "../util/pocketbase.ts";
-import { generalQueue } from "../queues/generalQueue.ts";
-import { muxQueue } from "../queues/muxQueue.ts";
-import { b2Queue } from "../queues/b2Queue.ts";
-import { shuffle } from "../util/random.ts";
+import { generalQueue } from "../queues/generalQueue";
 
-const queues: Record<string, Queue> = {
-generalQueue: generalQueue,
-muxQueue: muxQueue,
-b2Queue: b2Queue,
-};
-
-type VodJobConfig = {
-filter: string;
-queueName: string;
-processorName: string;
-logMessage: (vodId: string) => string;
-};
-
-async function handleMissing(job: Job, pb: Client, config: VodJobConfig) {
-
-// Sometimes, we can run into a softlock state.
-// If procesing a vod repeatedly fails, findWork will fail to queue new jobs because of task deduplication.
-// @see https://docs.bullmq.io/patterns/throttle-jobs
-// To overcome this, we randomly choose a vod from a list of the latest 3.
-// If one vod gets permafailed, we have 2 more to continue working.
-// This is an imperfect solution because the one permafailed vod must be rectified by an admin.
-// @todo figure out a better way to handle permafailed vod processing tasks that isn't a ticking timebomb.
-const results = await pb.collection('vods').getList(1, 3, {
-filter: config.filter,
-sort: '-created',
+export async function findMissingTorrent(job: Job, pb: Client) {
+const results = await pb.collection('vods').getList(1, 1, {
+filter: "videoSrcB2 != '' && magnetLink = ''",
+sort: '-streamDate'
 });
 
 const vods = results.items;
-if (!vods.length) {
-// job.log(`No vods matching filter [${config.filter}]. Nothing to do.`)
-return;
-}
-
-const vod = shuffle(vods).at(0);
-if (!vod) {
-throw new Error('no vod found after shuffling');
-};
-const vodId = vod.id;
-
-job.log(config.logMessage(vodId));
-
-const jobId = `${config.processorName}-${vodId}`;
-const attempts = 3;
-
-const queue = queues[config.queueName];
-await queue.add(config.processorName, { vodId }, { jobId, attempts });
-}
-
-// export async function handleMissingTorrent(job: Job, pb: Client) {
-// const results = await pb.collection('vods').getList(1, 1, {
-// filter: "sourceVideo != '' && magnetLink = ''",
-// sort: '-created'
-// });
-// const vods = results.items;
-// const vod = vods[0];
-
-// job.log(`findWork found ${vod.id} in need of a torrent.`);
-
-// const jobId = `createTorrent-${vod.id}`;
-
-// await generalQueue.add('createTorrent', {
-// vodId: vod.id
-// }, {
-// jobId
-// });
-// }
-
-// export async function handleMissingStreamDate(job: Job, pb: Client) {
-// const results = await pb.collection('vods').getList(1, 1, {
-// filter: "announceUrl != '' && streamDate = ''",
-// sort: '-created'
-// });
-// const vods = results.items;
-// if (vods.length === 0) return; // no vods with missing streamDate.
-
-// const vod = vods[0];
-// const vodId = vod.id;
-// job.log(`findWork found ${vodId} in need of a stream date.`);
-// const jobId = `getAnnounceUrlDetails-${vodId}`;
-
-// await generalQueue.add('getAnnounceUrlDetails', {
-// vodId: vodId
-// }, {
-// jobId
-// });
-// }
-
-// export async function handleMissingSourceVideo(job: Job, pb: Client) {
-// const results = await pb.collection('vods').getList(1, 1, {
-// filter: "videoSrcB2 != '' && sourceVideo = ''",
-// sort: '-created',
-// });
-// const vods = results.items;
-// if (vods.length === 0) return; // no vods with missing sourceVideo
-// const vod = vods[0];
-// const vodId = vod.id;
-// job.log(`findWork found ${vodId} in need of a source video.`);
-// const jobId = `handleMissingSourceVideo-${vodId}`;
-// await generalQueue.add('copyV1VideoToV3', {
-// vodId
-// }, {
-// jobId
-// });
-// }
-
-// export async function handleMissingMuxAsset(job: Job, pb: Client) {
-// const results = await pb.collection('vods').getList(1, 1, {
-// filter: "muxAssetId = '' && muxPlaybackId = '' && sourceVideo != ''",
-// sort: '-created',
-// });
-// // ...
-// }
-
-export async function handleMissingTorrent(job: Job, pb: Client) {
-return handleMissing(job, pb, {
-filter: "sourceVideo != '' && magnetLink = ''",
-queueName: 'generalQueue',
-processorName: 'createTorrent',
-logMessage: (id) => `findWork found ${id} in need of a torrent.`
-});
-}
-
-export async function handleMissingStreamDate(job: Job, pb: Client) {
-return handleMissing(job, pb, {
-filter: "announceUrl != '' && streamDate = ''",
-queueName: 'generalQueue',
-processorName: 'getAnnounceUrlDetails',
-logMessage: (id) => `findWork found ${id} in need of a stream date.`
-});
-}
-
-export async function handleMissingSourceVideo(job: Job, pb: Client) {
-return handleMissing(job, pb, {
-filter: "videoSrcB2 != '' && sourceVideo = ''",
-queueName: 'b2Queue',
-processorName: 'copyV1VideoToV3',
-logMessage: (id) => `findWork found ${id} in need of a source video.`
-});
-}
-
-export async function handleMissingThumbnail(job: Job, pb: Client) {
-return handleMissing(job, pb, {
-filter: "sourceVideo != '' && thumbnail = ''",
-queueName: 'generalQueue',
-processorName: 'createVideoThumbnail',
-logMessage: (id) => `findWork found ${id} in need of a thumbnail.`
-});
-}
-
-/**
-* handleMissingMuxAsset
-*
-* We only add Mux assets to new vods because Mux cost a lot of money.
-* Future plan is to build our own Mux alternative and remove Mux.
-*/
-export async function handleMissingMuxAsset(job: Job, pb: Client) {
-const sinceDate = '2025-11-01';
-return handleMissing(job, pb, {
-filter: `created > '${sinceDate}' && muxAssetId = '' && muxPlaybackId = '' && sourceVideo != ''`,
-queueName: 'muxQueue', // whatever queue name you use
-processorName: 'createMuxAsset',
-logMessage: (id) => `findWork found ${id} in need of a Mux asset.`
-});
+const vod = vods[0];
+
+job.log(`findWork found ${vod.id} in need of a torrent.`);
+
+const jobId = `createTorrent-${vod.id}`;
+
+const existingJob = await generalQueue.getJob(jobId);
+
+// only add a createTorrent job if there isn't one queued
+if (existingJob?.isActive) {
+job.log(`refusing to add createTorrent job for vod ${vod.id} because a similar job is active`);
+} else {
+
+await generalQueue.add('createTorrent', {
+vodId: vod.id
+}, {
+jobId
+});
+}
 }
 
 /**
 *
-* This processor is all about identifying known issues in the database and handling them by delegating the work to a processor.
+* findWork
 *
 * Remember to makes processors
 * * idempotent
@@ -190,12 +46,7 @@ export async function findWork(job: Job) {
 
 const pb = await getPocketBaseClient();
 
-await handleMissingTorrent(job, pb);
-await handleMissingStreamDate(job, pb);
-await handleMissingSourceVideo(job, pb);
-await handleMissingMuxAsset(job, pb);
-await handleMissingThumbnail(job, pb);
+await findMissingTorrent(job, pb);
 
 
 // findMissingThumbnail
 // findMissingAudioAnalysis
@@ -32,12 +32,11 @@ export function getTweetDates(tweetUrls: string[]): Date[] {
 }
 
 export async function getVodsWithAnnounceUrlAndNoStreamDate() {
-const pb = await getPocketBaseClient();
+const pb = await getPocketBaseClient()
 
 const results = await pb.collection('vods').getList(1, 25, {
-filter: "announceUrl != '' && streamDate = ''",
-requestKey: `getAnnounceUrlDetails-1..25`
-});
+filter: "announceUrl != '' && streamDate = ''"
+})
 const vods = results.items;
 
 return vods;
@@ -1,6 +1,6 @@
 import Mux from '@mux/mux-node';
 import env from '../../.config/env';
-import { type Job } from 'bullmq';
+import { type QueueOptions, type Job } from 'bullmq';
 import { getPocketBaseClient } from '../util/pocketbase';
 
 
@@ -25,21 +25,8 @@ async function createToken(playbackId: string) {
 return token
 }
 
-export async function presignMuxAsset(job: Job) {
-const pb = await getPocketBaseClient();
-const vodId = job.data.vodId as string;
-const vod = await pb.collection('vods').getOne(vodId);
-
-const muxPlaybackId = vod?.muxPlaybackId;
-if (!muxPlaybackId) throw new Error(`presignMuxAsset called with vodId ${vodId} failed because this vod is missing a muxPlaybackId.`);
-const muxPlaybackToken = await createToken(muxPlaybackId);
-await pb.collection('vods').update(vod.id, {
-muxPlaybackToken
-});
-}
-
-export async function presignAllMuxAssets(job: Job) {
-
+export async function presignMuxAssets(job: Job) {
 const pb = await getPocketBaseClient();
 
@@ -1,11 +0,0 @@
-/**
-* b2Queue for running long-running Backblaze tasks.
-* Sometimes they take 3+ hours to do a server side bucket transfer and it holds up other tasks.
-* Backblaze tasks get their own queue so as not to hold up other tasks.
-*/
-import { Queue, QueueEvents } from 'bullmq';
-import { connection } from '../../.config/bullmq.config';
-export const b2Queue = new Queue('b2Queue', { connection });
-export const b2QueueEvents = new QueueEvents("b2Queue", {
-connection
-});
@@ -4,5 +4,3 @@ export const downloadQueue = new Queue('downloadQueue', { connection });
 export const downloadQueueEvents = new QueueEvents("downloadQueue", {
 connection
 });
-
-await downloadQueue.setGlobalConcurrency(1);
@@ -8,14 +8,14 @@ await generalQueue.upsertJobScheduler(
 pattern: '3 7 * * *', // Runs at 07:03 every day
 },
 {
-name: 'presignAllMuxAssets',
+name: 'presignMuxAsset',
 data: {},
 opts: {}, // Optional additional job options
 },
 );
 
-await generalQueue.upsertJobScheduler(
-'find-work-often',
-{ every: 1000 * 30 },
-{ name: 'findWork' }
-)
+// await generalQueue.upsertJobScheduler(
+// 'find-work-every-one-minute',
+// { every: 1000 * 60 },
+// { name: 'findWork' }
+// )
@@ -3,3 +3,15 @@ import { Queue } from "bullmq";
 import { connection } from "../../.config/bullmq.config";
 export const gpuQueue = new Queue('gpuQueue', { connection });
 
+await gpuQueue.upsertJobScheduler(
+'schedule-vod-processing-recurring',
+{
+pattern: '* * * * *'
+},
+{
+name: 'cron-schedule-vod-processing',
+data: {},
+opts: {}
+},
+)
+
@@ -13,3 +13,15 @@ await highPriorityQueue.upsertJobScheduler(
     opts: {}
   },
 )
+
+await highPriorityQueue.upsertJobScheduler(
+  'get-announce-url-details',
+  {
+    every: 1000 * 63
+  },
+  {
+    name: 'getAnnounceUrlDetails',
+    data: {},
+    opts: {},
+  },
+);

@@ -1,6 +0,0 @@
-import { Queue, QueueEvents } from 'bullmq';
-import { connection } from '../../.config/bullmq.config';
-export const muxQueue = new Queue('muxQueue', { connection });
-export const muxQueueEvents = new QueueEvents("muxQueue", {
-  connection
-});

@@ -1,101 +0,0 @@
-## Known errors
-
-### Error: Failed to run 'b2 file info': b2: tar_extract_all() failed: Invalid argument
-
-This one is weird. It happens when several failed b2cli are running in the background. I came across this during testing and first I thought my executable was somehow corrupted.
-
-```
-cj@cj54hd:~/Downloads$ b2 file info b2://fppbdev/pbc_144770472/4csbaj06u481x9t/el_xox-chaturbate-2024-03-10.mp4
-b2: tar_extract_all() failed: Invalid argument
-cj@cj54hd:~/Downloads$ b2 file ls b2://fppbdev/pbc_144770472/4csbaj06u481x9t/el_xox-chaturbate-2024-03-10.mp4
-b2: tar_extract_all() failed: Invalid argument
-cj@cj54hd:~/Downloads$ b2 ls b2://fppbdev/pbc_144770472/4csbaj06u481x9t/el_xox-chaturbate-2024-03-10.mp4
-b2: tar_extract_all() failed: Invalid argument
-cj@cj54hd:~/Downloads$ b2 ls
-b2: tar_extract_all() failed: Invalid argument
-cj@cj54hd:~/Downloads$ b2 -h
-b2: tar_extract_all() failed: Invalid argument
-cj@cj54hd:~/Downloads$ b2
-b2: tar_extract_all() failed: Invalid argument
-cj@cj54hd:~/Downloads$ which b2
-/home/cj/.local/bin/b2
-cj@cj54hd:~/Downloads$ head b2
-head: cannot open 'b2' for reading: No such file or directory
-cj@cj54hd:~/Downloads$ ~/.local/bin/b2
-b2: tar_extract_all() failed: Invalid argument
-cj@cj54hd:~/Downloads$ ls -la ~/.local/bin/b2
--rwxrwxr-x 1 cj cj 32597952 Aug 23 17:07 /home/cj/.local/bin/b2
-cj@cj54hd:~/Downloads$ file ~/.local/bin/b2
-/home/cj/.local/bin/b2: ELF 64-bit LSB executable, x86-64, version 1 (SYSV), statically linked, stripped
-cj@cj54hd:~/Downloads$ cp b2v3-linux ~/.local/bin/b2
-cp: cannot create regular file '/home/cj/.local/bin/b2': Text file busy
-
-```
-
-apparently there were a shitton of b2 instances running in the background!
-
-```
-cj@cj54hd:~/Downloads$ ps aux | grep 'b2:/[/]'
-cj 191334 0.0 0.0 292 148 pts/11 S+ Nov21 0:00 b2 file download b2://fppbdev/pbc_144770472/6v80rae2m128yw1/projektmelody-chaturbate-2025-10-19.mp4 /tmp/vods/6v80rae2m128yw1/sourceVideo/13629592283.mp4
-cj 191335 0.0 0.0 2840 2004 pts/11 S+ Nov21 0:00 /tmp/staticx-gDLAjK/b2v3 file download b2://fppbdev/pbc_144770472/6v80rae2m128yw1/projektmelody-chaturbate-2025-10-19.mp4 /tmp/vods/6v80rae2m128yw1/sourceVideo/13629592283.mp4
-cj 191336 0.2 0.3 950960 90496 pts/11 Sl+ Nov21 5:48 /tmp/staticx-gDLAjK/b2v3 file download b2://fppbdev/pbc_144770472/6v80rae2m128yw1/projektmelody-chaturbate-2025-10-19.mp4 /tmp/vods/6v80rae2m128yw1/sourceVideo/13629592283.mp4
-cj 205138 0.0 0.0 292 152 pts/11 S+ Nov21 0:00 b2 file download b2://fppbdev/pbc_144770472/6v80rae2m128yw1/projektmelody-chaturbate-2025-10-19.mp4 /tmp/vods/6v80rae2m128yw1/sourceVideo/13629592283.mp4
-cj 205139 0.0 0.0 2840 1988 pts/11 S+ Nov21 0:00 /tmp/staticx-OMOMPi/b2v3 file download b2://fppbdev/pbc_144770472/6v80rae2m128yw1/projektmelody-chaturbate-2025-10-19.mp4 /tmp/vods/6v80rae2m128yw1/sourceVideo/13629592283.mp4
-cj 205140 0.0 0.3 950960 90528 pts/11 Sl+ Nov21 0:18 /tmp/staticx-OMOMPi/b2v3 file download b2://fppbdev/pbc_144770472/6v80rae2m128yw1/projektmelody-chaturbate-2025-10-19.mp4 /tmp/vods/6v80rae2m128yw1/sourceVideo/13629592283.mp4
-cj 257781 0.0 0.0 292 96 pts/11 S+ Nov21 0:00 b2 file download b2://fppbdev/pbc_144770472/4csbaj06u481x9t/el_xox-chaturbate-2024-03-10.mp4 /tmp/2024-03-10-el_xox-4csbaj06u481x9t.mp4
-cj 257782 0.0 0.0 2840 2028 pts/11 S+ Nov21 0:00 /tmp/staticx-GFFmLI/b2v3 file download b2://fppbdev/pbc_144770472/4csbaj06u481x9t/el_xox-chaturbate-2024-03-10.mp4 /tmp/2024-03-10-el_xox-4csbaj06u481x9t.mp4
-cj 257783 0.0 0.3 951984 90080 pts/11 Sl+ Nov21 1:37 /tmp/staticx-GFFmLI/b2v3 file download b2://fppbdev/pbc_144770472/4csbaj06u481x9t/el_xox-chaturbate-2024-03-10.mp4 /tmp/2024-03-10-el_xox-4csbaj06u481x9t.mp4
-cj 264819 0.0 0.0 292 148 pts/11 S+ Nov21 0:00 b2 file download b2://fppbdev/pbc_144770472/4csbaj06u481x9t/el_xox-chaturbate-2024-03-10.mp4 /tmp/vods/4csbaj06u481x9t/sourceVideo/3835045469/2024-03-10-el_xox-4csbaj06u481x9t.mp4
-cj 264820 0.0 0.0 2840 2032 pts/11 S+ Nov21 0:00 /tmp/staticx-NFonPF/b2v3 file download b2://fppbdev/pbc_144770472/4csbaj06u481x9t/el_xox-chaturbate-2024-03-10.mp4 /tmp/vods/4csbaj06u481x9t/sourceVideo/3835045469/2024-03-10-el_xox-4csbaj06u481x9t.mp4
-cj 264821 0.1 0.3 950956 90488 pts/11 Sl+ Nov21 2:47 /tmp/staticx-NFonPF/b2v3 file download b2://fppbdev/pbc_144770472/4csbaj06u481x9t/el_xox-chaturbate-2024-03-10.mp4 /tmp/vods/4csbaj06u481x9t/sourceVideo/3835045469/2024-03-10-el_xox-4csbaj06u481x9t.mp4
-cj 607425 0.0 0.0 292 148 pts/11 S+ Nov22 0:00 b2 file download b2://fppbdev/pbc_144770472/2ct5pnh3fdx91ks/projektmelody-chaturbate-2024-03-04.mp4 /tmp/vods/2ct5pnh3fdx91ks/sourceVideo/4784598000/2024-03-03-projektmelody-2ct5pnh3fdx91ks.mp4
-cj 607426 0.0 0.0 2840 1972 pts/11 S+ Nov22 0:00 /tmp/staticx-cjcddG/b2v3 file download b2://fppbdev/pbc_144770472/2ct5pnh3fdx91ks/projektmelody-chaturbate-2024-03-04.mp4 /tmp/vods/2ct5pnh3fdx91ks/sourceVideo/4784598000/2024-03-03-projektmelody-2ct5pnh3fdx91ks.mp4
-cj 607427 0.1 0.3 950956 90492 pts/11 Sl+ Nov22 1:09 /tmp/staticx-cjcddG/b2v3 file download b2://fppbdev/pbc_144770472/2ct5pnh3fdx91ks/projektmelody-chaturbate-2024-03-04.mp4 /tmp/vods/2ct5pnh3fdx91ks/sourceVideo/4784598000/2024-03-03-projektmelody-2ct5pnh3fdx91ks.mp4
-```
-
-kill 'em!
-
-```
-cj@cj54hd:~/Downloads$ killall b2
-cj@cj54hd:~/Downloads$ ps aux | grep 'b2:/[/]'
-cj@cj54hd:~/Downloads$
-```
-
-and now it works. holy shit!
-
-```
-cj@cj54hd:~/Downloads$ b2 file info b2://fppbdev/pbc_144770472/4csbaj06u481x9t/el_xox-chaturbate-2024-03-10.mp4
-{
-    "cacheControl": "max-age=604800",
-    "contentSha1": "84bbd4218b16184f6204eeb8094720150c8479d5",
-    "contentType": "video/mp4",
-    "fileId": "4_zad63b74f11035a9993a30b18_f100aec8c206f88f4_d20251116_m225322_c000_v0001417_t0037_u01763333602168",
-    "fileInfo": {
-        "large_file_sha1": "84bbd4218b16184f6204eeb8094720150c8479d5",
-        "src_last_modified_millis": "1710056557459"
-    },
-    "fileName": "pbc_144770472/4csbaj06u481x9t/el_xox-chaturbate-2024-03-10.mp4",
-    "fileRetention": {
-        "mode": null,
-        "retainUntilTimestamp": null
-    },
-    "legalHold": null,
-    "replicationStatus": null,
-    "serverSideEncryption": {
-        "mode": "none"
-    },
-    "size": 3835045469,
-    "uploadTimestamp": 1763333602168
-}
-```
-
-But it still failed when run by BullMQ.
-
-```
-Error: Failed to run 'b2 file info': b2: tar_extract_all() failed: Invalid argument
-    at onFailed (/home/cj/Documents/futureporn-monorepo/services/worker/node_modules/bullmq/src/classes/job.ts:1295:16)
-    at <anonymous> (/home/cj/Documents/futureporn-monorepo/services/worker/node_modules/bullmq/src/classes/job.ts:1324:11)
-    at process.processTicksAndRejections (node:internal/process/task_queues:105:5)
-```
-
-OH, it is actually that my /tmp dir doesn't have enough space to store the file being downloaded! @see https://github.com/JonathonReinhart/staticx/issues/244
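Given that root cause (the staticx-packed b2 binary unpacks into /tmp, and the download target also lives under /tmp), a cheap guard is to check free space in /tmp before dispatching a download job. A minimal sketch, assuming Node.js 18.15 or newer for `fs.promises.statfs`; the byte threshold is illustrative:

```
import { statfs } from 'node:fs/promises';

// Returns free bytes on the filesystem backing the given path.
async function freeBytes(path: string): Promise<number> {
  const stats = await statfs(path);
  return stats.bavail * stats.bsize; // blocks available to unprivileged users * block size
}

// Example guard before starting a large b2 download into /tmp.
const required = 4_000_000_000; // roughly the size of one source video, illustrative
if (await freeBytes('/tmp') < required) {
  throw new Error(`/tmp has less than ${required} bytes free; refusing to start download.`);
}
```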

@@ -1,30 +0,0 @@
-import { describe, it, expect } from "vitest";
-import { signUrl } from "./bunnyCDN.ts";
-
-describe("signUrl", () => {
-  it("generates a correct signed BunnyCDN URL", () => {
-    const securityKey = "my-secret";
-    const baseUrl = "https://cdn.example.com";
-    const path = "/videos/test.mp4";
-    const rawQuery = "width=500&quality=80";
-    const expires = 1732600000;
-
-    const signed = signUrl(securityKey, baseUrl, path, rawQuery, expires);
-
-    // token part is deterministic but long, so let's just check it contains required parts
-    expect(signed).toContain(baseUrl + path);
-    expect(signed).toContain("token=");
-    expect(signed).toContain("quality=80&width=500");
-    expect(signed).toContain(`expires=${expires}`);
-  });
-
-  it("throws if baseUrl ends with slash", () => {
-    expect(() => signUrl("k", "https://example.com/", "/file.jpg", "", 123))
-      .toThrow(/must not end with a slash/);
-  });
-
-  it("prepends slash if path does not start with one", () => {
-    const out = signUrl("k", "https://x", "file.jpg", "", 1);
-    expect(out.startsWith("https://x/file.jpg")).toBe(true);
-  });
-});

@@ -1,49 +0,0 @@
-
-import crypto from "node:crypto";
-
-
-/**
- * Generate a signed BunnyCDN URL.
- *
- * @see https://support.bunny.net/hc/en-us/articles/360016055099-How-to-sign-URLs-for-BunnyCDN-Token-Authentication
- */
-export function signUrl(securityKey: string, baseUrl: string, path: string, rawQuery = "", expires: number) {
-  if (!path) throw new Error('signUrl requires a path argument, but it was falsy.');
-  if (!path.startsWith('/')) path = '/' + path;
-  if (baseUrl.endsWith('/')) throw new Error(`baseUrl must not end with a slash. got baseUrl=${baseUrl}`);
-
-  // Build parameter string (sort keys alphabetically)
-  let parameterData = "";
-  if (rawQuery) {
-    const params = rawQuery
-      .split("&")
-      .map(p => p.split("="))
-      .filter(([key]) => key && key !== "token" && key !== "expires")
-      .sort(([a], [b]) => a.localeCompare(b));
-
-    if (params.length) {
-      parameterData = params.map(([k, v]) => `${k}=${v}`).join("&");
-    }
-  }
-
-  // Build hashable base
-  const hashableBase = securityKey + path + expires + parameterData;
-  // console.log(`hashableBase`, hashableBase)
-
-  // Compute token using your $security.sha256 workflow
-
-  const tokenH = crypto.createHash("sha256").update(hashableBase).digest("hex");
-  const token = Buffer.from(tokenH, "hex")
-    .toString("base64")
-    .replace(/\n/g, "")
-    .replace(/\+/g, "-")
-    .replace(/\//g, "_")
-    .replace(/=/g, "");
-
-  // Build final signed URL
-  let tokenUrl = baseUrl + path + "?token=" + token;
-  if (parameterData) tokenUrl += "&" + parameterData;
-  tokenUrl += "&expires=" + expires;
-
-  return tokenUrl;
-}
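For reference, the helper removed above produces a BunnyCDN token-auth URL: it hashes securityKey + path + expires + the alphabetically sorted query string with SHA-256, then base64url-encodes the digest. A usage sketch mirroring the deleted test; the values are illustrative, and the security key would come from the pull zone's Token Authentication Key:

```
// Illustrative values only.
const signed = signUrl(
  "my-secret",                           // securityKey
  "https://cdn.example.com",             // baseUrl, no trailing slash
  "/videos/test.mp4",                    // path
  "width=500&quality=80",                // rawQuery (hashed as quality=80&width=500 after sorting)
  Math.round(Date.now() / 1000) + 3600,  // expires one hour from now
);
// => https://cdn.example.com/videos/test.mp4?token=<base64url sha256>&quality=80&width=500&expires=<unix ts>
```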

@@ -383,28 +383,6 @@ export class QBittorrentClient {
     return filePath;
   }

-
-  /**
-   * get torrent info from qBittorrent
-   * When a torrent is created, It can take some time before it appears in the list. So we retry it.
-   *
-   * @param torrentName
-   * @returns
-   */
-  async getTorrentInfos(torrentName: string): Promise<QBTorrentInfo> {
-    return retry(
-      () => this.__getTorrentInfos(torrentName),
-      6,
-      500
-    );
-  }
-
-  /**
-   * Generally it's preferred to use this.getTorrentInfos over this.__getTorrentInfos because it is more fault tolerant.
-   *
-   * @param torrentName
-   * @returns
-   */
   private async __getTorrentInfos(torrentName: string): Promise<QBTorrentInfo> {
     if (!torrentName) throw new Error('__getTorrentInfos requires torrentName as first arg. However, arg was falsy. ');
     // ensure we're logged in
@@ -434,8 +412,12 @@ export class QBittorrentClient {
   async getInfoHashV2(torrentName: string): Promise<string> {
     console.log(`getInfoHashV2 using torrentName=${torrentName}`)

-    const torrent = await this.getTorrentInfos(torrentName);
+    // __getTorrentInfos can take some time. So we retry it every 1/2 second up to 6 times
+    const torrent = await retry(
+      () => this.__getTorrentInfos(torrentName),
+      6,
+      500
+    );

     return torrent.infohash_v2;
   }
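The `retry` helper used in both versions of this method is not shown in the diff; from the call site its signature appears to be `retry(fn, attempts, delayMs)`. A minimal sketch of such a helper, offered purely as an assumption about its shape:

```
// Assumed shape: try `fn` up to `attempts` times, waiting `delayMs` between attempts.
async function retry<T>(fn: () => Promise<T>, attempts: number, delayMs: number): Promise<T> {
  let lastError: unknown;
  for (let i = 0; i < attempts; i++) {
    try {
      return await fn();
    } catch (err) {
      lastError = err;
      if (i < attempts - 1) await new Promise((resolve) => setTimeout(resolve, delayMs));
    }
  }
  throw lastError;
}
```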
@@ -448,7 +430,7 @@ export class QBittorrentClient {
     const bn = basename(localFilePath).replace('.torrent', '');
     await this.connect();
     await this.__addTorrent(localFilePath);
-    return (await this.getTorrentInfos(bn));
+    return (await this.__getTorrentInfos(bn));
   }

   /**
@@ -498,7 +480,6 @@ export class QBittorrentClient {
    */
   async deleteTorrent(id: string): Promise<void> {
     await this.connect();
-    console.log(`Deleting torrent ${id}...`);

     if (!this.sidCookie) {
       throw new Error('Not logged in. sidCookie missing.');
@@ -514,7 +495,7 @@ export class QBittorrentClient {
       hashToDelete = id;
     } else {
       // Not a hash → treat as name → look up hash
-      const info = await this.getTorrentInfos(id);
+      const info = await this.__getTorrentInfos(id);
       console.log('info', info);
       hashToDelete = info.hash;
     }
@@ -556,14 +537,14 @@ export class QBittorrentClient {

   /**
    *
-   * @deprecated use getTorrentInfos instead
+   * @deprecated use __getTorrentInfos instead
    */
   async getMagnetLink(fileName: string): Promise<string> {
     console.log(`getMagnetLink using fileName=${fileName}`)

     // qBittorrent does NOT return infoHash directly here
     // we have to get it by querying the torrents list
-    const torrent = await this.getTorrentInfos(fileName);
+    const torrent = await this.__getTorrentInfos(fileName);

     if (!torrent) {
       throw new Error(`Torrent ${fileName} not found in qBittorrent after adding`);
@@ -598,7 +579,7 @@ export class QBittorrentClient {

     // 5. Get magnet link
     console.log('lets get the torrent infos');
-    const info = await this.getTorrentInfos(basename(localFilePath))
+    const info = await this.__getTorrentInfos(basename(localFilePath))
     const magnetLink = info.magnet_uri;

     return {

@@ -1,11 +0,0 @@
-
-/**
- * randomly shuffle an array
- */
-export function shuffle<T>(arr: T[]): T[] {
-  for (let i = arr.length - 1; i > 0; i--) {
-    const j = Math.floor(Math.random() * (i + 1));
-    [arr[i], arr[j]] = [arr[j], arr[i]];
-  }
-  return arr;
-}

@@ -1,26 +0,0 @@
-import { Worker } from 'bullmq';
-import { connection } from '../../.config/bullmq.config.ts';
-import { copyV1VideoToV3 } from '../processors/copyV1VideoToV3.ts';
-
-const workerName = 'b2Worker';
-const queueName = 'b2Queue';
-
-new Worker(
-  queueName,
-  async (job) => {
-    console.log(`${workerName}. we got a job on the ${queueName}. data=${JSON.stringify(job.data)}, job name=${job.name}`);
-    switch (job.name) {
-      case 'copyV1VideoToV3':
-        return await copyV1VideoToV3(job);
-
-      default:
-        throw new Error(`${workerName} Unknown job name: ${job.name}`);
-    }
-  },
-  {
-    connection,
-    concurrency: 3
-  }
-);
-
-console.log(`${workerName} is running...`);

@@ -2,24 +2,18 @@

 import { Worker } from 'bullmq';
 import { connection } from '../../.config/bullmq.config.ts';
-import { presignAllMuxAssets, presignMuxAsset } from '../processors/presignMuxAssets.ts';
+import { presignMuxAssets } from '../processors/presignMuxAssets.ts';
 import { copyV1VideoAll } from '../processors/copyV1VideoAll.ts';
 import { copyV2ThumbToV3 } from '../processors/copyV2ThumbToV3.ts';
 import { copyV1VideoToV3 } from '../processors/copyV1VideoToV3.ts';
 import { createTorrent } from '../processors/createTorrent.ts';
 import { analyzeAudio } from '../processors/analyzeAudio.ts';
 import { findWork } from '../processors/findWork.ts';
-import { getAnnounceUrlDetails } from '../processors/getAnnounceUrlDetails.ts';
-import { createVideoThumbnail } from '../processors/createVideoThumbnail.ts';

 new Worker(
   'generalQueue',
   async (job) => {
     console.log('generalWorker. we got a job on the generalQueue.', job.data, job.name);
     switch (job.name) {
-      case 'getAnnounceUrlDetails':
-        return await getAnnounceUrlDetails(job);
-
       case 'findWork':
         return await findWork(job);

@@ -29,11 +23,8 @@ new Worker(
       case 'copyV1VideoToV3':
         return await copyV1VideoToV3(job);

-      case 'presignAllMuxAssets':
-        return await presignAllMuxAssets(job);
-
-      case 'presignMuxAsset':
-        return await presignMuxAsset(job);
+      case 'presignMuxAssets':
+        return await presignMuxAssets(job);

       case 'copyV2ThumbToV3':
         return await copyV2ThumbToV3(job);
@@ -44,9 +35,6 @@ new Worker(
       case 'analyzeAudio':
         return await analyzeAudio(job);

-      case 'createVideoThumbnail':
-        return await createVideoThumbnail(job);
-
       default:
         throw new Error(`Unknown job name: ${job.name}`);
     }

@@ -15,17 +15,13 @@ new Worker(
       case '':
         throw new Error('missing job name.')
       case 'syncronizePatreon':
-        await syncronizePatreon(job);
-        break;
+        return await syncronizePatreon(job);
       case 'getAnnounceUrlDetails':
-        await getAnnounceUrlDetails(job);
-        break;
+        return await getAnnounceUrlDetails(job);
       case 'createTorrent':
-        await createTorrent(job);
-        break;
+        return await createTorrent(job);
       case 'copyV1VideoToV3':
-        await copyV1VideoToV3(job);
-        break;
+        return await copyV1VideoToV3(job);
       default:
         throw new Error(`Unknown job name: ${job.name}`);
     }
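The pattern change in this hunk (returning the processor result instead of `await ...; break;`) matters beyond style: whatever a BullMQ processor returns is stored as the job's return value and surfaced on the 'completed' event, so `return await` both exits the switch and preserves that value. A small sketch of observing it; the queue name is illustrative:

```
import { QueueEvents } from 'bullmq';
import { connection } from '../../.config/bullmq.config.ts';

const events = new QueueEvents('highPriorityQueue', { connection });

// returnvalue is whatever the processor function returned for that job.
events.on('completed', ({ jobId, returnvalue }) => {
  console.log(`job ${jobId} completed with`, returnvalue);
});
```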

@@ -1,23 +0,0 @@
-import { Worker } from 'bullmq';
-import { connection } from '../../.config/bullmq.config.ts';
-import { createMuxAsset } from '../processors/createMuxAsset.ts';
-
-const workerName = 'muxWorker';
-const queueName = 'muxQueue';
-
-new Worker(
-  queueName,
-  async (job) => {
-    console.log(`${workerName}. we got a job on the ${queueName}. data=${JSON.stringify(job.data)}, job name=${job.name}`);
-    switch (job.name) {
-      case 'createMuxAsset':
-        return await createMuxAsset(job);
-
-      default:
-        throw new Error(`${workerName} Unknown job name: ${job.name}`);
-    }
-  },
-  { connection }
-);
-
-console.log(`${workerName} is running...`);

@@ -1,15 +0,0 @@
-[Unit]
-Description=qbittorrent-nox
-After=network.target
-
-[Service]
-Type=simple
-Restart=always
-RestartSec=5
-ExecStart=/home/cj/.local/bin/qbittorrent-nox --confirm-legal-notice --webui-port=8069 --profile=/home/cj/.config/futureporn/qbittorrent-nox
-WorkingDirectory=/home/cj/Documents/futureporn-monorepo/services/worker
-EnvironmentFile=/home/cj/Documents/futureporn-monorepo/services/worker/.env.production.local
-Restart=on-failure
-
-[Install]
-WantedBy=default.target

@@ -2,14 +2,12 @@

 loginctl enable-linger
 sudo cp worker.service /etc/systemd/user/worker.service
-sudo cp qbittorrent-nox.service /etc/systemd/user/worker.service


 systemctl --user daemon-reload
 systemctl --user restart worker
-systemctl --user restart qbittorrent-nox
 systemctl --user enable worker
-systemctl --user enable qbittorrent-nox
+systemctl --user status worker
 systemctl --user status worker
 systemctl status valkey
