/**
 * @todo If we have the ffmpeg stream send an end event, does the ffmpegStream close?
 *
 * So far, we have observed the end of a CB stream, and the uploadStream is what
 * shows as closed. It would be better to have the ffmpeg stream do the closing,
 * amirite? Or does it even matter?
 *
 * Here's what the console.log showed when the CB stream ended while we were not
 * using { end: true } on the ffmpeg stream:
 *
 *   [data] uploadStream. 118018880 aggregated bytes (118 MB).
 *   [data] uploadStream. 118067384 aggregated bytes (118 MB).
 *   [data] uploadStream. 118101224 aggregated bytes (118 MB).
 *   [data] uploadStream. 118119648 aggregated bytes (118 MB).
 *   [close] uploadStream closed
 *   pipeline succeeded.
 */
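
/*
 * A sketch for the @todo above (not wired in): stream.pipeline() always ends the
 * destination when the source ends, so the ordering we logged (uploadStream
 * closes, then the pipeline callback fires) is the expected behavior, not a bug.
 * A plain .pipe() expresses the same intent explicitly; { end: true } is its
 * default:
 *
 *   ffmpegProc.stdout.pipe(uploadStream, { end: true })
 *   ffmpegProc.stdout.once('end', () => console.log('[end] ffmpeg stdout ended'))
 *
 * Either way, the PassThrough closing is what the S3 Upload observes.
 */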

import { getRandomRoom } from '@futureporn/scout/cb.js'
import { ua0 } from '@futureporn/scout/ua.js'
import { spawn } from 'child_process'
import { PassThrough, pipeline } from 'stream'
import { Upload } from '@aws-sdk/lib-storage'
import { S3Client } from '@aws-sdk/client-s3'
import { createId } from '@paralleldrive/cuid2'
import prettyBytes from 'pretty-bytes'
import dotenv from 'dotenv'

dotenv.config({
  path: '../../.env.development'
})

if (!process.env.S3_BUCKET_NAME) throw new Error('S3_BUCKET_NAME missing in env');
if (!process.env.S3_ACCESS_KEY_ID) throw new Error('S3_ACCESS_KEY_ID missing in env');
if (!process.env.S3_SECRET_ACCESS_KEY) throw new Error('S3_SECRET_ACCESS_KEY missing in env');

async function main() {
  const client = new S3Client({
    endpoint: 'https://s3.us-west-000.backblazeb2.com',
    region: 'us-west-000',
    credentials: {
      accessKeyId: process.env.S3_ACCESS_KEY_ID!,
      secretAccessKey: process.env.S3_SECRET_ACCESS_KEY!
    }
  });

  const randomRoom = await getRandomRoom()
  console.log(`randomRoom=${randomRoom.name}`)

  // get the m3u8 playlist URL for the livestream
  const playlistUrl: string = await new Promise((resolve, reject) => {
    const ytdlp = spawn('yt-dlp', ['-g', randomRoom.url])
    let output = ''
    let errorOutput = ''
    ytdlp.on('error', (err) => {
      console.error(err)
    })
    ytdlp.once('exit', (code) => {
      console.log(`code=${code}`)
      // reject with the aggregated stderr text; JSON.stringify on the stderr stream itself prints nothing useful
      if (code !== 0) return reject(new Error(`yt-dlp exited with code ${code}. stderr as follows: ${errorOutput}`));
      resolve(output.trim())
    })
    ytdlp.stderr.on('data', (data) => {
      errorOutput += data.toString()
      console.error('stderr data as follows')
      console.error(data.toString())
    })
    // stdout can arrive in multiple chunks, so aggregate instead of overwriting
    ytdlp.stdout.on('data', (data) => {
      output += data.toString()
    })
  })
  console.log(`playlistUrl=${playlistUrl}`)
  if (!playlistUrl) throw new Error(`failed to get playlistUrl from yt-dlp -g ${randomRoom.url}`);

  let debugCounter = 0
  // let ffmpegLogStream = createWriteStream('/tmp/ffmpeg-log.txt')
  let uploadStream = new PassThrough()
  uploadStream.on('data', (data) => {
    debugCounter += data.length
    // log roughly once per MiB: only when the running total is within 1 KiB of a MiB boundary
    if (debugCounter % (1 * 1024 * 1024) < 1024) {
      console.log(`Received ${debugCounter} bytes (${prettyBytes(debugCounter)}) [${debugCounter % (1 * 1024 * 1024)}]`);
    }
  })
  // uploadStream.on('drain', () => {
  //   console.log('[drain] uploadStream')
  // })
  uploadStream.on('close', () => {
    console.log(`[close] uploadStream closed`)
  })
  uploadStream.on('error', (err) => {
    console.error('[error] uploadStream had an error as follows')
    console.error(err)
  })
  // n.b. 'exit', 'disconnect', and 'message' are ChildProcess events, not stream
  // events; a PassThrough never emits them, so those listeners were dead code and are omitted.

  const datestamp = new Date().toISOString()

  const ffmpegProc = spawn('ffmpeg', [
    // spawn() passes args verbatim (no shell), so the header value must not carry
    // its own quotes. ffmpeg also expects each header line to be CRLF-terminated.
    '-headers', `User-Agent: ${ua0}\r\n`,
    '-i', playlistUrl,
    '-c:v', 'copy',
    '-c:a', 'copy',
    // n.b. -movflags faststart was dropped; it only applies to mp4/mov outputs
    // and does nothing for mpegts written to a pipe.
    '-y',
    '-f', 'mpegts',
    '-loglevel', 'quiet',
    'pipe:1'
  ], {
    // ignoring stderr is important because otherwise ffmpeg fills that buffer and node hangs
    stdio: ['pipe', 'pipe', 'ignore']
  })

  console.log('the following is the ffmpegProc.stdout constructor name')
  console.log(ffmpegProc.stdout.constructor.name)
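
  // If we ever need ffmpeg's diagnostics, a hedged alternative to 'ignore' is to
  // keep stderr piped and continuously drain it, so its buffer can never fill and
  // stall node. A minimal sketch (not wired in; the /tmp path is just an example):
  //
  //   import { createWriteStream } from 'fs'
  //   const ffmpegLogStream = createWriteStream('/tmp/ffmpeg-log.txt')
  //   const proc = spawn('ffmpeg', [/* same args as above */], {
  //     stdio: ['pipe', 'pipe', 'pipe']
  //   })
  //   proc.stderr.pipe(ffmpegLogStream) // draining prevents the hang described above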

  // set up a pipeline: a readable stream (ffmpeg stdout) piped into the
  // PassThrough that the s3 Upload reads from. (The byte-count "debug" logging
  // happens via the 'data' listener on the PassThrough above, not a separate
  // transform stage; see the Transform sketch below.)
  pipeline(
    ffmpegProc.stdout,
    uploadStream,
    (err) => {
      if (err) {
        console.error(`pipeline errored.`)
        console.error(err)
      } else {
        console.log('pipeline succeeded.')
      }
    }
  )
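
  // If we wanted the debug byte counter to be a true transform stage in the
  // pipeline (rather than a 'data' listener), a minimal sketch looks like this
  // (not wired in; byteCounter is a hypothetical name):
  //
  //   import { Transform } from 'stream'
  //   const byteCounter = new Transform({
  //     transform(chunk, _encoding, callback) {
  //       debugCounter += chunk.length   // count the bytes flowing through
  //       callback(null, chunk)          // pass the chunk along unchanged
  //     }
  //   })
  //   pipeline(ffmpegProc.stdout, byteCounter, uploadStream, (err) => { /* ... */ })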

  // Create the S3 upload target. We stream the livestream bytes to the Backblaze B2 S3-compatible service.
  const keyName = `${datestamp}-${randomRoom.name}-chaturbate-${createId()}.ts`
  console.log(`keyName=${keyName}`)
  const target = {
    Bucket: process.env.S3_BUCKET_NAME,
    Key: keyName,
    Body: uploadStream
  }
  // greets https://stackoverflow.com/a/70159394/1004931
  try {
    const parallelUploads3 = new Upload({
      client: client,
      //tags: [...], // optional tags
      queueSize: 4, // optional concurrency configuration
      leavePartsOnError: false, // optional manually handle dropped parts
      params: target,
    });
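
    // lib-storage's Upload emits 'httpUploadProgress' as parts complete; a handy
    // cross-check against the byte counts logged by the PassThrough 'data' listener.
    parallelUploads3.on('httpUploadProgress', (progress) => {
      console.log(`[progress] uploaded ${progress.loaded ?? 0} bytes of ${progress.Key}`)
    })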

    await parallelUploads3.done();
  } catch (e) {
    if (e instanceof Error) {
      console.error(`while uploading a file to s3, we encountered an error`)
      throw new Error(e.message);
    } else {
      throw new Error(`error of some sort ${JSON.stringify(e, null, 2)}`)
    }
  }
}

main().catch((err) => {
  console.error(err)
  process.exit(1)
})