import { spawn } from 'child_process';
import { PassThrough, pipeline, Readable } from 'stream';
import prettyBytes from 'pretty-bytes';
import { Upload } from "@aws-sdk/lib-storage";
import { S3Client } from "@aws-sdk/client-s3";
import 'dotenv/config'

const ua0 = 'Mozilla/5.0 (X11; Linux x86_64; rv:105.0) Gecko/20100101 Firefox/105.0'

export interface RecordArgs {
  filename?: string;
  s3Client: S3Client;
  bucket: string;
  date?: string;
  inputStream: Readable;
  jobId: string;
  abortSignal: AbortSignal;
  onProgress: (fileSize: number) => void;
}

interface MakeS3ClientOptions {
  accessKeyId: string;
  secretAccessKey: string;
  region: string;
  endpoint: string;
}

interface GetFFmpegOptions {
  url: string;
}

export default class Record {
  private s3Client: S3Client;
  private uploadStream: PassThrough;
  inputStream: Readable;
  counter: number;
  bucket: string;
  keyName: string;
  datestamp: string;
  filename?: string;
  jobId: string;
  date?: string;
  abortSignal: AbortSignal;
  onProgress: (fileSize: number) => void;

  constructor({ inputStream, s3Client, bucket, jobId, abortSignal, onProgress }: RecordArgs) {
    if (!inputStream) throw new Error('Record constructor was missing inputStream.');
    if (!bucket) throw new Error('Record constructor was missing bucket.');
    if (!jobId) throw new Error('Record constructor was missing jobId.');
    if (!s3Client) throw new Error('Record constructor was missing s3Client.');
    if (!abortSignal) throw new Error('Record constructor was missing abortSignal.');
    this.inputStream = inputStream
    this.onProgress = onProgress
    this.s3Client = s3Client
    this.bucket = bucket
    this.jobId = jobId
    this.counter = 0
    this.datestamp = new Date().toISOString()
    this.keyName = `${this.datestamp}-${jobId}.ts`
    this.uploadStream = new PassThrough()
    this.abortSignal = abortSignal
    this.abortSignal.addEventListener("abort", this.abortEventListener.bind(this))
  }

  static makeS3Client({ accessKeyId, secretAccessKey, region, endpoint }: MakeS3ClientOptions): S3Client {
    const client = new S3Client({
      endpoint,
      region,
      credentials: {
        accessKeyId,
        secretAccessKey
      }
    })
    return client
  }

  static getFFmpegStream({ url }: GetFFmpegOptions): Readable {
    console.log(`getFFmpegStream using url=${url}`)
    const ffmpegProc = spawn('ffmpeg', [
      // spawn() passes arguments directly to ffmpeg (no shell), so the header
      // value must not carry its own quotes or ffmpeg receives them literally.
      '-headers', `User-Agent: ${ua0}`,
      '-i', url,
      '-c:v', 'copy',
      '-c:a', 'copy',
      '-movflags', 'faststart',
      '-y',
      '-f', 'mpegts',
      '-loglevel', 'quiet',
      'pipe:1'
    ], {
      // ignoring stderr is important because if we don't, ffmpeg will fill that buffer and node will hang
      stdio: ['pipe', 'pipe', 'ignore']
    })
    return ffmpegProc.stdout
  }
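  // For reference, the spawn above corresponds roughly to this shell invocation
  // (the quoting is shell syntax, not part of the argument values):
  //
  //   ffmpeg -headers "User-Agent: $UA0" -i "$URL" -c:v copy -c:a copy \
  //     -movflags faststart -y -f mpegts -loglevel quiet pipe:1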

  abortEventListener() {
    console.log('abortEventListener has been invoked. this.abortSignal is as follows:')
    console.log(this.abortSignal)
    const reason = this.abortSignal.reason
    console.log(`aborted the stream download with reason=${reason}`)
    this.inputStream.destroy(new Error(reason))
  }
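  // Caller-side sketch (illustrative; the AbortController lives outside this class):
  //
  //   const controller = new AbortController()
  //   const record = new Record({ /* ... */ abortSignal: controller.signal /* ... */ })
  //   controller.abort('stream went offline')  // fires abortEventListener above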

  async uploadToS3() {
    const target = {
      Bucket: this.bucket,
      Key: this.keyName,
      Body: this.uploadStream
    }

    // greets https://stackoverflow.com/a/70159394/1004931
    try {
      const parallelUploads3 = new Upload({
        client: this.s3Client,
        partSize: 1024 * 1024 * 5,
        queueSize: 1,
        leavePartsOnError: false,
        params: target,
      });

      parallelUploads3.on("httpUploadProgress", (progress) => {
        if (progress?.loaded) {
          if (this.onProgress) this.onProgress(this.counter);
          console.log(`uploaded ${progress.loaded} bytes (${prettyBytes(progress.loaded)})`);
        } else {
          console.log(`httpUploadProgress ${JSON.stringify(progress, null, 2)}`)
        }
      });

      console.log('Waiting for parallelUploads3 to finish...')
      await parallelUploads3.done();
      console.log('parallelUploads3 is complete.')

    } catch (e) {
      if (e instanceof Error) {
        // JSON.stringify on an Error serializes to '{}', so log the Error itself
        console.error('We were uploading a file to S3 but then we encountered an error!')
        console.error(e)
        throw e
      } else {
        throw new Error(`error of some sort ${JSON.stringify(e, null, 2)}`)
      }
    }
  }
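  // Note on how this interacts with start(): the Upload reads this.uploadStream
  // (a PassThrough) as its Body, buffering roughly partSize (5 MB) per part and
  // keeping at most queueSize (1) part in flight, so bytes reach S3 while the
  // recording is still in progress rather than after it finishes.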

  async start() {
    // streams setup
    this.uploadStream.on('data', (data) => {
      this.counter += data.length
    })
    this.uploadStream.on('close', () => {
      console.log('[!!!] upload stream has closed')
    })
    this.uploadStream.on('error', (e) => {
      console.error('there was an error on the uploadStream. error as follows')
      console.error(e)
    })
    // T.M.I.
    // this.uploadStream.on('drain', () => {
    //   console.info('[vvv] drain on uploadStream.')
    // })

    // input stream event handlers
    this.inputStream.on('close', () => {
      console.log('[!!!] input stream has closed.')
    })
    this.inputStream.on('error', (e) => {
      console.error('there was an error on the inputStream. error as follows')
      console.error(e)
    })
    this.inputStream.on('drain', () => {
      console.info('[vvv] drain on inputStream.')
    })

    // pipe the ffmpeg stream to the S3 upload stream.
    // this has the effect of uploading the stream to S3 at the same time we're recording it.
    pipeline(
      this.inputStream,
      this.uploadStream,
      (err) => {
        if (err) {
          console.error('pipeline errored.')
          console.error(err)
        } else {
          console.log('pipeline succeeded.')
        }
      }
    )

    // await this.saveToDisk()

    console.log('awaiting uploadToS3()...')
    await this.uploadToS3()
    console.log('uploadToS3() is complete.')

    return {
      jobId: this.jobId,
      keyName: this.keyName
    }
  }

  async stop() {
    throw new Error('@todo please implement')
  }
}
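
/*
 * Minimal usage sketch (illustrative only; the env var names are assumptions
 * made for this example, not values this module reads itself):
 *
 *   const s3Client = Record.makeS3Client({
 *     accessKeyId: process.env.S3_ACCESS_KEY_ID!,
 *     secretAccessKey: process.env.S3_SECRET_ACCESS_KEY!,
 *     region: process.env.S3_REGION!,
 *     endpoint: process.env.S3_ENDPOINT!,
 *   })
 *   const controller = new AbortController()
 *   const record = new Record({
 *     inputStream: Record.getFFmpegStream({ url: 'https://example.com/stream.m3u8' }),
 *     s3Client,
 *     bucket: process.env.S3_BUCKET!,
 *     jobId: 'job-123',
 *     abortSignal: controller.signal,
 *     onProgress: (fileSize) => console.log(`recorded ${fileSize} bytes so far`),
 *   })
 *   const { keyName } = await record.start()
 */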