use custom logger
parent f6db07fb97
commit 65b4e2f956
@@ -17,6 +17,17 @@ services:
       start_period: 10s
       timeout: 10s

+  pgbackweb:
+    image: 'eduardolat/pgbackweb:latest'
+    ports:
+      - '8085:8085'
+    env_file: ./../../.env.development.local
+    volumes:
+      - ${HOME}/.local/share/futureporn/backups:/backups
+    depends_on:
+      postgres:
+        condition: service_healthy
+

 volumes:
   pgdata:
@@ -1,7 +1,7 @@
 {
   "name": "futureporn",
   "private": true,
-  "version": "2.4.5",
+  "version": "2.4.6",
   "type": "module",
   "scripts": {
     "dev": "concurrently npm:dev:serve npm:dev:build npm:dev:worker npm:dev:compose npm:dev:sftp",
@@ -1,10 +1,11 @@
 import type { Task, Helpers } from "graphile-worker";
 import { cleanExpiredFiles } from "../utils/cache";
+import logger from "../utils/logger";

 const cleanup: Task = async (_payload, helpers: Helpers) => {
-  helpers.logger.debug(`cleanup begin.`);
+  logger.debug(`cleanup begin.`);
   let count = await cleanExpiredFiles()
-  if (count > 0) helpers.logger.info(`Deleted ${count} old files.`);
+  if (count > 0) logger.info(`Deleted ${count} old files.`);
 };

 export default cleanup;
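Every task touched by this commit now imports a shared module from ../utils/logger, which is not itself part of the diff. A minimal sketch of what such a module could look like, assuming a pino-based setup (the actual implementation may differ):

// utils/logger.ts (hypothetical sketch; the real module is not shown in this commit)
import pino from "pino";

// One shared logger instance for all tasks; level is driven by the environment.
const logger = pino({
  level: process.env.LOG_LEVEL ?? "info",
});

export default logger;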
@@ -226,7 +226,7 @@ async function updateTwitchChannelPointReward(user: User, reward: TwitchChannelP
 export default async function consolidate_twitch_channel_rewards(payload: any, helpers: Helpers) {
   assertPayload(payload);
   const { userId } = payload;
-  // helpers.logger.info(`Hello, ${name}`);
+  // logger.info(`Hello, ${name}`);

   const user = await prisma.user.findFirstOrThrow({
     where: {
@@ -278,8 +278,8 @@ export default async function consolidate_twitch_channel_rewards(payload: any, h
   const costMismatched = twitchRewardsData
     .filter((r: TwitchChannelPointReward) => isWrongRedeemCost(r, user.redeemCost));

-  helpers.logger.info(`There are ${outOfDate.length} out of date Channel Point Rewards. outOfDate=${JSON.stringify(outOfDate.map((ood) => ({ title: ood.title, cost: ood.cost, id: ood.id })))}`)
-  helpers.logger.info(`costMismatched=${JSON.stringify(costMismatched)}`)
+  logger.info(`There are ${outOfDate.length} out of date Channel Point Rewards. outOfDate=${JSON.stringify(outOfDate.map((ood) => ({ title: ood.title, cost: ood.cost, id: ood.id })))}`)
+  logger.info(`costMismatched=${JSON.stringify(costMismatched)}`)

   // * make the REST request(s) to get the twitch channel point rewards up-to-date
   for (const reward of outOfDate) {
@@ -69,37 +69,37 @@ async function preparePython(helpers) {
   try {
     pythonCmd = which.sync("python3");
   } catch {
-    helpers.logger.error("Python is not installed or not in PATH.");
+    logger.error("Python is not installed or not in PATH.");
     throw new Error("Python not found in PATH.");
   }

   // If venv doesn't exist, create it
   if (!existsSync(venvPath)) {
-    helpers.logger.info("Python venv not found. Creating one...");
+    logger.info("Python venv not found. Creating one...");

     try {
       await spawn(pythonCmd, ["-m", "venv", "venv"], {
         cwd: env.VIBEUI_DIR,
       });

-      helpers.logger.info("Python venv successfully created.");
+      logger.info("Python venv successfully created.");
     } catch (err) {
-      helpers.logger.error("Failed to create Python venv:", err);
+      logger.error("Failed to create Python venv:", err);

       // Clean up partially created venv if needed
       try {
         if (existsSync(venvPath)) {
           rmSync(venvPath, { recursive: true, force: true });
-          helpers.logger.warn("Removed broken venv directory.");
+          logger.warn("Removed broken venv directory.");
         }
       } catch (cleanupErr) {
-        helpers.logger.error("Error while cleaning up broken venv:", cleanupErr);
+        logger.error("Error while cleaning up broken venv:", cleanupErr);
       }

       throw new Error("Python venv creation failed. Check if python3 and python3-venv are installed.");
     }
   } else {
-    helpers.logger.info("Using existing Python venv.");
+    logger.info("Using existing Python venv.");
   }
 }

@@ -137,7 +137,7 @@ export async function buildFunscript(
   const labelDir = join(predictionOutput, 'labels');
   const yamlPath = join(predictionOutput, 'data.yaml');
   const outputPath = join(process.env.CACHE_ROOT ?? '/tmp', `${nanoid()}.funscript`);
-  helpers.logger.info('Starting Funscript generation');
+  logger.info('Starting Funscript generation');

   try {

@@ -151,7 +151,7 @@ export async function buildFunscript(

     return outputPath;
   } catch (error) {
-    helpers.logger.error(`Error generating Funscript: ${error instanceof Error ? error.message : 'Unknown error'}`);
+    logger.error(`Error generating Funscript: ${error instanceof Error ? error.message : 'Unknown error'}`);
     throw error;
   }
 }
@@ -225,19 +225,19 @@ async function loadClassPositionMap(data: DataYaml, helpers: Helpers): Promise<C
     const names = Object.values(data.names);
     for (const name of names) {
       if (typeof name !== 'string' || name.trim() === '') {
-        helpers.logger.info(`Skipping invalid class name: ${name}`);
+        logger.info(`Skipping invalid class name: ${name}`);
         continue;
       }
       if (!(name in positionMap)) {
-        helpers.logger.info(`No position mapping for class "${name}", defaulting to 0`);
+        logger.info(`No position mapping for class "${name}", defaulting to 0`);
         positionMap[name] = 0;
       }
     }

-    helpers.logger.info(`Loaded class position map: ${JSON.stringify(positionMap)}`);
+    logger.info(`Loaded class position map: ${JSON.stringify(positionMap)}`);
     return positionMap;
   } catch (error) {
-    helpers.logger.error(`Error loading data.yaml: ${error instanceof Error ? error.message : 'Unknown error'}`);
+    logger.error(`Error loading data.yaml: ${error instanceof Error ? error.message : 'Unknown error'}`);
     throw error;
   }
 }
@@ -276,7 +276,7 @@ function generatePatternPositions(startMs: number, durationMs: number, className

 async function loadVideoMetadata(videoPath: string, helpers: Helpers) {
   const { fps, frames: totalFrames } = await ffprobe(videoPath);
-  helpers.logger.info(`Video metadata: fps=${fps}, frames=${totalFrames}`);
+  logger.info(`Video metadata: fps=${fps}, frames=${totalFrames}`);
   return { fps, totalFrames };
 }

@@ -288,12 +288,12 @@ async function processLabelFiles(labelDir: string, helpers: Helpers, data: DataY
   for (const file of labelFiles) {
     const match = file.match(/(\d+)\.txt$/);
     if (!match) {
-      helpers.logger.info(`Skipping invalid filename: ${file}`);
+      logger.info(`Skipping invalid filename: ${file}`);
       continue;
     }
     const frameIndex = parseInt(match[1], 10);
     if (isNaN(frameIndex)) {
-      helpers.logger.info(`Skipping invalid frame index from filename: ${file}`);
+      logger.info(`Skipping invalid frame index from filename: ${file}`);
       continue;
     }

@@ -397,7 +397,7 @@ function generateActions(totalDurationMs: number, fps: number, detectionSegments
 async function writeFunscript(outputPath: string, actions: FunscriptAction[], helpers: Helpers) {
   const funscript: Funscript = { version: '1.0', actions };
   await writeFile(outputPath, JSON.stringify(funscript, null, 2));
-  helpers.logger.info(`Funscript generated: ${outputPath} (${actions.length} actions)`);
+  logger.info(`Funscript generated: ${outputPath} (${actions.length} actions)`);
 }


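For reference, the file written by writeFunscript above is plain JSON. The project's Funscript and FunscriptAction types are not shown in this diff; the interfaces below are a plausible reconstruction with made-up values:

// Illustrative shape of a funscript file; the values here are invented.
// "at" is a timestamp in milliseconds, "pos" a position from 0 to 100.
interface FunscriptAction { at: number; pos: number }
interface Funscript { version: string; actions: FunscriptAction[] }

const example: Funscript = {
  version: "1.0",
  actions: [
    { at: 0, pos: 0 },
    { at: 1500, pos: 100 },
  ],
};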
@@ -411,13 +411,13 @@ const createFunscript: Task = async (payload: any, helpers: Helpers) => {
   await preparePython(helpers)

   if (vod.funscript) {
-    helpers.logger.info(`Doing nothing-- vod ${vodId} already has a funscript.`);
+    logger.info(`Doing nothing-- vod ${vodId} already has a funscript.`);
     return;
   }

   if (!vod.sourceVideo) {
     const msg = `Cannot create funscript: Vod ${vodId} is missing a source video.`;
-    helpers.logger.warn(msg);
+    logger.warn(msg);
     throw new Error(msg);
   }

@@ -425,12 +425,12 @@ const createFunscript: Task = async (payload: any, helpers: Helpers) => {

   const s3Client = getS3Client();
   const videoFilePath = await getOrDownloadAsset(s3Client, env.S3_BUCKET, vod.sourceVideo);
-  helpers.logger.info(`Downloaded video to ${videoFilePath}`);
+  logger.info(`Downloaded video to ${videoFilePath}`);

-  helpers.logger.info(`Creating funscript for vod ${vodId}...`);
+  logger.info(`Creating funscript for vod ${vodId}...`);

   const predictionOutput = await inference(helpers, videoFilePath);
-  helpers.logger.info(`prediction output ${predictionOutput}`);
+  logger.info(`prediction output ${predictionOutput}`);


   const funscriptFilePath = await buildFunscript(helpers, predictionOutput, videoFilePath)
@@ -439,14 +439,14 @@ const createFunscript: Task = async (payload: any, helpers: Helpers) => {
   const s3Key = `funscripts/${vodId}.funscript`;
   const s3Url = await uploadFile(s3Client, env.S3_BUCKET, s3Key, funscriptFilePath, "application/json");

-  helpers.logger.info(`Uploaded funscript to S3: ${s3Url}`);
+  logger.info(`Uploaded funscript to S3: ${s3Url}`);

   await prisma.vod.update({
     where: { id: vodId },
     data: { funscript: s3Key }
   });

-  helpers.logger.info(`Funscript saved to database for vod ${vodId}`);
+  logger.info(`Funscript saved to database for vod ${vodId}`);
 };

 export default createFunscript;
@@ -7,7 +7,7 @@ import { getS3Client, uploadFile } from "../utils/s3";
 import { inference } from "../utils/vibeui";
 import { preparePython } from "../utils/python";
 import { buildFunscript } from "../utils/funscripts";
-
+import logger from "../utils/logger";

 interface Payload {
   vodId: string;
@@ -37,13 +37,13 @@ const createFunscript: Task = async (payload: any, helpers: Helpers) => {
   await preparePython()

   if (vod.funscript) {
-    helpers.logger.info(`Doing nothing-- vod ${vodId} already has a funscript.`);
+    logger.info(`Doing nothing-- vod ${vodId} already has a funscript.`);
     return;
   }

   if (!vod.sourceVideo) {
     const msg = `Cannot create funscript: Vod ${vodId} is missing a source video.`;
-    helpers.logger.warn(msg);
+    logger.warn(msg);
     throw new Error(msg);
   }

@@ -51,12 +51,12 @@ const createFunscript: Task = async (payload: any, helpers: Helpers) => {

   const s3Client = getS3Client();
   const videoFilePath = await getOrDownloadAsset(s3Client, env.S3_BUCKET, vod.sourceVideo);
-  helpers.logger.info(`Downloaded video to ${videoFilePath}`);
+  logger.info(`Downloaded video to ${videoFilePath}`);

-  helpers.logger.info(`Creating funscript for vod ${vodId}...`);
+  logger.info(`Creating funscript for vod ${vodId}...`);

   const predictionOutputPath = await inference(videoFilePath);
-  helpers.logger.info(`prediction output ${predictionOutputPath}`);
+  logger.info(`prediction output ${predictionOutputPath}`);


   const funscriptFilePath = await buildFunscript(predictionOutputPath, videoFilePath)
@@ -65,14 +65,14 @@ const createFunscript: Task = async (payload: any, helpers: Helpers) => {
   const s3Key = `funscripts/${vodId}.funscript`;
   const s3Url = await uploadFile(s3Client, env.S3_BUCKET, s3Key, funscriptFilePath, "application/json");

-  helpers.logger.info(`Uploaded funscript to S3: ${s3Url}`);
+  logger.info(`Uploaded funscript to S3: ${s3Url}`);

   await prisma.vod.update({
     where: { id: vodId },
     data: { funscript: s3Key }
   });

-  helpers.logger.info(`Funscript saved to database for vod ${vodId}`);
+  logger.info(`Funscript saved to database for vod ${vodId}`);
 };

 export default createFunscript;
@@ -33,13 +33,13 @@ const createFunscript: Task = async (payload: any, helpers: Helpers) => {
   const vod = await prisma.vod.findFirstOrThrow({ where: { id: vodId } });

   if (vod.funscript) {
-    helpers.logger.info(`Doing nothing-- vod ${vodId} already has a funscript.`);
+    logger.info(`Doing nothing-- vod ${vodId} already has a funscript.`);
     return;
   }

   if (!vod.sourceVideo) {
     const msg = `Cannot create funscript: Vod ${vodId} is missing a source video.`;
-    helpers.logger.warn(msg);
+    logger.warn(msg);
     throw new Error(msg);
   }

@@ -47,13 +47,13 @@ const createFunscript: Task = async (payload: any, helpers: Helpers) => {

   const s3Client = getS3Client();
   const videoFilePath = await getOrDownloadAsset(s3Client, env.S3_BUCKET, vod.sourceVideo);
-  helpers.logger.info(`Downloaded video to ${videoFilePath}`);
+  logger.info(`Downloaded video to ${videoFilePath}`);

-  helpers.logger.info(`Creating funscript for vod ${vodId}...`);
+  logger.info(`Creating funscript for vod ${vodId}...`);

   const modelPath = join(env.VIBEUI_DIR, 'vibeui.onnx')
   const predictionOutput = await vibeuiInference(modelPath, videoFilePath);
-  helpers.logger.info(`prediction output ${predictionOutput}`);
+  logger.info(`prediction output ${predictionOutput}`);
   const classes = await getModelClasses(modelPath)

   const funscriptFilePath = await buildFunscript(classes, predictionOutput, videoFilePath)
@@ -62,14 +62,14 @@ const createFunscript: Task = async (payload: any, helpers: Helpers) => {
   const s3Key = `funscripts/${vodId}.funscript`;
   const s3Url = await uploadFile(s3Client, env.S3_BUCKET, s3Key, funscriptFilePath, "application/json");

-  helpers.logger.info(`Uploaded funscript to S3: ${s3Url}`);
+  logger.info(`Uploaded funscript to S3: ${s3Url}`);

   await prisma.vod.update({
     where: { id: vodId },
     data: { funscript: s3Key }
   });

-  helpers.logger.info(`Funscript saved to database for vod ${vodId}`);
+  logger.info(`Funscript saved to database for vod ${vodId}`);
 };

 export default createFunscript;
@@ -11,6 +11,7 @@ import { mkdirp } from "fs-extra";
 import { listFilesRecursive } from "../utils/filesystem";
 import { getMimeType } from "../utils/mimetype";
 import { getNanoSpawn } from "../utils/nanoSpawn";
+import logger from "../utils/logger";

 const prisma = new PrismaClient().$extends(withAccelerate());

@@ -155,7 +156,7 @@ export default async function createHlsPlaylist(payload: any, helpers: Helpers)

   // * [x] exit if video.hlsPlaylist already defined
   if (vod.hlsPlaylist) {
-    helpers.logger.info(`Doing nothing-- vod ${vodId} already has a hlsPlaylist.`)
+    logger.info(`Doing nothing-- vod ${vodId} already has a hlsPlaylist.`)
     return; // Exit the function early
   }

@@ -163,32 +164,32 @@
     throw new Error(`Failed to create hlsPlaylist-- vod ${vodId} is missing a sourceVideo.`);
   }

-  helpers.logger.info(`Creating HLS Playlist.`)
+  logger.info(`Creating HLS Playlist.`)
   const s3Client = getS3Client()
   const taskId = nanoid()
   const workDirPath = join(env.CACHE_ROOT, taskId)
   const packageDirPath = join(workDirPath, 'package', 'hls')
   await mkdirp(packageDirPath)

-  helpers.logger.info("download source video from pull-thru cache")
+  logger.info("download source video from pull-thru cache")
   const videoFilePath = await getOrDownloadAsset(s3Client, env.S3_BUCKET, vod.sourceVideo)
-  helpers.logger.info(`videoFilePath=${videoFilePath}`)
+  logger.info(`videoFilePath=${videoFilePath}`)

-  helpers.logger.info("create ABR variants")
+  logger.info("create ABR variants")
   const variants = await createVariants(helpers, videoFilePath)
-  helpers.logger.info('variants as follows')
-  helpers.logger.info(JSON.stringify(variants))
+  logger.info('variants as follows')
+  logger.info(JSON.stringify(variants))


-  helpers.logger.info("run shaka packager")
+  logger.info("run shaka packager")
   const masterPlaylistPath = await packageHls(helpers, variants, packageDirPath)
-  helpers.logger.debug(`masterPlaylistPath=${masterPlaylistPath}`)
+  logger.debug(`masterPlaylistPath=${masterPlaylistPath}`)


-  helpers.logger.info('uploading assets')
+  logger.info('uploading assets')
   let assets = await listFilesRecursive(workDirPath)
-  helpers.logger.info('assets as follows')
-  helpers.logger.info(JSON.stringify(assets))
+  logger.info('assets as follows')
+  logger.info(JSON.stringify(assets))
   for (let i = 0; i < assets.length; i++) {
     const asset = assets[i]
     const s3Key = `package/${taskId}/hls/${basename(asset)}`
@@ -197,7 +198,7 @@ export default async function createHlsPlaylist(payload: any, helpers: Helpers)
   };


-  helpers.logger.info("generate thumbnail s3 key")
+  logger.info("generate thumbnail s3 key")
   const s3Key = `package/${taskId}/hls/master.m3u8`


@@ -7,6 +7,7 @@ import { S3Client } from "@aws-sdk/client-s3";
 import { getS3Client, uploadFile } from "../utils/s3";
 import { nanoid } from "nanoid";
 import { getNanoSpawn } from "../utils/nanoSpawn";
+import logger from "../utils/logger";

 const prisma = new PrismaClient().$extends(withAccelerate());

@@ -23,7 +24,7 @@ function getCidFromStdout(output: string) {
 }

 async function hash(helpers: Helpers, inputFilePath: string) {
-  helpers.logger.info(`createIpfsCid with inputFilePath=${inputFilePath}`)
+  logger.info(`createIpfsCid with inputFilePath=${inputFilePath}`)


   if (!inputFilePath) {
@@ -43,7 +44,7 @@ async function hash(helpers: Helpers, inputFilePath: string) {
   // console.error(`vcsi failed with exit code ${exitCode}`);
   // process.exit(exitCode);
   // }
-  helpers.logger.info(JSON.stringify(result))
+  logger.info(JSON.stringify(result))
   return getCidFromStdout(result.stdout)

 }
@@ -67,7 +68,7 @@ export default async function createIpfsCid(payload: any, helpers: Helpers) {

   // * [x] exit if video.thumbnail already defined
   if (vod.cidv1) {
-    helpers.logger.info(`Doing nothing-- vod ${vodId} already has a cidv1.`)
+    logger.info(`Doing nothing-- vod ${vodId} already has a cidv1.`)
     return; // Exit the function early
   }

@@ -76,19 +77,19 @@
   }


-  helpers.logger.info('Creating CID')
+  logger.info('Creating CID')
   const s3Client = getS3Client()

   // * [x] download video segments from pull-thru cache
   const videoFilePath = await getOrDownloadAsset(s3Client, env.S3_BUCKET, vod.sourceVideo)
-  helpers.logger.info(`videoFilePath=${videoFilePath}`)
+  logger.info(`videoFilePath=${videoFilePath}`)

   // * [x] run ipfs to get a CID
   const cidv1 = await hash(helpers, videoFilePath)

   if (!cidv1) throw new Error(`cidv1 ${cidv1} was falsy`);

-  helpers.logger.info(`cidv1=${cidv1}`)
+  logger.info(`cidv1=${cidv1}`)

   // * [x] update vod record
   await prisma.vod.update({
@@ -11,7 +11,6 @@ import logger from "../utils/logger";
 import { basename, join } from "node:path";
 import SftpClient from 'ssh2-sftp-client';

-
 const prisma = new PrismaClient().$extends(withAccelerate());


@@ -21,7 +20,7 @@ interface Payload {


 // async function createTorrent(payload: any, helpers: Helpers) {
-//   helpers.logger.debug(`createTorrent`)
+//   logger.debug(`createTorrent`)


 //   if (!inputFilePath) {
@@ -33,16 +32,16 @@ interface Payload {
 //   const spawn = await getNanoSpawn();


-//   helpers.logger.debug('result as follows')
-//   helpers.logger.debug(JSON.stringify(result, null, 2))
+//   logger.debug('result as follows')
+//   logger.debug(JSON.stringify(result, null, 2))

-//   helpers.logger.info(`✅ Thumbnail saved to: ${outputFilePath}`);
+//   logger.info(`✅ Thumbnail saved to: ${outputFilePath}`);
 //   return outputFilePath

 // }
 function assertPayload(payload: any): asserts payload is Payload {
   if (typeof payload !== "object" || !payload) throw new Error("invalid payload-- was not an object.");
-  if (typeof payload.vodId !== "string") throw new Error("invalid payload-- was missing vodId");
+  if (typeof payload.vodId !== "string") throw new Error(`invalid payload-- ${JSON.stringify(payload)} was missing vodId`);
 }


@@ -8,6 +8,7 @@ import { nanoid } from "nanoid";
 import { getNanoSpawn } from "../utils/nanoSpawn";
 import { preparePython } from "../utils/python";
 import { generateS3Path } from "../utils/formatters";
+import logger from "../utils/logger";

 const prisma = new PrismaClient().$extends(withAccelerate());

@@ -17,7 +18,7 @@ interface Payload {
 }

 async function createThumbnail(helpers: Helpers, inputFilePath: string) {
-  helpers.logger.debug(`createThumbnail with inputFilePath=${inputFilePath}`)
+  logger.debug(`createThumbnail with inputFilePath=${inputFilePath}`)


   if (!inputFilePath) {
@@ -49,10 +50,10 @@ async function createThumbnail(helpers: Helpers, inputFilePath: string) {
     cwd: env.APP_DIR,
   });

-  helpers.logger.debug('result as follows')
-  helpers.logger.debug(JSON.stringify(result, null, 2))
+  logger.debug('result as follows')
+  logger.debug(JSON.stringify(result, null, 2))

-  helpers.logger.info(`✅ Thumbnail saved to: ${outputFilePath}`);
+  logger.info(`✅ Thumbnail saved to: ${outputFilePath}`);
   return outputFilePath

 }
@@ -84,7 +85,7 @@ export default async function createVideoThumbnail(payload: any, helpers: Helper

   // * [x] exit if video.thumbnail already defined
   if (vod.thumbnail) {
-    helpers.logger.info(`Doing nothing-- vod ${vodId} already has a thumbnail.`)
+    logger.info(`Doing nothing-- vod ${vodId} already has a thumbnail.`)
     return; // Exit the function early
   }

@@ -93,7 +94,7 @@ export default async function createVideoThumbnail(payload: any, helpers: Helper
   }


-  helpers.logger.info('Creating Video Thumbnail')
+  logger.info('Creating Video Thumbnail')
   const s3Client = getS3Client()

   // * [x] download video segments from pull-thru cache
@@ -1,11 +1,12 @@
 import type { Task, Helpers } from "graphile-worker";
 import { PrismaClient } from "../../generated/prisma";
 import { withAccelerate } from "@prisma/extension-accelerate";
+import logger from "../utils/logger";

 const prisma = new PrismaClient().$extends(withAccelerate());

 const findWork: Task = async (_payload, helpers: Helpers) => {
-  helpers.logger.info(`findWork begin.`);
+  logger.info(`findWork begin.`);

   const approvedUploads = await prisma.vod.findMany({
     where: {
@@ -16,14 +17,14 @@ const findWork: Task = async (_payload, helpers: Helpers) => {
     },
   });

-  helpers.logger.info(`findWork found ${approvedUploads.length} uploads.`);
+  logger.info(`findWork found ${approvedUploads.length} uploads.`);
   for (let i = 0; i < approvedUploads.length; i++) {
     const vod = approvedUploads[i];
     await helpers.addJob("scheduleVodProcessing", { vodId: vod.id });
-    helpers.logger.info(`scheduleVodProcessing for vod ${vod.id}`);
+    logger.info(`scheduleVodProcessing for vod ${vod.id}`);
   }

-  helpers.logger.info(`findWork finished.`);
+  logger.info(`findWork finished.`);
 };

 export default findWork;
@@ -8,6 +8,7 @@ import { createReadStream } from "node:fs";
 import { getOrDownloadAsset } from "../utils/cache";
 import { env } from "../config/env";
 import { getS3Client } from "../utils/s3";
+import logger from "../utils/logger";

 const prisma = new PrismaClient();

@@ -19,7 +20,7 @@ const client = getS3Client()

 const generateVideoChecksum: Task = async (payload: unknown, helpers) => {
   const { vodId } = payload as Payload;
-  helpers.logger.info(`Generating checksum for VOD ${vodId}`);
+  logger.info(`Generating checksum for VOD ${vodId}`);

   // 1. Get VOD record with source video path
   const vod = await prisma.vod.findUnique({
@@ -33,7 +34,7 @@ const generateVideoChecksum: Task = async (payload: unknown, helpers) => {

   // 2. Verify file exists
   const videoPath = await getOrDownloadAsset(client, env.S3_BUCKET, vod.sourceVideo)
-  helpers.logger.info(`videoPath=${videoPath}`)
+  logger.info(`videoPath=${videoPath}`)

   try {
     await access(videoPath);
@@ -52,7 +53,7 @@ const generateVideoChecksum: Task = async (payload: unknown, helpers) => {
   );

   const checksum = hash.digest('hex');
-  helpers.logger.info(`Generated checksum for ${path.basename(vod.sourceVideo)}: ${checksum}`);
+  logger.info(`Generated checksum for ${path.basename(vod.sourceVideo)}: ${checksum}`);

   // 4. Update VOD record
   await prisma.vod.update({
@@ -61,7 +62,7 @@ const generateVideoChecksum: Task = async (payload: unknown, helpers) => {
   });

   } catch (err) {
-    helpers.logger.error(`Failed to generate checksum: ${err.message}`);
+    logger.error(`Failed to generate checksum: ${err.message}`);
     throw err; // Will trigger retry if configured
   }
 };

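The checksum task above appears to stream the source video through a hash rather than reading the whole file into memory (the streaming call itself sits outside these hunks). A self-contained sketch of that pattern with Node's built-ins, assuming SHA-256 is the algorithm in use:

// Streaming file checksum: an illustrative sketch, not the task's exact code.
import { createHash } from "node:crypto";
import { createReadStream } from "node:fs";
import { pipeline } from "node:stream/promises";

async function sha256File(path: string): Promise<string> {
  const hash = createHash("sha256");
  // pipeline() pushes the file through the hash chunk by chunk,
  // so memory use stays constant regardless of video size.
  await pipeline(createReadStream(path), hash);
  return hash.digest("hex");
}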
@@ -1,7 +1,7 @@
 // src/tasks/hello.ts

 import type { Task, Helpers } from "graphile-worker";
-
+import logger from "../utils/logger";

 interface Payload {
   name: string;
@@ -16,6 +16,6 @@ function assertPayload(payload: any): asserts payload is Payload {
 export default async function hello(payload: any, helpers: Helpers) {
   assertPayload(payload);
   const { name } = payload;
-  helpers.logger.info(`Helloooooo, ${name}`);
+  logger.info(`Helloooooo, ${name}`);
 };

@@ -1,7 +1,7 @@
 import type { Task, Job } from "graphile-worker";
 import { PrismaClient } from "../../generated/prisma";
 import { withAccelerate } from "@prisma/extension-accelerate";
-
+import logger from '../utils/logger';

 interface Payload {
   vodId: string;
@@ -27,11 +27,11 @@ const scheduleVodProcessing: Task = async (payload: unknown, helpers) => {
   }

   const { vodId } = payload;
-  helpers.logger.info(`Starting processing for VOD ${vodId}`);
+  logger.info(`Starting processing for VOD ${vodId}`);

   const vod = await prisma.vod.findUnique({ where: { id: vodId } });
   if (!vod) {
-    helpers.logger.error(`VOD not found: ${vodId}`);
+    logger.error(`VOD not found: ${vodId}`);
     return;
   }

@@ -57,7 +57,7 @@ const scheduleVodProcessing: Task = async (payload: unknown, helpers) => {
       data: { status: "processing" }
     });

-    helpers.logger.info(`Scheduled ${changes} jobs for VOD ${vodId}`);
+    logger.info(`Scheduled ${changes} jobs for VOD ${vodId}`);

     // Schedule next check
     // @huh? @todo IDK what is up with this, but it seems to run right away even though it has the runAt defined.
@@ -72,7 +72,7 @@ const scheduleVodProcessing: Task = async (payload: unknown, helpers) => {
       where: { id: vodId },
       data: { status: "processed" }
     });
-    helpers.logger.info(`All processing complete for VOD ${vodId}`);
+    logger.info(`All processing complete for VOD ${vodId}`);
   }
 };

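Worth noting as an alternative to importing the custom logger in every task: graphile-worker can also route its own helpers.logger output through a custom destination by handing a Logger to the runner. A sketch of that approach, assuming the worker is started with run() (file layout and option values here are illustrative, not taken from this repository):

// worker entrypoint: an illustrative sketch, not part of this commit
import { run, Logger } from "graphile-worker";
import logger from "./utils/logger";

// Forward graphile-worker's internal log calls to the shared logger.
const workerLogger = new Logger((scope) => (level, message, meta) => {
  logger.info({ graphileLevel: level, scope, meta }, message);
});

async function main() {
  await run({
    connectionString: process.env.DATABASE_URL,
    taskDirectory: "./dist/tasks", // illustrative path
    logger: workerLogger,
  });
}

main().catch((err) => {
  logger.error(err);
  process.exit(1);
});

With this wiring, tasks that keep calling helpers.logger would still end up writing through the custom logger; the commit instead opts for importing the module directly in each task.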
@@ -35,12 +35,6 @@
       <td>✅</td>
       <td>✅</td>
     </tr>
-    <tr>
-      <td>API</td>
-      <td>✅</td>
-      <td>✅</td>
-      <td>✅</td>
-    </tr>
     <tr>
       <td>Torrent Downloads</td>
       <td>✅</td>
@@ -53,6 +47,12 @@
       <td>✅</td>
       <td>✅</td>
     </tr>
+    <tr>
+      <td>API</td>
+      <td>❌</td>
+      <td>✅</td>
+      <td>✅</td>
+    </tr>
     <tr>
       <td>Ad-Free</td>
       <td>❌</td>