switch to pino
commit caab230807
parent 0d53e49d89
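Replace ad-hoc console.log/console.error calls with a shared pino logger, move the Prisma role assignment from connect to connectOrCreate so missing Role rows are created on the fly, and delete the superseded consolidate_twitch_channel_rewards task along with the abandoned onnx inference test.

The src/utils/logger.ts module imported throughout is not included in this diff. A minimal sketch of what it might look like, assuming a default pino export and an optional LOG_LEVEL environment variable (both assumptions -- the real module is not shown):

    // src/utils/logger.ts (hypothetical sketch; the actual module is not in this diff)
    import pino from 'pino'

    // 'trace' and 'debug' sit below pino's default 'info' level, so the
    // logger.trace/logger.debug calls added in this commit stay silent
    // unless the level is lowered, e.g. LOG_LEVEL=debug
    const logger = pino({
      level: process.env.LOG_LEVEL ?? 'info',
    })

    export default logger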
.gitignore (vendored, 4 changes)
@@ -1,3 +1,7 @@
+venv/
+
+.direnv/
+
 backups

 .kamal/secrets*
@@ -36,7 +36,6 @@ const EnvSchema = z.object({
 });

 const parsed = EnvSchema.safeParse(process.env);
-// console.log(parsed)

 if (!parsed.success) {
   console.error('❌ Invalid environment variables:', parsed.error.flatten().fieldErrors);
@@ -9,6 +9,7 @@ import { PatreonUserResponse, PatreonIncluded, RoleName } from '../types/index'
 import {
   getRoles,
 } from '../utils/patreonTiers';
+import logger from '../utils/logger.ts'



@@ -53,17 +54,17 @@ export default async function authRoutes(

    generateStateFunction: (request) => {
      const state = random(16)
-      console.log('we are generating a state value. state=' + state)
+      logger.debug('we are generating a state value. state=' + state)
      request.session.set('state', state)
      request.session.set('test', 'hello worldy')
      return state
    },

    checkStateFunction: (request: FastifyRequest<OAuthQuery>, callback) => {
-      console.log(`we are checking the state value.`)
+      logger.debug(`we are checking the state value.`)
      const queryState = request.query.state
      const sessionState = request.session.get('state')
-      console.log(`queryState=${queryState} sessionState=${sessionState}`)
+      logger.debug(`queryState=${queryState} sessionState=${sessionState}`)
      if (queryState !== sessionState) {
        callback(new Error('Invalid state'))
      }
@@ -84,8 +85,8 @@ export default async function authRoutes(

      const { token } = await fastify.patreonOAuth2.getAccessTokenFromAuthorizationCodeFlow(request);

-      console.log('patreon token as follows')
-      console.log(token)
+      logger.debug('patreon token as follows')
+      logger.debug(token)

      // get patreon user data from patreon

@@ -98,17 +99,17 @@ export default async function authRoutes(

      const data = await res.json() as PatreonUserResponse

-      console.log('patreon user data as follows')
-      console.log(JSON.stringify(data))
+      logger.debug('patreon user data as follows')
+      logger.debug(JSON.stringify(data))


      // request.session.set('patreonAccessToken', token)
      const patreonUserData = data.data
-      console.log(patreonUserData)
+      logger.debug(patreonUserData)
      // request.session.set('patreonUserId', patreonUserData.id)

      const roles = getRoles(data)
-      console.log(`patreon user ${patreonUserData.id} is being assigned roles=${JSON.stringify(roles)}`)
+      logger.debug(`patreon user ${patreonUserData.id} is being assigned roles=${JSON.stringify(roles)}`)

      // create or update user in db
      const upsertedUser = await prisma.user.upsert({
@@ -120,14 +121,20 @@ export default async function authRoutes(
          patreonFullName: patreonUserData.attributes.full_name,
          imageUrl: patreonUserData.attributes.image_url,
          roles: {
-            connect: roles.map((role) => ({ name: role }))
+            connectOrCreate: roles.map((role) => ({
+              where: { name: role },
+              create: { name: role },
+            }))
          }
        },
        update: {
          patreonFullName: patreonUserData.attributes.full_name,
          imageUrl: patreonUserData.attributes.image_url,
          roles: {
-            connect: roles.map((role) => ({ name: role }))
+            connectOrCreate: roles.map((role) => ({
+              where: { name: role },
+              create: { name: role },
+            }))
          }
        },
      });
@@ -6,6 +6,7 @@ import { env } from '../config/env'
 import { type FastifyInstance, type FastifyReply, type FastifyRequest } from 'fastify'
 import crypto from 'crypto'
 import { type IncomingHttpHeaders } from 'http';
+import logger from './utils/logger.ts'


 export interface ChannelPointRedemptionEvent {
@@ -90,13 +91,13 @@ export default async function redeemsRoutes(


  fastify.post('/eventsub', async (request: FastifyRequest, reply: FastifyReply) => {
-    console.log('eventsub ablagafkadlfijaldf ')
+    logger.debug('eventsub ablagafkadlfijaldf ')
    const secret = getSecret();
    const rawBody = request.body;
    const headers = request.headers;
-    console.log(headers)
+    logger.debug(headers)

-    console.log(`twitch_message_timestamp=${getHeader(headers, TWITCH_MESSAGE_TIMESTAMP)}`)
+    logger.debug(`twitch_message_timestamp=${getHeader(headers, TWITCH_MESSAGE_TIMESTAMP)}`)

    const message =
      getHeader(headers, TWITCH_MESSAGE_ID) +
@@ -107,7 +108,7 @@ export default async function redeemsRoutes(
    const hmac = HMAC_PREFIX + getHmac(secret, message);

    if (verifyMessage(hmac, getHeader(headers, TWITCH_MESSAGE_SIGNATURE))) {
-      console.log('signatures match');
+      logger.debug('signatures match');

      if (!(rawBody instanceof Buffer)) {
        throw new Error("Expected rawBody to be a Buffer");
@@ -118,18 +119,18 @@ export default async function redeemsRoutes(
      const messageType = headers[MESSAGE_TYPE];

      if (messageType === MESSAGE_TYPE_NOTIFICATION) {
-        console.log(`Event type: ${notification.subscription.type}`);
-        console.log(JSON.stringify(notification.event, null, 4));
+        logger.debug(`Event type: ${notification.subscription.type}`);
+        logger.debug(JSON.stringify(notification.event, null, 4));

        if (notification.subscription.type === 'channel.channel_points_custom_reward_redemption.add') {
          const event = notification.event as ChannelPointRedemptionEvent
-          console.log(`looking for reward id ${event.reward.id}`)
+          logger.debug(`looking for reward id ${event.reward.id}`)
          const pick = await prisma.pick.findFirstOrThrow({
            where: {
              twitchChannelPointRewardId: event.reward.id
            }
          })
-          console.log(`looking for broadcaster user id =${event.broadcaster_user_id}`)
+          logger.debug(`looking for broadcaster user id =${event.broadcaster_user_id}`)
          const user = await prisma.user.findFirstOrThrow({
            where: {
              twitchId: event.broadcaster_user_id
@@ -149,16 +150,16 @@ export default async function redeemsRoutes(
      } else if (messageType === MESSAGE_TYPE_VERIFICATION) {
        return reply.type('text/plain').code(200).send(notification.challenge);
      } else if (messageType === MESSAGE_TYPE_REVOCATION) {
-        console.log(`${notification.subscription.type} notifications revoked!`);
-        console.log(`reason: ${notification.subscription.status}`);
-        console.log(`condition: ${JSON.stringify(notification.subscription.condition, null, 4)}`);
+        logger.debug(`${notification.subscription.type} notifications revoked!`);
+        logger.debug(`reason: ${notification.subscription.status}`);
+        logger.debug(`condition: ${JSON.stringify(notification.subscription.condition, null, 4)}`);
        return reply.code(204).send();
      } else {
-        console.log(`Unknown message type: ${messageType}`);
+        logger.debug(`Unknown message type: ${messageType}`);
        return reply.code(204).send();
      }
    } else {
-      console.log('403 - Invalid signature');
+      logger.debug('403 - Invalid signature');
      return reply.code(403).send();
    }
  });
@@ -2,6 +2,7 @@ import fp from 'fastify-plugin'
 import { type FastifyPluginAsync } from 'fastify'
 import { PgPubSub } from '@imqueue/pg-pubsub'
 import { env } from '../config/env'
+import logger from '../utils/logger'

 declare module 'fastify' {
   interface FastifyInstance {
@@ -16,7 +17,7 @@ const pubsubPlugin: FastifyPluginAsync = async (fastify) => {
    connectionString: env.DATABASE_URL,
  })

-  await pubsub.connect().catch(err => console.error('PubSub error:', err));
+  await pubsub.connect().catch(err => logger.error('PubSub error:', err));

  fastify.decorate('pubsub', pubsub)

@@ -27,9 +28,9 @@ const pubsubPlugin: FastifyPluginAsync = async (fastify) => {


  pubsub.on('connect', () => {
-    console.log('✅ PubSub connected');
+    logger.debug('✅ PubSub connected');
  });
-  pubsub.on('end', () => console.warn('pubsub Connection closed!'));
+  pubsub.on('end', () => logger.warn('pubsub Connection closed!'));

 }
 // Export plugin with metadata
@@ -14,6 +14,7 @@ import { slug } from '../utils/formatters'
 import { run } from '../utils/remove-bg'
 import type { OnBehalfQuery } from '../types';
 import { getTargetUser } from '../utils/authorization'
+import logger from "../utils/logger";


 const prisma = new PrismaClient().$extends(withAccelerate())
@@ -54,7 +55,7 @@ export default async function streamsRoutes(

    if (!user?.id) throw new Error('failed to lookup user. please log in and try again.');

-    console.log(`Received /image data. filename=${data.filename}, mimetype=${data.mimetype}, waifu-name=${data.fields['waifu-name']}, remove-bg=${data.fields['remove-bg']}`, data);
+    logger.debug(`Received /image data. filename=${data.filename}, mimetype=${data.mimetype}, waifu-name=${data.fields['waifu-name']}, remove-bg=${data.fields['remove-bg']}`, data);

    let tmpFile = path.join(tmpdir(), nanoid());
    await pipeline(data.file, fs.createWriteStream(tmpFile));
@@ -77,7 +78,7 @@ export default async function streamsRoutes(
  }

  function getS3Key(waifuName: string, filename: string, isWebp: boolean) {
-    console.log(`getS3Key called with ${waifuName} ${filename} ${isWebp}`)
+    logger.debug(`getS3Key called with ${waifuName} ${filename} ${isWebp}`)
    const ext = (isWebp) ? 'webp' : filename.split('.').pop()?.toLowerCase();
    return `img/${nanoid()}/${slug(waifuName).substring(0, 24)}.${ext}`
  }
@@ -92,16 +93,16 @@ export default async function streamsRoutes(
    }

    const uploadedAsset = await uploadFile(s3Client, s3Resource, pre, mimetype)
-    console.log('uploadedAsset as follows')
-    console.log(uploadedAsset)
+    logger.debug('uploadedAsset as follows')
+    logger.debug(uploadedAsset)

    const idk = await createRecord(waifuName, uploadedAsset.key, user.id)
-    console.log('idk as follows')
-    console.log(idk)
+    logger.debug('idk as follows')
+    logger.debug(idk)

    const url = buildUrl(idk.imageS3Key)
-    // console.log('url as follows')
-    // console.log(url)
+    logger.trace('url as follows')
+    logger.trace(url)


    reply.send({
@@ -120,7 +121,7 @@ export default async function streamsRoutes(
        authorId
      },
    })
-    console.log(newWaifu)
+    logger.debug(newWaifu)
    return newWaifu
  }

@@ -150,7 +151,7 @@ export default async function streamsRoutes(

    const userId = targetUser.id
    const { waifuId } = request.body as { waifuId: number }
-    console.log(`userId=${userId}, waifuId=${waifuId}`)
+    logger.debug(`userId=${userId}, waifuId=${waifuId}`)

    if (!userId) {
      return reply.code(400).send({ error: 'Missing userId' })
@@ -174,7 +175,7 @@ export default async function streamsRoutes(
      }
    })

-    console.log("~~~~~~ adding graphileWorker job")
+    logger.debug("~~~~~~ adding graphileWorker job")
    await fastify.graphileWorker.addJob('consolidate_twitch_channel_rewards', { userId })


@@ -184,7 +185,7 @@ export default async function streamsRoutes(
  fastify.delete('/picks', async function (request: FastifyRequest, reply: FastifyReply) {
    const userId = request.session.get('user_id')
    const { pickId } = request.body as { pickId: number }
-    console.log(`userId=${userId}, pickId=${pickId}`)
+    logger.debug(`userId=${userId}, pickId=${pickId}`)

    if (!userId || !pickId) {
      return reply.code(400).send({ error: 'Missing userId or pickId' })
@@ -224,7 +225,7 @@ export default async function streamsRoutes(

  fastify.post('/redeems', async function (request, reply) {
    const targetUser = await getTargetUser(request, reply);
-    console.log(`we are creating a redeem and the targetuser is id=${targetUser.id}`)
+    logger.debug(`we are creating a redeem and the targetuser is id=${targetUser.id}`)

    const { pickId } = request.body as { pickId: number };

@@ -250,7 +251,7 @@ export default async function streamsRoutes(

  fastify.delete('/redeems', async function (request, reply) {
    const targetUser = await getTargetUser(request, reply);
-    console.log(`we are deleting redeems and the targetuser is id=${targetUser.id}`)
+    logger.debug(`we are deleting redeems and the targetuser is id=${targetUser.id}`)


    await prisma.user.update({
@@ -399,7 +400,7 @@ export default async function streamsRoutes(
  // const eventEmitter = new EventEmitter();

  // const interval = setInterval(() => {
-  //   // console.log('> intervalling ' + nanoid())
+  //   // logger.debug('> intervalling ' + nanoid())
  //   eventEmitter.emit('update', {
  //     name: 'tick',
  //     time: new Date(),
@@ -409,7 +410,7 @@ export default async function streamsRoutes(

  // // Async generator producing SSE events
  // const asyncIterable = (async function* () {
-  //   console.log('iterating!')
+  //   logger.debug('iterating!')
  //   for await (const [event] of on(eventEmitter, 'update', {})) {
  //     yield {
  //       event: event.name,
@@ -20,6 +20,7 @@ import { isUnprivilegedUser } from '../utils/privs'
 import { getS3Client } from '../utils/s3'
 import { UppyFile } from '../types/index'
 import mime from 'mime-types'
+import logger from '../utils/logger'

 interface MultipartBody {
   type: string
@@ -214,10 +215,10 @@ export default async function uploadsRoutes(
      notes?: string;
      vtuberIds?: string[];
    };
-    console.log(body)
-    console.log('uppyResult as follows')
-    console.log(body.uppyResult)
-    console.log(`Array.isArray(body.vtuberIds)=${Array.isArray(body.vtuberIds)}`)
+    logger.debug(body)
+    logger.debug('uppyResult as follows')
+    logger.debug(body.uppyResult)
+    logger.debug(`Array.isArray(body.vtuberIds)=${Array.isArray(body.vtuberIds)}`)


    const userId = request.session.get('userId');
@@ -280,8 +281,8 @@ export default async function uploadsRoutes(

    if (!userId) return reply.status(401).send('Failed to find userId in session. Please log-in and try again.');

-    // console.log('data as fllows')
-    // console.log(data)
+    // logger.debug('data as fllows')
+    // logger.debug(data)

    if (isUnprivilegedUser(user)) {
      return reply.status(403).send('Upload failed-- user is not a patron');
@@ -297,8 +298,8 @@ export default async function uploadsRoutes(
    // I *think* the correct behavior for us is to ignore all but the last batch.
    let mostRecentUploadBatch = data.at(-1)
    if (!mostRecentUploadBatch) throw new Error('mostRecentUploadBatch not found');
-    // console.log('mostRecentUploadBatch as follows')
-    // console.log(mostRecentUploadBatch)
+    // logger.debug('mostRecentUploadBatch as follows')
+    // logger.debug(mostRecentUploadBatch)


    if (mostRecentUploadBatch.failed.length > 0) {
@@ -464,7 +465,7 @@ export default async function uploadsRoutes(
  //     }
  //   })

-  //   console.log(`Upload ${uploadId} status updated to ${status}`);
+  //   logger.debug(`Upload ${uploadId} status updated to ${status}`);
  //   return upload
  // }

@@ -570,7 +571,7 @@ export default async function uploadsRoutes(
    }

    if ((status !== 'pending' && status !== 'ordering') && !userIsModerator) {
-      console.log(`status=${status} and !userIsModerator=${!userIsModerator}`)
+      logger.debug(`status=${status} and !userIsModerator=${!userIsModerator}`)
      return reply.status(403).send('Only moderators can update status.');
    }

@@ -625,7 +626,7 @@ export default async function uploadsRoutes(

    const { filename, contentType } = extractFileParameters(request)
    validateFileParameters(filename, contentType)
-    console.log(`User ${user.id} is uploading ${filename} (${contentType}).`);
+    logger.debug(`User ${user.id} is uploading ${filename} (${contentType}).`);
    const ext = mime.extension(contentType)
    const Key = generateS3Key(ext)

@@ -685,7 +686,7 @@ export default async function uploadsRoutes(
        uploadId: data.UploadId,
      })
    } catch (err) {
-      console.error(err)
+      logger.error(err)
      request.log.error(err)
      reply.code(500).send({ error: 'Failed to create multipart upload' })
    }
@@ -696,7 +697,7 @@ export default async function uploadsRoutes(
    const client = getS3Client()
    const { uploadId } = request.params as { uploadId: string }
    const { key } = request.query as { key?: string }
-    console.log(`s3 multipart with uploadId=${uploadId}, key=${key}`)
+    logger.debug(`s3 multipart with uploadId=${uploadId}, key=${key}`)

    if (typeof key !== 'string') {
      reply.status(400).send({
@@ -781,7 +782,7 @@ export default async function uploadsRoutes(
  fastify.post('/s3/multipart/:uploadId/complete', async (request, reply) => {

    const userId = request.session.get('userId')
-    console.log(`userId=${userId}`)
+    logger.debug(`userId=${userId}`)
    if (!userId) return reply.status(401).send('User id not found in session. Please log-in.');

    const client = getS3Client()
@@ -823,8 +824,8 @@ export default async function uploadsRoutes(
        location: data.Location,
      })
    } catch (err) {
-      console.error('there was an error during CompleteMultipartUploadCommand.')
-      console.error(err)
+      logger.error('there was an error during CompleteMultipartUploadCommand.')
+      logger.error(err)
      reply.send(err)
    }
  })
@@ -3,6 +3,7 @@ import { isEditorAuthorized } from '../utils/authorization'
 import { OnBehalfQuery } from '../types'
 import { PrismaClient, type User } from '../../generated/prisma'
 import { withAccelerate } from "@prisma/extension-accelerate"
+import logger from "../utils/logger";

 const prisma = new PrismaClient().$extends(withAccelerate())

@@ -24,12 +25,12 @@ export default async function usersRoutes(

    // const results = await api.dbViewRow.create("noco", NOCO_BASE, NOCO_WAIFUS_TABLE, NOCO_DEFAULT_VIEW, waifus)
    //   .catch(err => {
-    //     console.error("Failed to create waifus:", JSON.stringify(waifus), err);
+    //     logger.error("Failed to create waifus:", JSON.stringify(waifus), err);
    //     return null;
    //   })

    const user = request.session.get('user')
-    console.log(user)
+    logger.debug(user)
    reply.send(user)
  })

@@ -42,7 +43,7 @@ export default async function usersRoutes(
      where: { id: userId }
    });

-    console.log('onbehalfof=' + onBehalfOf)
+    logger.debug('onbehalfof=' + onBehalfOf)

    // Determine which user is being updated
    const targetUser = onBehalfOf
@@ -71,10 +72,10 @@ export default async function usersRoutes(
      data.modsAreEditors = Boolean(raw.modsAreEditors);
    }

-    console.log('>>> data s follows')
-    console.log(data)
-    console.log('target user is as follows')
-    console.log(targetUser)
+    logger.debug('>>> data s follows')
+    logger.debug(data)
+    logger.debug('target user is as follows')
+    logger.debug(targetUser)

    await prisma.user.update({
      where: { id: targetUser.id },
@@ -7,6 +7,7 @@ import { isUnprivilegedUser } from "../utils/privs";
 import { slug } from "../utils/formatters";
 import type { UploadResult } from '../types/index'
 import { env } from "../config/env";
+import logger from "../utils/logger";

 const prisma = new PrismaClient().$extends(withAccelerate())
 const hexColorRegex = /^#([0-9a-fA-F]{6})$/;
@@ -21,7 +22,7 @@ export default async function vtubersRoutes(

  const vtuberIndexHandler = async (request: FastifyRequest, reply: FastifyReply) => {
    const userId = request.session.get('userId')
-    console.log(`userId=${userId}`)
+    logger.debug(`userId=${userId}`)

    let user = null
    if (userId !== undefined) {
@@ -60,7 +61,7 @@ export default async function vtubersRoutes(
      include: { roles: true },
    });

-    console.log(user)
+    logger.debug(user)
    if (isUnprivilegedUser(user)) {
      return reply.status(403).send('Only patrons and moderators can add new vtubers');
    }
@@ -186,8 +187,8 @@ export default async function vtubersRoutes(

    if (!userId) return reply.status(401).send('Failed to find userId in session. Please log-in and try again.');

-    // console.log('data as fllows')
-    // console.log(data)
+    // logger.debug('data as fllows')
+    // logger.debug(data)

    if (isUnprivilegedUser(user)) {
      return reply.status(403).send('Upload failed-- user does not have contributor privs');
@@ -200,8 +201,8 @@ export default async function vtubersRoutes(
    // Also it's just how Uppy formats the data, so we have to handle the array of upload iterations.
    // I *think* the correct behavior for us is to ignore all but the last batch.

-    console.log('data sub negative one as follows')
-    console.log(data.at(-1))
+    logger.debug('data sub negative one as follows')
+    logger.debug(data.at(-1))

    const last = data.at(-1);
    if (!last) {
@@ -210,12 +211,12 @@ export default async function vtubersRoutes(



-    console.log('data sub negative dot successful sub zero as follows')
-    console.log(last.successful[0])
+    logger.debug('data sub negative dot successful sub zero as follows')
+    logger.debug(last.successful[0])

    if (!last) throw new Error('mostRecentUploadBatch not found');
-    console.log('last as follows')
-    console.log(last)
+    logger.debug('last as follows')
+    logger.debug(last)
    const vtuber = await prisma.vtuber.create({
      data: {
        // segmentKeys: mostRecentUploadBatch.successful.map((d) => ({ key: d.s3Multipart.key, name: d.name })),
@@ -1,296 +0,0 @@
-// src/tasks/consolidate_twitch_channel_rewards.ts
-
-import type { Task, Helpers } from "graphile-worker";
-import { PrismaClient, User, type Pick } from '../../generated/prisma'
-import { withAccelerate } from "@prisma/extension-accelerate"
-import { env } from "../config/env";
-import { constants } from "../config/constants";
-import { getRateLimiter } from "../utils/rateLimiter";
-
-const prisma = new PrismaClient().$extends(withAccelerate())
-const cprPath = env.TWITCH_MOCK ? constants.twitch.dev.paths.channelPointRewards : constants.twitch.prod.paths.channelPointRewards
-
-interface Payload {
-  userId: number;
-}
-
-export interface TwitchChannelPointReward {
-  id: string;
-  broadcaster_id: string;
-  broadcaster_login: string;
-  broadcaster_name: string;
-  image: string | null;
-  background_color: string;
-  is_enabled: boolean;
-  cost: number;
-  title: string;
-  prompt: string;
-  is_user_input_required: boolean;
-  max_per_stream_setting: {
-    is_enabled: boolean;
-    max_per_stream: number;
-  };
-  max_per_user_per_stream_setting: {
-    is_enabled: boolean;
-    max_per_user_per_stream: number;
-  };
-  global_cooldown_setting: {
-    is_enabled: boolean;
-    global_cooldown_seconds: number;
-  };
-  is_paused: boolean;
-  is_in_stock: boolean;
-  default_image: {
-    url_1x: string;
-    url_2x: string;
-    url_4x: string;
-  };
-  should_redemptions_skip_request_queue: boolean;
-  redemptions_redeemed_current_stream: number | null;
-  cooldown_expires_at: string | null;
-}
-
-
-function getAuthToken(user: User) {
-  const authToken = env.TWITCH_MOCK ? env.TWITCH_MOCK_USER_ACCESS_TOKEN : user.twitchToken?.accessToken
-  return authToken
-}
-
-
-function assertPayload(payload: any): asserts payload is Payload {
-  if (typeof payload !== "object" || !payload) throw new Error("invalid payload");
-  if (typeof payload.userId !== "number") throw new Error("invalid payload.userId");
-}
-
-
-async function getTwitchChannelPointRewards(user: User) {
-  if (!user) throw new Error(`getTwitchChannelPointRewards called with falsy user`);
-  if (!user.twitchToken) throw new Error(`user.twitchToken is not existing, when it needs to.`);
-
-  const authToken = getAuthToken(user)
-  const limiter = getRateLimiter()
-  await limiter.consume('twitch', 1)
-  // Create the custom Channel Point Reward on Twitch.
-  // @see https://dev.twitch.tv/docs/api/reference/#create-custom-rewards
-  // POST https://api.twitch.tv/helix/channel_points/custom_rewards
-  const query = new URLSearchParams({
-    broadcaster_id: user.twitchId
-  })
-  const res = await fetch(`${env.TWITCH_API_ORIGIN}${cprPath}?${query}`, {
-    headers: {
-      'Authorization': `Bearer ${authToken}`,
-      'Client-Id': env.TWITCH_CLIENT_ID
-    }
-  })
-
-  if (!res.ok) {
-    console.error(`failed to get a custom channel point rewards for user id=${user.id}`)
-    console.error(res.statusText)
-    throw new Error(res.statusText);
-  }
-
-  const data = await res.json()
-  return data
-}
-
-async function postTwitchChannelPointRewards(user: User, pick: Pick) {
-
-  const authToken = getAuthToken(user)
-  const limiter = getRateLimiter()
-  await limiter.consume('twitch', 1)
-  // Create the custom Channel Point Reward on Twitch.
-  // @see https://dev.twitch.tv/docs/api/reference/#create-custom-rewards
-  // POST https://api.twitch.tv/helix/channel_points/custom_rewards
-  const query = new URLSearchParams({
-    broadcaster_id: user.twitchId
-  })
-  const res = await fetch(`${env.TWITCH_API_ORIGIN}${cprPath}?${query}`, {
-    method: 'POST',
-    headers: {
-      'Authorization': `Bearer ${authToken}`,
-      'Client-Id': env.TWITCH_CLIENT_ID
-    },
-    body: JSON.stringify({
-      cost: user.redeemCost,
-      title: pick.waifu.name
-    })
-  })
-
-  if (!res.ok) {
-    console.error(`failed to create a custom channel point reward for userId=${user.id}`)
-    console.error(res.statusText)
-    throw new Error(res.statusText);
-  }
-
-  // Associate the twitch channel point reward with our Pick
-  const data = await res.json()
-  const twitchChannelPointRewardId = data.data.at(0).id
-
-  await prisma.pick.update({
-    where: {
-      id: pick.id
-    },
-    data: {
-      twitchChannelPointRewardId
-    }
-  })
-}
-
-
-// * filter rewards which we previously created
-const isWaifusChannelPointReward = (reward: TwitchChannelPointReward, picks: Pick[]) => {
-  return picks.some((pick) => pick.twitchChannelPointRewardId === reward.id)
-}
-
-// * filter rewards which should no longer be displayed
-//   * delete
-// * filter rewards which have the wrong redeemCost
-//   * update so they have the correct redeemCost
-
-const isOutOfDateReward = (
-  reward: TwitchChannelPointReward,
-  picks: Pick[],
-  waifuChoicePoolSize: number
-): boolean => {
-  const currentPicks = picks.slice(0, waifuChoicePoolSize);
-  console.log('currentPicks as follows')
-  console.log(currentPicks)
-  return !currentPicks.some(pick => pick.twitchChannelPointRewardId === reward.id);
-};
-
-const isWrongRedeemCost = (reward: TwitchChannelPointReward, redeemCost: number) => reward.cost !== redeemCost
-
-
-// @see https://dev.twitch.tv/docs/api/reference/#delete-custom-reward
-// DELETE https://api.twitch.tv/helix/channel_points/custom_rewards
-async function deleteTwitchChannelPointReward(user: User, reward: TwitchChannelPointReward) {
-  const limiter = getRateLimiter()
-  await limiter.consume('twitch', 1)
-
-  const authToken = getAuthToken(user)
-  const query = new URLSearchParams({
-    broadcaster_id: user.twitchId,
-    id: reward.id
-  })
-  const res = await fetch(`${env.TWITCH_API_ORIGIN}${cprPath}?${query}`, {
-    method: 'DELETE',
-    headers: {
-      'Authorization': `Bearer ${authToken}`,
-      'Client-Id': env.TWITCH_CLIENT_ID
-    }
-  })
-  if (!res.ok) {
-    throw new Error(`Failed to delete twitch channel point reward.id=${reward.id} for user.id=${user.id} (user.twitchId=${user.twitchId}) `);
-
-  }
-
-}
-
-// @see https://dev.twitch.tv/docs/api/reference/#update-custom-reward
-async function updateTwitchChannelPointReward(user: User, reward: TwitchChannelPointReward) {
-  const limiter = getRateLimiter()
-  await limiter.consume('twitch', 1)
-
-  const authToken = getAuthToken(user)
-  const query = new URLSearchParams({
-    broadcaster_id: user.twitchId,
-    id: reward.id
-  })
-  const res = await fetch(`${env.TWITCH_API_ORIGIN}${cprPath}?${query}`, {
-    method: 'PATCH',
-    headers: {
-      'Authorization': `Bearer ${authToken}`,
-      'Client-Id': env.TWITCH_CLIENT_ID
-    },
-    body: JSON.stringify({
-      cost: user.redeemCost
-    })
-  })
-  if (!res.ok) {
-    throw new Error(`Failed to update twitch channel point reward.id=${reward.id} with redeemCost=${user.redeemCost} for user.id=${user.id} (user.twitchId=${user.twitchId}) `);
-  }
-
-}
-
-/**
- *
- * consolidate_twitch_channel_rewards
- *
- * This script is meant to run via crontab.
- * It finds Users with Picks that lack a twitchChannelPointRewardId, then
- *
- *
- * @param payload
- * @param helpers
- */
-export default async function consolidate_twitch_channel_rewards(payload: any, helpers: Helpers) {
-  assertPayload(payload);
-  const { userId } = payload;
-  // logger.info(`Hello, ${name}`);
-
-  const user = await prisma.user.findFirstOrThrow({
-    where: {
-      id: userId
-    },
-    include: {
-      twitchToken: true
-    }
-  })
-
-  // * get the current number of picks
-  const picks = await prisma.pick.findMany({
-    where: {
-      userId
-    },
-    take: constants.twitch.maxChannelPointRewards,
-    orderBy: {
-      createdAt: 'desc'
-    }
-  })
-
-
-
-  // * get the user's configured redeemCost
-  const redeemCost = user.redeemCost
-  const twitchId = user.twitchId
-
-
-  // * get the user's currently configured twitch channel point rewards
-  const twitchChannelPointRewards = await getTwitchChannelPointRewards(user)
-  const tcpr = twitchChannelPointRewards.data.map((cpr) => ({ id: cpr.id, cost: cpr.cost, is_in_stock: cpr.is_in_stock, title: cpr.title }))
-
-  // * identify the actions we need to do to get the channel point rewards up-to-date
-  const twitchRewards = await getTwitchChannelPointRewards(user);
-  const twitchRewardsData = twitchRewards.data;
-  console.log(`User ${userId} has ${picks.length} picks. And ${twitchRewardsData.length} twitch rewards. waifuChoicePoolSize=${user.waifuChoicePoolSize}, maxOnScreenWaifus=${user.maxOnScreenWaifus}`)
-
-
-  const currentPicks = picks.slice(0, user.waifuChoicePoolSize);
-
-  const outOfDate = twitchRewardsData.filter((reward: TwitchChannelPointReward) =>
-    picks.some(p => p.twitchChannelPointRewardId === reward.id) &&
-    !currentPicks.some(p => p.twitchChannelPointRewardId === reward.id)
-  );
-
-  console.log(`outOfDate as follows`)
-  console.log(outOfDate)
-
-  const costMismatched = twitchRewardsData
-    .filter((r: TwitchChannelPointReward) => isWrongRedeemCost(r, user.redeemCost));
-
-  logger.info(`There are ${outOfDate.length} out of date Channel Point Rewards. outOfDate=${JSON.stringify(outOfDate.map((ood) => ({ title: ood.title, cost: ood.cost, id: ood.id })))}`)
-  logger.info(`costMismatched=${JSON.stringify(costMismatched)}`)
-
-  // * make the REST request(s) to get the twitch channel point rewards up-to-date
-  for (const reward of outOfDate) {
-    console.log(`deleting reward.id=${reward.id} with reward.title=${reward.title}`)
-    await deleteTwitchChannelPointReward(user, reward)
-  }
-
-  for (const reward of costMismatched) {
-    console.log(`updating reward.id=${reward.id} with reward.title=${reward.title}`)
-    await updateTwitchChannelPointReward(user, reward)
-  }
-
-};
@@ -6,6 +6,7 @@ import { withAccelerate } from "@prisma/extension-accelerate";
 import { env } from "../config/env";
 import { constants } from "../config/constants";
 import { getRateLimiter } from "../utils/rateLimiter";
+import logger from '../utils/logger';

 const prisma = new PrismaClient().$extends(withAccelerate());
 const cprPath = env.TWITCH_MOCK ? constants.twitch.dev.paths.channelPointRewards : constants.twitch.prod.paths.channelPointRewards;
@@ -55,10 +56,10 @@ const createTwitchReward = async (user: User, pick: Pick) => {
  const authToken = getAuthToken(user);
  const limiter = getRateLimiter();
  await limiter.consume('twitch', 1);
-  console.log('pick as follows')
-  console.log(pick)
+  logger.debug('pick as follows')
+  logger.debug(pick)

-  console.log(`pick?.waifu?.name=${pick?.waifu?.name}`)
+  logger.debug(`pick?.waifu?.name=${pick?.waifu?.name}`)

  const query = new URLSearchParams({ broadcaster_id: user.twitchId });
  const res = await fetch(`${env.TWITCH_API_ORIGIN}${cprPath}?${query}`, {
@@ -136,7 +137,7 @@ const consolidateTwitchRewards = async (userId: number) => {
  // Ensure every pick has a reward before processing Twitch side
  for (const pick of picks) {
    if (!pick.twitchChannelPointRewardId) {
-      console.log(`Creating new reward for pick: ${pick.id}`);
+      logger.debug(`Creating new reward for pick: ${pick.id}`);
      await createTwitchReward(user, pick);
    }
  }
@@ -153,8 +154,8 @@ const consolidateTwitchRewards = async (userId: number) => {
    updatedPicks.slice(0, user.waifuChoicePoolSize).map(p => p.twitchChannelPointRewardId)
  );

-  console.log('currentPickIds as follows');
-  console.log(currentPickIds);
+  logger.debug('currentPickIds as follows');
+  logger.debug(currentPickIds);

  // Fetch Twitch-side rewards
  const twitchData = await getTwitchChannelPointRewards(user);
@@ -165,10 +166,10 @@ const consolidateTwitchRewards = async (userId: number) => {
    if (!updatedPicks.some(p => p.twitchChannelPointRewardId === reward.id)) continue;

    if (!currentPickIds.has(reward.id)) {
-      console.log(`Deleting out-of-date reward: ${reward.id}`);
+      logger.debug(`Deleting out-of-date reward: ${reward.id}`);
      await deleteTwitchReward(user, reward.id);
    } else if (reward.cost !== user.redeemCost) {
-      console.log(`Updating reward cost for: ${reward.id}`);
+      logger.debug(`Updating reward cost for: ${reward.id}`);
      await updateTwitchReward(user, reward.id, user.redeemCost);
    }
  }
@@ -41,7 +41,7 @@ async function hash(helpers: Helpers, inputFilePath: string) {

  // const exitCode = await subprocess;
  // if (exitCode !== 0) {
-  //   console.error(`vcsi failed with exit code ${exitCode}`);
+  //   logger.error(`vcsi failed with exit code ${exitCode}`);
  //   process.exit(exitCode);
  // }
  logger.info(JSON.stringify(result))
@@ -99,11 +99,11 @@ export default async function createVideoThumbnail(payload: any, helpers: Helper

  // * [x] download video segments from pull-thru cache
  const videoFilePath = await getOrDownloadAsset(s3Client, env.S3_BUCKET, vod.sourceVideo)
-  console.log(`videoFilePath=${videoFilePath}`)
+  logger.debug(`videoFilePath=${videoFilePath}`)

  // * [x] run vcsi
  const thumbnailPath = await createThumbnail(helpers, videoFilePath)
-  console.log(`thumbnailPath=${thumbnailPath}`)
+  logger.debug(`thumbnailPath=${thumbnailPath}`)

  // * [x] generate thumbnail s3 key
  const slug = vod.vtubers[0].slug
@@ -8,6 +8,7 @@ import { DataYaml, writeFunscript, generateActions1, classPositionMap, buildFuns
 import { nanoid } from 'nanoid';
 import { getNanoSpawn } from '../utils/nanoSpawn';
 import { preparePython } from '../utils/python';
+import logger from '../utils/logger';

 interface Detection {
   startFrame: number;
@@ -38,19 +39,19 @@ const sampleVideoPath = join(fixturesDir, 'sample.mp4')
  */
 async function processLabelFiles(labelDir: string, data: DataYaml): Promise<Detection[]> {
   const labelFiles = (await readdir(labelDir)).filter(file => file.endsWith('.txt'));
-  console.log("Label files found:", labelFiles);
+  logger.debug("Label files found:", labelFiles);
   const detections: Map<number, Detection[]> = new Map();
   const names = data.names;

   for (const file of labelFiles) {
     const match = file.match(/(\d+)\.txt$/);
     if (!match) {
-      console.log(`Skipping invalid filename: ${file}`);
+      logger.debug(`Skipping invalid filename: ${file}`);
       continue;
     }
     const frameIndex = parseInt(match[1], 10);
     if (isNaN(frameIndex)) {
-      console.log(`Skipping invalid frame index from filename: ${file}`);
+      logger.debug(`Skipping invalid frame index from filename: ${file}`);
       continue;
     }

@@ -123,7 +124,7 @@ describe('createFunscript', () => {
    const videoPath = join(__dirname, 'fixtures', 'sample.mp4');

    const funscriptPath = await buildFunscript(predictionOutputPath, videoPath);
-    console.log(`built funscript at ${funscriptPath}`)
+    logger.debug(`built funscript at ${funscriptPath}`)


    const content = JSON.parse(await readFile(funscriptPath, 'utf8') as string);
@@ -6,7 +6,7 @@ import {
 import { readFile, mkdir, rm } from 'fs-extra';
 import { join } from 'node:path';
 import { tmpdir } from 'node:os';
+import logger from '../utils/logger';

 describe('[integration] buildFunscript', () => {
   const TMP_DIR = join(tmpdir(), 'funscript-test');
@@ -26,7 +26,7 @@ describe('[integration] buildFunscript', () => {
    const dataYamlPath = join(__dirname, 'fixtures', 'data.yaml');

    const outputPath = await buildFunscript(dataYamlPath, predictionOutput, videoPath);
-    console.log(`built funscript at ${outputPath}`)
+    logger.info(`built funscript at ${outputPath}`)


    const content = JSON.parse(await readFile(outputPath, 'utf8') as string);
@@ -1,214 +0,0 @@
-// inference.integration.test.ts
-
-// this idea was to use onnx, so we could run inference via node.
-// I couldn't figure it out-- the detection bounding boxes were in the wrong place.
-// I'm going back to shelling out to pytorch. saving this for reference.
-
-import { describe, it, expect, vi, beforeEach, afterEach, beforeAll } from 'vitest';
-import { InferenceSession, Tensor } from 'onnxruntime-node';
-import { join } from 'node:path';
-import { preprocessImage } from '../utils/vibeui';
-import { createCanvas, loadImage } from 'canvas';
-import { writeFileSync } from 'fs';
-import sharp from 'sharp';
-
-const __dirname = import.meta.dirname;
-
-const distDir = join(__dirname, '../../dist')
-const fixturesDir = join(__dirname, '..', 'tests', 'fixtures')
-const modelPath = join(distDir, 'vibeui', 'vibeui.onnx')
-const sampleFrame = join(fixturesDir, 'prediction', 'frames', '000001.jpg')
-
-const NUM_BOXES = 8400;
-const NUM_CLASSES = 19;
-const CONFIDENCE_THRESHOLD = 0.9; // tune as needed
-const IMAGE_WIDTH = 640;
-const IMAGE_HEIGHT = 640;
-
-type Detection = {
-  x1: number;
-  y1: number;
-  x2: number;
-  y2: number;
-  confidence: number;
-  classIndex: number;
-  classScore: number;
-};
-
-function iou(a: Detection, b: Detection): number {
-  const x1 = Math.max(a.x1, b.x1);
-  const y1 = Math.max(a.y1, b.y1);
-  const x2 = Math.min(a.x2, b.x2);
-  const y2 = Math.min(a.y2, b.y2);
-
-  const intersection = Math.max(0, x2 - x1) * Math.max(0, y2 - y1);
-  const areaA = (a.x2 - a.x1) * (a.y2 - a.y1);
-  const areaB = (b.x2 - b.x1) * (b.y2 - b.y1);
-  const union = areaA + areaB - intersection;
-
-  return intersection / union;
-}
-
-function nms(detections: Detection[], iouThreshold = 0.45, topK = 50): Detection[] {
-  const sorted = [...detections].sort((a, b) => b.confidence - a.confidence);
-  const selected: Detection[] = [];
-
-  while (sorted.length > 0 && selected.length < topK) {
-    const best = sorted.shift()!;
-    selected.push(best);
-
-    for (let i = sorted.length - 1; i >= 0; i--) {
-      if (iou(best, sorted[i]) > iouThreshold) {
-        sorted.splice(i, 1); // remove overlapping box
-      }
-    }
-  }
-
-  return selected;
-}
-
-function softmax(logits: Float32Array): number[] {
-  const max = Math.max(...logits);
-  const exps = logits.map(x => Math.exp(x - max));
-  const sum = exps.reduce((a, b) => a + b, 0);
-  return exps.map(e => e / sum);
-}
-
-function sigmoid(x: number): number {
-  return 1 / (1 + Math.exp(-x));
-}
-
-function postprocessTensor(tensor: Tensor): Detection[] {
-  const results: Detection[] = [];
-  const data = tensor.cpuData;
-
-  for (let i = 0; i < NUM_BOXES; i++) {
-    const offset = i * 24;
-
-    const cx = data[offset + 0]; // already in pixel coords
-    const cy = data[offset + 1];
-    const w = data[offset + 2];
-    const h = data[offset + 3];
-
-    const objectness = sigmoid(data[offset + 4]);
-    if (objectness < CONFIDENCE_THRESHOLD) continue;
-
-    const classLogits = data.slice(offset + 5, offset + 24);
-    const classScores = softmax(classLogits as Float32Array);
-
-    const maxClassScore = Math.max(...classScores);
-    const classIndex = classScores.findIndex(score => score === maxClassScore);
-
-    const confidence = objectness * maxClassScore;
-    if (confidence < CONFIDENCE_THRESHOLD) continue;
-
-    const x1 = cx - w / 2;
-    const y1 = cy - h / 2;
-    const x2 = cx + w / 2;
-    const y2 = cy + h / 2;
-
-    results.push({
-      x1,
-      y1,
-      x2,
-      y2,
-      confidence,
-      classIndex,
-      classScore: maxClassScore,
-    });
-  }
-
-  return results;
-}
-
-async function renderDetectionsSharp(
-  imagePath: string,
-  detections: Detection[],
-  outputPath: string,
-  classNames?: string[]
-) {
-  const baseImage = sharp(imagePath);
-  const { width, height } = await baseImage.metadata();
-
-  if (!width || !height) throw new Error('Image must have width and height');
-
-  const svg = createSvgOverlay(width, height, detections, classNames);
-  const overlay = Buffer.from(svg);
-
-  await baseImage
-    .composite([{ input: overlay, blend: 'over' }])
-    .toFile(outputPath);
-}
-
-function createSvgOverlay(
-  width: number,
-  height: number,
-  detections: Detection[],
-  classNames?: string[]
-): string {
-  const elements = detections.map(det => {
-    const x = det.x1;
-    const y = det.y1;
-    const w = det.x2 - det.x1;
-    const h = det.y2 - det.y1;
-    const className = classNames?.[det.classIndex] ?? `cls ${det.classIndex}`;
-    const confPct = (det.confidence * 100).toFixed(1);
-
-    return `
-      <g>
-        <rect x="${x}" y="${y}" width="${w}" height="${h}"
-          fill="none" stroke="red" stroke-width="2"/>
-        <text x="${x + 4}" y="${y + 16}" fill="red" font-size="16"
-          font-family="sans-serif">${className} (${confPct}%)</text>
-      </g>
-    `;
-  });
-
-  return `
-    <svg width="${width}" height="${height}" xmlns="http://www.w3.org/2000/svg">
-      ${elements.join('\n')}
-    </svg>
-  `;
-}
-
-describe('inference', () => {
-  it('pytorch', async () => {
-    const session = await InferenceSession.create(modelPath)
-    const imageTensor = await preprocessImage(sampleFrame);
-    console.log(session.inputNames)
-    console.log(session.outputNames)
-    const feeds = {
-      images: imageTensor
-    }
-
-    const res = await session.run(feeds)
-
-    const { output0 } = res
-
-    const detections = postprocessTensor(output0);
-    // console.log(detections)
-    // console.log(detections.length)
-    // console.log(detections.slice(0, 5)); // first 5 predictions
-
-    const finalDetections = nms(detections, undefined, 3);
-    console.log(finalDetections);
-    console.log(`there were ${finalDetections.length} detections`)

-    const classNames = Array.from({ length: 19 }, (_, i) => `class${i}`);
-    await renderDetectionsSharp(
-      sampleFrame,
-      finalDetections,
-      join(fixturesDir, 'prediction', 'output-sharp.png'),
-      classNames
-    );
-
-    expect(output0.dims).toEqual([1, 24, 8400]);
-    expect(output0.type).toBe('float32');
-    expect(output0.cpuData[0]).toBeGreaterThan(0); // or some expected value
-  })
-})
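A possible culprit for the misplaced boxes this deleted file mentions: the test asserts output0.dims is [1, 24, 8400] (attributes by boxes), while postprocessTensor indexes with offset = i * 24, which assumes a box-major [8400, 24] layout. If the tensor really is attribute-major, attribute k of box i sits at data[k * NUM_BOXES + i]. A sketch of the transposed read (an observation about the deleted code, not something this commit does):

    // Hypothetical attribute-major read for a [1, 24, 8400] output tensor:
    // attribute k of box i is data[k * NUM_BOXES + i], not data[i * 24 + k].
    function attr(data: Float32Array, k: number, i: number): number {
      return data[k * NUM_BOXES + i]; // NUM_BOXES = 8400
    }
    // e.g. const cx = attr(data, 0, i); const w = attr(data, 2, i);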
@@ -4,7 +4,7 @@ import { resolve, join } from 'node:path';
 import { readdir, readFile, rm } from 'node:fs/promises';
 import { readJson } from 'fs-extra';
 import { DataYaml } from '../utils/funscripts';
+import logger from '../utils/logger';

 const __dirname = import.meta.dirname;
 const FIXTURE_DIR = resolve(__dirname, 'fixtures');
@@ -20,12 +20,12 @@ describe('[integration] vibeuiInference', () => {

   beforeAll(async () => {
     outputPath = await vibeuiInference(MODEL, VIDEO);
-    console.log(`outputPath=${outputPath}`)
+    logger.info(`outputPath=${outputPath}`)
   }, 35_000);

   afterAll(async () => {
     // await rm(outputPath, { recursive: true, force: true });
-    console.log('@todo cleanup')
+    logger.info('@todo cleanup')
   });

   it('outputs detection labels and frames', async () => {
@@ -40,7 +40,7 @@ describe('[integration] vibeuiInference', () => {
     const content = await readFile(labelFile, 'utf8');

     const firstLine = content.split('\n')[0].trim().replace(/\r/g, '');
-    console.log('First line content:', JSON.stringify(firstLine));
+    logger.info('First line content:', JSON.stringify(firstLine));

     // Expect initial class number + exactly 5 floats/ints after it (6 total numbers)
     expect(firstLine).toMatch(/^\d+( (-?\d+(\.\d+)?)){5}$/);
@@ -3,7 +3,7 @@ import { PrismaClient } from '../../generated/prisma'
 import { withAccelerate } from "@prisma/extension-accelerate"
 import type { FastifyRequest, FastifyReply } from 'fastify';
 import type { OnBehalfQuery } from '../types';
+import logger from './logger';

 const prisma = new PrismaClient().$extends(withAccelerate())

@@ -45,10 +45,10 @@ export async function getTargetUser(
   });

   if (!onBehalfOf) {
-    console.log(`we have found the condition where onBehalfOf not set`)
+    logger.warn(`we have found the condition where onBehalfOf not set`)
     return requester
   } else if (onBehalfOf === requester.twitchName) {
-    console.log(`we have found the condtion where onBehalfOf is the same name as requester.twitchName`)
+    logger.warn(`we have found the condtion where onBehalfOf is the same name as requester.twitchName`)
     return requester;
   }

|
@ -11,8 +11,8 @@ import logger from './logger';
|
|||||||
|
|
||||||
const keyv = new Keyv(new KeyvPostgres({ uri: env.DATABASE_URL, schema: 'keyv' }));
|
const keyv = new Keyv(new KeyvPostgres({ uri: env.DATABASE_URL, schema: 'keyv' }));
|
||||||
keyv.on('error', (err) => {
|
keyv.on('error', (err) => {
|
||||||
console.error('keyv error encountered.')
|
logger.error('keyv error encountered.')
|
||||||
console.error(err)
|
logger.error(err)
|
||||||
});
|
});
|
||||||
|
|
||||||
const cache = createCache({ stores: [keyv] });
|
const cache = createCache({ stores: [keyv] });
|
||||||
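Since pino serializes an Error passed as the first argument (message plus stack), the two calls here could also collapse into one. A sketch of that variant, not what the commit does:

    // Hypothetical single-call form; pino serializes the Error itself
    // and uses the string as the log message.
    keyv.on('error', (err) => {
      logger.error(err, 'keyv error encountered.');
    });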
@@ -126,7 +126,7 @@ export async function cleanExpiredFiles(): Promise<number> {
       deletedCount++;
       logger.debug(`Deleted expired file: ${fullPath}`);
     } catch (err) {
-      console.warn(`Failed to delete file ${fullPath}:`, err);
+      logger.warn(`Failed to delete file ${fullPath}:`, err);
     }
   }
 }
@@ -3,6 +3,7 @@ import { writeJson } from "fs-extra";
 import { env } from "../config/env";
 import { nanoid } from "nanoid";
 import { loadDataYaml, loadVideoMetadata, processLabelFiles } from "./vibeui";
+import logger from "./logger";

 export interface FunscriptAction {
   at: number;
@@ -160,10 +161,10 @@ export function generateActions2(



-  console.debug('[generateActions] Total duration (ms):', totalDurationMs);
-  console.debug('[generateActions] FPS:', fps);
-  console.debug('[generateActions] Detection segments:', detectionSegments);
-  console.debug('[generateActions] Class position map:', classPositionMap);
+  logger.debug('[generateActions] Total duration (ms):', totalDurationMs);
+  logger.debug('[generateActions] FPS:', fps);
+  logger.debug('[generateActions] Detection segments:', detectionSegments);
+  logger.debug('[generateActions] Class position map:', classPositionMap);

   // Generate static position actions
   for (let timeMs = 0; timeMs <= totalDurationMs; timeMs += intervalMs) {
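Because pino emits JSON, these values could also travel as structured fields using pino's merging-object form (object first, message second). A sketch of that alternative, not what the commit does:

    // Hypothetical structured variants of the calls above:
    logger.debug({ totalDurationMs, fps }, '[generateActions] timing');
    logger.debug({ detectionSegments }, '[generateActions] detection segments');
    logger.debug({ classPositionMap }, '[generateActions] class position map');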
@@ -180,13 +181,13 @@ export function generateActions2(
           position = classPositionMap[className];
           break;
         } else {
-          console.warn(`[generateActions] Static class not mapped to number: ${className}`);
+          logger.warn(`[generateActions] Static class not mapped to number: ${className}`);
         }
       }
     }

     if (!matchedSegment) {
-      console.debug(`[generateActions] No matching segment for time ${timeMs} (frame ${frameIndex})`);
+      logger.debug(`[generateActions] No matching segment for time ${timeMs} (frame ${frameIndex})`);
     }

     actions.push({ at: timeMs, pos: position });
@@ -200,10 +201,10 @@ export function generateActions2(
     const startMs = Math.floor((segment.startFrame / fps) * 1000);
     const durationMs = Math.floor(((segment.endFrame - segment.startFrame + 1) / fps) * 1000);

-    console.debug(`[generateActions] Generating pattern for class "${className}" from ${startMs}ms for ${durationMs}ms`);
+    logger.debug(`[generateActions] Generating pattern for class "${className}" from ${startMs}ms for ${durationMs}ms`);
     const patternActions = generatePatternPositions(startMs, durationMs, className, fps);

-    console.debug(`[generateActions] Generated ${patternActions.length} pattern actions for class "${className}"`);
+    logger.debug(`[generateActions] Generated ${patternActions.length} pattern actions for class "${className}"`);
     actions.push(...patternActions);
   }
 }
@@ -219,7 +220,7 @@ export function generateActions2(
     }
   }

-  console.debug(`[generateActions] Final action count: ${uniqueActions.length}`);
+  logger.debug(`[generateActions] Final action count: ${uniqueActions.length}`);

   return uniqueActions;
 }
@@ -229,7 +230,7 @@ export function generateActions2(
  *
  * - Wraps the provided actions in a Funscript object with version `1.0`.
  * - Saves the JSON to the specified output path.
- * - Logs the file path and action count to the console.
+ * - Logs the file path and action count to the logger.
  *
  * @param outputPath - Destination file path for the .funscript output.
  * @param actions - Array of `FunscriptAction` entries to include.
@@ -239,8 +240,8 @@ export async function writeFunscript(outputPath: string, actions: FunscriptActio
   const funscript: Funscript = { version: '1.0', actions };

   await writeJson(outputPath, funscript);
-  console.log(`Funscript generated: ${outputPath} (${actions.length} actions)`);
-  console.log(funscript)
+  logger.debug(`Funscript generated: ${outputPath} (${actions.length} actions)`);
+  logger.debug(funscript)
 }

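For reference, writeFunscript produces plain JSON in the shape { version, actions }, each action pairing a millisecond timestamp with a position. Illustrative values only:

    // Illustrative shape of a written .funscript (values made up):
    const example: Funscript = {
      version: '1.0',
      actions: [
        { at: 0, pos: 0 },    // at = milliseconds into the video
        { at: 500, pos: 80 }, // pos = target position
      ],
    };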
@@ -276,7 +277,7 @@ export async function buildFunscript(

     return outputPath;
   } catch (error) {
-    console.error(`Error generating Funscript: ${error instanceof Error ? error.message : 'Unknown error'}`);
+    logger.error(`Error generating Funscript: ${error instanceof Error ? error.message : 'Unknown error'}`);
     throw error;
   }
 }
@@ -3,6 +3,7 @@ import { join } from "node:path";
 import { env } from "../config/env";
 import which from "which";
 import { existsSync } from "fs-extra";
+import logger from "./logger";

 export async function preparePython() {
   const spawn = await getNanoSpawn();
@@ -19,30 +20,30 @@ export async function preparePython() {

   // 2. Create venv if missing
   if (!existsSync(venvPath)) {
-    console.log("Creating Python venv...");
+    logger.debug("Creating Python venv...");
     await spawn(pythonCmd, ["-m", "venv", venvPath], {
       cwd: env.APP_DIR,
     });
-    console.log("Python venv created.");
+    logger.debug("Python venv created.");
   } else {
-    console.log("Using existing Python venv.");
+    logger.debug("Using existing Python venv.");
   }

   // 3. Install requirements.txt
   const pipCmd = join(venvBin, "pip");
-  console.log("Installing requirements.txt...");
+  logger.debug("Installing requirements.txt...");
   await spawn(pipCmd, ["install", "-r", "requirements.txt"], {
     cwd: env.APP_DIR,
   });
-  console.log("requirements.txt installed.");
+  logger.debug("requirements.txt installed.");

   // 4. Confirm vcsi CLI binary exists
   const vcsiBinary = join(venvBin, "vcsi");
   if (!existsSync(vcsiBinary)) {
-    console.error("vcsi binary not found in venv after installing requirements.");
-    console.error("Make sure 'vcsi' is listed in requirements.txt and that it installs a CLI.");
+    logger.error("vcsi binary not found in venv after installing requirements.");
+    logger.error("Make sure 'vcsi' is listed in requirements.txt and that it installs a CLI.");
     throw new Error("vcsi installation failed or did not expose CLI.");
   }

-  console.log("vcsi CLI is available at", vcsiBinary);
+  logger.debug("vcsi CLI is available at", vcsiBinary);
 }
@@ -5,17 +5,17 @@ import {
 } from '@imgly/background-removal-node';
 import { nanoid } from 'nanoid';
 import fs from 'node:fs';
+import logger from './logger';

 export async function run(imageFilePath: string): Promise<string> {

-  console.log(`imageFilePath=${imageFilePath}`);
+  logger.info(`imageFilePath=${imageFilePath}`);
   const config = {
     debug: false,
     // publicPath: ...,
     progress: (key, current, total) => {
       const [type, subtype] = key.split(':');
-      console.log(
+      logger.info(
         `progress:: ${type} ${subtype} ${((current / total) * 100).toFixed(0)}%`
       );
     },
@@ -25,19 +25,19 @@ export async function run(imageFilePath: string): Promise<string> {
       format: 'image/webp' //image/jpeg, image/webp
     }
   };
-  console.time();
   const blob = await removeBackground(imageFilePath, config);


   // const mask = await segmentForeground(randomImage, config);
   // const blob = await applySegmentationMask(randomImage, mask, config);
-  console.timeEnd();
   const buffer = await blob.arrayBuffer();

   const format = config.output.format.split('/').pop();
   const outFile = `tmp/${nanoid()}.${format}`;
   await fs.promises.mkdir('tmp', { recursive: true });
   await fs.promises.writeFile(outFile, Buffer.from(buffer));
-  console.log(`Image saved to ${outFile}`);
+  logger.info(`Image saved to ${outFile}`);
   return outFile

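The console.time()/console.timeEnd() pair around removeBackground is dropped without a replacement. If that timing is still wanted through the logger, a hand-rolled equivalent might look like this (an assumption, not in the diff):

    // Hypothetical replacement for the dropped console.time pair:
    const started = Date.now();
    const blob = await removeBackground(imageFilePath, config);
    logger.info(`removeBackground took ${Date.now() - started}ms`);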
@@ -4,6 +4,8 @@
 import { existsSync, mkdirSync, readdirSync, rmSync, writeFileSync } from 'fs';
 import { join } from 'path';
 import { getNanoSpawn } from './nanoSpawn';
+import logger from './logger';


 // Entry point
 (async function main() {
|
|||||||
const outputDir = process.argv[4];
|
const outputDir = process.argv[4];
|
||||||
|
|
||||||
if (isNaN(fps) || fps <= 0) {
|
if (isNaN(fps) || fps <= 0) {
|
||||||
console.error('Error: Invalid FPS value. Please provide a positive integer.');
|
logger.error('Error: Invalid FPS value. Please provide a positive integer.');
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -34,12 +36,12 @@ import { getNanoSpawn } from './nanoSpawn';
   cleanup(thumbnailsDir);

   const elapsed = (Date.now() - startTime) / 1000;
-  console.log(`Process completed in ${elapsed.toFixed(2)} seconds.`);
+  logger.debug(`Process completed in ${elapsed.toFixed(2)} seconds.`);
 })();

 function showUsage(): never {
-  console.log('Usage: ts-node storyboard.ts <fps> <input_file> <output_dir>');
-  console.log('Example: ts-node storyboard.ts 1 example.mp4 ./output');
+  logger.debug('Usage: ts-node storyboard.ts <fps> <input_file> <output_dir>');
+  logger.debug('Example: ts-node storyboard.ts 1 example.mp4 ./output');
   process.exit(1);
 }

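One side effect worth flagging: showUsage now emits at debug level, so under a typical default level of info the usage text never reaches the terminal. Writing it straight to stderr would keep it visible regardless of LOG_LEVEL. A sketch, not part of the commit:

    // Hypothetical level-independent usage output:
    function showUsage(): never {
      process.stderr.write('Usage: ts-node storyboard.ts <fps> <input_file> <output_dir>\n');
      process.stderr.write('Example: ts-node storyboard.ts 1 example.mp4 ./output\n');
      process.exit(1);
    }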
@@ -51,26 +53,26 @@ function ensureDirectoryExists(path: string) {

 export async function generateThumbnails(fps: number, inputFile: string, thumbnailsDir: string) {
   const spawn = await getNanoSpawn()
-  console.log('Generating thumbnails...');
+  logger.debug('Generating thumbnails...');
   try {
     spawn('ffmpeg', [
       '-i', inputFile,
       '-vf', `fps=1/${fps},scale=384:160`,
       join(thumbnailsDir, 'thumb%05d.jpg'),
     ], { stdio: 'inherit' });
-    console.log('Thumbnails generated successfully.');
+    logger.debug('Thumbnails generated successfully.');
   } catch (err) {
-    console.error('Error generating thumbnails:', err);
+    logger.error('Error generating thumbnails:', err);
     process.exit(1);
   }
 }

 export async function generateStoryboard(thumbnailsDir: string, storyboardImage: string) {
   const spawn = await getNanoSpawn()
-  console.log('Creating storyboard.jpg...');
+  logger.debug('Creating storyboard.jpg...');
   const thumbnails = readdirSync(thumbnailsDir).filter(f => f.endsWith('.jpg'));
   if (thumbnails.length === 0) {
-    console.error('No thumbnails found. Exiting.');
+    logger.error('No thumbnails found. Exiting.');
     process.exit(1);
   }

@@ -85,15 +87,15 @@ export async function generateStoryboard(thumbnailsDir: string, storyboardImage:
       '-filter_complex', tileFilter,
       storyboardImage,
     ], { stdio: 'inherit' });
-    console.log('Storyboard image created successfully.');
+    logger.debug('Storyboard image created successfully.');
   } catch (err) {
-    console.error('Error creating storyboard image:', err);
+    logger.error('Error creating storyboard image:', err);
     process.exit(1);
   }
 }

 function generateVTT(fps: number, thumbnailsDir: string, vttFile: string) {
-  console.log('Generating storyboard.vtt...');
+  logger.debug('Generating storyboard.vtt...');
   const thumbnails = readdirSync(thumbnailsDir).filter(f => f.startsWith('thumb') && f.endsWith('.jpg'));
   const durationPerThumb = fps;
   let vttContent = 'WEBVTT\n\n';
@@ -111,19 +113,19 @@ function generateVTT(fps: number, thumbnailsDir: string, vttFile: string) {

   try {
     writeFileSync(vttFile, vttContent, 'utf8');
-    console.log('Storyboard VTT file generated successfully.');
+    logger.debug('Storyboard VTT file generated successfully.');
   } catch (err) {
-    console.error('Error writing VTT file:', err);
+    logger.error('Error writing VTT file:', err);
     process.exit(1);
   }
 }

 function cleanup(thumbnailsDir: string) {
-  console.log('Cleaning up temporary files...');
+  logger.debug('Cleaning up temporary files...');
   try {
     rmSync(thumbnailsDir, { recursive: true, force: true });
   } catch (err) {
-    console.error('Error cleaning up thumbnails:', err);
+    logger.error('Error cleaning up thumbnails:', err);
     process.exit(1);
   }
 }
@@ -11,6 +11,7 @@ import { env } from '../config/env';
 import fs from 'fs/promises';
 import { readJson } from 'fs-extra';
 import { preparePython } from './python';
+import logger from './logger';

 interface Detection {
   startFrame: number;
@@ -192,7 +193,7 @@ export async function inference(videoFilePath: string): Promise<string> {
   // })

   // return outputPath // contains labels/ folder and predictions
-  // console.log(`prediction output ${predictionOutput}`);
+  // logger.info(`prediction output ${predictionOutput}`);


   // const funscriptFilePath = await buildFunscript(helpers, predictionOutput, videoFilePath)
@@ -201,10 +202,10 @@ export async function inference(videoFilePath: string): Promise<string> {
   // const s3Key = `funscripts/${vodId}.funscript`;
   // const s3Url = await uploadFile(s3Client, env.S3_BUCKET, s3Key, funscriptFilePath, "application/json");

-  // console.log(`Uploaded funscript to S3: ${s3Url}`);
+  // logger.info(`Uploaded funscript to S3: ${s3Url}`);


-  // console.log(`Funscript saved to database for vod ${vodId}`);
+  // logger.info(`Funscript saved to database for vod ${vodId}`);

   // }

@@ -217,11 +218,11 @@ export async function inference(videoFilePath: string): Promise<string> {
   // if (!videoFilePath) throw new Error('missing videoFilePath, arg1');

   // // Load ONNX model
-  // console.log(`Loading ONNX model from ${modelPath}`);
+  // logger.info(`Loading ONNX model from ${modelPath}`);

   // const session = await ort.InferenceSession.create(modelPath);

-  // console.log(`inputNames=${session.inputNames} outputNames=${session.outputNames}`)
+  // logger.info(`inputNames=${session.inputNames} outputNames=${session.outputNames}`)

   // // Prepare output dir
   // // const videoExt = extname(videoFilePath);
@@ -233,7 +234,7 @@ export async function inference(videoFilePath: string): Promise<string> {
   // // Extract frames
   // const framesDir = join(outputPath, 'frames');
   // mkdirSync(framesDir, { recursive: true })
-  // console.log(`Extracting video frames from ${videoFilePath} to ${framesDir}...`);
+  // logger.info(`Extracting video frames from ${videoFilePath} to ${framesDir}...`);
   // await extractFrames(videoFilePath, framesDir);

   // // Load class names from data.yaml
@@ -245,12 +246,12 @@ export async function inference(videoFilePath: string): Promise<string> {
   //   .filter(f => f.endsWith('.jpg'))
   //   .sort();

-  // // console.log(`frameFiles=${JSON.stringify(frameFiles)}`)
+  // // logger.info(`frameFiles=${JSON.stringify(frameFiles)}`)
   // const detectionsByFrame = new Map<number, DetectionOutput[]>();

   // if (frameFiles.length === 0) throw new Error(`No frames extracted! This is likely a bug.`);

-  // console.log(`Running inference on ${frameFiles.length} frames...`);
+  // logger.info(`Running inference on ${frameFiles.length} frames...`);


   // for (const file of frameFiles) {
@@ -258,7 +259,7 @@ export async function inference(videoFilePath: string): Promise<string> {
   //   const imagePath = join(framesDir, file);
   //   const inputTensor = await preprocessImage(imagePath);
   //   const detections = await runModelInference(session, inputTensor)
-  //   console.log(`[frame ${frameIndex}] detections.length = ${detections.length}`);
+  //   logger.info(`[frame ${frameIndex}] detections.length = ${detections.length}`);
   //   detectionsByFrame.set(frameIndex, detections);

   // }
@@ -323,7 +324,7 @@ export async function ffprobe(videoPath: string): Promise<{ fps: number; frames:
   //     throw new Error('Invalid labels format in JSON');
   //   }
   // } catch (err) {
-  //   console.error(`Failed to read labels from ${jsonPath}:`, err);
+  //   logger.error(`Failed to read labels from ${jsonPath}:`, err);
   //   throw err;
   // }
   // }
@@ -333,7 +334,7 @@ export async function ffprobe(videoPath: string): Promise<{ fps: number; frames:
  * Loads basic metadata from a video file using ffprobe.
  *
  * - Retrieves the video's frame rate (fps) and total frame count.
- * - Logs the extracted metadata to the console.
+ * - Logs the extracted metadata
  *
  * @param videoPath - Path to the video file to analyze.
  * @returns An object containing `fps` and `totalFrames`.
@@ -341,14 +342,14 @@ export async function ffprobe(videoPath: string): Promise<{ fps: number; frames:
  */
 export async function loadVideoMetadata(videoPath: string) {
   const { fps, frames: totalFrames } = await ffprobe(videoPath);
-  console.log(`Video metadata: fps=${fps}, frames=${totalFrames}`);
+  logger.info(`Video metadata: fps=${fps}, frames=${totalFrames}`);
   return { fps, totalFrames };
 }

 export async function processLabelFiles(labelDir: string, data: DataYaml): Promise<Detection[]> {
   const labelFiles = (await readdir(labelDir)).filter(file => file.endsWith('.txt'));
-  console.log(`[processLabelFiles] Found label files: ${labelFiles.length}`);
-  if (labelFiles.length === 0) console.warn(`⚠️⚠️⚠️ no label files found! this should normally NOT happen unless the video contained no lovense overlay. ⚠️⚠️⚠️`);
+  logger.info(`[processLabelFiles] Found label files: ${labelFiles.length}`);
+  if (labelFiles.length === 0) logger.warn(`⚠️⚠️⚠️ no label files found! this should normally NOT happen unless the video contained no lovense overlay. ⚠️⚠️⚠️`);

   const detections: Map<number, Detection[]> = new Map();
   const names = data.names;
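The hand-written [processLabelFiles] prefixes in these hunks could also come from a pino child logger, which stamps a field onto every line it emits. A sketch of that pattern, not what the commit does:

    // Hypothetical child logger carrying the function name as a field:
    const log = logger.child({ fn: 'processLabelFiles' });
    log.info(`Found label files: ${labelFiles.length}`);
    // emits roughly: {"level":30,"fn":"processLabelFiles","msg":"Found label files: 3",...}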
@@ -356,13 +357,13 @@ export async function processLabelFiles(labelDir: string, data: DataYaml): Promi
   for (const file of labelFiles) {
     const match = file.match(/(\d+)\.txt$/);
     if (!match) {
-      console.log(`[processLabelFiles] Skipping invalid filename: ${file}`);
+      logger.info(`[processLabelFiles] Skipping invalid filename: ${file}`);
       continue;
     }

     const frameIndex = parseInt(match[1], 10);
     if (isNaN(frameIndex)) {
-      console.log(`[processLabelFiles] Skipping invalid frame index: ${file}`);
+      logger.info(`[processLabelFiles] Skipping invalid frame index: ${file}`);
       continue;
     }

@@ -391,10 +392,10 @@ export async function processLabelFiles(labelDir: string, data: DataYaml): Promi
     if (maxConfidence > 0 && selectedClassIndex !== -1) {
       const className = names[selectedClassIndex.toString()];
       if (className) {
-        console.log(`[processLabelFiles] Frame ${frameIndex}: detected class "${className}" with confidence ${maxConfidence}`);
+        logger.info(`[processLabelFiles] Frame ${frameIndex}: detected class "${className}" with confidence ${maxConfidence}`);
         frameDetections.push({ startFrame: frameIndex, endFrame: frameIndex, className });
       } else {
-        console.log(`[processLabelFiles] Frame ${frameIndex}: class index ${selectedClassIndex} has no name`);
+        logger.info(`[processLabelFiles] Frame ${frameIndex}: class index ${selectedClassIndex} has no name`);
       }
     }

@@ -420,9 +421,9 @@ export async function processLabelFiles(labelDir: string, data: DataYaml): Promi

   if (currentDetection) detectionSegments.push(currentDetection);

-  console.log(`[processLabelFiles] Total detection segments: ${detectionSegments.length}`);
+  logger.info(`[processLabelFiles] Total detection segments: ${detectionSegments.length}`);
   for (const segment of detectionSegments) {
-    console.log(` - Class "${segment.className}": frames ${segment.startFrame}–${segment.endFrame}`);
+    logger.info(` - Class "${segment.className}": frames ${segment.startFrame}–${segment.endFrame}`);
   }

   return detectionSegments;
@@ -2,6 +2,8 @@

 import { run } from "graphile-worker";
 import preset from '../graphile.config'
+import logger from "./utils/logger";


 async function main() {
   // Run a worker to execute jobs:
@@ -17,6 +19,6 @@ async function main() {
 }

 main().catch((err) => {
-  console.error(err);
+  logger.error(err);
   process.exit(1);
 });