fixing frontend image quirks

This commit is contained in:
CJ_Clippy 2024-06-12 17:38:11 -08:00
parent 1b436de8d8
commit 71f19065d0
33 changed files with 407 additions and 171 deletions

View File

@ -11,6 +11,9 @@ crds:
cert-manager:
kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.14.4/cert-manager.yaml
secrets:
./scripts/k8s-secrets.sh
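# flux: bootstraps Flux against the Gitea repo over SSH so it reconciles clusters/production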
flux:
flux bootstrap git --url="ssh://git@gitea.futureporn.net:2222/futureporn/fp" --branch=main --path="clusters/production" --private-key-file=/home/cj/.ssh/fp-flux

View File

@ -34,6 +34,16 @@ spec:
value: "{{ .Values.scout.cdnBucketUrl }}"
- name: STRAPI_URL
value: https://strapi.piko.sbtp.xyz
- name: S3_BUCKET_APPLICATION_KEY
valueFrom:
secretKeyRef:
name: scout
key: s3BucketApplicationKey
- name: S3_BUCKET_KEY_ID
valueFrom:
secretKeyRef:
name: scout
key: s3BucketKeyId
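# s3BucketKeyId and s3BucketApplicationKey are written into the "scout" secret by scripts/k8s-secrets.sh (updated later in this commit)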
- name: SCOUT_NITTER_ACCESS_KEY
valueFrom:
secretKeyRef:

View File

@ -1,4 +1,4 @@
import { getAllStreamsForVtuber } from "@/lib/streams";
import { getAllStreamsForVtuber, getStreamCountForVtuber } from "@/lib/streams";
import { getVodsForVtuber } from "@/lib/vods";
import { IVtuber } from "@/lib/vtubers";
@ -8,7 +8,7 @@ export interface IArchiveProgressProps {
export default async function ArchiveProgress ({ vtuber }: IArchiveProgressProps) {
// const vods = await getVodsForVtuber(vtuber.id)
// const streams = await getAllStreamsForVtuber(vtuber.id);
const streams = await getStreamCountForVtuber(vtuber.id);
// const goodStreams = await getAllStreamsForVtuber(vtuber.id, ['good']);
// const issueStreams = await getAllStreamsForVtuber(vtuber.id, ['issue']);
// const totalStreams = streams.length;
@ -21,7 +21,11 @@ export default async function ArchiveProgress ({ vtuber }: IArchiveProgressProps
const eligibleStreams = 50
return (
<div>
<p>@todo</p>
<pre>
<code>
{JSON.stringify(streams, null, 2)}
</code>
</pre>
<p className="heading">{eligibleStreams}/{totalStreams} Streams Archived ({completedPercentage}%)</p>
<progress className="progress is-success" value={eligibleStreams} max={totalStreams}>{completedPercentage}%</progress>
</div>

View File

@ -169,7 +169,8 @@ export default function StreamPage({ stream }: IStreamProps) {
<span className="title is-1"><FontAwesomeIcon icon={icon}></FontAwesomeIcon></span>
<p className="mt-3">{desc1}</p>
<p className="mt-5">{desc2}<br />
<Link href={`/upload?cuid=${stream.attributes.cuid}`}>Upload it here.</Link></p>
{/* <Link href={`/upload?cuid=${stream.attributes.cuid}`}>Upload it here.</Link></p> */}
<Link style={{ cursor: 'not-allowed' }} href={`/upload?cuid=${stream.attributes.cuid}`}><i>Uploads coming soon.</i></Link></p>
</div>
</article>
</div>

View File

@ -65,6 +65,7 @@ export default function StreamsTable() {
src={image}
alt={displayName}
placeholder="blur"
objectFit='contain'
blurDataURL={imageBlur}
width={32}
height={32}

View File

@ -24,6 +24,7 @@ export default async function VTuberCard(vtuber: IVtuber) {
className="is-rounded"
src={image}
alt={displayName}
objectFit="cover"
placeholder="blur"
blurDataURL={imageBlur}
width={48}

View File

@ -351,7 +351,25 @@ export async function fetchStreamData({ pageIndex, pageSize }: { pageIndex: numb
return d;
}
export async function getStreamCountForVtuber(vtuberId: number): Promise<number> {
if (!vtuberId) throw new Error(`getStreamCountForVtuber requires a vtuberId, but it was undefined.`);
const query = qs.stringify(
{
filters: {
vtuber: {
id: {
$eq: vtuberId
}
}
}
}
)
const res = await fetch(`${strapiUrl}/api/streams?${query}`, fetchStreamsOptions)
const data = await res.json()
console.log('getStreamCountForVtuber')
console.log(JSON.stringify(data, null, 2))
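// Strapi list responses include meta.pagination.total, so we return only the count rather than the stream rows.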
return data.meta.pagination.total
}
export async function getStreamsForVtuber(vtuberId: number, page: number = 1, pageSize: number = 25, sortDesc = true): Promise<IStreamsResponse> {
console.log(`getStreamsForVtuber() with strapiUrl=${strapiUrl}`)

View File

@ -26,3 +26,21 @@ export interface IMuxAssetResponse {
export interface IMeta {
pagination: IPagination;
}
export interface IPlatformNotification {
id: number;
attributes: {
source: string;
platform: string;
date: string;
date2: string;
vtuber: number;
}
}
export interface IPlatformNotificationResponse {
data: IPlatformNotification;
meta: IMeta;
}

View File

@ -2,9 +2,7 @@
import { IVod } from './vods'
import { strapiUrl, siteUrl } from './constants';
import { getSafeDate } from './dates';
import qs from 'qs';
import { resourceLimits } from 'worker_threads';
import { IMeta } from './types';
@ -43,6 +41,9 @@ export interface IVtuber {
image: string;
imageBlur?: string;
themeColor: string;
fanslyId?: string;
chaturbateId?: string;
twitterId?: string;
}
}

View File

@ -64,6 +64,7 @@ export default async function Page({ params }: { params: { slug: string } }) {
alt={vtuber.attributes.displayName}
src={vtuber.attributes.image}
fill={true}
objectFit='cover'
placeholder='blur'
blurDataURL={vtuber.attributes.imageBlur}
/>

View File

@ -14,6 +14,12 @@ const nextConfig = {
port: '',
pathname: '/**',
},
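// permit next/image to optimize images served from the fp-dev Bunny CDN zone (b-cdn.net)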
{
protocol: 'https',
hostname: 'fp-dev.b-cdn.net',
port: '',
pathname: '/**',
},
],
}
};

View File

@ -2,12 +2,10 @@ import cheerio from 'cheerio'
/**
*
* @param {Object} limiter An instance of node-rate-limiter, see https://github.com/jhurliman/node-rate-limiter
* @param {String} roomUrl example: https://chaturbate.com/projektmelody
* @returns {Object} initialRoomDossier
*/
export async function getInitialRoomDossier(limiter, roomUrl) {
await limiter.removeTokens(1);
export async function getInitialRoomDossier(roomUrl) {
try {
const res = await fetch(roomUrl, {
headers: {

View File

@ -1,13 +1,11 @@
import { describe } from 'mocha'
import { expect } from 'chai';
import { getInitialRoomDossier } from './cb.js'
import { RateLimiter } from "limiter";
describe('cb', function () {
let limiter = new RateLimiter({ tokensPerInterval: 10, interval: "minute" })
describe('getInitialRoomDossier', function () {
it('should return json', async function () {
const dossier = await getInitialRoomDossier(limiter, 'https://chaturbate.com/projektmelody')
const dossier = await getInitialRoomDossier('https://chaturbate.com/projektmelody')
expect(dossier).to.have.property('wschat_host')
})
})

View File

@ -12,12 +12,11 @@ const normalize = (url) => {
const fromUsername = (username) => `https://fansly.com/${username}`
const image = async function image (limiter, fanslyUserId) {
if (!limiter) throw new Error(`first arg passed to fansly.data.image must be a node-rate-limiter instance`);
if (!fanslyUserId) throw new Error(`second arg passed to fansly.data.image must be a {string} fanslyUserId`);
const image = async function image (fanslyUserId) {
if (!fanslyUserId) throw new Error(`first arg passed to fansly.data.image must be a {string} fanslyUserId`);
const url = `https://api.fansly.com/api/v1/account/${fanslyUserId}/avatar`
const filePath = getTmpFile('avatar.jpg')
return download({ filePath, limiter, url })
return download({ filePath, url })
}
const url = {

View File

@ -4,10 +4,6 @@ import EventEmitter from 'node:events';
import 'dotenv/config';
import { simpleParser } from 'mailparser';
// pinned to v2.0.1 due to https://github.com/jhurliman/node-rate-limiter/issues/80
import * as $limiter from 'limiter';
const { RateLimiter } = $limiter
if (!process.env.SCOUT_IMAP_SERVER) throw new Error('SCOUT_IMAP_SERVER is missing from env');
@ -15,8 +11,6 @@ if (!process.env.SCOUT_IMAP_PORT) throw new Error('SCOUT_IMAP_PORT is missing fr
if (!process.env.SCOUT_IMAP_USERNAME) throw new Error('SCOUT_IMAP_USERNAME is missing from env');
if (!process.env.SCOUT_IMAP_PASSWORD) throw new Error('SCOUT_IMAP_PASSWORD is missing from env');
const limiter = new RateLimiter({ tokensPerInterval: 1, interval: 3000 });
// https://stackoverflow.com/a/49428486/1004931
function streamToString(stream) {
const chunks = [];
@ -37,7 +31,6 @@ export class Email extends EventEmitter {
}
async archiveMessage(uid) {
await limiter.removeTokens(1);
await this.client.messageDelete(uid, { uid: true })
}

View File

@ -25,29 +25,26 @@ async function handleMessage({ email, msg }: { email: Email, msg: FetchMessageOb
// console.log(' ✏️ checking e-mail')
const { isMatch, url, platform, channel, displayName, date, userId, avatar }: NotificationData = (await checkEmail(body) )
if (isMatch) {
const wfId = `process-email-${createId()}`
// console.log(` ✏️ [DRY] starting Temporal workflow ${wfId} @todo actually start temporal workflow`)
// await signalRealtime({ url, platform, channel, displayName, date, userId, avatar })
// @todo invoke a Temporal workflow here
console.log(' ✏️✏️ starting Temporal workflow')
const handle = await client.workflow.start(processEmail, {
workflowId: wfId,
taskQueue: 'scout',
args: [{ url, platform, channel, displayName, date, userId, avatar }]
});
// const handle = client.getHandle(workflowId);
const result = await handle.result();
const result = await handle.result()
console.log(`result of the workflow is as follows`)
console.log(result)
throw new Error('!todo we are stopping after just one (for now) @todo')
// console.log(' ✏️✏️ creating stream entry in db')
// await createStreamInDb({ source: 'email', platform, channel, date, url, userId, avatar })
}
// console.log(' ✏️ archiving e-mail')
// await email.archiveMessage(msg.uid)
console.log(' ✏️ archiving e-mail')
await email.archiveMessage(msg.uid)
} catch (e) {
// console.error('error encountered')
console.error(`An error was encountered while handling the following e-mail message.\n${JSON.stringify(msg, null, 2)}\nError as follows.`)
@ -58,6 +55,6 @@ async function handleMessage({ email, msg }: { email: Email, msg: FetchMessageOb
(async () => {
const email = new Email()
email.once('message', (msg: FetchMessageObject) => handleMessage({ email, msg }))
email.on('message', (msg: FetchMessageObject) => handleMessage({ email, msg }))
await email.connect()
})()

View File

@ -11,6 +11,8 @@ import fs from 'node:fs'
if (!process.env.S3_BUCKET_NAME) throw new Error('S3_BUCKET_NAME was undefined in env');
if (!process.env.SCOUT_NITTER_URL) throw new Error('SCOUT_NITTER_URL was undefined in env');
if (!process.env.S3_BUCKET_KEY_ID) throw new Error('S3_BUCKET_KEY_ID was undefined in env');
if (!process.env.S3_BUCKET_APPLICATION_KEY) throw new Error('S3_BUCKET_APPLICATION_KEY was undefined in env');

View File

@ -4,7 +4,6 @@ import qs from 'qs'
import { subMinutes, addMinutes } from 'date-fns'
import { fpSlugify, download } from './utils.js'
import { getProminentColor } from './image.js'
import { RateLimiter } from "limiter"
import { getImage } from './vtuber.js'
import fansly from './fansly.js'
@ -36,7 +35,6 @@ if (!process.env.CDN_BUCKET_URL) throw new Error('CDN_BUCKET_URL is undefined in
*/
export async function createStreamInDb ({ source, platform, channel, date, url, userId }) {
// const limiter = new RateLimiter({ tokensPerInterval: 0.3, interval: "second" });
let vtuberId, streamId
console.log('>> # Step 1')
@ -117,7 +115,7 @@ export async function createStreamInDb ({ source, platform, channel, date, url,
const b2FileData = await s3.uploadFile(imageFile)
// get b2 cdn link to image
const imageCdnLink = `https://${process.env.CDN_BUCKET_URL}/${b2FileData.Key}`
const imageCdnLink = `${process.env.CDN_BUCKET_URL}/${b2FileData.Key}`
const createVtuberRes = await fetch(`${process.env.STRAPI_URL}/api/vtubers`, {

View File

@ -6,12 +6,12 @@
import fetch from "node-fetch"
import { NotificationData, processEmail } from "./workflows.js"
import qs from 'qs'
import { IVtuberResponse } from 'next'
import { IPlatformNotificationResponse, IVtuberResponse, IStreamResponse } from 'next'
import { getImage } from '../vtuber.js'
import { fpSlugify, download } from '../utils.js'
import fansly from '../fansly.js'
import { fpSlugify } from '../utils.js'
import { getProminentColor } from '../image.js'
import { uploadFile } from '../s3.js'
import { addMinutes, subMinutes } from 'date-fns'
export type ChargeResult = {
status: string;
@ -99,11 +99,10 @@ export async function upsertVtuber({ platform, userId, url, channel }: Notificat
const dummyVtuber = {
attributes: {
slug: fpSlugify(channel),
fansly: fansly.url.fromUsername(channel)
fanslyId: (platform === 'fansly') ? userId : null
}
}
const platformImageUrl = await getImage(dummyVtuber)
const imageFile = await download({ url: platformImageUrl })
const imageFile = await getImage(dummyVtuber)
// get themeColor from image
const themeColor = await getProminentColor(imageFile)
@ -112,7 +111,7 @@ export async function upsertVtuber({ platform, userId, url, channel }: Notificat
const b2FileData = await uploadFile(imageFile)
// get b2 cdn link to image
const imageCdnLink = `https://${process.env.CDN_BUCKET_URL}/${b2FileData.Key}`
const imageCdnLink = `${process.env.CDN_BUCKET_URL}/${b2FileData.Key}`
const createVtuberRes = await fetch(`${process.env.STRAPI_URL}/api/vtubers`, {
@ -142,13 +141,187 @@ export async function upsertVtuber({ platform, userId, url, channel }: Notificat
console.log(`>>> vtuber created with id=${vtuberId}`)
}
}
return 777
return vtuberId
}
export async function upsertPlatformNotification(): Promise<number> {
return 777
export async function upsertPlatformNotification({ source, date, platform, vtuberId }: { source: string, date: string, platform: string, vtuberId: number }): Promise<number> {
if (!source) throw new Error(`upsertPlatformNotification requires source arg, but it was undefined`);
if (!date) throw new Error(`upsertPlatformNotification requires date arg, but it was undefined`);
if (!platform) throw new Error(`upsertPlatformNotification requires platform arg, but it was undefined`);
if (!vtuberId) throw new Error(`upsertPlatformNotification requires vtuberId arg, but it was undefined`);
let pNotifId
// # Step 2.
// Next we create the platform-notification record.
// This probably doesn't already exist, so we don't check for a pre-existing platform-notification.
const pNotifPayload = {
data: {
source: source,
date: date,
date2: date,
platform: platform,
vtuber: vtuberId,
}
export async function upsertStream(): Promise<number> {
return 777
}
console.log('pNotifPayload as follows')
console.log(pNotifPayload)
const pNotifCreateRes = await fetch(`${process.env.STRAPI_URL}/api/platform-notifications`, {
method: 'POST',
headers: {
'authorization': `Bearer ${process.env.SCOUT_STRAPI_API_KEY}`,
'Content-Type': 'application/json'
},
body: JSON.stringify(pNotifPayload)
})
const pNotifData = await pNotifCreateRes.json() as IPlatformNotificationResponse
if (pNotifData.error) {
console.error('>> we failed to create platform-notification, there was an error in the response')
console.error(JSON.stringify(pNotifData.error, null, 2))
throw new Error(JSON.stringify(pNotifData.error, null, 2))
}
console.log(`>> pNotifData (json response) is as follows`)
console.log(pNotifData)
if (!pNotifData.data?.id) throw new Error('failed to create the platform-notification! The response was missing an id');
pNotifId = pNotifData.data.id
if (!pNotifId) throw new Error('failed to get Platform Notification ID');
return pNotifId
}
export async function upsertStream({
date,
vtuberId,
platform,
pNotifId
}: {
date: string,
vtuberId: number,
platform: string,
pNotifId: number
}): Promise<number> {
if (!date) throw new Error(`upsertStream requires date in the arg object, but it was undefined`);
if (!vtuberId) throw new Error(`upsertStream requires vtuberId in the arg object, but it was undefined`);
if (!platform) throw new Error(`upsertStream requires platform in the arg object, but it was undefined`);
if (!pNotifId) throw new Error(`upsertStream requires pNotifId in the arg object, but it was undefined`);
let streamId
// # Step 3.
// Finally we find or create the stream record
// The stream may already be in the db (the streamer is multi-platform streaming), so we look for that record.
// This gets a bit tricky. How do we determine one stream from another?
// For now, the rule is 30 minutes of separation.
// Anything <=30m is interpreted as the same stream. Anything >30m is interpreted as a different stream.
// If the stream is not in the db, we create the stream record
const dateSinceRange = subMinutes(new Date(date), 30)
const dateUntilRange = addMinutes(new Date(date), 30)
console.log(`Find a stream within + or - 30 mins of the notif date=${new Date(date).toISOString()}. dateSinceRange=${dateSinceRange.toISOString()}, dateUntilRange=${dateUntilRange.toISOString()}`)
const findStreamQueryString = qs.stringify({
populate: 'platform-notifications',
filters: {
date: {
$gte: dateSinceRange,
$lte: dateUntilRange
},
vtuber: {
id: {
'$eq': vtuberId
}
}
}
}, { encode: false })
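// For illustration (hypothetical values): with vtuberId=5 and a notification date of 2024-06-12T17:00:00.000Z,
// qs serializes the object above (dates via toISOString, brackets left literal by encode: false) to:
// populate=platform-notifications&filters[date][$gte]=2024-06-12T16:30:00.000Z&filters[date][$lte]=2024-06-12T17:30:00.000Z&filters[vtuber][id][$eq]=5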
console.log('>> findStream')
const findStreamRes = await fetch(`${process.env.STRAPI_URL}/api/streams?${findStreamQueryString}`, {
method: 'GET',
headers: {
'authorization': `Bearer ${process.env.SCOUT_STRAPI_API_KEY}`,
'Content-Type': 'application/json'
}
})
const findStreamData = await findStreamRes.json() as IStreamResponse
if (findStreamData?.data && findStreamData.data.length > 0) {
console.log('>> we found a findStreamData json. (there is an existing stream for this e-mail/notification)')
console.log(JSON.stringify(findStreamData, null, 2))
streamId = findStreamData.data[0].id
// Before we're done here, we need to do something extra. We need to populate isChaturbateStream and/or isFanslyStream.
// We know which of these booleans to set based on the stream's related platformNotifications
// We go through each pNotif and look at its platform
let isFanslyStream = false
let isChaturbateStream = false
if (findStreamData.data[0].attributes.platformNotifications) {
for (const pn of findStreamData.data[0].attributes.platformNotifications) {
if (pn.platform === 'fansly') {
isFanslyStream = true
} else if (pn.platform === 'chaturbate') {
isChaturbateStream = true
}
}
}
console.log(`>>> updating stream ${streamId}. isFanslyStream=${isFanslyStream}, isChaturbateStream=${isChaturbateStream}`)
const updateStreamRes = await fetch(`${process.env.STRAPI_URL}/api/streams/${streamId}`, {
method: 'PUT',
headers: {
'authorization': `Bearer ${process.env.SCOUT_STRAPI_API_KEY}`,
'content-type': 'application/json'
},
body: JSON.stringify({
data: {
isFanslyStream: isFanslyStream,
isChaturbateStream: isChaturbateStream,
platformNotifications: [
pNotifId
]
}
})
})
const updateStreamJson = await updateStreamRes.json() as IStreamResponse
if (updateStreamJson?.error) throw new Error(JSON.stringify(updateStreamJson, null, 2));
console.log(`>> assuming a successful update to the stream record. response as follows.`)
console.log(JSON.stringify(updateStreamJson, null, 2))
}
if (!streamId) {
console.log('>> did not find a streamId, so we go ahead and create a stream record in the db.')
const createStreamPayload = {
data: {
isFanslyStream: platform === 'fansly',
isChaturbateStream: platform === 'chaturbate',
archiveStatus: 'missing',
date: date,
date2: date,
date_str: date,
vtuber: vtuberId,
platformNotifications: [
pNotifId
]
}
}
console.log('>> createStreamPayload as follows')
console.log(createStreamPayload)
const createStreamRes = await fetch(`${process.env.STRAPI_URL}/api/streams`, {
method: 'POST',
headers: {
'authorization': `Bearer ${process.env.SCOUT_STRAPI_API_KEY}`,
"Content-Type": "application/json"
},
body: JSON.stringify(createStreamPayload)
})
const createStreamJson = await createStreamRes.json() as IStreamResponse
console.log('>> we got the createStreamJson')
console.log(createStreamJson)
if (createStreamJson.error) {
console.error(JSON.stringify(createStreamJson.error, null, 2))
throw new Error('Failed to create stream in DB due to an error. (see above)')
}
streamId = createStreamJson.data.id
}
if (!streamId) throw new Error('failed to get streamId')
return streamId
}

View File

@ -1,6 +1,7 @@
import path from "path"
import { NativeConnection, Worker } from "@temporalio/worker"
import * as activities from "./activities.js"
import pRetry from 'p-retry'
if (!process.env.TEMPORAL_SERVICE_ADDRESS) throw new Error(`TEMPORAL_SERVICE_ADDRESS is missing in env`);
@ -49,7 +50,11 @@ async function run() {
await worker.run();
}
run().catch((err) => {
console.error(err);
process.exit(1);
await pRetry(run, {
forever: true,
onFailedAttempt: (e) => {
console.error(e);
console.error(`there was an error during scout-worker run(). run() will now restart.`)
console.log(`P.S., check out these booba --> (.)(.)`)
}
})

View File

@ -15,9 +15,6 @@ export type NotificationData = {
};
const {
chargeUser,
checkAndDecrementInventory,
incrementInventory,
upsertPlatformNotification,
upsertStream,
upsertVtuber,
@ -39,10 +36,9 @@ export async function processEmail({
// Step 1
const vtuberId = await upsertVtuber({ url, platform, channel, displayName, date, userId, avatar })
console.log('we have finished upsertVtuber and the vtuberId is '+vtuberId)
throw new Error('Error: Error: error: erorreorororr; @todo');
const pNotifId = await upsertPlatformNotification()
const streamId = await upsertStream()
const pNotifId = await upsertPlatformNotification({ vtuberId, source: 'email', date, platform })
const streamId = await upsertStream({ date, vtuberId, platform, pNotifId })
return { vtuberId, pNotifId, streamId }
}

View File

@ -19,9 +19,8 @@ const normalize = (url) => {
}
const image = async function image (limiter, twitterUsername) {
if (!limiter) throw new Error('first arg to twitter.data.image must be an instance of node-rate-limiter');
if (!twitterUsername) throw new Error('second arg to twitter.data.image must be a twitterUsername. It was undefined.');
const image = async function image (twitterUsername) {
if (!twitterUsername) throw new Error('first arg to twitter.data.image must be a twitterUsername. It was undefined.');
const requestDataFromNitter = async () => {
const url = `${process.env.SCOUT_NITTER_URL}/${twitterUsername}/rss?key=${process.env.SCOUT_NITTER_ACCESS_KEY}`
// console.log(`fetching from url=${url}`)
@ -37,7 +36,7 @@ const image = async function image (limiter, twitterUsername) {
const dom = htmlparser2.parseDocument(body);
const $ = load(dom, { _useHtmlParser2: true })
const urls = $('url:contains("profile_images")').first()
const downloadedImageFile = await download({ limiter, url: urls.text() })
const downloadedImageFile = await download({ url: urls.text() })
return downloadedImageFile
} catch (e) {
console.error(`while fetching rss from nitter, the following error was encountered.`)

View File

@ -2,7 +2,6 @@ import { expect } from 'chai'
import twitter from './twitter.js'
import { describe } from 'mocha'
import { tmpFileRegex } from './utils.js'
import { RateLimiter } from 'limiter'
describe('twitter', function () {
describe('regex', function () {
@ -19,10 +18,9 @@ describe('twitter', function () {
})
describe('data', function () {
this.timeout(1000*30)
const limiter = new RateLimiter({ tokensPerInterval: 10, interval: "second" })
describe('image', function () {
it("should download the twitter users's avatar and save it to disk", async function () {
const imgFile = await twitter.data.image(limiter, 'projektmelody')
const imgFile = await twitter.data.image('projektmelody')
expect(imgFile).to.match(tmpFileRegex)
})
})

View File

@ -24,19 +24,16 @@ export function getTmpFile(str) {
/**
*
* @param {Object} limiter [https://github.com/jhurliman/node-rate-limiter](node-rate-limiter) instance
* @param {String} url
* @returns {String} filePath
*
* greetz https://stackoverflow.com/a/74722818/1004931
*/
export async function download({ limiter, url, filePath }) {
if (!limiter) throw new Error(`first arg passed to download() must be a node-rate-limiter instance.`);
export async function download({ url, filePath }) {
if (!url) throw new Error(`download() requires a {string} url`);
const fileBaseName = basename(url)
filePath = filePath || path.join(os.tmpdir(), `${createId()}_${fileBaseName}`)
const stream = fs.createWriteStream(filePath)
await limiter.removeTokens(1);
const requestData = async () => {
const response = await fetch(url, {

View File

@ -1,7 +1,6 @@
import { fpSlugify, getTmpFile, download } from './utils.js'
import { expect } from 'chai'
import { describe } from 'mocha'
import { RateLimiter } from "limiter"
describe('utils', function () {
@ -18,9 +17,8 @@ describe('utils', function () {
})
}),
describe('download', function () {
const limiter = new RateLimiter({ tokensPerInterval: 100, interval: "second" })
it('should get the file', async function () {
const file = await download({ limiter, url: 'https://futureporn-b2.b-cdn.net/sample.webp' })
const file = await download({ url: 'https://futureporn-b2.b-cdn.net/sample.webp' })
expect(file).to.match(/\/tmp\/.*sample\.webp$/)
})
})

View File

@ -20,25 +20,22 @@ import fansly from './fansly.js'
*
* We depend on one of these social media URLs. If there is neither Twitter or fansly listed, we throw an error.
*
* @param {Object} limiter -- instance of node-rate-limiter
* @param {Object} vtuber -- vtuber instance from strapi
* @returns {String} filePath -- path on disk where the image was saved
*/
export async function getImage(limiter, vtuber) {
if (!limiter) throw new Error('first arg must be node-rate-limiter instace');
if (!vtuber) throw new Error('second arg must be vtuber instance');
await limiter.removeTokens(1);
export async function getImage(vtuber) {
if (!vtuber) throw new Error('first arg must be vtuber instance');
const { twitter: twitterUrl, fanslyId: fanslyId } = vtuber.attributes
const twitterUsername = twitterUrl && twitter.regex.username.exec(twitterUrl).at(1)
let img;
if (twitterUrl) {
img = await twitter.data.image(limiter, twitterUsername)
img = await twitter.data.image(twitterUsername)
} else if (fanslyId) {
img = await fansly.data.image(limiter, fanslyId)
img = await fansly.data.image(fanslyId)
} else {
const msg = 'while attempting to get vtuber image, there was neither twitter nor fansly listed. One of these must exist for us to download an image.'
const msg = `while attempting to get vtuber image, there was neither twitterUrl nor fanslyId listed. One of these must exist for us to download an image. \nvtuber=${JSON.stringify(vtuber, null, 2)}`
console.error(msg)
throw new Error(msg)
}

View File

@ -1,7 +1,6 @@
import { expect } from 'chai'
import { describe } from 'mocha'
import { RateLimiter } from 'limiter'
import { getImage } from './vtuber.js'
import { tmpFileRegex } from './utils.js'
@ -25,13 +24,12 @@ const vtuberFixture1 = {
describe('vtuber', function () {
this.timeout(1000*60)
describe('getImage', function () {
const limiter = new RateLimiter({ tokensPerInterval: 1, interval: "second" })
it('should download an avatar image from twitter', async function () {
const file = await getImage(limiter, vtuberFixture0)
const file = await getImage(vtuberFixture0)
expect(file).to.match(tmpFileRegex)
})
it('should download an avatar image from fansly', async function () {
const file = await getImage(limiter, vtuberFixture1)
const file = await getImage(vtuberFixture1)
expect(file).to.match(tmpFileRegex)
})
})

View File

@ -5,3 +5,9 @@
* ironmouse "Thank you" (for testing): 4760169
* cj_clippy "Full library access" (for production): 9380584
* cj_clippy "Your URL displayed on Futureporn.net": 10663202
### Content-Type Builder (Docker caveat)
Don't use the web UI to create or update Content-Types! The changes will be lost. This is a side effect of our hacked-together setup for running Strapi with pnpm in Docker.
Instead, hand-edit the content-type schemas in ./src/api/(...). For the changes to take effect, trigger a Strapi resource update in Tilt.
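For example, this commit adds `fanslyId`, `chaturbateId`, and `twitterId` to the vtuber content type by editing its schema file directly (under Strapi v4's default layout that file is `src/api/vtuber/content-types/vtuber/schema.json`; adjust the path to this repo's actual structure). A minimal sketch of such a hand edit:

```json
{
  "attributes": {
    "fanslyId": {
      "type": "string"
    }
  }
}
```

Strapi only reads these schema files at startup, which is why the Tilt-triggered resource update is needed for the change to apply.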

View File

@ -45,6 +45,12 @@
"type": "relation",
"relation": "oneToOne",
"target": "api::vtuber.vtuber"
},
"stream": {
"type": "relation",
"relation": "manyToOne",
"target": "api::stream.stream",
"inversedBy": "platformNotifications"
}
}
}

View File

@ -22,11 +22,6 @@ module.exports = {
async afterUpdate(event) {
/**
* NOTE
*
* These hooks do not fire in response to API calls. They only fire in response to UI saves.
*/
console.log(`>>>>>>>>>>>>>> STREAM is afterUpdate !!!!!!!!!!!!`);
const { data, where, select, populate } = event.params;
@ -93,14 +88,16 @@ module.exports = {
console.log(`let's find the platformNotifications`)
console.log(JSON.stringify(existingData2, null, 2))
// Iterate through all vods to determine archiveStatus
for (const pn of existingData2.platform_notifications) {
// Iterate through all platformNotifications to determine platform
if (existingData2.platformNotifications) {
for (const pn of existingData2.platformNotifications) {
if (pn.platform === 'fansly') {
isFanslyStream = true
} else if (pn.platform === 'chaturbate') {
isChaturbateStream = true
}
}
}
// we can't use query engine here, because that would trigger an infinite loop
// where this
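
A note on the comment above: one way to persist the computed isFanslyStream / isChaturbateStream flags without re-triggering this afterUpdate hook is to write through the low-level Knex connection, which bypasses Strapi's lifecycle layer. This is only an illustrative sketch assuming Strapi v4 defaults (strapi.db.connection exposing Knex, snake_case column names, a `streams` table), not necessarily how this file resolves it:

```js
// Illustrative only: raw Knex writes skip document lifecycles, so this update will not re-fire afterUpdate.
await strapi.db.connection('streams')
  .where({ id: where.id })                      // `where` is destructured from event.params above
  .update({
    is_fansly_stream: isFanslyStream,           // assumed snake_case column mapping
    is_chaturbate_stream: isChaturbateStream,
  })
```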

View File

@ -12,6 +12,12 @@
},
"pluginOptions": {},
"attributes": {
"platformNotifications": {
"type": "relation",
"relation": "oneToMany",
"target": "api::platform-notification.platform-notification",
"mappedBy": "stream"
},
"date_str": {
"type": "string",
"required": true,

View File

@ -79,7 +79,7 @@
},
"description1": {
"type": "text",
"required": true
"required": false
},
"description2": {
"type": "text"
@ -113,6 +113,15 @@
"relation": "oneToMany",
"target": "api::stream.stream",
"mappedBy": "vtuber"
},
"fanslyId": {
"type": "string"
},
"chaturbateId": {
"type": "string"
},
"twitterId": {
"type": "string"
}
}
}

View File

@ -39,7 +39,9 @@ kubectl --namespace futureporn create secret generic scout \
--from-literal=imapUsername=${SCOUT_IMAP_USERNAME} \
--from-literal=imapPassword=${SCOUT_IMAP_PASSWORD} \
--from-literal=imapAccessToken=${SCOUT_IMAP_ACCESS_TOKEN} \
--from-literal=nitterAccessKey=${SCOUT_NITTER_ACCESS_KEY}
--from-literal=nitterAccessKey=${SCOUT_NITTER_ACCESS_KEY} \
--from-literal=s3BucketKeyId=${S3_BUCKET_KEY_ID} \
--from-literal=s3BucketApplicationKey=${S3_BUCKET_APPLICATION_KEY}
kubectl --namespace futureporn delete secret link2cid --ignore-not-found
kubectl --namespace futureporn create secret generic link2cid \