temporarily simplify scout for testing
ci / build (push) Failing after 2m4s

CJ_Clippy 2024-07-15 22:57:27 -08:00
parent 5583580afc
commit 4b060bf0f9
13 changed files with 159 additions and 86 deletions

View File

@@ -2,29 +2,43 @@ FROM node:20 as base
 ENV PNPM_HOME="/pnpm"
 ENV PATH="$PNPM_HOME:$PATH"
 WORKDIR /app
-RUN corepack enable
+RUN corepack enable && corepack prepare pnpm@9.2.0 --activate
 FROM base as build
-WORKDIR /usr/src/app
-RUN mkdir -p /usr/src/app/packages/scout && mkdir /usr/src/app/packages/taco && mkdir -p /prod/scout
-COPY package.json pnpm-lock.yaml .
-COPY ./packages/scout/pnpm-lock.yaml ./packages/scout/
-COPY ./packages/taco/pnpm-lock.yaml ./packages/taco/
-COPY ./packages/types/pnpm-lock.yaml ./packages/types/
-COPY ./packages/temporal-workflows/pnpm-lock.yaml ./packages/temporal-workflows/
-COPY ./packages/temporal-worker/pnpm-lock.yaml ./packages/temporal-worker/
-RUN pnpm fetch
+WORKDIR /app
+RUN mkdir -p /app/packages/scout && mkdir /app/packages/taco && mkdir -p /prod/scout
+## Copy manifests, lockfiles, and configs into docker context
+COPY package.json pnpm-lock.yaml .npmrc .
+COPY ./packages/scout/pnpm-lock.yaml ./packages/scout/package.json ./packages/scout/
+COPY ./packages/utils/pnpm-lock.yaml ./packages/utils/package.json ./packages/utils/
+COPY ./packages/image/pnpm-lock.yaml ./packages/image/package.json ./packages/image/
+COPY ./packages/storage/pnpm-lock.yaml ./packages/storage/package.json ./packages/storage/
+COPY ./packages/taco/pnpm-lock.yaml ./packages/taco/package.json ./packages/taco/
+COPY ./packages/types/pnpm-lock.yaml ./packages/types/package.json ./packages/types/
+COPY ./packages/temporal-workflows/pnpm-lock.yaml ./packages/temporal-workflows/package.json ./packages/temporal-workflows/
+COPY ./packages/temporal-worker/pnpm-lock.yaml ./packages/temporal-worker/package.json ./packages/temporal-worker/
+## Install npm packages
 RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --recursive --frozen-lockfile
+## Copy package code into docker context
+COPY ./packages/utils/ ./packages/utils/
+COPY ./packages/image/ ./packages/image/
+COPY ./packages/storage/ ./packages/storage/
 COPY ./packages/scout/ ./packages/scout/
 COPY ./packages/taco/ ./packages/taco/
 COPY ./packages/types/ ./packages/types/
 COPY ./packages/temporal-workflows/ ./packages/temporal-workflows/
 COPY ./packages/temporal-worker/ ./packages/temporal-worker/
+## Build (transpile TS into JS)
 RUN pnpm -r build
+## Deploy (copy all production code into one place)
 RUN pnpm deploy --filter=scout --prod /prod/scout
 FROM base as scout
 COPY --from=build /prod/scout .
+RUN ls -la .
 ENTRYPOINT ["pnpm", "start"]

View File

@@ -9,10 +9,9 @@
   },
   "scripts": {
     "test": "mocha --require ts-node/register src/**/*.spec.ts",
-    "build:worker": "tsc --build ./tsconfig.json",
-    "start": "echo please use either start:manager or start:worker",
-    "start:manager": "node --loader ts-node/esm ./src/index.ts",
-    "start:worker": "node --loader ts-node/esm ./src/temporal/worker.ts"
+    "build": "tsc --build",
+    "dev": "nodemon --ext js,ts,json,yaml --exec \"node --loader ts-node/esm --disable-warning=ExperimentalWarning ./src/index.ts\"",
+    "start": "node /app/dist/index.js"
   },
   "keywords": [],
   "author": "@CJ_Clippy",
@@ -21,6 +20,8 @@
     "@aws-sdk/client-s3": "^3.583.0",
     "@aws-sdk/lib-storage": "^3.588.0",
     "@aws-sdk/s3-request-presigner": "^3.588.0",
+    "@futureporn/temporal-workflows": "workspace:^",
+    "@futureporn/types": "workspace:^",
     "@paralleldrive/cuid2": "^2.2.2",
     "@temporalio/client": "^1.9.0",
     "@temporalio/worker": "^1.9.0",
@@ -38,23 +39,24 @@
     "limiter": "2.0.1",
     "mailparser": "^3.7.1",
     "node-fetch": "^3.3.0",
-    "nodemon": "^3.1.3",
     "p-retry": "^5.1.2",
     "prevvy": "^7.0.1",
     "qs": "^6.12.1",
     "sharp": "^0.33.4",
     "slugify": "^1.6.6",
     "tsx": "^4.7.2",
-    "@futureporn/types": "workspace:^",
     "xpath": "^0.0.34"
   },
   "packageManager": "pnpm@9.2.0",
   "devDependencies": {
     "@types/chai": "^4.3.16",
+    "@types/cheerio": "^0.22.35",
+    "@types/mailparser": "^3.4.4",
     "@types/mocha": "^10.0.7",
     "chai": "^5.1.0",
     "mocha": "^10.4.0",
+    "nodemon": "^3.1.4",
     "ts-node": "^10.9.2",
-    "typescript": "^5.4.5"
+    "typescript": "^5.5.3"
   }
 }

View File

@@ -17,6 +17,9 @@ importers:
       '@aws-sdk/s3-request-presigner':
         specifier: ^3.588.0
         version: 3.614.0
+      '@futureporn/temporal-workflows':
+        specifier: workspace:^
+        version: link:../temporal-workflows
       '@futureporn/types':
         specifier: workspace:^
         version: link:../types
@@ -71,9 +74,6 @@ importers:
       node-fetch:
         specifier: ^3.3.0
         version: 3.3.2
-      nodemon:
-        specifier: ^3.1.3
-        version: 3.1.4
       p-retry:
         specifier: ^5.1.2
         version: 5.1.2
@@ -99,6 +99,12 @@ importers:
       '@types/chai':
         specifier: ^4.3.16
         version: 4.3.16
+      '@types/cheerio':
+        specifier: ^0.22.35
+        version: 0.22.35
+      '@types/mailparser':
+        specifier: ^3.4.4
+        version: 3.4.4
       '@types/mocha':
         specifier: ^10.0.7
         version: 10.0.7
@@ -108,11 +114,14 @@ importers:
       mocha:
         specifier: ^10.4.0
         version: 10.6.0
+      nodemon:
+        specifier: ^3.1.4
+        version: 3.1.4
       ts-node:
         specifier: ^10.9.2
         version: 10.9.2(@swc/core@1.6.13)(@types/node@20.14.10)(typescript@5.5.3)
       typescript:
-        specifier: ^5.4.5
+        specifier: ^5.5.3
         version: 5.5.3
 packages:
@@ -972,6 +981,9 @@ packages:
   '@types/chai@4.3.16':
     resolution: {integrity: sha512-PatH4iOdyh3MyWtmHVFXLWCCIhUbopaltqddG9BzB+gMIzee2MJrvd+jouii9Z3wzQJruGWAm7WOMjgfG8hQlQ==}
+  '@types/cheerio@0.22.35':
+    resolution: {integrity: sha512-yD57BchKRvTV+JD53UZ6PD8KWY5g5rvvMLRnZR3EQBCZXiDT/HR+pKpMzFGlWNhFrXlo7VPZXtKvIEwZkAWOIA==}
   '@types/debug@4.1.12':
     resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==}
@@ -996,6 +1008,9 @@ packages:
   '@types/luxon@3.4.2':
     resolution: {integrity: sha512-TifLZlFudklWlMBfhubvgqTXRzLDI5pCbGa4P8a3wPyUQSW+1xQ5eDsreP9DWHX3tjq1ke96uYG/nwundroWcA==}
+  '@types/mailparser@3.4.4':
+    resolution: {integrity: sha512-C6Znp2QVS25JqtuPyxj38Qh+QoFcLycdxsvcc6IZCGekhaMBzbdTXzwGzhGoYb3TfKu8IRCNV0sV1o3Od97cEQ==}
   '@types/mocha@10.0.7':
     resolution: {integrity: sha512-GN8yJ1mNTcFcah/wKEFIJckJx9iJLoMSzWcfRRuxz/Jk+U6KQNnml+etbtxFK8lPjzOw3zp4Ha/kjSst9fsHYw==}
@@ -3536,6 +3551,10 @@ snapshots:
   '@types/chai@4.3.16': {}
+  '@types/cheerio@0.22.35':
+    dependencies:
+      '@types/node': 20.14.10
   '@types/debug@4.1.12':
     dependencies:
       '@types/ms': 0.7.34
@@ -3564,6 +3583,11 @@ snapshots:
   '@types/luxon@3.4.2': {}
+  '@types/mailparser@3.4.4':
+    dependencies:
+      '@types/node': 20.14.10
+      iconv-lite: 0.6.3
   '@types/mocha@10.0.7': {}
   '@types/ms@0.7.34': {}

View File

@@ -4,57 +4,74 @@
  * watches an e-mail inbox for going live notifications
  */
-import { checkEmail } from './parsers.js'
-// import { createStreamInDb } from './signals.js'
-import { Email } from './imap.js'
+// import { checkEmail } from './parsers.js'
+// // import { createStreamInDb } from './signals.js'
+// import { Email } from './imap.js'
 import { Client, Connection } from '@temporalio/client'
-import { NotificationData, processEmailNotification } from 'temporal-workflows'
-import { FetchMessageObject } from 'imapflow'
+// import { type NotificationData } from '@futureporn/types'
+// import { type FetchMessageObject } from 'imapflow'
 import { createId } from '@paralleldrive/cuid2'
+import { WorkflowA } from '@futureporn/temporal-workflows/workflows.js'
+console.log('connecting to temporal...')
 const connection = await Connection.connect({ address: 'temporal-frontend.futureporn.svc.cluster.local:7233' });
 const client = new Client({ connection, namespace: 'futureporn' });
-async function handleMessage({ email, msg }: { email: Email, msg: FetchMessageObject }) {
-  try {
-    console.log(' ✏️ loading message')
-    const body = await email.loadMessage(msg.uid)
-    // console.log(' ✏️ checking e-mail')
-    const { isMatch, url, platform, channel, displayName, date, userId, avatar }: NotificationData = (await checkEmail(body) )
-    if (isMatch) {
-      const wfId = `process-email-${createId()}`
-      // console.log(` ✏️ [DRY] starting Temporal workflow ${wfId} @todo actually start temporal workflow`)
-      // await signalRealtime({ url, platform, channel, displayName, date, userId, avatar })
-      // @todo invoke a Temporal workflow here
-      console.log(' ✏️✏️ starting Temporal workflow')
-      const handle = await client.workflow.start(processEmail, {
-        workflowId: wfId,
-        taskQueue: 'scout',
-        args: [{ url, platform, channel, displayName, date, userId, avatar }]
-      });
-      // const handle = client.getHandle(workflowId);
-      const result = await handle.result()
-      console.log(`result of the workflow is as follows`)
-      console.log(result)
-    }
-    console.log(' ✏️ archiving e-mail')
-    await email.archiveMessage(msg.uid)
-  } catch (e) {
-    // console.error('error encoutered')
-    console.error(`An error was encountered while handling the following e-mail message.\n${JSON.stringify(msg, null, 2)}\nError as follows.`)
-    console.error(e)
-  }
-}
+// async function handleMessage({ email, msg }: { email: Email, msg: FetchMessageObject }) {
+//   try {
+//     console.log(' ✏️ loading message')
+//     const body = await email.loadMessage(msg.uid)
+//     // console.log(' ✏️ checking e-mail')
+//     const { isMatch, url, platform, channel, displayName, date, userId, avatar }: NotificationData = (await checkEmail(body) )
+//     if (isMatch) {
+//       const wfId = `process-email-${createId()}`
+//       // console.log(` ✏️ [DRY] starting Temporal workflow ${wfId} @todo actually start temporal workflow`)
+//       // await signalRealtime({ url, platform, channel, displayName, date, userId, avatar })
+//       // @todo invoke a Temporal workflow here
+//       console.log(' ✏️✏️ starting Temporal workflow')
+//       // const handle = await client.workflow.start(WorkflowA, {
+//       //   workflowId: wfId,
+//       //   taskQueue: 'futureporn'
+//       // });
+//       // // const handle = await client.workflow.start(processNotifEmail, {
+//       // //   workflowId: wfId,
+//       // //   taskQueue: 'futureporn',
+//       // //   args: [{ url, platform, channel, displayName, date, userId, avatar }]
+//       // // });
+//       // // const handle = client.getHandle(workflowId);
+//       // const result = await handle.result()
+//       // console.log(`result of the workflow is as follows`)
+//       // console.log(result)
+//     }
+//     console.log(' ✏️ archiving e-mail')
+//     await email.archiveMessage(msg.uid)
+//   } catch (e) {
+//     // console.error('error encoutered')
+//     console.error(`An error was encountered while handling the following e-mail message.\n${JSON.stringify(msg, null, 2)}\nError as follows.`)
+//     console.error(e)
+//   }
+// }
 (async () => {
-  const email = new Email()
-  email.once('message', (msg: FetchMessageObject) => handleMessage({ email, msg }))
-  await email.connect()
+  // const email = new Email()
+  // email.once('message', (msg: FetchMessageObject) => handleMessage({ email, msg }))
+  // await email.connect()
+  console.log('scout is starting')
+  const wfId = `process-email-${createId()}`
+  const handle = await client.workflow.start(WorkflowA, {
+    workflowId: wfId,
+    taskQueue: 'futureporn'
+  });
+  const result = await handle.result()
+  console.log(result)
 })()
+// console.log('init')
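For this simplified smoke test to resolve handle.result(), something has to be polling the 'futureporn' task queue with WorkflowA registered. Below is a minimal sketch of that worker side, assuming the same Temporal address and namespace the scout client uses above; the import paths and workflowsPath are assumptions for illustration, not the actual @futureporn/temporal-worker code.

// Hypothetical worker sketch; the real @futureporn/temporal-worker package may differ.
import { createRequire } from 'node:module'
import { NativeConnection, Worker } from '@temporalio/worker'
// Assumed barrel export of activities; adjust to the package's real entry point.
import * as activities from '@futureporn/temporal-workflows/activities.js'

const require = createRequire(import.meta.url)

const connection = await NativeConnection.connect({
  // Same in-cluster address the scout client connects to.
  address: 'temporal-frontend.futureporn.svc.cluster.local:7233',
})

const worker = await Worker.create({
  connection,
  namespace: 'futureporn',
  // Must match the taskQueue scout passes to client.workflow.start().
  taskQueue: 'futureporn',
  // Assumed path to the compiled workflows entry point.
  workflowsPath: require.resolve('@futureporn/temporal-workflows/workflows.js'),
  activities,
})

await worker.run()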

View File

@@ -1,6 +1,7 @@
 import { simpleParser } from 'mailparser';
 import { load } from 'cheerio'
+import { type NotificationData } from '@futureporn/types'
 const definitions = [
   {
@@ -14,10 +15,10 @@ const definitions = [
   {
     platform: 'fansly',
     selectors: {
-      channel: ($) => $("a[href*='/live/']").attr('href').toString().split('/').at(-1),
+      channel: ($: any) => $("a[href*='/live/']").attr('href').toString().split('/').at(-1),
       displayName: 'div[class*="message-col"] div:nth-child(5)',
-      userId: ($) => $("img[src*='/api/v1/account/']").attr('src').toString().split('/').at(-2),
-      avatar: ($) => $("img[src*='/api/v1/account/']").attr('src').toString()
+      userId: ($: any) => $("img[src*='/api/v1/account/']").attr('src').toString().split('/').at(-2),
+      avatar: ($: any) => $("img[src*='/api/v1/account/']").attr('src').toString()
     },
     from: 'no-reply@fansly.com',
     template: 'https://fansly.com/:channel',
@@ -25,7 +26,7 @@ const definitions = [
   }
 ]
-function render(template, values) {
+function render(template: string, values: any) {
   // console.log(`values=${values}`)
   // console.log(values)
   return template.replace(/:([a-zA-Z0-9_]+)/g, (match, key) => {
@@ -53,7 +54,7 @@ function render(template, values) {
  * @returns {String|null} result.userId Varies by platform. Some platforms don't have the userId in the e-mail, so it's null.
  *   fansly example: '555722198917066752'
  */
-export async function checkEmail (body) {
+export async function checkEmail (body: string): Promise<NotificationData> {
   const mail = await simpleParser(body)
   if (!mail?.html) {
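The render() helper above substitutes :placeholder tokens in a definition's template with values scraped from the e-mail. A standalone usage sketch follows; the Record parameter type and the fall-through behavior for unknown keys are assumptions, since the commit leaves values loosely typed.

// Standalone sketch of the substitution render() performs; not the actual module.
function render(template: string, values: Record<string, string>): string {
  return template.replace(/:([a-zA-Z0-9_]+)/g, (match, key) => values[key] ?? match)
}

// e.g. turning the fansly definition's template into a channel URL
const url = render('https://fansly.com/:channel', { channel: 'somechannel' })
console.log(url) // https://fansly.com/somechannel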

View File

@@ -22,7 +22,8 @@
     "@futureporn/scout": "workspace:^",
     "@futureporn/storage": "workspace:^",
     "@futureporn/types": "workspace:*",
-    "@futureporn/utils": "workspace:*"
+    "@futureporn/utils": "workspace:*",
+    "@futureporn/temporal-workflows": "workspace:*"
   },
   "devDependencies": {
     "@typescript-eslint/eslint-plugin": "^5.62.0",

View File

@@ -17,6 +17,9 @@ importers:
       '@futureporn/storage':
         specifier: workspace:^
         version: link:../storage
+      '@futureporn/temporal-workflows':
+        specifier: workspace:*
+        version: 'link:'
       '@futureporn/types':
         specifier: workspace:*
         version: link:../types

View File

@@ -1,6 +1,6 @@
 export * from './workflowA/activities/activitiesA.js'
 export * from './workflowA/activities/activitiesB.js'
-export * from './workflowB/activities.js'
-export * from './processNotifEmail/activities/upsertPlatformNotification.js'
-export * from './processNotifEmail/activities/upsertStream.js'
-export * from './processNotifEmail/activities/upsertVtuber.js'
+// export * from './workflowB/activities.js'
+// export * from './processNotifEmail/activities/upsertPlatformNotification.js'
+// export * from './processNotifEmail/activities/upsertStream.js'
+// export * from './processNotifEmail/activities/upsertVtuber.js'

View File

@@ -15,10 +15,12 @@ const { upsertPlatformNotification } = proxyActivities<typeof upsertPlatformNoti
   startToCloseTimeout: '1 minute',
 });
-export async function processNotifEmail({ url, platform, channel, displayName, date, userId, avatar }: NotificationData): Promise<string> {
-  log.info('Hello from processNotifEmail workflow');
-  const vtuberId = await upsertVtuber({ platform, userId, url, channel });
-  const pNotifId = await upsertPlatformNotification({ source: 'email', date, platform, vtuberId });
-  const streamId = await upsertStream({ date, vtuberId, platform, pNotifId });
-  return `vtuberId: ${vtuberId} | pNotifId: ${pNotifId} | streamId: ${streamId}`;
+export async function processNotifEmail(args: NotificationData): Promise<string> {
+  return '@todo @todo @todo'
+  // const { url, platform, channel, displayName, date, userId, avatar } = args
+  // log.info('Hello from processNotifEmail workflow');
+  // const vtuberId = await upsertVtuber({ platform, userId, url, channel });
+  // const pNotifId = await upsertPlatformNotification({ source: 'email', date, platform, vtuberId });
+  // const streamId = await upsertStream({ date, vtuberId, platform, pNotifId });
+  // return `vtuberId: ${vtuberId} | pNotifId: ${pNotifId} | streamId: ${streamId}`;
 }

View File

@@ -1,3 +1,3 @@
 export * from './workflowA/workflow.js'
-export * from './workflowB/workflow.js'
-export * from './processNotifEmail/workflow.js'
+// export * from './workflowB/workflow.js'
+// export * from './processNotifEmail/workflow.js'
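workflowA/workflow.js is now the only workflow left exported, and it is what the simplified scout entrypoint starts. Its definition is not part of this diff; for the end-to-end test to resolve, it presumably looks something like the purely illustrative stand-in below, where the activity name and import path are invented for the sketch.

// Purely illustrative stand-in for WorkflowA; the real workflow is not shown in this commit.
import { proxyActivities } from '@temporalio/workflow'
import type * as activities from '../activities/activitiesA.js' // assumed relative path

const { activityA } = proxyActivities<typeof activities>({
  startToCloseTimeout: '1 minute',
})

export async function WorkflowA(): Promise<string> {
  // Returns a value so the scout client's handle.result() has something to log.
  return await activityA()
}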

View File

@@ -122,12 +122,12 @@ declare namespace Futureporn {
   type NotificationData = {
     isMatch?: boolean;
-    url: string;
-    platform: string;
-    channel: string;
-    displayName: string;
-    date: string;
-    userId: string | null;
-    avatar: string;
+    url?: string;
+    platform?: string;
+    channel?: string | null;
+    displayName?: string;
+    date?: string;
+    userId?: string | null;
+    avatar?: string;
   };
 }
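With every field on NotificationData now optional, consumers have to narrow before using url, platform, and the rest. A consumer-side sketch follows; the guard name is illustrative, and the import matches the one added to parsers above.

import { type NotificationData } from '@futureporn/types'

// Illustrative type guard; narrows the optional fields a workflow start actually needs.
function isActionableNotification(
  n: NotificationData
): n is NotificationData & { url: string; platform: string; date: string } {
  return Boolean(n.isMatch && n.url && n.platform && n.date)
}

// Usage: only start a workflow when the parsed e-mail produced a usable notification.
// if (isActionableNotification(data)) { await client.workflow.start(...) }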

View File

@@ -1,2 +1,11 @@
+## We differentiate packages and services in order to keep Dockerfile builds simple.
+## When we are building a docker image for a service, we need to include all of its package dependencies.
+## Instead of having to manually pick and choose every dependent package to copy into the build context,
+## we can just copy all of packages/* into the build context.
+## For example, when we are building fp/scout, we don't want to include @futureporn/next or @futureporn/strapi in the docker build context.
+## @futureporn/scout, @futureporn/strapi, @futureporn/next would all be considered services.
+## In other words, services/* depend on packages/*, but packages/* do not depend on services/*.
 packages:
   - 'packages/*'
+  - 'services/*'