temporarily simplify scout for testing
ci / build (push) Failing after 2m4s
parent 5583580afc
commit 4b060bf0f9
@@ -2,29 +2,43 @@ FROM node:20 as base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR /app
RUN corepack enable
RUN corepack enable && corepack prepare pnpm@9.2.0 --activate

FROM base as build
WORKDIR /usr/src/app
RUN mkdir -p /usr/src/app/packages/scout && mkdir /usr/src/app/packages/taco && mkdir -p /prod/scout
COPY package.json pnpm-lock.yaml .
COPY ./packages/scout/pnpm-lock.yaml ./packages/scout/
COPY ./packages/taco/pnpm-lock.yaml ./packages/taco/
COPY ./packages/types/pnpm-lock.yaml ./packages/types/
COPY ./packages/temporal-workflows/pnpm-lock.yaml ./packages/temporal-workflows/
COPY ./packages/temporal-worker/pnpm-lock.yaml ./packages/temporal-worker/
RUN pnpm fetch
WORKDIR /app
RUN mkdir -p /app/packages/scout && mkdir /app/packages/taco && mkdir -p /prod/scout

## Copy manifests, lockfiles, and configs into docker context
COPY package.json pnpm-lock.yaml .npmrc .
COPY ./packages/scout/pnpm-lock.yaml ./packages/scout/package.json ./packages/scout/
COPY ./packages/utils/pnpm-lock.yaml ./packages/utils/package.json ./packages/utils/
COPY ./packages/image/pnpm-lock.yaml ./packages/image/package.json ./packages/image/
COPY ./packages/storage/pnpm-lock.yaml ./packages/storage/package.json ./packages/storage/
COPY ./packages/taco/pnpm-lock.yaml ./packages/taco/package.json ./packages/taco/
COPY ./packages/types/pnpm-lock.yaml ./packages/types/package.json ./packages/types/
COPY ./packages/temporal-workflows/pnpm-lock.yaml ./packages/temporal-workflows/package.json ./packages/temporal-workflows/
COPY ./packages/temporal-worker/pnpm-lock.yaml ./packages/temporal-worker/package.json ./packages/temporal-worker/

## Install npm packages
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --recursive --frozen-lockfile

## Copy package code into docker context
COPY ./packages/utils/ ./packages/utils/
COPY ./packages/image/ ./packages/image/
COPY ./packages/storage/ ./packages/storage/
COPY ./packages/scout/ ./packages/scout/
COPY ./packages/taco/ ./packages/taco/
COPY ./packages/types/ ./packages/types/
COPY ./packages/temporal-workflows/ ./packages/temporal-workflows/
COPY ./packages/temporal-worker/ ./packages/temporal-worker/

## Build (transpile TS into JS)
RUN pnpm -r build

## Deploy (copy all production code into one place)
RUN pnpm deploy --filter=scout --prod /prod/scout

FROM base as scout
COPY --from=build /prod/scout .
RUN ls -la .
ENTRYPOINT ["pnpm", "start"]

@@ -9,10 +9,9 @@
},
"scripts": {
"test": "mocha --require ts-node/register src/**/*.spec.ts",
"build:worker": "tsc --build ./tsconfig.json",
"start": "echo please use either start:manager or start:worker",
"start:manager": "node --loader ts-node/esm ./src/index.ts",
"start:worker": "node --loader ts-node/esm ./src/temporal/worker.ts"
"build": "tsc --build",
"dev": "nodemon --ext js,ts,json,yaml --exec \"node --loader ts-node/esm --disable-warning=ExperimentalWarning ./src/index.ts\"",
"start": "node /app/dist/index.js"
},
"keywords": [],
"author": "@CJ_Clippy",
@@ -21,6 +20,8 @@
"@aws-sdk/client-s3": "^3.583.0",
"@aws-sdk/lib-storage": "^3.588.0",
"@aws-sdk/s3-request-presigner": "^3.588.0",
"@futureporn/temporal-workflows": "workspace:^",
"@futureporn/types": "workspace:^",
"@paralleldrive/cuid2": "^2.2.2",
"@temporalio/client": "^1.9.0",
"@temporalio/worker": "^1.9.0",
@@ -38,23 +39,24 @@
"limiter": "2.0.1",
"mailparser": "^3.7.1",
"node-fetch": "^3.3.0",
"nodemon": "^3.1.3",
"p-retry": "^5.1.2",
"prevvy": "^7.0.1",
"qs": "^6.12.1",
"sharp": "^0.33.4",
"slugify": "^1.6.6",
"tsx": "^4.7.2",
"@futureporn/types": "workspace:^",
"xpath": "^0.0.34"
},
"packageManager": "pnpm@9.2.0",
"devDependencies": {
"@types/chai": "^4.3.16",
"@types/cheerio": "^0.22.35",
"@types/mailparser": "^3.4.4",
"@types/mocha": "^10.0.7",
"chai": "^5.1.0",
"mocha": "^10.4.0",
"nodemon": "^3.1.4",
"ts-node": "^10.9.2",
"typescript": "^5.4.5"
"typescript": "^5.5.3"
}
}

@@ -17,6 +17,9 @@ importers:
'@aws-sdk/s3-request-presigner':
  specifier: ^3.588.0
  version: 3.614.0
'@futureporn/temporal-workflows':
  specifier: workspace:^
  version: link:../temporal-workflows
'@futureporn/types':
  specifier: workspace:^
  version: link:../types
@@ -71,9 +74,6 @@ importers:
node-fetch:
  specifier: ^3.3.0
  version: 3.3.2
nodemon:
  specifier: ^3.1.3
  version: 3.1.4
p-retry:
  specifier: ^5.1.2
  version: 5.1.2
@@ -99,6 +99,12 @@ importers:
'@types/chai':
  specifier: ^4.3.16
  version: 4.3.16
'@types/cheerio':
  specifier: ^0.22.35
  version: 0.22.35
'@types/mailparser':
  specifier: ^3.4.4
  version: 3.4.4
'@types/mocha':
  specifier: ^10.0.7
  version: 10.0.7
@@ -108,11 +114,14 @@ importers:
mocha:
  specifier: ^10.4.0
  version: 10.6.0
nodemon:
  specifier: ^3.1.4
  version: 3.1.4
ts-node:
  specifier: ^10.9.2
  version: 10.9.2(@swc/core@1.6.13)(@types/node@20.14.10)(typescript@5.5.3)
typescript:
  specifier: ^5.4.5
  specifier: ^5.5.3
  version: 5.5.3

packages:

@@ -972,6 +981,9 @@ packages:
'@types/chai@4.3.16':
  resolution: {integrity: sha512-PatH4iOdyh3MyWtmHVFXLWCCIhUbopaltqddG9BzB+gMIzee2MJrvd+jouii9Z3wzQJruGWAm7WOMjgfG8hQlQ==}

'@types/cheerio@0.22.35':
  resolution: {integrity: sha512-yD57BchKRvTV+JD53UZ6PD8KWY5g5rvvMLRnZR3EQBCZXiDT/HR+pKpMzFGlWNhFrXlo7VPZXtKvIEwZkAWOIA==}

'@types/debug@4.1.12':
  resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==}

@@ -996,6 +1008,9 @@ packages:
'@types/luxon@3.4.2':
  resolution: {integrity: sha512-TifLZlFudklWlMBfhubvgqTXRzLDI5pCbGa4P8a3wPyUQSW+1xQ5eDsreP9DWHX3tjq1ke96uYG/nwundroWcA==}

'@types/mailparser@3.4.4':
  resolution: {integrity: sha512-C6Znp2QVS25JqtuPyxj38Qh+QoFcLycdxsvcc6IZCGekhaMBzbdTXzwGzhGoYb3TfKu8IRCNV0sV1o3Od97cEQ==}

'@types/mocha@10.0.7':
  resolution: {integrity: sha512-GN8yJ1mNTcFcah/wKEFIJckJx9iJLoMSzWcfRRuxz/Jk+U6KQNnml+etbtxFK8lPjzOw3zp4Ha/kjSst9fsHYw==}

@@ -3536,6 +3551,10 @@ snapshots:

'@types/chai@4.3.16': {}

'@types/cheerio@0.22.35':
  dependencies:
    '@types/node': 20.14.10

'@types/debug@4.1.12':
  dependencies:
    '@types/ms': 0.7.34

@@ -3564,6 +3583,11 @@ snapshots:

'@types/luxon@3.4.2': {}

'@types/mailparser@3.4.4':
  dependencies:
    '@types/node': 20.14.10
    iconv-lite: 0.6.3

'@types/mocha@10.0.7': {}

'@types/ms@0.7.34': {}

@@ -4,57 +4,74 @@
 * watches an e-mail inbox for going live notifications
 */

import { checkEmail } from './parsers.js'
// import { createStreamInDb } from './signals.js'
import { Email } from './imap.js'
// import { checkEmail } from './parsers.js'
// // import { createStreamInDb } from './signals.js'
// import { Email } from './imap.js'
import { Client, Connection } from '@temporalio/client'
import { NotificationData, processEmailNotification } from 'temporal-workflows'
import { FetchMessageObject } from 'imapflow'
// import { type NotificationData } from '@futureporn/types'
// import { type FetchMessageObject } from 'imapflow'
import { createId } from '@paralleldrive/cuid2'
import { WorkflowA } from '@futureporn/temporal-workflows/workflows.js'

console.log('connecting to temporal...')
const connection = await Connection.connect({ address: 'temporal-frontend.futureporn.svc.cluster.local:7233' });
const client = new Client({ connection, namespace: 'futureporn' });


async function handleMessage({ email, msg }: { email: Email, msg: FetchMessageObject }) {
try {
console.log(' ✏️ loading message')
const body = await email.loadMessage(msg.uid)
// async function handleMessage({ email, msg }: { email: Email, msg: FetchMessageObject }) {
// try {
// console.log(' ✏️ loading message')
// const body = await email.loadMessage(msg.uid)

// console.log(' ✏️ checking e-mail')
const { isMatch, url, platform, channel, displayName, date, userId, avatar }: NotificationData = (await checkEmail(body))
// console.log(' ✏️ checking e-mail')
// const { isMatch, url, platform, channel, displayName, date, userId, avatar }: NotificationData = (await checkEmail(body))

if (isMatch) {
const wfId = `process-email-${createId()}`
// console.log(` ✏️ [DRY] starting Temporal workflow ${wfId} @todo actually start temporal workflow`)
// await signalRealtime({ url, platform, channel, displayName, date, userId, avatar })
// @todo invoke a Temporal workflow here
console.log(' ✏️✏️ starting Temporal workflow')
const handle = await client.workflow.start(processEmail, {
workflowId: wfId,
taskQueue: 'scout',
args: [{ url, platform, channel, displayName, date, userId, avatar }]
});
// const handle = client.getHandle(workflowId);
const result = await handle.result()
console.log(`result of the workflow is as follows`)
console.log(result)
}
// if (isMatch) {
// const wfId = `process-email-${createId()}`
// // console.log(` ✏️ [DRY] starting Temporal workflow ${wfId} @todo actually start temporal workflow`)
// // await signalRealtime({ url, platform, channel, displayName, date, userId, avatar })
// // @todo invoke a Temporal workflow here
// console.log(' ✏️✏️ starting Temporal workflow')
// // const handle = await client.workflow.start(WorkflowA, {
// // workflowId: wfId,
// // taskQueue: 'futureporn'
// // });
// // // const handle = await client.workflow.start(processNotifEmail, {
// // // workflowId: wfId,
// // // taskQueue: 'futureporn',
// // // args: [{ url, platform, channel, displayName, date, userId, avatar }]
// // // });
// // // const handle = client.getHandle(workflowId);
// // const result = await handle.result()
// // console.log(`result of the workflow is as follows`)
// // console.log(result)
// }

console.log(' ✏️ archiving e-mail')
await email.archiveMessage(msg.uid)
// console.log(' ✏️ archiving e-mail')
// await email.archiveMessage(msg.uid)

} catch (e) {
// console.error('error encountered')
console.error(`An error was encountered while handling the following e-mail message.\n${JSON.stringify(msg, null, 2)}\nError as follows.`)
console.error(e)
}
}
// } catch (e) {
// // console.error('error encountered')
// console.error(`An error was encountered while handling the following e-mail message.\n${JSON.stringify(msg, null, 2)}\nError as follows.`)
// console.error(e)
// }
// }


(async () => {
const email = new Email()
email.once('message', (msg: FetchMessageObject) => handleMessage({ email, msg }))
await email.connect()
})()
// const email = new Email()
// email.once('message', (msg: FetchMessageObject) => handleMessage({ email, msg }))
// await email.connect()
console.log('scout is starting')
const wfId = `process-email-${createId()}`
const handle = await client.workflow.start(WorkflowA, {
workflowId: wfId,
taskQueue: 'futureporn'
});
const result = await handle.result()
console.log(result)
})()


// console.log('init')

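Note: for context, here is a condensed sketch of the e-mail-driven flow this commit temporarily disables, assembled from the commented-out code above. The import paths, the processNotifEmail export, and the 'futureporn' task queue name are assumptions taken from the surrounding diff, not a verified implementation.

import { Client, Connection } from '@temporalio/client'
import { createId } from '@paralleldrive/cuid2'
import { type FetchMessageObject } from 'imapflow'
import { checkEmail } from './parsers.js'
import { Email } from './imap.js'
import { processNotifEmail } from '@futureporn/temporal-workflows/workflows.js' // assumed export path

const connection = await Connection.connect({ address: 'temporal-frontend.futureporn.svc.cluster.local:7233' })
const client = new Client({ connection, namespace: 'futureporn' })

const email = new Email()
email.once('message', async (msg: FetchMessageObject) => {
  // Load and parse the notification e-mail, then hand it off to a Temporal workflow.
  const body = await email.loadMessage(msg.uid)
  const notification = await checkEmail(body)
  if (notification.isMatch) {
    // execute() starts the workflow and waits for its result.
    const result = await client.workflow.execute(processNotifEmail, {
      workflowId: `process-email-${createId()}`,
      taskQueue: 'futureporn', // assumption; the diff uses both 'scout' and 'futureporn'
      args: [notification],
    })
    console.log(result)
  }
  await email.archiveMessage(msg.uid)
})
await email.connect()
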
@@ -1,6 +1,7 @@

import { simpleParser } from 'mailparser';
import { load } from 'cheerio'
import { type NotificationData } from '@futureporn/types'

const definitions = [
{
@@ -14,10 +15,10 @@ const definitions = [
{
platform: 'fansly',
selectors: {
channel: ($) => $("a[href*='/live/']").attr('href').toString().split('/').at(-1),
channel: ($: any) => $("a[href*='/live/']").attr('href').toString().split('/').at(-1),
displayName: 'div[class*="message-col"] div:nth-child(5)',
userId: ($) => $("img[src*='/api/v1/account/']").attr('src').toString().split('/').at(-2),
avatar: ($) => $("img[src*='/api/v1/account/']").attr('src').toString()
userId: ($: any) => $("img[src*='/api/v1/account/']").attr('src').toString().split('/').at(-2),
avatar: ($: any) => $("img[src*='/api/v1/account/']").attr('src').toString()
},
from: 'no-reply@fansly.com',
template: 'https://fansly.com/:channel',
@@ -25,7 +26,7 @@
}
]

function render(template, values) {
function render(template: string, values: any) {
// console.log(`values=${values}`)
// console.log(values)
return template.replace(/:([a-zA-Z0-9_]+)/g, (match, key) => {

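Note: a quick sketch of the substitution render performs, with the body reconstructed from the regex above. The fallback behaviour for missing keys is an assumption, since the rest of the function is not shown in this hunk.

function render(template: string, values: Record<string, string>): string {
  // Replace each ':token' in the template with the matching key from values.
  return template.replace(/:([a-zA-Z0-9_]+)/g, (_match, key) => values[key] ?? `:${key}`)
}

// e.g. render('https://fansly.com/:channel', { channel: 'examplechannel' })
// -> 'https://fansly.com/examplechannel'
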
@@ -53,7 +54,7 @@ function render(template, values) {
 * @returns {String|null} result.userId Varies by platform. Some platforms don't have the userId in the e-mail, so it's null.
 *   fansly example: '555722198917066752'
 */
export async function checkEmail (body) {
export async function checkEmail (body: string): Promise<NotificationData> {

const mail = await simpleParser(body)
if (!mail?.html) {

@@ -22,7 +22,8 @@
"@futureporn/scout": "workspace:^",
"@futureporn/storage": "workspace:^",
"@futureporn/types": "workspace:*",
"@futureporn/utils": "workspace:*"
"@futureporn/utils": "workspace:*",
"@futureporn/temporal-workflows": "workspace:*"
},
"devDependencies": {
"@typescript-eslint/eslint-plugin": "^5.62.0",

@@ -17,6 +17,9 @@ importers:
'@futureporn/storage':
  specifier: workspace:^
  version: link:../storage
'@futureporn/temporal-workflows':
  specifier: workspace:*
  version: 'link:'
'@futureporn/types':
  specifier: workspace:*
  version: link:../types

@@ -1,6 +1,6 @@
export * from './workflowA/activities/activitiesA.js'
export * from './workflowA/activities/activitiesB.js'
export * from './workflowB/activities.js'
export * from './processNotifEmail/activities/upsertPlatformNotification.js'
export * from './processNotifEmail/activities/upsertStream.js'
export * from './processNotifEmail/activities/upsertVtuber.js'
// export * from './workflowB/activities.js'
// export * from './processNotifEmail/activities/upsertPlatformNotification.js'
// export * from './processNotifEmail/activities/upsertStream.js'
// export * from './processNotifEmail/activities/upsertVtuber.js'

@@ -15,10 +15,12 @@ const { upsertPlatformNotification } = proxyActivities<typeof upsertPlatformNoti
startToCloseTimeout: '1 minute',
});

export async function processNotifEmail({ url, platform, channel, displayName, date, userId, avatar }: NotificationData): Promise<string> {
log.info('Hello from processNotifEmail workflow');
const vtuberId = await upsertVtuber({ platform, userId, url, channel });
const pNotifId = await upsertPlatformNotification({ source: 'email', date, platform, vtuberId });
const streamId = await upsertStream({ date, vtuberId, platform, pNotifId });
return `vtuberId: ${vtuberId} | pNotifId: ${pNotifId} | streamId: ${streamId}`;
export async function processNotifEmail(args: NotificationData): Promise<string> {
return '@todo @todo @todo'
// const { url, platform, channel, displayName, date, userId, avatar } = args
// log.info('Hello from processNotifEmail workflow');
// const vtuberId = await upsertVtuber({ platform, userId, url, channel });
// const pNotifId = await upsertPlatformNotification({ source: 'email', date, platform, vtuberId });
// const streamId = await upsertStream({ date, vtuberId, platform, pNotifId });
// return `vtuberId: ${vtuberId} | pNotifId: ${pNotifId} | streamId: ${streamId}`;
}

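Note: the proxyActivities call is truncated in the hunk header above. For reference, a minimal sketch of the standard Temporal TypeScript pattern it follows; the diff proxies each activity module separately, and the activities import path here is an assumption.

import { proxyActivities } from '@temporalio/workflow'
import type * as activities from '../activities/index.js' // assumed path

// Activities run outside the workflow sandbox; the proxy only carries their types and timeouts.
const { upsertVtuber, upsertPlatformNotification, upsertStream } = proxyActivities<typeof activities>({
  startToCloseTimeout: '1 minute',
})
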
@@ -1,3 +1,3 @@
export * from './workflowA/workflow.js'
export * from './workflowB/workflow.js'
export * from './processNotifEmail/workflow.js'
// export * from './workflowB/workflow.js'
// export * from './processNotifEmail/workflow.js'

@@ -122,12 +122,12 @@ declare namespace Futureporn {

type NotificationData = {
isMatch?: boolean;
url: string;
platform: string;
channel: string;
displayName: string;
date: string;
userId: string | null;
avatar: string;
url?: string;
platform?: string;
channel?: string | null;
displayName?: string;
date?: string;
userId?: string | null;
avatar?: string;
};
}

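Note: with every field of NotificationData now optional, consumers have to narrow the value before acting on it. A hedged sketch of the kind of guard that implies; the import specifier is taken from the parsers.ts diff above, and the guard itself is hypothetical.

import { type NotificationData } from '@futureporn/types'

// Narrow a parsed notification to one that is safe to act on.
function isActionableNotification(data: NotificationData): data is NotificationData & { url: string; platform: string } {
  return Boolean(data.isMatch && data.url && data.platform)
}
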
@@ -1,2 +1,11 @@
## We differentiate packages and services in order to keep Dockerfile builds simpler.
## When we build a docker image for a service, we need to include all of its package dependencies.
## Instead of having to manually pick and choose every dependent package into the build context,
## we can just copy all of packages/* into the build context.
## For example, when we are building fp/scout, we don't want to include @futureporn/next or @futureporn/strapi in the docker build context.
## @futureporn/scout, @futureporn/strapi, and @futureporn/next would all be considered services.
## In other words, services/* depend on packages/*, but packages/* do not depend on services/*.

packages:
  - 'packages/*'
  - 'services/*'