Temporal integration progress
ci / build (push) Waiting to run Details

This commit is contained in:
CJ_Clippy 2024-06-11 20:28:36 -08:00
parent 194377dbd9
commit 7a7db2717e
62 changed files with 11050 additions and 4146 deletions

8
.npmrc
View File

@ -1,2 +1,8 @@
# we are giving every package their own lockfile to keep the docker build process fast
shared-workspace-lockfile=false
shared-workspace-lockfile=false
engine-strict=true
package-manager-strict=true
use-node-version=20.13.1
node-version=20.13.1
recursive-install=true

View File

@ -14,6 +14,11 @@ Kubernetes for Production, deployed using FluxCD
direnv for loading .envrc
Temporal for work queue
Postgres for data storage
S3 for media storage
Domain Driven Development
Test Driven Development

View File

@ -12,7 +12,13 @@ cert-manager:
kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.14.4/cert-manager.yaml
flux:
flux bootstrap git --url="ssh://git@gitea.futureporn.net:2222/futureporn/fp" --branch=main --path="clusters/production" --private-key-file=/home/chris/.ssh/fp-flux
flux bootstrap git --url="ssh://git@gitea.futureporn.net:2222/futureporn/fp" --branch=main --path="clusters/production" --private-key-file=/home/cj/.ssh/fp-flux
cluster:
./scripts/kind-with-local-registry.sh
./scripts/k8s-namespaces.sh
./scripts/k8s-secrets.sh
./scripts/k8s-chisel-operator.sh
argo:
helmsman --apply -f ./helmsman.argocd.yaml
@ -49,6 +55,9 @@ minikube:
kind:
bash -x ./scripts/kind-with-local-registry.sh
chisel:
./scripts/k8s-chisel-operator.sh
deps:
sudo pamac install make entr nvm minikube kubectl docker helm
curl -fsSL https://raw.githubusercontent.com/tilt-dev/tilt/master/scripts/install.sh | bash

View File

@ -10,6 +10,7 @@ spec:
- name: web
port: 3000
targetPort: 3000
---
apiVersion: apps/v1
kind: Deployment

View File

@ -1,53 +0,0 @@
apiVersion: v1
kind: Service
metadata:
name: pgadmin
namespace: futureporn
spec:
selector:
app.kubernetes.io/name: pgadmin
ports:
- name: web
protocol: TCP
port: 5050
targetPort: 5050
status:
loadBalancer: {}
---
apiVersion: v1
kind: Pod
metadata:
name: pgadmin
namespace: futureporn
labels:
app.kubernetes.io/name: pgadmin
spec:
containers:
- name: pgadmin
image: dpage/pgadmin4
ports:
- containerPort: 5050
resources:
limits:
cpu: 500m
memory: 1Gi
env:
- name: PGADMIN_LISTEN_PORT
value: '5050'
- name: POSTGRES_PASSWORD
valueFrom:
secretKeyRef:
name: postgres
key: password
- name: PGADMIN_DEFAULT_PASSWORD
valueFrom:
secretKeyRef:
name: pgadmin
key: defaultPassword
- name: PGADMIN_DEFAULT_EMAIL
valueFrom:
secretKeyRef:
name: pgadmin
key: defaultEmail
restartPolicy: OnFailure

View File

@ -1,61 +1,117 @@
apiVersion: v1
kind: Service
apiVersion: apps/v1
kind: ReplicaSet
metadata:
name: scout
name: scout-worker
namespace: futureporn
labels:
app: scout-worker
spec:
replicas: {{ .Values.scout.worker.replicas }}
selector:
app.kubernetes.io/name: scout
ports:
- name: http
port: 3000
targetPort: http
protocol: TCP
matchLabels:
app: scout-worker
template:
metadata:
labels:
app: scout-worker
spec:
containers:
- name: scout-worker
image: "{{ .Values.scout.worker.containerName }}"
imagePullPolicy: Always
ports:
- containerPort: 8080
env:
- name: TEMPORAL_SERVICE_ADDRESS
value: "temporal-frontend.futureporn.svc.cluster.local:7233"
- name: TEMPORAL_NAMESPACE
value: "futureporn"
- name: TEMPORAL_TASK_QUEUE
value: "scout"
- name: S3_BUCKET_NAME
value: "{{ .Values.scout.s3BucketName }}"
- name: CDN_BUCKET_URL
value: "{{ .Values.scout.cdnBucketUrl }}"
- name: STRAPI_URL
value: https://strapi.piko.sbtp.xyz
- name: SCOUT_NITTER_ACCESS_KEY
valueFrom:
secretKeyRef:
name: scout
key: nitterAccessKey
- name: SCOUT_NITTER_URL
value: https://nitter.sbtp.xyz
- name: SCOUT_RECENTS_TOKEN
valueFrom:
secretKeyRef:
name: scout
key: recentsToken
- name: SCOUT_STRAPI_API_KEY
valueFrom:
secretKeyRef:
name: scout
key: strapiApiKey
# - name: SCOUT_IMAP_SERVER
# valueFrom:
# secretKeyRef:
# name: scout
# key: imapServer
# - name: SCOUT_IMAP_PORT
# valueFrom:
# secretKeyRef:
# name: scout
# key: imapPort
# - name: SCOUT_IMAP_USERNAME
# valueFrom:
# secretKeyRef:
# name: scout
# key: imapUsername
# - name: SCOUT_IMAP_PASSWORD
# valueFrom:
# secretKeyRef:
# name: scout
# key: imapPassword
# - name: SCOUT_IMAP_ACCESS_TOKEN
# valueFrom:
# secretKeyRef:
# name: scout
# key: imapAccessToken
# Add any other necessary environment variables
resources:
limits:
cpu: "500m"
memory: "512Mi"
requests:
cpu: "250m"
memory: "256Mi"
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: scout
name: scout-manager
namespace: futureporn
labels:
app: scout
app: scout-manager
spec:
replicas: 1
selector:
matchLabels:
app: scout
app: scout-manager
template:
metadata:
labels:
app: scout
app: scout-manager
spec:
containers:
- name: scout
image: "{{ .Values.scout.containerName }}"
- name: scout-manager
image: "{{ .Values.scout.manager.containerName }}"
ports:
- name: http
containerPort: 3000
env:
- name: POSTGRES_REALTIME_CONNECTION_STRING
valueFrom:
secretKeyRef:
name: realtime
key: postgresRealtimeConnectionString
- name: STRAPI_URL
value: https://strapi.piko.sbtp.xyz
- name: SCOUT_NITTER_ACCESS_KEY
valueFrom:
secretKeyRef:
name: scout
key: nitterAccessKey
- name: SCOUT_NITTER_URL
value: https://nitter.sbtp.xyz
- name: SCOUT_RECENTS_TOKEN
valueFrom:
secretKeyRef:
name: scout
key: recentsToken
- name: SCOUT_IMAP_SERVER
valueFrom:
secretKeyRef:
@ -81,37 +137,56 @@ spec:
secretKeyRef:
name: scout
key: imapAccessToken
- name: SCOUT_STRAPI_API_KEY
valueFrom:
secretKeyRef:
name: scout
key: strapiApiKey
# env:
# - name: POSTGRES_REALTIME_CONNECTION_STRING
# valueFrom:
# secretKeyRef:
# name: realtime
# key: postgresRealtimeConnectionString
# - name: CDN_BUCKET_URL
# value: "{{ .Values.scout.cdnBucketUrl }}"
# - name: STRAPI_URL
# value: https://strapi.piko.sbtp.xyz
# - name: SCOUT_NITTER_ACCESS_KEY
# valueFrom:
# secretKeyRef:
# name: scout
# key: nitterAccessKey
# - name: SCOUT_NITTER_URL
# value: https://nitter.sbtp.xyz
# - name: SCOUT_RECENTS_TOKEN
# valueFrom:
# secretKeyRef:
# name: scout
# key: recentsToken
# - name: SCOUT_IMAP_SERVER
# valueFrom:
# secretKeyRef:
# name: scout
# key: imapServer
# - name: SCOUT_IMAP_PORT
# valueFrom:
# secretKeyRef:
# name: scout
# key: imapPort
# - name: SCOUT_IMAP_USERNAME
# valueFrom:
# secretKeyRef:
# name: scout
# key: imapUsername
# - name: SCOUT_IMAP_PASSWORD
# valueFrom:
# secretKeyRef:
# name: scout
# key: imapPassword
# - name: SCOUT_IMAP_ACCESS_TOKEN
# valueFrom:
# secretKeyRef:
# name: scout
# key: imapAccessToken
# - name: SCOUT_STRAPI_API_KEY
# valueFrom:
# secretKeyRef:
# name: scout
# key: strapiApiKey
{{ if eq .Values.managedBy "Helm" }}
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: scout
namespace: futureporn
annotations:
kubernetes.io/ingress.class: nginx
cert-manager.io/cluster-issuer: letsencrypt-staging
spec:
tls:
- secretName: scout-tls
hosts:
- scout.sbtp.xyz
rules:
- host: scout.sbtp.xyz
http:
paths:
- path: /
pathType: Prefix
backend:
service:
name: scout
port:
number: 3000
{{ end }}

View File

@ -10,10 +10,16 @@ next:
capture:
containerName: fp/capture
scout:
containerName: fp/scout
manager:
containerName: fp/scout-manager
worker:
containerName: fp/scout-worker
replicas: 1
pubsubServerUrl: https://realtime.futureporn.svc.cluster.local/faye
certIssuer: letsencrypt-staging
hostname: next.futureporn.svc.cluster.local
cdnBucketUrl: https://fp-dev.b-cdn.net
s3BucketName: fp-dev
strapi:
containerName: fp/strapi
port: 1339

View File

@ -2,8 +2,14 @@ storageClassName: vultr-block-storage-hdd
link2cid:
containerName: gitea.futureporn.net/futureporn/link2cid:latest
scout:
containerName: gitea.futureporn.net/futureporn/scout:latest
manager:
containerName: gitea.futureporn.net/futureporn/scout-manager:latest
worker:
containerName: gitea.futureporn.net/futureporn/scout-worker:latest
replicas: 2
pubsubServerUrl: https://realtime.futureporn.net/faye
cdnBucketUrl: https://futureporn-b2.b-cdn.net
s3BucketName: futureporn
next:
containerName: gitea.futureporn.net/futureporn/next:latest
certIssuer: letsencrypt-staging

View File

@ -1,9 +1,7 @@
FROM node:20-alpine AS base
FROM node:20 AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
RUN apk update
FROM base AS build
ENV NODE_ENV=production
@ -12,13 +10,17 @@ WORKDIR /usr/src/app
RUN mkdir -p /prod/scout
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
RUN pnpm deploy --filter=scout --prod /prod/scout
# COPY pnpm-lock.yaml ./
# RUN pnpm fetch
# COPY ./packages/scout /app
FROM base AS scout
FROM base AS manager
COPY --from=build /prod/scout /app
WORKDIR /app
ENTRYPOINT ["pnpm"]
CMD ["run", "start"]
CMD ["run", "start:manager"]
FROM base AS worker
COPY --from=build /prod/scout /app
WORKDIR /app
ENTRYPOINT ["pnpm"]
CMD ["run", "start:worker"]

View File

@ -1,24 +1,10 @@
FROM node:18-alpine AS base
FROM node:18 AS base
WORKDIR /usr/src/app/
RUN corepack enable
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
# Installing libvips-dev for sharp Compatibility
RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev nasm bash vips-dev git
ARG NODE_ENV=development
ENV NODE_ENV=${NODE_ENV}
FROM base AS build
WORKDIR /usr/src/app/
COPY ./packages/strapi/package.json ./packages/strapi/pnpm-lock.yaml ./
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
COPY ./packages/strapi/ .
FROM build AS dev
WORKDIR /app
ENV PATH /app/node_modules/.bin:$PATH
COPY --from=build /usr/src/app/ .
RUN chown -R node:node /app
USER node
RUN ["pnpm", "run", "build"]
EXPOSE 1339
CMD ["pnpm", "run", "dev"]
CMD ["pnpm", "run", "dev"]

View File

@ -9,5 +9,6 @@
},
"keywords": [],
"author": "@CJ_Clippy",
"license": "Unlicense"
"license": "Unlicense",
"packageManager": "pnpm@9.2.0+sha512.98a80fd11c2e7096747762304106432b3ddc67dcf54b5a8c01c93f68a2cd5e05e6821849522a06fb76284d41a2660d5e334f2ee3bbf29183bf2e739b1dafa771"
}

View File

@ -1,2 +1,3 @@
engine-strict=true
use-node-version=20.13.1
use-node-version=20.13.1
node-version=20.13.1

View File

@ -8,9 +8,6 @@
"dev": "pnpm nodemon ./index.js",
"start": "node index.js"
},
"engines": {
"node": ">=20.0.0"
},
"keywords": [
"IPFS",
"CID",

View File

@ -1,5 +1,5 @@
{
"name": "@futureporn/next",
"name": "next",
"version": "2.0.0",
"private": true,
"scripts": {
@ -14,58 +14,58 @@
"@fortawesome/fontawesome-svg-core": "^6.5.2",
"@fortawesome/free-brands-svg-icons": "^6.5.2",
"@fortawesome/free-solid-svg-icons": "^6.5.2",
"@fortawesome/react-fontawesome": "^0.2.0",
"@fortawesome/react-fontawesome": "^0.2.2",
"@hookform/error-message": "^2.0.1",
"@hookform/resolvers": "^3.3.4",
"@hookform/resolvers": "^3.6.0",
"@mux/blurhash": "^0.1.2",
"@mux/mux-player": "^2.4.1",
"@mux/mux-player-react": "^2.4.1",
"@mux/mux-player": "^2.7.0",
"@mux/mux-player-react": "^2.7.0",
"@paralleldrive/cuid2": "^2.2.2",
"@react-hookz/web": "^24.0.4",
"@tanstack/react-query": "^5.32.1",
"@tanstack/react-table": "^8.15.3",
"@types/lodash": "^4.17.0",
"@types/qs": "^6.9.14",
"@types/react": "^18.2.75",
"@types/react-dom": "^18.2.24",
"@tanstack/react-query": "^5.40.1",
"@tanstack/react-table": "^8.17.3",
"@types/lodash": "^4.17.4",
"@types/qs": "^6.9.15",
"@types/react": "^18.3.3",
"@types/react-dom": "^18.3.0",
"@uppy/aws-s3": "^3.6.2",
"@uppy/aws-s3-multipart": "^3.11.0",
"@uppy/core": "^3.10.0",
"@uppy/dashboard": "^3.8.0",
"@uppy/aws-s3-multipart": "^3.12.0",
"@uppy/core": "^3.12.0",
"@uppy/dashboard": "^3.8.3",
"@uppy/drag-drop": "^3.1.0",
"@uppy/file-input": "^3.1.0",
"@uppy/file-input": "^3.1.2",
"@uppy/progress-bar": "^3.1.1",
"@uppy/react": "^3.3.0",
"@uppy/react": "^3.3.1",
"@uppy/remote-sources": "^1.2.0",
"bulma": "^1.0.0",
"bulma": "^1.0.1",
"date-fns": "^2.30.0",
"date-fns-tz": "^2.0.1",
"dayjs": "^1.11.10",
"dayjs": "^1.11.11",
"feed": "^4.2.2",
"gray-matter": "^4.0.3",
"hls.js": "^1.5.7",
"hls.js": "^1.5.11",
"lodash": "^4.17.21",
"lunarphase-js": "^2.0.3",
"multiformats": "^13.1.0",
"multiformats": "^13.1.1",
"next": "14.0.4",
"next-goatcounter": "^1.0.5",
"nextjs-toploader": "^1.6.11",
"nextjs-toploader": "^1.6.12",
"plyr": "^3.7.8",
"prism-react-renderer": "^2.3.1",
"qs": "^6.12.0",
"react": "^18.2.0",
"react-data-table-component": "^7.5.4",
"react-dom": "^18.2.0",
"react-hook-form": "^7.51.2",
"qs": "^6.12.1",
"react": "^18.3.1",
"react-data-table-component": "^7.6.2",
"react-dom": "^18.3.1",
"react-hook-form": "^7.51.5",
"react-loading-skeleton": "^3.4.0",
"react-toastify": "^9.1.3",
"sharp": "^0.33.3",
"sharp": "^0.33.4",
"slugify": "^1.6.6",
"styled-components": "5.3.3",
"yup": "^1.4.0"
},
"devDependencies": {
"@types/node": "^20.12.6",
"@types/node": "^20.14.2",
"eslint": "^8.57.0",
"eslint-config-next": "14.0.4",
"tsc": "^2.0.4",

File diff suppressed because it is too large Load Diff

@ -1 +0,0 @@
Subproject commit 02e159182d462d17866f5dee720c315781c2bdec

146
packages/scout/.gitignore vendored Normal file
View File

@ -0,0 +1,146 @@
lib/
# Created by https://www.toptal.com/developers/gitignore/api/node
# Edit at https://www.toptal.com/developers/gitignore?templates=node
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
### Node Patch ###
# Serverless Webpack directories
.webpack/
# Optional stylelint cache
# SvelteKit build / generate output
.svelte-kit
# End of https://www.toptal.com/developers/gitignore/api/node

4
packages/scout/.npmrc Normal file
View File

@ -0,0 +1,4 @@
engine-strict=true
package-manager-strict=true
use-node-version=20.13.1
node-version=20.13.1

View File

@ -1 +0,0 @@
lts/iron

View File

@ -1,18 +1,18 @@
# scout
Vtuber data acquisition.
Vtuber data acquisition. Anything having to do with external WWW data acquisition goes in this module.
## Features
* [x] Ingests going live notification e-mails
* [ ] Sends `startRecording` signals to @futureporn/capture
* [ ] Fetches vtuber data from platform
* [ ] image
* [ ] themeColor
* [x] Fetches vtuber data from platform
* [x] image
* [x] themeColor
* [x] displayName
* [ ] Platform Support
* [ ] fansly
* [ ] chaturbate
* [x] Platform Support
* [x] fansly
* [x] chaturbate
## Design requirements
@ -31,7 +31,6 @@ Vtuber data acquisition.
## Puppeteer
For when we get to the point where we need it, here are the packages we used with success during past testing.

4
packages/scout/node.d.ts vendored Normal file
View File

@ -0,0 +1,4 @@
// Augments the built-in ImportMeta so TypeScript accepts `import.meta.dirname`
// (used by the Temporal worker to resolve workflowsPath) alongside the
// standard `import.meta.url`.
interface ImportMeta {
// Absolute directory of the current module (Node's import.meta.dirname).
dirname: string;
// file:// URL of the current module.
url: string;
}

View File

@ -3,11 +3,13 @@
"type": "module",
"version": "3.3.0",
"description": "vtuber data acquisition",
"main": "src/index.email.js",
"main": "src/index.js",
"scripts": {
"test": "mocha",
"start": "node ./src/index.email.js",
"start:browser": "node ./src/index.browser.js"
"build:worker": "tsc --build ./tsconfig.json",
"start": "echo please use either start:manager or start:worker",
"start:manager": "node --loader ts-node/esm ./src/index.ts",
"start:worker": "node --loader ts-node/esm ./src/temporal/worker.ts"
},
"keywords": [],
"author": "@CJ_Clippy",
@ -16,8 +18,12 @@
"@aws-sdk/client-s3": "^3.583.0",
"@aws-sdk/lib-storage": "^3.588.0",
"@aws-sdk/s3-request-presigner": "^3.588.0",
"@book000/twitterts": "^0.62.50",
"@paralleldrive/cuid2": "^2.2.2",
"@temporalio/client": "^1.9.0",
"@temporalio/worker": "^1.9.0",
"@temporalio/workflow": "^1.9.0",
"@tsconfig/node20": "^20.1.4",
"@types/imapflow": "^1.0.18",
"cheerio": "1.0.0-rc.12",
"concurrently": "^8.2.2",
"date-fns": "^3.6.0",
@ -28,20 +34,21 @@
"imapflow": "^1.0.160",
"limiter": "2.0.1",
"mailparser": "^3.7.1",
"node-vibrant": "3.2.1-alpha.1",
"node-fetch": "^3.3.0",
"nodemon": "^3.1.3",
"p-retry": "^5.1.2",
"pg-pubsub": "workspace:*",
"next": "workspace:*",
"qs": "^6.12.1",
"sharp": "^0.33.4",
"slugify": "^1.6.6",
"ts-node": "^10.9.2",
"tsx": "^4.7.2",
"typescript": "^5.4.5",
"xpath": "^0.0.34"
},
"packageManager": "pnpm@9.1.3",
"packageManager": "pnpm@9.2.0",
"devDependencies": {
"chai": "^5.1.0",
"mocha": "^10.4.0"
},
"engines": {
"node": "^20"
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,45 +0,0 @@
'use strict'
/**
* watches an e-mail inbox for going live notifications
*/
import { checkEmail } from './parsers.js'
import { signalRealtime, createStreamInDb } from './signals.js'
import { Email } from './imap.js'
import fastq from "fastq";
const q = fastq.promise(handleMessage, 1)
async function handleMessage({email, msg}) {
try {
console.log(' ✏️ loading message')
const body = await email.loadMessage(msg.uid)
console.log(' ✏️ checking e-mail')
const { isMatch, url, platform, channel, displayName, date, userId, avatar } = (await checkEmail(body))
if (isMatch) {
console.log(' ✏️✏️ signalling realtime')
await signalRealtime({ url, platform, channel, displayName, date, userId, avatar })
console.log(' ✏️✏️ creating stream entry in db')
await createStreamInDb({ source: 'email', platform, channel, date, url, userId, avatar })
}
console.log(' ✏️ archiving e-mail')
await email.archiveMessage(msg.uid)
} catch (e) {
// console.error('error encoutered')
console.error(` An error was encountered while handling the following e-mail message.\n${JSON.stringify(msg, null, 2)}\nError as follows.`)
console.error(e)
}
}
(async () => {
const email = new Email()
email.on('message', (msg) => q.push({email, msg}))
await email.connect()
})()

View File

@ -0,0 +1,63 @@
'use strict'
/**
* watches an e-mail inbox for going live notifications
*/
import { checkEmail } from './parsers.js'
// import { createStreamInDb } from './signals.js'
import { Email } from './imap.js'
import { Client, Connection } from '@temporalio/client'
import { NotificationData, processEmail } from './temporal/workflows.js'
import { FetchMessageObject } from 'imapflow'
import { createId } from '@paralleldrive/cuid2'
const connection = await Connection.connect({ address: 'temporal-frontend.futureporn.svc.cluster.local:7233' });
const client = new Client({ connection, namespace: 'futureporn' });
/**
 * Handle a single inbound IMAP message: load its body, parse it for a
 * going-live notification, and — when it matches — start the `processEmail`
 * Temporal workflow on the 'scout' task queue with the parsed data.
 *
 * NOTE(review): this deliberately throws after the first successful workflow
 * run ("stopping after just one") and leaves message archiving commented out —
 * it reads as temporary integration-testing scaffolding; confirm before
 * relying on it in production.
 *
 * @param email connected Email (IMAP) client used to fetch the message body
 * @param msg   imapflow message descriptor; only `msg.uid` is used here
 */
async function handleMessage({ email, msg }: { email: Email, msg: FetchMessageObject }) {
try {
console.log(' ✏️ loading message')
const body = await email.loadMessage(msg.uid)
// console.log(' ✏️ checking e-mail')
const { isMatch, url, platform, channel, displayName, date, userId, avatar }: NotificationData = (await checkEmail(body) )
if (isMatch) {
// Unique, greppable workflow id for this e-mail.
const wfId = `process-email-${createId()}`
// console.log(` ✏️ [DRY] starting Temporal workflow ${wfId} @todo actually start temporal workflow`)
// await signalRealtime({ url, platform, channel, displayName, date, userId, avatar })
// @todo invoke a Temporal workflow here
const handle = await client.workflow.start(processEmail, {
workflowId: wfId,
taskQueue: 'scout',
args: [{ url, platform, channel, displayName, date, userId, avatar }]
});
// const handle = client.getHandle(workflowId);
// Block until the workflow completes so its result can be inspected in logs.
const result = await handle.result();
console.log(`result of the workflow is as follows`)
console.log(result)
throw new Error('!todo we are stopping after just one (for now) @todo')
// console.log(' ✏️✏️ creating stream entry in db')
// await createStreamInDb({ source: 'email', platform, channel, date, url, userId, avatar })
}
// console.log(' ✏️ archiving e-mail')
// await email.archiveMessage(msg.uid)
} catch (e) {
// console.error('error encountered')
console.error(` An error was encountered while handling the following e-mail message.\n${JSON.stringify(msg, null, 2)}\nError as follows.`)
console.error(e)
}
}
// Entry point: connect to the IMAP inbox and feed messages into handleMessage.
// NOTE(review): `.once` handles only the FIRST message and then stops
// listening — the previous implementation (index.email.js) queued every
// message via fastq. This matches the "stop after one" testing scaffolding in
// handleMessage; confirm it is intentional.
(async () => {
const email = new Email()
email.once('message', (msg: FetchMessageObject) => handleMessage({ email, msg }))
await email.connect()
})()

View File

@ -9,7 +9,7 @@ import { createId } from '@paralleldrive/cuid2'
import { basename } from 'node:path'
import fs from 'node:fs'
if (!process.env.S3_BUCKET_NAME) throw new Error('S3_BUCKET was undefined in env');
if (!process.env.S3_BUCKET_NAME) throw new Error('S3_BUCKET_NAME was undefined in env');
if (!process.env.SCOUT_NITTER_URL) throw new Error('SCOUT_NITTER_URL was undefined in env');

View File

@ -1,13 +1,12 @@
import 'dotenv/config'
// import { PgPubSub } from '@imqueue/pg-pubsub'; // @see https://github.com/imqueue/pg-pubsub/issues/20
import { PgPubSub } from 'pg-pubsub'
import qs from 'qs'
import { subMinutes, addMinutes } from 'date-fns'
import { fpSlugify, download } from './utils.js'
import { getProminentColor } from './image.js'
import { RateLimiter } from "limiter"
import { getImage } from './vtuber.js'
import fansly from './fansly.js'
// alternative js libraries for postgres notify/wait
// * https://github.com/imqueue/pg-pubsub
@ -17,28 +16,9 @@ import { getImage } from './vtuber.js'
if (!process.env.SCOUT_STRAPI_API_KEY) throw new Error('SCOUT_STRAPI_API_KEY is missing from env');
if (!process.env.STRAPI_URL) throw new Error('STRAPI_URL is missing from env');
if (!process.env.SCOUT_RECENTS_TOKEN) throw new Error('SCOUT_RECENTS_TOKEN is undefined in env');
if (!process.env.POSTGRES_REALTIME_CONNECTION_STRING) throw new Error('POSTGRES_REALTIME_CONNECTION_STRING is undefined in env');
if (!process.env.CDN_BUCKET_URL) throw new Error('CDN_BUCKET_URL is undefined in env');
console.log(`process.env.POSTGRES_REALTIME_CONNECTION_STRING=${process.env.POSTGRES_REALTIME_CONNECTION_STRING}`)
const pubSub = new PgPubSub({
connectionString: process.env.POSTGRES_REALTIME_CONNECTION_STRING,
// @see https://github.com/imqueue/pg-pubsub?tab=readme-ov-file#single-listener-inter-process-locking
singleListener: true
});
(async () => {
await pubSub.connect();
})();
export async function signalRealtime ({ url, platform, channel, displayName, date}) {
// faye.publish('/signals', {
// signal: 'startV2',
// url: url
// })
console.log(`📰📰📰 publishing streamStarted event. url=${url}, platform=${platform}, channel=${channel}, displayName=${displayName}, date=${date}`)
pubSub.notify('streamStarted', { url, platform, channel, displayName, date });
}
/**
@ -56,7 +36,7 @@ export async function signalRealtime ({ url, platform, channel, displayName, dat
*/
export async function createStreamInDb ({ source, platform, channel, date, url, userId }) {
const limiter = new RateLimiter({ tokensPerInterval: 0.3, interval: "second" });
// const limiter = new RateLimiter({ tokensPerInterval: 0.3, interval: "second" });
let vtuberId, streamId
console.log('>> # Step 1')
@ -127,8 +107,8 @@ export async function createStreamInDb ({ source, platform, channel, date, url,
fansly: fansly.url.fromUsername(channel)
}
}
const platformImageUrl = await getImage(limiter, dummyVtuber)
const imageFile = await download({ limiter, url: platformImageUrl })
const platformImageUrl = await getImage(dummyVtuber)
const imageFile = await download({ url: platformImageUrl })
// get themeColor from image
const themeColor = await getProminentColor(imageFile)

View File

@ -0,0 +1,3 @@
/**
 * Build a friendly greeting for `name`.
 * Kept async so it can be registered as a Temporal activity.
 * @param {string} name - who to greet
 * @returns {Promise<string>} the greeting text
 */
export async function greet(name) {
  const greeting = 'Hello, ' + name + '!';
  return greeting;
}

View File

@ -0,0 +1,42 @@
import { Client, Connection } from '@temporalio/client';
import { example } from './temporal.workflow.js';
import { createId } from '@paralleldrive/cuid2';
/**
 * Smoke-test driver for the Temporal "hello-world" example: connects to the
 * in-cluster Temporal frontend, starts the `example` workflow on the
 * 'hello-world' task queue, waits for its result, then closes the connection.
 */
async function run() {
// const cert = await fs.readFile('./path-to/your.pem');
// const key = await fs.readFile('./path-to/your.key');
// In-cluster service DNS address; no TLS configured here.
let connectionOptions = {
address: 'temporal-frontend.futureporn.svc.cluster.local',
};
const connection = await Connection.connect(connectionOptions);
const client = new Client({
connection,
namespace: 'futureporn',
});
console.log('>>> WE ARE RUNNING THE WORKFLOW!!!!')
const handle = await client.workflow.start(example, {
taskQueue: 'hello-world',
// type inference works! args: [name: string]
args: ['Temporal'],
// in practice, use a meaningful business ID, like customerId or transactionId
workflowId: 'workflow-' + createId(),
});
console.log(`Started workflow ${handle.workflowId}`);
// optional: wait for client result
console.log(await handle.result()); // Hello, Temporal!
await client.connection.close();
}
// Run and exit non-zero on any error (surfaces failures in k8s logs).
run().catch((err) => {
console.error(err);
process.exit(1);
});

View File

@ -0,0 +1,38 @@
import { NativeConnection, Worker } from '@temporalio/worker'
import * as activities from './temporal.activities.js'
import path from 'node:path'
/**
 * Temporal Worker entry point: connects to the in-cluster Temporal frontend,
 * registers the example workflow bundle and activities, and polls the
 * 'hello-world' task queue until shutdown or a fatal error.
 */
async function run() {
// Step 1: Establish a connection with Temporal server.
//
// Worker code uses `@temporalio/worker.NativeConnection`.
// (But in your application code it's `@temporalio/client.Connection`.)
const connection = await NativeConnection.connect({
address: 'temporal-frontend.futureporn.svc.cluster.local',
// TLS and gRPC metadata configuration goes here.
});
// Step 2: Register Workflows and Activities with the Worker.
const worker = await Worker.create({
connection,
namespace: 'futureporn',
taskQueue: 'hello-world',
// Workflows are registered using a path as they run in a separate JS context.
// NOTE(review): import.meta.dirname requires Node >= 20.11 (see node.d.ts shim).
workflowsPath: path.join(import.meta.dirname, './temporal.workflow.js'),
activities,
});
// Step 3: Start accepting tasks on the `hello-world` queue
//
// The worker runs until it encounters an unexpected error or the process receives a shutdown signal registered on
// the SDK Runtime object.
//
// By default, worker logs are written via the Runtime logger to STDERR at INFO level.
//
// See https://typescript.temporal.io/api/classes/worker.Runtime#install to customize these defaults.
await worker.run();
}
// Exit non-zero on fatal errors so the pod restarts.
run().catch((err) => {
console.error(err);
process.exit(1);
})

View File

@ -0,0 +1,10 @@
import { proxyActivities } from '@temporalio/workflow';
const { greet } = proxyActivities({
startToCloseTimeout: '1 minute',
});
/** A workflow that simply calls an activity */
export async function example(name) {
return await greet(name);
}

View File

@ -0,0 +1,202 @@
// export function blah(name) {
// return `Today is a blah kind of a day. amirite, ${name}?`;
// }
import fetch from "node-fetch"
import { NotificationData, processEmail } from "./workflows.js"
import qs from 'qs'
import { IVtuberResponse } from 'next'
import { getImage } from '../vtuber.js'
import { fpSlugify, download } from '../utils.js'
import fansly from '../fansly.js'
import { getProminentColor } from '../image.js'
import { uploadFile } from '../s3.js'
// Outcome of a chargeUser attempt.
export type ChargeResult = {
// Status string from the payment call ("success"-style) or "failure".
status: string;
// Human-readable reason; present only when the charge failed.
errorMessage?: string;
};
if (!process.env.SCOUT_STRAPI_API_KEY) throw new Error('SCOUT_STRAPI_API_KEY is missing from env');
if (!process.env.STRAPI_URL) throw new Error('STRAPI_URL is missing from env');
if (!process.env.CDN_BUCKET_URL) throw new Error('CDN_BUCKET_URL is missing from env');
if (!process.env.SCOUT_NITTER_URL) throw new Error('SCOUT_NITTER_URL is missing from env');
if (!process.env.SCOUT_NITTER_ACCESS_KEY) throw new Error('SCOUT_NITTER_ACCESS_KEY is missing from env');
/**
* find or create vtuber in Strapi
*/
/**
 * Find or create a vtuber record in Strapi and return its numeric id.
 *
 * Lookup key depends on the platform: chaturbate vtubers are matched by their
 * chaturbate URL, fansly vtubers by their fanslyId. When no match exists, a
 * new record is created, which requires downloading the vtuber's platform
 * image, extracting a theme color from it, and uploading the image to
 * B2/CDN storage.
 *
 * @param notification parsed going-live notification (platform, userId, url, channel)
 * @returns the Strapi id of the found-or-created vtuber
 * @throws when the platform is unsupported, the match is ambiguous, or creation fails
 */
export async function upsertVtuber({ platform, userId, url, channel }: NotificationData): Promise<number> {
  let vtuberId
  console.log('>> # Step 1, upsertVtuber')
  // Build the Strapi lookup filter for this platform.
  // GET /api/:pluralApiId?filters[field][operator]=value
  const findVtubersFilters = (() => {
    if (platform === 'chaturbate') {
      return { chaturbate: { $eq: url } }
    } else if (platform === 'fansly') {
      if (!userId) throw new Error('Fansly userId was undefined, but it is required.')
      return { fanslyId: { $eq: userId } }
    } else {
      // FIX: previously an unknown platform produced an undefined filter,
      // which queried ALL vtubers and then tripped the ambiguity check below.
      throw new Error(`Unsupported platform '${platform}'; expected 'chaturbate' or 'fansly'.`)
    }
  })()
  console.log('>>>>> the following is findVtubersFilters.')
  console.log(findVtubersFilters)
  const findVtubersQueryString = qs.stringify({
    filters: findVtubersFilters
  }, { encode: false })
  console.log(`>>>>> platform=${platform}, url=${url}, userId=${userId}`)
  console.log('>> findVtuber')
  const findVtuberRes = await fetch(`${process.env.STRAPI_URL}/api/vtubers?${findVtubersQueryString}`, {
    method: 'GET',
    headers: {
      'content-type': 'application/json'
    }
  })
  const findVtuberJson = await findVtuberRes.json() as IVtuberResponse
  console.log('>> here is the vtuber json')
  console.log(findVtuberJson)
  if (findVtuberJson?.data && findVtuberJson.data.length > 0) {
    console.log('>>a vtuber was FOUND')
    if (findVtuberJson.data.length > 1) throw new Error('There was more than one vtuber match. There must only be one.')
    vtuberId = findVtuberJson.data[0].id
    console.log(`the matching vtuber has ID=${vtuberId} (${findVtuberJson.data[0].attributes.displayName})`)
  }
  if (!vtuberId) {
    console.log('>> vtuberId was not found so we create')
    // Creating the record needs an image CDN URL and a themeColor:
    //   download platform image -> extract themeColor -> upload to B2 -> CDN link.
    // vtuber.getImage expects a vtuber object, which we don't have yet,
    // so we create a dummy one.
    const dummyVtuber = {
      attributes: {
        slug: fpSlugify(channel),
        fansly: fansly.url.fromUsername(channel)
      }
    }
    const platformImageUrl = await getImage(dummyVtuber)
    const imageFile = await download({ url: platformImageUrl })
    // get themeColor from image
    const themeColor = await getProminentColor(imageFile)
    // upload image to b2
    const b2FileData = await uploadFile(imageFile)
    // FIX: CDN_BUCKET_URL already includes the scheme (values.yaml sets e.g.
    // "https://fp-dev.b-cdn.net"), so prepending "https://" here produced
    // broken "https://https://…" links.
    const imageCdnLink = `${process.env.CDN_BUCKET_URL}/${b2FileData.Key}`
    const createVtuberRes = await fetch(`${process.env.STRAPI_URL}/api/vtubers`, {
      method: 'POST',
      headers: {
        'authorization': `Bearer ${process.env.SCOUT_STRAPI_API_KEY}`,
        'content-type': 'application/json'
      },
      body: JSON.stringify({
        data: {
          displayName: channel,
          fansly: (platform === 'fansly') ? url : null,
          fanslyId: (platform === 'fansly') ? userId : null,
          chaturbate: (platform === 'chaturbate') ? url : null,
          slug: fpSlugify(channel),
          description1: ' ',
          image: imageCdnLink,
          themeColor: themeColor || '#dde1ec'
        }
      })
    })
    const createVtuberJson = await createVtuberRes.json() as IVtuberResponse
    console.log('>> createVtuberJson as follows')
    console.log(JSON.stringify(createVtuberJson, null, 2))
    if (createVtuberJson.data) {
      vtuberId = createVtuberJson.data.id
      console.log(`>>> vtuber created with id=${vtuberId}`)
    }
  }
  // FIX: fail loudly rather than returning a bogus id when creation failed.
  if (!vtuberId) throw new Error('Failed to find or create the vtuber in Strapi.')
  // FIX: previously returned the placeholder 777 instead of the id computed above.
  return vtuberId
}
/**
 * Upsert a platform-notification record in Strapi.
 * Stub implementation — returns a placeholder id until the Strapi
 * integration is written.
 * @returns placeholder record id (777)
 */
export async function upsertPlatformNotification(): Promise<number> {
  // TODO: find-or-create the platform-notification via the Strapi REST API.
  const placeholderId = 777
  return placeholderId
}
/**
 * Upsert a stream record in Strapi.
 * Stub implementation — returns a placeholder id until the Strapi
 * integration is written.
 * @returns placeholder record id (777)
 */
export async function upsertStream(): Promise<number> {
  // TODO: find-or-create the stream via the Strapi REST API.
  const placeholderId = 777
  return placeholderId
}
/**
 * Charge a user's saved payment method for a quantity of an item.
 *
 * Stub implementation: hits httpbin.org and echoes back the `status`
 * query parameter instead of calling a real payments service.
 *
 * @param userId   - id of the user to charge
 * @param itemId   - id of the item being purchased
 * @param quantity - number of units to charge for
 * @returns `{ status: "success" }` on the happy path, or
 *          `{ status: "failure", errorMessage }` if the request throws
 */
export async function chargeUser(
  userId: string,
  itemId: string,
  quantity: number,
): Promise<ChargeResult> {
  // TODO send request to the payments service that looks up the user's saved
  // payment info and the cost of the item and attempts to charge their payment
  // method.
  console.log(`Charging user ${userId} for ${quantity} of item ${itemId}`);
  try {
    const res = await fetch("http://httpbin.org/get?status=success");
    const payload: any = await res.json();
    return { status: payload.args.status };
  } catch (err: any) {
    return { status: "failure", errorMessage: err.message };
  }
}
/**
 * Reserve `quantity` units of `itemId`, failing when stock is insufficient.
 * Stub: always succeeds. A real implementation would atomically check and
 * decrement in one database operation, e.g.:
 *   const result = await db.collection('items').updateOne(
 *     { _id: itemId, numAvailable: { $gte: quantity } },
 *     { $inc: { numAvailable: -quantity } }
 *   )
 *   return result.modifiedCount === 1
 * @param itemId   - id of the item to reserve
 * @param quantity - number of units to reserve
 * @returns true when the reservation succeeded
 */
export async function checkAndDecrementInventory(
  itemId: string,
  quantity: number,
): Promise<boolean> {
  console.log(`Reserving ${quantity} of item ${itemId}`);
  return true;
}
/**
 * Return `quantity` units of `itemId` to inventory (compensation for a
 * failed charge). Stub: always succeeds. A real implementation would be:
 *   const result = await db.collection('items').updateOne(
 *     { _id: itemId },
 *     { $inc: { numAvailable: quantity } }
 *   )
 *   return result.modifiedCount === 1
 * @param itemId   - id of the item to restock
 * @param quantity - number of units to add back
 * @returns true when the increment succeeded
 */
export async function incrementInventory(
  itemId: string,
  quantity: number,
): Promise<boolean> {
  console.log(`Incrementing ${itemId} inventory by ${quantity}`);
  return true;
}

View File

@ -0,0 +1,55 @@
import path from "path"
import { NativeConnection, Worker } from "@temporalio/worker"
import * as activities from "./activities.js"
// Fail fast at module load: the worker cannot run without its Temporal
// connection settings, so a missing variable should crash immediately.
if (!process.env.TEMPORAL_SERVICE_ADDRESS) throw new Error(`TEMPORAL_SERVICE_ADDRESS is missing in env`);
if (!process.env.TEMPORAL_NAMESPACE) throw new Error(`TEMPORAL_NAMESPACE is missing in env`);
if (!process.env.TEMPORAL_TASK_QUEUE) throw new Error(`TEMPORAL_TASK_QUEUE is missing in env`);
// Echo the effective configuration at startup for debuggability.
console.log(`
process.env.TEMPORAL_SERVICE_ADDRESS=${process.env.TEMPORAL_SERVICE_ADDRESS}
process.env.TEMPORAL_NAMESPACE=${process.env.TEMPORAL_NAMESPACE}
process.env.TEMPORAL_TASK_QUEUE=${process.env.TEMPORAL_TASK_QUEUE}
import.meta.dirname=${import.meta.dirname}
`)
/**
 * Connect to the Temporal server and run the scout worker until it is
 * shut down or hits an unrecoverable error. Registers workflows (by path,
 * since they run in a separate JS context) and all exported activities
 * on the queue named by TEMPORAL_TASK_QUEUE.
 */
async function run() {
  console.log(' scout-worker startup!')
  // Step 1: Establish a connection with Temporal server.
  //
  // Worker code uses `@temporalio/worker.NativeConnection`.
  // (But in your application code it's `@temporalio/client.Connection`.)
  const connection = await NativeConnection.connect({
    address: process.env.TEMPORAL_SERVICE_ADDRESS,
    // TLS and gRPC metadata configuration goes here.
  });
  // Step 2: Register Workflows and Activities with the Worker.
  const worker = await Worker.create({
    connection,
    namespace: process.env.TEMPORAL_NAMESPACE,
    // ''+ coerces string|undefined to string for the compiler; the env guard
    // at module load already guarantees the value is set.
    taskQueue: ''+process.env.TEMPORAL_TASK_QUEUE,
    // Workflows are registered using a path as they run in a separate JS context.
    // NOTE(review): this points at the .ts source — fine under ts-node, but a
    // compiled build would need the emitted .js path; confirm the deploy setup.
    workflowsPath: path.join(import.meta.dirname, './workflows.ts'),
    activities,
    maxTaskQueueActivitiesPerSecond: 1 // since we are accessing a 3rd party API, we use this as a courtesy throttle
  });
  console.log('scout-worker is running.')
  // Step 3: Start accepting tasks on the configured task queue.
  //
  // The worker runs until it encounters an unexpected error or the process receives a shutdown signal registered on
  // the SDK Runtime object.
  //
  // By default, worker logs are written via the Runtime logger to STDERR at INFO level.
  //
  // See https://typescript.temporal.io/api/classes/worker.Runtime#install to customize these defaults.
  await worker.run();
}
// Exit nonzero on any fatal worker error so the orchestrator restarts the pod.
run().catch((err) => {
  console.error(err);
  process.exit(1);
})

View File

@ -0,0 +1,73 @@
import { proxyActivities, continueAsNew, log, sleep } from "@temporalio/workflow"
import type * as activities from "./activities.js"
import { FetchMessageObject } from 'imapflow'
/** Parsed fields of a platform go-live notification e-mail. */
export type NotificationData = {
  isMatch?: boolean;      // whether the e-mail matched a known notification format
  url: string;            // stream/profile URL taken from the e-mail
  platform: string;       // platform name, e.g. 'fansly' or 'chaturbate'
  channel: string;        // platform channel/username
  displayName: string;    // human-readable vtuber name
  date: string;           // date string from the notification
  userId: string | null;  // platform user id, when available
  avatar: string;         // avatar image URL
};
// Workflow-side proxies for the activities; each invocation gets a
// 1-minute start-to-close timeout (and the SDK's default retry policy).
const {
  chargeUser,
  checkAndDecrementInventory,
  incrementInventory,
  upsertPlatformNotification,
  upsertStream,
  upsertVtuber,
} = proxyActivities<typeof activities>({
  startToCloseTimeout: "1 minute",
});
/**
 * Workflow: process one parsed platform-notification e-mail.
 * In Strapi, finds/updates/creates the related content-types in order:
 *   * vtuber
 *   * platform-notification
 *   * stream
 *
 * Fix: removed a leftover debug `throw` ("erorreorororr; @todo") that made
 * the platform-notification and stream upserts unreachable, so the workflow
 * always failed after upsertVtuber.
 *
 * @param data - parsed e-mail fields (see NotificationData)
 * @returns the Strapi ids of the three upserted records
 */
export async function processEmail({
  url, platform, channel, displayName, date, userId, avatar
}: NotificationData): Promise<{ vtuberId: number, pNotifId: number, streamId: number }> {
  console.log(`processEmail begin. platform=${platform}, date=${date}, url=${url}`)
  // Step 1: the vtuber record.
  const vtuberId = await upsertVtuber({ url, platform, channel, displayName, date, userId, avatar })
  console.log('we have finished upsertVtuber and the vtuberId is '+vtuberId)
  // Step 2: the platform-notification record (stub activity for now).
  const pNotifId = await upsertPlatformNotification()
  // Step 3: the stream record (stub activity for now).
  const streamId = await upsertStream()
  return { vtuberId, pNotifId, streamId }
}
// export async function order(
// userId: string,
// itemId: string,
// quantity: number,
// ): Promise<string> {
// const haveEnoughInventory: boolean = await checkAndDecrementInventory(
// itemId,
// quantity,
// );
// if (haveEnoughInventory) {
// const result: activities.ChargeResult = await chargeUser(
// userId,
// itemId,
// quantity,
// );
// if (result.status === "success") {
// return `Order successful!`;
// } else {
// await incrementInventory(itemId, quantity);
// return `Unable to complete payment. Error: ${result.errorMessage}`;
// }
// } else {
// return `Sorry, we don't have enough items in stock to fulfill your order.`;
// }
// }

View File

@ -0,0 +1,30 @@
// {
// "compilerOptions": {
// "outDir": "./built",
// "allowJs": true,
// "target": "ES2017",
// "module": "NodeNext"
// },
// "include": ["./src/**/*"]
// }
{
"extends": "@tsconfig/node20/tsconfig.json",
"version": "4.4.2",
"compilerOptions": {
"target": "es2017",
"module": "esnext",
"moduleResolution": "node",
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"rootDir": "./src",
"outDir": "./lib"
},
"include": ["**/*.ts"],
"ts-node": {
"experimentalSpecifierResolution": "node",
"transpileOnly": true,
"esm": true,
},
}

View File

@ -1,2 +1,5 @@
shamefully-hoist=true
use-node-version=18.20.3
engine-strict=true
package-manager-strict=true
use-node-version=18.20.3
node-version=18.20.3

View File

@ -20,12 +20,12 @@
"@mux/mux-node": "^7.3.5",
"@paralleldrive/cuid2": "^2.2.2",
"@radix-ui/react-use-callback-ref": "^1.0.1",
"@strapi/plugin-i18n": "4.24.3",
"@strapi/plugin-users-permissions": "4.24.3",
"@strapi/provider-email-sendgrid": "4.24.3",
"@strapi/provider-upload-cloudinary": "4.24.3",
"@strapi/strapi": "4.24.3",
"@strapi/utils": "4.24.3",
"@strapi/plugin-i18n": "4.24.5",
"@strapi/plugin-users-permissions": "4.24.5",
"@strapi/provider-email-sendgrid": "4.24.5",
"@strapi/provider-upload-cloudinary": "4.24.5",
"@strapi/strapi": "4.24.5",
"@strapi/utils": "4.24.5",
"@testing-library/dom": "8.19.0",
"@testing-library/react": "12.1.4",
"@testing-library/react-hooks": "8.0.1",
@ -55,14 +55,14 @@
"prop-types": "^15.8.1",
"purest": "4.0.2",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-dom": "^18.0.0",
"react-intl": "6.3.2",
"react-query": "3.24.3",
"react-redux": "8.0.5",
"react-router-dom": "5.3.4",
"react-test-renderer": "^18.3.1",
"semver": "^7.6.2",
"sharp": "0.33.3",
"sharp": "0.32.6",
"strapi-plugin-fuzzy-search": "^2.2.1",
"styled-components": "5.3.3",
"typescript": "^4.7",
@ -74,19 +74,11 @@
"concurrently": "^8.2.2"
},
"author": {
"name": "CJ_Clippy"
"name": "@CJ_Clippy"
},
"packageManager": "pnpm@9.1.3",
"packageManager": "pnpm@9.2.0",
"license": "MIT",
"strapi": {
"uuid": false,
"supportedArchitectures": {
"libc": [
"musl"
]
},
"overrides": {
"sharp": "0.33.3"
}
"uuid": false
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,50 @@
{
"kind": "collectionType",
"collectionName": "platform_notifications",
"info": {
"singularName": "platform-notification",
"pluralName": "platform-notifications",
"displayName": "Platform Notification",
"description": ""
},
"options": {
"draftAndPublish": false
},
"pluginOptions": {},
"attributes": {
"source": {
"type": "enumeration",
"enum": [
"email",
"manual"
],
"default": "manual",
"required": true
},
"platform": {
"type": "enumeration",
"enum": [
"fansly",
"chaturbate"
],
"default": "chaturbate",
"required": true
},
"date": {
"type": "datetime",
"required": true,
"unique": false
},
"date2": {
"type": "string",
"required": true,
"unique": false,
"regex": "\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d:[0-5]\\d|Z)"
},
"vtuber": {
"type": "relation",
"relation": "oneToOne",
"target": "api::vtuber.vtuber"
}
}
}

View File

@ -0,0 +1,9 @@
'use strict';

/**
 * platform-notification controller
 *
 * Default Strapi core controller (standard CRUD actions) for the
 * `platform-notification` content-type; no custom actions added.
 */

const { createCoreController } = require('@strapi/strapi').factories;

module.exports = createCoreController('api::platform-notification.platform-notification');

View File

@ -0,0 +1,9 @@
'use strict';

/**
 * platform-notification router
 *
 * Default Strapi core router (standard REST routes) for the
 * `platform-notification` content-type; no custom routes added.
 */

const { createCoreRouter } = require('@strapi/strapi').factories;

module.exports = createCoreRouter('api::platform-notification.platform-notification');

View File

@ -0,0 +1,9 @@
'use strict';

/**
 * platform-notification service
 *
 * Default Strapi core service (standard entity operations) for the
 * `platform-notification` content-type; no custom methods added.
 */

const { createCoreService } = require('@strapi/strapi').factories;

module.exports = createCoreService('api::platform-notification.platform-notification');

View File

@ -0,0 +1,3 @@
node_modules
lib
.eslintrc.js

View File

@ -0,0 +1,48 @@
// ESLint configuration for the Temporal TypeScript package.
const { builtinModules } = require('module');

// Node built-ins that workflow code may still import (everything else is
// banned in workflow files — see the override below).
const ALLOWED_NODE_BUILTINS = new Set(['assert']);

module.exports = {
  root: true,
  parser: '@typescript-eslint/parser',
  parserOptions: {
    project: './tsconfig.json',
    tsconfigRootDir: __dirname,
  },
  plugins: ['@typescript-eslint', 'deprecation'],
  extends: [
    'eslint:recommended',
    'plugin:@typescript-eslint/eslint-recommended',
    'plugin:@typescript-eslint/recommended',
    'prettier',
  ],
  rules: {
    // recommended for safety
    '@typescript-eslint/no-floating-promises': 'error', // forgetting to await Activities and Workflow APIs is bad
    'deprecation/deprecation': 'warn',
    // code style preference
    'object-shorthand': ['error', 'always'],
    // relaxed rules, for convenience
    '@typescript-eslint/no-unused-vars': [
      'warn',
      {
        argsIgnorePattern: '^_',
        varsIgnorePattern: '^_',
      },
    ],
    '@typescript-eslint/no-explicit-any': 'off',
  },
  overrides: [
    {
      // Workflow files: forbid importing Node built-ins (both bare and
      // `node:`-prefixed forms), except the allow-listed ones above —
      // presumably because workflows run in Temporal's isolated,
      // deterministic sandbox.
      files: ['src/workflows.ts', 'src/workflows-*.ts', 'src/workflows/*.ts'],
      rules: {
        'no-restricted-imports': [
          'error',
          ...builtinModules.filter((m) => !ALLOWED_NODE_BUILTINS.has(m)).flatMap((m) => [m, `node:${m}`]),
        ],
      },
    },
  ],
};

2
packages/temporal/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
lib
node_modules

1
packages/temporal/.nvmrc Normal file
View File

@ -0,0 +1 @@
20

View File

@ -0,0 +1 @@
lib

View File

@ -0,0 +1,2 @@
printWidth: 120
singleQuote: true

View File

@ -0,0 +1,46 @@
# Cron Workflows
_DEPRECATED: use [Schedules](https://github.com/temporalio/samples-typescript/tree/main/schedules) instead._
This example demonstrates a working Cron workflow. Note the limitations and caveats listed in the [docs](https://docs.temporal.io/content/what-is-a-temporal-cron-job/).
Differences from the hello world demo:
- The Workflow is started with the `cronSchedule: '* * * * *',` option: [`src/client.ts`](./src/client.ts).
- The Activity actually prints a log, instead of returning a string.
- The Workflow runs forever, so if we want it to stop, we have to cancel it. In our `client.ts` script, we cancel it using the handle (when `Ctrl/Cmd-C` is hit). Usually, we'd use the Workflow ID to cancel—for example:
```js
const handle = client.getHandle('1e793a6c-31e2-41c9-8139-53d114293a9e');
await handle.cancel();
```
Note that when we're changing code and restarting Workers, unless we cancel all previous Workflows, they may get picked up by our Worker (since we likely didn't change our Workflow name or task queue), and their output may conflict/mix with new Workflows we're starting. We can check what is still running in Temporal Web ([localhost:8088](http://localhost:8088)) in case we need to kill all previous Workflows.
### Running this sample
1. `temporal server start-dev` to start [Temporal Server](https://github.com/temporalio/cli/#installation).
2. `npm install` to install dependencies.
3. `npm run start.watch` to start the Worker.
4. In another shell, `npm run workflow` to run the Workflow.
Example Worker output:
```bash
Hello from my-schedule, Temporal!
Workflow time: 1636333860201
Activity time: 1636333860241
Hello from my-schedule, Temporal!
Workflow time: 1636333920319
Activity time: 1636333920340
```
The difference between "Workflow time" and "Activity time" reflects the latency between scheduling an Activity and actually starting it.
Each new Workflow is `continuedAsNew` under the hood:
![image](https://user-images.githubusercontent.com/6764957/137712906-2a1d821b-d664-442c-8f17-a174b284c722.png)
And you can see the details in the event history:
![image](https://user-images.githubusercontent.com/6764957/137713250-f19a2987-4e9f-4e76-8e35-c17507731a20.png)

3694
packages/temporal/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,41 @@
{
"name": "cron-workflows",
"version": "0.1.0",
"private": true,
"scripts": {
"build": "tsc --build",
"build.watch": "tsc --build --watch",
"lint": "eslint .",
"start": "ts-node src/worker.ts",
"start.watch": "nodemon src/worker.ts",
"workflow": "ts-node src/client.ts"
},
"nodemonConfig": {
"execMap": {
"ts": "ts-node"
},
"ext": "ts",
"watch": [
"src"
]
},
"dependencies": {
"@temporalio/activity": "^1.9.0",
"@temporalio/client": "^1.9.0",
"@temporalio/worker": "^1.9.0",
"@temporalio/workflow": "^1.9.0"
},
"devDependencies": {
"@tsconfig/node18": "^1.0.0",
"@types/node": "^16.11.43",
"@typescript-eslint/eslint-plugin": "^5.0.0",
"@typescript-eslint/parser": "^5.0.0",
"eslint": "^7.32.0",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-deprecation": "^1.2.1",
"nodemon": "^2.0.12",
"prettier": "^2.8.8",
"ts-node": "^10.2.1",
"typescript": "^4.4.2"
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,6 @@
import { log, activityInfo } from '@temporalio/activity';
/**
 * Activity that logs a greeting plus workflow-time vs activity-time
 * timestamps, so scheduling latency is visible in the worker output.
 * @param name   - name to greet
 * @param wfTime - the workflow's notion of "now", stringified epoch millis
 */
export async function logTime(name: string, wfTime: string): Promise<void> {
  const info = activityInfo();
  log.info(`Hello from ${info.workflowExecution.workflowId}, ${name}!`, {
    workflowTime: wfTime,
    activityTime: Date.now(),
  });
}

View File

@ -0,0 +1,39 @@
import { Client } from '@temporalio/client';
import { scheduledWorkflow } from './workflows';
// Save this to later terminate or cancel this schedule
const workflowId = 'my-schedule';
/**
 * Start the cron Workflow and block on its handle.
 * A cron Workflow never completes normally, so `handle.result()` only
 * settles when the Workflow is canceled (see the SIGINT handler) or fails.
 */
async function run() {
  const client = new Client();
  const handle = await client.workflow.start(scheduledWorkflow, {
    taskQueue: 'cron-workflows',
    // NOTE(review): duplicates the module-level `workflowId` constant —
    // prefer referencing the constant so the SIGINT handler can't drift.
    workflowId: 'my-schedule', // Save this to later terminate or cancel this schedule
    cronSchedule: '* * * * *', // start every minute
    args: ['Temporal'],
  });
  console.log('Cron started');
  try {
    await handle.result(); // await completion of Workflow, which doesn't happen since it's a cron Workflow
  } catch (err: any) {
    console.error(err.message + ':' + handle.workflowId);
  }
}
// just for this demo - cancel the workflow on Ctrl+C
// NOTE(review): if cancel() rejects, this async handler's rejection is
// unhandled and the process still exits via the default signal behavior.
process.on('SIGINT', async () => {
  const client = new Client();
  const handle = client.workflow.getHandle(workflowId);
  await handle.cancel();
  console.log(`\nCanceled Workflow ${handle.workflowId}`);
  process.exit(0);
});
// you cannot catch SIGKILL
// Exit nonzero on any fatal client error so callers/supervisors notice.
run().catch((err) => {
  console.error(err);
  process.exit(1);
});

View File

@ -0,0 +1,17 @@
import { Worker } from '@temporalio/worker';
import * as activities from './activities';
/**
 * Run a Worker serving the `cron-workflows` task queue, with workflows
 * resolved from ./workflows and all exported activities registered.
 * Runs until an unexpected error or a shutdown signal.
 */
async function run() {
  // nothing different here compared to hello world
  const worker = await Worker.create({
    workflowsPath: require.resolve('./workflows'),
    activities,
    taskQueue: 'cron-workflows',
  });
  await worker.run();
}
run().catch((err) => {
console.error(err);
process.exit(1);
});

View File

@ -0,0 +1,11 @@
import { proxyActivities } from '@temporalio/workflow';
import type * as activities from './activities';
// Workflow-side stub for the logTime activity, with a 1-minute
// start-to-close timeout per invocation.
const { logTime } = proxyActivities<typeof activities>({
  startToCloseTimeout: '1 minute',
});
/**
 * A workflow that simply calls the `logTime` activity, passing the
 * workflow's current time so it can be compared with the activity's own
 * clock (the README shows this as "Workflow time" vs "Activity time").
 */
export async function scheduledWorkflow(name: string): Promise<void> {
  await logTime(name, '' + Date.now());
}

View File

@ -0,0 +1,12 @@
{
"extends": "@tsconfig/node18/tsconfig.json",
"version": "4.4.2",
"compilerOptions": {
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"rootDir": "./src",
"outDir": "./lib"
},
"include": ["src/**/*.ts"]
}

View File

@ -0,0 +1,90 @@
#!/bin/sh
# Provision the Postgres databases and roles used by the dev cluster:
#   temporal + temporal_visibility  -> Temporal server persistence
#   futureporn_db                   -> Strapi
#   futureporn_realtime             -> NOTIFY/LISTEN pubsub
# Requires POSTGRES_REALTIME_PASSWORD to be set in ../.env (relative to this
# script). Assumes a pod named `postgres` in the `futureporn` namespace.

bindir=$(dirname "$(readlink -fm "$0")")

# `.` is the POSIX form of bash's `source`; `source` is not defined for
# /bin/sh and fails on dash-like shells.
. "${bindir}/../.env"

# Quote the expansion so the test is well-formed even when the var is unset.
if [ -z "${POSTGRES_REALTIME_PASSWORD}" ]; then
  echo "POSTGRES_REALTIME_PASSWORD was missing in env"
  exit 5
fi

## Create the temporal databases
kubectl -n futureporn exec postgres -- psql -U postgres --command "\
CREATE DATABASE temporal_visibility \
    WITH \
    OWNER = postgres \
    ENCODING = 'UTF8' \
    LOCALE_PROVIDER = 'libc' \
    CONNECTION LIMIT = -1 \
    IS_TEMPLATE = False;"

kubectl -n futureporn exec postgres -- psql -U postgres --command "\
CREATE DATABASE temporal \
    WITH \
    OWNER = postgres \
    ENCODING = 'UTF8' \
    LOCALE_PROVIDER = 'libc' \
    CONNECTION LIMIT = -1 \
    IS_TEMPLATE = False;"

# Schema setup is done separately with temporal-sql-tool, e.g.:
# ./temporal-sql-tool -u $(SQL_USER) --pw $(SQL_PASSWORD) -p 5432 --pl postgres12 --db $(TEMPORAL_DB) drop -f
# ./temporal-sql-tool -u $(SQL_USER) --pw $(SQL_PASSWORD) -p 5432 --pl postgres12 --db $(TEMPORAL_DB) create
# ./temporal-sql-tool -u $(SQL_USER) --pw $(SQL_PASSWORD) -p 5432 --pl postgres12 --db $(TEMPORAL_DB) setup -v 0.0
# ./temporal-sql-tool -u $(SQL_USER) --pw $(SQL_PASSWORD) -p 5432 --pl postgres12 --db $(TEMPORAL_DB) update-schema -d ./schema/postgresql/v12/temporal/versioned
# ./temporal-sql-tool -u $(SQL_USER) --pw $(SQL_PASSWORD) -p 5432 --pl postgres12 --db $(VISIBILITY_DB) drop -f
# ./temporal-sql-tool -u $(SQL_USER) --pw $(SQL_PASSWORD) -p 5432 --pl postgres12 --db $(VISIBILITY_DB) create
# ./temporal-sql-tool -u $(SQL_USER) --pw $(SQL_PASSWORD) -p 5432 --pl postgres12 --db $(VISIBILITY_DB) setup-schema -v 0.0
# ./temporal-sql-tool -u $(SQL_USER) --pw $(SQL_PASSWORD) -p 5432 --pl postgres12 --db $(VISIBILITY_DB) update-schema -d ./schema/postgresql/v12/visibility/versioned

## Create the futureporn Strapi database
kubectl -n futureporn exec postgres -- psql -U postgres --command "\
CREATE DATABASE futureporn_db \
    WITH \
    OWNER = postgres \
    ENCODING = 'UTF8' \
    LOCALE_PROVIDER = 'libc' \
    CONNECTION LIMIT = -1 \
    IS_TEMPLATE = False;"

## Create the futureporn realtime database (for NOTIFY/AWAIT pubsub)
kubectl -n futureporn exec postgres -- psql -U postgres --command "\
CREATE DATABASE futureporn_realtime \
    WITH \
    OWNER = postgres \
    ENCODING = 'UTF8' \
    LOCALE_PROVIDER = 'libc' \
    CONNECTION LIMIT = -1 \
    IS_TEMPLATE = False;"

## create futureporn user (password interpolated from env on purpose)
kubectl -n futureporn exec postgres -- psql -U postgres --command "\
CREATE ROLE futureporn \
    WITH \
    LOGIN \
    NOSUPERUSER \
    NOCREATEDB \
    NOCREATEROLE \
    INHERIT \
    NOREPLICATION \
    NOBYPASSRLS \
    CONNECTION LIMIT -1 \
    PASSWORD '$POSTGRES_REALTIME_PASSWORD';"

## grant futureporn user all privs
kubectl -n futureporn exec postgres -- psql -U postgres --command "\
GRANT ALL PRIVILEGES ON DATABASE futureporn_realtime TO futureporn;"

## import schema
## I have a file, schema.psql that I want to import. How do I do that?
# kubectl -n futureporn exec postgres -- psql -U postgres --command "\ ;"
# kubectl -n futureporn exec postgres -- psql -U postgres -f - < "${bindir}/postgres-2024-05-09-futureporn_db-schema-only.psql"

View File

@ -0,0 +1 @@
kubectl -n futureporn exec postgres -- psql -U postgres --command "DROP DATABASE temporal WITH (FORCE);"

View File

@ -1,58 +0,0 @@
#!/bin/sh
bindir=$(dirname "$(readlink -fm "$0")")
source "${bindir}/../.env"
if [ -z $POSTGRES_REALTIME_PASSWORD ]; then
echo "POSTGRES_REALTIME_PASSWORD was missing in env"
exit 5
fi
## Create the futureporn Strapi database
kubectl -n futureporn exec postgres -- psql -U postgres --command "\
CREATE DATABASE futureporn_db \
WITH \
OWNER = postgres \
ENCODING = 'UTF8' \
LOCALE_PROVIDER = 'libc' \
CONNECTION LIMIT = -1 \
IS_TEMPLATE = False;"
## Create the futureporn realtime database (for NOTIFY/AWAIT pubsub)
kubectl -n futureporn exec postgres -- psql -U postgres --command "\
CREATE DATABASE futureporn_realtime \
WITH \
OWNER = postgres \
ENCODING = 'UTF8' \
LOCALE_PROVIDER = 'libc' \
CONNECTION LIMIT = -1 \
IS_TEMPLATE = False;"
## create futureporn user
kubectl -n futureporn exec postgres -- psql -U postgres --command "\
CREATE ROLE futureporn \
WITH \
LOGIN \
NOSUPERUSER \
NOCREATEDB \
NOCREATEROLE \
INHERIT \
NOREPLICATION \
NOBYPASSRLS \
CONNECTION LIMIT -1 \
PASSWORD '$POSTGRES_REALTIME_PASSWORD';"
## grant futureporn user all privs
kubectl -n futureporn exec postgres -- psql -U postgres --command "\
GRANT ALL PRIVILEGES ON DATABASE futureporn_realtime TO futureporn;"
## import schema
## I have a file, schema.psql that I want to import. How do I do that?
# kubectl -n futureporn exec postgres -- psql -U postgres --command "\ ;"
kubectl -n futureporn exec postgres -- psql -U postgres -f - < "${bindir}/postgres-2024-05-09-futureporn_db-schema-only.psql"

View File

@ -0,0 +1,3 @@
#!/bin/bash
# Register the `futureporn` namespace with the Temporal server.
# NOTE(review): not idempotent — the CLI errors if the namespace exists; confirm
# whether callers tolerate that.
temporal operator namespace create futureporn

View File

@ -23,17 +23,49 @@ dotenv(fn='.env')
# )
# args=['--default_config_file=%s' % os.getenv('TILT_NGROK_DEFAULT_CONFIG_FILE')]
load('ext://helm_remote', 'helm_remote')
# helm_remote(
# 'redis',
# repo_name='redis',
# repo_url='https://charts.bitnami.com/bitnami',
# namespace='futureporn',
# version='19.5.0',
# values=['./charts/nitter/redis.values.yaml']
# version='19.5.2',
# set=[
# 'auth.password=%s' % os.getenv('TRIGGER_REDIS_PASSWORD'),
# 'architecture=standalone',
# ],
# )
# Deploy Temporal into the `futureporn` namespace via its community Helm chart,
# pointing both the default and visibility persistence stores at the shared
# Postgres (cassandra/elasticsearch/prometheus/grafana disabled for local dev).
# NOTE(review): Postgres credentials are passed as plain --set values.
helm_remote(
  'temporal',
  repo_name='temporal',
  repo_url='https://charts.lemontech.engineering',
  namespace='futureporn',
  version='0.37.0',
  set=[
    'admintools.image.tag=1.24.1-tctl-1.18.1-cli-0.12.0',
    'web.image.tag=2.27.2',
    'prometheus.enabled=false',
    'grafana.enabled=false',
    'elasticsearch.enabled=false',
    'web.config.auth.enabled=true',
    'cassandra.enabled=false',
    'server.config.persistence.default.driver=sql',
    'server.config.persistence.default.sql.driver=postgres12',
    'server.config.persistence.default.sql.host=%s' % os.getenv('POSTGRES_HOST'),
    'server.config.persistence.default.sql.port=5432',
    'server.config.persistence.default.sql.user=%s' % os.getenv('POSTGRES_USER'),
    'server.config.persistence.default.sql.password=%s' % os.getenv('POSTGRES_PASSWORD'),
    'server.config.persistence.visibility.driver=sql',
    'server.config.persistence.visibility.sql.driver=postgres12',
    'server.config.persistence.visibility.sql.host=%s' % os.getenv('POSTGRES_HOST'),
    'server.config.persistence.visibility.sql.port=5432',
    'server.config.persistence.visibility.sql.user=%s' % os.getenv('POSTGRES_USER'),
    'server.config.persistence.visibility.sql.password=%s' % os.getenv('POSTGRES_PASSWORD'),
  ]
)
# helm_remote(
# 'nitter',
# repo_name='truecharts',
@ -70,6 +102,31 @@ k8s_yaml(helm(
'./charts/fp',
values=['./charts/fp/values-dev.yaml'],
))
# k8s_yaml(helm(
# './charts/trigger',
# set=[
# 'trigger.name=trigger',
# 'trigger.replicaCount=2',
# 'trigger.image.tag=self-host-rc.2',
# 'trigger.image.pullPolicy=IfNotPresent',
# 'trigger.env.ENCRYPTION_KEY=%s' % os.getenv('TRIGGER_ENCRYPTION_KEY'),
# 'trigger.env.MAGIC_LINK_SECRET=%s' % os.getenv('TRIGGER_MAGIC_LINK_SECRET'),
# 'trigger.env.DATABASE_URL=%s' % os.getenv('TRIGGER_DATABASE_URL'),
# 'trigger.env.LOGIN_ORIGIN=%s' % os.getenv('TRIGGER_LOGIN_ORIGIN'),
# 'trigger.env.APP_ORIGIN=%s' % os.getenv('TRIGGER_APP_ORIGIN'),
# 'trigger.env.PORT=%s' % os.getenv('TRIGGER_PORT'),
# 'trigger.env.REMIX_APP_PORT=%s' % os.getenv('TRIGGER_REMIX_APP_PORT'),
# 'trigger.env.REDIS_HOST=redis-master.futureporn.svc.cluster.local',
# 'trigger.env.REDIS_PORT=6379',
# 'trigger.ingress.nginx.enabled=false',
# 'trigger.ingress.enabled=false',
# 'postgres.enabled=false'
# ]
# ))
# k8s_resource(
# workload='trigger',
# port_forwards=['3030'],
# )
@ -99,23 +156,29 @@ docker_build(
# )
load('ext://uibutton', 'cmd_button')
# cmd_button('postgres:seed',
# argv=['sh', './scripts/postgres-seed.sh'],
# resource='postgres',
# icon_name='dataset',
# text='seed db with schema',
# )
cmd_button('postgres:create',
argv=['sh', './scripts/postgres-create.sh'],
resource='postgres',
icon_name='dataset',
text='create (empty) databases',
)
cmd_button('postgres:restore',
argv=['sh', './scripts/postgres-restore.sh'],
resource='postgres',
icon_name='cloud_download',
icon_name='upload',
text='restore db from backup',
)
cmd_button('postgres:drop',
argv=['sh', './scripts/postgres-drop.sh'],
resource='postgres',
icon_name='delete',
text='delete the database'
text='DROP futureporn_db'
)
cmd_button('postgres:drop_temporal',
argv=['sh', './scripts/postgres-drop-temporal.sh'],
resource='postgres',
icon_name='delete',
text='DROP temporal'
)
cmd_button('postgres:backup',
argv=['sh', './scripts/postgres-backup.sh'],
@ -123,6 +186,12 @@ cmd_button('postgres:backup',
icon_name='download',
text='backup the database'
)
cmd_button('temporal-web:namespace',
argv=['sh', './scripts/temporal-namespaces.sh'],
resource='temporal-web',
icon_name='badge',
text='create futureporn namespace',
)
## Uncomment the following for fp/next in dev mode
@ -143,14 +212,30 @@ docker_build(
docker_build(
'fp/scout',
'fp/scout-manager',
'.',
only=['./pnpm-lock.yaml', './package.json', './packages/scout', './packages/pg-pubsub', './packages/common'],
only=['./pnpm-lock.yaml', './package.json', './packages/scout', './packages/next'],
dockerfile='d.scout.dockerfile',
target='scout',
target='manager',
live_update=[
sync('./packages/scout', '/app')
]
sync('./packages/scout', '/app'),
run('cd /app && pnpm i', trigger=['./packages/scout/package.json', './packages/scout/pnpm-lock.yaml']),
],
entrypoint='pnpm nodemon --ext js,ts,json,yaml --exec node --no-warnings=ExperimentalWarning --loader ts-node/esm ./src/index.ts'
# entrypoint='pnpm tsx watch ./src/index.ts'
)
docker_build(
'fp/scout-worker',
'.',
only=['./pnpm-lock.yaml', './package.json', './packages/scout', './packages/next'],
dockerfile='d.scout.dockerfile',
target='worker',
live_update=[
sync('./packages/scout', '/app'),
run('cd /app && pnpm i', trigger=['./packages/scout/package.json', './packages/scout/pnpm-lock.yaml']),
],
entrypoint='pnpm nodemon --ext js,ts,json,yaml --exec node --no-warnings=ExperimentalWarning --loader ts-node/esm ./src/temporal/worker.ts'
)
# k8s_resource(
# workload='kubernetes-ingress-controller-manager',
@ -188,7 +273,8 @@ k8s_resource(
port_forwards=['3000'],
links=[
link('https://next.piko.sbtp.xyz'),
]
],
resource_deps=['strapi', 'postgres']
)
k8s_resource(
workload='strapi',
@ -196,7 +282,8 @@ k8s_resource(
links=[
link('http://localhost:1339/admin'),
link('https://strapi.piko.sbtp.xyz'),
]
],
resource_deps=['postgres']
)
# k8s_resource(
# workload='strapi-app',
@ -208,12 +295,34 @@ k8s_resource(
port_forwards=['5432']
)
k8s_resource(
workload='scout-worker',
resource_deps=['postgres', 'strapi', 'temporal-frontend', 'scout-manager']
)
k8s_resource(
workload='scout-manager',
resource_deps=['postgres', 'strapi', 'temporal-frontend']
)
# k8s_resource(
# workload='pgadmin',
# port_forwards=['5050']
# port_forwards=['5050'],
# resource_deps=['postgres']
# )
# k8s_resource(
# workload='nitter',
# port_forwards=['6060:10606'],
# )
# )
# Group the Temporal chart's workloads under one Tilt label and gate them all
# on Postgres; expose the gRPC frontend (7233) and the web UI (8080) locally.
k8s_resource(workload='temporal-admintools', labels='temporal', resource_deps=['postgres'])
k8s_resource(workload='temporal-frontend', labels='temporal', port_forwards=['7233'], resource_deps=['postgres'])
k8s_resource(workload='temporal-history', labels='temporal', resource_deps=['postgres'])
k8s_resource(workload='temporal-worker', labels='temporal', resource_deps=['postgres'])
k8s_resource(workload='temporal-web', labels='temporal', port_forwards=['8080'], resource_deps=['postgres'])
k8s_resource(workload='temporal-schema-setup', labels='temporal', resource_deps=['postgres'])
k8s_resource(workload='temporal-schema-update', labels='temporal', resource_deps=['postgres'])
k8s_resource(workload='temporal-matching', labels='temporal', resource_deps=['postgres'])