commit ffe9ca2bb5
parent 2c8e41be61

    cleanup
@@ -1,3 +0,0 @@
-https://dokku.com/docs/advanced-usage/deployment-tasks/?h=monorepo#changing-the-appjson-location
-
-https://dokku.com/docs/deployment/builders/dockerfiles/
@@ -1,8 +0,0 @@
-{
-  "scripts": {
-    "dokku": {
-      "predeploy": "echo hello-world-predeploy",
-      "postdeploy": "echo hello-world-postdeploy"
-    }
-  }
-}
@@ -4,10 +4,3 @@ Source Code for https://futureporn.net
 
 See ./ARCHITECTURE.md for overview
 
-
-## Jun update todo list
-
-* [x] external-dns gameplan
-* [ ] traefik well understood
-* [ ] staging test with *.futureporn.net domains
-
@@ -1,9 +1,5 @@
-This chart was originally created by Kompose.
+# Futureporn helm chart
 
-Then I realized I don't understand kubernetes.
+This is the chart with templates that define Futureporn kubernetes cluster
 
-It was too complex.
-
-I needed to start over, understand each piece before moving on.
-
-so we're starting small, incrementally migrating services to the cluster.
+https://helm.sh/docs/topics/charts/
@@ -108,6 +108,9 @@ spec:
   routes:
     - match: Host(`echo.fp.sbtp.xyz`)
      kind: Rule
+      middlewares:
+        - name: redirect
+          namespace: futureporn
      services:
        - name: echo
          namespace: futureporn
@@ -127,9 +130,6 @@ spec:
   routes:
     - match: Host(`echo.fp.sbtp.xyz`)
      kind: Rule
-      middlewares:
-        - name: redirect
-          namespace: futureporn
      services:
        - name: echo
          namespace: futureporn
@@ -33,7 +33,7 @@ spec:
             - name: CDN_BUCKET_URL
              value: "{{ .Values.scout.cdnBucketUrl }}"
            - name: STRAPI_URL
-              value: https://strapi.piko.sbtp.xyz
+              value: https://strapi.fp.sbtp.xyz
            - name: S3_BUCKET_APPLICATION_KEY
              valueFrom:
                secretKeyRef:
@@ -156,7 +156,7 @@ spec:
             # - name: CDN_BUCKET_URL
            #   value: "{{ .Values.scout.cdnBucketUrl }}"
            # - name: STRAPI_URL
-            #   value: https://strapi.piko.sbtp.xyz
+            #   value: https://strapi.fp.sbtp.xyz
            # - name: SCOUT_NITTER_ACCESS_KEY
            #   valueFrom:
            #     secretKeyRef:
@@ -116,13 +116,13 @@ spec:
                 secretKeyRef:
                  name: strapi
                  key: sendgridApiKey
+            - name: STRAPI_URL
+              value: "{{ .Values.strapi.url }}"
            - name: TRANSFER_TOKEN_SALT
              valueFrom:
                secretKeyRef:
                  name: strapi
                  key: transferTokenSalt
-            - name: STRAPI_URL
-              value: "{{ .Values.strapi.url }}"
            - name: PORT
              value: "{{ .Values.strapi.port }}"
          resources:
@@ -18,6 +18,7 @@ ENV NEXT_PUBLIC_SITE_URL ${NEXT_PUBLIC_SITE_URL}
 ENV NEXT_PUBLIC_STRAPI_URL ${NEXT_PUBLIC_STRAPI_URL}
 ENV NEXT_PUBLIC_UPPY_COMPANION_URL ${NEXT_PUBLIC_UPPY_COMPANION_URL}
 ENV NEXT_TELEMETRY_DISABLED 1
+ENV NODE_EXTRA_CA_CERTS "/app/letsencrypt-stg-root-x1.pem"
 COPY pnpm-lock.yaml ./
 RUN pnpm fetch
 COPY ./packages/next /app
@@ -1,14 +0,0 @@
-FROM node:20-alpine
-WORKDIR /app
-ENV PNPM_HOME="/pnpm"
-ENV PATH="$PNPM_HOME:$PATH"
-RUN corepack enable
-RUN apk update
-
-ENV NODE_ENV=production
-COPY pnpm-lock.yaml ./
-RUN pnpm fetch
-COPY ./packages/realtime /app
-
-ENTRYPOINT ["pnpm"]
-CMD ["run", "start"]
@@ -1,19 +0,0 @@
-FROM node:18-alpine3.18
-# Installing libvips-dev for sharp Compatibility
-RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev nasm bash vips-dev git
-ARG NODE_ENV=development
-ENV NODE_ENV=${NODE_ENV}
-
-WORKDIR /opt/
-COPY ./packages/strapi-app/package.json ./packages/strapi-app/yarn.lock ./
-RUN yarn global add node-gyp
-RUN yarn config set network-timeout 600000 -g && yarn install
-ENV PATH /opt/node_modules/.bin:$PATH
-
-WORKDIR /opt/app
-COPY ./packages/strapi-app/ .
-RUN chown -R node:node /opt/app
-USER node
-RUN ["yarn", "build"]
-EXPOSE 1338
-CMD ["yarn", "develop", "--debug"]
@@ -0,0 +1,22 @@
+FROM node:18
+# Installing libvips-dev for sharp Compatibility
+# RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev nasm bash vips-dev git
+RUN corepack enable
+ENV PNPM_HOME="/pnpm"
+ENV PATH="$PNPM_HOME:$PATH"
+ARG NODE_ENV=development
+ENV NODE_ENV=${NODE_ENV}
+
+WORKDIR /opt/
+COPY ./packages/strapi/package.json ./packages/strapi/pnpm-lock.yaml ./
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install -g node-gyp
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
+ENV PATH /opt/node_modules/.bin:$PATH
+
+WORKDIR /opt/app
+COPY ./packages/strapi/. .
+RUN chown -R node:node /opt/app
+USER node
+RUN ["pnpm", "run", "build"]
+EXPOSE 1339
+CMD ["pnpm", "run", "develop"]
@@ -7,4 +7,4 @@ COPY ./packages/strapi/package.json ./packages/strapi/pnpm-lock.yaml .
 RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
 COPY ./packages/strapi/ .
 RUN ["pnpm", "run", "build"]
-CMD ["pnpm", "run", "dev"]
+CMD ["pnpm", "run", "develop"]
@@ -1,24 +0,0 @@
-FROM node:20-alpine as base
-WORKDIR /app
-ENV PNPM_HOME="/pnpm"
-ENV PATH="$PNPM_HOME:$PATH"
-RUN corepack enable
-RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev vips-dev libc6-compat git nasm bash gcompat
-
-FROM base AS install
-COPY ./packages/strapi/pnpm-lock.yaml ./packages/strapi/package.json ./
-RUN pnpm install --prod --shamefully-hoist && pnpm run build
-COPY ./packages/strapi .
-RUN chown -R node:node /app
-USER node
-
-
-FROM install AS dev
-ENV NODE_ENV=development
-ENTRYPOINT ["pnpm"]
-CMD ["run", "dev"]
-
-FROM install AS release
-ENV NODE_ENV=production
-ENTRYPOINT ["pnpm"]
-CMD ["run", "start"]
@@ -1,20 +0,0 @@
-FROM node:18-alpine3.18
-RUN echo "do a rebuild, yuou fucking shit!"
-# Installing libvips-dev for sharp Compatibility
-RUN apk update && apk add --no-cache build-base gcc autoconf automake zlib-dev libpng-dev nasm bash vips-dev git
-ARG NODE_ENV=development
-ENV NODE_ENV=${NODE_ENV}
-
-WORKDIR /opt/
-COPY package.json yarn.lock ./
-RUN yarn global add node-gyp
-RUN yarn config set network-timeout 600000 -g && yarn install
-ENV PATH /opt/node_modules/.bin:$PATH
-
-WORKDIR /opt/app
-COPY . .
-RUN chown -R node:node /opt/app
-USER node
-RUN ["yarn", "build"]
-EXPOSE 1339
-CMD ["yarn", "start"]
@@ -0,0 +1,3 @@
+# Futureporn flux
+
+Gitops https://fluxcd.io/flux/get-started/
@@ -0,0 +1,5 @@
+# Futureporn node packages
+
+Each folder here is an individual node package
+
+See https://pnpm.io/workspaces
@@ -14,7 +14,7 @@ export default async function ArchiveProgress ({ vtuber }: IArchiveProgressProps
   const issueStreams = await getStreamCountForVtuber(vtuber.id, ['issue'])
   const totalStreams = await getStreamCountForVtuber(vtuber.id)
   const eligibleStreams = goodStreams+issueStreams
-  const completedPercentage = (eligibleStreams / totalStreams) * 100
+  const completedPercentage = Math.floor((eligibleStreams / totalStreams) * 100)
   return (
     <div>
       {/* <p>
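The `Math.floor` change above only affects rounding; the division itself stays unguarded, so a vtuber with zero total streams would still produce `NaN`. A hypothetical guard, not part of this commit, sketched as a plain function:

```js
// Hypothetical guard (not in the commit): 0/0 yields NaN, so bail out early
// when a vtuber has no streams at all.
function completedPercentage(eligibleStreams, totalStreams) {
  if (totalStreams === 0) return 0;
  return Math.floor((eligibleStreams / totalStreams) * 100);
}

console.log(completedPercentage(3, 4)); // 75
console.log(completedPercentage(0, 0)); // 0 rather than NaN
```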
@@ -7,7 +7,9 @@ import { faPatreon } from '@fortawesome/free-brands-svg-icons';
 import { useLocalStorageValue } from '@react-hookz/web';
 import { faRightFromBracket } from '@fortawesome/free-solid-svg-icons';
+import Skeleton from 'react-loading-skeleton';
+import { strapiUrl } from '@/lib/constants';
 // import NextAuth from 'next-auth'; // this is (pipedream) wishlist
 // import Providers from 'next-auth/providers';
 
 export interface IJWT {
   jwt: string;
@@ -10,11 +10,15 @@ interface PatronsListProps {
 export default async function PatronsList({ displayStyle }: PatronsListProps) {
   const patrons = await getPatrons()
+
+  console.log(`patrons are as follows`)
+  console.log(patrons)
+
   if (!patrons) return (
     <SkeletonTheme baseColor="#000" highlightColor="#000">
       <Skeleton count={3} enableAnimation={false} />
     </SkeletonTheme>
   );
 
   if (displayStyle === 'box') {
     return (
       <div className="columns is-multiline">
@@ -93,7 +93,7 @@ export default function StreamsTable() {
       accessorFn: d => [
         (d.attributes.isChaturbateStream && 'CB'),
         (d.attributes.isFanslyStream && 'Fansly')
-      ].filter(Boolean).join(' ') || '???'
+      ].filter(Boolean).join(', ') || '???'
     },
     {
       header: 'Status',
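For reference, the accessor above reduces to a small pure function: falsy entries are dropped by `filter(Boolean)`, the surviving labels are comma-joined, and `'???'` is the fallback when neither flag is set:

```js
// The accessor's logic re-run in isolation:
const platformLabel = (isChaturbate, isFansly) =>
  [isChaturbate && 'CB', isFansly && 'Fansly'].filter(Boolean).join(', ') || '???';

console.log(platformLabel(true, true));   // "CB, Fansly"
console.log(platformLabel(true, false));  // "CB"
console.log(platformLabel(false, false)); // "???"
```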
@@ -277,7 +277,7 @@ export async function getAllStreamsForVtuber(vtuberId: number, archiveStatuses =
     },
   });
 
-  console.log(`strapiUrl=${strapiUrl}`)
+  // console.log(`strapiUrl=${strapiUrl}`)
   const response = await fetch(`${strapiUrl}/api/streams?${query}`, fetchStreamsOptions)
 
   if (response.status !== 200) {
@@ -371,13 +371,13 @@ export async function getStreamCountForVtuber(vtuberId: number, archiveStatuses
   )
   const res = await fetch(`${strapiUrl}/api/streams?${query}`, fetchStreamsOptions)
   const data = await res.json()
-  console.log(`getStreamCountForVtuber with archiveStatuses=${archiveStatuses}`)
-  console.log(JSON.stringify(data, null, 2))
+  // console.log(`getStreamCountForVtuber with archiveStatuses=${archiveStatuses}`)
+  // console.log(JSON.stringify(data, null, 2))
   return data.meta.pagination.total
 }
 
 export async function getStreamsForVtuber(vtuberId: number, page: number = 1, pageSize: number = 25, sortDesc = true): Promise<IStreamsResponse> {
-  console.log(`getStreamsForVtuber() with strapiUrl=${strapiUrl}`)
+  // console.log(`getStreamsForVtuber() with strapiUrl=${strapiUrl}`)
   const query = qs.stringify(
     {
       populate: {
@@ -405,7 +405,7 @@ export async function getStreamsForVtuber(vtuberId: number, page: number = 1, pa
   )
   const res = await fetch(`${strapiUrl}/api/streams?${query}`, fetchStreamsOptions)
   const data = await res.json()
-  console.log(data)
+  // console.log(data)
   return data
 }
 
@@ -11,8 +11,16 @@ import { notFound } from "next/navigation";
 
 export default async function Page() {
   const vods = await getVods(1, 9, true);
+  console.log('vods as follows')
+  console.log(JSON.stringify(vods, null, 2))
+
+
   const vtubers = await getVtubers();
   if (!vtubers) notFound();
+  console.log(`vtubers as follows`)
+  console.log(JSON.stringify(vtubers, null, 2))
+
+
 
   // return (
   //   <pre>
@@ -16,47 +16,47 @@
     "@fortawesome/free-solid-svg-icons": "^6.5.2",
     "@fortawesome/react-fontawesome": "^0.2.2",
     "@hookform/error-message": "^2.0.1",
-    "@hookform/resolvers": "^3.6.0",
+    "@hookform/resolvers": "^3.7.0",
     "@mux/blurhash": "^0.1.2",
     "@mux/mux-player": "^2.7.0",
     "@mux/mux-player-react": "^2.7.0",
     "@paralleldrive/cuid2": "^2.2.2",
     "@react-hookz/web": "^24.0.4",
-    "@tanstack/react-query": "^5.40.1",
-    "@tanstack/react-table": "^8.17.3",
-    "@types/lodash": "^4.17.4",
+    "@tanstack/react-query": "^5.49.2",
+    "@tanstack/react-table": "^8.19.2",
+    "@types/lodash": "^4.17.6",
     "@types/qs": "^6.9.15",
     "@types/react": "^18.3.3",
     "@types/react-dom": "^18.3.0",
     "@uppy/aws-s3": "^3.6.2",
     "@uppy/aws-s3-multipart": "^3.12.0",
-    "@uppy/core": "^3.12.0",
-    "@uppy/dashboard": "^3.8.3",
+    "@uppy/core": "^3.13.0",
+    "@uppy/dashboard": "^3.9.1",
     "@uppy/drag-drop": "^3.1.0",
     "@uppy/file-input": "^3.1.2",
     "@uppy/progress-bar": "^3.1.1",
-    "@uppy/react": "^3.3.1",
-    "@uppy/remote-sources": "^1.2.0",
+    "@uppy/react": "^3.4.0",
+    "@uppy/remote-sources": "^1.3.0",
     "bulma": "^1.0.1",
     "date-fns": "^2.30.0",
     "date-fns-tz": "^2.0.1",
     "dayjs": "^1.11.11",
     "feed": "^4.2.2",
     "gray-matter": "^4.0.3",
-    "hls.js": "^1.5.11",
+    "hls.js": "^1.5.12",
     "lodash": "^4.17.21",
     "lunarphase-js": "^2.0.3",
-    "multiformats": "^13.1.1",
+    "multiformats": "^13.1.3",
     "next": "14.0.4",
     "next-goatcounter": "^1.0.5",
     "nextjs-toploader": "^1.6.12",
     "plyr": "^3.7.8",
     "prism-react-renderer": "^2.3.1",
-    "qs": "^6.12.1",
+    "qs": "^6.12.2",
     "react": "^18.3.1",
     "react-data-table-component": "^7.6.2",
     "react-dom": "^18.3.1",
-    "react-hook-form": "^7.51.5",
+    "react-hook-form": "^7.52.1",
     "react-loading-skeleton": "^3.4.0",
     "react-toastify": "^9.1.3",
     "sharp": "^0.33.4",
@@ -65,7 +65,7 @@
     "yup": "^1.4.0"
   },
   "devDependencies": {
-    "@types/node": "^20.14.2",
+    "@types/node": "^20.14.9",
     "eslint": "^8.57.0",
     "eslint-config-next": "14.0.4",
     "tsc": "^2.0.4",
File diff suppressed because it is too large
@@ -55,6 +55,6 @@ async function handleMessage({ email, msg }: { email: Email, msg: FetchMessageOb
 
 (async () => {
   const email = new Email()
-  email.on('message', (msg: FetchMessageObject) => handleMessage({ email, msg }))
+  email.once('message', (msg: FetchMessageObject) => handleMessage({ email, msg }))
   await email.connect()
 })()
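The `on` → `once` swap changes listener lifetime, assuming the `Email` class inherits Node's `EventEmitter` semantics: `on` fires for every `message` event, while `once` detaches after the first one. A minimal sketch:

```js
// Minimal EventEmitter demonstration of on vs once:
const { EventEmitter } = require('node:events');

const emitter = new EventEmitter();
emitter.once('message', (msg) => console.log('handled:', msg));
emitter.emit('message', 'first');  // handled: first
emitter.emit('message', 'second'); // ignored; the listener already detached
```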
@@ -0,0 +1,118 @@
+.env*
+tunnel.conf
+
+############################
+# OS X
+############################
+
+.DS_Store
+.AppleDouble
+.LSOverride
+Icon
+.Spotlight-V100
+.Trashes
+._*
+
+
+############################
+# Linux
+############################
+
+*~
+
+
+############################
+# Windows
+############################
+
+Thumbs.db
+ehthumbs.db
+Desktop.ini
+$RECYCLE.BIN/
+*.cab
+*.msi
+*.msm
+*.msp
+
+
+############################
+# Packages
+############################
+
+*.7z
+*.csv
+*.dat
+*.dmg
+*.gz
+*.iso
+*.jar
+*.rar
+*.tar
+*.zip
+*.com
+*.class
+*.dll
+*.exe
+*.o
+*.seed
+*.so
+*.swo
+*.swp
+*.swn
+*.swm
+*.out
+*.pid
+
+
+############################
+# Logs and databases
+############################
+
+.tmp
+*.log
+*.sql
+*.sqlite
+*.sqlite3
+
+
+############################
+# Misc.
+############################
+
+*#
+ssl
+.idea
+nbproject
+public/uploads/*
+!public/uploads/.gitkeep
+
+############################
+# Node.js
+############################
+
+lib-cov
+lcov.info
+pids
+logs
+results
+node_modules
+.node_history
+
+############################
+# Tests
+############################
+
+testApp
+coverage
+
+############################
+# Strapi
+############################
+
+.env
+license.txt
+exports
+*.cache
+dist
+build
+.strapi-updater.json
@@ -0,0 +1,14 @@
+/**
+ * This file was automatically generated by Strapi.
+ * Any modifications made will be discarded.
+ */
+import i18N from "@strapi/plugin-i18n/strapi-admin";
+import usersPermissions from "@strapi/plugin-users-permissions/strapi-admin";
+import { renderAdmin } from "@strapi/strapi/admin";
+
+renderAdmin(document.getElementById("strapi"), {
+  plugins: {
+    i18n: i18N,
+    "users-permissions": usersPermissions,
+  },
+});
@@ -0,0 +1,62 @@
+<!DOCTYPE html>
+<html lang="en">
+  <!--
+    This file was automatically generated by Strapi.
+    Any modifications made will be discarded.
+  -->
+  <head>
+    <meta charset="utf-8" />
+    <meta
+      name="viewport"
+      content="width=device-width, initial-scale=1, viewport-fit=cover"
+    />
+    <meta name="robots" content="noindex" />
+    <meta name="referrer" content="same-origin" />
+    <title>Strapi Admin</title>
+    <style>
+      html,
+      body,
+      #strapi {
+        height: 100%;
+      }
+      body {
+        margin: 0;
+        -webkit-font-smoothing: antialiased;
+      }
+    </style>
+  </head>
+  <body>
+    <div id="strapi"></div>
+    <noscript
+      ><div class="strapi--root">
+        <div class="strapi--no-js">
+          <style type="text/css">
+            .strapi--root {
+              position: absolute;
+              top: 0;
+              right: 0;
+              left: 0;
+              bottom: 0;
+              background: #fff;
+            }
+
+            .strapi--no-js {
+              position: absolute;
+              top: 50%;
+              left: 50%;
+              transform: translate(-50%, -50%);
+              text-align: center;
+              font-family: helvetica, arial, sans-serif;
+            }
+          </style>
+          <h1>JavaScript disabled</h1>
+          <p>
+            Please
+            <a href="https://www.enable-javascript.com/">enable JavaScript</a>
+            in your browser and reload the page to proceed.
+          </p>
+        </div>
+      </div></noscript
+    >
+  </body>
+</html>
@@ -0,0 +1,13 @@
+## dev notes
+
+### patreon campaign benefit ids
+
+* ironmouse "Thank you" (for testing): 4760169
+* cj_clippy "Full library access" (for production): 9380584
+* cj_clippy "Your URL displayed on Futureporn.net": 10663202
+
+### Content-Type Builder (Docker caveat)
+
+Don't use the web UI to create or update Content-Types! The changes will be lost. This is a side-effect of our hacked-together solution for running Strapi with pnpm in Docker.
+
+Instead, content-type schemas must be hand-edited in ./src/api/(...). For the changes to take effect, trigger a strapi resource update in Tilt.
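For orientation, a hand-edited content-type schema normally lives at `src/api/<name>/content-types/<name>/schema.json`. The shape below is a hypothetical example of the Strapi v4 schema format (expressed as a JS object so it can carry comments; the real file is plain JSON):

```js
// Hypothetical src/api/tag/content-types/tag/schema.json, shown as JS:
module.exports = {
  kind: 'collectionType',
  collectionName: 'tags',
  info: { singularName: 'tag', pluralName: 'tags', displayName: 'Tag' },
  options: { draftAndPublish: true },
  attributes: {
    // edit attributes here by hand, then trigger the Tilt resource update
    name: { type: 'string', required: true },
  },
};
```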
@@ -0,0 +1,13 @@
+module.exports = ({ env }) => ({
+  auth: {
+    secret: env('ADMIN_JWT_SECRET'),
+  },
+  apiToken: {
+    salt: env('API_TOKEN_SALT'),
+  },
+  transfer: {
+    token: {
+      salt: env('TRANSFER_TOKEN_SALT'),
+    },
+  },
+});
@@ -0,0 +1,7 @@
+module.exports = {
+  rest: {
+    defaultLimit: 25,
+    maxLimit: 100,
+    withCount: true,
+  },
+};
@@ -0,0 +1,49 @@
+const path = require('path');
+
+module.exports = ({ env }) => {
+  const client = env('DATABASE_CLIENT', 'postgres');
+
+  const connections = {
+    postgres: {
+      connection: {
+        connectionString: env('DATABASE_URL'),
+        host: env('DATABASE_HOST', 'localhost'),
+        port: env.int('DATABASE_PORT', 5432),
+        database: env('DATABASE_NAME', 'strapi'),
+        user: env('DATABASE_USERNAME', 'strapi'),
+        password: env('DATABASE_PASSWORD', 'strapi'),
+        ssl: env.bool('DATABASE_SSL', false) && {
+          key: env('DATABASE_SSL_KEY', undefined),
+          cert: env('DATABASE_SSL_CERT', undefined),
+          ca: env('DATABASE_SSL_CA', undefined),
+          capath: env('DATABASE_SSL_CAPATH', undefined),
+          cipher: env('DATABASE_SSL_CIPHER', undefined),
+          rejectUnauthorized: env.bool(
+            'DATABASE_SSL_REJECT_UNAUTHORIZED',
+            true
+          ),
+        },
+        schema: env('DATABASE_SCHEMA', 'public'),
+      },
+      pool: { min: env.int('DATABASE_POOL_MIN', 2), max: env.int('DATABASE_POOL_MAX', 10) },
+    },
+    sqlite: {
+      connection: {
+        filename: path.join(
+          __dirname,
+          '..',
+          env('DATABASE_FILENAME', 'data.db')
+        ),
+      },
+      useNullAsDefault: true,
+    },
+  };
+
+  return {
+    connection: {
+      client,
+      ...connections[client],
+      acquireConnectionTimeout: env.int('DATABASE_CONNECTION_TIMEOUT', 60000),
+    },
+  };
+};
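The `ssl: env.bool('DATABASE_SSL', false) && { ... }` line above relies on short-circuit evaluation: when the flag is false the whole expression is `false` (knex skips TLS); when true it evaluates to the options object. A standalone demonstration of the pattern:

```js
// Short-circuit pattern used by the ssl option above:
const sslEnabled = false; // stand-in for env.bool('DATABASE_SSL', false)
const ssl = sslEnabled && { rejectUnauthorized: true };
console.log(ssl); // false -> TLS off; with sslEnabled = true you'd get the object
```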
@@ -0,0 +1,26 @@
+module.exports = [
+  'strapi::logger',
+  'strapi::errors',
+  {
+    name: 'strapi::security',
+    config: {
+      contentSecurityPolicy: {
+        useDefaults: true,
+        directives: {
+          'connect-src': ["'self'", 'https:'],
+          'img-src': ["'self'", 'data:', 'blob:', 'dl.airtable.com', 'res.cloudinary.com'],
+          'media-src': ["'self'", 'data:', 'blob:', 'dl.airtable.com', 'res.cloudinary.com'],
+          upgradeInsecureRequests: null,
+        },
+      },
+    },
+  },
+
+  'strapi::cors',
+  'strapi::poweredBy',
+  'strapi::query',
+  'strapi::body',
+  'strapi::session',
+  'strapi::favicon',
+  'strapi::public',
+];
@@ -0,0 +1,75 @@
+module.exports = ({
+  env
+}) => ({
+  'fuzzy-search': {
+    enabled: true,
+    config: {
+      contentTypes: [{
+        uid: 'api::tag.tag',
+        modelName: 'tag',
+        transliterate: false,
+        queryConstraints: {
+          where: {
+            '$and': [
+              {
+                publishedAt: {
+                  '$notNull': true
+                }
+              },
+            ]
+          }
+        },
+        fuzzysortOptions: {
+          characterLimit: 32,
+          threshold: -600,
+          limit: 10,
+          keys: [{
+            name: 'name',
+            weight: 100
+          }]
+        }
+      }]
+    }
+  },
+  upload: {
+    config: {
+      provider: 'cloudinary',
+      providerOptions: {
+        cloud_name: env('CLOUDINARY_NAME'),
+        api_key: env('CLOUDINARY_KEY'),
+        api_secret: env('CLOUDINARY_SECRET'),
+      },
+      actionOptions: {
+        upload: {},
+        uploadStream: {},
+        delete: {},
+      },
+    }
+  },
+  email: {
+    config: {
+      provider: 'sendgrid',
+      providerOptions: {
+        apiKey: env('SENDGRID_API_KEY'),
+      },
+      settings: {
+        defaultFrom: 'welcome@futureporn.net',
+        defaultReplyTo: 'cj@futureporn.net',
+        testAddress: 'grimtech@fastmail.com',
+      },
+    },
+  },
+  "users-permissions": {
+    config: {
+      register: {
+        allowedFields: [
+          "isNamePublic",
+          "isLinkPublic",
+          "avatar",
+          "vanityLink",
+          "patreonBenefits"
+        ]
+      }
+    }
+  }
+});
@@ -0,0 +1,15 @@
+// greets some
+
+module.exports = ({ env }) => ({
+  host: env('HOST', '0.0.0.0'),
+  port: env.int('PORT', 1337),
+  proxy: true,
+  app: {
+    keys: env.array('APP_KEYS'),
+  },
+  webhooks: {
+    populateRelations: env.bool('WEBHOOKS_POPULATE_RELATIONS', false)
+  },
+  url: env('STRAPI_URL', 'https://portal.futureporn.net')
+});
+
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+# daily-backup.sh
+# useful for the dokku server
+# dokku's backup feature is broken atm https://github.com/dokku/dokku-postgres/issues/274
+# backups are exported from dokku:postgres plugin before being sent to b2
+
+
+filename="$(date +'%Y-%m-%d_%H-%M-%S').psql"
+
+dokku postgres:export futureporn-db > "${filename}"
+b2-linux upload-file futureporn-db-backup "./${filename}" "${filename}"
+
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+
+# Check if the containers already exist
+pgadmin_exists=$(docker ps -a --filter "name=pgadmin" --format '{{.Names}}')
+strapi_postgres_exists=$(docker ps -a --filter "name=strapi-postgres" --format '{{.Names}}')
+
+# Run strapi-postgres container if it doesn't exist or is not running
+if [ -z "$strapi_postgres_exists" ]; then
+  docker run -d --name strapi-postgres -p 5432:5432 -e POSTGRES_PASSWORD=$POSTGRES_PASSWORD postgres:14.7
+  echo "strapi-postgres container created and started."
+else
+  container_status=$(docker inspect -f '{{.State.Status}}' strapi-postgres)
+
+  if [ "$container_status" != "running" ]; then
+    docker start strapi-postgres
+    echo "strapi-postgres container started."
+  else
+    echo "strapi-postgres container already exists and is running. Skipping creation."
+  fi
+fi
@@ -0,0 +1,25 @@
+module.exports = {
+  async up(knex) {
+    // ... (Create vods_vtuber_links table if not already created)
+
+    // Get vtuber ID for ProjektMelody (assuming it's 1)
+    const vtuberId = 1;
+
+    // Get all VODs from the database
+    const vods = await knex.select('*').from('vods');
+
+    // For each VOD, associate it with the vtuber (vtuber with ID 1) if not already associated
+    for (const [index, vod] of vods.entries()) {
+      const existingAssociation = await knex('vods_vtuber_links')
+        .where({ vtuber_id: vtuberId, vod_id: vod.id })
+        .first();
+      if (!existingAssociation) {
+        await knex('vods_vtuber_links').insert({
+          vtuber_id: vtuberId,
+          vod_id: vod.id,
+          vod_order: index + 1, // Auto-increment the vod_order number
+        });
+      }
+    }
+  }
+};
@@ -0,0 +1,18 @@
+module.exports = {
+  async up(knex) {
+
+    // Get all B2 Files from the database
+    const files = await knex.select('*').from('b2_files');
+
+    // For each B2 File, update cdnUrl
+    // we do this to change
+    // erroneous https://futureporn-b2.b-cdn.net/futureporn/:key
+    // to https://futureporn-b2.b-cdn.net/:key
+    for (const [index, file] of files.entries()) {
+      const key = file.key;
+      const cdnUrl = `https://futureporn-b2.b-cdn.net/${key}`;
+      await knex('b2_files').update({ cdn_url: cdnUrl }).where({ id: file.id });
+    }
+  },
+};
+
@@ -0,0 +1,23 @@
+const stripQueryString = function (text) {
+  if (!text) return '';
+  return text.split(/[?#]/)[0];
+}
+
+module.exports = {
+  async up(knex) {
+
+    // Get all vods
+    const vods = await knex.select('*').from('vods');
+
+    // For each vod, update videoSrcHash and video240Hash
+    // we remove any existing ?filename(...) qs from the cid
+    for (const [index, vod] of vods.entries()) {
+      const strippedVideoSrcHash = stripQueryString(vod.video_src_hash)
+      const strippedVideo240Hash = stripQueryString(vod.video_240_hash)
+      await knex('vods').update({ video_src_hash: strippedVideoSrcHash }).where({ id: vod.id });
+      await knex('vods').update({ video_240_hash: strippedVideo240Hash }).where({ id: vod.id });
+    }
+
+  },
+};
+
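`stripQueryString` drops everything after the first `?` or `#`, which is what removes the `?filename=...` suffix from the stored CIDs. A quick check of its behavior (the sample values are made up):

```js
const stripQueryString = (text) => (text ? text.split(/[?#]/)[0] : '');

console.log(stripQueryString('bafybeigexample?filename=video.mp4')); // 'bafybeigexample'
console.log(stripQueryString('bafybeigexample#fragment'));           // 'bafybeigexample'
console.log(stripQueryString(null));                                 // ''
```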
@@ -0,0 +1,13 @@
+module.exports = {
+  async up(knex) {
+
+    const toys = await knex.select('*').from('toys');
+    for (const [index, toy] of toys.entries()) {
+      if (toy.image_2) {
+        const existingImageFilename = new URL(toy.image_2).pathname.split('/').at(-1)
+        await knex('toys').update({ image_2: `https://futureporn-b2.b-cdn.net/${existingImageFilename}` }).where({ id: toy.id });
+      }
+    }
+  },
+};
+
@@ -0,0 +1,33 @@
+module.exports = {
+  async up(knex) {
+    // Add the `image2` field (column) as a short text field
+    await knex.schema.table('toys', (table) => {
+      table.string('image_2', 512);
+    });
+
+    // Get all toys
+    const toys = await knex.select('*').from('toys');
+
+    // Update the image2 field with the previous image URLs
+    for (const toy of toys) {
+      // lookup the file morph which maps toy to (image) file
+      const imageFileId = (await knex.select('file_id').from('files_related_morphs').where({ related_id: toy.id }))[0].file_id
+
+      // get the image data from the file
+      const imageUrl = (await knex.select('url').from('files').where({ id: imageFileId }))[0].url
+
+      if (!imageUrl) continue;
+
+      // Copy the values from image to image2
+      await knex('toys').update({ image_2: imageUrl }).where({ id: toy.id });
+    }
+
+    const hasImageColumn = await knex.schema.hasColumn('toys', 'image');
+    if (hasImageColumn) {
+      // Drop the `image` column
+      table.dropColumn('image');
+    }
+
+
+  },
+};
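Note that in the tail of the migration above, `table.dropColumn('image')` is referenced outside any `knex.schema.table` callback, so `table` is undefined there and the call would throw. A sketch of the probable intent for that fragment of `up(knex)` — the same shape the later drop-image migration in this diff actually uses:

```js
// Corrected tail section, wrapped in a schema callback as knex requires:
const hasImageColumn = await knex.schema.hasColumn('toys', 'image');
if (hasImageColumn) {
  await knex.schema.table('toys', (table) => {
    table.dropColumn('image'); // drop the old `image` column
  });
}
```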
@@ -0,0 +1,23 @@
+module.exports = {
+  async up(knex) {
+    // Check if the 'date_2' column exists in the 'vods' table
+    const hasDate2Column = await knex.schema.hasColumn('vods', 'date_2');
+
+    if (!hasDate2Column) {
+      // Add the new 'date_2' column as a string if it doesn't exist
+      await knex.schema.table('vods', (table) => {
+        table.string('date_2');
+      });
+
+      // Fetch all existing rows from the 'vods' table
+      const existingVods = await knex.select('id', 'date').from('vods');
+
+      // Loop through each row and update 'date_2' with the date value
+      for (const vod of existingVods) {
+        await knex('vods')
+          .where({ id: vod.id })
+          .update({ date_2: vod.date.toISOString() });
+      }
+    }
+  },
+};
@@ -0,0 +1,11 @@
+module.exports = {
+  async up(knex) {
+    const hasColumn = await knex.schema.hasColumn('toys', 'image');
+
+    if (hasColumn) {
+      await knex.schema.table('toys', (table) => {
+        table.dropColumn('image');
+      });
+    }
+  }
+};
@@ -0,0 +1,11 @@
+module.exports = {
+  async up(knex) {
+    const hasColumn = await knex.schema.hasColumn('vods', 'video_src');
+
+    if (hasColumn) {
+      await knex.schema.table('vods', (table) => {
+        table.dropColumn('video_src');
+      });
+    }
+  }
+};
@@ -0,0 +1,31 @@
+
+const generateCuid = require('../../misc/generateCuid');
+
+module.exports = {
+  async up(knex) {
+
+    console.log(`MIGRATION-- 2023-12-24-add-cuid-to-vods.js`);
+
+    // Check if the 'cuid' column already exists in the 'vods' table
+    const hasCuidColumn = await knex.schema.hasColumn('vods', 'cuid');
+
+    if (!hasCuidColumn) {
+      // Add the 'cuid' column to the 'vods' table
+      await knex.schema.table('vods', (table) => {
+        table.string('cuid');
+      });
+    }
+
+    // Get all vods from the database
+    const vods = await knex.select('*').from('vods');
+
+    // For each vod, populate cuid if it's null or undefined
+    for (const [index, vod] of vods.entries()) {
+      if (!vod.cuid) {
+        await knex('vods').update({ cuid: generateCuid() }).where({ id: vod.id });
+      }
+    }
+
+  },
+};
+
@@ -0,0 +1,33 @@
+
+const { init } = require('@paralleldrive/cuid2');
+
+module.exports = {
+  async up(knex) {
+
+    console.log(`MIGRATION-- 2023-12-26-add-cuid-to-streams.js`);
+
+    // Check if the 'cuid' column already exists in the 'streams' table
+    const hasCuidColumn = await knex.schema.hasColumn('streams', 'cuid');
+
+    if (!hasCuidColumn) {
+      // Add the 'cuid' column to the 'streams' table
+      await knex.schema.table('streams', (table) => {
+        table.string('cuid');
+      });
+    }
+
+    // Get all streams from the database
+    const streams = await knex.select('*').from('streams');
+
+    // For each stream, populate cuid if it's null or undefined
+    for (const [index, stream] of streams.entries()) {
+      if (!stream.cuid) {
+        const length = 10;
+        const genCuid = init({ length });
+        await knex('streams').update({ cuid: genCuid() }).where({ id: stream.id });
+      }
+    }
+
+  },
+};
+
@@ -0,0 +1,35 @@
+
+const { sub, add } = require('date-fns');
+
+
+module.exports = {
+  async up(knex) {
+    console.log(`MIGRATION-- 2023-12-27-relate-vods-to-streams.js`);
+
+    // Get all VODs from the database
+    const vods = await knex.select('*').from('vods');
+
+    // For each VOD, associate it with the stream with the nearest date (if not already associated)
+    for (const [index, vod] of vods.entries()) {
+      const existingAssociation = await knex('vods_stream_links')
+        .where({ vod_id: vod.id })
+        .first();
+
+      if (!existingAssociation) {
+        // get nearest stream within +/- 3 hours
+        const date2 = new Date(vod.date_2);
+        const startDate = sub(date2, { hours: 3 })
+        const endDate = add(date2, { hours: 3 });
+        console.log(`vod.id=${vod.id}, vod.date_2=${vod.date_2}, date2=${date2}, startDate=${startDate}, endDate=${endDate}`)
+        const stream = await knex('streams')
+          .whereBetween('date', [startDate, endDate])
+
+        await knex('vods_stream_links').insert({
+          stream_id: stream.id,
+          vod_id: vod.id,
+          vod_order: 1,
+        });
+      }
+    }
+  },
+};
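In the hunk above, `knex('streams').whereBetween(...)` resolves to an array of rows, so `stream.id` would be undefined and the insert would write a null `stream_id`. A sketch of the probable intent — a fragment of the same `up(knex)` body, taking the first stream in the ±3-hour window and skipping VODs with no match:

```js
// Pick one nearest stream; guard against an empty window:
const stream = await knex('streams')
  .whereBetween('date', [startDate, endDate])
  .first();

if (stream) {
  await knex('vods_stream_links').insert({
    stream_id: stream.id,
    vod_id: vod.id,
    vod_order: 1,
  });
}
```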
@@ -0,0 +1,26 @@
+
+const fetch = require('node-fetch')
+
+let problemUrls = []
+
+async function checkUrl(url) {
+  const res = await fetch(url);
+  if (!res.ok || !res?.headers?.get('x-bz-file-name') || !res?.headers?.get('x-bz-file-id')) problemUrls.push(url)
+}
+
+
+
+module.exports = {
+  async up(knex) {
+
+    // Get all VODs from the database
+    const vods = await knex.select('*').from('vods');
+
+    // sanity check every B2 URL
+    for (const vod of vods) {
+      await checkUrl(vod.video_src)
+    }
+
+    process.exit(5923423)
+  },
+};
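One quirk worth flagging: POSIX exit statuses are a single byte, so the `process.exit(5923423)` above actually terminates with `5923423 % 256`, and `problemUrls` is collected but never printed before exit. A tiny check:

```js
console.log(5923423 % 256); // 95 — the status a shell would actually observe
```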
@@ -0,0 +1,98 @@
+
+const fetch = require('node-fetch')
+
+// greets chatgpt
+async function getFileDetailsFromUrl(url) {
+  const controller = new AbortController();
+  const signal = controller.signal;
+
+  const options = {
+    signal,
+  };
+
+  let retries = 10;
+
+  while (retries) {
+    console.log(`fetching ${url}`);
+    const timeoutId = setTimeout(() => {
+      console.log('fetch timed out, aborting...');
+      controller.abort();
+    }, 5000);
+
+    try {
+      const res = await fetch(url, options);
+
+      clearTimeout(timeoutId);
+
+      console.log('finished fetch');
+      if (!res.ok) throw new Error(`problem while getting file from url with url ${url}`);
+      if (!res?.headers?.get('x-bz-file-name')) throw new Error(`${url} did not have a x-bz-file-name in the response headers`);
+      if (!res?.headers?.get('x-bz-file-id')) throw new Error(`${url} did not have a x-bz-file-id in the response headers`);
+
+      return {
+        key: res.headers.get('x-bz-file-name'),
+        url: url,
+        uploadId: res.headers.get('x-bz-file-id'),
+      };
+    } catch (err) {
+      clearTimeout(timeoutId);
+      retries--;
+
+      if (retries === 0) {
+        console.error(`Could not fetch file details from URL: ${url}.`);
+        throw err;
+      }
+
+      console.warn(`Retrying fetch (${retries} attempts left)`);
+    }
+  }
+}
+
+
+
+
+
+module.exports = {
+  async up(knex) {
+    // You have full access to the Knex.js API with an already initialized connection to the database
+
+    // Get all VODs from the database
+    const vods = await knex.select('*').from('vods');
+
+
+    // Process each VOD
+    for (const vod of vods) {
+
+      // courtesy timer
+      await new Promise((resolve) => setTimeout(resolve, 1000))
+
+      console.log(vod)
+      // Get the file details from the VOD's video source URL
+      if (vod?.video_src) {
+        try {
+          const fileDetails = await getFileDetailsFromUrl(vod.video_src);
+
+          // Insert the B2 file into the database
+          const [file] = await knex('b2_files').insert({
+            url: fileDetails.url,
+            key: fileDetails.key,
+            upload_id: fileDetails.uploadId,
+          }).returning('id');
+
+          console.log(file)
+          console.log(`attempting to insert vod_id:${vod.id}, b_2_file_id:${file.id} for videoSrcB2`)
+
+          // Link the B2 file to the VOD
+          await knex('vods_video_src_b_2_links').insert({
+            vod_id: vod.id,
+            b_2_file_id: file.id,
+          });
+        } catch (e) {
+          console.error(e)
+          console.log(`there was an error so we are skipping vod ${vod.id}`)
+        }
+      } else {
+        console.log(`${vod.id} has no video_src. skipping.`)
+      }
+    }
+  },
+};
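A caveat in the helper above: the `AbortController` is created once, outside the retry loop. After the first timeout fires, its signal stays aborted permanently, so every subsequent retry's `fetch` rejects immediately and the remaining attempts burn through at once. A sketch of the loop body with a fresh controller per attempt (a fragment of the same async function):

```js
// Per-attempt controller so a previous abort can't poison later retries:
while (retries) {
  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), 5000);
  try {
    const res = await fetch(url, { signal: controller.signal });
    clearTimeout(timeoutId);
    // ...header checks and return value as above...
  } catch (err) {
    clearTimeout(timeoutId);
    retries--;
    if (retries === 0) throw err;
    console.warn(`Retrying fetch (${retries} attempts left)`);
  }
}
```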
@@ -0,0 +1,43 @@
+
+// up until now, tags have been attached directly to each vod object.
+// now, tags are not attached to vods.
+// instead, tag-vod-relations are used to associate a tag with a vod
+
+// what we need to do in this migration is
+// * create a new tag-vod-relation for each tag in each vod
+// * delete tags field in vods
+
+module.exports = {
+  async up(knex) {
+
+    console.log('2023.05.14 - migrate tags to tag_vod_relations')
+
+    // get all tags_vods_links
+    // for each, create a tag-vod-relation
+    const tagsVodsLinks = await knex.select('*').from('tags_vods_links')
+
+    for (const tvl of tagsVodsLinks) {
+      // Create a tag-vod-relation entry for each tag
+      const tvr = await knex('tag_vod_relations')
+        .insert({
+          created_at: new Date(),
+          updated_at: new Date(),
+          creator_id: 1
+        })
+        .returning(
+          ['id']
+        )
+
+      await knex('tag_vod_relations_tag_links').insert({
+        tag_vod_relation_id: tvr[0].id,
+        tag_id: tvl.tag_id
+      })
+
+      await knex('tag_vod_relations_vod_links').insert({
+        tag_vod_relation_id: tvr[0].id,
+        vod_id: tvl.vod_id
+      })
+    }
+
+  },
+};
@@ -0,0 +1,12 @@
+
+// previously, we tagged vods directly on the vod content-type
+// now, we use tag-vod-relation to relate tags to vods.
+// thus, we want to get rid of vod.tags
+// and also tag.vods
+
+module.exports = {
+  async up(knex) {
+    console.log('2023.05.15 - drop tags_vods_links')
+    await knex.schema.dropTable('tags_vods_links')
+  }
+}
@@ -0,0 +1,110 @@
+
+// const fetch = require('node-fetch')
+
+// // greets chatgpt
+// async function getFileDetailsFromUrl(url) {
+//   const controller = new AbortController();
+//   const signal = controller.signal;
+
+//   const options = {
+//     signal,
+//   };
+
+//   let retries = 10;
+
+//   while (retries) {
+//     console.log(`fetching ${url}`);
+//     const timeoutId = setTimeout(() => {
+//       console.log('fetch timed out, aborting...');
+//       controller.abort();
+//     }, 5000);
+
+//     try {
+//       const res = await fetch(url, options);
+
+//       clearTimeout(timeoutId);
+
+//       console.log('finished fetch');
+//       if (!res.ok) throw new Error(`problem while getting file from url with url ${url}`);
+//       if (!res?.headers?.get('x-bz-file-name')) throw new Error(`${url} did not have a x-bz-file-name in the response headers`);
+//       if (!res?.headers?.get('x-bz-file-id')) throw new Error(`${url} did not have a x-bz-file-id in the response headers`);
+
+//       return {
+//         key: res.headers.get('x-bz-file-name'),
+//         url: url,
+//         uploadId: res.headers.get('x-bz-file-id'),
+//       };
+//     } catch (err) {
+//       clearTimeout(timeoutId);
+//       retries--;
+
+//       if (retries === 0) {
+//         console.error(`Could not fetch file details from URL: ${url}.`);
+//         throw err;
+//       }
+
+//       console.warn(`Retrying fetch (${retries} attempts left)`);
+//     }
+//   }
+// }
+
+
+
+
+
+module.exports = {
+  async up(knex) {
+    // You have full access to the Knex.js API with an already initialized connection to the database
+
+
+    // we iterate through the local, non-strapi backup db first.
+    // get list of all tags
+    // for each tag
+    //   * get list of related vods
+    //   * create relation in Strapi
+    //   *
+
+
+
+    // Get all VODs from the database
+    const vods = await knex.select('*').from('vods');
+
+
+    // Process each VOD
+    for (const vod of vods) {
+
+      // courtesy timer
+      await new Promise((resolve) => setTimeout(resolve, 10))
+
+      // @todo
+
+      console.log(vod)
+      // Get the file details from the VOD's video source URL
+      if (vod?.video_src) {
+        try {
+          const fileDetails = await getFileDetailsFromUrl(vod.video_src);
+
+          // Insert the B2 file into the database
+          const [file] = await knex('b2_files').insert({
+            url: fileDetails.url,
+            key: fileDetails.key,
+            upload_id: fileDetails.uploadId,
+          }).returning('id');
+
+          console.log(file)
+          console.log(`attempting to insert vod_id:${vod.id}, b_2_file_id:${file.id} for videoSrcB2`)
+
+          // Link the B2 file to the VOD
+          await knex('vods_video_src_b_2_links').insert({
+            vod_id: vod.id,
+            b_2_file_id: file.id,
+          });
+        } catch (e) {
+          console.error(e)
+          console.log(`there was an error so we are skipping vod ${vod.id}`)
+        }
+      } else {
+        console.log(`${vod.id} has no video_src. skipping.`)
+      }
+    }
+  },
+};
@@ -0,0 +1,124 @@
+'use strict'
+
+
+require('dotenv').config()
+
+const { Client } = require('pg')
+const fetch = require('node-fetch')
+const _ = require('lodash');
+const ogVods = require('../og-tags.json')
+
+
+// const slugify = require('slugify')
+
+
+// function slugifyString (str) {
+//   return slugify(str, {
+//     replacement: '-', // replace spaces with replacement character, defaults to `-`
+//     remove: undefined, // remove characters that match regex, defaults to `undefined`
+//     lower: true, // convert to lower case, defaults to `false`
+//     strict: true, // strip special characters except replacement, defaults to `false`
+//     locale: 'en', // language code of the locale to use
+//     trim: true // trim leading and trailing replacement chars, defaults to `true`
+//   })
+// }
+
+
+async function associateTagWithVodsInStrapi (tagId, vodsIds) {
+  const res = await fetch(`${process.env.STRAPI_URL}/api/tags/${tagId}`, {
+    method: 'PUT',
+    headers: {
+      'authorization': `Bearer ${process.env.STRAPI_API_KEY}`
+    },
+    data: {
+      vods: [vodsIds]
+    }
+  })
+  const json = await res.json()
+
+
+  if (!res.ok) throw new Error(JSON.stringify(json))
+}
+
+
+
+async function associateVodWithTagsInStrapi (knex, vodId, tagsIds) {
+  console.log(`updating vodId:${vodId} with tagsIds:${tagsIds}`)
+  for (const tagId of tagsIds) {
+    // see if it exists already
+    const rows = await knex.select('*').from('tags_vods_links').where({
+      'vod_id': vodId,
+      'tag_id': tagId
+    })
+    if (rows.length === 0) {
+      await knex('tags_vods_links').insert({
+        vod_id: vodId,
+        tag_id: tagId
+      });
+    }
+  }
+}
+
+async function getStrapiVodByAnnounceUrl (knex, announceUrl) {
+  const rows = await knex.select('*').from('vods').where('announce_url', announceUrl)
+  return (rows[0])
+}
+
+
+
+
+async function getStrapiTagByName (knex, tag) {
+  const rows = await knex.select('*').from('tags').where({ 'name': tag })
+  return rows[0]
+}
+
+
+
+
+
+module.exports = {
+  async up(knex) {
+    // You have full access to the Knex.js API with an already initialized connection to the database
+
+    for (const vod of ogVods) {
+      // get matching vod in strapi
+      console.log(vod)
+      if (vod.announceUrl) {
+        const strapiVod = await getStrapiVodByAnnounceUrl(knex, vod.announceUrl)
+
+        if (strapiVod) {
+          // we've got a matching vod
+
+          if (vod.tags) {
+            console.log(`source vod has tags: ${vod.tags}`)
+
+            let strapiTagsIds = []
+
+            // for each tag, get the matching strapi tag ID
+            for (const tag of vod.tags) {
+              // lookup the strapi tag id
+              const strapiTag = await getStrapiTagByName(knex, tag)
+              if (!!strapiTag) {
+                strapiTagsIds.push(strapiTag.id)
+              }
+            }
+
+            console.log(`we are adding the following strapiTagsIds to vod ID ${strapiVod.id}: ${strapiTagsIds}`)
+
+            // create relations between matching vod and the tags
+            await associateVodWithTagsInStrapi(knex, strapiVod.id, strapiTagsIds)
+
+          }
+        }
+      }
+    }
+
+    // Get all VODs from the database
+    const vods = await knex.select('*').from('vods');
+
+    // Process each VOD
+    for (const vod of vods) {
+
+    }
+  }
+}
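One caveat in `associateTagWithVodsInStrapi` above: node-fetch has no `data` option (that's axios's name for it), so the payload is silently dropped and the PUT goes out with no body. A sketch using fetch's actual `body` option, under the assumption that the Strapi v4 REST convention of wrapping payloads in `data` applies here:

```js
// Hypothetical corrected request for the PUT above:
const res = await fetch(`${process.env.STRAPI_URL}/api/tags/${tagId}`, {
  method: 'PUT',
  headers: {
    'authorization': `Bearer ${process.env.STRAPI_API_KEY}`,
    'content-type': 'application/json',
  },
  body: JSON.stringify({ data: { vods: vodsIds } }), // fetch uses `body`, not `data`
});
```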
@@ -0,0 +1,70 @@
+module.exports = {
+  async up(knex) {
+
+    console.log('Create vtubers table')
+    await knex.schema.createTable('vtubers', (table) => {
+      table.increments('id').primary();
+      table.string('displayName').notNullable();
+      table.string('chaturbate');
+      table.string('twitter');
+      table.string('patreon');
+      table.string('twitch');
+      table.string('tiktok');
+      table.string('onlyfans');
+      table.string('youtube');
+      table.string('linktree');
+      table.string('carrd');
+      table.string('fansly');
+      table.string('pornhub');
+      table.string('discord');
+      table.string('reddit');
+      table.string('throne');
+      table.string('instagram');
+      table.string('facebook');
+      table.string('merch');
+      table.string('slug').notNullable();
+      table.text('description1').notNullable();
+      table.text('description2');
+      table.string('image').notNullable();
+    });
+
+    console.log('Create vods_vtuber_links table')
+    await knex.schema.createTable('vods_vtuber_links', (table) => {
+      table.increments('id').primary();
+      table.integer('vod_id').unsigned().references('vods.id');
+      table.integer('vtuber_id').unsigned().references('vtubers.id');
+      table.integer('vod_order').notNullable();
+    });
+
+
+    console.log('Create a vtuber entry for ProjektMelody')
+    const projektMelody = {
+      displayName: 'ProjektMelody',
+      slug: 'projektmelody', // You can customize the slug based on your preference
+      description1: 'Description for ProjektMelody', // Add your vtuber's description here
+      image: 'http://futureporn-b2.b-cdn.net/futureporn/projekt-melody.jpg', // Replace with the image filename for ProjektMelody
+    };
+
+    console.log('Get all VODs from the database')
+    const vods = await knex.select('*').from('vods');
+
+    console.log('get projektmelody id')
+    // const [projektMelodyId] = await knex('vtubers').insert(projektMelody);
+    const projektMelodyId = 1
+
+    console.log(`projektmelodyId is : ${projektMelodyId}`)
+
+    console.log(`For each VOD, associate ProjektMelody vtuber.`)
+    for (const [index, vod] of vods.entries()) {
+      console.log(`Check if vtuber_id exists in the vtubers table`)
+      const vtuber = await knex('vtubers').where('id', projektMelodyId).first();
+      if (vtuber) {
+        await knex('vods_vtuber_links').insert({
+          vtuber_id: projektMelodyId,
+          vod_id: vod.id,
+          vod_order: index + 1, // Auto-increment the vod_order number
+        });
+      }
+    }
+  },
+};
@@ -0,0 +1,18 @@
+module.exports = {
+  async up(knex) {
+    // Add the 'cdn_url' column to the 'b2_files' table
+    await knex.schema.table('b2_files', (table) => {
+      table.string('cdn_url'); // Change the data type if needed (e.g., text, varchar, etc.)
+    });
+
+    // Get all B2 Files from the database
+    const files = await knex.select('*').from('b2_files');
+
+    // For each B2 File, create cdnUrl
+    for (const [index, file] of files.entries()) {
+      const key = file.key;
+      const cdnUrl = `https://futureporn-b2.b-cdn.net/futureporn/${key}`;
+      await knex('b2_files').update({ cdn_url: cdnUrl }).where({ id: file.id });
+    }
+  },
+};
@@ -0,0 +1,30 @@
+module.exports = {
+  async up(knex) {
+
+    await knex.schema.createTable('streams', (table) => {
+      table.increments('id').primary();
+      table.string('date_str').notNullable();
+      table.string('vods');
+      table.string('vtuber');
+      table.string('tweet');
+      table.string('date');
+      table.string('cuid');
+    });
+
+
+    // Add the 'cdn_url' column to the 'b2_files' table
+    await knex.schema.table('b2_files', (table) => {
+      table.string('cdn_url'); // Change the data type if needed (e.g., text, varchar, etc.)
+    });
+
+    // Get all B2 Files from the database
+    const files = await knex.select('*').from('b2_files');
+
+    // For each B2 File, create cdnUrl
+    for (const [index, file] of files.entries()) {
+      const key = file.key;
+      const cdnUrl = `https://futureporn-b2.b-cdn.net/futureporn/${key}`;
+      await knex('b2_files').update({ cdn_url: cdnUrl }).where({ id: file.id });
+    }
+  },
+};
@@ -0,0 +1,29 @@
+
+module.exports = {
+  async up(knex) {
+
+    console.log(`MIGRATION-- 2024-01-14-add-date2-to-streams.js`);
+
+    // Check if the 'date_2' column already exists in the 'streams' table
+    const hasColumn = await knex.schema.hasColumn('streams', 'date_2');
+
+    if (!hasColumn) {
+      console.log(`Adding the 'date_2' column to the 'streams' table`);
+      await knex.schema.table('streams', (table) => {
+        table.string('date_2');
+      });
+    }
+
+    // Get all streams from the database
+    const streams = await knex.select('*').from('streams');
+
+    // For each stream, populate date_2 if it's null or undefined
+    for (const [index, stream] of streams.entries()) {
+      if (stream.date_2 === null && stream.date_str !== null) {
+        const result = await knex('streams').update({ date_2: stream.date_str }).where({ id: stream.id });
+      }
+    }
+
+  },
+};
+
@@ -0,0 +1,49 @@
+
+module.exports = {
+  async up(knex) {
+
+    console.log(`MIGRATION-- 2024-01-15-add-platform-streams.js`);
+
+    // Check if the 'platform' column already exists in the 'streams' table
+    const hasColumn = await knex.schema.hasColumn('streams', 'platform');
+
+    if (!hasColumn) {
+      console.log(`Adding the 'platform' column to the 'streams' table`);
+      await knex.schema.table('streams', (table) => {
+        table.string('platform');
+      });
+    }
+
+    // Get all streams from the database
+    const streams = await knex.select('*').from('streams');
+
+    // For each stream, populate platform based on the related tweet data
+    for (const [index, stream] of streams.entries()) {
+
+      const tweetLink = await knex('streams_tweet_links')
+        .where({ stream_id: stream.id })
+        .first();
+
+      if (tweetLink) {
+        console.log(tweetLink);
+
+        const tweet = await knex('tweets')
+          .where({ id: tweetLink.tweet_id })
+          .first();
+
+        console.log(tweet);
+
+        if (!!tweet) {
+          console.log(`stream ${stream.id} tweet tweet.is_chaturbate_invite=${tweet.is_chaturbate_invite}, tweet.is_fansly_invite=${tweet.is_fansly_invite}`);
+          await knex('streams').update({
+            is_chaturbate_stream: !!tweet.is_chaturbate_invite,
+            is_fansly_stream: !!tweet.is_fansly_invite
+          }).where({ id: stream.id });
+        }
+      }
+
+    }
+
+  },
+};
+
File diff suppressed because it is too large
Binary file not shown (new image, 497 B)
@@ -0,0 +1,7 @@
+const { init } = require('@paralleldrive/cuid2');
+
+module.exports = function() {
+  const length = 10;
+  const genCuid = init({ length });
+  return genCuid();
+}
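Usage sketch for the helper above. Note it calls `init` on every invocation; hoisting the generator to module scope would build it once, though the output is the same either way:

```js
// Assuming the relative require path the add-cuid-to-vods migration uses:
const generateCuid = require('../../misc/generateCuid');

console.log(generateCuid()); // e.g. 'pfh0haxfpz' — a 10-character cuid2
```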
Some files were not shown because too many files have changed in this diff