opentracker dockerfile created
This commit is contained in:
parent 8aa8f231ed
commit 5ebea988fa
@ -94,12 +94,27 @@ ssh:
#
accessories:

  # superstreamer:
  #   host: 45.76.57.101
  #   env:
  #     clear:
  #       PUBLIC_API_ENDPOINT: https://api.superstreamer.futureporn.net
  #       PUBLIC_STITCHER_ENDPOINT: http://localhost:52002

  qbittorrent:
    image: lscr.io/linuxserver/qbittorrent:latest
    host: 45.76.57.101
    port: "127.0.0.1:8080:8080"
    env:
      clear:
        PUID: "1000"
        PGID: "1000"
        TZ: "Etc/UTC"
        WEBUI_PORT: "8080"
        TORRENTING_PORT: "6881"
    proxy:
      ssl: true
      forward_headers: true
      app_port: 8080
      host: qbittorrent.futureporn.net
      healthcheck:
        path: /
    volumes:
      - /root/.cache/futureporn:/root/.cache/futureporn


  db:
    image: postgres:15

@ -11,7 +11,8 @@
    "ruby@latest",
    "chisel@latest",
    "bento4@latest",
    "shaka-packager@latest"
    "shaka-packager@latest",
    "mktorrent@latest"
  ],
  "env": {
    "DEVBOX_COREPACK_ENABLED": "true",

48 devbox.lock
@ -341,6 +341,54 @@
        }
      }
    },
    "mktorrent@latest": {
      "last_modified": "2025-01-25T23:17:58Z",
      "resolved": "github:NixOS/nixpkgs/b582bb5b0d7af253b05d58314b85ab8ec46b8d19#mktorrent",
      "source": "devbox-search",
      "version": "1.1",
      "systems": {
        "aarch64-darwin": {
          "outputs": [
            {
              "name": "out",
              "path": "/nix/store/lwa8h4w9jicy7c67bhnmv78vlix19ma1-mktorrent-1.1",
              "default": true
            }
          ],
          "store_path": "/nix/store/lwa8h4w9jicy7c67bhnmv78vlix19ma1-mktorrent-1.1"
        },
        "aarch64-linux": {
          "outputs": [
            {
              "name": "out",
              "path": "/nix/store/iq1mqwjl37dlzaxli3dnj4lv1bhi6vaf-mktorrent-1.1",
              "default": true
            }
          ],
          "store_path": "/nix/store/iq1mqwjl37dlzaxli3dnj4lv1bhi6vaf-mktorrent-1.1"
        },
        "x86_64-darwin": {
          "outputs": [
            {
              "name": "out",
              "path": "/nix/store/di0fgl55xp7pwjfi0zgxywn8ky36ijar-mktorrent-1.1",
              "default": true
            }
          ],
          "store_path": "/nix/store/di0fgl55xp7pwjfi0zgxywn8ky36ijar-mktorrent-1.1"
        },
        "x86_64-linux": {
          "outputs": [
            {
              "name": "out",
              "path": "/nix/store/rrdq0l681zc8ljlymq7i5jsq7sp2xrrr-mktorrent-1.1",
              "default": true
            }
          ],
          "store_path": "/nix/store/rrdq0l681zc8ljlymq7i5jsq7sp2xrrr-mktorrent-1.1"
        }
      }
    },
    "nodejs@20": {
      "last_modified": "2024-12-23T21:10:33Z",
      "plugin_version": "0.0.2",

@ -1,12 +1,52 @@
services:

  opentracker:
    image: anthonyzou/opentracker:latest
    build:
      context: .
      dockerfile: dockerfiles/opentracker.dockerfile
    container_name: opentracker
    environment:
      - WHITELIST_FEED_URL=http://bright:4000/torrents/whitelist?type=json
    ports:
      - "6969:6969/tcp"
      - "6969:6969/udp"
      - "8666:8666/tcp"
    volumes:
      - ./packages/opentracker/opentracker.conf:/etc/opentracker.conf:ro
      - opentracker-etc:/etc/opentracker
      - opentracker-var:/var/run/opentracker

  # qbittorrent:
  #   build:
  #     context: .
  #     dockerfile: dockerfiles/qbittorrent.dockerfile
  #   environment:
  #     - PUID=1000
  #     - PGID=1000
  #     - UMASK=002
  #     - TZ=Etc/UTC
  #     - WEBUI_PORT=8181
  #     - WEBUI_PASSWORD=passwordpassword
  #   volumes:
  #     - cache:/root/.cache/futureporn
  #   container_name: qbittorrent
  #   ports:
  #     - "8181:8181/tcp"

  # ## socat for exposing opentracker's named pipe (adder.fifo) to the docker network
  # ## we use the named pipe to update the list of whitelisted torrents without having to reload the entire (huge) whitelist
  # opentracker-socat:
  #   build:
  #     context: .
  #     dockerfile: dockerfiles/opentracker-socat.dockerfile
  #   container_name: opentracker-socat
  #   ports:
  #     - '8666:8666/tcp'
  #   volumes:
  #     # we use this volume to share adder.fifo
  #     - opentracker-var:/var/run/opentracker
  #   depends_on:
  #     - opentracker
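
The socat bridge above forwards TCP port 8666 into opentracker's adder.fifo, so whitelisting a torrent becomes a one-line TCP write. A minimal Elixir sketch of a client for that bridge (the `opentracker` hostname and the 8666 port come from this compose file; the module and function names are illustrative):

    # Append one info_hash to opentracker's dynamic whitelist by sending a
    # single newline-terminated line through the socat TCP bridge.
    defmodule WhitelistClient do
      def add(info_hash) do
        {:ok, socket} = :gen_tcp.connect(~c"opentracker", 8666, [:binary, active: false])
        :ok = :gen_tcp.send(socket, info_hash <> "\n")
        :gen_tcp.close(socket)
      end
    end
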
  bright:
    container_name: bright

@ -22,6 +62,8 @@ services:
      DATABASE_HOSTNAME: db
      SUPERSTREAMER_URL: http://superstreamer-api:52001
      PUBLIC_S3_ENDPOINT: https://fp-dev.b-cdn.net
      BT_TRACKER_URL: https://tracker.futureporn.net/announce
      SITE_URL: https://futureporn.net
    env_file:
      - .kamal/secrets.development
    ports:

@ -29,8 +71,8 @@ services:
    depends_on:
      - db
    # volumes:
    #   - cache:/root/.cache/futureporn
    #   - ./services/bright/lib:/app/lib
    # volumes:
    #   - /home/cj/Documents/ueberauth_patreon:/app/contrib/ueberauth_patreon
    develop:
      watch:

@ -74,6 +116,7 @@ services:
      - '5432:5432'

  pgadmin:
    container_name: pgadmin
    image: dpage/pgadmin4
    ports:
      - '5050:5050'

@ -86,4 +129,7 @@ services:

volumes:
  pg_data:
  redis_data:
  redis_data:
  cache:
  opentracker-var:
  opentracker-etc:

@ -1,28 +0,0 @@
## Important! Build context is the ROOT of the project.
## this keeps the door open for future possibility of shared code between pnpm workspace packages


FROM oven/bun:1 AS base
RUN apt-get update && apt-get install -y \
    curl

RUN mkdir -p /tmp/dev
WORKDIR /tmp/dev
COPY ./contrib/superstreamer .
RUN ls -la

# Install ffmpeg, ffprobe
RUN bun run install-bin


FROM oven/bun:1 AS install
RUN bun install
RUN bun run test
RUN bun run build
USER bun
EXPOSE 7991/tcp
WORKDIR /tmp/dev/packages/artisan
RUN ls -la ./dist
ENTRYPOINT [ "bun", "run", "./dist/index.js" ]

@ -1,32 +0,0 @@
FROM node:20 AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR /app
RUN corepack enable && corepack prepare pnpm@9.6.0 --activate
ENTRYPOINT ["pnpm"]

FROM base AS install
COPY pnpm-lock.yaml .npmrc package.json .
COPY ./services/bot/ ./services/bot/
COPY ./packages/types/ ./packages/types/
COPY ./packages/utils/ ./packages/utils/
COPY ./packages/fetchers/ ./packages/fetchers/

RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm fetch
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --recursive --frozen-lockfile --prefer-offline


FROM install AS build
RUN pnpm -r build
RUN pnpm deploy --filter=bot --prod /prod/bot


FROM install AS dev
WORKDIR /app/services/bot
CMD ["run", "dev"]


FROM base AS bot
COPY --from=build /prod/bot .
CMD ["run", "start"]

@ -22,9 +22,11 @@ ARG RUNNER_IMAGE="debian:${DEBIAN_VERSION}"
FROM ${BUILDER_IMAGE} AS builder

# install build dependencies
RUN apt-get update -y && apt-get install -y build-essential git inotify-tools ffmpeg \
RUN apt-get update -y && apt-get install -y build-essential git inotify-tools ffmpeg python3 python3-pip \
    && pip install torrentfile \
    && apt-get clean && rm -f /var/lib/apt/lists/*_*


# prepare build dir
WORKDIR /app

@ -1,57 +0,0 @@
FROM node:20-alpine AS base
## Install dependencies only when needed
## Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
RUN corepack enable && corepack prepare pnpm@9.6.0 --activate

## Enable `pnpm add --global` on Alpine Linux by setting
## home location environment variable to a location already in $PATH
## https://github.com/pnpm/pnpm/issues/784#issuecomment-1518582235
ENV PNPM_HOME=/usr/local/bin

# update and install latest dependencies, add dumb-init package
# add a non root user
RUN apk update && apk upgrade && apk add dumb-init ffmpeg make gcc g++ python3

## install yt-dlp
RUN wget -O /usr/local/bin/yt-dlp https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp && chmod +x /usr/local/bin/yt-dlp

WORKDIR /app

FROM base AS build
## Copy the manifests and lockfiles into the build context
COPY package.json pnpm-lock.yaml pnpm-workspace.yaml .npmrc .
COPY ./services/capture/package.json ./services/capture/pnpm-lock.yaml ./services/capture/
COPY ./packages/types/package.json ./packages/types/pnpm-lock.yaml ./packages/types/
COPY ./packages/utils/package.json ./packages/utils/pnpm-lock.yaml ./packages/utils/
COPY ./packages/fetchers/package.json ./packages/fetchers/pnpm-lock.yaml ./packages/fetchers/

## install npm packages
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm fetch
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --recursive --prefer-offline

## Copy in all project files
COPY ./services/capture/ ./services/capture/
COPY ./packages/types/ ./packages/types/
COPY ./packages/utils/ ./packages/utils/
COPY ./packages/fetchers/ ./packages/fetchers/

## Run the build process and generate the artifacts
RUN pnpm run -r build
RUN mkdir -p /prod/capture
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm --filter=@futureporn/capture deploy --prod /prod/capture



FROM build AS dev
WORKDIR /app/services/capture
ENTRYPOINT ["pnpm", "run", "dev"]


## start the app with dumb init to spawn the Node.js runtime process
## with signal support
## The mode @futureporn/capture uses when starting is determined by FUNCTION environment variable. (worker|api)
FROM base AS capture
ENV HOSTNAME=0.0.0.0 NODE_ENV=production
COPY --from=build /prod/capture .
CMD [ "dumb-init", "node", "dist/index.js" ]

@ -1,63 +0,0 @@
## factory.dockerfile
##
## @futureporn/factory is the system component which processes video segments into a VOD.
## factory does tasks such as thumbnail generation, video encoding, file transfers, strapi record creation, etc.


FROM node:20 AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR /app
COPY --from=mwader/static-ffmpeg:7.0.2 /ffmpeg /usr/local/bin/
COPY --from=mwader/static-ffmpeg:7.0.2 /ffprobe /usr/local/bin/
RUN corepack enable && corepack prepare pnpm@9.6.0 --activate
ENTRYPOINT ["pnpm"]

FROM base AS install
WORKDIR /app
RUN mkdir -p /app/services/factory && mkdir -p /prod/factory

## Copy manifests, lockfiles, and configs into docker context
COPY package.json pnpm-lock.yaml .npmrc .
COPY ./packages/utils/pnpm-lock.yaml ./packages/utils/package.json ./packages/utils/
COPY ./packages/fetchers/package.json ./packages/fetchers/pnpm-lock.yaml ./packages/fetchers/
COPY ./packages/storage/pnpm-lock.yaml ./packages/storage/package.json ./packages/storage/
COPY ./packages/types/pnpm-lock.yaml ./packages/types/package.json ./packages/types/
COPY ./services/factory/pnpm-lock.yaml ./services/factory/package.json ./services/factory/

## Install npm packages
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm fetch
## we install node-gyp explicitly in order for sharp to install properly
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install -g node-gyp --prefer-offline
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --recursive --frozen-lockfile --prefer-offline
## Copy package code into docker context
COPY ./packages/utils/ ./packages/utils/
COPY ./packages/fetchers/ ./packages/fetchers/
RUN ls -la /app/packages/utils/node_modules/prevvy/
RUN cat ./packages/utils/package.json
COPY ./packages/storage/ ./packages/storage/
COPY ./packages/types/ ./packages/types/
COPY ./services/factory/ ./services/factory/
# we are grabbing the mp4 files from capture so we can run tests with them
COPY ./services/capture/src/fixtures ./services/capture/src/fixtures


FROM install AS build
## Transpile TS into JS
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm -r build

## Copy all production code into one place
## `pnpm deploy` copies all dependencies into an isolated node_modules directory inside the target dir
## @see https://pnpm.io/cli/deploy
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm deploy --filter=@futureporn/factory --prod /prod/factory

FROM install AS dev
WORKDIR /app/services/factory
RUN ls -lash
CMD ["run", "dev"]

FROM base AS prod
COPY --from=build /prod/factory .
RUN ls -la .
CMD ["start"]

@ -1,41 +0,0 @@
## Important! Build context is the ROOT of the project.
## this keeps the door open for future possibility of shared code between pnpm workspace packages

# use the official Bun image
# see all versions at https://hub.docker.com/r/oven/bun/tags
FROM oven/bun:1 AS base
WORKDIR /usr/src/app

# install dependencies into temp directory
# this will cache them and speed up future builds
FROM base AS install
RUN mkdir -p /temp/dev
COPY ./services/htmx/package.json ./services/htmx/bun.lockb /temp/dev/
RUN cd /temp/dev && bun install --frozen-lockfile

# install with --production (exclude devDependencies)
RUN mkdir -p /temp/prod
COPY ./services/htmx/package.json ./services/htmx/bun.lockb /temp/prod/
RUN cd /temp/prod && bun install --frozen-lockfile --production

# copy node_modules from temp directory
# then copy all (non-ignored) project files into the image
FROM base AS prerelease
COPY --from=install /temp/dev/node_modules node_modules
COPY . .

# [optional] tests & build
ENV NODE_ENV=production
RUN bun test
RUN bun run build

# copy production dependencies and source code into final image
FROM base AS release
COPY --from=install /temp/prod/node_modules node_modules
COPY --from=prerelease /usr/src/app/index.ts .
COPY --from=prerelease /usr/src/app/package.json .

# run the app
USER bun
EXPOSE 7991/tcp
ENTRYPOINT [ "bun", "run", "index.ts" ]

@ -1,14 +0,0 @@
# Reference-- https://pnpm.io/docker

FROM node:20-alpine AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
WORKDIR /app
COPY ./packages/link2cid/package.json /app
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --prod
COPY ./packages/link2cid/index.js /app
COPY ./packages/link2cid/src /app/src
ENTRYPOINT ["pnpm"]
CMD ["start"]

@ -1,58 +0,0 @@
## d.mailbox.dockerfile
##
## @todo future improvement might be merging the dockerfiles for the various monorepo packages.
## this is not an easy task, so I'm not doing it right now.
## "make it work, make it right, make it fast" (in that order)
## Right now we are making things work with separate dockerfiles for each package.
## One thing to determine is build speed. If we're developing in Tilt and have to wait 20 minutes for the build to complete
## every time we change a file in any dependent package, then merging dockerfiles is not desirable.
## One of the slow parts of the docker build is copying all package directories into the build context.
## If we have a lot of packages, it takes a long time.
## I have yet to determine performance benchmarks, so it's unclear if merging dockerfiles is desirable.
##
## @todo another performance improvement would almost certainly be to move strapi, next, and similar packages from `packages/*` into `services/*`
## this way, when we're building the various @futureporn library-type packages, we don't have to filter and COPY the dependency packages one-by-one.
## instead, we add the entire `packages/*` directory and then move on to the next step.

FROM node:20 AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR /app
RUN corepack enable && corepack prepare pnpm@9.6.0 --activate

FROM base AS build
WORKDIR /app
RUN mkdir -p /app/services/mailbox && mkdir -p /prod/mailbox

## Copy manifests, lockfiles, and configs into docker context
COPY package.json pnpm-lock.yaml .npmrc .
COPY ./packages/storage/pnpm-lock.yaml ./packages/storage/package.json ./packages/storage/
COPY ./packages/types/pnpm-lock.yaml ./packages/types/package.json ./packages/types/
COPY ./packages/utils/pnpm-lock.yaml ./packages/utils/package.json ./packages/utils/
COPY ./services/mailbox/pnpm-lock.yaml ./services/mailbox/package.json ./services/mailbox/

## Install npm packages
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm fetch
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --recursive --frozen-lockfile --prefer-offline

## Copy package code into docker context
COPY ./packages/storage/ ./packages/storage/
COPY ./packages/types/ ./packages/types/
COPY ./packages/utils/ ./packages/utils/
COPY ./services/mailbox/ ./services/mailbox/

## Transpile TS into JS
RUN pnpm --filter=@futureporn/mailbox build
# RUN pnpm -r build

## Copy all production code into one place
## `pnpm deploy` copies all dependencies into an isolated node_modules directory inside the target dir
## @see https://pnpm.io/cli/deploy
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm deploy --filter=@futureporn/mailbox --prod /prod/mailbox


FROM base AS mailbox
COPY --from=build /prod/mailbox .
RUN ls -la .
ENTRYPOINT ["pnpm", "start"]

@ -1,20 +0,0 @@
FROM node:20-alpine AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR /app
RUN corepack enable && corepack prepare pnpm@9.6.0 --activate

FROM base AS build
COPY ./pnpm-workspace.yaml ./.npmrc .
COPY ./services/migrations-data/package.json ./services/migrations-data/pnpm-lock.yaml ./services/migrations-data/
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm fetch
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile --prefer-offline
COPY ./services/migrations-data/ ./services/migrations-data/
RUN pnpm --filter=@futureporn/migrations-data deploy --prod /prod/migrations-data
RUN ls -las /prod/migrations-data

FROM base AS migrations-data
ENV NODE_ENV=production
COPY --from=build /prod/migrations-data .
ENTRYPOINT ["pnpm", "start"]

@ -1,20 +0,0 @@
FROM node:20-alpine AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR /app
RUN corepack enable && corepack prepare pnpm@9.6.0 --activate

FROM base AS build
COPY ./pnpm-workspace.yaml ./.npmrc .
COPY ./services/migrations-schema/package.json ./services/migrations-schema/pnpm-lock.yaml ./services/migrations-schema/
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm fetch
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile --prefer-offline
COPY ./services/migrations-schema/ ./services/migrations-schema/
RUN pnpm --filter=@futureporn/migrations-schema deploy --prod /prod/migrations-schema
RUN ls -las /prod/migrations-schema

FROM base AS migrations-schema
ENV NODE_ENV=production
COPY --from=build /prod/migrations-schema .
ENTRYPOINT ["pnpm", "start"]

@ -1,55 +0,0 @@
## Important! Build context is the ROOT of the project.
## this keeps the door open for future possibility of shared code between pnpm workspace packages

FROM node:20-slim AS base

FROM base AS deps
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable && corepack prepare pnpm@9.6.0 --activate
WORKDIR /app


FROM deps AS install
ARG NEXT_PUBLIC_SITE_URL=https://futureporn.net
ARG NEXT_PUBLIC_STRAPI_URL=https://portal.futureporn.net
ARG NEXT_PUBLIC_UPPY_COMPANION_URL=https://uppy.futureporn.net
ENV NEXT_PUBLIC_SITE_URL ${NEXT_PUBLIC_SITE_URL}
ENV NEXT_PUBLIC_STRAPI_URL ${NEXT_PUBLIC_STRAPI_URL}
ENV NEXT_PUBLIC_UPPY_COMPANION_URL ${NEXT_PUBLIC_UPPY_COMPANION_URL}
ENV NEXT_TELEMETRY_DISABLED 1
COPY ./certs ./certs
COPY pnpm-lock.yaml ./
RUN pnpm fetch
# COPY pnpm-lock.yaml .npmrc package.json .
COPY ./services/next ./services/next
COPY ./packages/types ./packages/types
COPY ./packages/fetchers ./packages/fetchers
COPY ./packages/utils ./packages/utils
# COPY ./packages/strapi ./packages/strapi

RUN --mount=type=cache,id=pnpm-store,target=/pnpm/store pnpm install --recursive --frozen-lockfile --prefer-offline


FROM install AS dev
WORKDIR /app/services/next
CMD ["pnpm", "run", "dev"]

FROM install AS build
RUN pnpm run -r build
# RUN pnpm --filter=next deploy --prod /prod/next ## using `pnpm deploy` doesn't work and I haven't worked out why
RUN echo "next we are next we are"
RUN ls -lash /app

FROM deps AS next
RUN apt-get update && apt-get install -y -qq --no-install-recommends dumb-init
COPY --chown=node:node --from=build /app/services/next/package.json /app/services/next/pnpm-lock.yaml ./
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --prod --frozen-lockfile
COPY --chown=node:node --from=build /app/services/next/public ./public
COPY --chown=node:node --from=build /app/services/next/.next/standalone ./
COPY --chown=node:node --from=build /app/services/next/.next/static ./.next/static
ENV TZ=UTC
ENV NODE_ENV=production
ENV HOSTNAME="0.0.0.0"
CMD [ "dumb-init", "node", "server.js" ]

@ -0,0 +1,75 @@
#
# Based on wiltonsr/opentracker-docker @see https://github.com/wiltonsr/opentracker-docker/tree/main
# ours uses -DWANT_DYNAMIC_ACCESSLIST for incremental whitelist updates via named pipe
#
FROM gcc:14 AS compile-stage

RUN apt update ; \
    apt install cvs -y

RUN adduser \
    --system --disabled-login \
    --uid 6969 --group \
    --home /etc/opentracker \
    farmhand

WORKDIR /usr/src

# Run libowfat compilation in separated layer to benefit from docker layer cache
RUN cvs -d :pserver:cvs@cvs.fefe.de:/cvs -z9 co libowfat ; \
    git clone git://erdgeist.org/opentracker ; \
    cd /usr/src/libowfat ; \
    make

# http://erdgeist.org/arts/software/opentracker/#build-instructions
RUN cd /usr/src/opentracker ; \
    # Build opentracker statically to use it in scratch image
    LDFLAGS=-static make \
    FEATURES+=-DWANT_FULLSCRAPE \
    FEATURES+=-DWANT_FULLLOG_NETWORKS \
    FEATURES+=-DWANT_LOG_NUMWANT \
    FEATURES+=-DWANT_MODEST_FULLSCRAPES \
    FEATURES+=-DWANT_SPOT_WOODPECKER \
    FEATURES+=-DWANT_ACCESSLIST_WHITE \
    FEATURES+=-DWANT_DYNAMIC_ACCESSLIST \
    ;\
    bash -c 'mkdir -pv /tmp/stage/{etc/opentracker,bin}' ; \
    bash -c 'touch /tmp/stage/etc/opentracker/{white,black}list' ; \
    cp -v opentracker.conf.sample /tmp/stage/etc/opentracker/opentracker.conf ; \
    # opentracker configuration file
    sed -ri \
    -e 's!(.*)(tracker.user)(.*)!\2 farmhand!g;' \
    -e 's!(.*)(access.whitelist)(.*)!\2 /etc/opentracker/whitelist!g;' \
    /tmp/stage/etc/opentracker/opentracker.conf ; \
    install -m 755 opentracker.debug /tmp/stage/bin ; \
    make DESTDIR=/tmp/stage BINDIR="/bin" install



FROM alpine

RUN apk add --no-cache curl bash socat
ARG S6_OVERLAY_VERSION=v3.2.0.2


COPY --from=compile-stage /tmp/stage /
COPY --from=compile-stage /etc/passwd /etc/passwd
COPY ./packages/opentracker/opentracker.conf /etc/opentracker/opentracker.conf
COPY ./packages/opentracker/root/ /

WORKDIR /etc/opentracker


EXPOSE 6969/udp
EXPOSE 6969/tcp

## use s6-overlay
ADD https://github.com/just-containers/s6-overlay/releases/download/${S6_OVERLAY_VERSION}/s6-overlay-noarch.tar.xz /tmp
RUN tar -C / -Jxpf /tmp/s6-overlay-noarch.tar.xz
ADD https://github.com/just-containers/s6-overlay/releases/download/${S6_OVERLAY_VERSION}/s6-overlay-x86_64.tar.xz /tmp
RUN tar -C / -Jxpf /tmp/s6-overlay-x86_64.tar.xz
ENTRYPOINT ["/init"]
# CMD ["/etc/s6-overlay/s6-rc.d/svc-opentracker/run"] # IDK if this is correct
# USER 6969 # I think we can instead drop privs via s6

@ -1,43 +0,0 @@
FROM node:20 AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR /app
RUN curl -s https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest | grep "browser_download_url.*yt-dlp_linux\"" | cut -d : -f 2,3 | tr -d "\"" | wget -q -O /usr/local/bin/yt-dlp -i - && chmod +x /usr/local/bin/yt-dlp
## @important If pnpm is downloading node during the build, that's a bandwidth-expensive mistake.
## Node already exists in the docker image at /usr/local/bin/node.
## We should use the node version that exists in the docker image.
## The only thing that should be downloaded by corepack is pnpm.
## The reason we explicitly set a pnpm version here is because we want to have pnpm cached.
## We haven't copied any .npmrc or package.json files at this point in the build, so corepack has no way of knowing which version to get.
## There might be a more optimal way of doing this that doesn't require syncing this version with the version in package.json
## but I'm not sure what that would look like.
##
## @important match the pnpm version between all pnpm workspace packages or multiple versions of pnpm will get installed (slow)
RUN corepack enable && corepack prepare pnpm@9.6.0 --activate
ENTRYPOINT ["pnpm"]


FROM base AS install
COPY pnpm-lock.yaml .npmrc package.json .
COPY ./services/scout/ ./services/scout/
COPY ./packages/types/ ./packages/types/
COPY ./packages/utils/ ./packages/utils/
COPY ./packages/fetchers/ ./packages/fetchers/
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm fetch
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --recursive --frozen-lockfile --prefer-offline


FROM install AS build
RUN pnpm -r build
RUN pnpm deploy --filter=scout --prod /prod/scout


FROM install AS dev
WORKDIR /app/services/scout
CMD ["run", "dev"]


FROM base AS prod
COPY --from=build /prod/scout .
CMD ["run", "start"]

@ -1,15 +0,0 @@
FROM node:20-alpine as base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR /app
RUN corepack enable

FROM base as build
COPY ./packages/uppy/package.json ./packages/uppy/pnpm-lock.yaml /app
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
COPY ./packages/uppy/ .

FROM build as run
ENTRYPOINT ["pnpm"]
CMD ["start"]

@ -0,0 +1,156 @@
# opentracker config file
#

# I) Address opentracker will listen on, using both, tcp AND udp family
#    (note, that port 6969 is implicit if omitted).
#
#    If no listen option is given (here or on the command line), opentracker
#    listens on 0.0.0.0:6969 tcp and udp.
#
#    The next variable determines if udp sockets are handled in the event
#    loop (set it to 0, the default) or are handled in blocking reads in
#    dedicated worker threads. You have to set this value before the
#    listen.tcp_udp or listen.udp statements before it takes effect, but you
#    can re-set it for each listen statement. Normally you should keep it at
#    the top of the config file.
#
#    listen.udp.workers 4
#
listen.tcp_udp 0.0.0.0
# listen.tcp_udp 192.168.0.1:80
# listen.tcp_udp 10.0.0.5:6969
#
# To only listen on tcp or udp family ports, list them this way:
#
# listen.tcp 0.0.0.0
# listen.udp 192.168.0.1:6969
# listen.tcp 127.0.0.1
# listen.udp 127.0.0.1:6969
#
# Note, that using 0.0.0.0 for udp sockets may yield surprising results.
# An answer packet sent on that socket will not necessarily have the
# source address that the requesting client may expect, but any address
# on that interface.
#

# II) If opentracker runs in a non-open mode, point it to files containing
# all torrent hashes that it will serve (shell option -w)
#
access.whitelist /etc/opentracker/whitelist
#
# or, if opentracker was compiled to allow blacklisting (shell option -b)
#
# access.blacklist ./blacklist
#
# It is pointless and hence not possible to compile black AND white
# listing, so choose one of those options at compile time. File format
# is straightforward: "<hex info hash>\n<hex info hash>\n..."
#
# IIa) You can enable dynamic changesets to accesslists by enabling
# WANT_DYNAMIC_ACCESSLIST.
#
# The suggested way to work with dynamic changeset lists is to keep a
# main accesslist file that is loaded when opentracker (re)starts and
# reloaded infrequently (hourly or daily).
#
# All changes to the accesslist (e.g. from a web frontend) should be
# both appended to or removed from that file and sent to opentracker. By
# keeping dynamic changeset lists, you can avoid reloading huge
# accesslists whenever just a single entry is added or removed.
#
# Any info_hash (format see above) written to the fifo_add file will be
# kept on a dynamic add-changeset, removed from the dynamic
# delete-changeset and treated as if it was in the main accesslist file.
# The semantic of the respective dynamic changeset depends on whether
# WANT_ACCESSLIST_WHITE or WANT_ACCESSLIST_BLACK is enabled.
#
access.fifo_add /var/run/opentracker/adder.fifo
#
# Any info_hash (format see above) written to the fifo_delete file will
# be kept on a dynamic delete-changeset, removed from the dynamic
# add-changeset and treated as if it was not in the main accesslist
# file.
#
# access.fifo_delete /var/run/opentracker/deleter.fifo
#
# If you reload the accesslist by sending SIGHUP to the tracker process,
# the dynamic lists are flushed, as opentracker assumes those lists are
# merged into the main accesslist.
#
# NOTE: While you can have multiple writers sending lines to the fifos,
# any writes larger than PIPE_BUF (see your limits.h, minimally 512
# bytes but usually 4096) may be interleaved with data sent by other
# writers. This can lead to unparsable lines of info_hashes.
#
# IIb)
# If you do not want to grant anyone access to your stats, enable the
# WANT_RESTRICT_STATS option in Makefile and bless the ip addresses
# or network allowed to fetch stats here.
#
# access.stats 192.168.0.23
# access.stats 10.1.1.23
#
# There is another way of hiding your stats. You can obfuscate the path
# to them. Normally it is located at /stats but you can configure it to
# appear anywhere on your tracker.
#
# access.stats_path stats
#
# II
# If opentracker lives behind one or multiple reverse proxies,
# every http connection appears to come from these proxies. In order to
# take the X-Forwarded-For address instead, compile opentracker with the
# WANT_IP_FROM_PROXY option and set your proxy addresses or networks here.
#
# access.proxy 10.0.1.23
# access.proxy 192.0.0.0/8
#

# III) Live sync uses udp multicast packets to keep a cluster of opentrackers
# synchronized. This option tells opentracker which port to listen for
# incoming live sync packets. The ip address tells opentracker, on which
# interface to join the multicast group, those packets will arrive.
# (shell option -i 192.168.0.1 -s 9696), port 9696 is default.
#
# livesync.cluster.listen 192.168.0.1:9696
#
# Note that two udp sockets will be opened. One on ip address 0.0.0.0
# port 9696, that will join the multicast group 224.0.42.23 for incoming
# udp packets and one on ip address 192.168.0.1 port 9696 for outgoing
# udp packets.
#
# As of now one and only one ip address must be given, if opentracker
# was built with the WANT_SYNC_LIVE feature.
#

# IV) Sync between trackers running in a cluster is restricted to packets
# coming from trusted ip addresses. While source ip verification is far
# from perfect, the authors of opentracker trust in the correct
# application of tunnels, filters and LAN setups (shell option -A).
#
# livesync.cluster.node_ip 192.168.0.4
# livesync.cluster.node_ip 192.168.0.5
# livesync.cluster.node_ip 192.168.0.6
#
# This is the admin ip address for old style (HTTP based) asynchronous
# tracker syncing.
#
# batchsync.cluster.admin_ip 10.1.1.1
#

# V) Control privilege drop behaviour.
# Put in the directory opentracker will chroot/chdir to. All black/white
# list files must be put in that directory (shell option -d).
#
#
# tracker.rootdir /usr/local/etc/opentracker
#
# Tell opentracker which user to setuid to.
#
tracker.user farmhand
#

# VI) opentracker can be told to answer to a "GET / HTTP"-request with a
# redirect to another location (shell option -r).
#
# tracker.redirect_url https://your.tracker.local/
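
Per the PIPE_BUF note in section IIa above, a single whitelist entry (40 hex chars for a v1 info_hash, 64 for v2, plus a newline) stays far below the 512-byte floor, so one write per hash is atomic and cannot be interleaved with other writers. A minimal Elixir sketch of a local writer (the fifo path comes from this config; the function name is illustrative, and opening the fifo blocks until opentracker has it open for reading):

    # Append one newline-terminated info_hash to the dynamic add-changeset.
    def add_to_whitelist(info_hash) do
      File.write("/var/run/opentracker/adder.fifo", info_hash <> "\n", [:append])
    end
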
@ -0,0 +1,10 @@
#!/command/with-contenv sh

if [ -z "$WHITELIST_FEED_URL" ]; then
  echo "Error: WHITELIST_FEED_URL is not set" >&2
  exit 1
fi

mkdir -p /var/run/opentracker
mkfifo -m a+rw /var/run/opentracker/adder.fifo
curl -sS "$WHITELIST_FEED_URL" -o /etc/opentracker/whitelist

@ -0,0 +1 @@
oneshot

@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-opentracker/script

@ -0,0 +1,2 @@
#!/bin/sh
exec s6-setuidgid farmhand /bin/opentracker -f /etc/opentracker/opentracker.conf

@ -0,0 +1 @@
longrun

@ -0,0 +1,2 @@
#!/bin/sh
exec s6-setuidgid farmhand socat -v -u TCP-LISTEN:8666,fork OPEN:/var/run/opentracker/adder.fifo,append

@ -0,0 +1 @@
longrun

@ -0,0 +1 @@
723886c0b0d9d41bfaa5276a9b2552d84ba09dd8a77d9ddcab5c9fa16cdb9770

@ -29,7 +29,9 @@ config :bright,
  superstreamer_url: System.get_env("SUPERSTREAMER_URL"),
  superstreamer_auth_token: System.get_env("SUPERSTREAMER_AUTH_TOKEN"),
  public_s3_endpoint: System.get_env("PUBLIC_S3_ENDPOINT"),
  s3_cdn_endpoint: System.get_env("PUBLIC_S3_ENDPOINT")
  s3_cdn_endpoint: System.get_env("PUBLIC_S3_ENDPOINT"),
  bittorrent_tracker_url: System.get_env("BT_TRACKER_URL"),
  site_url: System.get_env("SITE_URL")

config :bright, :buckets,
  media: System.get_env("AWS_BUCKET")

@ -15,6 +15,7 @@ config :bright, Bright.Repo,
  username: "postgres",
  password: "password",
  hostname: System.cmd("docker", ["inspect", "--format", "{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}", "futureporn-db"]) |> elem(0) |> String.trim(),
  # hostname: "futureporn-db",
  database: "bright_test#{System.get_env("MIX_TEST_PARTITION")}",
  pool: Ecto.Adapters.SQL.Sandbox,
  pool_size: System.schedulers_online() * 2

@ -6,11 +6,12 @@ defmodule Bright.B2 do
  require Logger

  alias ExAws.S3

  alias Bright.Repo
  alias Bright.Cache

  alias Bright.B2
  alias Bright.{
    Repo,
    Cache,
    B2
  }
  alias Bright.Streams.Vod

  @doc """
  Put a file from local disk to Backblaze. This function uses the filename as the S3 key. Use put/2 if you want to specify the key

@ -52,26 +53,28 @@ defmodule Bright.B2 do
  end


  def get(%Vod{} = vod) do
    object_key =
      vod.s3_cdn_url
      |> URI.parse()
      |> Map.get(:path)
      |> String.trim_leading("/")

    local_file = Cache.generate_filename(object_key)
    IO.puts "get/1 object_key=#{object_key} local_file=#{local_file}"
    get(object_key, local_file)
  end

  @doc """
  Download a file from Backblaze to local disk
  """
  def get(object_key, local_file) do
    # B2.get("test/SampleVideo_1280x720_1mb.mp4", local_file)

    bucket = Application.get_env(:bright, :aws_bucket)

    S3.download_file(bucket, object_key, local_file)
    |> ExAws.request
    |> case do
      {:ok, :done} -> {:ok, local_file}
      {:error, reason} -> {:error, reason}
    end
  end
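
For clarity, get/1 above derives the S3 object key from the VOD's CDN URL before delegating to get/2; a sketch of that transformation with an illustrative URL:

    iex> "https://fp-dev.b-cdn.net/videos/stream-123.mp4"
    ...> |> URI.parse()
    ...> |> Map.get(:path)
    ...> |> String.trim_leading("/")
    "videos/stream-123.mp4"
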
@ -33,13 +33,6 @@ defmodule Bright.Cache do

  def generate_filename(input, ext) do
    Path.join(@cache_dir, generate_basename(input, ext))

    # @cache_dir
    # input
    # |> generate_basename
    # |> Path.join(@cache_dir)
    # |> Path.rootname
    # |> Kernel.<>(".#{ext}")
  end

  def get_cache_dir do

@ -0,0 +1,34 @@
defmodule Bright.ObanWorkers.CreateTorrent do
  use Oban.Worker, queue: :default, max_attempts: 3

  alias Bright.Streams
  alias Bright.Streams.Vod
  alias Bright.{
    Repo,
    Downloader,
    B2,
    Images,
    Cache,
    Torrent,
    Tracker
  }
  require Logger
  import Ecto.Query, warn: false


  def perform(%Oban.Job{args: %{"vod_id" => vod_id}}) do
    vod = Streams.get_vod!(vod_id)

    with {:ok, filename} <- B2.get(vod),
         {:ok, torrent} <- Torrent.create_torrent(vod),
         {:ok, %{cdn_url: cdn_url}} <- B2.put(torrent.local_path, torrent.basename),
         :ok <- Tracker.whitelist_info_hash(torrent.info_hash),
         :ok <- Tracker.announce_torrent(torrent.magnet_link),
         {:ok, updated_vod} <- Streams.update_vod(vod, %{torrent: cdn_url, magnet_link: torrent.magnet_link}) do
      {:ok, updated_vod}
    end
  end
end
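
The worker chains the B2 download, torrent creation, torrent upload, tracker whitelisting, announce, and the VOD update; if any step returns a non-matching value, the `with` falls through and Oban retries the job (up to max_attempts: 3). To exercise it manually from iex (the vod id is illustrative):

    Oban.insert!(Bright.ObanWorkers.CreateTorrent.new(%{"vod_id" => 1}))
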
@ -8,7 +8,8 @@ defmodule Bright.ObanWorkers.ProcessVod do
  alias Bright.ObanWorkers.{
    CreateHlsPlaylist,
    CreateS3Asset,
    CreateThumbnail
    CreateThumbnail,
    CreateTorrent
  }

  @impl Oban.Worker

@ -24,6 +25,8 @@ defmodule Bright.ObanWorkers.ProcessVod do
      unless vod.thumbnail_url, do: queue_create_thumbnail(vod)
    end

    Oban.insert!(CreateTorrent.new(%{vod_id: vod_id}))

    :ok

@ -0,0 +1,24 @@
defmodule Bright.RSS do

  use Timex

  def to_rfc822(date) do
    date
    |> Timezone.convert("UTC")
    |> Timex.format!("{WDshort}, {D} {Mshort} {YYYY} {h24}:{m}:{s} {Zname}")
  end
end



# which is better?

# date
# |> Timezone.convert("GMT")
# |> Timex.format!("{RFC822}")
# # Wed, 27 Aug 20 11:37:46 +0000

# date
# |> Timezone.convert("GMT")
# |> Timex.format!("{WDshort}, {D} {Mshort} {YYYY} {h24}:{m}:{s} {Zname}")
# # Wed, 27 Aug 2020 11:37:46 GMT

@ -166,6 +166,16 @@ defmodule Bright.Streams do
    Repo.all(Vod)
  end

  @doc """
  Returns the most recently updated vod
  """
  def most_recently_updated_vod do
    Vod
    |> order_by([v], desc: v.updated_at)
    |> limit(1)
    |> Repo.one
  end

  @doc """
  Gets a single vod.

|
@ -13,6 +13,9 @@ defmodule Bright.Streams.Vod do
|
|||
field :notes, :string
|
||||
field :thumbnail_url, :string
|
||||
field :local_path, :string
|
||||
field :magnet_link, :string
|
||||
field :info_hash_v1, :string
|
||||
field :info_hash_v2, :string
|
||||
|
||||
belongs_to :stream, Bright.Streams.Stream
|
||||
# belongs_to :uploader, Bright.Accounts.User, foreign_key: :uploaded_by_id # Metadata for uploader
|
||||
|
@ -23,7 +26,7 @@ defmodule Bright.Streams.Vod do
|
|||
@doc false
|
||||
def changeset(vod, attrs) do
|
||||
vod
|
||||
|> cast(attrs, [:local_path, :s3_cdn_url, :mux_asset_id, :mux_playback_id, :ipfs_cid, :torrent, :stream_id, :origin_temp_input_url, :playlist_url, :thumbnail_url])
|
||||
|> cast(attrs, [:magnet_link, :info_hash_v1, :info_hash_v2, :local_path, :s3_cdn_url, :mux_asset_id, :mux_playback_id, :ipfs_cid, :torrent, :stream_id, :origin_temp_input_url, :playlist_url, :thumbnail_url])
|
||||
|> validate_required([:stream_id])
|
||||
end
|
||||
|
||||
|
|
|
@ -0,0 +1,44 @@
defmodule Bright.Torrent do

  alias Bright.Streams.Vod
  alias Bright.{Cache, Torrentfile, B2}

  def bittorrent_tracker_url do
    Application.fetch_env!(:bright, :bittorrent_tracker_url)
  end

  def site_url do
    Application.fetch_env!(:bright, :site_url)
  end


  def create_torrent(input_path, output_path, web_seed_url, vod_id) do
    IO.puts "site_url=#{site_url()}"
    IO.puts "bittorrent_tracker_url=#{bittorrent_tracker_url()}"
    tracker_url = bittorrent_tracker_url()
    source_url = URI.append_path(URI.parse(site_url()), "/vods/#{vod_id}") |> URI.to_string()
    comment = site_url()
    meta_version = 3 # hybrid BT v1 & v2

    {:ok, %{btih: btih, btmh: btmh, magnet: magnet, save_path: save_path} = torrentfile} =
      Torrentfile.create(input_path, output_path, tracker_url, source_url, comment, web_seed_url, meta_version)

    # upload to s3
    basename = Path.basename(save_path)
    {:ok, asset} = B2.put(save_path, basename)

    {:ok, %{basename: basename, local_path: save_path, magnet_link: magnet, info_hash_v1: btih, info_hash_v2: btmh}}
  end
end

@ -0,0 +1,93 @@
defmodule Bright.Torrentfile do
  @moduledoc """
  Provides functions to work with [torrentfile](https://github.com/alexpdev/torrentfile) CLI program
  """

  alias Bright.Cache

  # @spec execute(command :: Command.t) :: {:ok, binary()} | {:error, {Collectable.t, exit_status :: non_neg_integer}}
  # def execute(%Command{} = command) do
  #   {executable, args} = prepare(command)
  #
  #   Rambo.run(executable, args, log: false)
  #   |> format_output()
  # end

  # @spec prepare(command :: Command.t) :: {binary() | nil, list(binary)}
  # def prepare(%Command{files: files, global_options: options}) do
  #   options = Enum.map(options, &arg_for_option/1)
  #   cmd_args = List.flatten([options, options_list(files)])
  #   {ffmpeg_path(), cmd_args}
  # end

  # $ torrentfile \
  #     create \
  #     --prog 0 \
  #     -o test-fixture.torrent \
  #     -a https://tracker.futureporn.net/announce \
  #     --source https://futureporn.net \
  #     --web-seed=https://futureporn-b2.b-cdn.net/test-fixture.ts \
  #     --meta-version 2 \
  #     /home/cj/Documents/futureporn-monorepo/services/bright/test/fixtures/test-fixture.ts

  def version do
    case Rambo.run(torrentfile_path(), ["-V"]) do
      {:ok, %Rambo{status: 0, out: output}} ->
        case Regex.run(~r/(v[\d.]+)/, output) do
          [_, version] -> {:ok, version}
          _ -> {:error, "Version not found"}
        end

      {:error, reason} -> {:error, reason}
    end
  end


  # def parse_output(output) do
  #   magnet = extract_last(Regex.run(~r/(magnet:\?[^\s]+)/, output))
  #   save_path = extract_last(Regex.run(~r/Torrent Save Path:\s+(.+)/, output))
  #   btih = extract_last(Regex.run(~r/\burn:btih:([A-F\d]+)\b/i, magnet))
  #   btmh = extract_last(Regex.run(~r/\burn:btmh:([A-F\d]+)\b/i, magnet))
  #   %{magnet: magnet, save_path: save_path, btih: btih, btmh: btmh}
  # end

  def parse_output(output) do
    magnet = extract_last(Regex.run(~r/(magnet:\?[^\s]+)/, output))
    save_path = extract_last(Regex.run(~r/Torrent Save Path:\s+(.+)/, output))

    btih = if magnet, do: extract_last(Regex.run(~r/\burn:btih:([A-F\d]+)\b/i, magnet)), else: nil
    btmh = if magnet, do: extract_last(Regex.run(~r/\burn:btmh:([A-F\d]+)\b/i, magnet)), else: nil

    %{magnet: magnet, save_path: save_path, btih: btih, btmh: btmh}
  end


  defp extract_last(nil), do: nil
  defp extract_last(list) when is_list(list), do: List.last(list)

  def create(input_path, output_path, tracker_url, source_url, comment, web_seed_url, meta_version) do
    case Rambo.run(torrentfile_path(), [
           "--magnet",
           "--prog", "0",
           "--out", output_path,
           "-a", tracker_url,
           "--source", source_url,
           "--comment", comment,
           "--web-seed", web_seed_url,
           "--meta-version", to_string(meta_version),
           input_path
         ]) do
      {:error, reason} -> {:error, reason}
      {:ok, %Rambo{status: 0, out: out, err: ""}} -> {:ok, parse_output(out)}
    end
  end

  def torrentfile_path do
    case Application.get_env(:bright, :torrentfile_path, nil) do
      nil -> System.find_executable("torrentfile")
      path -> path
    end
  end
end
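
A usage sketch tying create/7 to the commented CLI invocation above (all paths and URLs are illustrative):

    {:ok, %{magnet: magnet, save_path: save_path, btih: btih, btmh: btmh}} =
      Bright.Torrentfile.create(
        "/root/.cache/futureporn/test-fixture.ts",         # input_path
        "/root/.cache/futureporn/test-fixture.torrent",    # output_path
        "https://tracker.futureporn.net/announce",         # tracker_url (-a)
        "https://futureporn.net/vods/1",                   # source_url (--source)
        "https://futureporn.net",                          # comment
        "https://futureporn-b2.b-cdn.net/test-fixture.ts", # web_seed_url
        3                                                  # meta_version (hybrid v1 + v2)
      )
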
@ -0,0 +1,104 @@
defmodule Bright.Torrents do
  @moduledoc """
  The Torrents context.
  """

  import Ecto.Query, warn: false
  alias Bright.Repo

  alias Bright.Torrents.Torrent

  @doc """
  Returns the list of torrent.

  ## Examples

      iex> list_torrent()
      [%Torrent{}, ...]

  """
  def list_torrent do
    Repo.all(Torrent)
  end

  @doc """
  Gets a single torrent.

  Raises `Ecto.NoResultsError` if the Torrent does not exist.

  ## Examples

      iex> get_torrent!(123)
      %Torrent{}

      iex> get_torrent!(456)
      ** (Ecto.NoResultsError)

  """
  def get_torrent!(id), do: Repo.get!(Torrent, id)

  @doc """
  Creates a torrent.

  ## Examples

      iex> create_torrent(%{field: value})
      {:ok, %Torrent{}}

      iex> create_torrent(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_torrent(attrs \\ %{}) do
    %Torrent{}
    |> Torrent.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a torrent.

  ## Examples

      iex> update_torrent(torrent, %{field: new_value})
      {:ok, %Torrent{}}

      iex> update_torrent(torrent, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_torrent(%Torrent{} = torrent, attrs) do
    torrent
    |> Torrent.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a torrent.

  ## Examples

      iex> delete_torrent(torrent)
      {:ok, %Torrent{}}

      iex> delete_torrent(torrent)
      {:error, %Ecto.Changeset{}}

  """
  def delete_torrent(%Torrent{} = torrent) do
    Repo.delete(torrent)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking torrent changes.

  ## Examples

      iex> change_torrent(torrent)
      %Ecto.Changeset{data: %Torrent{}}

  """
  def change_torrent(%Torrent{} = torrent, attrs \\ %{}) do
    Torrent.changeset(torrent, attrs)
  end
end

@ -0,0 +1,20 @@
defmodule Bright.Torrents.Torrent do
  use Ecto.Schema
  import Ecto.Changeset

  schema "torrent" do
    field :info_hash_v1, :string
    field :info_hash_v2, :string
    field :cdn_url, :string
    field :magnet, :string

    timestamps(type: :utc_datetime)
  end

  @doc false
  def changeset(torrent, attrs) do
    torrent
    |> cast(attrs, [:info_hash_v1, :info_hash_v2, :cdn_url, :magnet])
    |> validate_required([:info_hash_v1, :info_hash_v2, :cdn_url, :magnet])
  end
end

@ -0,0 +1,39 @@
defmodule Bright.Tracker do

  alias Bright.Streams.Vod
  alias Bright.{Cache, Torrentfile, B2}

  def tracker_url do
    Application.fetch_env!(:bright, :bittorrent_tracker_url)
  end


  def announce_torrent(_info_hash) do
    # @todo not yet implemented; return :ok so callers' `with` chains proceed
    :ok
  end

  def whitelist_info_hash(info_hash) do
    # :gen_tcp takes a charlist hostname and an integer port
    host = ~c"ncat"
    port = 8666

    # Open a TCP connection
    {:ok, socket} = :gen_tcp.connect(host, port, [:binary, packet: :raw, active: false])

    # Send the newline-terminated info_hash to the server
    :gen_tcp.send(socket, "#{info_hash}\n")

    # Close the connection
    :gen_tcp.close(socket)


    # url = "http://ncat:6868"
    # body = [
    # ]
    # headers = []
    # HTTPoison.post(url, body, headers)
  end

end

@ -17,6 +17,7 @@
    </.live_title>
    <link phx-track-static rel="stylesheet" href={~p"/assets/app.css"} />
    <link rel="shortcut icon" href="/favicon.ico" type="image/x-icon" />
    <link rel="alternate" type="application/rss+xml" title="Futureporn VOD RSS" href={~p"/feed/vods.xml"} />

    <script defer phx-track-static type="text/javascript" src={~p"/assets/app.js"}></script>

@ -11,7 +11,6 @@
  </section>

  <div class="section">
    <p>A platform built by fans, for fans, dedicated to preserving the moments that matter in the world of R-18 VTuber live streaming. It all started with a simple need: capturing ProjektMelody's streams on Chaturbate. Chaturbate doesn’t save VODs, and sometimes we missed the magic. Other times, creators like ProjektMelody faced unnecessary de-platforming for simply being unique. We wanted to create a space where this content could endure, unshaken by the tides of censorship or fleeting platforms.</p>
  </div>

@ -26,4 +25,17 @@
    <p>Join us as we redefine archiving and fandom, ensuring that no stream is ever lost again. Together, we can create a platform that stands as a testament to creativity, individuality, and the fans who make it all possible.</p>
  </div>

  <div class="section">
    <h3 class="title is-3">Goals</h3>
    <ul>
      <li>Preserve lewdtuber history</li>
      <li>Grow the lewdtuber fanbase</li>
      <li>Introduce groundbreaking interactivity features</li>
      <li>Beam VODs to LEO, the Moon & Mars base (literally)</li>
    </ul>
  </div>


</main>

@ -0,0 +1,62 @@
# defmodule BrightWeb.RssController do
#   use BrightWeb, :controller
#   plug :put_layout, false

#   alias BrightWeb.Streams.Vod

#   def index(conn, _params) do
#     vods = Vod.list_vods()
#     updated_at = Vod.most_recently_updated_vod.updated_at

#     conn
#     |> put_resp_content_type("text/xml")
#     |> render("index.xml", vods: vods, updated_at: updated_at)
#   end
# end


defmodule BrightWeb.RssController do
  use BrightWeb, :controller

  alias Bright.Streams
  alias Bright.Streams.Vod
  alias Atomex.{Feed, Entry}

  @author "CJ_Clippy"
  @email "cj@futureporn.net"

  def vods(conn, _params) do
    vods = Streams.list_vods()
    feed = build_feed(vods, conn)

    conn
    |> put_resp_content_type("text/xml")
    |> send_resp(200, feed)
  end

  def build_feed(vods, conn) do
    Feed.new(~p"/", DateTime.utc_now(), "Futureporn VOD RSS")
    |> Feed.author(@author, email: @email)
    |> Feed.link(~p"/feed/vods.xml", rel: "self")
    |> Feed.entries(Enum.map(vods, &get_entry(conn, &1)))
    |> Feed.build()
    |> Atomex.generate_document()
  end

  defp get_entry(
         conn,
         %Vod{id: id, torrent: torrent, origin_temp_input_url: origin_temp_input_url, updated_at: updated_at, playlist_url: playlist_url}
       ) do
    Entry.new(
      # Routes.post_url(conn, :show, kind, slug),
      id,
      DateTime.from_naive!(updated_at, "Etc/UTC"),
      "vod #{id}"
    )
    # |> Entry.link(Routes.post_url(conn, :show, kind, slug))
    |> Entry.link("https://futureporn.net/vods/#{id}")
    |> Entry.content(playlist_url, type: "text")
    |> Feed.add_field(:guid, %{isPermalink: false}, torrent)
    |> Entry.build()
  end
end

@ -0,0 +1,26 @@
<?xml version="1.0" encoding="utf-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
  <channel>
    <title>VOD Title</title>
    <link><%= Routes.blog_url(@conn, :index) %></link>
    <atom:link href="<%= Routes.rss_url(@conn, :index) %>" rel="self" type="application/rss+xml" />
    <description>Your blog's description...</description>
    <language>en</language>
    <copyright>Copyright <%= DateTime.utc_now.year %> Your Name</copyright>
    <lastBuildDate><%= @last_build_date |> to_rfc822 %></lastBuildDate>
    <category>IT/Internet/Web development</category>
    <ttl>60</ttl>

    <%= for post <- @posts do %>
      <item>
        <title><%= post.title %></title>
        <link><%= Routes.post_url(@conn, :show, post) %></link>
        <guid><%= Routes.post_url(@conn, :show, post) %></guid>
        <description><![CDATA[ <%= post.excerpt %> ]]></description>
        <category><%= post.category.name %></category>
        <pubDate><%= post.inserted_at |> to_rfc822 %></pubDate>
        <source url="<%= Routes.rss_url(@conn, :index) %>">Blog Title</source>
      </item>
    <% end %>
  </channel>
</rss>

@ -77,6 +77,7 @@ defmodule BrightWeb.Router do
|
|||
|
||||
get("/", PageController, :home)
|
||||
|
||||
|
||||
get("/profile", PageController, :profile)
|
||||
|
||||
get("/patrons", PatronController, :index)
|
||||
|
@ -114,6 +115,10 @@ defmodule BrightWeb.Router do
  end

  scope "/feed", BrightWeb do
    get "/vods.xml", RssController, :vods
  end

  # Other scopes may use custom stacks.
  scope "/api", BrightWeb do
    pipe_through(:api)
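# Fetching the new feed locally (assuming the Phoenix default port):
# curl http://localhost:4000/feed/vods.xml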
@ -66,6 +66,7 @@ defmodule Bright.MixProject do
      {:ffmpex, "~> 0.11.0"},
      {:sweet_xml, "~> 0.6"},
      {:ex_m3u8, "~> 0.14.2"},
      {:atomex, "~> 0.3.0"},
      # {:membrane_core, "~> 1.0"},
      # {:membrane_mpeg_ts_plugin, "~> 1.0.3"},
      # {:membrane_file_plugin, "~> 0.17.2"},
@ -1,4 +1,5 @@
%{
  "atomex": {:hex, :atomex, "0.3.0", "19b5d1a2aef8706dbd307385f7d5d9f6f273869226d317492c396c7bacf26402", [:mix], [{:xml_builder, "~> 2.0.0", [hex: :xml_builder, repo: "hexpm", optional: false]}], "hexpm", "025dbc3a3e99380894791a093019f535d0ef6cf1916f6ec1b778ac107fcfc3e4"},
  "bandit": {:hex, :bandit, "1.6.6", "f2019a95261d400579075df5bc15641ba8e446cc4777ede6b4ec19e434c3340d", [:mix], [{:hpax, "~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:thousand_island, "~> 1.0", [hex: :thousand_island, repo: "hexpm", optional: false]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "ceb19bf154bc2c07ee0c9addf407d817c48107e36a66351500846fc325451bf9"},
  "bcrypt_elixir": {:hex, :bcrypt_elixir, "3.2.0", "feab711974beba4cb348147170346fe097eea2e840db4e012a145e180ed4ab75", [:make, :mix], [{:comeonin, "~> 5.3", [hex: :comeonin, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "563e92a6c77d667b19c5f4ba17ab6d440a085696bdf4c68b9b0f5b30bc5422b8"},
  "bimap": {:hex, :bimap, "1.3.0", "3ea4832e58dc83a9b5b407c6731e7bae87458aa618e6d11d8e12114a17afa4b3", [:mix], [], "hexpm", "bf5a2b078528465aa705f405a5c638becd63e41d280ada41e0f77e6d255a10b4"},
@ -8,6 +9,7 @@
  "bundlex": {:hex, :bundlex, "1.5.4", "3726acd463f4d31894a59bbc177c17f3b574634a524212f13469f41c4834a1d9", [:mix], [{:bunch, "~> 1.0", [hex: :bunch, repo: "hexpm", optional: false]}, {:elixir_uuid, "~> 1.2", [hex: :elixir_uuid, repo: "hexpm", optional: false]}, {:qex, "~> 0.5", [hex: :qex, repo: "hexpm", optional: false]}, {:req, ">= 0.4.0", [hex: :req, repo: "hexpm", optional: false]}, {:zarex, "~> 1.0", [hex: :zarex, repo: "hexpm", optional: false]}], "hexpm", "e745726606a560275182a8ac1c8ebd5e11a659bb7460d8abf30f397e59b4c5d2"},
  "bunt": {:hex, :bunt, "0.2.1", "e2d4792f7bc0ced7583ab54922808919518d0e57ee162901a16a1b6664ef3b14", [:mix], [], "hexpm", "a330bfb4245239787b15005e66ae6845c9cd524a288f0d141c148b02603777a5"},
  "castore": {:hex, :castore, "1.0.11", "4bbd584741601eb658007339ea730b082cc61f3554cf2e8f39bf693a11b49073", [:mix], [], "hexpm", "e03990b4db988df56262852f20de0f659871c35154691427a5047f4967a16a62"},
  "cc_precompiler": {:hex, :cc_precompiler, "0.1.10", "47c9c08d8869cf09b41da36538f62bc1abd3e19e41701c2cea2675b53c704258", [:mix], [{:elixir_make, "~> 0.7", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "f6e046254e53cd6b41c6bacd70ae728011aa82b2742a80d6e2214855c6e06b22"},
  "certifi": {:hex, :certifi, "2.12.0", "2d1cca2ec95f59643862af91f001478c9863c2ac9cb6e2f89780bfd8de987329", [:rebar3], [], "hexpm", "ee68d85df22e554040cdb4be100f33873ac6051387baf6a8f6ce82272340ff1c"},
  "coerce": {:hex, :coerce, "1.0.1", "211c27386315dc2894ac11bc1f413a0e38505d808153367bd5c6e75a4003d096", [:mix], [], "hexpm", "b44a691700f7a1a15b4b7e2ff1fa30bebd669929ac8aa43cffe9e2f8bf051cf1"},
  "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
@ -88,6 +90,7 @@
  "plug": {:hex, :plug, "1.16.1", "40c74619c12f82736d2214557dedec2e9762029b2438d6d175c5074c933edc9d", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a13ff6b9006b03d7e33874945b2755253841b238c34071ed85b0e86057f8cddc"},
  "plug_crypto": {:hex, :plug_crypto, "2.1.0", "f44309c2b06d249c27c8d3f65cfe08158ade08418cf540fd4f72d4d6863abb7b", [:mix], [], "hexpm", "131216a4b030b8f8ce0f26038bc4421ae60e4bb95c5cf5395e1421437824c4fa"},
  "postgrex": {:hex, :postgrex, "0.19.3", "a0bda6e3bc75ec07fca5b0a89bffd242ca209a4822a9533e7d3e84ee80707e19", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "d31c28053655b78f47f948c85bb1cf86a9c1f8ead346ba1aa0d0df017fa05b61"},
  "pythonx": {:hex, :pythonx, "0.2.5", "05660dc8548a4ab5da5b7f7977c6a5a3fa16eefadbe54077f9a176c9d386be27", [:make, :mix], [{:cc_precompiler, "~> 0.1", [hex: :cc_precompiler, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.8", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: false]}, {:req, "~> 0.3", [hex: :req, repo: "hexpm", optional: false]}], "hexpm", "66d2179e37be527cbecf47097c15aea28a1dbcb2c6e965407c89ca1e1ac74d17"},
  "qex": {:hex, :qex, "0.5.1", "0d82c0f008551d24fffb99d97f8299afcb8ea9cf99582b770bd004ed5af63fd6", [:mix], [], "hexpm", "935a39fdaf2445834b95951456559e9dc2063d0a055742c558a99987b38d6bab"},
  "rambo": {:hex, :rambo, "0.3.4", "8962ac3bd1a633ee9d0e8b44373c7913e3ce3d875b4151dcd060886092d2dce7", [:mix], [], "hexpm", "0cc54ed089fbbc84b65f4b8a774224ebfe60e5c80186fafc7910b3e379ad58f1"},
  "ratio": {:hex, :ratio, "4.0.1", "3044166f2fc6890aa53d3aef0c336f84b2bebb889dc57d5f95cc540daa1912f8", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}, {:numbers, "~> 5.2.0", [hex: :numbers, repo: "hexpm", optional: false]}], "hexpm", "c60cbb3ccdff9ffa56e7d6d1654b5c70d9f90f4d753ab3a43a6bf40855b881ce"},
@ -114,5 +117,6 @@
  "unifex": {:hex, :unifex, "1.2.1", "6841c170a6e16509fac30b19e4e0a19937c33155a59088b50c15fc2c36251b6b", [:mix], [{:bunch, "~> 1.0", [hex: :bunch, repo: "hexpm", optional: false]}, {:bundlex, "~> 1.4", [hex: :bundlex, repo: "hexpm", optional: false]}, {:shmex, "~> 0.5.0", [hex: :shmex, repo: "hexpm", optional: false]}], "hexpm", "8c9d2e3c48df031e9995dd16865bab3df402c0295ba3a31f38274bb5314c7d37"},
  "websock": {:hex, :websock, "0.5.3", "2f69a6ebe810328555b6fe5c831a851f485e303a7c8ce6c5f675abeb20ebdadc", [:mix], [], "hexpm", "6105453d7fac22c712ad66fab1d45abdf049868f253cf719b625151460b8b453"},
  "websock_adapter": {:hex, :websock_adapter, "0.5.8", "3b97dc94e407e2d1fc666b2fb9acf6be81a1798a2602294aac000260a7c4a47d", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "315b9a1865552212b5f35140ad194e67ce31af45bcee443d4ecb96b5fd3f3782"},
  "xml_builder": {:hex, :xml_builder, "2.0.0", "371ed27bb63bf0598dbaf3f0c466e5dc7d16cb4ecb68f06a67f953654062e21b", [:mix], [], "hexpm", "baeb5c8d42204bac2b856ffd50e8cda42d63b622984538d18d92733e4e790fbd"},
  "zarex": {:hex, :zarex, "1.0.5", "58239e3ee5d75f343262bb4df5cf466555a1c689f920e5d3651a9333972f7c7e", [:mix], [], "hexpm", "9fb72ef0567c2b2742f5119a1ba8a24a2fabb21b8d09820aefbf3e592fa9a46a"},
}
@ -0,0 +1,10 @@
defmodule Bright.Repo.Migrations.AddMagnetLink do
  use Ecto.Migration

  def change do
    alter table(:vods) do
      add :magnet_link, :string
      add :info_hash, :string
    end
  end
end
@ -0,0 +1,11 @@
defmodule Bright.Repo.Migrations.AddInfoHashV2 do
  use Ecto.Migration

  def change do
    alter table(:vods) do
      remove :info_hash
      add :info_hash_v1, :string
      add :info_hash_v2, :string
    end
  end
end
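# Context for the split: BitTorrent v1 info hashes (btih) are SHA-1 based,
# while v2 info hashes (btmh) are SHA-256 based, so a hybrid torrent carries
# one of each.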
@ -0,0 +1,14 @@
defmodule Bright.Repo.Migrations.CreateTorrent do
  use Ecto.Migration

  def change do
    create table(:torrent) do
      add :info_hash_v1, :text
      add :info_hash_v2, :text
      add :cdn_url, :text
      add :magnet, :text

      timestamps(type: :utc_datetime)
    end
  end
end
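# The Ecto schema backing this table is not shown in this diff. A minimal
# sketch consistent with the migration and the tests further down (field
# names and types mirror the migration; the actual module may differ):
defmodule Bright.Torrents.Torrent do
  use Ecto.Schema

  schema "torrent" do
    field :info_hash_v1, :string
    field :info_hash_v2, :string
    field :cdn_url, :string
    field :magnet, :string

    timestamps(type: :utc_datetime)
  end
end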
@ -9,6 +9,7 @@ defmodule Bright.B2Test do
  alias Bright.B2
  alias Bright.Cache
  import Bright.StreamsFixtures

  @tag :acceptance
  test "put/1" do
@ -27,6 +28,16 @@ defmodule Bright.B2Test do
  end


  @tag :integration
  test "get/1 with %Vod{}" do
    stream = stream_fixture()
    vod = vod_fixture(%{stream_id: stream.id, s3_cdn_url: "https://futureporn-b2.b-cdn.net/test-fixture.ts"})
    {:ok, filename} = B2.get(vod)
    assert :ok
    assert Regex.match?(~r/\.cache\/futureporn.*\.ts/, filename)
  end


  @tag :acceptance
  test "get/2" do
    local_file = "/tmp/SampleVideo_1280x720_1mb.mp4"
@ -5,6 +5,30 @@ defmodule Bright.CacheTest do
  @sample_url "https://example.com/my_video.mp4"


  ## IDK what I'm doing here. Ideally I want a redis-like k/v store where I can temporarily put VODs and have them expire after 12 hours or so.
  ## This could speed up VOD processing, because it would prevent having to download the VOD from S3 on every Oban worker run.
  ## BUT I don't want to implement it myself, because of the old adage, "There are only two hard problems in CS: cache invalidation and naming things."
  ## Meaning I don't think I can do any better than the experts in the field.
  ## Anyway, this is FEATURE CREEP! Solve the problem without caching and LET IT BE SLOW.
  ## Implementing this cache before the system works is premature optimization!

  # describe "cache k/v" do
  #   test "get/1 with string cache key" do

  #   end

  #   test "get/1 with %Vod{}" do
  #     stream = stream_fixture()
  #     vod = vod_fixture(%{stream_id: stream.id})
  #     Cache.get(vod)
  #   end

  #   test "put/2" do

  #   end
  # end

  describe "cache" do

    @tag :unit
@ -12,7 +36,6 @@ defmodule Bright.CacheTest do
      assert Regex.match?(~r/.cache\/futureporn/, Cache.get_cache_dir())
    end

    @tag :unit
    test "generate_basename/1" do
      # Test with a URL
@ -8,33 +8,39 @@ defmodule Bright.CreateTorrentTest do
  alias Bright.ObanWorkers.{ProcessVod, CreateTorrent}
  alias Bright.Streams
  alias Bright.Streams.Stream
  alias Bright.Streams.{Stream,Vod}

  describe "CreateTorrent" do

    import Bright.StreamsFixtures

    @test_video_url "https://futureporn-b2.b-cdn.net/test-fixture.ts"

    @tag :integration
    test "torrent creation" do
      stream = stream_fixture()
      vod = vod_fixture(%{torrent: nil, stream_id: stream.id, origin_temp_input_url: @test_video_url})
      {:ok, %Vod{torrent: torrent, magnet_link: magnet_link, info_hash_v1: info_hash_v1, info_hash_v2: info_hash_v2}} =
        perform_job(Bright.ObanWorkers.CreateTorrent, %{vod_id: vod.id})
      assert Regex.match?(~r/^https:\/\/.*\.torrent$/, torrent)
      assert Regex.match?(~r/^magnet:/, magnet_link)
      assert Regex.match?(~r/([A-F\d]+)\b/i, info_hash_v1)
      assert Regex.match?(~r/([A-F\d]+)\b/i, info_hash_v2)
    end


    @tag :integration
    test "scheduling upon vod creation" do
      example_video = "http://example.com/video.ts"
      stream_attrs = %{date: ~U[2024-12-28 03:31:00Z], title: "some title", notes: "some notes"}
      {:ok, %Stream{} = stream} = Streams.create_stream(stream_attrs)
      {:ok, _vod} = Streams.create_vod(%{stream_id: stream.id, origin_temp_input_url: example_video})
      {:ok, _vod} = Streams.create_vod(%{stream_id: stream.id, origin_temp_input_url: @test_video_url})
      assert_enqueued worker: ProcessVod, queue: :default
      assert %{success: 1} = Oban.drain_queue(queue: :default) # ProcessVod is what queues CreateThumbnail so we need to make it run
      assert %{success: 1} = Oban.drain_queue(queue: :default) # ProcessVod is what queues CreateTorrent so we need to make it run
      assert_enqueued [worker: CreateTorrent, queue: :default], 1000
    end


    @tag :integration
    test "not scheduled when origin_temp_input_url is missing" do
      stream_attrs = %{date: ~U[2024-12-28 03:31:00Z], title: "some title", notes: "some notes"}
      {:ok, %Stream{} = stream} = Streams.create_stream(stream_attrs)
      {:ok, _vod} = Streams.create_vod(%{stream_id: stream.id})
      refute_enqueued worker: CreateTorrent
    end

  end
@ -0,0 +1,49 @@
defmodule Bright.TorrentTest do
  use Bright.DataCase

  alias Bright.Torrent

  describe "torrent" do

    import Bright.StreamsFixtures
    alias Bright.{Downloader,Cache}

    @test_fixture "https://futureporn-b2.b-cdn.net/test-fixture.ts"

    # @tag :integration
    # test "create_torrent/1" do
    #   stream = stream_fixture()
    #   vod = vod_fixture(%{stream_id: stream.id, s3_cdn_url: "https://futureporn-b2.b-cdn.net/test-fixture.ts"})
    #   {:ok, _} = Torrent.create_torrent(vod)
    #   assert :ok
    # end

    @tag :integration
    test "create_torrent/7" do
      stream = stream_fixture()
      vod = vod_fixture(%{stream_id: stream.id, s3_cdn_url: "https://futureporn-b2.b-cdn.net/test-fixture.ts"})
      input_path = Path.absname("./test/fixtures/test-fixture.ts")
      output_path = Cache.generate_filename("test", "torrent")
      tracker_url = "https://tracker.futureporn.net/announce"
      source_url = "https://futureporn.net/vods/69"
      _comment = "https://futureporn.net"
      web_seed_url = @test_fixture
      _meta_version = 3
      IO.puts "input_path=#{input_path} output_path=#{output_path} tracker_url=#{tracker_url} source_url=#{source_url}"
      {:ok, %{local_path: local_path, magnet_link: magnet_link, basename: basename, info_hash_v1: info_hash_v1, info_hash_v2: info_hash_v2}} =
        Torrent.create_torrent(input_path, output_path, web_seed_url, vod.id)
      assert :ok
      assert local_path === output_path
      assert File.exists?(output_path)
      assert String.starts_with?(magnet_link, "magnet:")
      assert String.ends_with?(basename, ".torrent")
      assert is_binary(info_hash_v1)
      assert is_binary(info_hash_v2)
    end

  end
end
@ -0,0 +1,95 @@
defmodule Bright.TorrentfileTest do
  use Bright.DataCase

  alias Bright.Torrentfile
  alias Bright.Cache

  @test_ts_fixture "./test/fixtures/test-fixture.ts"
  @test_tracker_url "http://localhost:6969/announce"
  @test_web_seed_url "https://futureporn-b2.b-cdn.net/test-fixture.ts"
  @test_source_url "https://futureporn.net/vods/69"
  @test_comment "https://futureporn.net"

  describe "torrentfile" do

    test "version/0" do
      {:ok, ver_num} = Torrentfile.version()
      assert :ok
      assert Regex.match?(~r"v\d\.\d\.\d", ver_num)
    end

    @tag :unit
    test "torrentfile_path" do
      assert Regex.match?(~r"\/torrentfile", Torrentfile.torrentfile_path())
    end


    test "create/7" do
      input_path = @test_ts_fixture
      output_path = Cache.generate_filename("test", "torrent")
      tracker_url = @test_tracker_url
      comment = @test_comment
      source_url = @test_source_url
      web_seed_url = @test_web_seed_url
      meta_version = 3
      {:ok, output} = Torrentfile.create(input_path, output_path, tracker_url, comment, source_url, web_seed_url, meta_version)

      assert :ok
      assert is_binary(output.save_path)
      assert output.save_path === output_path
      assert is_binary(output.btih)
      assert is_binary(output.btmh)
      assert File.exists?(output_path)
    end

    @tag :unit
    test "parses magnet link, save path, btih and btmh correctly" do
      output = """
      magnet:?xt=urn:btih:157835a64d398fd63d83b5fd6dac5612bd60b6c6&xt=urn:btmh:12201bf9590518d84900ca3e4a88a7fe5f6a246deff2cf37d3acc24b7f64a8b0b572&dn=test-fixture.ts&tr=https%3A%2F%2Ftracker.futureporn.net%2Fannounce

      Torrent Save Path: /home/cj/Downloads/test-fixture.torrent
      """

      expected = %{
        magnet: "magnet:?xt=urn:btih:157835a64d398fd63d83b5fd6dac5612bd60b6c6&xt=urn:btmh:12201bf9590518d84900ca3e4a88a7fe5f6a246deff2cf37d3acc24b7f64a8b0b572&dn=test-fixture.ts&tr=https%3A%2F%2Ftracker.futureporn.net%2Fannounce",
        save_path: "/home/cj/Downloads/test-fixture.torrent",
        btih: "157835a64d398fd63d83b5fd6dac5612bd60b6c6",
        btmh: "12201bf9590518d84900ca3e4a88a7fe5f6a246deff2cf37d3acc24b7f64a8b0b572"
      }

      assert Torrentfile.parse_output(output) == expected
    end


    @tag :unit
    test "returns nil values when output is empty" do
      assert Torrentfile.parse_output("") == %{magnet: nil, save_path: nil, btih: nil, btmh: nil}
    end

    @tag :unit
    test "handles missing save path" do
      output = "magnet:?xt=urn:btih:12345"
      assert Torrentfile.parse_output(output) == %{magnet: "magnet:?xt=urn:btih:12345", save_path: nil, btih: "12345", btmh: nil}
    end

    @tag :unit
    test "handles missing magnet link" do
      output = "Torrent Save Path: /downloads/test.torrent"
      assert Torrentfile.parse_output(output) == %{magnet: nil, save_path: "/downloads/test.torrent", btih: nil, btmh: nil}
    end

    @tag :unit
    test "handles missing btih" do
      output = "Torrent Save Path: /downloads/test.torrent"
      assert Torrentfile.parse_output(output) == %{btih: nil, magnet: nil, btmh: nil, save_path: "/downloads/test.torrent"}
    end

    @tag :unit
    test "handles missing btmh" do
      output = "Torrent Save Path: /downloads/test.torrent"
      assert Torrentfile.parse_output(output) == %{btmh: nil, magnet: nil, btih: nil, save_path: "/downloads/test.torrent"}
    end

  end
end
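# A minimal parse_output/1 sketch consistent with the expectations above.
# This is an illustration, not necessarily the shipped implementation; the
# regexes are assumptions derived from the torrentfile CLI output exercised
# in these tests.
def parse_output(output) do
  %{
    magnet: capture(~r/(magnet:\?\S+)/, output),
    save_path: capture(~r/Torrent Save Path:\s*(\S+)/, output),
    btih: capture(~r/urn:btih:([0-9a-fA-F]+)/, output),
    btmh: capture(~r/urn:btmh:([0-9a-fA-F]+)/, output)
  }
end

# Returns the first capture group, or nil when the pattern is absent.
defp capture(regex, string) do
  case Regex.run(regex, string, capture: :all_but_first) do
    [match] -> match
    _ -> nil
  end
end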
@ -0,0 +1,65 @@
defmodule Bright.TorrentsTest do
  use Bright.DataCase

  alias Bright.Torrents

  describe "torrent" do
    alias Bright.Torrents.Torrent

    import Bright.TorrentsFixtures

    @invalid_attrs %{info_hash_v1: nil, info_hash_v2: nil, cdn_url: nil, magnet: nil}

    test "list_torrent/0 returns all torrent" do
      torrent = torrent_fixture()
      assert Torrents.list_torrent() == [torrent]
    end

    test "get_torrent!/1 returns the torrent with given id" do
      torrent = torrent_fixture()
      assert Torrents.get_torrent!(torrent.id) == torrent
    end

    test "create_torrent/1 with valid data creates a torrent" do
      valid_attrs = %{info_hash_v1: "some info_hash_v1", info_hash_v2: "some info_hash_v2", cdn_url: "some cdn_url", magnet: "some magnet"}

      assert {:ok, %Torrent{} = torrent} = Torrents.create_torrent(valid_attrs)
      assert torrent.info_hash_v1 == "some info_hash_v1"
      assert torrent.info_hash_v2 == "some info_hash_v2"
      assert torrent.cdn_url == "some cdn_url"
      assert torrent.magnet == "some magnet"
    end

    test "create_torrent/1 with invalid data returns error changeset" do
      assert {:error, %Ecto.Changeset{}} = Torrents.create_torrent(@invalid_attrs)
    end

    test "update_torrent/2 with valid data updates the torrent" do
      torrent = torrent_fixture()
      update_attrs = %{info_hash_v1: "some updated info_hash_v1", info_hash_v2: "some updated info_hash_v2", cdn_url: "some updated cdn_url", magnet: "some updated magnet"}

      assert {:ok, %Torrent{} = torrent} = Torrents.update_torrent(torrent, update_attrs)
      assert torrent.info_hash_v1 == "some updated info_hash_v1"
      assert torrent.info_hash_v2 == "some updated info_hash_v2"
      assert torrent.cdn_url == "some updated cdn_url"
      assert torrent.magnet == "some updated magnet"
    end

    test "update_torrent/2 with invalid data returns error changeset" do
      torrent = torrent_fixture()
      assert {:error, %Ecto.Changeset{}} = Torrents.update_torrent(torrent, @invalid_attrs)
      assert torrent == Torrents.get_torrent!(torrent.id)
    end

    test "delete_torrent/1 deletes the torrent" do
      torrent = torrent_fixture()
      assert {:ok, %Torrent{}} = Torrents.delete_torrent(torrent)
      assert_raise Ecto.NoResultsError, fn -> Torrents.get_torrent!(torrent.id) end
    end

    test "change_torrent/1 returns a torrent changeset" do
      torrent = torrent_fixture()
      assert %Ecto.Changeset{} = Torrents.change_torrent(torrent)
    end
  end
end
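# The invalid-attrs case above (all fields nil returning an error changeset)
# implies all four fields are validated as required. A changeset sketch
# consistent with that behavior, assuming `import Ecto.Changeset` is in
# scope (the shipped validations may differ):
def changeset(torrent, attrs) do
  torrent
  |> cast(attrs, [:info_hash_v1, :info_hash_v2, :cdn_url, :magnet])
  |> validate_required([:info_hash_v1, :info_hash_v2, :cdn_url, :magnet])
end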
@ -0,0 +1,46 @@
defmodule Bright.TrackerTest do
  use Bright.DataCase

  alias Bright.Tracker

  describe "tracker" do

    import Bright.StreamsFixtures

    @info_hash_fixture "723886c0b0d9d41bfaa5276a9b2552d84ba09dd8a77d9ddcab5c9fa16cdb9770" # test-fixture.ts (BT info_hash v2)

    @tag :integration
    test "whitelist_info_hash/1 using a string info_hash" do
      :ok = Tracker.whitelist_info_hash(@info_hash_fixture)
      assert :ok
    end

    @tag :integration
    test "whitelist_info_hash/1 using a %Vod{}" do
      stream = stream_fixture()
      vod = vod_fixture(%{stream_id: stream.id})
      :ok = Tracker.whitelist_info_hash(vod)
      assert :ok
    end

    @tag :integration
    test "announce_torrent/1 using a string info_hash" do
      :ok = Tracker.announce_torrent(@info_hash_fixture)
      assert :ok
    end

    @tag :integration
    test "announce_torrent/1 using a %Vod{}" do
      stream = stream_fixture()
      vod = vod_fixture(%{stream_id: stream.id, info_hash: @info_hash_fixture})
      :ok = Tracker.announce_torrent(vod)
      assert :ok
    end

  end
end
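# Hedged sketch of announce_torrent/1 for a string info_hash: an HTTP GET
# against the tracker's /announce endpoint (BEP 3). Everything below is an
# assumption for illustration: the Req client, the localhost tracker URL
# matching the test tracker used elsewhere, and the placeholder peer_id.
# A fully spec-compliant client would URL-encode the raw hash bytes rather
# than passing a hex string.
def announce_torrent(info_hash) when is_binary(info_hash) do
  params = [
    info_hash: info_hash,
    peer_id: "-FP0001-aaaaaaaaaaaa",
    port: 6881,
    uploaded: 0,
    downloaded: 0,
    left: 0
  ]

  case Req.get("http://localhost:6969/announce", params: params) do
    {:ok, %Req.Response{status: 200}} -> :ok
    {:ok, %Req.Response{status: status}} -> {:error, {:http, status}}
    {:error, reason} -> {:error, reason}
  end
end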
Binary file not shown.
@ -0,0 +1,23 @@
defmodule Bright.TorrentsFixtures do
  @moduledoc """
  This module defines test helpers for creating
  entities via the `Bright.Torrents` context.
  """

  @doc """
  Generate a torrent.
  """
  def torrent_fixture(attrs \\ %{}) do
    {:ok, torrent} =
      attrs
      |> Enum.into(%{
        cdn_url: "some cdn_url",
        info_hash_v1: "some info_hash_v1",
        info_hash_v2: "some info_hash_v2",
        magnet: "some magnet"
      })
      |> Bright.Torrents.create_torrent()

    torrent
  end
end