acceptance tests pass omg omg

parent ca7b6e4298
commit 873c3e0fd8
@@ -13,6 +13,19 @@ jobs:
 - uses: actions/checkout@v3
 name: Check out code
 
+# IDK if I need this
+# - name: Set docker metadata
+# id: meta
+# uses: docker/metadata-action@v5
+# with:
+# images: |
+# gitea.futureporn.net/futureporn/tracker-helper:latest
+# tags: |
+# type=ref,event=branch
+# type=ref,event=pr
+# type=semver,pattern={{version}}
+# type=semver,pattern={{major}}.{{minor}}
+
 - name: Login to Gitea Docker Registry
 uses: docker/login-action@v3
 with:
@@ -26,19 +39,24 @@ jobs:
 context: ./services/tracker-helper
 push: true
 tags: gitea.futureporn.net/futureporn/tracker-helper:latest
+labels: |
+org.opencontainers.image.description=Opentracker helper service. Adds info_hash whitelisting via HTTP
+org.opencontainers.image.title=tracker-helper
+org.opencontainers.image.created={{commit_date 'YYYY-MM-DDTHH:mm:ss.SSS[Z]'}}
+org.opencontainers.image.licenses=unlicense
+org.opencontainers.image.source=https://gitea.futureporn.net/futureporn/fp
+org.opencontainers.image.url=https://gitea.futureporn.net/futureporn/-/packages/container/tracker-helper
 secrets: |
-WL_CREDENTIALS=${{ secrets.WL_CREDENTIALS }}
-env:
-WL_CREDENTIALS: ${{ secrets.WL_CREDENTIALS }}
-WL_FIFO_PATH: /tmp/adder.fifo
-WL_FILE_PATH: /usr/src/app/test/fixtures/whitelist
+WL_USERNAME=${{ secrets.WL_USERNAME }}
+WL_PASSWORD=${{ secrets.WL_PASSWORD }}
 
-- name: Build futureporn/opentracker
+- name: Build futureporn/aquatic
 uses: docker/build-push-action@v6
 with:
-context: ./apps/opentracker
+context: ./apps/aquatic
+file: ./apps/aquatic/docker/aquatic_udp.Dockerfile
 push: true
-tags: gitea.futureporn.net/futureporn/opentracker:latest
+tags: gitea.futureporn.net/futureporn/aquatic:latest
 
 - name: Build futureporn/bright
 uses: docker/build-push-action@v6
@@ -48,3 +66,26 @@ jobs:
 tags: gitea.futureporn.net/futureporn/bright:latest
 build-args: |
 MIX_ENV=prod
+labels: |
+org.opencontainers.image.description=The Galaxy's Best VTuber hentai site
+org.opencontainers.image.title=bright
+org.opencontainers.image.created={{commit_date 'YYYY-MM-DDTHH:mm:ss.SSS[Z]'}}
+org.opencontainers.image.version={{version}}
+org.opencontainers.image.licenses=unlicense
+org.opencontainers.image.source=https://gitea.futureporn.net/futureporn/fp
+org.opencontainers.image.url=https://gitea.futureporn.net/futureporn/-/packages/container/bright
+
+# - name: Build futureporn/opentracker
+# uses: docker/build-push-action@v6
+# with:
+# context: ./apps/opentracker
+# push: true
+# tags: gitea.futureporn.net/futureporn/opentracker:latest
+# labels: |
+# org.opencontainers.image.description=opentracker is an open and free bittorrent tracker project.
+# org.opencontainers.image.title=opentracker
+# org.opencontainers.image.created={{commit_date 'YYYY-MM-DDTHH:mm:ss.SSS[Z]'}}
+# org.opencontainers.image.version={{version}}
+# org.opencontainers.image.licenses=beerware
+# org.opencontainers.image.source=https://erdgeist.org/arts/software/opentracker
+# org.opencontainers.image.url=https://gitea.futureporn.net/futureporn/-/packages/container/opentracker
@@ -12,18 +12,13 @@ jobs:
 test_phoenix:
 name: Tests & Checks
 runs-on: ubuntu-22.04
-timeout-minutes: 600
+timeout-minutes: 20
 permissions:
 contents: read
 pull-requests: write
 env:
 MIX_ENV: test
 TRACKER_URL: ${{ vars.TRACKER_URL }}
-WHITELIST_URL: ${{ vars.WHITELIST_URL }}
-WHITELIST_USERNAME: ${{ secrets.WHITELIST_USERNAME }}
-WHITELIST_PASSWORD: ${{ secrets.WHITELIST_PASSWORD }}
-WHITELIST_PASSWORD_CADDY: ${{ secrets.WHITELIST_PASSWORD_CADDY }}
-WHITELIST_FEED_URL: ${{ vars.WHITELIST_FEED_URL }}
 AWS_BUCKET: ${{ vars.AWS_BUCKET }}
 AWS_HOST: ${{ vars.AWS_HOST }}
 AWS_REGION: ${{ vars.AWS_REGION }}
@@ -32,6 +27,9 @@ jobs:
 PUBLIC_S3_ENDPOINT: ${{ vars.PUBLIC_S3_ENDPOINT }}
 SITE_URL: https://futureporn.net
 SECRET_KEY_BASE: ${{ secrets.SECRET_KEY_BASE }}
+WL_URL: ${{ vars.WL_URL }}
+WL_USERNAME: ${{ secrets.WL_USERNAME }}
+WL_PASSWORD: ${{ secrets.WL_PASSWORD }}
 
 services:
 db:
@@ -42,59 +40,77 @@ jobs:
 POSTGRES_DB: ${{ vars.DB_NAME }}
 POSTGRES_USER: ${{ vars.DB_USER }}
 POSTGRES_PASSWORD: ${{ secrets.DB_PASS }}
+options: >-
+--health-cmd pg_isready
+--health-interval 10s
+--health-timeout 5s
+--health-retries 5
 
 tracker-helper:
 image: gitea.futureporn.net/futureporn/tracker-helper:latest
 ports:
 - 5063:5063
 env:
-WL_FIFO_PATH: /etc/opentracker/adder.fifo
-WL_FILE_PATH: /etc/opentracker/whitelist
-WL_CREDENTIALS: ${{ secrets.WL_CREDENTIALS }}
+WL_FILE_PATH: /var/lib/aquatic/whitelist
+WL_USERNAME: ${{ secrets.WL_USERNAME }}
+WL_PASSWORD: ${{ secrets.WL_PASSWORD }}
 WL_PORT: 5063
 volumes:
-- /tmp/futureporn/opentracker:/etc/opentracker
+- aquatic:/var/lib/aquatic
 
-opentracker:
-image: gitea.futureporn.net/futureporn/opentracker:latest
+aquatic:
+image: gitea.futureporn.net/futureporn/aquatic:latest
 ports:
-- 6969:6969
-env:
-WHITELIST_FEED_URL: https://bright.futureporn.net/torrents
+- 3000:3000
+- 9000:9000
 volumes:
-- /tmp/futureporn/opentracker:/etc/opentracker
+- aquatic:/var/lib/aquatic
+env:
+ACCESS_LIST_CONTENTS: ""
+CONFIG_FILE_CONTENTS: |
+log_level = 'debug'
+[network]
+use_ipv4 = true
+use_ipv6 = true
+address_ipv4 = "0.0.0.0:3000"
+address_ipv6 = "[::]:3000"
+[statistics]
+interval = 5
+print_to_stdout = true
+run_prometheus_endpoint = true
+prometheus_endpoint_address = "0.0.0.0:9000"
+[access_list]
+mode = "allow"
+path = "/var/lib/aquatic/whitelist"
+[privileges]
+# Chroot and switch group and user after binding to sockets
+drop_privileges = true
+# Chroot to this path
+chroot_path = "/var/lib/aquatic"
+# Group to switch to after chrooting
+group = "nogroup"
+# User to switch to after chrooting
+user = "nobody"
 
 steps:
-- name: wait a few seconds
-run: sleep 30
-
-- name: Debug services
-run: docker ps -a
-
-# - name: Install apt packages
-# run: apt-get update && apt-get install -y iputils-ping postgresql
+- name: Install apt packages
+run: apt-get update && apt-get install -y iputils-ping postgresql
 
 - name: tracker-helper service check (localhost)
 run: curl http://localhost:5063/health
 
-- name: tracker-helper service check
-run: curl http://tracker-helper:5063/health
-
 # - name: Check opentracker pingability
 # run: ping -c 3 opentracker
 
-- name: opentracker service check
-run: |
-getent hosts opentracker
-curl -v http://opentracker:6969/stats
+- name: aquatic service check
+run: curl -v http://localhost:9000
 
-- name: Check postgres pingability
-run: ping -c 3 db
-
-- name: postgres service check
-env:
-PGPASSWORD: ${{ secrets.DB_PASS }}
-run: psql --host=db --port=5432 --dbname=${{ vars.DB_NAME }} --username=${{ vars.DB_USER }} --list
+# - name: postgres service check
+# env:
+# PGPASSWORD: ${{ secrets.DB_PASS }}
+# run: |
+# echo "DB_HOST=${{vars.DB_HOST}}, DB_NAME=${{vars.DB_NAME}}, DB_USER=${{vars.DB_USER}}"
+# psql --host=${{ vars.DB_HOST }} --port=5432 --dbname=${{ vars.DB_NAME }} --username=${{ vars.DB_USER }} --list
 
 - name: Setup FFmpeg
 uses: FedericoCarboni/setup-ffmpeg@v3
.gitmodules (6 changes, vendored)
@@ -1,3 +1,3 @@
-[submodule "contrib/superstreamer"]
-path = contrib/superstreamer
-url = git@github.com:superstreamerapp/superstreamer.git
+[submodule "apps/aquatic"]
+path = apps/aquatic
+url = git@github.com:greatest-ape/aquatic.git
apps/aquatic (new submodule)
@@ -0,0 +1 @@
+Subproject commit 8eff006f79e8bb982bf3f110b22867f306719648
apps/beep/README.md (new file, 15 lines)
@@ -0,0 +1,15 @@
+# beep
+
+## usage
+
+devbox run beep
+devbox run boop
+
+audible fail/pass notifications for long running tasks
+
+devbox run test && devbox run beep || devbox run boop
+
+## under the hood
+
+ffplay -nodisp -loglevel quiet -autoexit ./apps/beep/beep2.wav
apps/beep/beep1.wav (new binary file, not shown)
apps/beep/beep2.wav (new binary file, not shown)
@@ -44,7 +44,7 @@ RUN echo "Hello world! MIX_ENV=${MIX_ENV}"
 
 
 # install mix dependencies
-COPY ./apps/bright/mix.exs /apps/bright/mix.lock ./
+COPY ./mix.exs ./mix.lock ./
 RUN mix deps.get --only $MIX_ENV
 RUN mkdir config
 RUN mkdir contrib
@@ -53,20 +53,20 @@ RUN mkdir contrib
 # copy compile-time config files before we compile dependencies
 # to ensure any relevant config change will trigger the dependencies
 # to be re-compiled.
-COPY ./apps/bright/config/config.exs ./apps/bright/config/${MIX_ENV}.exs config/
+COPY ./config/config.exs ./config/${MIX_ENV}.exs config/
 # COPY ./packages/ueberauth_patreon /app/contrib/ueberauth_patreon
 # COPY ./packages/ueberauth_github /app/contrib/ueberauth_github
 RUN ls -la /app/contrib/
 
 RUN mix deps.compile
 
-COPY ./apps/bright/priv priv
+COPY ./priv priv
 
-COPY ./apps/bright/lib lib
+COPY ./lib lib
 
-COPY ./apps/bright/assets assets
+COPY ./assets assets
 
-COPY ./apps/bright/test test
+# COPY ./test test #test is .dockerignored
 
 
 # compile assets
@@ -76,15 +76,15 @@ RUN mix assets.deploy
 RUN mix compile
 
 # Changes to config/runtime.exs don't require recompiling the code
-COPY ./apps/bright/config/runtime.exs config/
+COPY ./config/runtime.exs config/
 
-COPY ./apps/bright/rel rel
+COPY ./rel rel
 RUN mix release
 
 
 ## dev target
 FROM builder AS dev
-COPY ./apps/bright/config/test.exs config/test.exs
+COPY ./config/test.exs config/test.exs
 RUN ls -la ./contrib/
 RUN mkdir -p ~/.cache/futureporn
 CMD [ "mix", "phx.server" ]
@@ -32,9 +32,9 @@ config :bright,
 
 config :bright, :torrent,
 tracker_url: System.get_env("TRACKER_URL"),
-whitelist_url: System.get_env("WHITELIST_URL"),
-whitelist_username: System.get_env("WHITELIST_USERNAME"),
-whitelist_password: System.get_env("WHITELIST_PASSWORD")
+whitelist_url: System.get_env("WL_URL"),
+whitelist_username: System.get_env("WL_USERNAME"),
+whitelist_password: System.get_env("WL_PASSWORD")
 
 config :bright, :buckets, media: System.get_env("AWS_BUCKET")
 
@@ -10,7 +10,7 @@ config :bcrypt_elixir, :log_rounds, 1
 # Run `mix help test` for more information.
 config :bright, Bright.Repo,
 database: System.get_env("DB_NAME", "bright"),
-hostname: System.get_env("DB_HOST", "db"),
+hostname: System.get_env("DB_HOST", "localhost"),
 username: System.get_env("DB_USER", "postgres"),
 password: System.get_env("DB_PASS", "password"),
 # database: "bright_test#{System.get_env("MIX_TEST_PARTITION")}",
@@ -20,9 +20,20 @@ defmodule Bright.Cache do
 |> String.replace(~r/[^a-zA-Z0-9]/, "")
 
 base = Path.basename(input)
-"#{prefix}-#{base}"
+"#{prefix}/#{base}"
 end
 
+# @doc """
+# get a filename in the cache directory.
+# the path contains a sha256 hash of the file basename.
+# thus for any given input, output filename will always be the same.
+# """
+# def deterministic_filename(input) do
+# input
+# |> Path.basename()
+# |> sha256sum_truncate()
+# end
+
 def generate_basename(input, ext) do
 if is_nil(input), do: raise("generate_basename was called with nil argument")
 
@@ -33,11 +44,15 @@ defmodule Bright.Cache do
 end
 
 def generate_filename(input) do
-Path.join(@cache_dir, generate_basename(input))
+filename = Path.join(@cache_dir, generate_basename(input))
+File.mkdir_p!(Path.dirname(filename))
+filename
 end
 
 def generate_filename(input, ext) do
-Path.join(@cache_dir, generate_basename(input, ext))
+filename = Path.join(@cache_dir, generate_basename(input, ext))
+File.mkdir_p!(Path.dirname(filename))
+filename
 end
 
 def get_cache_dir do
@@ -104,4 +119,23 @@ defmodule Bright.Cache do
 {:error, reason}
 end
 end
+
+# @doc """
+# Generates a SHA-256 hash of the input string and truncates it to 10 characters.
+
+# ## Parameters
+# - `input`: A string to be hashed.
+
+# ## Returns
+# A string representing the first 10 characters of the SHA-256 hash in hexadecimal format.
+
+# ## Examples
+# iex> Cache.sha256sum_truncate("hello world")
+# "2cf24dba5f"
+# """
+# defp sha256sum_truncate(input) do
+# hash = :crypto.hash(:sha256, input) |> Base.encode16(case: :lower)
+
+# String.slice(hash, 0..9)
+# end
 end
@@ -26,9 +26,9 @@ defmodule Bright.ObanWorkers.CreateTorrent do
 cdn_url: cdn_url,
 magnet: tf.magnet
 }),
-{:ok, _} <- Tracker.whitelist_info_hash(torrent.info_hash_v1),
-{:ok, _} <- Tracker.announce_torrent(torrent.info_hash_v1) do
-# {:ok, updated_vod} <- Streams.update_vod(vod, %{}) do
+{_, _} <- Tracker.whitelist_info_hash(torrent.info_hash_v1),
+{_, _} <- Tracker.whitelist_info_hash(torrent.info_hash_v2),
+{:ok, _updated_vod} <- Streams.update_vod(vod, %{}) do
 {:ok, torrent}
 end
 end
@@ -86,13 +86,40 @@ defmodule Bright.Torrentfile do
 
 def create(%Vod{} = vod, input_path) do
 output_path = Cache.generate_filename("vod-#{vod.id}", "torrent")
-tracker_url = bittorrent_tracker_url()
-site_url = site_url()
+
+# FYI for deterministic test purposes, tracker_url and site_url have no effect on the info_hash.
+tracker_url = "udp://tracker.futureporn.net/"
+site_url = "https://futureporn.net/"
 comment = site_url
-source_url = URI.parse(site_url) |> URI.append_path("/vods/#{vod.id}") |> URI.to_string()
+
+# Setting the source_url to https://futureporn.net/vods/n would be cool,
+# but doing that means getting a different info_hash every time during testing.
+# we want a consistent, deterministic info_hash for our integration tests.
+# there is probably a way to have our cake and eat it too, but
+# for now in order to have a consistent info_hash, we settle
+# for site_url instead of a more specific URL
+# @see https://stackoverflow.com/a/28601408/1004931
+source_url = site_url
 web_seed_url = vod.s3_cdn_url
 meta_version = 3
-create(input_path, output_path, tracker_url, comment, source_url, web_seed_url, meta_version)
+
+Logger.debug(
+"source_url=#{source_url}, output_path=#{output_path}, tracker_url=#{tracker_url}"
+)
+
+idk =
+create(
+input_path,
+output_path,
+tracker_url,
+comment,
+source_url,
+web_seed_url,
+meta_version
+)
+
+Logger.debug(inspect(idk))
+idk
 end
 
 def create(
@@ -114,7 +141,7 @@ defmodule Bright.Torrentfile do
 "0",
 "--out",
 output_path,
-"-a",
+"--announce",
 tracker_url,
 "--source",
 source_url,
@@ -1,5 +1,4 @@
 defmodule Bright.Tracker do
-alias Bright.BittorrentUrlEncoder
 require Logger
 
 def tracker_url do
@@ -20,11 +19,15 @@ defmodule Bright.Tracker do
 """
 @spec whitelist_url() :: binary()
 def whitelist_url do
-case Application.fetch_env!(:bright, :torrent)[:whitelist_url] do
-nil -> raise "whitelist_url missing or empty in app config"
-"" -> raise "whitelist_url missing or empty in app config"
-url -> url
-end
+url =
+case Application.fetch_env!(:bright, :torrent)[:whitelist_url] do
+nil -> raise "whitelist_url missing or empty in app config"
+"" -> raise "whitelist_url missing or empty in app config"
+url -> url
+end
+
+Logger.debug("whitelist_url=#{url}")
+url
 end
 
 @spec whitelist_username() :: binary()
@@ -54,51 +57,6 @@ defmodule Bright.Tracker do
 end
 end
 
-@spec announce_torrent(binary()) ::
-{:error, any()} | {:ok, binary() | list() | integer() | map()}
-def announce_torrent(info_hash) do
-encoded_info_hash = BittorrentUrlEncoder.encode(info_hash)
-
-Logger.debug(
-"announce_torrent with info_hash=#{info_hash}, encoded_info_hash=#{encoded_info_hash}"
-)
-
-url =
-tracker_url()
-|> URI.parse()
-|> URI.append_query("info_hash=#{encoded_info_hash}")
-|> URI.to_string()
-
-body = []
-headers = []
-
-case HTTPoison.get(url, body, headers) do
-{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
-Logger.debug(inspect(Bento.decode(body)))
-
-case Bento.decode(body) do
-{:ok, decoded} ->
-case decoded do
-%{"failure reason" => failure_reason} -> {:error, failure_reason}
-_ -> {:ok, decoded}
-end
-
-{:error, reason} ->
-{:error, reason}
-end
-
-{:ok, %HTTPoison.Response{status_code: status, body: body}} ->
-{:error, %{status: status, body: body}}
-
-{:error, %HTTPoison.Error{reason: reason}} ->
-{:error, reason}
-
-failed ->
-Logger.error("Failed to POST. #{inspect(failed)}")
-{:error, :failed}
-end
-end
-
 def whitelist_info_hash(info_hash) do
 whitelist_url = whitelist_url()
 username = whitelist_username()
@@ -128,7 +86,7 @@ defmodule Bright.Tracker do
 ]
 
 case HTTPoison.post(url, info_hash, headers) do
-{:ok, %HTTPoison.Response{status_code: 200, body: response_body}} ->
+{:ok, %HTTPoison.Response{status_code: 201, body: response_body}} ->
 Logger.info("Successfully whitelisted info_hash=#{info_hash}")
 {:ok, response_body}
 
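Note: with this change, whitelist_info_hash/1 now treats HTTP 201 from tracker-helper as success. A rough command-line equivalent of that request, as an illustrative sketch only (the exact headers the Elixir client sends are not shown in this diff; the info_hash is the test fixture value used elsewhere in this commit):

    curl -u "$WL_USERNAME:$WL_PASSWORD" -X POST --data "da4f5b7724bb17e32f8a38792b007f316b33e962" http://localhost:5063/whitelist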
apps/bright/test-fixture.torrent (new binary file, not shown)
@@ -31,34 +31,37 @@ defmodule Bright.CacheTest do
 url = @sample_url
 filename = Cache.generate_basename(url)
 
-assert Regex.match?(~r/^[a-zA-Z0-9]+-my_video\.mp4$/, filename)
+assert Regex.match?(~r/^[a-zA-Z0-9]+\/my_video\.mp4$/, filename)
 
 # Test with a file path
 path = "/home/cj/Downloads/taco.mp4"
 filename = Cache.generate_basename(path)
 
-assert Regex.match?(~r/^[a-zA-Z0-9]+-taco\.mp4$/, filename)
+assert Regex.match?(~r/^[a-zA-Z0-9]+\/taco\.mp4$/, filename)
 end
 
 @tag :unit
 test "generate_basename/2" do
 filename = Cache.generate_basename(@sample_url, "png")
-assert Regex.match?(~r/^[a-zA-Z0-9]+-my_video\.png/, filename)
+assert Regex.match?(~r/^[a-zA-Z0-9]+\/my_video\.png/, filename)
 end
 
 @tag :unit
 test "generate_filename/1" do
 filename = Cache.generate_filename(@sample_url)
-assert Regex.match?(~r/.cache\/futureporn\/.+-my_video\.mp4/, filename)
+assert Regex.match?(~r/.cache\/futureporn\/.+\/my_video\.mp4/, filename)
 
 filename = Cache.generate_filename("/home/cj/Downloads/test.mp4")
-assert Regex.match?(~r/.cache\/futureporn\/.+-test\.mp4/, filename)
+assert Regex.match?(~r/.cache\/futureporn\/.+\/test\.mp4/, filename)
 
+assert File.exists?(Path.dirname(filename))
+assert not File.exists?(filename)
 end
 
 @tag :unit
 test "generate_filename/2" do
 filename = Cache.generate_filename(@sample_url, "png")
-assert Regex.match?(~r/.cache\/futureporn\/.+-my_video\.png/, filename)
+assert Regex.match?(~r/.cache\/futureporn\/.+\/my_video\.png/, filename)
 end
 end
 end
@@ -13,7 +13,7 @@ defmodule Bright.DownloaderTest do
 assert File.exists?(local_file)
 {:ok, stat} = File.stat(local_file)
 assert stat.size > 0, "File is empty"
-assert Regex.match?(~r/.cache\/futureporn\/.+-projekt-melody\.jpg/, local_file)
+assert Regex.match?(~r/.cache\/futureporn\/.+\/projekt-melody\.jpg/, local_file)
 end
 
 @tag :integration
@@ -13,7 +13,7 @@ defmodule Bright.ImagesTest do
 {:ok, filename} =
 Images.create_thumbnail(@test_mp4_fixture)
 
-assert Regex.match?(~r/[a-zA-Z0-9]+-.*\.png$/, filename)
+assert Regex.match?(~r/^\/root\/\.cache\/futureporn\/[^\/]+\/[^\/]+\.png$/, filename)
 assert File.exists?(filename)
 assert File.stat!(filename).size > 0, "thumbnail file is empty"
 end
@@ -7,8 +7,7 @@ defmodule Bright.TorrentfileTest do
 @test_ts_fixture "./test/fixtures/test-fixture.ts"
 @test_tracker_url "http://localhost:6969/announce"
 @test_web_seed_url "https://futureporn-b2.b-cdn.net/test-fixture.ts"
-@test_source_url "https://futureporn.net/vods/69"
-@test_comment "https://futureporn.net"
+@test_site_url "https://futureporn.net"
 
 describe "torrentfile" do
 import Bright.StreamsFixtures
@@ -27,13 +26,24 @@ defmodule Bright.TorrentfileTest do
 test "create/2" do
 input_path = @test_ts_fixture
 stream = stream_fixture()
-vod = vod_fixture(%{stream_id: stream.id})
+
+vod =
+vod_fixture(%{
+stream_id: stream.id,
+s3_cdn_url: "https://futureporn-b2.b-cdn.net/test-fixture.ts"
+})
+
 {:ok, output} = Torrentfile.create(vod, input_path)
 assert :ok
 assert is_binary(output.save_path)
 assert output.save_path =~ ".torrent"
 assert is_binary(output.btih)
 assert is_binary(output.btmh)
+assert output.btih === "7eb6caf98a7e727004ddbdbbd2035cb58300899a"
+
+assert output.btmh ===
+"1220f3292c3088ede7ceb29c335ad2ce690c8b934ecd03cde2daaf95ac82327eb25b"
+
 assert File.exists?(output.save_path)
 end
 
@@ -41,8 +51,8 @@ defmodule Bright.TorrentfileTest do
 input_path = @test_ts_fixture
 output_path = Cache.generate_filename("test", "torrent")
 tracker_url = @test_tracker_url
-comment = @test_comment
-source_url = @test_source_url
+comment = @test_site_url
+source_url = @test_site_url
 web_seed_url = @test_web_seed_url
 meta_version = 3
 
@@ -58,10 +68,12 @@ defmodule Bright.TorrentfileTest do
 )
 
 assert :ok
 
 assert is_binary(output.save_path)
 assert output.save_path === output_path
+assert output.btih === "da4f5b7724bb17e32f8a38792b007f316b33e962"
 assert is_binary(output.btih)
-assert is_binary(output.btmh)
+# assert is_binary(output.btmh)
 assert File.exists?(output_path)
 end
 
@@ -90,7 +90,7 @@ defmodule Bright.TorrentsTest do
 
 # input_path = Path.absname("./test/fixtures/test-fixture.ts")
 # output_path = Cache.generate_filename("test", "torrent")
-# tracker_url = "https://tracker.futureporn.net/announce"
+# tracker_url = "udp://tracker.futureporn.net/"
 # source_url = "https://futureporn.net/vods/69"
 # comment = "https://futureporn.net"
 # web_seed_url = @test_fixture
@@ -15,35 +15,9 @@ defmodule Bright.TrackerTest do
 @tag :integration
 test "whitelist_info_hash/1 using a string info_hash" do
 case Tracker.whitelist_info_hash(@info_hash_v1_fixture) do
-{:ok, result} ->
+{:ok, info_hash} ->
 assert :ok
-assert result === "Successfully added to whitelist"
-
-{:error, :closed} ->
-flunk("The connection to opentracker was closed. Is opentracker running?")
-
-other ->
-flunk("Unexpected result: #{inspect(other)}")
-end
-end
-
-@tag :integration
-test "announce_torrent/1 using a string info_hash" do
-case Tracker.announce_torrent(@info_hash_v1_fixture) do
-{:ok, body} ->
-# Adjust based on expected structure
-assert is_map(body) or is_list(body)
-
-{:error, "Requested download is not authorized for use with this tracker."} ->
-Logger.warning(
-"info_hash '#{@info_hash_v1_fixture}' is not on the tracker's whitelist."
-)
-
-Logger.warning(
-"Since this is an integration test, and the tracker behavior is not the unit under test, we are passing the test."
-)
-
-assert true
+assert info_hash === @info_hash_v1_fixture
 
 {:error, :closed} ->
 flunk("The connection to opentracker was closed. Is opentracker running?")
@@ -4,19 +4,22 @@
 #
 FROM gcc:14 AS compile-stage
 
-ARG TINI_VERSION=v0.19.0
-ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-static /tini
-RUN chmod +x /tini
 
 RUN apt update ; \
 apt install cvs -y
 
+# @todo harden
+# RUN adduser \
+# --system --disabled-login \
+# --uid 6969 --group \
+# --home /etc/opentracker \
+# farmhand
 RUN adduser \
---system --disabled-login \
+--system \
 --uid 6969 --group \
 --home /etc/opentracker \
 farmhand
 
 
 WORKDIR /usr/src
 
 
@@ -44,7 +47,9 @@ RUN cd /usr/src/opentracker ; \
 # Opentrack configuration file
 sed -ri \
 -e 's!(.*)(tracker.user)(.*)!\2 farmhand!g;' \
+-e 's!(.*)(access.fifo_add)(.*)!\2 /etc/opentracker/adder.fifo!g;' \
 -e 's!(.*)(access.whitelist)(.*)!\2 /etc/opentracker/whitelist!g;' \
+-e '/^\s*#/d;/^\s*$/d' \
 /tmp/stage/etc/opentracker/opentracker.conf ; \
 install -m 755 opentracker.debug /tmp/stage/bin ; \
 make DESTDIR=/tmp/stage BINDIR="/bin" install ; \
@@ -54,27 +59,31 @@ RUN cd /usr/src/opentracker ; \
 
 FROM alpine
 
-COPY --from=compile-stage /tini /
+RUN apk add curl tini
 
 COPY --from=compile-stage /tmp/stage /
 COPY --from=compile-stage /etc/passwd /etc/passwd
-COPY ./opentracker.conf /etc/opentracker/opentracker.conf
+
+# da4f5b7724bb17e32f8a38792b007f316b33e962 -- test-fixture.ts
+# We have some acceptance tests which depend on this info_hash being present in the whitelist.
+RUN echo "da4f5b7724bb17e32f8a38792b007f316b33e962" >> /etc/opentracker/whitelist
+
+# adder.fifo gets 0666 perms so tracker-helper (separate container) can write
+RUN touch /etc/opentracker/whitelist
 RUN chown -R 6969:6969 /etc/opentracker ; \
 chmod 0664 /etc/opentracker/whitelist ; \
-chmod 0664 /etc/opentracker/adder.fifo
+chmod 0666 /etc/opentracker/adder.fifo
 
 WORKDIR /etc/opentracker
 
 
 USER 6969
-RUN touch /etc/opentracker/whitelist
-RUN ls -lash /etc/opentracker/
 
 EXPOSE 6969/udp
 EXPOSE 6969/tcp
 
-HEALTHCHECK --interval=5s --timeout=3s --retries=5 \
+HEALTHCHECK --interval=5s --timeout=3s --retries=3 \
 CMD curl -f http://localhost:6969/stats || exit 1
 
-ENTRYPOINT ["/tini", "--", "/bin/opentracker"]
+ENTRYPOINT ["tini", "--", "/bin/opentracker"]
 CMD ["-f", "/etc/opentracker/opentracker.conf"]
apps/opentracker/README.md (new file, 12 lines)
@@ -0,0 +1,12 @@
+# opentracker
+
+## statistics
+
+
+API endpoints
+
+* http://localhost:6969/stats?mode=everything
+* http://localhost:6969/stats?mode=conn
+* http://localhost:6969/stats?mode=version
+
+more modes listed at https://erdgeist.org/arts/software/opentracker/#toc-entry-7
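A quick way to check these endpoints from the host, assuming the container publishes 6969/tcp as the Dockerfile's EXPOSE and HEALTHCHECK imply:

    curl -f "http://localhost:6969/stats?mode=everything"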
@@ -1,5 +1,6 @@
-# opentracker config file
+# For reference only. this file is not copied to the dockerfile (we use `sed` to modify the existing dockerfile).
 #
+# opentracker config file
+
 # I) Address opentracker will listen on, using both, tcp AND udp family
 # (note, that port 6969 is implicit if omitted).
@@ -107,10 +107,31 @@ accessories:
 - opentracker-etc:/etc/opentracker
 - opentracker-var:/var/run/opentracker
 
-opentracker:
-image: gitea.futureporn.net/futureporn/opentracker:latest
+# opentracker:
+# image: gitea.futureporn.net/futureporn/opentracker:latest
+# host: 45.76.57.101
+# port: "127.0.0.1:6969:6969"
+# env:
+# clear:
+# WHITELIST_FEED_URL: https://bright.futureporn.net/torrents
+# secret:
+# - WHITELIST_USERNAME
+# - WHITELIST_PASSWORD
+# proxy:
+# ssl: true
+# forward_headers: true
+# app_port: 6969
+# host: tracker.futureporn.net
+# healthcheck:
+# path: /stats
+# volumes:
+# - opentracker-etc:/etc/opentracker
+# - opentracker-var:/var/run/opentracker
+
+aquatic:
+image: gitea.futureporn.net/futureporn/aquatic:latest
 host: 45.76.57.101
-port: "127.0.0.1:6969:6969"
+port: "127.0.0.1:3000:3000"
 env:
 clear:
 WHITELIST_FEED_URL: https://bright.futureporn.net/torrents
@@ -120,10 +141,12 @@ accessories:
 proxy:
 ssl: true
 forward_headers: true
-app_port: 6969
+app_port: 3000
 host: tracker.futureporn.net
-healthcheck:
-path: /stats
+## we can't do the healthcheck on the prometheus port because kamal only allows one port per container
+## @blocking https://github.com/basecamp/kamal-proxy/issues/48
+# healthcheck:
+# path: /stats
 volumes:
 - opentracker-etc:/etc/opentracker
 - opentracker-var:/var/run/opentracker
devbox.json (11 changes)
@@ -13,7 +13,8 @@
 "bento4@latest",
 "shaka-packager@latest",
 "mktorrent@latest",
-"entr@latest"
+"entr@latest",
+"act@latest"
 ],
 "env": {
 "DEVBOX_COREPACK_ENABLED": "true",
@@ -30,13 +31,17 @@
 "scripts": {
 "tunnel": "dotenvx run -f ./.kamal/secrets.development -- chisel client bright.fp.sbtp.xyz:9090 R:4000",
 "backup": "docker exec -t postgres_db pg_dumpall -c -U postgres > ./backups/dev_`date +%Y-%m-%d_%H_%M_%S`.sql",
-"act": "act -W ./.gitea/workflows --secret-file .kamal/secrets.development",
+"act": "dotenvx run -f ./.kamal/secrets.testing -- act -W ./.gitea/workflows --secret-file .kamal/secrets.development",
+"act:builder": "dotenvx run -f ./.kamal/secrets.testing -- act -W ./.gitea/workflows/builder.yaml --secret-file .kamal/secrets.testing --var-file .kamal/secrets.testing --insecure-secrets",
+"act:tests": "dotenvx run -f ./.kamal/secrets.testing -- act -W ./.gitea/workflows/tests.yaml --secret-file .kamal/secrets.testing --var-file .kamal/secrets.testing --insecure-secrets",
 "bright:compile:watch": "cd ./apps/bright && find . -type f -name \"*.ex\" -o -name \"*.exs\" | entr -r mix compile --warnings-as-errors",
 "bright:compile:watch2": "cd ./apps/bright && pnpx chokidar-cli \"**/*\" -i \"deps/**\" -i \"_build/**\" -c \"mix compile --warnings-as-errors\"",
 "bright:dev": "cd ./apps/bright && dotenvx run -f ../../.kamal/secrets.development -e MIX_ENV=dev -- mix phx.server",
 "bright:test:unit:watch": "cd ./apps/bright && pnpx chokidar-cli '**/*' -i \"deps/**\" -i '_build/**' -c 'mix test --only=unit'",
 "bright:act": "cd ./apps/bright && act --env MIX_ENV=test -W ./.gitea/workflows/tests.yaml --secret-file .kamal/secrets.development",
-"test": "act -W ./.gitea/workflows/tests.yaml --secret-file .kamal/secrets.development && beep || boop"
+"test": "act -W ./.gitea/workflows/tests.yaml --secret-file .kamal/secrets.development && devbox run beep || devbox run boop",
+"beep": "ffplay -nodisp -loglevel quiet -autoexit ./apps/beep/beep2.wav",
+"boop": "ffplay -nodisp -loglevel quiet -autoexit ./apps/beep/beep1.wav"
 }
 }
 }
devbox.lock (51 changes)
@@ -1,6 +1,54 @@
 {
 "lockfile_version": "1",
 "packages": {
+"act@latest": {
+"last_modified": "2025-02-07T11:26:36Z",
+"resolved": "github:NixOS/nixpkgs/d98abf5cf5914e5e4e9d57205e3af55ca90ffc1d#act",
+"source": "devbox-search",
+"version": "0.2.72",
+"systems": {
+"aarch64-darwin": {
+"outputs": [
+{
+"name": "out",
+"path": "/nix/store/69mqjq6ysm38yppm5l0a68zaxfk3jsb5-act-0.2.72",
+"default": true
+}
+],
+"store_path": "/nix/store/69mqjq6ysm38yppm5l0a68zaxfk3jsb5-act-0.2.72"
+},
+"aarch64-linux": {
+"outputs": [
+{
+"name": "out",
+"path": "/nix/store/di3cp7yr4dq07byl8hm8xwnas7hn8xcn-act-0.2.72",
+"default": true
+}
+],
+"store_path": "/nix/store/di3cp7yr4dq07byl8hm8xwnas7hn8xcn-act-0.2.72"
+},
+"x86_64-darwin": {
+"outputs": [
+{
+"name": "out",
+"path": "/nix/store/yblc9543pbzncgy0q4bfdj8h7nrs35am-act-0.2.72",
+"default": true
+}
+],
+"store_path": "/nix/store/yblc9543pbzncgy0q4bfdj8h7nrs35am-act-0.2.72"
+},
+"x86_64-linux": {
+"outputs": [
+{
+"name": "out",
+"path": "/nix/store/k7nqxipi81pzfdbh2a19np9q84qmgj3w-act-0.2.72",
+"default": true
+}
+],
+"store_path": "/nix/store/k7nqxipi81pzfdbh2a19np9q84qmgj3w-act-0.2.72"
+}
+}
+},
 "bento4@latest": {
 "last_modified": "2025-01-25T23:17:58Z",
 "resolved": "github:NixOS/nixpkgs/b582bb5b0d7af253b05d58314b85ab8ec46b8d19#bento4",
@@ -293,6 +341,9 @@
 }
 }
 },
+"github:NixOS/nixpkgs/nixpkgs-unstable": {
+"resolved": "github:NixOS/nixpkgs/ba0939c506a03c60a765cd7f7c43794816540eec?lastModified=1739482815&narHash=sha256-%2F5Lwtmp%2F8j%2Bro32gXzitucSdyjJ6QehfJCL58WNA7N0%3D"
+},
 "hcloud@latest": {
 "last_modified": "2024-12-23T21:10:33Z",
 "resolved": "github:NixOS/nixpkgs/de1864217bfa9b5845f465e771e0ecb48b30e02d#hcloud",
@@ -2,8 +2,7 @@ services:
 
 opentracker:
 build:
-context: .
-dockerfile: dockerfiles/opentracker.dockerfile
+context: ./apps/opentracker
 container_name: opentracker
 environment:
 - WHITELIST_FEED_URL=http://bright:4000/torrents
@@ -48,7 +47,7 @@ services:
 # DATABASE_HOSTNAME: db
 # SUPERSTREAMER_URL: http://superstreamer-api:52001
 # PUBLIC_S3_ENDPOINT: https://fp-dev.b-cdn.net
-# BT_TRACKER_URL: https://tracker.futureporn.net/announce
+# BT_TRACKER_URL: udp://tracker.futureporn.net
 # BT_TRACKER_ACCESSLIST_URL: http://opentracker:8666
 # SITE_URL: https://futureporn.net
 # env_file:
@@ -92,7 +91,7 @@ services:
 
 db:
 image: postgres:15
-container_name: futureporn-db
+container_name: db
 environment:
 POSTGRES_USER: postgres
 POSTGRES_PASSWORD: password
@@ -1,6 +1,11 @@
+# syntax=docker/dockerfile:1
+
 # use the official Bun image
 # see all versions at https://hub.docker.com/r/oven/bun/tags
-FROM oven/bun:1 AS base
+FROM oven/bun:1-alpine AS base
+RUN apk add --no-cache curl tini
+
 
 WORKDIR /usr/src/app
 
 # install dependencies into temp directory
@@ -22,8 +27,9 @@ COPY --from=install /temp/dev/node_modules node_modules
 COPY . .
 
 # [optional] tests & build
-ENV NODE_ENV=production WL_FILE_PATH=/usr/src/app/test/fixtures/whitelist WL_FIFO_PATH=/tmp/adder.fifo
-RUN --mount=type=secret,id=WL_CREDENTIALS,env=WL_CREDENTIALS,required=true \
+ENV NODE_ENV=test WL_FILE_PATH=/tmp/whitelist
+RUN --mount=type=secret,id=WL_USERNAME,env=WL_USERNAME,required=true \
+--mount=type=secret,id=WL_PASSWORD,env=WL_PASSWORD,required=true \
 bun test
 
 # copy production dependencies and source code into final image
@@ -33,7 +39,12 @@ COPY --from=prerelease /usr/src/app/index.ts .
 COPY --from=prerelease /usr/src/app/app.ts .
 COPY --from=prerelease /usr/src/app/package.json .
 
-# run the app
+HEALTHCHECK --interval=5s --timeout=3s --retries=3 \
+CMD curl -f http://localhost:5063/health || exit 1
+
 USER bun
+WORKDIR /usr/src/app
 EXPOSE 5063/tcp
-ENTRYPOINT [ "bun", "run", "index.ts" ]
+ENTRYPOINT ["tini", "--"]
+CMD ["bun", "run", "/usr/src/app/index.ts"]
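With the build-time secret mounts switched from WL_CREDENTIALS to WL_USERNAME and WL_PASSWORD, the local image build presumably needs both secrets passed; a hedged sketch (note the README below still shows the older single-secret form):

    dotenvx run -f ../../.kamal/secrets.testing -- docker build --secret id=WL_USERNAME --secret id=WL_PASSWORD -t gitea.futureporn.net/futureporn/tracker-helper:latest .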
@@ -21,10 +21,10 @@ This project was created using `bun init` in bun v1.1.42. [Bun](https://bun.sh)
 
 ## building a docker image
 
-tracker-helper unit & integration tests are run during the docker build. That step requires WL_CREDENTIALS env variable, as well as WL_FIFO_PATH and WL_FILE_PATH. WL_CREDENTIALS must be set to admin:admin during that test. Not really a secret at that stage, but to avoid docker complaining about "CREDENTIALS" in env, we pass it as a build `--secret`. The other two env vars are loaded from `secrets.testing`.
+tracker-helper unit & integration tests are run during the docker build. That step requires WL_CREDENTIALS env variable, as well as WL_FILE_PATH. WL_CREDENTIALS must be set to admin:admin during that test. Not really a secret at that stage, but to avoid docker complaining about "CREDENTIALS" in env, we pass it as a build `--secret`. The other two env vars are loaded from `secrets.testing`.
 
 dotenvx run -f ../../.kamal/secrets.testing -- docker build --secret id=WL_CREDENTIALS -t gitea.futureporn.net/futureporn/tracker-helper:latest .
 
 When validating the container before pushing to production, it can be run as follows
 
-dotenvx run -f ../../.kamal/secrets.production -- docker run -it --init --rm -p 5063:5063 -e WL_CREDENTIALS -e WL_FILE_PATH -e WL_FIFO_PATH fp/tracker-helper
+dotenvx run -f ../../.kamal/secrets.production -- docker run -it --init --rm -p 5063:5063 -e WL_CREDENTIALS -e WL_FILE_PATH fp/tracker-helper
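Once the container is running with the port mapping above, a quick smoke test (sketch, assuming the test-time admin:admin credential mentioned above; use the real WL_USERNAME/WL_PASSWORD against production) is to hit the unauthenticated health endpoint and the basic-auth-protected version endpoint:

    curl http://localhost:5063/health
    curl -u admin:admin http://localhost:5063/version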
@ -1,71 +1,153 @@
|
|||||||
import { Elysia, t, type Context } from 'elysia'
|
import { Elysia, t, type Context } from 'elysia'
|
||||||
import { version } from './package.json';
|
import { version } from './package.json';
|
||||||
import { basicAuth } from '@eelkevdbos/elysia-basic-auth'
|
import { basicAuth } from '@eelkevdbos/elysia-basic-auth'
|
||||||
|
import net from 'net'
|
||||||
|
import { appendFile } from "node:fs/promises";
|
||||||
|
|
||||||
|
if (!process.env.WL_USERNAME) throw new Error('WL_USERNAME missing in env');
|
||||||
|
if (!process.env.WL_PASSWORD) throw new Error('WL_PASSWORD missing in env');
|
||||||
|
|
||||||
const whitelistFilePath = process.env.WL_FILE_PATH || "/etc/opentracker/whitelist"
|
const whitelistFilePath = process.env.WL_FILE_PATH || "/etc/opentracker/whitelist"
|
||||||
const adderFifoFilePath = process.env.WL_FIFO_PATH || "/var/run/opentracker/adder.fifo"
|
const username = process.env.WL_USERNAME!
|
||||||
|
const password = process.env.WL_PASSWORD!
|
||||||
|
|
||||||
|
interface DockerContainer {
|
||||||
|
Id: string;
|
||||||
|
Command: string;
|
||||||
|
}
|
||||||
|
|
||||||
const authOpts = {
|
const authOpts = {
|
||||||
   scope: [
     "/whitelist",
     "/version"
   ],
-  credentials: {
-    env: 'WL_CREDENTIALS'
-  }
+  credentials: [
+    {
+      username: username,
+      password: password
+    }
+  ]
 }
 
-const startupChecks = function startupChecks() {
+const startupChecks = async function startupChecks() {
 
-  if (!process.env.WL_CREDENTIALS) {
-    const msg = `WL_CREDENTIALS is missing in env!`
-    if (process.env.NODE_ENV === "test") {
-      console.warn(msg)
-    } else {
-      throw new Error(msg)
-    }
-  }
-
   if (!process.env.WL_FILE_PATH) {
     console.warn(`WL_FILE_PATH is missing in env. Using default ${whitelistFilePath}`)
   }
 
-  if (!process.env.WL_FIFO_PATH) {
-    console.warn(`WL_FIFO_PATH is missing in env. Using default ${adderFifoFilePath}`)
-  }
-
-  // throw if the whitelist file doesn't exist
-  Bun.file(whitelistFilePath);
-
 }
 
-const getWhitelist = function getWhitelist(ctx: Context) {
+const getWhitelist = async function getWhitelist(ctx: Context) {
   const wl = Bun.file(whitelistFilePath); // relative to cwd
   console.debug(`read from whitelist file at ${whitelistFilePath}. size=${wl.size}, type=${wl.type}`)
   return wl.text()
 }
 
-const postWhitelist = async function postWhitelist(ctx: Context) {
-  const body = ctx.body
-  console.log(`Whitelister is appending ${body} to fifo at ${adderFifoFilePath}`)
-  const fifo = Bun.file(adderFifoFilePath)
-  Bun.write(fifo, body + "\n")
-  console.log(`${body} was sent to the FIFO at ${adderFifoFilePath}`)
-  return body
-}
-
-startupChecks()
+async function findOpentrackerContainer(socketPath = "/var/run/docker.sock"): Promise<DockerContainer | null> {
+  return new Promise((resolve, reject) => {
+    console.debug(`opening net client at socketPath=${socketPath}`)
+    const client = net.createConnection(socketPath, () => {
+      const request = 'GET /containers/json HTTP/1.0\r\n\r\n';
+      client.write(request);
+    });
+
+    console.debug(`waiting for response from socket`)
+    let response = '';
+    client.on('data', (data) => {
+      console.debug(`client got data`)
+      response += data.toString();
+    });
+
+    console.debug(`waiting for connection end`)
+    client.on('end', () => {
+      console.debug(`client end detected`)
+      try {
+        const body = response.split('\r\n\r\n')[1];
+        const containers: DockerContainer[] = JSON.parse(body);
+        const container = containers.find(c => c.Command.includes('/bin/opentracker'));
+        resolve(container || null);
+      } catch (err) {
+        reject(err);
+      }
+    });
+
+    client.on('error', (err) => {
+      console.error(`net client encountered error ${err}`)
+      reject(err);
+    });
+  });
+}
+
+async function killContainer(socketPath = "/var/run/docker.sock", containerId: string, signal = "SIGTERM") {
+  const request = `POST /containers/${containerId}/kill?signal=${signal} HTTP/1.0\r\n\r\n`;
+  return new Promise((resolve, reject) => {
+    const client = net.createConnection(socketPath, () => {
+      client.write(request);
+    });
+
+    client.on('data', (data: any) => {
+      // console.log(data.toString());
+      client.end();
+      resolve(data.toString());
+    });
+
+    client.on('error', (err: any) => {
+      console.error('Error:', err);
+      reject(err);
+    });
+  });
+}
+
+const maybeKillContainer = async function maybeKillContainer(signal: string = "SIGUSR1") {
+  const sockFile = Bun.file('/var/run/docker.sock')
+  const sockFileExists = await sockFile.exists()
+  if (!sockFileExists) {
+    console.warn("⚠️ docker sock file not found. skipping.")
+  } else {
+    console.debug('looking for opentracker container')
+    const container = await findOpentrackerContainer()
+    if (!container) {
+      console.warn('⚠️ failed to find opentracker container');
+    } else {
+      await killContainer(undefined, container.Id, signal)
+      console.debug('sending SIGUSR1 to container ' + container.Id)
+    }
+  }
+}
+
+const postWhitelist = async function postWhitelist(ctx: Context) {
+  let body = ctx.body
+
+  console.debug('appending to whitelist at ' + whitelistFilePath)
+  await appendFile(whitelistFilePath, body + "\n");
+
+  await maybeKillContainer("SIGUSR1")
+
+  ctx.set.status = 201
+
+  return body
+}
+
+await startupChecks()
 
 const app = new Elysia()
   .use(basicAuth(authOpts))
   .get('/health', () => 'OK')
-  .get('/version', () => `version ${version} `)
+  .get('/version', () => `version ${version}`)
   .get('/whitelist', getWhitelist)
   .post('/whitelist', postWhitelist, {
     body: t.String()
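Note: this commit also adds dockerode to the service's dependencies (see the package.json hunk below), although the code above talks to the Docker Engine by hand over HTTP/1.0 on the unix socket. As a minimal sketch only, the same container lookup and signal delivery could be expressed with dockerode; the socket path and the /bin/opentracker match come from the code above, while the function name signalOpentracker is illustrative and not part of the commit:

// Hypothetical alternative sketch, not what this commit ships.
import Docker from 'dockerode'

const docker = new Docker({ socketPath: '/var/run/docker.sock' })

async function signalOpentracker(signal = 'SIGUSR1') {
  // list running containers and find the one whose command is /bin/opentracker
  const containers = await docker.listContainers()
  const info = containers.find(c => c.Command.includes('/bin/opentracker'))
  if (!info) {
    console.warn('⚠️ failed to find opentracker container')
    return
  }
  // equivalent of POST /containers/{id}/kill?signal=SIGUSR1
  await docker.getContainer(info.Id).kill({ signal })
}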
Binary file not shown.
@@ -13,9 +13,12 @@
   "dependencies": {
     "@eelkevdbos/elysia-basic-auth": "^2.0.1",
     "@elysiajs/eden": "^1.2.0",
+    "@types/dockerode": "^3.3.34",
+    "dockerode": "^4.0.4",
     "elysia": "^1.2.12"
   },
   "scripts": {
+    "test": "dotenvx run -f ../../.kamal/secrets.testing -- bun test",
     "docker.build": "dotenvx run -f ../../.kamal/secrets.testing -- docker build --secret id=WL_CREDENTIALS -t gitea.futureporn.net/futureporn/tracker-helper:latest .",
     "docker.run": "dotenvx run -f ../../.kamal/secrets.development -- docker run -e WL_CREDENTIALS -p 5063:5063 -t gitea.futureporn.net/futureporn/tracker-helper:latest",
     "docker.push": "docker push gitea.futureporn.net/futureporn/tracker-helper:latest"
@@ -2,6 +2,7 @@ import {
   describe
   , expect
   , it
+  , beforeEach
 } from 'bun:test'
 import {
   Elysia
@@ -10,22 +11,19 @@ import {
   treaty
 } from '@elysiajs/eden'
 import app from '../app.ts'
+import Docker from 'dockerode'
 
-if (!process.env.WL_FIFO_PATH) throw new Error("WL_FIFO_PATH is missing in env.");
-if (!process.env.WL_CREDENTIALS) throw new Error("WL_CREDENTIALS is missing in env.");
-
-function getCredentialsFromEnv(envValue?: string): { username: string; password: string } {
-  if (!envValue) throw new Error("WL_CREDENTIALS is not set");
-
-  const firstCredential = envValue.split(";")[0]; // Get the first username:password pair
-  const [username, password] = firstCredential.split(":");
-
-  if (!username || !password) throw new Error("Invalid credentials format");
-
-  return { username, password };
-}
-const { username, password } = getCredentialsFromEnv(process.env.WL_CREDENTIALS)
+if (!process.env.WL_FILE_PATH) throw new Error("WL_FILE_PATH is missing in env");
+if (!process.env.WL_USERNAME) throw new Error("WL_USERNAME is missing in env.");
+if (!process.env.WL_PASSWORD) throw new Error("WL_PASSWORD is missing in env.");
+
+const whitelistFilePath = process.env.WL_FILE_PATH!
+const fixture = "3aa5ad5e62eaffd148cff3dbe93ff2e1e9cbcf01"
+
+const username = process.env.WL_USERNAME!
+const password = process.env.WL_PASSWORD!
 const opts = {
   headers: {
     authorization: "Basic " + btoa(username + ':' + password)
@@ -34,8 +32,31 @@ const opts = {
 const api = treaty(app)
 
-describe('Elysia', () => {
+describe('tracker-helper', () => {
+
+  beforeEach(() => {
+    let whitelistFilePath = process.env.WL_FILE_PATH!
+    console.log(`Asserting existance of whitelist at ${whitelistFilePath}`)
+    // create whitelist file if it doesn't exist
+    const assertWhitelistExists = async function assertWhitelistExists(whitelistFilePath: string) {
+      const wlFile = Bun.file(whitelistFilePath);
+      const exists = await wlFile.exists()
+      if (!exists) {
+        console.log(`creating whitelist file at ${whitelistFilePath}`)
+        await wlFile.write("")
+      }
+    }
+    const clearWhitelist = async function clearWhitelist(whitelistFilePath: string) {
+      const wlFile = Bun.file(whitelistFilePath);
+      await wlFile.write("")
+    }
+    assertWhitelistExists(whitelistFilePath)
+    clearWhitelist(whitelistFilePath)
+  });
 
   it('return a health response', async () => {
     const { data, status } = await api.health.get()
     expect(status).toBe(200)
@@ -49,34 +70,81 @@ describe
   })
 
   it('return a whitelist', async () => {
-    const { data, status } = await api.whitelist.get(opts)
-    expect(status).toBe(200)
-    expect(data).toContain("07b4516336e4afe9232c73bc312642590a7d7e95")
-  })
-
-  it('writes a new info_hash to a fifo', async () => {
-    const fifoFilePath = process.env.WL_FIFO_PATH!
-    const fifo = Bun.file(fifoFilePath)
-    const fifoExists = await fifo.exists();
-
-    // create fifo if it doesn't exist
-    if (!fifoExists) {
-      await Bun.spawn(["mkfifo", fifoFilePath]).exited;
-    }
-
-    // Start a process to read from the FIFO
-    const reader = Bun.spawn(["cat", fifoFilePath], { stdout: "pipe" });
-
-    const { data, status } = await api.whitelist.post("3aa5ad5e62eaffd148cff3dbe93ff2e1e9cbcf01", opts)
-
-    const text = await new Response(reader.stdout).text();
-
-    expect(text).toBe("3aa5ad5e62eaffd148cff3dbe93ff2e1e9cbcf01\n")
+    const seedWhitelist = async function clearWhitelist(p: string, f: string) {
+      const wlFile = Bun.file(p);
+      await wlFile.write(f)
+    }
+    await seedWhitelist(whitelistFilePath, fixture)
+    const { data, status } = await api.whitelist.get(opts)
     expect(status).toBe(200)
     expect(data).toContain("3aa5ad5e62eaffd148cff3dbe93ff2e1e9cbcf01")
   })
 
+  it('expects the whitelist to already exist', async () => {
+    const whitelist = Bun.file(whitelistFilePath)
+    const whitelistExists = await whitelist.exists()
+    expect(whitelistExists).toBe(true)
+  })
+
+  it('appends a new info_hash to the whitelist file', async () => {
+
+    // make an api call which is supposed to add an entry to the whitelist
+    const { data, status } = await api.whitelist.post(fixture, opts)
+
+    // assert that the entry has been added to the whitelist
+    const w = Bun.file(whitelistFilePath)
+    const whitelistAfter = await w.text()
+    console.log('whitelistAfter as follows')
+    console.log(whitelistAfter)
+
+    expect(status).toBe(201)
+    expect(data).toMatch(fixture)
+    expect(whitelistAfter).toMatch(fixture)
+
+  })
+
+  // it('sends a SIGHUP to opentracker', async () => {
+  //   const { data, status } = await api.whitelist.post(fixture, opts)
+  //   const containerId = "act-ci-Tests-Checks-6e6f12196682961041a41a25b9d0dcf00e4d0f8e58f-7cb37eebfe9e1670328d58ad1f7c7bdf0fa078298ca6dd299e67d0141a4b9579"
+  //   // await docker.getContainer(containerId).kill({ signal: 'SIGHUP' })
+  //   let container = await docker.getContainer(containerId)
+  //   container.inspect
+  // })
+
+  // // This is skipped because I couldn't figure out opentracker's whitelist add/delete via FIFO functionality.
+  // // I got as far as writing to the FIFO, and seeing opentracker acknowledge the line in it's logs.
+  // // Despite this, requests from qbittorrent to opentracker responded with,
+  // // "Requested download is not authorized for use with this tracker"
+  // // About a week on this problem, and I give up! Using the whitelist reloading strat instead.
+  // it.skip('writes a new info_hash to a fifo', async () => {
+  //   const fifoFilePath = process.env.WL_FIFO_PATH!
+  //   const fifo = Bun.file(fifoFilePath)
+  //   const fifoExists = await fifo.exists();
+
+  //   // create fifo if it doesn't exist
+  //   if (!fifoExists) {
+  //     await Bun.spawn(["mkfifo", fifoFilePath]).exited;
+  //   }
+
+  //   // Start a process to read from the FIFO
+  //   const reader = Bun.spawn(["cat", fifoFilePath], { stdout: "pipe" });
+
+  //   const { data, status } = await api.whitelist.post("3aa5ad5e62eaffd148cff3dbe93ff2e1e9cbcf01", opts)
+
+  //   const text = await new Response(reader.stdout).text();
+
+  //   expect(text).toBe("3aa5ad5e62eaffd148cff3dbe93ff2e1e9cbcf01\n")
+  //   expect(status).toBe(200)
+  //   expect(data).toBe("3aa5ad5e62eaffd148cff3dbe93ff2e1e9cbcf01")
+  // })
+
   it('returns 401 when username/password is missing from GET /whitelist ', async () => {
     const { status } = await api.whitelist.get()
     expect(status).toBe(401)
@@ -1 +1 @@
-07b4516336e4afe9232c73bc312642590a7d7e95
+3aa5ad5e62eaffd148cff3dbe93ff2e1e9cbcf01