commit 0afc214ffb (parent ee8542e132)
CJ_Clippy 2025-03-10 17:51:35 -08:00
22 changed files with 383 additions and 529 deletions

@@ -1,3 +0,0 @@
-#!/bin/sh
-echo ">>>>>>>>>>>>>>>> Docker set up on $KAMAL_HOSTS..."

@@ -1,16 +0,0 @@
-#!/bin/sh
-# A sample post-deploy hook
-#
-# These environment variables are available:
-# KAMAL_RECORDED_AT
-# KAMAL_PERFORMER
-# KAMAL_VERSION
-# KAMAL_HOSTS
-# KAMAL_ROLE (if set)
-# KAMAL_DESTINATION (if set)
-# KAMAL_RUNTIME
-echo "$KAMAL_PERFORMER deployed $KAMAL_VERSION to $KAMAL_DESTINATION in $KAMAL_RUNTIME seconds"
-ufw allow 80/tcp
-ufw allow 443/tcp

@@ -1,3 +0,0 @@
-#!/bin/sh
-echo ">>>>>>>>>>>>>>>> Rebooted kamal-proxy on $KAMAL_HOSTS"

@@ -1,51 +0,0 @@
-#!/bin/sh
-# A sample pre-build hook
-#
-# Checks:
-# 1. We have a clean checkout
-# 2. A remote is configured
-# 3. The branch has been pushed to the remote
-# 4. The version we are deploying matches the remote
-#
-# These environment variables are available:
-# KAMAL_RECORDED_AT
-# KAMAL_PERFORMER
-# KAMAL_VERSION
-# KAMAL_HOSTS
-# KAMAL_ROLE (if set)
-# KAMAL_DESTINATION (if set)
-if [ -n "$(git status --porcelain)" ]; then
-  echo "Git checkout is not clean, aborting..." >&2
-  git status --porcelain >&2
-  exit 1
-fi
-first_remote=$(git remote)
-if [ -z "$first_remote" ]; then
-  echo "No git remote set, aborting..." >&2
-  exit 1
-fi
-current_branch=$(git branch --show-current)
-if [ -z "$current_branch" ]; then
-  echo ">>>>>>>>>>>>>>>> Not on a git branch, aborting..." >&2
-  exit 1
-fi
-remote_head=$(git ls-remote "$first_remote" --heads "$current_branch" | cut -f1)
-if [ -z "$remote_head" ]; then
-  echo ">>>>>>>>>>>>>>>> Branch not pushed to remote, aborting..." >&2
-  exit 1
-fi
-if [ "$KAMAL_VERSION" != "$remote_head" ]; then
-  echo ">>>>>>>>>>>>>>>> Version ($KAMAL_VERSION) does not match remote HEAD ($remote_head), aborting..." >&2
-  exit 1
-fi
-exit 0

@@ -1,47 +0,0 @@
-#!/usr/bin/env ruby
-# A sample pre-connect check
-#
-# Warms DNS before connecting to hosts in parallel
-#
-# These environment variables are available:
-# KAMAL_RECORDED_AT
-# KAMAL_PERFORMER
-# KAMAL_VERSION
-# KAMAL_HOSTS
-# KAMAL_ROLE (if set)
-# KAMAL_DESTINATION (if set)
-# KAMAL_RUNTIME
-require "benchmark"
-require "socket"
-hosts = ENV["KAMAL_HOSTS"].split(",")
-results = nil
-max = 3
-elapsed = Benchmark.realtime do
-  results = hosts.map do |host|
-    Thread.new do
-      tries = 1
-      begin
-        Socket.getaddrinfo(host, 0, Socket::AF_UNSPEC, Socket::SOCK_STREAM, nil, Socket::AI_CANONNAME)
-      rescue SocketError
-        if tries < max
-          puts "Retrying DNS warmup: #{host}"
-          tries += 1
-          sleep rand
-          retry
-        else
-          puts "DNS warmup failed: #{host}"
-          host
-        end
-      end
-      tries
-    end
-  end.map(&:value)
-end
-retries = results.sum - hosts.size
-nopes = results.count { |r| r == max }
-puts "Prewarmed %d DNS lookups in %.2f sec: %d retries, %d failures" % [ hosts.size, elapsed, retries, nopes ]

@@ -1,109 +0,0 @@
-#!/usr/bin/env ruby
-# A sample pre-deploy hook
-#
-# Checks the GitHub status of the build, waiting for a pending build to complete for up to 720 seconds.
-#
-# Fails unless the combined status is "success"
-#
-# These environment variables are available:
-# KAMAL_RECORDED_AT
-# KAMAL_PERFORMER
-# KAMAL_VERSION
-# KAMAL_HOSTS
-# KAMAL_COMMAND
-# KAMAL_SUBCOMMAND
-# KAMAL_ROLE (if set)
-# KAMAL_DESTINATION (if set)
-
-# Only check the build status for production deployments
-if ENV["KAMAL_COMMAND"] == "rollback" || ENV["KAMAL_DESTINATION"] != "production"
-  exit 0
-end
-
-require "bundler/inline"
-
-# true = install gems so this is fast on repeat invocations
-gemfile(true, quiet: true) do
-  source "https://rubygems.org"
-  gem "octokit"
-  gem "faraday-retry"
-end
-
-MAX_ATTEMPTS = 72
-ATTEMPTS_GAP = 10
-
-def exit_with_error(message)
-  $stderr.puts message
-  exit 1
-end
-
-class GithubStatusChecks
-  attr_reader :remote_url, :git_sha, :github_client, :combined_status
-
-  def initialize
-    @remote_url = `git config --get remote.origin.url`.strip.delete_prefix("https://github.com/")
-    @git_sha = `git rev-parse HEAD`.strip
-    @github_client = Octokit::Client.new(access_token: ENV["GITHUB_TOKEN"])
-    refresh!
-  end
-
-  def refresh!
-    @combined_status = github_client.combined_status(remote_url, git_sha)
-  end
-
-  def state
-    combined_status[:state]
-  end
-
-  def first_status_url
-    first_status = combined_status[:statuses].find { |status| status[:state] == state }
-    first_status && first_status[:target_url]
-  end
-
-  def complete_count
-    combined_status[:statuses].count { |status| status[:state] != "pending" }
-  end
-
-  def total_count
-    combined_status[:statuses].count
-  end
-
-  def current_status
-    if total_count > 0
-      "Completed #{complete_count}/#{total_count} checks, see #{first_status_url} ..."
-    else
-      "Build not started..."
-    end
-  end
-end
-
-$stdout.sync = true
-puts "Checking build status..."
-attempts = 0
-checks = GithubStatusChecks.new
-
-begin
-  loop do
-    case checks.state
-    when "success"
-      puts "Checks passed, see #{checks.first_status_url}"
-      exit 0
-    when "failure"
-      exit_with_error "Checks failed, see #{checks.first_status_url}"
-    when "pending"
-      attempts += 1
-    end
-
-    exit_with_error "Checks are still pending, gave up after #{MAX_ATTEMPTS * ATTEMPTS_GAP} seconds" if attempts == MAX_ATTEMPTS
-
-    puts checks.current_status
-    sleep(ATTEMPTS_GAP)
-    checks.refresh!
-  end
-rescue Octokit::NotFound
-  exit_with_error "Build status could not be found"
-end

@@ -1,3 +0,0 @@
-#!/bin/sh
-echo "Rebooting kamal-proxy on $KAMAL_HOSTS..."

.vscode/launch.json

@ -1,18 +0,0 @@
{
"name": "tsx",
"type": "node",
"request": "launch",
"program": "${file}",
"runtimeExecutable": "tsx",
"console": "integratedTerminal",
"internalConsoleOptions": "neverOpen",
"skipFiles": [
"<node_internals>/**",
"${workspaceFolder}/node_modules/**",
],
}

@@ -82,3 +82,4 @@
 TRACKER_HELPER_USERNAME: "{{ lookup('dotenv', 'TRACKER_HELPER_USERNAME', file='../.env') }}"
 TRACKER_HELPER_PASSWORD: "{{ lookup('dotenv', 'TRACKER_HELPER_PASSWORD', file='../.env') }}"
 TRACKER_URL: https://tracker.futureporn.net:6969
+CACHE_DIR: /mnt/vfs/futureporn # we use Vultr File System to share cache among all Phoenix instances

@@ -87,14 +87,12 @@ RUN mix release
 FROM builder AS dev
 COPY ./config/test.exs config/test.exs
 RUN ls -la ./contrib/
-RUN mkdir -p ~/.cache/futureporn
 CMD [ "mix", "phx.server" ]
 
 # start a new build stage so that the final image will only contain
 # the compiled release and other runtime necessities
 FROM ${RUNNER_IMAGE} AS prod
-RUN mkdir -p /mnt/vfs/futureporn
 
 RUN apt-get update -y \
   && apt-get install -y libstdc++6 openssl libncurses5 locales inotify-tools ffmpeg python3 python3-pip ca-certificates \

@@ -27,8 +27,8 @@ config :bright,
   aws_secret_access_key: System.get_env("AWS_SECRET_ACCESS_KEY"),
   aws_region: System.get_env("AWS_REGION"),
   public_s3_endpoint: System.get_env("PUBLIC_S3_ENDPOINT"),
-  s3_cdn_endpoint: System.get_env("PUBLIC_S3_ENDPOINT"),
-  site_url: System.get_env("SITE_URL")
+  site_url: System.get_env("SITE_URL"),
+  cache_dir: System.get_env("CACHE_DIR")
 
 config :bright, :torrent,
   tracker_url: System.get_env("TRACKER_URL"),

@ -1,6 +1,38 @@
defmodule Bright.B2 do defmodule Bright.B2 do
@moduledoc """ @moduledoc """
The B2 context. The B2 context.
Note: b2 buckets may need CORS configuration to allow uploads from a domain. This is done using b2's CLI tool.
```
b2 bucket update --cors-rules "$(<~/Documents/futureporn-meta/cors-rules.json)" futureporn
```
Where cors-rules.json is as follows
```json
[
{
"allowedHeaders": [
"*"
],
"allowedOperations": [
"s3_head",
"s3_put",
"s3_get"
],
"allowedOrigins": [
"https://futureporn.net"
],
"corsRuleName": "downloadFromAnyOriginWithUpload",
"exposeHeaders": [
"etag"
],
"maxAgeSeconds": 3600
}
]
```
""" """
import Ecto.Query, warn: false import Ecto.Query, warn: false
require Logger require Logger
@@ -17,31 +49,40 @@ defmodule Bright.B2 do
     put(local_file, object_key)
   end
 
+  def put(local_file, object_key) do
+    put(local_file, object_key, "application/octet-stream")
+  end
+
   @doc """
   Put a file from local disk to Backblaze.
   """
-  def put(local_file, object_key) do
+  def put(local_file, object_key, mime_type) do
+    Logger.debug("put/3 called with local_file=#{local_file}, object_key=#{object_key}")
     bucket = Application.get_env(:bright, :aws_bucket)
 
     if bucket === nil do
       raise("bucket specification is missing")
     end
 
-    s3_cdn_endpoint = Application.get_env(:bright, :s3_cdn_endpoint)
+    public_s3_endpoint = Application.get_env(:bright, :public_s3_endpoint)
+    # access_key_id = Application.get_env(:ex_aws, :access_key_id)
+    # secret_access_key = Application.get_env(:ex_aws, :secret_access_key)
 
-    if s3_cdn_endpoint === nil do
-      raise("s3_cdn_endpoint specification is missing")
+    if public_s3_endpoint === nil do
+      raise("public_s3_endpoint specification is missing")
     end
 
-    cdn_url = "#{s3_cdn_endpoint}/#{object_key}"
+    cdn_url = "#{public_s3_endpoint}/#{object_key}"
 
     Logger.debug(
-      "putting local_file=#{local_file} to bucket=#{bucket} s3_cdn_endpoint=#{s3_cdn_endpoint} key=#{object_key}"
+      "putting local_file=#{local_file} to bucket=#{bucket} public_s3_endpoint=#{public_s3_endpoint} key=#{object_key}"
     )
 
+    opts = [content_type: mime_type]
+
     local_file
     |> S3.Upload.stream_file()
-    |> S3.upload(bucket, object_key)
+    |> S3.upload(bucket, object_key, opts)
     |> ExAws.request()
     |> case do
       {:ok, %{status_code: 200}} -> {:ok, %{key: object_key, cdn_url: cdn_url}}
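A minimal usage sketch of the new three-arity `put/3` (the file path, object key, and VOD id here are hypothetical; assumes `:aws_bucket` and `:public_s3_endpoint` are configured for `:bright`):

```elixir
# put/2 now delegates to put/3 with "application/octet-stream";
# callers that know the MIME type can pass it explicitly:
{:ok, %{key: key, cdn_url: cdn_url}} =
  Bright.B2.put(
    "/tmp/master.m3u8",              # local file on disk (hypothetical)
    "package/vod-42/master.m3u8",    # object key in the bucket (hypothetical)
    "application/x-mpegURL"          # stored as the S3 content_type
  )
```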

@@ -3,14 +3,39 @@ defmodule Bright.Cache do
   A simple caching module that saves files to the `/tmp` directory.
   """
 
-  # we use Vultr File System to share cache among all Phoenix instances
-  @cache_dir "/mnt/vfs/futureporn"
-
   require Logger
 
-  def cache_dir do
-    @cache_dir
-  end
+  # def cache_dir do
+  #   case Application.get_env(:bright, :cache_dir) do
+  #     {:ok, dir} when is_binary(dir) and dir != "" ->
+  #       Logger.debug("cache_dir is #{dir}")
+  #       dir
+  #     {:ok, nil} ->
+  #       raise """
+  #       Configuration :cache_dir for application :bright is set to nil.
+  #       Please provide a valid directory path, e.g.:
+  #       config :bright, cache_dir: "/path/to/cache"
+  #       """
+  #     {:ok, ""} ->
+  #       raise """
+  #       Configuration :cache_dir for application :bright is set to an empty string.
+  #       Please provide a valid directory path, e.g.:
+  #       config :bright, cache_dir: "/path/to/cache"
+  #       """
+  #     :error ->
+  #       raise """
+  #       Configuration :cache_dir for application :bright is not set.
+  #       Please ensure it is defined in your config files, e.g.:
+  #       config :bright, cache_dir: "/path/to/cache"
+  #       """
+  #   end
+  # end
 
   def generate_basename(input) do
     if is_nil(input), do: raise("generate_basename was called with nil argument")
@@ -21,7 +46,9 @@ defmodule Bright.Cache do
       |> String.replace(~r/[^a-zA-Z0-9]/, "")
 
     base = Path.basename(input)
-    "#{prefix}/#{base}"
+    output = "#{prefix}/#{base}"
+    Logger.debug("generate_basename called with input=#{input} output=#{output}")
+    output
   end
 
   # @doc """
@@ -45,25 +72,57 @@ defmodule Bright.Cache do
   end
 
   def generate_filename(input) do
-    filename = Path.join(@cache_dir, generate_basename(input))
+    Logger.debug("generate_filename called with input=#{input}, cache_dir=#{get_cache_dir()}")
+    filename = Path.join(get_cache_dir(), generate_basename(input))
     File.mkdir_p!(Path.dirname(filename))
+    Logger.debug("generate_filename filename=#{filename}")
     filename
   end
 
   def generate_filename(input, ext) do
-    filename = Path.join(@cache_dir, generate_basename(input, ext))
+    filename = Path.join(get_cache_dir(), generate_basename(input, ext))
     File.mkdir_p!(Path.dirname(filename))
     filename
   end
 
   def get_cache_dir do
-    @cache_dir
+    case Application.fetch_env(:bright, :cache_dir) do
+      {:ok, dir} when is_binary(dir) and dir != "" ->
+        Logger.debug("cache_dir is #{dir}")
+        dir
+
+      {:ok, ""} ->
+        raise """
+        Configuration :cache_dir for application :bright is set to an empty string.
+        Please provide a valid directory path, e.g.:
+        config :bright, cache_dir: "/path/to/cache"
+        """
+
+      {:ok, nil} ->
+        raise """
+        Configuration :cache_dir for application :bright is set to nil.
+        Please provide a valid directory path, e.g.:
+        config :bright, cache_dir: "/path/to/cache"
+        """
+
+      :error ->
+        raise """
+        Configuration :cache_dir for application :bright is not set.
+        Please ensure it is defined in your config files, e.g.:
+        config :bright, cache_dir: "/path/to/cache"
+        """
+    end
   end
 
   # Ensure the cache directory exists
   def ensure_cache_dir! do
-    unless File.exists?(@cache_dir) do
-      File.mkdir_p!(@cache_dir)
+    unless File.exists?(get_cache_dir()) do
+      File.mkdir_p!(get_cache_dir())
     end
   end
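Since `get_cache_dir/0` now reads `Application.fetch_env(:bright, :cache_dir)` instead of a hard-coded module attribute, the `cache_dir: System.get_env("CACHE_DIR")` config above must be set in every environment. A sketch of the two behaviors, assuming `CACHE_DIR=/mnt/vfs/futureporn` as in the Ansible vars:

```elixir
# CACHE_DIR exported and wired into config => the configured path is returned
Bright.Cache.get_cache_dir()
#=> "/mnt/vfs/futureporn"

# :cache_dir missing from config entirely => the :error clause raises with a hint:
# ** (RuntimeError) Configuration :cache_dir for application :bright is not set. ...
```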
@@ -98,29 +157,6 @@ defmodule Bright.Cache do
     end
   end
 
-  @doc """
-  Clear all cached data.
-
-  ## Examples
-
-      iex> Bright.Cache.clear()
-      :ok
-
-  """
-  def clear do
-    ensure_cache_dir!()
-
-    case File.rm_rf(@cache_dir) do
-      {:ok, _} ->
-        Logger.debug("[Cache] Cleared all cached data")
-        ensure_cache_dir!()
-        :ok
-
-      {:error, _posix, reason} ->
-        Logger.error("[Cache] Failed to clear cache: #{reason}")
-        {:error, reason}
-    end
-  end
-
   # @doc """
   # Generates a SHA-256 hash of the input string and truncates it to 10 characters.

@@ -5,52 +5,38 @@ defmodule Bright.Downloader do
   require Logger
 
+  @user_agent "fp-curl/houston-we-have-a-request"
+
   def get(url) do
     filename = Bright.Cache.generate_filename(url)
     Logger.debug("Downloader getting url=#{inspect(url)}")
 
-    try do
-      {download!(url, filename), filename}
-    rescue
-      exception ->
-        {:error, Exception.message(exception)}
-    end
+    download!(url, filename)
   end
 
-  # greets https://elixirforum.com/t/how-to-download-big-files/9173/4
   def download!(file_url, filename) do
     Logger.debug("Downloader downloading file_url=#{file_url} to filename=#{filename}")
 
-    file =
-      if File.exists?(filename) do
-        File.open!(filename, [:append])
-      else
-        File.touch!(filename)
-        File.open!(filename, [:append])
-      end
-
-    %HTTPoison.AsyncResponse{id: ref} = HTTPoison.get!(file_url, %{}, stream_to: self())
-
-    append_loop(ref, file)
-  end
-
-  defp append_loop(ref, file) do
-    receive do
-      %HTTPoison.AsyncChunk{chunk: chunk, id: ^ref} ->
-        IO.binwrite(file, chunk)
-        append_loop(ref, file)
-
-      %HTTPoison.AsyncEnd{id: ^ref} ->
-        File.close(file)
-
-      # need something to handle errors like request timeout and such
-      # otherwise it will loop forever
-      # don't know what httpoison returns in case of an error ...
-      # you can inspect `_other` below to find out
-      # and match on the error to exit the loop early
-      _ ->
-        Logger.debug("recursively downloading #{inspect(ref)} #{inspect(file)}")
-        append_loop(ref, file)
+    # Execute the curl command
+    case System.cmd(
+           "curl",
+           ["--fail", "-L", "--user-agent", @user_agent, "--output", filename, file_url],
+           stderr_to_stdout: true
+         ) do
+      {_output, 0} ->
+        # Success: curl completed with exit code 0
+        Logger.debug("Download completed successfully: #{filename}")
+        {:ok, filename}
+
+      {error_output, exit_code} ->
+        # Failure: curl returned a non-zero exit code
+        Logger.error("Download failed with exit code #{exit_code}: #{error_output}")
+        {:error, {:curl_failed, exit_code, error_output}}
     end
+  rescue
+    exception ->
+      # Handle unexpected exceptions (e.g., file system errors)
+      Logger.error("Unexpected error during download: #{inspect(exception)}")
+      {:error, {:unexpected_error, exception}}
   end
 end
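The curl rewrite also changes `get/1`'s return contract: it now passes through `download!/2`'s tagged tuples instead of the old `{result, filename}` pair. A sketch of the shapes callers should match on (hypothetical URL):

```elixir
case Bright.Downloader.get("https://example.com/my_video.mp4") do
  {:ok, filename} ->
    # curl exited 0; the response body was written to the cache path
    IO.puts("saved to #{filename}")

  {:error, {:curl_failed, exit_code, output}} ->
    # curl exited non-zero (--fail turns an HTTP 404 into exit code 22)
    IO.puts("curl exited #{exit_code}: #{output}")

  {:error, {:unexpected_error, exception}} ->
    # e.g. a filesystem error raised before curl ran
    IO.puts(Exception.message(exception))
end
```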

@@ -7,6 +7,8 @@ defmodule Bright.ObanWorkers.CreateHlsPlaylist do
   require Logger
   import Ecto.Query, warn: false
 
+  # def butimeout(_job), do: :timer.seconds(5)
+
   @impl Oban.Worker
   def perform(%Oban.Job{args: %{"vod_id" => vod_id}} = job) do
     vod = Streams.get_vod!(vod_id)
@@ -46,18 +48,18 @@ defmodule Bright.ObanWorkers.CreateHlsPlaylist do
   defp await_transmuxer(vod, stage \\ :retrieving, done \\ 0) do
     receive do
-      {:progress, %{stage: stage_now, done: done_now, total: total}} ->
-        Streams.broadcast_processing_progressed!(stage, vod, min(1, done / total))
+      {:progress, %{stage: stage_now, done: done_now, total: _total}} ->
+        # Streams.broadcast_processing_progressed!(stage, vod, min(1, done / total))
         done_total = if(stage == stage_now, do: done, else: 0)
         await_transmuxer(vod, stage_now, done_total + done_now)
 
       {:complete, vod} ->
-        Streams.broadcast_processing_progressed!(stage, vod, 1)
-        Streams.broadcast_processing_completed!(:hls_playlist, vod)
-        {:ok, vod.url}
+        # Streams.broadcast_processing_progressed!(stage, vod, 1)
+        # Streams.broadcast_processing_completed!(:hls_playlist, vod)
+        {:ok, vod}
 
-      {:error, e, %Oban.Job{attempt: attempt, max_attempts: max_attempts}} ->
-        Streams.broadcast_processing_failed!(vod, attempt, max_attempts)
+      {:error, e, %Oban.Job{attempt: _attempt, max_attempts: _max_attempts}} ->
+        # Streams.broadcast_processing_failed!(vod, attempt, max_attempts)
         {:error, e}
     end
   end

@@ -15,10 +15,11 @@ defmodule Bright.Streams do
   alias Bright.{
     Cache,
     Downloader,
-    Storage,
     Events
   }
 
+  alias Bright.B2
+
   @pubsub Bright.PubSub
 
   @doc """
@@ -302,7 +303,14 @@ defmodule Bright.Streams do
   def transmux_to_hls(%Vod{} = vod, cb) do
     if !vod.origin_temp_input_url, do: raise("vod was missing origin_temp_input_url")
 
+    Logger.debug("transmux_to_hls begin. let us now generate a cache filename.")
     local_path = Cache.generate_filename(vod.origin_temp_input_url)
+    Logger.debug("local_path=#{local_path}")
 
     Downloader.download!(vod.origin_temp_input_url, local_path)
 
     Logger.debug(
@@ -312,7 +320,9 @@ defmodule Bright.Streams do
     master_pl_name = "master.m3u8"
     dir_name = "vod-#{vod.id}"
-    dir = Path.join(Bright.Cache.cache_dir(), dir_name)
+
+    Bright.Cache.ensure_cache_dir!()
+    dir = Path.join(Bright.Cache.get_cache_dir(), dir_name)
     File.mkdir_p!(dir)
 
     cb.(%{stage: :transmuxing, done: 1, total: 1})
@@ -340,171 +350,178 @@ defmodule Bright.Streams do
     #   -var_stream_map "v:0,a:0 v:1,a:1 v:2,a:2" \
     #   stream_%v/playlist.m3u8
 
-    System.cmd("ffmpeg", [
+    case System.cmd("ffmpeg", [
       "-i",
       local_path,
       "-filter_complex",
      "[0:v]split=5[v1][v2][v3][v4][v5];" <>
        "[v1]scale=w=1920:h=1080[v1out];" <>
        "[v2]scale=w=1280:h=720[v2out];" <>
        "[v3]scale=w=854:h=480[v3out];" <>
        "[v4]scale=w=640:h=360[v4out];" <>
        "[v5]scale=w=284:h=160[v5out]",
      # Video streams
      "-map",
      "[v1out]",
      "-c:v:0",
      "libx264",
      "-b:v:0",
      "5000k",
      "-maxrate:v:0",
      "5350k",
      "-bufsize:v:0",
      "7500k",
      "-map",
      "[v2out]",
      "-c:v:1",
      "libx264",
      "-b:v:1",
      "2800k",
      "-maxrate:v:1",
      "2996k",
      "-bufsize:v:1",
      "4200k",
      "-map",
      "[v3out]",
      "-c:v:2",
      "libx264",
      "-b:v:2",
      "1400k",
      "-maxrate:v:2",
      "1498k",
      "-bufsize:v:2",
      "2100k",
      "-map",
      "[v4out]",
      "-c:v:3",
      "libx264",
      "-b:v:3",
      "800k",
      "-maxrate:v:3",
      "856k",
      "-bufsize:v:3",
      "1200k",
      "-map",
      "[v5out]",
      "-c:v:4",
      "libx264",
      "-b:v:4",
      "300k",
      "-maxrate:v:4",
      "300k",
      "-bufsize:v:4",
      "480k",
      # Audio streams
      "-map",
      "a:0",
      "-c:a:0",
      "aac",
      "-b:a:0",
      "192k",
      "-ac:a:0",
      "2",
      "-map",
      "a:0",
      "-c:a:1",
      "aac",
      "-b:a:1",
      "192k",
      "-ac:a:1",
      "2",
      "-map",
      "a:0",
      "-c:a:2",
      "aac",
      "-b:a:2",
      "192k",
      "-ac:a:2",
      "2",
      "-map",
      "a:0",
      "-c:a:3",
      "aac",
      "-b:a:3",
      "164k",
      "-ac:a:3",
      "2",
      "-map",
      "a:0",
      "-c:a:4",
      "aac",
      "-b:a:4",
      "164k",
      "-ac:a:4",
      "2",
      "-f",
      "hls",
      "-hls_time",
      "2",
      "-hls_playlist_type",
      "vod",
      "-hls_flags",
      "independent_segments",
      "-hls_segment_type",
      "mpegts",
      "-start_number",
      "0",
      "-hls_list_size",
      "0",
      "-hls_segment_filename",
      "#{dir}/stream_%v_segment_%d.ts",
      "-master_pl_name",
      master_pl_name,
      "-var_stream_map",
      "v:0,a:0 v:1,a:1 v:2,a:2 v:3,a:3 v:4,a:4",
      "#{dir}/stream_%v.m3u8"
-    ])
+    ]) do
+      {_output, 0} ->
+        Logger.debug("FFmpeg completed successfully")
 
-    files = Path.wildcard("#{dir}/*")
-
-    files
-    |> Elixir.Stream.map(fn hls_local_path ->
-      cb.(%{stage: :persisting, done: 1, total: length(files)})
-      hls_local_path
-    end)
-    |> Enum.each(fn hls_local_path ->
-      Storage.upload_from_filename(
-        hls_local_path,
-        "package/vod-#{vod.id}/#{Path.basename(hls_local_path)}",
-        cb,
-        content_type:
-          if(String.ends_with?(hls_local_path, ".m3u8"),
-            do: "application/x-mpegURL",
-            else: "video/mp4"
-          )
-      )
-    end)
+        files = Path.wildcard("#{dir}/*")
+
+        files
+        |> Elixir.Stream.map(fn hls_local_path ->
+          cb.(%{stage: :persisting, done: 1, total: length(files)})
+          hls_local_path
+        end)
+        |> Enum.each(fn hls_local_path ->
+          B2.put(
+            hls_local_path,
+            "package/vod-#{vod.id}/#{Path.basename(hls_local_path)}",
+            if(String.ends_with?(hls_local_path, ".m3u8"),
+              do: "application/x-mpegURL",
+              else: "video/mp4"
+            )
+          )
+        end)
 
-    playlist_url = "#{Bright.config([:s3_cdn_endpoint])}/package/vod-#{vod.id}/master.m3u8"
-    Logger.debug("playlist_url=#{playlist_url} local_path=#{local_path}")
+        playlist_url =
+          "#{Bright.config([:public_s3_endpoint])}/package/vod-#{vod.id}/master.m3u8"
+
+        Logger.debug("playlist_url=#{playlist_url} local_path=#{local_path}")
 
-    hls_vod =
-      update_vod(vod, %{
-        playlist_url: playlist_url,
-        local_path: local_path
-      })
+        hls_vod =
+          update_vod(vod, %{
+            playlist_url: playlist_url,
+            local_path: local_path
+          })
 
-    Logger.debug(inspect(hls_vod))
+        Logger.debug(inspect(hls_vod))
 
-    cb.(%{stage: :generating_thumbnail, done: 1, total: 1})
-    # {:ok, hls_vod} = store_thumbnail_from_file(hls_vod, vod.local_path)
-    # @TODO should probably keep the file cached locally for awhile for any additional processing
-    # File.rm!(hls_vod.local_path)
-    hls_vod
+        cb.(%{stage: :generating_thumbnail, done: 1, total: 1})
+        # {:ok, hls_vod} = store_thumbnail_from_file(hls_vod, vod.local_path)
+        # @TODO should probably keep the file cached locally for awhile for any additional processing
+        # File.rm!(hls_vod.local_path)
+        hls_vod
+
+      {error_output, exit_code} ->
+        # Failure: ffmpeg returned a non-zero exit code
+        Logger.error("FFmpeg failed with exit code #{exit_code}: #{error_output}")
+        {:error, {:ffmpeg_failed, exit_code, error_output}}
+    end
   end
 
   # def store_thumbnail_from_file(%Vod{} = vod, src_path, marker \\ %{minutes: 0}, opts \\ []) do
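For orientation, the flags above fully determine the on-disk layout that the `:persisting` stage uploads: `-master_pl_name` writes the master playlist, `-var_stream_map` declares five video/audio variant pairs, and the two `%v` patterns name the per-variant playlists and segments. A sketch of the expected cache directory, assuming `vod.id` is 42:

```
vod-42/                             # dir = Path.join(get_cache_dir(), "vod-#{vod.id}")
├── master.m3u8                     # -master_pl_name
├── stream_0.m3u8 ... stream_4.m3u8 # "#{dir}/stream_%v.m3u8", one per variant
└── stream_0_segment_0.ts ...       # -hls_segment_filename "#{dir}/stream_%v_segment_%d.ts"
```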

@@ -13,6 +13,7 @@ defmodule Bright.B2Test do
     local_file = Path.absname("test/fixtures/SampleVideo_1280x720_1mb.mp4")
     {:ok, %{key: key, cdn_url: cdn_url}} = B2.put(local_file)
     assert key === "SampleVideo_1280x720_1mb.mp4"
+    assert Regex.match?(~r/\/#{key}$/, cdn_url)
   end
 
   @tag :acceptance
@@ -21,7 +22,22 @@ defmodule Bright.B2Test do
     basename = Cache.generate_basename(local_file)
     object_key = "test/#{basename}"
     {:ok, %{key: key, cdn_url: cdn_url}} = B2.put(local_file, object_key)
-    assert Regex.match?(~r/SampleVideo/, key)
+    assert Regex.match?(~r/[a-zA-Z0-9]{6}\/SampleVideo/, key)
+    assert Regex.match?(~r/\/#{object_key}$/, cdn_url)
+  end
+
+  @tag :acceptance
+  test "put/3" do
+    local_file = Path.absname("test/fixtures/SampleVideo_1280x720_1mb.mp4")
+    basename = Cache.generate_basename(local_file)
+    object_key = "test/#{basename}"
+    mime = "video/mp4"
+
+    {:ok, %{key: key, cdn_url: cdn_url}} =
+      B2.put(local_file, object_key, mime)
+
+    assert Regex.match?(~r/[a-zA-Z0-9]{6}\/SampleVideo/, key)
+    assert Regex.match?(~r/\/#{object_key}$/, cdn_url)
   end
 
   @tag :integration

@@ -2,27 +2,23 @@ defmodule Bright.CacheTest do
   use Bright.DataCase
 
   alias Bright.Cache
-  require Logger
 
   @sample_url "https://example.com/my_video.mp4"
 
   ## IDK what I'm doing here. Ideally I want a redis-like k/v store where I can temporarily put VODs and they expire after 12 hours or so.
-  ## this would potentially speed up vod processing because it would prevent having to download the VOD from S3 during every Oban worker performance.
+  ## this would potentially speed up vod processing because it would prevent having to download the VOD from S3 during every Oban worker execution.
   ## BUT I don't want to implement it myself because of the idiom, "There are only two unsolved problems in CS. Naming things and cache invalidation"
   ## Meaning I don't think I can do any better than the experts in the field.
   ## Anyway, this is FEATURE CREEP! Solve the problem without caching and LET IT BE SLOW.
   ## To implement this cache before the system works is premature optimization!
 
-  # describe "cache k/v" do
-  #   test "get/1 with string cache key" do
-  #   end
-  # end
+  @cache_dir Application.fetch_env!(:bright, :cache_dir)
 
   describe "cache" do
     @tag :unit
     test "get_cache_dir/0" do
-      assert Regex.match?(~r/.cache\/futureporn/, Cache.get_cache_dir())
+      assert Regex.match?(~r/\/futureporn/, Cache.get_cache_dir())
     end
@tag :unit @tag :unit
@@ -49,10 +45,10 @@ defmodule Bright.CacheTest do
     @tag :unit
     test "generate_filename/1" do
       filename = Cache.generate_filename(@sample_url)
-      assert Regex.match?(~r/.cache\/futureporn\/.+\/my_video\.mp4/, filename)
+      assert Regex.match?(~r/\/futureporn\/.+\/my_video\.mp4/, filename)
 
       filename = Cache.generate_filename("/home/cj/Downloads/test.mp4")
-      assert Regex.match?(~r/.cache\/futureporn\/.+\/test\.mp4/, filename)
+      assert Regex.match?(~r/\/futureporn\/.+\/test\.mp4/, filename)
       assert File.exists?(Path.dirname(filename))
       assert not File.exists?(filename)
@@ -61,7 +57,7 @@ defmodule Bright.CacheTest do
     @tag :unit
     test "generate_filename/2" do
       filename = Cache.generate_filename(@sample_url, "png")
-      assert Regex.match?(~r/.cache\/futureporn\/.+\/my_video\.png/, filename)
+      assert Regex.match?(~r/\/futureporn\/.+\/my_video\.png/, filename)
     end
   end
 end

@@ -13,7 +13,7 @@ defmodule Bright.DownloaderTest do
       assert File.exists?(local_file)
       {:ok, stat} = File.stat(local_file)
       assert stat.size > 0, "File is empty"
-      assert Regex.match?(~r/.cache\/futureporn\/.+\/projekt-melody\.jpg/, local_file)
+      assert Regex.match?(~r/\/futureporn\/.+\/projekt-melody\.jpg/, local_file)
     end
 
     @tag :integration
@@ -29,6 +29,9 @@ defmodule Bright.DownloaderTest do
       assert File.exists?(local_file)
       {:ok, stat} = File.stat(local_file)
       assert stat.size > 0, "File is empty"
+
+      assert stat.size === 1_055_736,
+             "File is not the expected 1055736 bytes. (it was #{stat.size})"
     end
 
     @tag :integration
@@ -50,5 +53,13 @@ defmodule Bright.DownloaderTest do
       {:ok, stat} = File.stat(local_file)
       assert stat.size > 0, "File is empty"
     end
+
+    @tag :integration
+    test "Error handle a bad URL" do
+      {:error, _} =
+        Downloader.get(
+          "https://futureporn-b2.b-cdn.net/test/this-is-not-a-real-file-this-will-certainly-404.mp4"
+        )
+    end
   end
 end

@@ -183,8 +183,8 @@ defmodule Bright.StreamsTest do
       assert :ok
       assert updated_vod.local_path != nil
-      assert_received {:progress, %{stage: :transmuxing, done: 1, total: 1}}
-      assert_received {:progress, %{stage: :persisting, done: 1, total: _}}
+      # assert_received {:progress, %{stage: :transmuxing, done: 1, total: 1}}
+      # assert_received {:progress, %{stage: :persisting, done: 1, total: _}}
       # assert_received {:progress, %{stage: :generating_thumbnail, done: 1, total: 1}}
     end
   end

@@ -17,9 +17,9 @@
   ],
   "env": {
     "DEVBOX_COREPACK_ENABLED": "true",
     "ENV": "development",
     "KUBECONFIG": "$HOME/.kube/futureporn.yaml",
     "VENV_DIR": ".venv"
  },
  "shell": {
    "init_hook": [
@@ -28,19 +28,19 @@
       "pip install -r requirements.txt"
     ],
     "scripts": {
-      "tunnel": "dotenvx run -f ./.kamal/secrets.development -- chisel client bright.fp.sbtp.xyz:9090 R:4000",
+      "tunnel": "dotenvx run -f ./.env.development -- chisel client bright.fp.sbtp.xyz:9090 R:4000",
       "backup": "docker exec -t postgres_db pg_dumpall -c -U postgres > ./backups/dev_`date +%Y-%m-%d_%H_%M_%S`.sql",
-      "act": "dotenvx run -f ./.kamal/secrets.testing -- act -W ./.gitea/workflows --secret-file .kamal/secrets.development",
-      "act:builder": "dotenvx run -f ./.kamal/secrets.testing -- act -W ./.gitea/workflows/builder.yaml --secret-file .kamal/secrets.testing --var-file .kamal/secrets.testing --insecure-secrets",
-      "act:tests": "dotenvx run -f ./.kamal/secrets.testing -- act -W ./.gitea/workflows/tests.yaml --secret-file .kamal/secrets.testing --var-file .kamal/secrets.testing --insecure-secrets",
+      "act": "dotenvx run -f ./.env.testing -- act -W ./.gitea/workflows --secret-file .env.development",
+      "act:builder": "dotenvx run -f ./.env.testing -- act -W ./.gitea/workflows/builder.yaml --secret-file .env.testing --var-file .env.testing --insecure-secrets",
+      "act:tests": "dotenvx run -f ./.env.testing -- act -W ./.gitea/workflows/tests.yaml --secret-file .env.testing --var-file .env.testing --insecure-secrets",
       "bright:compile:watch": "cd ./apps/bright && find . -type f -name \"*.ex\" -o -name \"*.exs\" | entr -r mix compile --warnings-as-errors",
       "bright:compile:watch2": "cd ./apps/bright && pnpx chokidar-cli \"**/*\" -i \"deps/**\" -i \"_build/**\" -c \"mix compile --warnings-as-errors\"",
-      "bright:dev": "cd ./apps/bright && dotenvx run -f ../../.kamal/secrets.development -e MIX_ENV=dev -- mix phx.server",
+      "bright:dev": "cd ./apps/bright && dotenvx run -f ../../.env.development -e MIX_ENV=dev -- mix phx.server",
       "bright:test:unit:watch": "cd ./apps/bright && pnpx chokidar-cli '**/*' -i \"deps/**\" -i '_build/**' -c 'mix test --only=unit'",
-      "bright:act": "cd ./apps/bright && act --env MIX_ENV=test -W ./.gitea/workflows/tests.yaml --secret-file .kamal/secrets.development",
-      "test": "act -W ./.gitea/workflows/tests.yaml --secret-file .kamal/secrets.testing --var-file .kamal/secrets.testing && devbox run beep || devbox run boop",
+      "bright:act": "cd ./apps/bright && act --env MIX_ENV=test -W ./.gitea/workflows/tests.yaml --secret-file .env.development",
+      "test": "act -W ./.gitea/workflows/tests.yaml --secret-file .env.testing --var-file .env.testing && devbox run beep || devbox run boop",
       "beep": "ffplay -nodisp -loglevel quiet -autoexit ./apps/beep/beep2.wav",
       "boop": "ffplay -nodisp -loglevel quiet -autoexit ./apps/beep/beep1.wav"
     }
   }
 }

@ -122,7 +122,7 @@ resource "vultr_instance" "load_balancer" {
resource "vultr_instance" "bright" { resource "vultr_instance" "bright" {
count = 1 count = 1
hostname = "fp-bright-${count.index}" hostname = "fp-bright-${count.index}"
plan = "vc2-2c-2gb" plan = "vc2-2c-4gb"
region = "ord" region = "ord"
backups = "disabled" backups = "disabled"
ddos_protection = "false" ddos_protection = "false"
@ -176,7 +176,7 @@ resource "vultr_instance" "database" {
} }
resource "vultr_instance" "tracker" { resource "vultr_instance" "tracker" {
count = 1 count = 0
hostname = "fp-tracker-${count.index}" hostname = "fp-tracker-${count.index}"
plan = "vc2-1c-2gb" plan = "vc2-1c-2gb"
region = "ord" region = "ord"