rm kamal

parent ee8542e132
commit 0afc214ffb

Changed paths:
  .kamal/hooks/docker-setup.sample
  .kamal/hooks/post-deploy.sample
  .kamal/hooks/post-proxy-reboot.sample
  .kamal/hooks/pre-build.sample
  .kamal/hooks/pre-connect.sample
  .kamal/hooks/pre-deploy.sample
  .kamal/hooks/pre-proxy-reboot.sample
  .vscode/launch.json
  ansible/roles/bright/tasks
  apps/bright
  devbox.json
  terraform
.kamal/hooks/docker-setup.sample (deleted)
@@ -1,3 +0,0 @@
-#!/bin/sh
-
-echo ">>>>>>>>>>>>>>>> Docker set up on $KAMAL_HOSTS..."
.kamal/hooks/post-deploy.sample (deleted)
@@ -1,16 +0,0 @@
-#!/bin/sh
-
-# A sample post-deploy hook
-#
-# These environment variables are available:
-# KAMAL_RECORDED_AT
-# KAMAL_PERFORMER
-# KAMAL_VERSION
-# KAMAL_HOSTS
-# KAMAL_ROLE (if set)
-# KAMAL_DESTINATION (if set)
-# KAMAL_RUNTIME
-
-echo "$KAMAL_PERFORMER deployed $KAMAL_VERSION to $KAMAL_DESTINATION in $KAMAL_RUNTIME seconds"
-ufw allow 80/tcp
-ufw allow 443/tcp
.kamal/hooks/post-proxy-reboot.sample (deleted)
@@ -1,3 +0,0 @@
-#!/bin/sh
-
-echo ">>>>>>>>>>>>>>>> Rebooted kamal-proxy on $KAMAL_HOSTS"
.kamal/hooks/pre-build.sample (deleted)
@@ -1,51 +0,0 @@
-#!/bin/sh
-
-# A sample pre-build hook
-#
-# Checks:
-# 1. We have a clean checkout
-# 2. A remote is configured
-# 3. The branch has been pushed to the remote
-# 4. The version we are deploying matches the remote
-#
-# These environment variables are available:
-# KAMAL_RECORDED_AT
-# KAMAL_PERFORMER
-# KAMAL_VERSION
-# KAMAL_HOSTS
-# KAMAL_ROLE (if set)
-# KAMAL_DESTINATION (if set)
-
-if [ -n "$(git status --porcelain)" ]; then
-  echo "Git checkout is not clean, aborting..." >&2
-  git status --porcelain >&2
-  exit 1
-fi
-
-first_remote=$(git remote)
-
-if [ -z "$first_remote" ]; then
-  echo "No git remote set, aborting..." >&2
-  exit 1
-fi
-
-current_branch=$(git branch --show-current)
-
-if [ -z "$current_branch" ]; then
-  echo ">>>>>>>>>>>>>>>> Not on a git branch, aborting..." >&2
-  exit 1
-fi
-
-remote_head=$(git ls-remote $first_remote --tags $current_branch | cut -f1)
-
-if [ -z "$remote_head" ]; then
-  echo ">>>>>>>>>>>>>>>> Branch not pushed to remote, aborting..." >&2
-  exit 1
-fi
-
-if [ "$KAMAL_VERSION" != "$remote_head" ]; then
-  echo ">>>>>>>>>>>>>>>> Version ($KAMAL_VERSION) does not match remote HEAD ($remote_head), aborting..." >&2
-  exit 1
-fi
-
-exit 0
.kamal/hooks/pre-connect.sample (deleted)
@@ -1,47 +0,0 @@
-#!/usr/bin/env ruby
-
-# A sample pre-connect check
-#
-# Warms DNS before connecting to hosts in parallel
-#
-# These environment variables are available:
-# KAMAL_RECORDED_AT
-# KAMAL_PERFORMER
-# KAMAL_VERSION
-# KAMAL_HOSTS
-# KAMAL_ROLE (if set)
-# KAMAL_DESTINATION (if set)
-# KAMAL_RUNTIME
-
-hosts = ENV["KAMAL_HOSTS"].split(",")
-results = nil
-max = 3
-
-elapsed = Benchmark.realtime do
-  results = hosts.map do |host|
-    Thread.new do
-      tries = 1
-
-      begin
-        Socket.getaddrinfo(host, 0, Socket::AF_UNSPEC, Socket::SOCK_STREAM, nil, Socket::AI_CANONNAME)
-      rescue SocketError
-        if tries < max
-          puts "Retrying DNS warmup: #{host}"
-          tries += 1
-          sleep rand
-          retry
-        else
-          puts "DNS warmup failed: #{host}"
-          host
-        end
-      end
-
-      tries
-    end
-  end.map(&:value)
-end
-
-retries = results.sum - hosts.size
-nopes = results.count { |r| r == max }
-
-puts "Prewarmed %d DNS lookups in %.2f sec: %d retries, %d failures" % [ hosts.size, elapsed, retries, nopes ]
.kamal/hooks/pre-deploy.sample (deleted)
@@ -1,109 +0,0 @@
-#!/usr/bin/env ruby
-
-# A sample pre-deploy hook
-#
-# Checks the Github status of the build, waiting for a pending build to complete for up to 720 seconds.
-#
-# Fails unless the combined status is "success"
-#
-# These environment variables are available:
-# KAMAL_RECORDED_AT
-# KAMAL_PERFORMER
-# KAMAL_VERSION
-# KAMAL_HOSTS
-# KAMAL_COMMAND
-# KAMAL_SUBCOMMAND
-# KAMAL_ROLE (if set)
-# KAMAL_DESTINATION (if set)
-
-# Only check the build status for production deployments
-if ENV["KAMAL_COMMAND"] == "rollback" || ENV["KAMAL_DESTINATION"] != "production"
-  exit 0
-end
-
-require "bundler/inline"
-
-# true = install gems so this is fast on repeat invocations
-gemfile(true, quiet: true) do
-  source "https://rubygems.org"
-
-  gem "octokit"
-  gem "faraday-retry"
-end
-
-MAX_ATTEMPTS = 72
-ATTEMPTS_GAP = 10
-
-def exit_with_error(message)
-  $stderr.puts message
-  exit 1
-end
-
-class GithubStatusChecks
-  attr_reader :remote_url, :git_sha, :github_client, :combined_status
-
-  def initialize
-    @remote_url = `git config --get remote.origin.url`.strip.delete_prefix("https://github.com/")
-    @git_sha = `git rev-parse HEAD`.strip
-    @github_client = Octokit::Client.new(access_token: ENV["GITHUB_TOKEN"])
-    refresh!
-  end
-
-  def refresh!
-    @combined_status = github_client.combined_status(remote_url, git_sha)
-  end
-
-  def state
-    combined_status[:state]
-  end
-
-  def first_status_url
-    first_status = combined_status[:statuses].find { |status| status[:state] == state }
-    first_status && first_status[:target_url]
-  end
-
-  def complete_count
-    combined_status[:statuses].count { |status| status[:state] != "pending"}
-  end
-
-  def total_count
-    combined_status[:statuses].count
-  end
-
-  def current_status
-    if total_count > 0
-      "Completed #{complete_count}/#{total_count} checks, see #{first_status_url} ..."
-    else
-      "Build not started..."
-    end
-  end
-end
-
-
-$stdout.sync = true
-
-puts "Checking build status..."
-attempts = 0
-checks = GithubStatusChecks.new
-
-begin
-  loop do
-    case checks.state
-    when "success"
-      puts "Checks passed, see #{checks.first_status_url}"
-      exit 0
-    when "failure"
-      exit_with_error "Checks failed, see #{checks.first_status_url}"
-    when "pending"
-      attempts += 1
-    end
-
-    exit_with_error "Checks are still pending, gave up after #{MAX_ATTEMPTS * ATTEMPTS_GAP} seconds" if attempts == MAX_ATTEMPTS
-
-    puts checks.current_status
-    sleep(ATTEMPTS_GAP)
-    checks.refresh!
-  end
-rescue Octokit::NotFound
-  exit_with_error "Build status could not be found"
-end
.kamal/hooks/pre-proxy-reboot.sample (deleted)
@@ -1,3 +0,0 @@
-#!/bin/sh
-
-echo "Rebooting kamal-proxy on $KAMAL_HOSTS..."
.vscode/launch.json (vendored, deleted)
@@ -1,18 +0,0 @@
-{
-  "name": "tsx",
-  "type": "node",
-  "request": "launch",
-
-  "program": "${file}",
-
-  "runtimeExecutable": "tsx",
-
-  "console": "integratedTerminal",
-  "internalConsoleOptions": "neverOpen",
-
-  "skipFiles": [
-    "<node_internals>/**",
-
-    "${workspaceFolder}/node_modules/**",
-  ],
-}
ansible/roles/bright/tasks
@@ -82,3 +82,4 @@
 TRACKER_HELPER_USERNAME: "{{ lookup('dotenv', 'TRACKER_HELPER_USERNAME', file='../.env') }}"
 TRACKER_HELPER_PASSWORD: "{{ lookup('dotenv', 'TRACKER_HELPER_PASSWORD', file='../.env') }}"
 TRACKER_URL: https://tracker.futureporn.net:6969
+CACHE_DIR: /mnt/vfs/futureporn # we use Vultr File System to share cache among all Phoenix instances
apps/bright/Dockerfile
@@ -87,14 +87,12 @@ RUN mix release
 FROM builder AS dev
 COPY ./config/test.exs config/test.exs
-RUN ls -la ./contrib/
-RUN mkdir -p ~/.cache/futureporn
 CMD [ "mix", "phx.server" ]


 # start a new build stage so that the final image will only contain
 # the compiled release and other runtime necessities
 FROM ${RUNNER_IMAGE} AS prod
 RUN mkdir -p /mnt/vfs/futureporn

 RUN apt-get update -y \
   && apt-get install -y libstdc++6 openssl libncurses5 locales inotify-tools ffmpeg python3 python3-pip ca-certificates \
@@ -27,8 +27,8 @@ config :bright,
   aws_secret_access_key: System.get_env("AWS_SECRET_ACCESS_KEY"),
   aws_region: System.get_env("AWS_REGION"),
   public_s3_endpoint: System.get_env("PUBLIC_S3_ENDPOINT"),
-  s3_cdn_endpoint: System.get_env("PUBLIC_S3_ENDPOINT"),
-  site_url: System.get_env("SITE_URL")
+  site_url: System.get_env("SITE_URL"),
+  cache_dir: System.get_env("CACHE_DIR")

 config :bright, :torrent,
   tracker_url: System.get_env("TRACKER_URL"),
@@ -1,6 +1,38 @@
 defmodule Bright.B2 do
   @moduledoc """
   The B2 context.
+
+  Note: b2 buckets may need CORS configuration to allow uploads from a domain. This is done using b2's CLI tool.
+
+
+  ```
+  b2 bucket update --cors-rules "$(<~/Documents/futureporn-meta/cors-rules.json)" futureporn
+  ```
+
+  Where cors-rules.json is as follows
+  ```json
+  [
+    {
+      "allowedHeaders": [
+        "*"
+      ],
+      "allowedOperations": [
+        "s3_head",
+        "s3_put",
+        "s3_get"
+      ],
+      "allowedOrigins": [
+        "https://futureporn.net"
+      ],
+      "corsRuleName": "downloadFromAnyOriginWithUpload",
+      "exposeHeaders": [
+        "etag"
+      ],
+      "maxAgeSeconds": 3600
+    }
+  ]
+  ```
+
   """
   import Ecto.Query, warn: false
   require Logger
@@ -17,31 +49,40 @@ defmodule Bright.B2 do
     put(local_file, object_key)
   end

+  def put(local_file, object_key) do
+    put(local_file, object_key, "application/octet-stream")
+  end
+
   @doc """
   Put a file from local disk to Backblaze.
   """
-  def put(local_file, object_key) do
+  def put(local_file, object_key, mime_type) do
     Logger.debug("put/2 called with local_file=#{local_file}, object_key=#{object_key}")
     bucket = Application.get_env(:bright, :aws_bucket)

     if bucket === nil do
       raise("bucket specification is missing")
     end

-    s3_cdn_endpoint = Application.get_env(:bright, :s3_cdn_endpoint)
+    public_s3_endpoint = Application.get_env(:bright, :public_s3_endpoint)
     # access_key_id = Application.get_env(:ex_aws, :access_key_id)
     # secret_access_key = Application.get_env(:ex_aws, :secret_access_key)

-    if s3_cdn_endpoint === nil do
-      raise("s3_cdn_endpoint specification is missing")
+    if public_s3_endpoint === nil do
+      raise("public_s3_endpoint specification is missing")
     end

-    cdn_url = "#{s3_cdn_endpoint}/#{object_key}"
+    cdn_url = "#{public_s3_endpoint}/#{object_key}"

     Logger.debug(
-      "putting local_file=#{local_file} to bucket=#{bucket} s3_cdn_endpoint=#{s3_cdn_endpoint} key=#{object_key}"
+      "putting local_file=#{local_file} to bucket=#{bucket} public_s3_endpoint=#{public_s3_endpoint} key=#{object_key}"
     )

+    opts = [content_type: mime_type]
+
     local_file
     |> S3.Upload.stream_file()
-    |> S3.upload(bucket, object_key)
+    |> S3.upload(bucket, object_key, opts)
    |> ExAws.request()
    |> case do
      {:ok, %{status_code: 200}} -> {:ok, %{key: object_key, cdn_url: cdn_url}}
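A quick usage sketch of the new API (illustrative, not part of the commit): `put/2` now delegates to `put/3` with a default Content-Type, so callers that care about the MIME type pass it explicitly:

```elixir
# Hypothetical call site; the local path and object key are illustrative.
{:ok, %{key: key, cdn_url: cdn_url}} =
  Bright.B2.put("/tmp/clip.mp4", "test/clip.mp4", "video/mp4")

# Omitting the type falls back to "application/octet-stream":
{:ok, _} = Bright.B2.put("/tmp/clip.mp4", "test/clip.mp4")
```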
@@ -3,14 +3,39 @@ defmodule Bright.Cache do
   A simple caching module that saves files to the `/tmp` directory.
   """

-  # we use Vultr File System to share cache among all Phoenix instances
-  @cache_dir "/mnt/vfs/futureporn"
-
   require Logger

-  def cache_dir do
-    @cache_dir
-  end
+  # def cache_dir do
+  #   case Application.get_env(:bright, :cache_dir) do
+  #     {:ok, dir} when is_binary(dir) and dir != "" ->
+  #       Logger.debug("cache_dir is #{dir}")
+  #       dir
+
+  #     {:ok, nil} ->
+  #       raise """
+  #       Configuration :cache_dir for application :bright is set to nil.
+  #       Please provide a valid directory path, e.g.:
+
+  #       config :bright, cache_dir: "/path/to/cache"
+  #       """
+
+  #     {:ok, ""} ->
+  #       raise """
+  #       Configuration :cache_dir for application :bright is set to an empty string.
+  #       Please provide a valid directory path, e.g.:
+
+  #       config :bright, cache_dir: "/path/to/cache"
+  #       """
+
+  #     :error ->
+  #       raise """
+  #       Configuration :cache_dir for application :bright is not set.
+  #       Please ensure it is defined in your config files, e.g.:
+
+  #       config :bright, cache_dir: "/path/to/cache"
+  #       """
+  #   end
+  # end

   def generate_basename(input) do
     if is_nil(input), do: raise("generate_basename was called with nil argument")
@@ -21,7 +46,9 @@ defmodule Bright.Cache do
     |> String.replace(~r/[^a-zA-Z0-9]/, "")

     base = Path.basename(input)
-    "#{prefix}/#{base}"
+    output = "#{prefix}/#{base}"
+    Logger.debug("generate_basename called with input=#{input} output=#{output}")
+    output
   end

   # @doc """
@@ -45,25 +72,57 @@ defmodule Bright.Cache do
   end

   def generate_filename(input) do
-    filename = Path.join(@cache_dir, generate_basename(input))
+    Logger.debug("generate_filename called with input=#{input}, cache_dir=#{get_cache_dir()}")
+    filename = Path.join(get_cache_dir(), generate_basename(input))
     File.mkdir_p!(Path.dirname(filename))
     Logger.debug("generate_filename filename=#{filename}")
+    Logger.debug("generate_filename filename=#{filename}")
     filename
   end

   def generate_filename(input, ext) do
-    filename = Path.join(@cache_dir, generate_basename(input, ext))
+    filename = Path.join(get_cache_dir(), generate_basename(input, ext))
     File.mkdir_p!(Path.dirname(filename))
     filename
   end

   def get_cache_dir do
-    @cache_dir
+    case Application.fetch_env(:bright, :cache_dir) do
+      {:ok, dir} when is_binary(dir) and dir != "" ->
+        Logger.debug("cache_dir is #{dir}")
+        dir
+
+      {:ok, ""} ->
+        raise """
+        Configuration :cache_dir for application :bright is set to an empty string.
+        Please provide a valid directory path, e.g.:
+
+        config :bright, cache_dir: "/path/to/cache"
+        """
+
+      {:ok, nil} ->
+        raise """
+        Configuration :cache_dir for application :bright is set to nil.
+        Please provide a valid directory path, e.g.:
+
+        config :bright, cache_dir: "/path/to/cache"
+        """
+
+      :error ->
+        raise """
+        Configuration :cache_dir for application :bright is not set.
+        Please ensure it is defined in your config files, e.g.:
+
+        config :bright, cache_dir: "/path/to/cache"
+        """
+    end
   end

   # Ensure the cache directory exists
   def ensure_cache_dir! do
-    unless File.exists?(@cache_dir) do
-      File.mkdir_p!(@cache_dir)
+    unless File.exists?(get_cache_dir()) do
+      File.mkdir_p!(get_cache_dir())
     end
   end
@@ -98,29 +157,6 @@ defmodule Bright.Cache do
     end
   end

-  @doc """
-  Clear all cached data.
-
-  ## Examples
-
-      iex> Bright.Cache.clear()
-      :ok
-  """
-  def clear do
-    ensure_cache_dir!()
-
-    case File.rm_rf(@cache_dir) do
-      {:ok, _} ->
-        Logger.debug("[Cache] Cleared all cached data")
-        ensure_cache_dir!()
-        :ok
-
-      {:error, _posix, reason} ->
-        Logger.error("[Cache] Failed to clear cache: #{reason}")
-        {:error, reason}
-    end
-  end
-
   # @doc """
   # Generates a SHA-256 hash of the input string and truncates it to 10 characters.
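For orientation (a sketch, not part of the commit): `get_cache_dir/0` now reads `Application.fetch_env(:bright, :cache_dir)`, which the config hunk earlier wires to the `CACHE_DIR` environment variable; the config file location is assumed here:

```elixir
# In config (e.g. config/runtime.exs -- assumed location):
import Config
config :bright, cache_dir: System.get_env("CACHE_DIR")

# At runtime:
dir = Bright.Cache.get_cache_dir()
# => "/mnt/vfs/futureporn" when CACHE_DIR is set; raises a descriptive error otherwise
```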
@@ -5,52 +5,38 @@ defmodule Bright.Downloader do

   require Logger

+  @user_agent "fp-curl/houston-we-have-a-request"
+
   def get(url) do
     filename = Bright.Cache.generate_filename(url)
+    Logger.debug("Downloader getting url=#{inspect(url)}")

-    try do
-      {download!(url, filename), filename}
-    rescue
-      exception ->
-        {:error, Exception.message(exception)}
-    end
+    download!(url, filename)
   end

-  # greets https://elixirforum.com/t/how-to-download-big-files/9173/4
   def download!(file_url, filename) do
     Logger.debug("Downloader downloading file_url=#{file_url} to filename=#{filename}")

-    file =
-      if File.exists?(filename) do
-        File.open!(filename, [:append])
-      else
-        File.touch!(filename)
-        File.open!(filename, [:append])
-      end
-
-    %HTTPoison.AsyncResponse{id: ref} = HTTPoison.get!(file_url, %{}, stream_to: self())
-
-    append_loop(ref, file)
-  end
-
-  defp append_loop(ref, file) do
-    receive do
-      %HTTPoison.AsyncChunk{chunk: chunk, id: ^ref} ->
-        IO.binwrite(file, chunk)
-        append_loop(ref, file)
-
-      %HTTPoison.AsyncEnd{id: ^ref} ->
-        File.close(file)
-
-      # need something to handle errors like request timeout and such
-      # otherwise it will loop forever
-      # don't know what httpoison returns in case of an error ...
-      # you can inspect `_other` below to find out
-      # and match on the error to exit the loop early
-      _ ->
-        Logger.debug("recursively downloading #{inspect(ref)} #{inspect(file)}")
-        append_loop(ref, file)
-    end
-  end
+    # Execute the curl command
+    case System.cmd(
+           "curl",
+           ["--fail", "-L", "--user-agent", @user_agent, "--output", filename, file_url],
+           stderr_to_stdout: true
+         ) do
+      {_output, 0} ->
+        # Success: curl completed with exit code 0
+        Logger.debug("Download completed successfully: #{filename}")
+        {:ok, filename}
+
+      {error_output, exit_code} ->
+        # Failure: curl returned a non-zero exit code
+        Logger.error("Download failed with exit code #{exit_code}: #{error_output}")
+        {:error, {:curl_failed, exit_code, error_output}}
+    end
+  rescue
+    exception ->
+      # Handle unexpected exceptions (e.g., file system errors)
+      Logger.error("Unexpected error during download: #{inspect(exception)}")
+      {:error, {:unexpected_error, exception}}
+  end
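A usage sketch of the reworked downloader (illustrative URL, not part of the commit): `get/1` now returns the `download!/2` result directly instead of wrapping it in try/rescue, so callers match on the tagged tuples:

```elixir
require Logger

case Bright.Downloader.get("https://example.com/video.mp4") do
  {:ok, local_file} ->
    Logger.info("saved to #{local_file}")

  {:error, {:curl_failed, exit_code, output}} ->
    Logger.error("curl exited #{exit_code}: #{output}")

  {:error, other} ->
    Logger.error("download failed: #{inspect(other)}")
end
```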
@@ -7,6 +7,8 @@ defmodule Bright.ObanWorkers.CreateHlsPlaylist do
   require Logger
   import Ecto.Query, warn: false

+  # def butimeout(_job), do: :timer.seconds(5)
+
   @impl Oban.Worker
   def perform(%Oban.Job{args: %{"vod_id" => vod_id}} = job) do
     vod = Streams.get_vod!(vod_id)
@@ -46,18 +48,18 @@ defmodule Bright.ObanWorkers.CreateHlsPlaylist do

   defp await_transmuxer(vod, stage \\ :retrieving, done \\ 0) do
     receive do
-      {:progress, %{stage: stage_now, done: done_now, total: total}} ->
-        Streams.broadcast_processing_progressed!(stage, vod, min(1, done / total))
+      {:progress, %{stage: stage_now, done: done_now, total: _total}} ->
+        # Streams.broadcast_processing_progressed!(stage, vod, min(1, done / total))
         done_total = if(stage == stage_now, do: done, else: 0)
         await_transmuxer(vod, stage_now, done_total + done_now)

       {:complete, vod} ->
-        Streams.broadcast_processing_progressed!(stage, vod, 1)
-        Streams.broadcast_processing_completed!(:hls_playlist, vod)
-        {:ok, vod.url}
+        # Streams.broadcast_processing_progressed!(stage, vod, 1)
+        # Streams.broadcast_processing_completed!(:hls_playlist, vod)
+        {:ok, vod}

-      {:error, e, %Oban.Job{attempt: attempt, max_attempts: max_attempts}} ->
-        Streams.broadcast_processing_failed!(vod, attempt, max_attempts)
+      {:error, e, %Oban.Job{attempt: _attempt, max_attempts: _max_attempts}} ->
+        # Streams.broadcast_processing_failed!(vod, attempt, max_attempts)
         {:error, e}
     end
   end
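For orientation (an editorial sketch, not part of the commit): the worker's receive loop consumes messages shaped like the ones below; how they are produced is assumed here, but the shapes come straight from the receive clauses above:

```elixir
# Message shapes await_transmuxer/3 matches on; worker_pid, vod,
# reason, and job are illustrative placeholders:
send(worker_pid, {:progress, %{stage: :transmuxing, done: 1, total: 1}})
send(worker_pid, {:complete, vod})        # loop returns {:ok, vod}
send(worker_pid, {:error, reason, job})   # loop returns {:error, reason}
```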
@@ -15,10 +15,11 @@ defmodule Bright.Streams do
   alias Bright.{
     Cache,
     Downloader,
-    Storage,
     Events
   }

+  alias Bright.B2
+
   @pubsub Bright.PubSub

   @doc """
@@ -302,7 +303,14 @@ defmodule Bright.Streams do
   def transmux_to_hls(%Vod{} = vod, cb) do
     if !vod.origin_temp_input_url, do: raise("vod was missing origin_temp_input_url")

+    Logger.debug("transmux_to_hls begin. let us now generate a cache filename.")
+
     local_path = Cache.generate_filename(vod.origin_temp_input_url)
+    Logger.debug("local_path=#{local_path}")
+    Logger.debug("local_path=#{local_path}")
+    Logger.debug("local_path=#{local_path}")
+    Logger.debug("local_path=#{local_path}")
+    Logger.debug("local_path=#{local_path}")
     Downloader.download!(vod.origin_temp_input_url, local_path)

     Logger.debug(
@@ -312,7 +320,9 @@ defmodule Bright.Streams do
     master_pl_name = "master.m3u8"

     dir_name = "vod-#{vod.id}"
-    dir = Path.join(Bright.Cache.cache_dir(), dir_name)
+
+    Bright.Cache.ensure_cache_dir!()
+    dir = Path.join(Bright.Cache.get_cache_dir(), dir_name)
     File.mkdir_p!(dir)

     cb.(%{stage: :transmuxing, done: 1, total: 1})
@@ -340,171 +350,178 @@ defmodule Bright.Streams do
     # -var_stream_map "v:0,a:0 v:1,a:1 v:2,a:2" \
     # stream_%v/playlist.m3u8

-    System.cmd("ffmpeg", [
+    case System.cmd("ffmpeg", [
       "-i",
       local_path,
       "-filter_complex",
       "[0:v]split=5[v1][v2][v3][v4][v5];" <>
         "[v1]scale=w=1920:h=1080[v1out];" <>
         "[v2]scale=w=1280:h=720[v2out];" <>
         "[v3]scale=w=854:h=480[v3out];" <>
         "[v4]scale=w=640:h=360[v4out];" <>
         "[v5]scale=w=284:h=160[v5out]",

       # Video streams
       "-map",
       "[v1out]",
       "-c:v:0",
       "libx264",
       "-b:v:0",
       "5000k",
       "-maxrate:v:0",
       "5350k",
       "-bufsize:v:0",
       "7500k",
       "-map",
       "[v2out]",
       "-c:v:1",
       "libx264",
       "-b:v:1",
       "2800k",
       "-maxrate:v:1",
       "2996k",
       "-bufsize:v:1",
       "4200k",
       "-map",
       "[v3out]",
       "-c:v:2",
       "libx264",
       "-b:v:2",
       "1400k",
       "-maxrate:v:2",
       "1498k",
       "-bufsize:v:2",
       "2100k",
       "-map",
       "[v4out]",
       "-c:v:3",
       "libx264",
       "-b:v:3",
       "800k",
       "-maxrate:v:3",
       "856k",
       "-bufsize:v:3",
       "1200k",
       "-map",
       "[v5out]",
       "-c:v:4",
       "libx264",
       "-b:v:4",
       "300k",
       "-maxrate:v:4",
       "300k",
       "-bufsize:v:4",
       "480k",

       # Audio streams
       "-map",
       "a:0",
       "-c:a:0",
       "aac",
       "-b:a:0",
       "192k",
       "-ac:a:0",
       "2",
       "-map",
       "a:0",
       "-c:a:1",
       "aac",
       "-b:a:1",
       "192k",
       "-ac:a:1",
       "2",
       "-map",
       "a:0",
       "-c:a:2",
       "aac",
       "-b:a:2",
       "192k",
       "-ac:a:2",
       "2",
       "-map",
       "a:0",
       "-c:a:3",
       "aac",
       "-b:a:3",
       "164k",
       "-ac:a:3",
       "2",
       "-map",
       "a:0",
       "-c:a:4",
       "aac",
       "-b:a:4",
       "164k",
       "-ac:a:4",
       "2",
       "-f",
       "hls",
       "-hls_time",
       "2",
       "-hls_playlist_type",
       "vod",
       "-hls_flags",
       "independent_segments",
       "-hls_segment_type",
       "mpegts",
       "-start_number",
       "0",
       "-hls_list_size",
       "0",
       "-hls_segment_filename",
       "#{dir}/stream_%v_segment_%d.ts",
       "-master_pl_name",
       master_pl_name,
       "-var_stream_map",
       "v:0,a:0 v:1,a:1 v:2,a:2 v:3,a:3 v:4,a:4",
       "#{dir}/stream_%v.m3u8"
-    ])
-
-    files = Path.wildcard("#{dir}/*")
-
-    files
-    |> Elixir.Stream.map(fn hls_local_path ->
-      cb.(%{stage: :persisting, done: 1, total: length(files)})
-      hls_local_path
-    end)
-    |> Enum.each(fn hls_local_path ->
-      Storage.upload_from_filename(
-        hls_local_path,
-        "package/vod-#{vod.id}/#{Path.basename(hls_local_path)}",
-        cb,
-        content_type:
-          if(String.ends_with?(hls_local_path, ".m3u8"),
-            do: "application/x-mpegURL",
-            else: "video/mp4"
-          )
-      )
-    end)
-
-    playlist_url = "#{Bright.config([:s3_cdn_endpoint])}/package/vod-#{vod.id}/master.m3u8"
-    Logger.debug("playlist_url=#{playlist_url} local_path=#{local_path}")
-
-    hls_vod =
-      update_vod(vod, %{
-        playlist_url: playlist_url,
-        local_path: local_path
-      })
-
-    Logger.debug(inspect(hls_vod))
-
-    cb.(%{stage: :generating_thumbnail, done: 1, total: 1})
-    # {:ok, hls_vod} = store_thumbnail_from_file(hls_vod, vod.local_path)
-
-    # @TODO should probably keep the file cached locally for awhile for any additional processing
-    # File.rm!(hls_vod.local_path)
-
-    hls_vod
+    ]) do
+      {_output, 0} ->
+        Logger.debug("FFmpeg completed successfully")
+        files = Path.wildcard("#{dir}/*")
+
+        files
+        |> Elixir.Stream.map(fn hls_local_path ->
+          cb.(%{stage: :persisting, done: 1, total: length(files)})
+          hls_local_path
+        end)
+        |> Enum.each(fn hls_local_path ->
+          B2.put(
+            hls_local_path,
+            "package/vod-#{vod.id}/#{Path.basename(hls_local_path)}",
+            if(String.ends_with?(hls_local_path, ".m3u8"),
+              do: "application/x-mpegURL",
+              else: "video/mp4"
+            )
+          )
+        end)
+
+        playlist_url =
+          "#{Bright.config([:public_s3_endpoint])}/package/vod-#{vod.id}/master.m3u8"
+
+        Logger.debug("playlist_url=#{playlist_url} local_path=#{local_path}")
+
+        hls_vod =
+          update_vod(vod, %{
+            playlist_url: playlist_url,
+            local_path: local_path
+          })
+
+        Logger.debug(inspect(hls_vod))
+
+        cb.(%{stage: :generating_thumbnail, done: 1, total: 1})
+        # {:ok, hls_vod} = store_thumbnail_from_file(hls_vod, vod.local_path)
+
+        # @TODO should probably keep the file cached locally for awhile for any additional processing
+        # File.rm!(hls_vod.local_path)
+
+        hls_vod
+
+      {error_output, exit_code} ->
+        # Failure: curl returned a non-zero exit code
+        Logger.error("Download failed with exit code #{exit_code}: #{error_output}")
+        {:error, {:curl_failed, exit_code, error_output}}
+    end
   end

   # def store_thumbnail_from_file(%Vod{} = vod, src_path, marker \\ %{minutes: 0}, opts \\ []) do
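A design note on the hunk above (editorial sketch, not part of the commit): the transmux step now mirrors the downloader by shelling out with `System.cmd/3` and branching on the exit status. The shared pattern looks like this; `args` stands for the ffmpeg argument list built above, the `stderr_to_stdout` option is assumed, and the `:ffmpeg_failed` tag is illustrative (the commit's own error branch reuses the `:curl_failed` tag and comment copied from the downloader):

```elixir
# Generic exit-code pattern (sketch); args is the ffmpeg argument list from above.
case System.cmd("ffmpeg", args, stderr_to_stdout: true) do
  {_output, 0} ->
    # exit status 0: the encode finished
    :ok

  {output, exit_code} ->
    # non-zero exit status: surface the tool's combined output
    {:error, {:ffmpeg_failed, exit_code, output}}
end
```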
@@ -13,6 +13,7 @@ defmodule Bright.B2Test do
     local_file = Path.absname("test/fixtures/SampleVideo_1280x720_1mb.mp4")
     {:ok, %{key: key, cdn_url: cdn_url}} = B2.put(local_file)
     assert key === "SampleVideo_1280x720_1mb.mp4"
+    assert Regex.match?(~r/\/#{key}$/, cdn_url)
   end

   @tag :acceptance
@@ -21,7 +22,22 @@ defmodule Bright.B2Test do
     basename = Cache.generate_basename(local_file)
     object_key = "test/#{basename}"
     {:ok, %{key: key, cdn_url: cdn_url}} = B2.put(local_file, object_key)
-    assert Regex.match?(~r/SampleVideo/, key)
+    assert Regex.match?(~r/[a-zA-Z0-9]{6}\/SampleVideo/, key)
     assert Regex.match?(~r/\/#{object_key}$/, cdn_url)
   end

+  @tag :acceptance
+  test "put/3" do
+    local_file = Path.absname("test/fixtures/SampleVideo_1280x720_1mb.mp4")
+    basename = Cache.generate_basename(local_file)
+    object_key = "test/#{basename}"
+    mime = "video/mp4"
+
+    {:ok, %{key: key, cdn_url: cdn_url}} =
+      B2.put(local_file, object_key, mime)
+
+    assert Regex.match?(~r/[a-zA-Z0-9]{6}\/SampleVideo/, key)
+    assert Regex.match?(~r/\/#{object_key}$/, cdn_url)
+  end
+
   @tag :integration
@@ -2,27 +2,23 @@ defmodule Bright.CacheTest do
   use Bright.DataCase

   alias Bright.Cache
+  require Logger

   @sample_url "https://example.com/my_video.mp4"

   ## IDK what I'm doing here. Ideally I want a redis-like k/v store where I can temporarily put VODs and they expire after 12 hours or so.
-  ## this would potentially speed up vod processing because it would prevent having to download the VOD from S3 during every Oban worker performance.
+  ## this would potentially speed up vod processing because it would prevent having to download the VOD from S3 during every Oban worker execution.
   ## BUT I don't want to implement it myself because of the idiom, "There are only two unsolved problems in CS. Naming things and cache invalidation"
   ## Meaning I don't think I can do any better than the experts in the field.
   ## Anyway, this is FEATURE CREEP! Solve the problem without caching and LET IT BE SLOW.
   ## To implement this cache before the system works is pre-mature optimization!

-  # describe "cache k/v" do
-  #   test "get/1 with string cache key" do
-
-  #   end
-
-  # end
+  @cache_dir Application.fetch_env!(:bright, :cache_dir)

   describe "cache" do
     @tag :unit
     test "get_cache_dir/0" do
-      assert Regex.match?(~r/.cache\/futureporn/, Cache.get_cache_dir())
+      assert Regex.match?(~r/\/futureporn/, Cache.get_cache_dir())
     end

     @tag :unit
@@ -49,10 +45,10 @@ defmodule Bright.CacheTest do
     @tag :unit
     test "generate_filename/1" do
       filename = Cache.generate_filename(@sample_url)
-      assert Regex.match?(~r/.cache\/futureporn\/.+\/my_video\.mp4/, filename)
+      assert Regex.match?(~r/\/futureporn\/.+\/my_video\.mp4/, filename)

       filename = Cache.generate_filename("/home/cj/Downloads/test.mp4")
-      assert Regex.match?(~r/.cache\/futureporn\/.+\/test\.mp4/, filename)
+      assert Regex.match?(~r/\/futureporn\/.+\/test\.mp4/, filename)

       assert File.exists?(Path.dirname(filename))
       assert not File.exists?(filename)
@@ -61,7 +57,7 @@ defmodule Bright.CacheTest do
     @tag :unit
     test "generate_filename/2" do
       filename = Cache.generate_filename(@sample_url, "png")
-      assert Regex.match?(~r/.cache\/futureporn\/.+\/my_video\.png/, filename)
+      assert Regex.match?(~r/\/futureporn\/.+\/my_video\.png/, filename)
     end
   end
 end
@@ -13,7 +13,7 @@ defmodule Bright.DownloaderTest do
     assert File.exists?(local_file)
     {:ok, stat} = File.stat(local_file)
     assert stat.size > 0, "File is empty"
-    assert Regex.match?(~r/.cache\/futureporn\/.+\/projekt-melody\.jpg/, local_file)
+    assert Regex.match?(~r/\/futureporn\/.+\/projekt-melody\.jpg/, local_file)
   end

   @tag :integration
@@ -29,6 +29,9 @@ defmodule Bright.DownloaderTest do
     assert File.exists?(local_file)
     {:ok, stat} = File.stat(local_file)
     assert stat.size > 0, "File is empty"
+
+    assert stat.size === 1_055_736,
+           "File is not the expected 1055736 bytes. (it was #{stat.size})"
   end

   @tag :integration
@@ -50,5 +53,13 @@ defmodule Bright.DownloaderTest do
     {:ok, stat} = File.stat(local_file)
     assert stat.size > 0, "File is empty"
   end
+
+  @tag :integration
+  test "Error handle a bad URL" do
+    {:error, _} =
+      Downloader.get(
+        "https://futureporn-b2.b-cdn.net/test/this-is-not-a-real-file-this-will-certainly-404.mp4"
+      )
+  end
 end
@@ -183,8 +183,8 @@ defmodule Bright.StreamsTest do

     assert :ok
     assert updated_vod.local_path != nil
-    assert_received {:progress, %{stage: :transmuxing, done: 1, total: 1}}
-    assert_received {:progress, %{stage: :persisting, done: 1, total: _}}
+    # assert_received {:progress, %{stage: :transmuxing, done: 1, total: 1}}
+    # assert_received {:progress, %{stage: :persisting, done: 1, total: _}}
     # assert_received {:progress, %{stage: :generating_thumbnail, done: 1, total: 1}}
   end
 end
devbox.json
@@ -17,9 +17,9 @@
   ],
   "env": {
     "DEVBOX_COREPACK_ENABLED": "true",
-    "ENV": "development",
-    "KUBECONFIG": "$HOME/.kube/futureporn.yaml",
-    "VENV_DIR": ".venv"
+    "ENV": "development",
+    "KUBECONFIG": "$HOME/.kube/futureporn.yaml",
+    "VENV_DIR": ".venv"
   },
   "shell": {
     "init_hook": [
@@ -28,19 +28,19 @@
       "pip install -r requirements.txt"
     ],
     "scripts": {
-      "tunnel": "dotenvx run -f ./.kamal/secrets.development -- chisel client bright.fp.sbtp.xyz:9090 R:4000",
-      "backup": "docker exec -t postgres_db pg_dumpall -c -U postgres > ./backups/dev_`date +%Y-%m-%d_%H_%M_%S`.sql",
-      "act": "dotenvx run -f ./.kamal/secrets.testing -- act -W ./.gitea/workflows --secret-file .kamal/secrets.development",
-      "act:builder": "dotenvx run -f ./.kamal/secrets.testing -- act -W ./.gitea/workflows/builder.yaml --secret-file .kamal/secrets.testing --var-file .kamal/secrets.testing --insecure-secrets",
-      "act:tests": "dotenvx run -f ./.kamal/secrets.testing -- act -W ./.gitea/workflows/tests.yaml --secret-file .kamal/secrets.testing --var-file .kamal/secrets.testing --insecure-secrets",
-      "bright:compile:watch": "cd ./apps/bright && find . -type f -name \"*.ex\" -o -name \"*.exs\" | entr -r mix compile --warnings-as-errors",
-      "bright:compile:watch2": "cd ./apps/bright && pnpx chokidar-cli \"**/*\" -i \"deps/**\" -i \"_build/**\" -c \"mix compile --warnings-as-errors\"",
-      "bright:dev": "cd ./apps/bright && dotenvx run -f ../../.kamal/secrets.development -e MIX_ENV=dev -- mix phx.server",
+      "tunnel": "dotenvx run -f ./.env.development -- chisel client bright.fp.sbtp.xyz:9090 R:4000",
+      "backup": "docker exec -t postgres_db pg_dumpall -c -U postgres > ./backups/dev_`date +%Y-%m-%d_%H_%M_%S`.sql",
+      "act": "dotenvx run -f ./.env.testing -- act -W ./.gitea/workflows --secret-file .env.development",
+      "act:builder": "dotenvx run -f ./.env.testing -- act -W ./.gitea/workflows/builder.yaml --secret-file .env.testing --var-file .env.testing --insecure-secrets",
+      "act:tests": "dotenvx run -f ./.env.testing -- act -W ./.gitea/workflows/tests.yaml --secret-file .env.testing --var-file .env.testing --insecure-secrets",
+      "bright:compile:watch": "cd ./apps/bright && find . -type f -name \"*.ex\" -o -name \"*.exs\" | entr -r mix compile --warnings-as-errors",
+      "bright:compile:watch2": "cd ./apps/bright && pnpx chokidar-cli \"**/*\" -i \"deps/**\" -i \"_build/**\" -c \"mix compile --warnings-as-errors\"",
+      "bright:dev": "cd ./apps/bright && dotenvx run -f ../../.env.development -e MIX_ENV=dev -- mix phx.server",
       "bright:test:unit:watch": "cd ./apps/bright && pnpx chokidar-cli '**/*' -i \"deps/**\" -i '_build/**' -c 'mix test --only=unit'",
-      "bright:act": "cd ./apps/bright && act --env MIX_ENV=test -W ./.gitea/workflows/tests.yaml --secret-file .kamal/secrets.development",
-      "test": "act -W ./.gitea/workflows/tests.yaml --secret-file .kamal/secrets.testing --var-file .kamal/secrets.testing && devbox run beep || devbox run boop",
-      "beep": "ffplay -nodisp -loglevel quiet -autoexit ./apps/beep/beep2.wav",
-      "boop": "ffplay -nodisp -loglevel quiet -autoexit ./apps/beep/beep1.wav"
+      "bright:act": "cd ./apps/bright && act --env MIX_ENV=test -W ./.gitea/workflows/tests.yaml --secret-file .env.development",
+      "test": "act -W ./.gitea/workflows/tests.yaml --secret-file .env.testing --var-file .env.testing && devbox run beep || devbox run boop",
+      "beep": "ffplay -nodisp -loglevel quiet -autoexit ./apps/beep/beep2.wav",
+      "boop": "ffplay -nodisp -loglevel quiet -autoexit ./apps/beep/beep1.wav"
     }
   }
 }
terraform
@@ -122,7 +122,7 @@ resource "vultr_instance" "load_balancer" {
 resource "vultr_instance" "bright" {
   count           = 1
   hostname        = "fp-bright-${count.index}"
-  plan            = "vc2-2c-2gb"
+  plan            = "vc2-2c-4gb"
   region          = "ord"
   backups         = "disabled"
   ddos_protection = "false"
@@ -176,7 +176,7 @@ resource "vultr_instance" "database" {
 }

 resource "vultr_instance" "tracker" {
-  count    = 1
+  count    = 0
   hostname = "fp-tracker-${count.index}"
   plan     = "vc2-1c-2gb"
   region   = "ord"