add ipfs playbook

CJ_Clippy 2025-03-27 23:25:57 -08:00
parent 055a3ab66f
commit b61c55d1e9
30 changed files with 486 additions and 434 deletions

@@ -21,7 +21,7 @@ jobs:
   # our workaround is to reference service containers two different ways.
   # - when running nektos/act on localhost, we reference the service as localhost:<port_number>.
   # - when running gitea act_runner on gitea, we reference the service's dns name.
+  # these references are defined in environment variables
   test_phoenix:
     env:
       SECRET_KEY_BASE: ${{ secrets.SECRET_KEY_BASE }}

@@ -50,4 +50,9 @@ In other words, pick something for a name and roll with the punches.
 > "But it's also about looking at things anew and what they could be instead of what they are"
 > -- Rodney Mullen

 ### Success requires continuing even when it's painful

+### Find what you love and let it kill you
+
+> Find what you love and let it kill you. Let it drain you of your all. Let it cling onto your back and weigh you down into eventual nothingness. Let it kill you and let it devour your remains. For all things will kill you, both slowly and fastly, but it's much better to be killed by a lover.
+> -- Charles Bukowski

@@ -3,5 +3,6 @@ collections:
   - name: cloud.terraform
   - name: community.docker
   - name: community.general
+  - name: gluster.gluster

 roles:
   - name: nvjacobo.caddy

@@ -54,7 +54,7 @@
 - name: Download and extract IPFS Kubo
   ansible.builtin.unarchive:
-    src: https://dist.ipfs.tech/kubo/v0.33.2/kubo_v0.33.2_linux-amd64.tar.gz
+    src: "https://dist.ipfs.tech/kubo/{{ kubo_version }}/kubo_{{ kubo_version }}_linux-amd64.tar.gz"
     dest: ~/
     remote_src: true
@@ -63,12 +63,15 @@
   args:
     creates: /usr/local/bin/ipfs

 - name: Allow UFW ports
   community.general.ufw:
     rule: allow
     port: "{{ item }}"
     proto: tcp
   loop:
     - 8081 # npx http-server -p 8081
     - 8080 # ipfs api
     - 4001 # ipfs swarm

@@ -0,0 +1,3 @@
---
ipfs_kubo_version: v0.34.1
ipfs_cluster_follow_version: v1.1.2

@@ -0,0 +1,16 @@
---
- name: Restart ipfs
ansible.builtin.systemd_service:
name: ipfs
state: restarted
enabled: true
daemon_reload: true
- name: Restart ipfs-cluster-follow
ansible.builtin.systemd_service:
name: ipfs-cluster-follow
state: restarted
enabled: true
daemon_reload: true

@@ -0,0 +1,175 @@
---
- name: Configure firewall (UDP & TCP)
community.general.ufw:
rule: allow
port: "{{ item }}"
proto: any
loop:
- 4001
- 24007
- 24008
- name: Configure firewall (TCP)
community.general.ufw:
rule: allow
port: "{{ item }}"
proto: tcp
loop:
- 29152:65535
- name: Install glusterfs
ansible.builtin.apt:
name:
- glusterfs-server
state: present
- name: Start & enable glusterd service
ansible.builtin.systemd_service:
name: glusterd.service
state: started
enabled: true
# - name: Create gluster volume
# gluster.gluster.gluster_volume:
# state: present
# name: ipfs-datastore
# bricks: /bricks/brick1/g1
# rebalance: true
# cluster: "{{ groups['ipfs'] }}"
# run_once: true
# - name: Start gluster volume
# gluster.gluster.gluster_volume:
# state: started
# name: ipfs-datastore
# - name: Limit volume usage
# gluster.gluster.gluster_volume:
# state: present
# name: ipfs-datastore
# directory: /
# quota: 6.0TB
## Example: mount -t glusterfs fp-bright-0:/gv0 /mountme
# - name: Mount gluster volume
# ansible.posix.mount:
# src: "{{ ansible_hostname }}:/g1"
# path: /mnt/g1
# fstype: glusterfs
# state: mounted
- name: Create ipfs group
ansible.builtin.group:
name: ipfs
state: present
- name: Create ipfs user
ansible.builtin.user:
name: ipfs
group: ipfs
create_home: true
home: /home/ipfs
system: true
- name: Download and extract IPFS Kubo
ansible.builtin.unarchive:
src: "https://dist.ipfs.tech/kubo/{{ ipfs_kubo_version }}/kubo_{{ ipfs_kubo_version }}_linux-amd64.tar.gz"
dest: /tmp
remote_src: true
notify:
- Restart ipfs
- name: Install IPFS Kubo
ansible.builtin.copy:
src: /tmp/kubo/ipfs
dest: /usr/local/bin/ipfs
mode: "0755"
remote_src: true
notify:
- Restart ipfs
- name: Download and extract ipfs-cluster-follow
ansible.builtin.unarchive:
src: "https://dist.ipfs.tech/ipfs-cluster-follow/{{ ipfs_cluster_follow_version }}/ipfs-cluster-follow_{{ ipfs_cluster_follow_version }}_linux-amd64.tar.gz"
dest: /tmp
remote_src: true
notify:
- Restart ipfs-cluster-follow
- name: Install ipfs-cluster-follow
ansible.builtin.copy:
src: /tmp/ipfs-cluster-follow/ipfs-cluster-follow
dest: /usr/local/bin/ipfs-cluster-follow
mode: "0755"
remote_src: true
notify:
- Restart ipfs-cluster-follow
- name: Generate random peername
ansible.builtin.set_fact:
cluster_peername: "{{ lookup('password', '/dev/null length=8 chars=hexdigits') }}"
- name: Create ipfs-cluster-follow service
ansible.builtin.template:
src: ipfs-cluster-follow.service.j2
dest: /etc/systemd/system/ipfs-cluster-follow.service
mode: "0644"
notify:
- Restart ipfs-cluster-follow
- name: Create ipfs service
ansible.builtin.template:
src: ipfs.service.j2
dest: /etc/systemd/system/ipfs.service
mode: "0644"
notify:
- Restart ipfs
- name: Check current value of Routing.AcceleratedDHTClient
ansible.builtin.command: ipfs config Routing.AcceleratedDHTClient
register: ipfs_dht_config
changed_when: false # this never changes things, it only gathers data
- name: Enable IPFS Routing.AcceleratedDHTClient
ansible.builtin.command: ipfs config --json Routing.AcceleratedDHTClient true
notify:
- Restart ipfs
when: ipfs_dht_config.stdout != "true"
changed_when: true
- name: Create IPFS directory
ansible.builtin.file:
dest: /home/ipfs/.ipfs
owner: ipfs
group: ipfs
state: directory
mode: "0755"
- name: Check if IPFS config exists
ansible.builtin.stat:
path: /home/ipfs/.ipfs/config
register: ipfs_config
- name: Initialize IPFS
ansible.builtin.command: /usr/local/bin/ipfs init
become: true
become_user: ipfs
args:
chdir: /home/ipfs
when: not ipfs_config.stat.exists
changed_when: true # Explicitly mark this as a change when it runs
notify:
- Restart ipfs
## @todo enable once we get gluster working
# - name: Symlink IPFS blocks directory to gluster brick
# ansible.builtin.file:
# src: /home/ipfs/.ipfs/blocks
# dest: /mnt/g1/.ipfs/blocks
# owner: ipfs
# group: ipfs
# state: link
# notify:
# - Restart ipfs

@@ -0,0 +1,14 @@
[Unit]
Description=ipfs-cluster-follow
[Service]
Type=simple
Environment=CLUSTER_PEERNAME="{{cluster_peername}}"
ExecStart=/usr/local/bin/ipfs-cluster-follow futureporn.net run --init https://futureporn.net/api/service.json
User=ipfs
Restart=always
RestartSec=10
[Install]
WantedBy=multi-user.target

@@ -0,0 +1,14 @@
[Unit]
Description=IPFS Daemon
[Service]
Type=simple
Environment=IPFS_PATH=/home/ipfs/.ipfs
ExecStart=/usr/local/bin/ipfs daemon
User=ipfs
Restart=always
RestartSec=10
[Install]
WantedBy=multi-user.target

@@ -16,6 +16,12 @@
     roles:
       - common

+- name: Set up IPFS
+  hosts: ipfs
+  gather_facts: true
+  roles:
+    - ipfs
+
 - name: Assert Capture dependencies
   hosts: capture
   gather_facts: true
@@ -55,3 +61,4 @@
     become: false
     roles:
       - load_balancer

@@ -44,15 +44,16 @@ const UppyHook = {
     })
   },
   createMultipartUpload(file) {
-    // console.log(`createMultipartUpload with file ${JSON.stringify(file)}`)
+    console.log(`createMultipartUpload with file ${JSON.stringify(file)}`)
     let { name, type } = file.meta
-    let payload = { name, type }
+    let size = file.size
+    let payload = { name, type, size }
     return new Promise((resolve) => {
       live.pushEvent('initiate_multipart_upload', payload, (reply, ref) => {
-        // console.log(`payload=${JSON.stringify(payload)}`)
-        // console.log(`initiate_multipart_upload pushEvent response callback.`)
-        // console.log(`got reply=${JSON.stringify(reply)}`)
-        // console.log(`got ref=${JSON.stringify(ref)}`)
+        console.log(`payload=${JSON.stringify(payload)}`)
+        console.log(`initiate_multipart_upload pushEvent response callback.`)
+        console.log(`got reply=${JSON.stringify(reply)}`)
+        console.log(`got ref=${JSON.stringify(ref)}`)
         let output = {
           uploadId: reply?.upload_id,

@@ -49,11 +49,11 @@ config :ex_aws,
   ]

 if config_env() == :prod do
-  db_host = System.get_env("DB_HOST") || raise "environment variable DB_HOST is missing."
-  db_user = System.get_env("DB_USER") || raise "environment variable DB_USER is missing."
-  db_pass = System.get_env("DB_PASS") || raise "environment variable DB_PASS is missing."
-  db_port = System.get_env("DB_PORT") || raise "environment variable DB_PORT is missing."
-  db_name = System.get_env("DB_NAME") || raise "environment variable DB_NAME is missing."
+  db_host = System.fetch_env!("DB_HOST")
+  db_user = System.fetch_env!("DB_USER")
+  db_pass = System.fetch_env!("DB_PASS")
+  db_port = System.fetch_env!("DB_PORT")
+  db_name = System.fetch_env!("DB_NAME")

   maybe_ipv6 = if System.get_env("ECTO_IPV6") in ~w(true 1), do: [:inet6], else: []

@@ -9,20 +9,24 @@ config :bcrypt_elixir, :log_rounds, 1
 # to provide built-in test partitioning in CI environment.
 # Run `mix help test` for more information.
 config :bright, Bright.Repo,
-  database: System.get_env("DB_NAME", "bright"),
   # database: "bright_test#{System.get_env("MIX_TEST_PARTITION")}",
-  hostname: System.get_env("DB_HOST", "localhost"),
-  username: System.get_env("DB_USER", "postgres"),
-  password: System.get_env("DB_PASS", "password"),
-  port: System.get_env("DB_PORT", "5433"),
+  database: System.fetch_env!("DB_NAME"),
+  hostname: System.fetch_env!("DB_HOST"),
+  username: System.fetch_env!("DB_USER"),
+  password: System.fetch_env!("DB_PASS"),
+  port: System.fetch_env!("DB_PORT"),
   pool: Ecto.Adapters.SQL.Sandbox,
   pool_size: System.schedulers_online() * 4

+IO.puts(
+  "hello this is test.exs and we are checking env vars. db_host=#{System.fetch_env!("DB_HOST")} db_user=#{System.fetch_env!("DB_USER")} db_pass=#{System.fetch_env!("DB_PASS")} db_port=#{System.fetch_env!("DB_PORT")} db_name=#{System.fetch_env!("DB_NAME")}"
+)

 # We don't run a server during test. If one is required,
 # you can enable the server option below.
 config :bright, BrightWeb.Endpoint,
   http: [ip: {127, 0, 0, 1}, port: 4002],
-  secret_key_base: "#{System.get_env("SECRET_KEY_BASE")}",
+  secret_key_base: "#{System.fetch_env!("SECRET_KEY_BASE")}",
   server: false

 # Prevent Oban from running jobs and plugins during test runs

@@ -616,7 +616,9 @@ defmodule Bright.Streams do
   """
   def list_uploads do
-    Repo.all(Upload)
+    Upload
+    |> Repo.all()
+    |> Repo.preload([:user])
   end

   @doc """
@@ -633,7 +635,11 @@
       ** (Ecto.NoResultsError)

   """
-  def get_upload!(id), do: Repo.get!(Upload, id)
+  def get_upload!(id) do
+    Upload
+    |> Repo.get!(id)
+    |> Repo.preload([:user])
+  end

   @doc """
   Creates a upload.

@@ -6,8 +6,8 @@ defmodule Bright.Streams.Upload do
     field :size, :integer
     field :filename, :string
     field :content_type, :string
-    field :user_id, :id
-    field :vod, :id
+    belongs_to :user, Bright.Users.User

     timestamps(type: :utc_datetime)
   end
@@ -15,7 +15,8 @@
   @doc false
   def changeset(upload, attrs) do
     upload
-    |> cast(attrs, [:filename, :size, :content_type])
-    |> validate_required([:filename, :size, :content_type])
+    |> cast(attrs, [:filename, :size, :content_type, :user_id])
+    |> validate_required([:filename, :size, :content_type, :user_id])
+    |> assoc_constraint(:user)
   end
 end

@@ -12,6 +12,8 @@ defmodule Bright.Users.User do
     field :patron_tier, :integer
     field :role, :string

+    has_many :uploads, Bright.Streams.Upload

     timestamps(type: :utc_datetime)
   end
@@ -71,3 +73,9 @@
     user
   end
 end
+
+defimpl Phoenix.HTML.Safe, for: Bright.Users.User do
+  def to_iodata(user) do
+    Phoenix.HTML.Safe.to_iodata("User #{user.id}")
+  end
+end

@@ -1,7 +1,7 @@
 <.header>
   Listing Uploads
   <:actions>
-    <.link href={~p"/uploads/new"}>
+    <.link href={~p"/upload"}>
       <.button>New Upload</.button>
     </.link>
   </:actions>
@@ -11,11 +11,11 @@
   <:col :let={upload} label="Filename">{upload.filename}</:col>
   <:col :let={upload} label="Size">{upload.size}</:col>
   <:col :let={upload} label="Content type">{upload.content_type}</:col>
+  <:col :let={upload} label="Uploader">{upload.user}</:col>
   <:action :let={upload}>
     <div class="sr-only">
       <.link navigate={~p"/uploads/#{upload}"}>Show</.link>
     </div>
-    <.link navigate={~p"/uploads/#{upload}/edit"}>Edit</.link>
   </:action>
   <:action :let={upload}>
     <.link href={~p"/uploads/#{upload}"} method="delete" data-confirm="Are you sure?">

@@ -1,17 +1,14 @@
 <.header>
   Upload {@upload.id}
   <:subtitle>This is a upload record from your database.</:subtitle>
-  <:actions>
-    <.link href={~p"/uploads/#{@upload}/edit"}>
-      <.button>Edit upload</.button>
-    </.link>
-  </:actions>
+  <:actions></:actions>
 </.header>

 <.list>
   <:item title="Filename">{@upload.filename}</:item>
   <:item title="Size">{@upload.size}</:item>
   <:item title="Content type">{@upload.content_type}</:item>
+  <:item title="Uploader">{@upload.user}</:item>
 </.list>

 <.back navigate={~p"/uploads"}>Back to uploads</.back>

@@ -2,22 +2,12 @@ defmodule BrightWeb.UploadLive.Index do
   use BrightWeb, :live_view

   alias Bright.Streams
-  alias Bright.Streams.Upload
   alias Bright.Utils

   require Logger

+  @impl true
   def mount(_params, _session, socket) do
-    # {:ok,
-    #  socket
-    #  |> assign(:uploaded_files, [])
-    #  |> allow_upload(:vods,
-    #    accept: ~w(.mp4 .mov .ts .avi .mpeg .ogv .webm .3gp .3g2),
-    #    max_entries: 3,
-    #    max_file_size: 80 * 1_000_000_000,
-    #    external: &presign_upload/2
-    #  )}
-    # socket = assign(socket, endpoint: System.fetch_env!("UPPY_ENDPOINT"))
     socket =
       socket
       |> assign(:uploaded_files, [])
@@ -28,135 +18,20 @@
   @impl true
   def render(assigns) do
     ~H"""
-    <div>
-      <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/uppy/4.12.2/uppy.min.css" integrity="sha512-oPlr9/HXIlp7YoIRNsexheOu2/P2sEVi8EFQEAWUlHHijx0QbQ9qgihNYmIYtdJP3xOIMbZcnSVhrznIh5DKkg==" crossorigin="anonymous" referrerpolicy="no-referrer" />
+    <div class="section">
+      <h2 class="title is-2 mb-3">Upload a VOD</h2>
+      <link
+        rel="stylesheet"
+        href="https://cdnjs.cloudflare.com/ajax/libs/uppy/4.12.2/uppy.min.css"
+        integrity="sha512-oPlr9/HXIlp7YoIRNsexheOu2/P2sEVi8EFQEAWUlHHijx0QbQ9qgihNYmIYtdJP3xOIMbZcnSVhrznIh5DKkg=="
+        crossorigin="anonymous"
+        referrerpolicy="no-referrer"
+      />
       <div id="uppy-dashboard" phx-hook="UppyHook" phx-update="ignore"></div>
     </div>
     """
   end
# @impl true
# def handle_event(
# "presign",
# %{"type" => type, "name" => name},
# socket
# ) do
# {:ok, %{url: url}, socket} = presign_url(name, type, socket)
# Logger.debug("✍️✍️✍️ presign called with name=#{name} type=#{type}. url=#{url}")
# # socket = assign(socket, signatures: {name, url})
# # socket =
# # socket
# # |> update(:uploaded_files, &(&1 ++ uploaded_files))
# # |> push_event("process_upload", %{name: name, url: url, type: type})
# {:reply, %{name: name, type: type, url: url}, socket}
# end
# @impl true
# def handle_event("upload_videos", _params, socket) do
# uploaded_files =
# consume_uploaded_entries(socket, :vods, fn %{path: _path}, entry ->
# IO.puts("@todo ⭐⭐⭐ Handling #{entry.client_type} @todo @todo @todo")
# IO.puts(inspect(entry))
# end)
# socket =
# socket
# |> update(:uploaded_files, &(&1 ++ uploaded_files))
# {:noreply, socket}
# end
# def handle_event("update_preview_srcs", %{"srcs" => srcs}, socket) do
# uploaded_files =
# socket.assigns.uploaded_files
# |> Enum.map(fn entry ->
# if Map.has_key?(srcs, entry.ref) do
# entry
# |> Map.put(:preview_src, Map.fetch!(srcs, entry.ref))
# else
# entry
# end
# end)
# socket =
# socket
# |> assign(:uploaded_files, uploaded_files)
# {:noreply, socket}
# end
# def handle_event("validate_upload", _params, socket) do
# num_remaining_uploads =
# length(socket.assigns.uploaded_files) - socket.assigns.uploads.vods.max_entries
# valid =
# Enum.uniq_by(socket.assigns.uploads.vods.entries, & &1.client_name)
# |> Enum.take(num_remaining_uploads)
# socket =
# Enum.reduce(socket.assigns.uploads.vods.entries, socket, fn entry, socket ->
# if entry in valid do
# socket
# else
# socket
# |> cancel_upload(:vods, entry.ref)
# |> put_flash(
# :error,
# "Uploaded files should be unique and cannot exceed #{socket.assigns.uploads.vods.max_entries} total files."
# )
# end
# end)
# {:noreply, socket}
# end
# def handle_event("cancel_upload", %{"ref" => ref}, socket) do
# {:noreply, cancel_upload(socket, :vods, ref)}
# end
# def handle_event("cancel_upload", _params, socket) do
# socket =
# Enum.reduce(socket.assigns.uploads.vods.entries, socket, fn entry, socket ->
# cancel_upload(socket, :vods, entry.ref)
# end)
# {:noreply, socket}
# end
# def presign_upload(name, type, socket) do
# Logger.debug("presign_upload was called with name=#{inspect(name)} and type=#{inspect(type)}")
# config = ExAws.Config.new(:s3)
# bucket = System.fetch_env!("AWS_BUCKET")
# key = "usc/#{Utils.random_string()}/#{name}"
# {:ok, url} =
# ExAws.S3.presigned_url(config, :put, bucket, key,
# expires_in: 3600,
# query_params: [{"Content-Type", type}]
# )
# {:ok, %{uploader: "S3", key: key, url: url}, socket}
# end
# # @doc @see https://hexdocs.pm/ex_aws_s3/ExAws.S3.html#presigned_post/4
# def presigned_post(name, socket) do
# Logger.debug("presigned_post with name=#{inspect(name)}")
# config = ExAws.Config.new(:s3)
# bucket = System.fetch_env!("AWS_BUCKET")
# key = "usc/#{Utils.random_string()}/#{name}"
# {:ok, url} = ExAws.S3.presigned_post(config, )
# end
   defp get_s3_config(name) do
     config = ExAws.Config.new(:s3)
     bucket = System.fetch_env!("AWS_BUCKET")
@@ -164,22 +39,15 @@
     %{config: config, bucket: bucket, key: key}
   end

+  @impl true
   def handle_event(
         "list_parts",
         %{"upload_id" => upload_id, "key" => key},
         socket
       ) do
-    config = ExAws.Config.new(:s3)
     bucket = System.fetch_env!("AWS_BUCKET")
-    # key = "usc/#{Utils.random_string()}/#{name}"
-    # %{config: config, bucket: bucket, key: key} = get_s3_config(name)

-    case ExAws.S3.list_parts(bucket, key, upload_id) do
-      # <Part>
-      # <ETag>"85f30635602dc09bd85957a6e82a2c21"</ETag>
-      # <LastModified>2023-08-31T18:54:55.693Z</LastModified>
-      # <PartNumber>1</PartNumber>
-      # <Size>11</Size>
+    case ExAws.S3.list_parts(bucket, key, upload_id) do
       {:ok, part: %{etag: etag, partNumber: partNumber, size: size}} ->
         Logger.debug("🦠🦠🦠 we got an etag from list_parts. etag=#{inspect(etag)}")
         {:reply, %{etag: etag, partNumber: partNumber, size: size}, socket}
@@ -191,9 +59,10 @@
   end

   # @doc @see https://hexdocs.pm/ex_aws_s3/ExAws.S3.html#initiate_multipart_upload/3
+  @impl true
   def handle_event(
         "initiate_multipart_upload",
-        %{"name" => name, "type" => type},
+        %{"name" => name, "type" => type, "size" => size},
         socket
       ) do
     %{config: config, bucket: bucket, key: key} = get_s3_config(name)
@@ -203,9 +72,22 @@
     case ExAws.request(operation, config) do
       {:ok, %{body: %{key: key, upload_id: upload_id}} = response} ->
         Logger.debug(
-          "Multipart upload initiated. Upload ID: #{upload_id}, Key: #{key}, response=#{inspect(response)}"
+          "👤 Multipart upload initiated. Upload ID: #{upload_id}, Key: #{key}, response=#{inspect(response)}, user.id=#{socket.assigns.current_user.id}"
         )

+        case Streams.create_upload(%{
+               filename: name,
+               content_type: type,
+               user_id: socket.assigns.current_user.id,
+               size: size
+             }) do
+          {:ok, stream} ->
+            Logger.debug("created stream #{stream.id}")
+
+          {:error, reason} ->
+            Logger.error("failed to create stream. reason=#{inspect(reason)}")
+        end

         {:reply, %{upload_id: upload_id, key: key}, socket}

       {:error, reason} ->
@@ -214,6 +96,7 @@
     end
   end

+  @impl true
   def handle_event(
         "abort_multipart_upload",
         %{"key" => key, "uploadId" => upload_id},
@@ -235,6 +118,7 @@
     end
   end

+  @impl true
   def handle_event(
         "get_upload_parameters",
         %{"type" => type, "name" => name},
@@ -253,6 +137,7 @@
     {:reply, %{type: type, method: "PUT", url: url}, socket}
   end

+  @impl true
   def handle_event(
         "complete_multipart_upload",
         %{"key" => key, "uploadId" => upload_id, "parts" => parts},
@@ -284,6 +169,7 @@
     end
   end

+  @impl true
   def handle_event(
         "sign_part",
         %{"body" => _body, "key" => key, "partNumber" => part_number, "uploadId" => upload_id},
@@ -308,24 +194,6 @@
     {:reply, %{url: url}, socket}
   end

-  defp join_refs(entries), do: Enum.join(entries, ",")

   def error_to_string(:too_large), do: "File too large!"
   def error_to_string(:not_accepted), do: "Bad file type!"

-  defp to_megabytes_or_kilobytes(bytes) when is_integer(bytes) do
-    case bytes do
-      b when b < 1_048_576 ->
-        kilobytes = (b / 1024) |> Float.round(1)
-        if kilobytes < 1 do
-          "#{kilobytes}MB"
-        else
-          "#{round(kilobytes)}KB"
-        end
-      _ ->
-        megabytes = (bytes / 1_048_576) |> Float.round(1)
-        "#{megabytes}MB"
-    end
-  end
 end

@@ -1,11 +1,7 @@
 <.header>
   Upload {@upload.id}
   <:subtitle>This is a upload record from your database.</:subtitle>
-  <:actions>
-    <.link patch={~p"/uploads/#{@upload}/show/edit"} phx-click={JS.push_focus()}>
-      <.button>Edit upload</.button>
-    </.link>
-  </:actions>
+  <:actions></:actions>
 </.header>

 <.list>
@@ -15,8 +11,12 @@
 </.list>

 <.back navigate={~p"/uploads"}>Back to uploads</.back>

-<.modal :if={@live_action == :edit} id="upload-modal" show on_cancel={JS.patch(~p"/uploads/#{@upload}")}>
+<.modal
+  :if={@live_action == :edit}
+  id="upload-modal"
+  show
+  on_cancel={JS.patch(~p"/uploads/#{@upload}")}
+>
   <.live_component
     module={BrightWeb.UploadLive.FormComponent}
     id={@upload.id}

@@ -2,6 +2,7 @@ defmodule BrightWeb.Router do
   use BrightWeb, :router

   import Oban.Web.Router
+  import Redirect

   import BrightWeb.AuthController,
     only: [
@@ -75,7 +76,6 @@
     # get "/vtubers/:id/edit", VtuberController, :edit
     # end

-    resources("/uploads", UploadController, only: [:show, :index, :delete])
     resources("/vods", VodController, only: [:create, :new, :edit, :update, :delete])
     resources("/vtubers", VtuberController, only: [:delete])
@@ -128,9 +128,10 @@
     live_session :authenticated,
       on_mount: [{BrightWeb.AuthController, :ensure_authenticated}] do
       live("/profile", ProfileLive)
-      live("/upload", UploadLive.Index, :index)
-      # live("/upload/presign", , :)
+      live("/uploads/new", UploadLive.Index, :index)
     end

+    resources("/uploads", UploadController, only: [:show, :index, :delete])
   end

   scope "/feeds", BrightWeb do
@@ -144,6 +145,8 @@
     get("/health", PageController, :health)
   end

+  redirect("/upload", "/uploads/new", :permanent, preserve_query_string: true)

   # Enable LiveDashboard and Swoosh mailbox preview in development
   if Application.compile_env(:bright, :dev_routes) do
     # If you want to use the LiveDashboard in production, you should put

@@ -37,6 +37,7 @@ defmodule Bright.MixProject do
   # Type `mix help deps` for examples and options.
   defp deps do
     [
+      {:redirect, "~> 0.4.0"},
       {:bcrypt_elixir, "~> 3.0"},
       {:phoenix, "~> 1.7.17"},
       {:phoenix_ecto, "~> 4.5"},

@@ -1,18 +0,0 @@
defmodule Bright.Repo.Migrations.CreateUploads do
use Ecto.Migration
def change do
create table(:uploads) do
add :filename, :string
add :size, :integer
add :content_type, :string
add :user_id, references(:users, on_delete: :nothing)
add :vod, references(:vods, on_delete: :nothing)
timestamps(type: :utc_datetime)
end
create index(:uploads, [:user_id])
create index(:uploads, [:vod])
end
end

@@ -104,17 +104,20 @@ defmodule Bright.PlatformsTest do
     @invalid_attrs %{url: nil}

     test "list_platform_aliases/0 returns all platform_aliases" do
-      platform_alias = platform_alias_fixture()
+      platform = platform_fixture()
+      platform_alias = platform_alias_fixture(%{platform_id: platform.id}).preload([:platform])
       assert Platforms.list_platform_aliases() == [platform_alias]
     end

     test "get_platform_alias!/1 returns the platform_alias with given id" do
-      platform_alias = platform_alias_fixture()
+      platform = platform_fixture()
+      platform_alias = platform_alias_fixture(%{platform_id: platform.id})
       assert Platforms.get_platform_alias!(platform_alias.id) == platform_alias
     end

     test "create_platform_alias/1 with valid data creates a platform_alias" do
-      valid_attrs = %{url: "some url"}
+      platform = platform_fixture()
+      valid_attrs = %{url: "some url", platform_id: platform.id}

       assert {:ok, %PlatformAlias{} = platform_alias} =
                Platforms.create_platform_alias(valid_attrs)
@@ -127,7 +130,8 @@
     end

     test "update_platform_alias/2 with valid data updates the platform_alias" do
-      platform_alias = platform_alias_fixture()
+      platform = platform_fixture()
+      platform_alias = platform_alias_fixture(%{platform_id: platform.id})
       update_attrs = %{url: "some updated url"}

       assert {:ok, %PlatformAlias{} = platform_alias} =
@@ -137,7 +141,8 @@
     end

     test "update_platform_alias/2 with invalid data returns error changeset" do
-      platform_alias = platform_alias_fixture()
+      platform = platform_fixture()
+      platform_alias = platform_alias_fixture(%{platform_id: platform.id})

       assert {:error, %Ecto.Changeset{}} =
                Platforms.update_platform_alias(platform_alias, @invalid_attrs)
@@ -152,7 +157,8 @@
     end

     test "change_platform_alias/1 returns a platform_alias changeset" do
-      platform_alias = platform_alias_fixture()
+      platform = platform_fixture()
+      platform_alias = platform_alias_fixture(%{platform_id: platform.id})
       assert %Ecto.Changeset{} = Platforms.change_platform_alias(platform_alias)
     end
   end

@@ -188,62 +188,4 @@ defmodule Bright.StreamsTest do
       # assert_received {:progress, %{stage: :generating_thumbnail, done: 1, total: 1}}
     end
   end
describe "uploads" do
alias Bright.Streams.Upload
import Bright.StreamsFixtures
@invalid_attrs %{size: nil, filename: nil, content_type: nil}
test "list_uploads/0 returns all uploads" do
upload = upload_fixture()
assert Streams.list_uploads() == [upload]
end
test "get_upload!/1 returns the upload with given id" do
upload = upload_fixture()
assert Streams.get_upload!(upload.id) == upload
end
test "create_upload/1 with valid data creates a upload" do
valid_attrs = %{size: 42, filename: "some filename", content_type: "some content_type"}
assert {:ok, %Upload{} = upload} = Streams.create_upload(valid_attrs)
assert upload.size == 42
assert upload.filename == "some filename"
assert upload.content_type == "some content_type"
end
test "create_upload/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Streams.create_upload(@invalid_attrs)
end
test "update_upload/2 with valid data updates the upload" do
upload = upload_fixture()
update_attrs = %{size: 43, filename: "some updated filename", content_type: "some updated content_type"}
assert {:ok, %Upload{} = upload} = Streams.update_upload(upload, update_attrs)
assert upload.size == 43
assert upload.filename == "some updated filename"
assert upload.content_type == "some updated content_type"
end
test "update_upload/2 with invalid data returns error changeset" do
upload = upload_fixture()
assert {:error, %Ecto.Changeset{}} = Streams.update_upload(upload, @invalid_attrs)
assert upload == Streams.get_upload!(upload.id)
end
test "delete_upload/1 deletes the upload" do
upload = upload_fixture()
assert {:ok, %Upload{}} = Streams.delete_upload(upload)
assert_raise Ecto.NoResultsError, fn -> Streams.get_upload!(upload.id) end
end
test "change_upload/1 returns a upload changeset" do
upload = upload_fixture()
assert %Ecto.Changeset{} = Streams.change_upload(upload)
end
end
end

@@ -8,6 +8,8 @@ defmodule BrightWeb.PlatformAliasControllerTest do
   @invalid_attrs %{url: nil}

   describe "index" do
+    setup [:create_admin]

     test "lists all platform_aliases", %{conn: conn} do
       conn = get(conn, ~p"/platform_aliases")
       assert html_response(conn, 200) =~ "Listing Platform aliases"
@@ -15,6 +17,8 @@
   end

   describe "new platform_alias" do
+    setup [:create_admin]

     test "renders form", %{conn: conn} do
       conn = get(conn, ~p"/platform_aliases/new")
       assert html_response(conn, 200) =~ "New Platform alias"
@@ -22,9 +26,20 @@
   end

   describe "create platform_alias" do
-    test "redirects to show when data is valid", %{conn: conn} do
-      conn = post(conn, ~p"/platform_aliases", platform_alias: @create_attrs)
+    setup [:create_admin]

+    test "redirects to show when data is valid", %{conn: conn} do
+      platform = platform_fixture()
+
+      create_attrs = %{
+        url: "https://example.com",
+        platform_id: platform.id
+      }
+
+      conn =
+        post(conn, ~p"/platform_aliases", platform_alias: create_attrs)
+
+      # IO.puts(conn.resp_body)
       assert %{id: id} = redirected_params(conn)
       assert redirected_to(conn) == ~p"/platform_aliases/#{id}"
@@ -39,7 +54,7 @@
   end

   describe "edit platform_alias" do
-    setup [:create_platform_alias]
+    setup [:create_platform_alias, :create_admin]

     test "renders form for editing chosen platform_alias", %{
       conn: conn,
@@ -51,7 +66,7 @@
   end

   describe "update platform_alias" do
-    setup [:create_platform_alias]
+    setup [:create_platform_alias, :create_admin]

     test "redirects when data is valid", %{conn: conn, platform_alias: platform_alias} do
       conn = put(conn, ~p"/platform_aliases/#{platform_alias}", platform_alias: @update_attrs)
@@ -68,7 +83,7 @@
   end

   describe "delete platform_alias" do
-    setup [:create_platform_alias]
+    setup [:create_platform_alias, :create_admin]

     test "deletes chosen platform_alias", %{conn: conn, platform_alias: platform_alias} do
       conn = delete(conn, ~p"/platform_aliases/#{platform_alias}")
@@ -81,7 +96,8 @@
   end

   defp create_platform_alias(_) do
-    platform_alias = platform_alias_fixture()
+    platform = platform_fixture()
+    platform_alias = platform_alias_fixture(%{platform_id: platform.id})
     %{platform_alias: platform_alias}
   end
 end

@@ -3,9 +3,14 @@ defmodule BrightWeb.UploadControllerTest do
   import Bright.StreamsFixtures

-  @create_attrs %{size: 42, filename: "some filename", content_type: "some content_type"}
-  @update_attrs %{size: 43, filename: "some updated filename", content_type: "some updated content_type"}
-  @invalid_attrs %{size: nil, filename: nil, content_type: nil}
+  @create_attrs %{
+    size: 42,
+    filename: "some filename",
+    content_type: "some content_type",
+    user_id: 1
+  }
+  @invalid_attrs %{size: nil, filename: nil, content_type: nil, user_id: nil}

   describe "index" do
     test "lists all uploads", %{conn: conn} do
@@ -14,56 +19,6 @@
     end
   end
describe "new upload" do
test "renders form", %{conn: conn} do
conn = get(conn, ~p"/uploads/new")
assert html_response(conn, 200) =~ "New Upload"
end
end
describe "create upload" do
test "redirects to show when data is valid", %{conn: conn} do
conn = post(conn, ~p"/uploads", upload: @create_attrs)
assert %{id: id} = redirected_params(conn)
assert redirected_to(conn) == ~p"/uploads/#{id}"
conn = get(conn, ~p"/uploads/#{id}")
assert html_response(conn, 200) =~ "Upload #{id}"
end
test "renders errors when data is invalid", %{conn: conn} do
conn = post(conn, ~p"/uploads", upload: @invalid_attrs)
assert html_response(conn, 200) =~ "New Upload"
end
end
describe "edit upload" do
setup [:create_upload]
test "renders form for editing chosen upload", %{conn: conn, upload: upload} do
conn = get(conn, ~p"/uploads/#{upload}/edit")
assert html_response(conn, 200) =~ "Edit Upload"
end
end
describe "update upload" do
setup [:create_upload]
test "redirects when data is valid", %{conn: conn, upload: upload} do
conn = put(conn, ~p"/uploads/#{upload}", upload: @update_attrs)
assert redirected_to(conn) == ~p"/uploads/#{upload}"
conn = get(conn, ~p"/uploads/#{upload}")
assert html_response(conn, 200) =~ "some updated filename"
end
test "renders errors when data is invalid", %{conn: conn, upload: upload} do
conn = put(conn, ~p"/uploads/#{upload}", upload: @invalid_attrs)
assert html_response(conn, 200) =~ "Edit Upload"
end
end
describe "delete upload" do describe "delete upload" do
setup [:create_upload] setup [:create_upload]

@@ -1,113 +1,117 @@
# defmodule BrightWeb.UploadLiveTest do
#   use BrightWeb.ConnCase
#   import Phoenix.LiveViewTest
#   import Bright.StreamsFixtures
#   @create_attrs %{size: 42, filename: "some filename", content_type: "some content_type"}
-  @update_attrs %{size: 43, filename: "some updated filename", content_type: "some updated content_type"}
#   @update_attrs %{
#     size: 43,
#     filename: "some updated filename",
#     content_type: "some updated content_type"
#   }
#   @invalid_attrs %{size: nil, filename: nil, content_type: nil}
#   defp create_upload(_) do
#     upload = upload_fixture()
#     %{upload: upload}
#   end
#   describe "Index" do
-    setup [:create_upload]
#     setup [:create_upload, :create_user]
#     test "lists all uploads", %{conn: conn, upload: upload} do
#       {:ok, _index_live, html} = live(conn, ~p"/uploads")
#       assert html =~ "Listing Uploads"
#       assert html =~ upload.filename
#     end
#     test "saves new upload", %{conn: conn} do
#       {:ok, index_live, _html} = live(conn, ~p"/uploads")
#       assert index_live |> element("a", "New Upload") |> render_click() =~
#                "New Upload"
#       assert_patch(index_live, ~p"/uploads/new")
#       assert index_live
#              |> form("#upload-form", upload: @invalid_attrs)
#              |> render_change() =~ "can&#39;t be blank"
#       assert index_live
#              |> form("#upload-form", upload: @create_attrs)
#              |> render_submit()
#       assert_patch(index_live, ~p"/uploads")
#       html = render(index_live)
#       assert html =~ "Upload created successfully"
#       assert html =~ "some filename"
#     end
#     test "updates upload in listing", %{conn: conn, upload: upload} do
#       {:ok, index_live, _html} = live(conn, ~p"/uploads")
#       assert index_live |> element("#uploads-#{upload.id} a", "Edit") |> render_click() =~
#                "Edit Upload"
#       assert_patch(index_live, ~p"/uploads/#{upload}/edit")
#       assert index_live
#              |> form("#upload-form", upload: @invalid_attrs)
#              |> render_change() =~ "can&#39;t be blank"
#       assert index_live
#              |> form("#upload-form", upload: @update_attrs)
#              |> render_submit()
#       assert_patch(index_live, ~p"/uploads")
#       html = render(index_live)
#       assert html =~ "Upload updated successfully"
#       assert html =~ "some updated filename"
#     end
#     test "deletes upload in listing", %{conn: conn, upload: upload} do
#       {:ok, index_live, _html} = live(conn, ~p"/uploads")
#       assert index_live |> element("#uploads-#{upload.id} a", "Delete") |> render_click()
#       refute has_element?(index_live, "#uploads-#{upload.id}")
#     end
#   end
#   describe "Show" do
-    setup [:create_upload]
#     setup [:create_upload, :create_user]
#     test "displays upload", %{conn: conn, upload: upload} do
#       {:ok, _show_live, html} = live(conn, ~p"/uploads/#{upload}")
#       assert html =~ "Show Upload"
#       assert html =~ upload.filename
#     end
#     test "updates upload within modal", %{conn: conn, upload: upload} do
#       {:ok, show_live, _html} = live(conn, ~p"/uploads/#{upload}")
#       assert show_live |> element("a", "Edit") |> render_click() =~
#                "Edit Upload"
#       assert_patch(show_live, ~p"/uploads/#{upload}/show/edit")
#       assert show_live
#              |> form("#upload-form", upload: @invalid_attrs)
#              |> render_change() =~ "can&#39;t be blank"
#       assert show_live
#              |> form("#upload-form", upload: @update_attrs)
#              |> render_submit()
#       assert_patch(show_live, ~p"/uploads/#{upload}")
#       html = render(show_live)
#       assert html =~ "Upload updated successfully"
#       assert html =~ "some updated filename"
#     end
#   end
# end

@@ -31,8 +31,8 @@
     "tunnel": "dotenvx run -f ./.env.development -- chisel client bright.fp.sbtp.xyz:9090 R:4000",
     "backup": "docker exec -t postgres_db pg_dumpall -c -U postgres > ./backups/dev_`date +%Y-%m-%d_%H_%M_%S`.sql",
     "act": "dotenvx run -f ./.env.testing -- act -W ./.gitea/workflows --secret-file .env.development",
-    "act:builder": "dotenvx run -f ./.env.testing -- act -W ./.gitea/workflows/builder.yaml --secret-file .env.testing --var-file .env.testing --insecure-secrets",
-    "act:tests": "dotenvx run -f ./.env.testing -- act -W ./.gitea/workflows/tests.yaml --secret-file .env.testing --var-file .env.testing --insecure-secrets",
+    "act:builder": "dotenvx run -f ./.env.testing -- act --env-file .env.testing -W ./.gitea/workflows/builder.yaml --secret-file .env.testing --var-file .env.testing --insecure-secrets",
+    "act:tests": "dotenvx run -f ./.env.testing -- act --env-file .env.testing -W ./.gitea/workflows/tests.yaml --secret-file .env.testing --var-file .env.testing --insecure-secrets",
     "bright:compile:watch": "cd ./apps/bright && find . -type f -name \"*.ex\" -o -name \"*.exs\" | entr -r mix compile --warnings-as-errors",
     "bright:compile:watch2": "cd ./apps/bright && pnpx chokidar-cli \"**/*\" -i \"deps/**\" -i \"_build/**\" -c \"mix compile --warnings-as-errors\"",
     "bright:dev": "cd ./apps/bright && dotenvx run -f ../../.env.development -e MIX_ENV=dev -- mix phx.server",

@ -3,7 +3,13 @@
// @see https://grep.app/search?q=for+tuple+in+regexall%28 // @see https://grep.app/search?q=for+tuple+in+regexall%28
// @see https://github.com/lrmendess/open-source-datalake/blob/main/minio.tf // @see https://github.com/lrmendess/open-source-datalake/blob/main/minio.tf
locals { locals {
envs = { for tuple in regexall("(.*)=(.*)", file("../.env")) : tuple[0] => sensitive(tuple[1]) } envs = { for tuple in regexall("(.*)=(.*)", file("../.env.production")) : tuple[0] => sensitive(tuple[1]) }
}
variable "ipfs_hosts" {
description = "List of IP addresses for IPFS nodes"
type = list(string)
default = ["161.97.186.203", "38.242.193.246"]
} }
@ -216,6 +222,20 @@ resource "vultr_instance" "tracker" {
reserved_ip_id = vultr_reserved_ip.futureporn_tracker_ip.id reserved_ip_id = vultr_reserved_ip.futureporn_tracker_ip.id
} }
resource "ansible_host" "ipfs_vps" {
for_each = { for idx, host in var.ipfs_hosts : idx => host }
name = each.value
groups = ["ipfs"]
variables = {
ansible_user = "root"
ansible_host = each.value
}
}
resource "ansible_host" "capture_vps" { resource "ansible_host" "capture_vps" {
for_each = { for idx, host in vultr_instance.capture_vps : idx => host } for_each = { for idx, host in vultr_instance.capture_vps : idx => host }
name = each.value.hostname name = each.value.hostname