add ipfs playbook

This commit is contained in:
parent 055a3ab66f
commit b61c55d1e9

Changed paths:
.gitea/workflows
MANTRAS.md
ansible
apps/bright
devbox.json
terraform
@@ -21,7 +21,7 @@ jobs:
  # our workaround is to reference service containers two different ways.
  # - when running nektos/act on localhost, we reference the service as localhost:<port_number>.
  # - when running gitea act_runner on gitea, we reference the service's dns name.

  # these references are defined in environment variables
  test_phoenix:
    env:
      SECRET_KEY_BASE: ${{ secrets.SECRET_KEY_BASE }}
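The comment above describes the dual-reference workaround for service containers. A minimal sketch of how such wiring can look at the step level; the `DB_HOST` values, the `postgres` service name, and the step names are illustrative and not taken from this workflow (nektos/act does document that it sets `ACT=true` inside its job containers):

    # illustrative sketch only, not part of this commit
    - name: Use the localhost reference when running under nektos/act
      if: ${{ env.ACT }}
      run: echo "DB_HOST=localhost" >> "$GITHUB_ENV"

    - name: Use the service DNS name when running on the gitea act_runner
      if: ${{ !env.ACT }}
      run: echo "DB_HOST=postgres" >> "$GITHUB_ENV"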
@@ -50,4 +50,9 @@ In other words, pick something for a name and roll with the punches.

> "But it's also about looking at things anew and what they could be instead of what they are"
> -- Rodney Mullen

### Success requires continuing even when it's painful

### Find what you love and let it kill you

> Find what you love and let it kill you. Let it drain you of your all. Let it cling onto your back and weigh you down into eventual nothingness. Let it kill you and let it devour your remains. For all things will kill you, both slowly and fastly, but it's much better to be killed by a lover.
> -- Charles Bukowski
@@ -3,5 +3,6 @@ collections:
  - name: cloud.terraform
  - name: community.docker
  - name: community.general
  - name: gluster.gluster
roles:
  - name: nvjacobo.caddy
@@ -54,7 +54,7 @@

- name: Download and extract IPFS Kubo
  ansible.builtin.unarchive:
-   src: https://dist.ipfs.tech/kubo/v0.33.2/kubo_v0.33.2_linux-amd64.tar.gz
+   src: "https://dist.ipfs.tech/kubo/{{ kubo_version }}/kubo_{{ kubo_version }}_linux-amd64.tar.gz"
    dest: ~/
    remote_src: true

@@ -63,12 +63,15 @@
  args:
    creates: /usr/local/bin/ipfs

- name: Allow UFW ports
  community.general.ufw:
    rule: allow
    port: "{{ item }}"
    proto: tcp
  loop:
    - 8081 # npx http-server -p 8081
    - 8080 # ipfs api
    - 4001 # ipfs swarm
ansible/roles/ipfs/defaults/main.yml (new file, 3 lines)
@@ -0,0 +1,3 @@
---
ipfs_kubo_version: v0.34.1
ipfs_cluster_follow_version: v1.1.2
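These defaults sit at the lowest Ansible variable precedence, so a play can pin different releases without editing the role. A minimal sketch, assuming the role is applied as below; the override values are illustrative:

    # illustrative only: overriding the role defaults from a play
    - name: Set up IPFS with pinned releases
      hosts: ipfs
      roles:
        - role: ipfs
          vars:
            ipfs_kubo_version: v0.34.1            # overrides defaults/main.yml
            ipfs_cluster_follow_version: v1.1.2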
ansible/roles/ipfs/handlers/main.yml (new file, 16 lines)
@@ -0,0 +1,16 @@
---

- name: Restart ipfs
  ansible.builtin.systemd_service:
    name: ipfs
    state: restarted
    enabled: true
    daemon_reload: true

- name: Restart ipfs-cluster-follow
  ansible.builtin.systemd_service:
    name: ipfs-cluster-follow
    state: restarted
    enabled: true
    daemon_reload: true
ansible/roles/ipfs/tasks/main.yml (new file, 175 lines)
@@ -0,0 +1,175 @@
---
- name: Configure firewall (UDP & TCP)
  community.general.ufw:
    rule: allow
    port: "{{ item }}"
    proto: any
  loop:
    - 4001
    - 24007
    - 24008

- name: Configure firewall (TCP)
  community.general.ufw:
    rule: allow
    port: "{{ item }}"
    proto: tcp
  loop:
    - 29152:65535

- name: Install glusterfs
  ansible.builtin.apt:
    name:
      - glusterfs-server
    state: present

- name: Start & enable glusterd service
  ansible.builtin.systemd_service:
    name: glusterd.service
    state: started
    enabled: true

# - name: Create gluster volume
#   gluster.gluster.gluster_volume:
#     state: present
#     name: ipfs-datastore
#     bricks: /bricks/brick1/g1
#     rebalance: true
#     cluster: "{{ groups['ipfs'] }}"
#   run_once: true

# - name: Start gluster volume
#   gluster.gluster.gluster_volume:
#     state: started
#     name: ipfs-datastore

# - name: Limit volume usage
#   gluster.gluster.gluster_volume:
#     state: present
#     name: ipfs-datastore
#     directory: /
#     quota: 6.0TB

## Example: mount -t glusterfs fp-bright-0:/gv0 /mountme
# - name: Mount gluster volume
#   ansible.posix.mount:
#     src: "{{ ansible_hostname }}:/g1"
#     path: /mnt/g1
#     fstype: glusterfs
#     state: mounted

- name: Create ipfs group
  ansible.builtin.group:
    name: ipfs
    state: present

- name: Create ipfs user
  ansible.builtin.user:
    name: ipfs
    group: ipfs
    create_home: true
    home: /home/ipfs
    system: true

- name: Download and extract IPFS Kubo
  ansible.builtin.unarchive:
    src: "https://dist.ipfs.tech/kubo/{{ ipfs_kubo_version }}/kubo_{{ ipfs_kubo_version }}_linux-amd64.tar.gz"
    dest: /tmp
    remote_src: true
  notify:
    - Restart ipfs

- name: Install IPFS Kubo
  ansible.builtin.copy:
    src: /tmp/kubo/ipfs
    dest: /usr/local/bin/ipfs
    mode: "0755"
    remote_src: true
  notify:
    - Restart ipfs

- name: Download and extract ipfs-cluster-follow
  ansible.builtin.unarchive:
    src: "https://dist.ipfs.tech/ipfs-cluster-follow/{{ ipfs_cluster_follow_version }}/ipfs-cluster-follow_{{ ipfs_cluster_follow_version }}_linux-amd64.tar.gz"
    dest: /tmp
    remote_src: true
  notify:
    - Restart ipfs-cluster-follow

- name: Install ipfs-cluster-follow
  ansible.builtin.copy:
    src: /tmp/ipfs-cluster-follow/ipfs-cluster-follow
    dest: /usr/local/bin/ipfs-cluster-follow
    mode: "0755"
    remote_src: true
  notify:
    - Restart ipfs-cluster-follow

- name: Generate random peername
  ansible.builtin.set_fact:
    cluster_peername: "{{ lookup('password', '/dev/null length=8 chars=hexdigits') }}"

- name: Create ipfs-cluster-follow service
  ansible.builtin.template:
    src: ipfs-cluster-follow.service.j2
    dest: /etc/systemd/system/ipfs-cluster-follow.service
    mode: "0644"
  notify:
    - Restart ipfs-cluster-follow

- name: Create ipfs service
  ansible.builtin.template:
    src: ipfs.service.j2
    dest: /etc/systemd/system/ipfs.service
    mode: "0644"
  notify:
    - Restart ipfs

- name: Check current value of Routing.AcceleratedDHTClient
  ansible.builtin.command: ipfs config Routing.AcceleratedDHTClient
  register: ipfs_dht_config
  changed_when: false # this never changes things, it only gathers data

- name: Enable IPFS Routing.AcceleratedDHTClient
  ansible.builtin.command: ipfs config --json Routing.AcceleratedDHTClient true
  notify:
    - Restart ipfs
  when: ipfs_dht_config.stdout != "true"
  changed_when: true

- name: Create IPFS directory
  ansible.builtin.file:
    dest: /home/ipfs/.ipfs
    owner: ipfs
    group: ipfs
    state: directory
    mode: "0755"

- name: Check if IPFS config exists
  ansible.builtin.stat:
    path: /home/ipfs/.ipfs/config
  register: ipfs_config

- name: Initialize IPFS
  ansible.builtin.command: /usr/local/bin/ipfs init
  become: true
  become_user: ipfs
  args:
    chdir: /home/ipfs
  when: not ipfs_config.stat.exists
  changed_when: true # Explicitly mark this as a change when it runs
  notify:
    - Restart ipfs

## @todo enable once we get gluster working
# - name: Symlink IPFS blocks directory to gluster brick
#   ansible.builtin.file:
#     src: /home/ipfs/.ipfs/blocks
#     dest: /mnt/g1/.ipfs/blocks
#     owner: ipfs
#     group: ipfs
#     state: link
#   notify:
#     - Restart ipfs
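This role and the "Set up IPFS" play added later in this commit both target an `ipfs` inventory group; in this repo that group appears to come from the Terraform `ansible_host` resources further down, but a static inventory is enough to exercise the role on its own. A minimal sketch, where the hostnames are illustrative and the IPs mirror the `ipfs_hosts` Terraform default:

    # illustrative static inventory for the ipfs group (not part of this commit)
    ipfs:
      hosts:
        ipfs-0:
          ansible_host: 161.97.186.203
        ipfs-1:
          ansible_host: 38.242.193.246
      vars:
        ansible_user: root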
ansible/roles/ipfs/templates/ipfs-cluster-follow.service.j2 (new file, 14 lines)
@@ -0,0 +1,14 @@
[Unit]
Description=ipfs-cluster-follow

[Service]
Type=simple
Environment=CLUSTER_PEERNAME="{{cluster_peername}}"
ExecStart=/usr/local/bin/ipfs-cluster-follow futureporn.net run --init https://futureporn.net/api/service.json
User=ipfs
Restart=always
RestartSec=10

[Install]
WantedBy=multi-user.target
ansible/roles/ipfs/templates/ipfs.service.j2 (new file, 14 lines)
@@ -0,0 +1,14 @@
[Unit]
Description=IPFS Daemon

[Service]
Type=simple
Environment=IPFS_PATH=/home/ipfs/.ipfs
ExecStart=/usr/local/bin/ipfs daemon
User=ipfs
Restart=always
RestartSec=10

[Install]
WantedBy=multi-user.target
@@ -16,6 +16,12 @@
  roles:
    - common

- name: Set up IPFS
  hosts: ipfs
  gather_facts: true
  roles:
    - ipfs

- name: Assert Capture dependencies
  hosts: capture
  gather_facts: true
@@ -55,3 +61,4 @@
  become: false
  roles:
    - load_balancer
@@ -44,15 +44,16 @@ const UppyHook = {
    })
  },
  createMultipartUpload(file) {
-   // console.log(`createMultipartUpload with file ${JSON.stringify(file)}`)
+   console.log(`createMultipartUpload with file ${JSON.stringify(file)}`)
    let { name, type } = file.meta
-   let payload = { name, type }
+   let size = file.size
+   let payload = { name, type, size }
    return new Promise((resolve) => {
      live.pushEvent('initiate_multipart_upload', payload, (reply, ref) => {
-       // console.log(`payload=${JSON.stringify(payload)}`)
-       // console.log(`initiate_multipart_upload pushEvent response callback.`)
-       // console.log(`got reply=${JSON.stringify(reply)}`)
-       // console.log(`got ref=${JSON.stringify(ref)}`)
+       console.log(`payload=${JSON.stringify(payload)}`)
+       console.log(`initiate_multipart_upload pushEvent response callback.`)
+       console.log(`got reply=${JSON.stringify(reply)}`)
+       console.log(`got ref=${JSON.stringify(ref)}`)

        let output = {
          uploadId: reply?.upload_id,
@@ -49,11 +49,11 @@ config :ex_aws,
  ]

if config_env() == :prod do
-  db_host = System.get_env("DB_HOST") || raise "environment variable DB_HOST is missing."
-  db_user = System.get_env("DB_USER") || raise "environment variable DB_USER is missing."
-  db_pass = System.get_env("DB_PASS") || raise "environment variable DB_PASS is missing."
-  db_port = System.get_env("DB_PORT") || raise "environment variable DB_PORT is missing."
-  db_name = System.get_env("DB_NAME") || raise "environment variable DB_NAME is missing."
+  db_host = System.fetch_env!("DB_HOST")
+  db_user = System.fetch_env!("DB_USER")
+  db_pass = System.fetch_env!("DB_PASS")
+  db_port = System.fetch_env!("DB_PORT")
+  db_name = System.fetch_env!("DB_NAME")

  maybe_ipv6 = if System.get_env("ECTO_IPV6") in ~w(true 1), do: [:inet6], else: []
@@ -9,20 +9,24 @@ config :bcrypt_elixir, :log_rounds, 1
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :bright, Bright.Repo,
-  database: System.get_env("DB_NAME", "bright"),
-  # database: "bright_test#{System.get_env("MIX_TEST_PARTITION")}",
-  hostname: System.get_env("DB_HOST", "localhost"),
-  username: System.get_env("DB_USER", "postgres"),
-  password: System.get_env("DB_PASS", "password"),
-  port: System.get_env("DB_PORT", "5433"),
+  database: System.fetch_env!("DB_NAME"),
+  hostname: System.fetch_env!("DB_HOST"),
+  username: System.fetch_env!("DB_USER"),
+  password: System.fetch_env!("DB_PASS"),
+  port: System.fetch_env!("DB_PORT"),
  pool: Ecto.Adapters.SQL.Sandbox,
  pool_size: System.schedulers_online() * 4

+IO.puts(
+  "hello this is test.exs and we are checking env vars. db_host=#{System.fetch_env!("DB_HOST")} db_user=#{System.fetch_env!("DB_USER")} db_pass=#{System.fetch_env!("DB_PASS")} db_port=#{System.fetch_env!("DB_PORT")} db_name=#{System.fetch_env!("DB_NAME")}"
+)

# We don't run a server during test. If one is required,
# you can enable the server option below.
config :bright, BrightWeb.Endpoint,
  http: [ip: {127, 0, 0, 1}, port: 4002],
-  secret_key_base: "#{System.get_env("SECRET_KEY_BASE")}",
+  secret_key_base: "#{System.fetch_env!("SECRET_KEY_BASE")}",
  server: false

# Prevent Oban from running jobs and plugins during test runs
@@ -616,7 +616,9 @@ defmodule Bright.Streams do

  """
  def list_uploads do
-    Repo.all(Upload)
+    Upload
+    |> Repo.all()
+    |> Repo.preload([:user])
  end

  @doc """
@@ -633,7 +635,11 @@ defmodule Bright.Streams do
      ** (Ecto.NoResultsError)

  """
-  def get_upload!(id), do: Repo.get!(Upload, id)
+  def get_upload!(id) do
+    Upload
+    |> Repo.get!(id)
+    |> Repo.preload([:user])
+  end

  @doc """
  Creates a upload.
@@ -6,8 +6,8 @@ defmodule Bright.Streams.Upload do
    field :size, :integer
    field :filename, :string
    field :content_type, :string
-    field :user_id, :id
-    field :vod, :id
+    belongs_to :user, Bright.Users.User

    timestamps(type: :utc_datetime)
  end
@@ -15,7 +15,8 @@ defmodule Bright.Streams.Upload do
  @doc false
  def changeset(upload, attrs) do
    upload
-    |> cast(attrs, [:filename, :size, :content_type])
-    |> validate_required([:filename, :size, :content_type])
+    |> cast(attrs, [:filename, :size, :content_type, :user_id])
+    |> validate_required([:filename, :size, :content_type, :user_id])
+    |> assoc_constraint(:user)
  end
end
@@ -12,6 +12,8 @@ defmodule Bright.Users.User do
    field :patron_tier, :integer
    field :role, :string

+    has_many :uploads, Bright.Streams.Upload
+
    timestamps(type: :utc_datetime)
  end

@@ -71,3 +73,9 @@ defmodule Bright.Users.User do
    user
  end
end
+
+defimpl Phoenix.HTML.Safe, for: Bright.Users.User do
+  def to_iodata(user) do
+    Phoenix.HTML.Safe.to_iodata("User #{user.id}")
+  end
+end
@@ -1,7 +1,7 @@
<.header>
  Listing Uploads
  <:actions>
-    <.link href={~p"/uploads/new"}>
+    <.link href={~p"/upload"}>
      <.button>New Upload</.button>
    </.link>
  </:actions>
@@ -11,11 +11,11 @@
  <:col :let={upload} label="Filename">{upload.filename}</:col>
  <:col :let={upload} label="Size">{upload.size}</:col>
  <:col :let={upload} label="Content type">{upload.content_type}</:col>
  <:col :let={upload} label="Uploader">{upload.user}</:col>
  <:action :let={upload}>
    <div class="sr-only">
      <.link navigate={~p"/uploads/#{upload}"}>Show</.link>
    </div>
    <.link navigate={~p"/uploads/#{upload}/edit"}>Edit</.link>
  </:action>
  <:action :let={upload}>
    <.link href={~p"/uploads/#{upload}"} method="delete" data-confirm="Are you sure?">
@@ -1,17 +1,14 @@
<.header>
  Upload {@upload.id}
  <:subtitle>This is a upload record from your database.</:subtitle>
-  <:actions>
-    <.link href={~p"/uploads/#{@upload}/edit"}>
-      <.button>Edit upload</.button>
-    </.link>
-  </:actions>
+  <:actions></:actions>
</.header>

<.list>
  <:item title="Filename">{@upload.filename}</:item>
  <:item title="Size">{@upload.size}</:item>
  <:item title="Content type">{@upload.content_type}</:item>
+  <:item title="Uploader">{@upload.user}</:item>
</.list>

<.back navigate={~p"/uploads"}>Back to uploads</.back>
@@ -2,22 +2,12 @@ defmodule BrightWeb.UploadLive.Index do
  use BrightWeb, :live_view

  alias Bright.Streams
  alias Bright.Streams.Upload
  alias Bright.Utils

  require Logger

  @impl true
  def mount(_params, _session, socket) do
    # {:ok,
    #  socket
    #  |> assign(:uploaded_files, [])
    #  |> allow_upload(:vods,
    #    accept: ~w(.mp4 .mov .ts .avi .mpeg .ogv .webm .3gp .3g2),
    #    max_entries: 3,
    #    max_file_size: 80 * 1_000_000_000,
    #    external: &presign_upload/2
    #  )}

    # socket = assign(socket, endpoint: System.fetch_env!("UPPY_ENDPOINT"))
    socket =
      socket
      |> assign(:uploaded_files, [])
@@ -28,135 +18,20 @@ defmodule BrightWeb.UploadLive.Index do
  @impl true
  def render(assigns) do
    ~H"""
    <div>
      <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/uppy/4.12.2/uppy.min.css" integrity="sha512-oPlr9/HXIlp7YoIRNsexheOu2/P2sEVi8EFQEAWUlHHijx0QbQ9qgihNYmIYtdJP3xOIMbZcnSVhrznIh5DKkg==" crossorigin="anonymous" referrerpolicy="no-referrer" />

      <div class="section">
        <h2 class="title is-2 mb-3">Upload a VOD</h2>
        <link
          rel="stylesheet"
          href="https://cdnjs.cloudflare.com/ajax/libs/uppy/4.12.2/uppy.min.css"
          integrity="sha512-oPlr9/HXIlp7YoIRNsexheOu2/P2sEVi8EFQEAWUlHHijx0QbQ9qgihNYmIYtdJP3xOIMbZcnSVhrznIh5DKkg=="
          crossorigin="anonymous"
          referrerpolicy="no-referrer"
        />
        <div id="uppy-dashboard" phx-hook="UppyHook" phx-update="ignore"></div>
      </div>
    </div>
    """
  end

  # @impl true
  # def handle_event(
  #       "presign",
  #       %{"type" => type, "name" => name},
  #       socket
  #     ) do
  #   {:ok, %{url: url}, socket} = presign_url(name, type, socket)
  #   Logger.debug("✍️✍️✍️ presign called with name=#{name} type=#{type}. url=#{url}")
  #   # socket = assign(socket, signatures: {name, url})
  #   # socket =
  #   #   socket
  #   #   |> update(:uploaded_files, &(&1 ++ uploaded_files))

  #   #   |> push_event("process_upload", %{name: name, url: url, type: type})

  #   {:reply, %{name: name, type: type, url: url}, socket}
  # end

  # @impl true
  # def handle_event("upload_videos", _params, socket) do
  #   uploaded_files =
  #     consume_uploaded_entries(socket, :vods, fn %{path: _path}, entry ->
  #       IO.puts("@todo ⭐⭐⭐ Handling #{entry.client_type} @todo @todo @todo")
  #       IO.puts(inspect(entry))
  #     end)

  #   socket =
  #     socket
  #     |> update(:uploaded_files, &(&1 ++ uploaded_files))

  #   {:noreply, socket}
  # end

  # def handle_event("update_preview_srcs", %{"srcs" => srcs}, socket) do
  #   uploaded_files =
  #     socket.assigns.uploaded_files
  #     |> Enum.map(fn entry ->
  #       if Map.has_key?(srcs, entry.ref) do
  #         entry
  #         |> Map.put(:preview_src, Map.fetch!(srcs, entry.ref))
  #       else
  #         entry
  #       end
  #     end)

  #   socket =
  #     socket
  #     |> assign(:uploaded_files, uploaded_files)

  #   {:noreply, socket}
  # end

  # def handle_event("validate_upload", _params, socket) do
  #   num_remaining_uploads =
  #     length(socket.assigns.uploaded_files) - socket.assigns.uploads.vods.max_entries

  #   valid =
  #     Enum.uniq_by(socket.assigns.uploads.vods.entries, & &1.client_name)
  #     |> Enum.take(num_remaining_uploads)

  #   socket =
  #     Enum.reduce(socket.assigns.uploads.vods.entries, socket, fn entry, socket ->
  #       if entry in valid do
  #         socket
  #       else
  #         socket
  #         |> cancel_upload(:vods, entry.ref)
  #         |> put_flash(
  #           :error,
  #           "Uploaded files should be unique and cannot exceed #{socket.assigns.uploads.vods.max_entries} total files."
  #         )
  #       end
  #     end)

  #   {:noreply, socket}
  # end

  # def handle_event("cancel_upload", %{"ref" => ref}, socket) do
  #   {:noreply, cancel_upload(socket, :vods, ref)}
  # end

  # def handle_event("cancel_upload", _params, socket) do
  #   socket =
  #     Enum.reduce(socket.assigns.uploads.vods.entries, socket, fn entry, socket ->
  #       cancel_upload(socket, :vods, entry.ref)
  #     end)

  #   {:noreply, socket}
  # end

  # def presign_upload(name, type, socket) do
  #   Logger.debug("presign_upload was called with name=#{inspect(name)} and type=#{inspect(type)}")
  #   config = ExAws.Config.new(:s3)
  #   bucket = System.fetch_env!("AWS_BUCKET")
  #   key = "usc/#{Utils.random_string()}/#{name}"

  #   {:ok, url} =
  #     ExAws.S3.presigned_url(config, :put, bucket, key,
  #       expires_in: 3600,
  #       query_params: [{"Content-Type", type}]
  #     )

  #   {:ok, %{uploader: "S3", key: key, url: url}, socket}
  # end

  # # @doc @see https://hexdocs.pm/ex_aws_s3/ExAws.S3.html#presigned_post/4
  # def presigned_post(name, socket) do
  #   Logger.debug("presigned_post with name=#{inspect(name)}")
  #   config = ExAws.Config.new(:s3)
  #   bucket = System.fetch_env!("AWS_BUCKET")
  #   key = "usc/#{Utils.random_string()}/#{name}"
  #   {:ok, url} = ExAws.S3.presigned_post(config, )
  # end

  defp get_s3_config(name) do
    config = ExAws.Config.new(:s3)
    bucket = System.fetch_env!("AWS_BUCKET")
@@ -164,22 +39,15 @@ defmodule BrightWeb.UploadLive.Index do
    %{config: config, bucket: bucket, key: key}
  end

  @impl true
  def handle_event(
        "list_parts",
        %{"upload_id" => upload_id, "key" => key},
        socket
      ) do
    config = ExAws.Config.new(:s3)
    bucket = System.fetch_env!("AWS_BUCKET")
    # key = "usc/#{Utils.random_string()}/#{name}"
    # %{config: config, bucket: bucket, key: key} = get_s3_config(name)
    case ExAws.S3.list_parts(bucket, key, upload_id) do
    # <Part>
    #   <ETag>"85f30635602dc09bd85957a6e82a2c21"</ETag>
    #   <LastModified>2023-08-31T18:54:55.693Z</LastModified>
    #   <PartNumber>1</PartNumber>
    #   <Size>11</Size>

    case ExAws.S3.list_parts(bucket, key, upload_id) do
      {:ok, part: %{etag: etag, partNumber: partNumber, size: size}} ->
        Logger.debug("🦠🦠🦠 we got an etag from list_parts. etag=#{inspect(etag)}")
        {:reply, %{etag: etag, partNumber: partNumber, size: size}, socket}
@@ -191,9 +59,10 @@ defmodule BrightWeb.UploadLive.Index do
  end

  # @doc @see https://hexdocs.pm/ex_aws_s3/ExAws.S3.html#initiate_multipart_upload/3
  @impl true
  def handle_event(
        "initiate_multipart_upload",
-        %{"name" => name, "type" => type},
+        %{"name" => name, "type" => type, "size" => size},
        socket
      ) do
    %{config: config, bucket: bucket, key: key} = get_s3_config(name)
@@ -203,9 +72,22 @@ defmodule BrightWeb.UploadLive.Index do
    case ExAws.request(operation, config) do
      {:ok, %{body: %{key: key, upload_id: upload_id}} = response} ->
        Logger.debug(
-          "Multipart upload initiated. Upload ID: #{upload_id}, Key: #{key}, response=#{inspect(response)}"
+          "👤 Multipart upload initiated. Upload ID: #{upload_id}, Key: #{key}, response=#{inspect(response)}, user.id=#{socket.assigns.current_user.id}"
        )

        case Streams.create_upload(%{
               filename: name,
               content_type: type,
               user_id: socket.assigns.current_user.id,
               size: size
             }) do
          {:ok, stream} ->
            Logger.debug("created stream #{stream.id}")

          {:error, reason} ->
            Logger.error("failed to create stream. reason=#{inspect(reason)}")
        end

        {:reply, %{upload_id: upload_id, key: key}, socket}

      {:error, reason} ->
@@ -214,6 +96,7 @@ defmodule BrightWeb.UploadLive.Index do
    end
  end

  @impl true
  def handle_event(
        "abort_multipart_upload",
        %{"key" => key, "uploadId" => upload_id},
@@ -235,6 +118,7 @@ defmodule BrightWeb.UploadLive.Index do
    end
  end

  @impl true
  def handle_event(
        "get_upload_parameters",
        %{"type" => type, "name" => name},
@@ -253,6 +137,7 @@ defmodule BrightWeb.UploadLive.Index do
    {:reply, %{type: type, method: "PUT", url: url}, socket}
  end

  @impl true
  def handle_event(
        "complete_multipart_upload",
        %{"key" => key, "uploadId" => upload_id, "parts" => parts},
@@ -284,6 +169,7 @@ defmodule BrightWeb.UploadLive.Index do
    end
  end

  @impl true
  def handle_event(
        "sign_part",
        %{"body" => _body, "key" => key, "partNumber" => part_number, "uploadId" => upload_id},
@@ -308,24 +194,6 @@ defmodule BrightWeb.UploadLive.Index do
    {:reply, %{url: url}, socket}
  end

  defp join_refs(entries), do: Enum.join(entries, ",")
  def error_to_string(:too_large), do: "File too large!"
  def error_to_string(:not_accepted), do: "Bad file type!"

  defp to_megabytes_or_kilobytes(bytes) when is_integer(bytes) do
    case bytes do
      b when b < 1_048_576 ->
        kilobytes = (b / 1024) |> Float.round(1)

        if kilobytes < 1 do
          "#{kilobytes}MB"
        else
          "#{round(kilobytes)}KB"
        end

      _ ->
        megabytes = (bytes / 1_048_576) |> Float.round(1)
        "#{megabytes}MB"
    end
  end
end
@@ -1,11 +1,7 @@
<.header>
  Upload {@upload.id}
  <:subtitle>This is a upload record from your database.</:subtitle>
-  <:actions>
-    <.link patch={~p"/uploads/#{@upload}/show/edit"} phx-click={JS.push_focus()}>
-      <.button>Edit upload</.button>
-    </.link>
-  </:actions>
+  <:actions></:actions>
</.header>

<.list>
@@ -15,8 +11,12 @@
</.list>

<.back navigate={~p"/uploads"}>Back to uploads</.back>

-<.modal :if={@live_action == :edit} id="upload-modal" show on_cancel={JS.patch(~p"/uploads/#{@upload}")}>
+<.modal
+  :if={@live_action == :edit}
+  id="upload-modal"
+  show
+  on_cancel={JS.patch(~p"/uploads/#{@upload}")}
+>
  <.live_component
    module={BrightWeb.UploadLive.FormComponent}
    id={@upload.id}
@@ -2,6 +2,7 @@ defmodule BrightWeb.Router do
  use BrightWeb, :router

  import Oban.Web.Router
+  import Redirect

  import BrightWeb.AuthController,
    only: [
@@ -75,7 +76,6 @@ defmodule BrightWeb.Router do
    # get "/vtubers/:id/edit", VtuberController, :edit
    # end

-    resources("/uploads", UploadController, only: [:show, :index, :delete])
    resources("/vods", VodController, only: [:create, :new, :edit, :update, :delete])
    resources("/vtubers", VtuberController, only: [:delete])

@@ -128,9 +128,10 @@ defmodule BrightWeb.Router do
    live_session :authenticated,
      on_mount: [{BrightWeb.AuthController, :ensure_authenticated}] do
      live("/profile", ProfileLive)
-      live("/upload", UploadLive.Index, :index)
-      # live("/upload/presign", , :)
+      live("/uploads/new", UploadLive.Index, :index)
    end

+    resources("/uploads", UploadController, only: [:show, :index, :delete])
  end

  scope "/feeds", BrightWeb do
@@ -144,6 +145,8 @@ defmodule BrightWeb.Router do
    get("/health", PageController, :health)
  end

+  redirect("/upload", "/uploads/new", :permanent, preserve_query_string: true)

  # Enable LiveDashboard and Swoosh mailbox preview in development
  if Application.compile_env(:bright, :dev_routes) do
    # If you want to use the LiveDashboard in production, you should put
@@ -37,6 +37,7 @@ defmodule Bright.MixProject do
  # Type `mix help deps` for examples and options.
  defp deps do
    [
+      {:redirect, "~> 0.4.0"},
      {:bcrypt_elixir, "~> 3.0"},
      {:phoenix, "~> 1.7.17"},
      {:phoenix_ecto, "~> 4.5"},
@@ -1,18 +0,0 @@
defmodule Bright.Repo.Migrations.CreateUploads do
  use Ecto.Migration

  def change do
    create table(:uploads) do
      add :filename, :string
      add :size, :integer
      add :content_type, :string
      add :user_id, references(:users, on_delete: :nothing)
      add :vod, references(:vods, on_delete: :nothing)

      timestamps(type: :utc_datetime)
    end

    create index(:uploads, [:user_id])
    create index(:uploads, [:vod])
  end
end
@@ -104,17 +104,20 @@ defmodule Bright.PlatformsTest do
    @invalid_attrs %{url: nil}

    test "list_platform_aliases/0 returns all platform_aliases" do
-      platform_alias = platform_alias_fixture()
+      platform = platform_fixture()
+      platform_alias = platform_alias_fixture(%{platform_id: platform.id}).preload([:platform])
      assert Platforms.list_platform_aliases() == [platform_alias]
    end

    test "get_platform_alias!/1 returns the platform_alias with given id" do
-      platform_alias = platform_alias_fixture()
+      platform = platform_fixture()
+      platform_alias = platform_alias_fixture(%{platform_id: platform.id})
      assert Platforms.get_platform_alias!(platform_alias.id) == platform_alias
    end

    test "create_platform_alias/1 with valid data creates a platform_alias" do
-      valid_attrs = %{url: "some url"}
+      platform = platform_fixture()
+      valid_attrs = %{url: "some url", platform_id: platform.id}

      assert {:ok, %PlatformAlias{} = platform_alias} =
               Platforms.create_platform_alias(valid_attrs)
@@ -127,7 +130,8 @@ defmodule Bright.PlatformsTest do
    end

    test "update_platform_alias/2 with valid data updates the platform_alias" do
-      platform_alias = platform_alias_fixture()
+      platform = platform_fixture()
+      platform_alias = platform_alias_fixture(%{platform_id: platform.id})
      update_attrs = %{url: "some updated url"}

      assert {:ok, %PlatformAlias{} = platform_alias} =
@@ -137,7 +141,8 @@ defmodule Bright.PlatformsTest do
    end

    test "update_platform_alias/2 with invalid data returns error changeset" do
-      platform_alias = platform_alias_fixture()
+      platform = platform_fixture()
+      platform_alias = platform_alias_fixture(%{platform_id: platform.id})

      assert {:error, %Ecto.Changeset{}} =
               Platforms.update_platform_alias(platform_alias, @invalid_attrs)
@@ -152,7 +157,8 @@ defmodule Bright.PlatformsTest do
    end

    test "change_platform_alias/1 returns a platform_alias changeset" do
-      platform_alias = platform_alias_fixture()
+      platform = platform_fixture()
+      platform_alias = platform_alias_fixture(%{platform_id: platform.id})
      assert %Ecto.Changeset{} = Platforms.change_platform_alias(platform_alias)
    end
  end
@@ -188,62 +188,4 @@ defmodule Bright.StreamsTest do
      # assert_received {:progress, %{stage: :generating_thumbnail, done: 1, total: 1}}
    end
  end

  describe "uploads" do
    alias Bright.Streams.Upload

    import Bright.StreamsFixtures

    @invalid_attrs %{size: nil, filename: nil, content_type: nil}

    test "list_uploads/0 returns all uploads" do
      upload = upload_fixture()
      assert Streams.list_uploads() == [upload]
    end

    test "get_upload!/1 returns the upload with given id" do
      upload = upload_fixture()
      assert Streams.get_upload!(upload.id) == upload
    end

    test "create_upload/1 with valid data creates a upload" do
      valid_attrs = %{size: 42, filename: "some filename", content_type: "some content_type"}

      assert {:ok, %Upload{} = upload} = Streams.create_upload(valid_attrs)
      assert upload.size == 42
      assert upload.filename == "some filename"
      assert upload.content_type == "some content_type"
    end

    test "create_upload/1 with invalid data returns error changeset" do
      assert {:error, %Ecto.Changeset{}} = Streams.create_upload(@invalid_attrs)
    end

    test "update_upload/2 with valid data updates the upload" do
      upload = upload_fixture()
      update_attrs = %{size: 43, filename: "some updated filename", content_type: "some updated content_type"}

      assert {:ok, %Upload{} = upload} = Streams.update_upload(upload, update_attrs)
      assert upload.size == 43
      assert upload.filename == "some updated filename"
      assert upload.content_type == "some updated content_type"
    end

    test "update_upload/2 with invalid data returns error changeset" do
      upload = upload_fixture()
      assert {:error, %Ecto.Changeset{}} = Streams.update_upload(upload, @invalid_attrs)
      assert upload == Streams.get_upload!(upload.id)
    end

    test "delete_upload/1 deletes the upload" do
      upload = upload_fixture()
      assert {:ok, %Upload{}} = Streams.delete_upload(upload)
      assert_raise Ecto.NoResultsError, fn -> Streams.get_upload!(upload.id) end
    end

    test "change_upload/1 returns a upload changeset" do
      upload = upload_fixture()
      assert %Ecto.Changeset{} = Streams.change_upload(upload)
    end
  end
end
@@ -8,6 +8,8 @@ defmodule BrightWeb.PlatformAliasControllerTest do
  @invalid_attrs %{url: nil}

  describe "index" do
+    setup [:create_admin]

    test "lists all platform_aliases", %{conn: conn} do
      conn = get(conn, ~p"/platform_aliases")
      assert html_response(conn, 200) =~ "Listing Platform aliases"
@@ -15,6 +17,8 @@ defmodule BrightWeb.PlatformAliasControllerTest do
  end

  describe "new platform_alias" do
+    setup [:create_admin]

    test "renders form", %{conn: conn} do
      conn = get(conn, ~p"/platform_aliases/new")
      assert html_response(conn, 200) =~ "New Platform alias"
@@ -22,9 +26,20 @@ defmodule BrightWeb.PlatformAliasControllerTest do
  end

  describe "create platform_alias" do
-    test "redirects to show when data is valid", %{conn: conn} do
-      conn = post(conn, ~p"/platform_aliases", platform_alias: @create_attrs)
+    setup [:create_admin]
+
+    test "redirects to show when data is valid", %{conn: conn} do
+      platform = platform_fixture()
+
+      create_attrs = %{
+        url: "https://example.com",
+        platform_id: platform.id
+      }
+
+      conn =
+        post(conn, ~p"/platform_aliases", platform_alias: create_attrs)
+
+      # IO.puts(conn.resp_body)
      assert %{id: id} = redirected_params(conn)
      assert redirected_to(conn) == ~p"/platform_aliases/#{id}"

@@ -39,7 +54,7 @@ defmodule BrightWeb.PlatformAliasControllerTest do
  end

  describe "edit platform_alias" do
-    setup [:create_platform_alias]
+    setup [:create_platform_alias, :create_admin]

    test "renders form for editing chosen platform_alias", %{
      conn: conn,
@@ -51,7 +66,7 @@ defmodule BrightWeb.PlatformAliasControllerTest do
  end

  describe "update platform_alias" do
-    setup [:create_platform_alias]
+    setup [:create_platform_alias, :create_admin]

    test "redirects when data is valid", %{conn: conn, platform_alias: platform_alias} do
      conn = put(conn, ~p"/platform_aliases/#{platform_alias}", platform_alias: @update_attrs)
@@ -68,7 +83,7 @@ defmodule BrightWeb.PlatformAliasControllerTest do
  end

  describe "delete platform_alias" do
-    setup [:create_platform_alias]
+    setup [:create_platform_alias, :create_admin]

    test "deletes chosen platform_alias", %{conn: conn, platform_alias: platform_alias} do
      conn = delete(conn, ~p"/platform_aliases/#{platform_alias}")
@@ -81,7 +96,8 @@ defmodule BrightWeb.PlatformAliasControllerTest do
  end

  defp create_platform_alias(_) do
-    platform_alias = platform_alias_fixture()
+    platform = platform_fixture()
+    platform_alias = platform_alias_fixture(%{platform_id: platform.id})
    %{platform_alias: platform_alias}
  end
end
@@ -3,9 +3,14 @@ defmodule BrightWeb.UploadControllerTest do

  import Bright.StreamsFixtures

-  @create_attrs %{size: 42, filename: "some filename", content_type: "some content_type"}
-  @update_attrs %{size: 43, filename: "some updated filename", content_type: "some updated content_type"}
-  @invalid_attrs %{size: nil, filename: nil, content_type: nil}
+  @create_attrs %{
+    size: 42,
+    filename: "some filename",
+    content_type: "some content_type",
+    user_id: 1
+  }
+
+  @invalid_attrs %{size: nil, filename: nil, content_type: nil, user_id: nil}

  describe "index" do
    test "lists all uploads", %{conn: conn} do
@@ -14,56 +19,6 @@ defmodule BrightWeb.UploadControllerTest do
    end
  end

  describe "new upload" do
    test "renders form", %{conn: conn} do
      conn = get(conn, ~p"/uploads/new")
      assert html_response(conn, 200) =~ "New Upload"
    end
  end

  describe "create upload" do
    test "redirects to show when data is valid", %{conn: conn} do
      conn = post(conn, ~p"/uploads", upload: @create_attrs)

      assert %{id: id} = redirected_params(conn)
      assert redirected_to(conn) == ~p"/uploads/#{id}"

      conn = get(conn, ~p"/uploads/#{id}")
      assert html_response(conn, 200) =~ "Upload #{id}"
    end

    test "renders errors when data is invalid", %{conn: conn} do
      conn = post(conn, ~p"/uploads", upload: @invalid_attrs)
      assert html_response(conn, 200) =~ "New Upload"
    end
  end

  describe "edit upload" do
    setup [:create_upload]

    test "renders form for editing chosen upload", %{conn: conn, upload: upload} do
      conn = get(conn, ~p"/uploads/#{upload}/edit")
      assert html_response(conn, 200) =~ "Edit Upload"
    end
  end

  describe "update upload" do
    setup [:create_upload]

    test "redirects when data is valid", %{conn: conn, upload: upload} do
      conn = put(conn, ~p"/uploads/#{upload}", upload: @update_attrs)
      assert redirected_to(conn) == ~p"/uploads/#{upload}"

      conn = get(conn, ~p"/uploads/#{upload}")
      assert html_response(conn, 200) =~ "some updated filename"
    end

    test "renders errors when data is invalid", %{conn: conn, upload: upload} do
      conn = put(conn, ~p"/uploads/#{upload}", upload: @invalid_attrs)
      assert html_response(conn, 200) =~ "Edit Upload"
    end
  end

  describe "delete upload" do
    setup [:create_upload]

@@ -1,113 +1,117 @@
- defmodule BrightWeb.UploadLiveTest do
-   use BrightWeb.ConnCase
+ # defmodule BrightWeb.UploadLiveTest do
+ #   use BrightWeb.ConnCase

-   import Phoenix.LiveViewTest
-   import Bright.StreamsFixtures
+ #   import Phoenix.LiveViewTest
+ #   import Bright.StreamsFixtures

-   @create_attrs %{size: 42, filename: "some filename", content_type: "some content_type"}
-   @update_attrs %{size: 43, filename: "some updated filename", content_type: "some updated content_type"}
-   @invalid_attrs %{size: nil, filename: nil, content_type: nil}
+ #   @create_attrs %{size: 42, filename: "some filename", content_type: "some content_type"}
+ #   @update_attrs %{
+ #     size: 43,
+ #     filename: "some updated filename",
+ #     content_type: "some updated content_type"
+ #   }
+ #   @invalid_attrs %{size: nil, filename: nil, content_type: nil}

-   defp create_upload(_) do
-     upload = upload_fixture()
-     %{upload: upload}
-   end
+ #   defp create_upload(_) do
+ #     upload = upload_fixture()
+ #     %{upload: upload}
+ #   end

-   describe "Index" do
-     setup [:create_upload]
+ #   describe "Index" do
+ #     setup [:create_upload, :create_user]

-     test "lists all uploads", %{conn: conn, upload: upload} do
-       {:ok, _index_live, html} = live(conn, ~p"/uploads")
+ #     test "lists all uploads", %{conn: conn, upload: upload} do
+ #       {:ok, _index_live, html} = live(conn, ~p"/uploads")

-       assert html =~ "Listing Uploads"
-       assert html =~ upload.filename
-     end
+ #       assert html =~ "Listing Uploads"
+ #       assert html =~ upload.filename
+ #     end

-     test "saves new upload", %{conn: conn} do
-       {:ok, index_live, _html} = live(conn, ~p"/uploads")
+ #     test "saves new upload", %{conn: conn} do
+ #       {:ok, index_live, _html} = live(conn, ~p"/uploads")

-       assert index_live |> element("a", "New Upload") |> render_click() =~
-                "New Upload"
+ #       assert index_live |> element("a", "New Upload") |> render_click() =~
+ #                "New Upload"

-       assert_patch(index_live, ~p"/uploads/new")
+ #       assert_patch(index_live, ~p"/uploads/new")

-       assert index_live
-              |> form("#upload-form", upload: @invalid_attrs)
-              |> render_change() =~ "can't be blank"
+ #       assert index_live
+ #              |> form("#upload-form", upload: @invalid_attrs)
+ #              |> render_change() =~ "can't be blank"

-       assert index_live
-              |> form("#upload-form", upload: @create_attrs)
-              |> render_submit()
+ #       assert index_live
+ #              |> form("#upload-form", upload: @create_attrs)
+ #              |> render_submit()

-       assert_patch(index_live, ~p"/uploads")
+ #       assert_patch(index_live, ~p"/uploads")

-       html = render(index_live)
-       assert html =~ "Upload created successfully"
-       assert html =~ "some filename"
-     end
+ #       html = render(index_live)
+ #       assert html =~ "Upload created successfully"
+ #       assert html =~ "some filename"
+ #     end

-     test "updates upload in listing", %{conn: conn, upload: upload} do
-       {:ok, index_live, _html} = live(conn, ~p"/uploads")
+ #     test "updates upload in listing", %{conn: conn, upload: upload} do
+ #       {:ok, index_live, _html} = live(conn, ~p"/uploads")

-       assert index_live |> element("#uploads-#{upload.id} a", "Edit") |> render_click() =~
-                "Edit Upload"
+ #       assert index_live |> element("#uploads-#{upload.id} a", "Edit") |> render_click() =~
+ #                "Edit Upload"

-       assert_patch(index_live, ~p"/uploads/#{upload}/edit")
+ #       assert_patch(index_live, ~p"/uploads/#{upload}/edit")

-       assert index_live
-              |> form("#upload-form", upload: @invalid_attrs)
-              |> render_change() =~ "can't be blank"
+ #       assert index_live
+ #              |> form("#upload-form", upload: @invalid_attrs)
+ #              |> render_change() =~ "can't be blank"

-       assert index_live
-              |> form("#upload-form", upload: @update_attrs)
-              |> render_submit()
+ #       assert index_live
+ #              |> form("#upload-form", upload: @update_attrs)
+ #              |> render_submit()

-       assert_patch(index_live, ~p"/uploads")
+ #       assert_patch(index_live, ~p"/uploads")

-       html = render(index_live)
-       assert html =~ "Upload updated successfully"
-       assert html =~ "some updated filename"
-     end
+ #       html = render(index_live)
+ #       assert html =~ "Upload updated successfully"
+ #       assert html =~ "some updated filename"
+ #     end

-     test "deletes upload in listing", %{conn: conn, upload: upload} do
-       {:ok, index_live, _html} = live(conn, ~p"/uploads")
+ #     test "deletes upload in listing", %{conn: conn, upload: upload} do
+ #       {:ok, index_live, _html} = live(conn, ~p"/uploads")

-       assert index_live |> element("#uploads-#{upload.id} a", "Delete") |> render_click()
-       refute has_element?(index_live, "#uploads-#{upload.id}")
-     end
-   end
+ #       assert index_live |> element("#uploads-#{upload.id} a", "Delete") |> render_click()
+ #       refute has_element?(index_live, "#uploads-#{upload.id}")
+ #     end
+ #   end

-   describe "Show" do
-     setup [:create_upload]
+ #   describe "Show" do
+ #     setup [:create_upload, :create_user]

-     test "displays upload", %{conn: conn, upload: upload} do
-       {:ok, _show_live, html} = live(conn, ~p"/uploads/#{upload}")
+ #     test "displays upload", %{conn: conn, upload: upload} do
+ #       {:ok, _show_live, html} = live(conn, ~p"/uploads/#{upload}")

-       assert html =~ "Show Upload"
-       assert html =~ upload.filename
-     end
+ #       assert html =~ "Show Upload"
+ #       assert html =~ upload.filename
+ #     end

-     test "updates upload within modal", %{conn: conn, upload: upload} do
-       {:ok, show_live, _html} = live(conn, ~p"/uploads/#{upload}")
+ #     test "updates upload within modal", %{conn: conn, upload: upload} do
+ #       {:ok, show_live, _html} = live(conn, ~p"/uploads/#{upload}")

-       assert show_live |> element("a", "Edit") |> render_click() =~
-                "Edit Upload"
+ #       assert show_live |> element("a", "Edit") |> render_click() =~
+ #                "Edit Upload"

-       assert_patch(show_live, ~p"/uploads/#{upload}/show/edit")
+ #       assert_patch(show_live, ~p"/uploads/#{upload}/show/edit")

-       assert show_live
-              |> form("#upload-form", upload: @invalid_attrs)
-              |> render_change() =~ "can't be blank"
+ #       assert show_live
+ #              |> form("#upload-form", upload: @invalid_attrs)
+ #              |> render_change() =~ "can't be blank"

-       assert show_live
-              |> form("#upload-form", upload: @update_attrs)
-              |> render_submit()
+ #       assert show_live
+ #              |> form("#upload-form", upload: @update_attrs)
+ #              |> render_submit()

-       assert_patch(show_live, ~p"/uploads/#{upload}")
+ #       assert_patch(show_live, ~p"/uploads/#{upload}")

-       html = render(show_live)
-       assert html =~ "Upload updated successfully"
-       assert html =~ "some updated filename"
-     end
-   end
- end
+ #       html = render(show_live)
+ #       assert html =~ "Upload updated successfully"
+ #       assert html =~ "some updated filename"
+ #     end
+ #   end
+ # end
@@ -31,8 +31,8 @@
    "tunnel": "dotenvx run -f ./.env.development -- chisel client bright.fp.sbtp.xyz:9090 R:4000",
    "backup": "docker exec -t postgres_db pg_dumpall -c -U postgres > ./backups/dev_`date +%Y-%m-%d_%H_%M_%S`.sql",
    "act": "dotenvx run -f ./.env.testing -- act -W ./.gitea/workflows --secret-file .env.development",
-   "act:builder": "dotenvx run -f ./.env.testing -- act -W ./.gitea/workflows/builder.yaml --secret-file .env.testing --var-file .env.testing --insecure-secrets",
-   "act:tests": "dotenvx run -f ./.env.testing -- act -W ./.gitea/workflows/tests.yaml --secret-file .env.testing --var-file .env.testing --insecure-secrets",
+   "act:builder": "dotenvx run -f ./.env.testing -- act --env-file .env.testing -W ./.gitea/workflows/builder.yaml --secret-file .env.testing --var-file .env.testing --insecure-secrets",
+   "act:tests": "dotenvx run -f ./.env.testing -- act --env-file .env.testing -W ./.gitea/workflows/tests.yaml --secret-file .env.testing --var-file .env.testing --insecure-secrets",
    "bright:compile:watch": "cd ./apps/bright && find . -type f -name \"*.ex\" -o -name \"*.exs\" | entr -r mix compile --warnings-as-errors",
    "bright:compile:watch2": "cd ./apps/bright && pnpx chokidar-cli \"**/*\" -i \"deps/**\" -i \"_build/**\" -c \"mix compile --warnings-as-errors\"",
    "bright:dev": "cd ./apps/bright && dotenvx run -f ../../.env.development -e MIX_ENV=dev -- mix phx.server",
@@ -3,7 +3,13 @@
// @see https://grep.app/search?q=for+tuple+in+regexall%28
// @see https://github.com/lrmendess/open-source-datalake/blob/main/minio.tf
locals {
-  envs = { for tuple in regexall("(.*)=(.*)", file("../.env")) : tuple[0] => sensitive(tuple[1]) }
+  envs = { for tuple in regexall("(.*)=(.*)", file("../.env.production")) : tuple[0] => sensitive(tuple[1]) }
}

variable "ipfs_hosts" {
  description = "List of IP addresses for IPFS nodes"
  type        = list(string)
  default     = ["161.97.186.203", "38.242.193.246"]
}

@@ -216,6 +222,20 @@ resource "vultr_instance" "tracker" {
  reserved_ip_id = vultr_reserved_ip.futureporn_tracker_ip.id
}

resource "ansible_host" "ipfs_vps" {
  for_each = { for idx, host in var.ipfs_hosts : idx => host }
  name     = each.value
  groups   = ["ipfs"]

  variables = {
    ansible_user = "root"
    ansible_host = each.value
  }
}

resource "ansible_host" "capture_vps" {
  for_each = { for idx, host in vultr_instance.capture_vps : idx => host }
  name     = each.value.hostname
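The requirements.yml change pulls in the cloud.terraform collection, which is presumably how these ansible_host resources become Ansible inventory. A minimal sketch of an inventory file for its terraform_provider plugin; the file location and project_path are assumptions, not shown in this commit:

    # illustrative: ansible/inventory/terraform.yml
    plugin: cloud.terraform.terraform_provider
    project_path: ../terraform   # directory containing the ansible_host resources above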