add find_livestream_invitation
parent e9bb1ce4fb
commit f5451ce52c
apps/bright
  config
  lib
    bright
    bright_web/controllers/vtuber_html
  priv/repo/migrations
    20250311210011_add_x_posts.exs
    20250311231317_add_twitter_rss.exs
    20250312012612_change_x_posts_raw_to_text.exs
    20250312021957_enforce_unique_x_post.exs
  test
@@ -29,7 +29,11 @@ config :bright, Oban,
  repo: Bright.Repo,
  plugins: [
    {Oban.Plugins.Pruner, max_age: 60 * 60 * 24 * 7},
    {Oban.Plugins.Lifeline, rescue_after: :timer.minutes(30)}
    {Oban.Plugins.Lifeline, rescue_after: :timer.minutes(30)},
    {Oban.Plugins.Cron,
     crontab: [
       {"*/15 * * * *", Bright.ObanWorkers.ReadPosts}
     ]}
  ]

# @see https://github.com/ueberauth/ueberauth
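The new `Oban.Plugins.Cron` entry enqueues `Bright.ObanWorkers.ReadPosts` every 15 minutes. For an ad-hoc run outside that schedule, the same job can be enqueued by hand; a minimal sketch, assuming the default Oban instance configured above:

```elixir
# Hypothetical one-off enqueue (not part of this commit); mirrors what the
# cron plugin does on its */15 schedule.
{:ok, _job} =
  %{}
  |> Bright.ObanWorkers.ReadPosts.new()
  |> Oban.insert()
```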
apps/bright/lib/bright/oban_workers/save_posts.ex (new file, 60 lines)
@@ -0,0 +1,60 @@
defmodule Bright.ObanWorkers.ReadPosts do
  @moduledoc """
  Read a vtuber's social media feed and save the posts to the db

  * [ ] X
  """

  alias Bright.Vtubers.Vtuber
  use Oban.Worker, queue: :default, max_attempts: 3

  alias Bright.Repo
  alias Bright.Socials.XPost

  require Logger

  @impl Oban.Worker
  def perform(%Oban.Job{args: %{}}) do
    Logger.info(">>>> Save Posts is performing.")

    vtubers = Repo.all(Vtuber)
    Logger.debug("there are #{length(vtubers)} vtubers.")

    Enum.each(vtubers, fn vtuber ->
      case XPost.get_new_posts(vtuber) do
        {:ok, posts} ->
          Logger.debug("WE GOT SOME POSTS. vtuber.id=#{vtuber.id} posts=#{inspect(posts)}")

          Enum.each(posts, fn post ->
            changeset =
              XPost.changeset(%XPost{}, Map.put(post, :vtuber_id, vtuber.id))

            case Repo.insert(changeset, on_conflict: :nothing, conflict_target: [:id]) do
              {:ok, _record} ->
                :ok

              {:error, reason} ->
                Logger.error("Failed to insert post: #{inspect(reason)}")
            end
          end)

          # # Add vtuber_id to each post
          # # we do this because insert_all doesn't support associations.
          # posts_with_vtuber_id =
          #   Enum.map(posts, fn post ->
          #     Map.put(post, :vtuber_id, vtuber.id)
          #   end)

          # # Insert the posts into the database
          # # Repo.insert_all(XPost, posts, on_conflict: :nothing)
          # Repo.insert_all(XPost, posts_with_vtuber_id, on_conflict: :nothing)

        {:error, reason} ->
          Logger.error("Failed to fetch posts for VTuber #{vtuber.id}: #{inspect(reason)}")

        _ ->
          Logger.debug("No posts fetched for VTuber #{vtuber.id}")
      end
    end)
  end
end
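The worker above is the job the cron entry schedules. To exercise the fetch/insert loop once without waiting on the queue, the callback can be invoked directly; a sketch assuming an IEx session with the application started:

```elixir
# Bypasses Oban entirely and runs the loop inline; watch the Logger output
# for the per-vtuber fetch and insert results.
Bright.ObanWorkers.ReadPosts.perform(%Oban.Job{args: %{}})
```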
@@ -1,64 +0,0 @@
# defmodule Bright.ObanWorkers.ScrapeX do
#   alias Bright.Vtubers.Vtuber
#   use Oban.Worker, queue: :default, max_attempts: 3

#   alias Bright.Streams.Vod

#   alias Bright.{
#     Repo,
#     Downloader,
#     B2,
#     Images
#   }

#   require Logger

#   @impl Oban.Worker
#   def perform(%Oban.Job{args: %{"vtuber_id" => vtuber_id}}) do
#     Logger.info(">>>> Scrape X is performing. with vtuber_id=#{vtuber_id}")

#     # @todo get vtuber from db
#     # @todo get vtuber's X account
#     # @todo get nitter URL
#     # @todo get X posts
#     # @todo queue posts we haven't yet processed
#     # @todo parse posts in the queue to find CB/Fansly/OF invite links
#     # @todo for each post with an invite, create a stream in the db

#     # case Repo.get(Vtuber, vtuber_id) do

#     #   nil ->
#     #     Logger.error("Vtuber id #{vtuber_id} not found.")
#     #     {:error, "Vtuber not found"}

#     #   %Vtuber{} = vtuber ->
#     #     with {:ok, } <-

#     # end

#     case Repo.get(Vod, vod_id) do
#       nil ->
#         Logger.error("VOD ID #{vod_id} not found")
#         {:error, "VOD not found"}

#       %Vod{origin_temp_input_url: origin_temp_input_url} = vod ->
#         with {:ok, local_filename} <- Downloader.get(origin_temp_input_url),
#              {:ok, thumbnail_filename} <- Images.create_thumbnail(local_filename),
#              {:ok, s3Asset} <- B2.put(thumbnail_filename) do
#           update_vod_with_thumbnail_url(vod, s3Asset.cdn_url)
#         else
#           {:error, reason} ->
#             Logger.error("Failed to create thumbnail for VOD ID #{vod_id}: #{inspect(reason)}")
#             {:error, reason}
#         end
#     end
#   end

#   # defp generate_thumbnail_url(basename), do: "#{Application.get_env(:bright, :public_s3_endpoint)}/#{basename}"
#   defp update_vod_with_thumbnail_url(vod, thumbnail_url) do
#     case Repo.update(vod |> Ecto.Changeset.change(thumbnail_url: thumbnail_url)) do
#       {:ok, updated_vod} -> {:ok, updated_vod}
#       {:error, changeset} -> {:error, changeset}
#     end
#   end
# end
@@ -1,5 +1,6 @@
defmodule Bright.Socials.RSSParser do
  require Logger

  @moduledoc """
  Module to parse X RSS feeds and extract URLs, datestamps, and text content from items.
  """
@@ -7,14 +8,9 @@ defmodule Bright.Socials.RSSParser do
  @doc """
  Extracts URLs, datestamps, and text content from the RSS feed data.

  # Example usage:
  # Assuming `data` is the parsed RSS feed data you provided.

  ```elixir
  item_details = RSSParser.extract_item_details(data)
  IO.inspect(item_details)
  ```

  ## Example usage:
  item_details = RSSParser.extract_item_details(data)
  IO.inspect(item_details)
  """
  def extract_item_details(data) do
    data
@@ -22,8 +18,8 @@
    |> Enum.map(fn item ->
      %{
        url: extract_value(item, :link),
        datestamp: extract_value(item, :pubDate),
        text: extract_value(item, :title)
        date: extract_value(item, :pubDate) |> parse_rss_datetime(),
        raw: extract_value(item, :title)
      }
    end)
  end
@@ -31,19 +27,33 @@
  # Helper function to find all :item elements in the nested structure
  defp find_items(data) do
    data
    |> List.wrap() # Ensure data is treated as a list
    |> List.wrap()
    |> Enum.flat_map(fn
      %{name: :item} = item -> [item] # If it's an item, return it
      %{value: children} -> find_items(children) # Recursively search children
      _ -> [] # Skip anything else
      %{name: :item} = item -> [item]
      %{value: children} -> find_items(children)
      _ -> []
    end)
  end

  # Helper function to extract the value for a given key from an item
  defp extract_value(item, key) do
    case Enum.find(item[:value], fn %{name: name} -> name == key end) do
      %{value: [value]} -> value # Extract the value if found
      _ -> nil # Return nil if the key is not found
      %{value: [value]} -> value
      _ -> nil
    end
  end

  # Parses RSS datetime format into UTC DateTime
  defp parse_rss_datetime(nil), do: nil

  defp parse_rss_datetime(datetime_str) do
    case Timex.parse(datetime_str, "{RFC1123}") do
      {:ok, datetime} ->
        datetime

      {:error, reason} ->
        Logger.error("failed to parse time. reason=#{reason}")
        nil
    end
  end
end
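`parse_rss_datetime/1` leans on Timex's `{RFC1123}` directive, which matches the `pubDate` format used in the feeds and in the test data further down; a rough sketch of the expected behaviour (illustrative value, not from the diff):

```elixir
# An RFC1123 pubDate string should parse to a UTC DateTime; anything else
# falls through to the error clause and comes back as nil.
{:ok, dt} = Timex.parse("Wed, 12 Oct 2022 12:00:00 GMT", "{RFC1123}")
# dt compares equal to ~U[2022-10-12 12:00:00Z]
```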
@@ -6,10 +6,7 @@ defmodule Bright.Socials.XPost do
  alias Quinn
  require Logger

  ## @todo this needs to be pulled from the database
  ## @todo rss.app doesn't scale.
  ## @todo we need a better way to get data from vtuber's X accounts.
  @sample_rss_feed "https://rss.app/feeds/FhPetvUY036xiFau.xml"
  @livestream_domains ["chaturbate.com", "fansly.com", "onlyfans.com"]

  @doc """
  We cache the posts in the db so it's clear which tweets we've read and which ones we haven't.
@@ -29,41 +26,36 @@
  @doc false
  def changeset(post, attrs) do
    post
    |> cast(attrs, [:raw, :url, :date])
    |> cast(attrs, [:raw, :url, :date, :vtuber_id])
    |> validate_required([:raw, :url, :date])
    |> unique_constraint([:date, :url])
  end

  # def get_posts(vtuber_id) do
  #   case Repo.get(Vtuber, vtuber_id) do
  #     nil ->
  #       Logger.error("Vtuber id #{vtuber_id} not found.")
  #       {:error, "Vtuber not found"}

  #     %Vtuber{} = vtuber ->
  #       with {:ok, posts} <- get_posts(@sample_rss_feed) # @todo This feed URL needs to be dynamically looked up
  #       do
  #         save_posts(posts)
  #       end
  #   end

  # end

  @doc """
  We read X posts via RSS URL.
  Get all X posts available in the vtuber's rss feed
  """
  def get_posts(feed_url) do
    case HTTPoison.get(feed_url) do
      {:ok, %HTTPoison.Response{ body: body }} ->
  def get_new_posts(%Vtuber{display_name: display_name, twitter_rss: twitter_rss}) do
    Logger.debug(
      "get_new_posts was called with vtuber struct. twitter_rss=#{inspect(twitter_rss)}"
    )

    case twitter_rss do
      nil ->
        {:warning, "#{display_name} doesn't have a twitter_rss URL."}

      _ ->
        get_new_posts(twitter_rss)
    end
  end

  def get_new_posts(feed_url) do
    case HTTPoison.get(feed_url) do
      {:ok, %HTTPoison.Response{body: body}} ->
        data = Quinn.parse(body)
        extract = RSSParser.extract_item_details(data)
        Logger.debug("we GETted a rss feed. Parsed data=#{inspect(data)}")
        Logger.debug("we parsed the rss feed using RSSParser. parsed=#{inspect(extract)}")

        {:ok, extract}

      {:error, reason} ->
@@ -72,7 +64,6 @@ defmodule Bright.Socials.XPost do
    end
  end

  @doc """
  save the posts to the db
  """
@@ -80,5 +71,12 @@
    Logger.debug("@todo implement save_posts()")
  end

  @doc """
  return true if there is a livestream invitation in the post, false otherwise
  """
  def find_livestream_invitation(%__MODULE__{raw: raw}) do
    Enum.any?(@livestream_domains, fn domain ->
      String.downcase(raw) =~ ~r/#{domain}\/[^\s]*/
    end)
  end
end
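`find_livestream_invitation/1` simply checks whether any entry in `@livestream_domains` appears in the post body followed by a path. A quick sketch of the matching with made-up strings (not from the commit):

```elixir
# The interpolated regex wants "<domain>/" plus a non-space suffix, so a bare
# "fansly: melody.buzz" mention is not treated as an invitation.
String.downcase("going live! chaturbate.com/projektmelody") =~ ~r/chaturbate.com\/[^\s]*/
#=> true
String.downcase("no links today, just vibes") =~ ~r/onlyfans.com\/[^\s]*/
#=> false
```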
@@ -1,6 +1,7 @@
defmodule Bright.Vtubers.Vtuber do
  use Ecto.Schema
  import Ecto.Changeset
  alias Bright.Repo

  schema "vtubers" do
    field :image, :string
@@ -29,6 +30,9 @@ defmodule Bright.Vtubers.Vtuber do
    field :fansly_id, :string
    field :chaturbate_id, :string
    field :twitter_id, :string
    field :twitter_rss, :string

    has_many :x_posts, Bright.Socials.XPost

    timestamps(type: :utc_datetime)
  end
@@ -62,8 +66,22 @@ defmodule Bright.Vtubers.Vtuber do
      :theme_color,
      :fansly_id,
      :chaturbate_id,
      :twitter_id
      :twitter_id,
      :twitter_rss
    ])
    |> validate_required([:slug, :display_name, :image, :theme_color])
  end

  @doc """
  Returns the list of vtubers.

  ## Examples

      iex> list_vtubers()
      [%Vtuber{}, ...]

  """
  def list_vtubers do
    Repo.all(Vtuber)
  end
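With the new `has_many :x_posts` association, a vtuber's cached posts can be loaded alongside the record using standard Ecto preloading; an assumed usage sketch, not shown in the commit:

```elixir
# Hypothetical query: list vtubers together with their cached X posts.
Bright.Vtubers.Vtuber.list_vtubers()
|> Bright.Repo.preload(:x_posts)
```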
@@ -14,6 +14,7 @@
  <:item title="Slug">{@vtuber.slug}</:item>
  <:item title="Display Name">{@vtuber.display_name}</:item>
  <:item title="Theme Color">{@vtuber.theme_color}</:item>
  <:item title="Twitter RSS">{@vtuber.twitter_rss}</:item>
</.list>

<.back navigate={~p"/vtubers"}>Back to vtubers</.back>
@@ -26,6 +26,7 @@
  <.input field={f[:merch]} type="text" label="Merch" />
  <.input field={f[:description_1]} type="text" label="Description 1" />
  <.input field={f[:description_2]} type="text" label="Description 2" />
  <.input field={f[:twitter_rss]} type="text" label="Twitter RSS" />

  <.input field={f[:fansly_id]} type="text" label="Fansly ID" />
  <.input field={f[:chaturbate_id]} type="text" label="Chaturbate ID" />
@@ -11,7 +11,7 @@
"bunt": {:hex, :bunt, "0.2.1", "e2d4792f7bc0ced7583ab54922808919518d0e57ee162901a16a1b6664ef3b14", [:mix], [], "hexpm", "a330bfb4245239787b15005e66ae6845c9cd524a288f0d141c148b02603777a5"},
"castore": {:hex, :castore, "1.0.11", "4bbd584741601eb658007339ea730b082cc61f3554cf2e8f39bf693a11b49073", [:mix], [], "hexpm", "e03990b4db988df56262852f20de0f659871c35154691427a5047f4967a16a62"},
"cc_precompiler": {:hex, :cc_precompiler, "0.1.10", "47c9c08d8869cf09b41da36538f62bc1abd3e19e41701c2cea2675b53c704258", [:mix], [{:elixir_make, "~> 0.7", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "f6e046254e53cd6b41c6bacd70ae728011aa82b2742a80d6e2214855c6e06b22"},
"certifi": {:hex, :certifi, "2.12.0", "2d1cca2ec95f59643862af91f001478c9863c2ac9cb6e2f89780bfd8de987329", [:rebar3], [], "hexpm", "ee68d85df22e554040cdb4be100f33873ac6051387baf6a8f6ce82272340ff1c"},
"certifi": {:hex, :certifi, "2.14.0", "ed3bef654e69cde5e6c022df8070a579a79e8ba2368a00acf3d75b82d9aceeed", [:rebar3], [], "hexpm", "ea59d87ef89da429b8e905264fdec3419f84f2215bb3d81e07a18aac919026c3"},
"coerce": {:hex, :coerce, "1.0.1", "211c27386315dc2894ac11bc1f413a0e38505d808153367bd5c6e75a4003d096", [:mix], [], "hexpm", "b44a691700f7a1a15b4b7e2ff1fa30bebd669929ac8aa43cffe9e2f8bf051cf1"},
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
"comeonin": {:hex, :comeonin, "5.5.0", "364d00df52545c44a139bad919d7eacb55abf39e86565878e17cebb787977368", [:mix], [], "hexpm", "6287fc3ba0aad34883cbe3f7949fc1d1e738e5ccdce77165bc99490aa69f47fb"},
@@ -37,7 +37,7 @@
"finch": {:hex, :finch, "0.19.0", "c644641491ea854fc5c1bbaef36bfc764e3f08e7185e1f084e35e0672241b76d", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.6.2 or ~> 1.7", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 1.1", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "fc5324ce209125d1e2fa0fcd2634601c52a787aff1cd33ee833664a5af4ea2b6"},
"floki": {:hex, :floki, "0.37.0", "b83e0280bbc6372f2a403b2848013650b16640cd2470aea6701f0632223d719e", [:mix], [], "hexpm", "516a0c15a69f78c47dc8e0b9b3724b29608aa6619379f91b1ffa47109b5d0dd3"},
"gettext": {:hex, :gettext, "0.26.2", "5978aa7b21fada6deabf1f6341ddba50bc69c999e812211903b169799208f2a8", [:mix], [{:expo, "~> 0.5.1 or ~> 1.0", [hex: :expo, repo: "hexpm", optional: false]}], "hexpm", "aa978504bcf76511efdc22d580ba08e2279caab1066b76bb9aa81c4a1e0a32a5"},
"hackney": {:hex, :hackney, "1.20.1", "8d97aec62ddddd757d128bfd1df6c5861093419f8f7a4223823537bad5d064e2", [:rebar3], [{:certifi, "~> 2.12.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~> 6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~> 1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "fe9094e5f1a2a2c0a7d10918fee36bfec0ec2a979994cff8cfe8058cd9af38e3"},
"hackney": {:hex, :hackney, "1.23.0", "55cc09077112bcb4a69e54be46ed9bc55537763a96cd4a80a221663a7eafd767", [:rebar3], [{:certifi, "~> 2.14.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~> 6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~> 1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "6cd1c04cd15c81e5a493f167b226a15f0938a84fc8f0736ebe4ddcab65c0b44e"},
"heap": {:hex, :heap, "2.0.2", "d98cb178286cfeb5edbcf17785e2d20af73ca57b5a2cf4af584118afbcf917eb", [:mix], [], "hexpm", "ba9ea2fe99eb4bcbd9a8a28eaf71cbcac449ca1d8e71731596aace9028c9d429"},
"heroicons": {:hex, :heroicons, "0.5.6", "95d730e7179c633df32d95c1fdaaecdf81b0da11010b89b737b843ac176a7eb5", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:phoenix_live_view, ">= 0.18.2", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}], "hexpm", "ca267f02a5fa695a4178a737b649fb6644a2e399639d4ba7964c18e8a58c2352"},
"hpax": {:hex, :hpax, "1.0.2", "762df951b0c399ff67cc57c3995ec3cf46d696e41f0bba17da0518d94acd4aac", [:mix], [], "hexpm", "2f09b4c1074e0abd846747329eaa26d535be0eb3d189fa69d812bfb8bfefd32f"},
@@ -117,7 +117,7 @@
"thousand_island": {:hex, :thousand_island, "1.3.9", "095db3e2650819443e33237891271943fad3b7f9ba341073947581362582ab5a", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "25ab4c07badadf7f87adb4ab414e0ed374e5f19e72503aa85132caa25776e54f"},
"timex": {:hex, :timex, "3.7.11", "bb95cb4eb1d06e27346325de506bcc6c30f9c6dea40d1ebe390b262fad1862d1", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.20", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.1", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "8b9024f7efbabaf9bd7aa04f65cf8dcd7c9818ca5737677c7b76acbc6a94d1aa"},
"typed_struct": {:hex, :typed_struct, "0.3.0", "939789e3c1dca39d7170c87f729127469d1315dcf99fee8e152bb774b17e7ff7", [:mix], [], "hexpm", "c50bd5c3a61fe4e198a8504f939be3d3c85903b382bde4865579bc23111d1b6d"},
"tzdata": {:hex, :tzdata, "1.1.2", "45e5f1fcf8729525ec27c65e163be5b3d247ab1702581a94674e008413eef50b", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "cec7b286e608371602318c414f344941d5eb0375e14cfdab605cca2fe66cba8b"},
"tzdata": {:hex, :tzdata, "1.1.3", "b1cef7bb6de1de90d4ddc25d33892b32830f907e7fc2fccd1e7e22778ab7dfbc", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "d4ca85575a064d29d4e94253ee95912edfb165938743dbf002acdf0dcecb0c28"},
"ueberauth": {:hex, :ueberauth, "0.7.0", "9c44f41798b5fa27f872561b6f7d2bb0f10f03fdd22b90f454232d7b087f4b75", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "2efad9022e949834f16cc52cd935165049d81fa9e925690f91035c2e4b58d905"},
"ueberauth_github": {:hex, :ueberauth_github, "0.8.3", "1c478629b4c1dae446c68834b69194ad5cead3b6c67c913db6fdf64f37f0328f", [:mix], [{:oauth2, "~> 1.0 or ~> 2.0", [hex: :oauth2, repo: "hexpm", optional: false]}, {:ueberauth, "~> 0.7", [hex: :ueberauth, repo: "hexpm", optional: false]}], "hexpm", "ae0ab2879c32cfa51d7287a48219b262bfdab0b7ec6629f24160564247493cc6"},
"unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"},
@@ -0,0 +1,23 @@
defmodule :"Elixir.Bright.Repo.Migrations.AddXPosts" do
  use Ecto.Migration

  def change do
    create table(:x_posts) do
      add :raw, :string, null: false # Raw content of the tweet
      add :url, :string, null: false # URL of the tweet
      add :date, :utc_datetime, null: false # Date and time of the tweet
      add :is_invitation, :boolean, default: false # Whether the tweet contains an invite link

      add :vtuber_id, references(:vtubers, on_delete: :delete_all), null: false

      timestamps(type: :utc_datetime)
    end

    # Add an index on the `vtuber_id` for faster lookups
    create index(:x_posts, [:vtuber_id])

    # Add an index on the `date` field for sorting and filtering
    create index(:x_posts, [:date])
  end
end
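The quoted module name above is just the fully-expanded atom form of the usual alias; both spellings refer to the same module:

```elixir
# Aliases are sugar for :"Elixir."-prefixed atoms, so these are equal.
:"Elixir.Bright.Repo.Migrations.AddXPosts" == Bright.Repo.Migrations.AddXPosts
#=> true
```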
@@ -0,0 +1,9 @@
defmodule Bright.Repo.Migrations.AddTwitterRss do
  use Ecto.Migration

  def change do
    alter table(:vtubers) do
      add :twitter_rss, :string
    end
  end
end
@@ -0,0 +1,9 @@
defmodule Bright.Repo.Migrations.ChangeXPostsRawToText do
  use Ecto.Migration

  def change do
    alter table(:x_posts) do
      modify(:raw, :text)
    end
  end
end
@@ -0,0 +1,7 @@
defmodule Bright.Repo.Migrations.EnforceUniqueXPost do
  use Ecto.Migration

  def change do
    create unique_index(:x_posts, [:url], name: :x_posts_url_unique_index)
  end
end
@@ -37,16 +37,18 @@ defmodule Bright.RSSParserTest do
  test "extracts URLs, datestamps, and text content from RSS data" do
    result = RSSParser.extract_item_details(@sample_data)

    DateTime

    assert result == [
             %{
               url: "https://example.com/post/1",
               datestamp: "Wed, 12 Oct 2022 12:00:00 GMT",
               text: "Test Post 1"
               date: ~U[2022-10-12 12:00:00Z],
               raw: "Test Post 1"
             },
             %{
               url: "https://example.com/post/2",
               datestamp: "Wed, 12 Oct 2022 13:00:00 GMT",
               text: "Test Post 2"
               date: ~U[2022-10-12 13:00:00Z],
               raw: "Test Post 2"
             }
           ]
  end
apps/bright/test/bright/socials/x_post_test.exs (new file, 40 lines)
@@ -0,0 +1,40 @@
defmodule Bright.XPostTest do
  use Bright.DataCase

  alias Bright.Socials.XPost
  alias Bright.Vtubers.Vtuber
  alias Bright.XPostsFixtures

  @sample_feed "https://rss.app/feeds/FhPetvUY036xiFau.xml"

  describe "x_posts" do
    @tag :integration
    test "get_new_posts/1 with URL" do
      {:ok, posts} = XPost.get_new_posts(@sample_feed)
      assert length(posts) > 0
    end

    @tag :integration
    test "get_new_posts/1 with %Vtuber{}" do
      vtuber = %Vtuber{twitter_rss: @sample_feed}
      {:ok, posts} = XPost.get_new_posts(vtuber)
      assert length(posts) > 0
    end
  end

  describe "find_livestream_invitation" do
    @tag :unit
    test "identify posts with invitations" do
      for post <- XPostsFixtures.x_posts_live() do
        assert XPost.find_livestream_invitation(post) === true
      end
    end

    @tag :unit
    test "identify posts without invitations" do
      for post <- XPostsFixtures.x_posts_offline() do
        assert XPost.find_livestream_invitation(post) === false
      end
    end
  end
end
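The `@tag :integration` / `@tag :unit` markers make it easy to keep the feed-fetching tests out of the default run; a minimal sketch of a test helper that excludes them (assumed setup, not part of this commit):

```elixir
# In test_helper.exs: skip :integration-tagged tests unless explicitly included.
ExUnit.configure(exclude: [:integration])
ExUnit.start()
```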
@@ -1,17 +0,0 @@
defmodule Bright.XPostTest do
  use Bright.DataCase

  alias Bright.Socials.XPost

  @sample_feed "https://rss.app/feeds/FhPetvUY036xiFau.xml"

  describe "x_posts" do
    # import Bright.SocialsFixtures

    @tag :integration
    test "get_posts/1" do
      {:ok, posts} = XPost.get_posts(@sample_feed)
      assert length(posts) > 0
    end
  end
end
apps/bright/test/support/fixtures/x_posts_fixtures.ex (new file, 100 lines)
@@ -0,0 +1,100 @@
defmodule Bright.XPostsFixtures do
  alias Bright.Socials.XPost

  @moduledoc """
  This module defines test helpers for creating
  entities via the `Bright.Socials` context.
  """

  @doc """
  A x_post which does NOT contain an invite link to any of Chaturbate, Fansly, OnlyFans.
  """
  @fixtures_offline [
    %XPost{
      raw:
        "sry i got off early, gots a headphone headache but regardles.... I REALLY LIKE MONSTER HUNTER (we kicked 2 monster asses solo today!!!!)",
      date: ~U[2025-03-12T05:00:00.000Z],
      url: "https://x.com/ProjektMelody/status/1899686928913412421"
    },
    %XPost{
      raw:
        "▀▄▀▄▀▄ SCHEDULE ▄▀▄▀▄▀\n⦉ LINKS: http://linktr.ee/projektmelody\n⦉╰( ߬⚈ o⚈ꪷ)╯𖹭 ˗ˏˋ(‿(ᶅ͒)‿) ˎˊ˗ : https://afterdark.market\n",
      date: ~U[2025-03-05T06:25:35.000Z],
      url: "https://x.com/ProjektMelody/status/1897171614439223545"
    },
    %XPost{
      raw:
        "I'M LIVE! FEELIN GOOD! LETS GAEM! \n\nhttp://twitch.tv/projektmelody \nhttp://melody.buzz\n\nWe're also doin' a lil giveaway, as @GFuelEnergy\n\n sponsored us :D ---- use !join in twitch chat at spec. times!\n\nFeelin' shoppy? Code MEL gets ur butt 20% OFF ur order-- http://gfuel.ly/mel",
      date: ~U[2025-02-25T03:22:57.000Z],
      url: "https://x.com/ProjektMelody/status/1894226549215281249"
    },
    %XPost{
      raw:
        "HAPPY TWITCHIVERSARY!\n\nsadly ive succombed to doom scrolling & family issues lately. i couldnt push myself to bring my A-game.... so, we're 2D. however, @iJinzu is joining us! he offered to coach me in monster hunter! :,)\n\nlive: http://twitch.tv/projektmelody\nhttp://melody.buzz",
      date: ~U[2025-03-07T22:15:00.000Z],
      url: "https://x.com/ProjektMelody/status/1898135320874344448"
    }
  ]

  @doc """
  A x_post which contains an invite link to any of Fansly, Chaturbate, OnlyFans.
  """
  @fixtures_live [
    %XPost{
      raw: "🥯fansly: melody.buzz \n📷onlyfans.com/?ref=16786030 \n📷chaturbate.com/projektmelody",
      date: ~U[2025-03-05T18:30:00.000Z],
      url: "https://x.com/ProjektMelody/status/1897385055640805666"
    },
    %XPost{
      raw:
        "bruh wassup, it's movie night~\n(have a faptastic day!!!!)\n\n🥯fansly: http://melody.buzz \n🍆http://onlyfans.com/?ref=16786030 \n💦http://chaturbate.com/projektmelody",
      date: ~U[2025-02-26T02:14:59.000Z],
      url: "https://x.com/ProjektMelody/status/1894571836408504825"
    },
    %XPost{
      raw:
        "oh, damn---if @Lovense\never starts making tip-assissted ejaculating dildos. my community would frost my ass like a 30-layer wedding cake. i;d become a literal cannoli-hole... hmmm...\nANYWAY, i'm live: \n🥯fansly: http://melody.buzz \n🍆http://onlyfans.com/?ref=16786030 \n💦http://chaturbate.com/projektmelody\n",
      date: ~U[2025-03-01T01:05:12.000Z],
      url: "https://x.com/ProjektMelody/status/1895641435187151207"
    }
  ]

  @doc """
  Generates a basic x_post fixture.
  """
  def x_post_fixture(attrs \\ %{}) do
    defaults = %{
      raw: "default raw content",
      date: ~U[2025-01-01T00:00:00.000Z],
      url: "https://x.com/default/status/1234"
    }

    {:ok, x_post} =
      attrs
      |> Enum.into(defaults)
      |> XPost.create_x_post()

    x_post
  end

  @doc """
  Returns all live x_posts.
  """
  def x_posts_live() do
    @fixtures_live
  end

  @doc """
  Returns all offline x_posts.
  """
  def x_posts_offline() do
    @fixtures_offline
  end

  @doc """
  Returns all x_posts (both live and offline).
  """
  def all_x_posts() do
    x_posts_offline() ++ x_posts_live()
  end
end
@@ -8,7 +8,8 @@ services:
  # - "6969:6969/udp"
  # - "9000:9000/tcp"

  ## we don't run bright inside docker while we're working on it.
  ## we run it separately using `devbox run bright:dev`
  # bright:
  #   container_name: bright
  #   build:
@@ -90,7 +91,7 @@
      PGADMIN_DEFAULT_EMAIL: cj@futureporn.net
      PGADMIN_DEFAULT_PASSWORD: "password"
    env_file:
      - .kamal/secrets.development
      - .env.development

volumes:
  pg_data: