diff --git a/backend/config/config.exs b/backend/config/config.exs
index 22d1bc9..0e20258 100644
--- a/backend/config/config.exs
+++ b/backend/config/config.exs
@@ -16,10 +16,12 @@ config :backend, BackendWeb.Endpoint,
   secret_key_base: System.get_env("SECRET_KEY_BASE"),
   render_errors: [view: BackendWeb.ErrorView, accepts: ~w(json)]
 
+config :backend, :http, Backend.Http
+
 config :backend, Backend.Repo, queue_target: 5000
 
 config :backend, Backend.Elasticsearch.Cluster,
-  url: "http://elastic:9200",
+  url: "http://localhost:9200",
   api: Elasticsearch.API.HTTP,
   json_library: Jason
diff --git a/backend/lib/backend/application.ex b/backend/lib/backend/application.ex
index 2d8ec13..86c08bf 100644
--- a/backend/lib/backend/application.ex
+++ b/backend/lib/backend/application.ex
@@ -31,9 +31,10 @@ defmodule Backend.Application do
     ]
 
     children =
-      case Enum.member?(["true", 1, "1"], System.get_env("SKIP_CRAWL")) do
-        true -> children
-        false -> children ++ [Backend.Crawler.StaleInstanceManager]
+      if Enum.member?(["true", 1, "1"], System.get_env("SKIP_CRAWL")) or Mix.env() == :test do
+        children
+      else
+        children ++ [Backend.Crawler.StaleInstanceManager]
       end
 
     add_appsignal_probes()
diff --git a/backend/lib/backend/crawler/crawler.ex b/backend/lib/backend/crawler/crawler.ex
index 9f69866..62526d0 100644
--- a/backend/lib/backend/crawler/crawler.ex
+++ b/backend/lib/backend/crawler/crawler.ex
@@ -117,8 +117,8 @@ defmodule Backend.Crawler do
       try do
         %Crawler{state | result: curr.crawl(domain, result), found_api?: true}
       rescue
-        e in HTTPoison.Error ->
-          Map.put(state, :error, "HTTPoison error: " <> HTTPoison.Error.message(e))
+        e in Backend.HttpBehaviour.Error ->
+          Map.put(state, :error, "HTTP error: " <> e.message)
 
         e in Jason.DecodeError ->
           Map.put(state, :error, "Jason DecodeError: " <> Jason.DecodeError.message(e))
diff --git a/backend/lib/backend/crawler/crawlers/friendica.ex b/backend/lib/backend/crawler/crawlers/friendica.ex
index 27c748d..7ec90e3 100644
--- a/backend/lib/backend/crawler/crawlers/friendica.ex
+++ b/backend/lib/backend/crawler/crawlers/friendica.ex
@@ -51,7 +51,7 @@ defmodule Backend.Crawler.Crawlers.Friendica do
       |> Map.merge(nodeinfo_result)
 
     peers =
-      case get_and_decode("https://#{domain}/poco/@server") do
+      case http_client().get_and_decode("https://#{domain}/poco/@server") do
         {:ok, p} -> p
         {:error, _err} -> []
       end
@@ -71,7 +71,7 @@ defmodule Backend.Crawler.Crawlers.Friendica do
   end
 
   defp get_statistics(domain) do
-    get_and_decode("https://#{domain}/statistics.json")
+    http_client().get_and_decode("https://#{domain}/statistics.json")
   end
 
   defp to_domain(url) do
diff --git a/backend/lib/backend/crawler/crawlers/gnu_social.ex b/backend/lib/backend/crawler/crawlers/gnu_social.ex
index 67f55ea..c7e2307 100644
--- a/backend/lib/backend/crawler/crawlers/gnu_social.ex
+++ b/backend/lib/backend/crawler/crawlers/gnu_social.ex
@@ -14,7 +14,7 @@ defmodule Backend.Crawler.Crawlers.GnuSocial do
     if nodeinfo_result != nil do
       Map.get(nodeinfo_result, :instance_type) == :gnusocial
     else
-      case get_and_decode("https://#{domain}/api/statuses/public_timeline.json") do
+      case http_client().get_and_decode("https://#{domain}/api/statuses/public_timeline.json") do
         {:ok, statuses} -> is_list(statuses)
         {:error, _other} -> false
       end
@@ -86,7 +86,7 @@ defmodule Backend.Crawler.Crawlers.GnuSocial do
 
     Logger.debug("Crawling #{endpoint}")
 
-    statuses = get_and_decode!(endpoint)
+    statuses = http_client().get_and_decode!(endpoint)
 
     # Filter to statuses that are in the correct timeframe
     filtered_statuses =
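All of these crawl sites now route through `http_client()` instead of calling `get_and_decode/1` directly. The helper is a one-liner added to `Backend.Util` later in this diff; copied here for reference:

```elixir
# Resolves to Backend.Http in dev/prod (the :http config set in config.exs above)
# and to the Mox double HttpMock in tests (set in test_helper.exs at the end of
# this diff).
def http_client() do
  Application.get_env(:backend, :http, Backend.Http)
end
```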
diff --git a/backend/lib/backend/crawler/crawlers/mastodon.ex b/backend/lib/backend/crawler/crawlers/mastodon.ex
index dc9d2a6..2109339 100644
--- a/backend/lib/backend/crawler/crawlers/mastodon.ex
+++ b/backend/lib/backend/crawler/crawlers/mastodon.ex
@@ -21,7 +21,7 @@ defmodule Backend.Crawler.Crawlers.Mastodon do
         true -> false
       end
     else
-      case get_and_decode("https://#{domain}/api/v1/instance") do
+      case http_client().get_and_decode("https://#{domain}/api/v1/instance") do
        {:ok, %{"title" => _title}} -> true
        _other -> false
      end
@@ -41,7 +41,7 @@ defmodule Backend.Crawler.Crawlers.Mastodon do
   @impl ApiCrawler
   def crawl(domain, nodeinfo) do
-    instance = get_and_decode!("https://#{domain}/api/v1/instance")
+    instance = http_client().get_and_decode!("https://#{domain}/api/v1/instance")
 
     user_count = get_in(instance, ["stats", "user_count"])
 
     if is_above_user_threshold?(user_count) or has_opted_in?(domain) do
@@ -117,7 +117,7 @@ defmodule Backend.Crawler.Crawlers.Mastodon do
 
     Logger.debug("Crawling #{endpoint}")
 
-    statuses = get_and_decode!(endpoint)
+    statuses = http_client().get_and_decode!(endpoint)
 
     filtered_statuses =
       statuses
@@ -161,7 +161,7 @@ defmodule Backend.Crawler.Crawlers.Mastodon do
   defp get_peers(domain) do
     # servers may not publish peers
-    case get_and_decode("https://#{domain}/api/v1/instance/peers") do
+    case http_client().get_and_decode("https://#{domain}/api/v1/instance/peers") do
       {:ok, peers} -> peers
       {:error, _err} -> []
     end
   end
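Misskey's API is POST-based, so the crawler that follows uses the `post_and_decode` helpers rather than the GET variants. A sketch of the call shape, with an illustrative domain; the response keys are the ones the crawler pattern-matches on:

```elixir
{:ok, stats} = http_client().post_and_decode("https://misskey.example/api/stats")
%{"originalUsersCount" => user_count, "originalNotesCount" => status_count} = stats
```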
""" alias Backend.Crawler.ApiCrawler + alias Backend.Http @behaviour ApiCrawler import Backend.Crawler.Util @@ -37,7 +38,7 @@ defmodule Backend.Crawler.Crawlers.Misskey do @impl ApiCrawler def crawl(domain, nodeinfo) do with {:ok, %{"originalUsersCount" => user_count, "originalNotesCount" => status_count}} <- - post_and_decode("https://#{domain}/api/stats") do + http_client().post_and_decode("https://#{domain}/api/stats") do if is_above_user_threshold?(user_count) or has_opted_in?(domain) do Map.merge(nodeinfo, crawl_large_instance(domain, user_count, status_count)) else @@ -109,7 +110,7 @@ defmodule Backend.Crawler.Crawlers.Misskey do Logger.debug("Crawling #{endpoint} with untilId=#{until_id}") - statuses = post_and_decode!(endpoint, Jason.encode!(params)) + statuses = http_client().post_and_decode!(endpoint, Jason.encode!(params)) filtered_statuses = statuses @@ -153,9 +154,9 @@ defmodule Backend.Crawler.Crawlers.Misskey do end @spec get_version_and_description(String.t()) :: - {:ok, {String.t(), String.t()}} | {:error, Jason.DecodeError.t() | HTTPoison.Error.t()} + {:ok, {String.t(), String.t()}} | {:error, Jason.DecodeError.t() | Http.Error.t()} defp get_version_and_description(domain) do - case post_and_decode("https://#{domain}/api/meta") do + case http_client().post_and_decode("https://#{domain}/api/meta") do {:ok, %{"version" => version, "description" => description}} -> {:ok, {version, description}} @@ -166,7 +167,7 @@ defmodule Backend.Crawler.Crawlers.Misskey do @spec get_peers(String.t()) :: {:ok, [String.t()]} | {:error, Jason.DecodeError.t()} defp get_peers(domain) do - case get_and_decode("https://#{domain}/api/v1/instance/peers") do + case http_client().get_and_decode("https://#{domain}/api/v1/instance/peers") do {:ok, peers} -> {:ok, peers} {:error, _} -> {:ok, []} end diff --git a/backend/lib/backend/crawler/crawlers/nodeinfo.ex b/backend/lib/backend/crawler/crawlers/nodeinfo.ex index c3c002d..d26cdec 100644 --- a/backend/lib/backend/crawler/crawlers/nodeinfo.ex +++ b/backend/lib/backend/crawler/crawlers/nodeinfo.ex @@ -5,6 +5,7 @@ defmodule Backend.Crawler.Crawlers.Nodeinfo do """ alias Backend.Crawler.ApiCrawler + alias Backend.Http require Logger import Backend.Util import Backend.Crawler.Util @@ -13,7 +14,7 @@ defmodule Backend.Crawler.Crawlers.Nodeinfo do @impl ApiCrawler def allows_crawling?(domain) do [ - ".well-known/nodeinfo" + "/.well-known/nodeinfo" ] |> Enum.map(fn endpoint -> "https://#{domain}#{endpoint}" end) |> urls_are_crawlable?() @@ -36,26 +37,40 @@ defmodule Backend.Crawler.Crawlers.Nodeinfo do end @spec get_nodeinfo_url(String.t()) :: - {:ok, String.t()} | {:error, Jason.DecodeError.t() | HTTPoison.Error.t()} + {:ok, String.t()} | {:error, Jason.DecodeError.t() | Http.Error.t() | :invalid_body} defp get_nodeinfo_url(domain) do - case get_and_decode("https://#{domain}/.well-known/nodeinfo") do - {:ok, response} -> {:ok, process_nodeinfo_url(response)} - {:error, err} -> {:error, err} + with {:ok, response} <- + http_client().get_and_decode("https://#{domain}/.well-known/nodeinfo"), + {:ok, nodeinfo_url} <- process_nodeinfo_url(response) do + {:ok, nodeinfo_url} + else + {:error, error} -> {:error, error} + :error -> {:error, :invalid_body} end end - @spec process_nodeinfo_url(any()) :: String.t() + @spec process_nodeinfo_url(any()) :: {:ok, String.t()} | :error defp process_nodeinfo_url(response) do - response - |> Map.get("links") - |> Enum.filter(fn %{"rel" => rel} -> is_compatible_nodeinfo_version?(rel) end) - |> Kernel.hd() - |> Map.get("href") + 
diff --git a/backend/lib/backend/http.ex b/backend/lib/backend/http.ex
new file mode 100644
index 0000000..876de0f
--- /dev/null
+++ b/backend/lib/backend/http.ex
@@ -0,0 +1,71 @@
+defmodule Backend.Http do
+  @moduledoc """
+  A wrapper around HTTPoison. Using this wrapper makes it easy for us
+  to mock web responses in tests, and we can easily switch out HTTPoison for
+  another library if we want to.
+  """
+  @behaviour Backend.HttpBehaviour
+  alias Backend.HttpBehaviour.Error
+
+  import Backend.Util
+
+  @doc """
+  GETs from the given URL and returns the JSON-decoded response.
+  If the response is unsuccessful and a default value is given, this returns the default value.
+  Otherwise, unsuccessful responses return an error.
+  """
+  @impl true
+  def get_and_decode(url, pool \\ :default, timeout \\ 15_000, default \\ nil) do
+    case HTTPoison.get(url, [{"User-Agent", get_config(:user_agent)}],
+           hackney: [pool: pool],
+           recv_timeout: timeout,
+           timeout: timeout
+         ) do
+      {:ok, %HTTPoison.Response{body: body, status_code: status_code}}
+      when status_code >= 200 and status_code <= 299 ->
+        Jason.decode(body)
+
+      {:ok, %HTTPoison.Response{body: body, status_code: status_code}} ->
+        if not is_nil(default) do
+          {:ok, default}
+        else
+          {:error,
+           %Error{
+             message: "HTTP request failed with status code #{status_code}",
+             status_code: status_code,
+             body: body
+           }}
+        end
+
+      {:error, %HTTPoison.Error{} = error} ->
+        {:error, %Error{message: HTTPoison.Error.message(error)}}
+    end
+  end
+
+  @impl true
+  def get_and_decode!(url, pool \\ :default, timeout \\ 15_000, default \\ nil) do
+    case get_and_decode(url, pool, timeout, default) do
+      {:ok, decoded} -> decoded
+      {:error, error} -> raise error
+    end
+  end
+
+  @impl true
+  def post_and_decode(url, body \\ "") do
+    case HTTPoison.post(url, body, [{"User-Agent", get_config(:user_agent)}]) do
+      {:ok, %HTTPoison.Response{body: body}} ->
+        Jason.decode(body)
+
+      {:error, %HTTPoison.Error{} = error} ->
+        {:error, %Error{message: HTTPoison.Error.message(error)}}
+    end
+  end
+
+  @impl true
+  def post_and_decode!(url, body \\ "") do
+    case post_and_decode(url, body) do
+      {:ok, decoded} -> decoded
+      {:error, error} -> raise error
+    end
+  end
+end
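Call shapes for the new wrapper, with illustrative URLs; the `default` argument only kicks in on non-2xx responses:

```elixir
# Happy path: 2xx response, JSON-decoded body.
{:ok, instance} = Backend.Http.get_and_decode("https://mastodon.social/api/v1/instance")

# With a default: a 404 here yields {:ok, []} instead of {:error, %Error{...}}.
{:ok, peers} =
  Backend.Http.get_and_decode(
    "https://example.social/api/v1/instance/peers",
    :crawler,
    15_000,
    []
  )
```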
+ """ + + defmodule Error do + defstruct message: nil, status_code: nil, body: nil + @type t :: %__MODULE__{message: String.t(), status_code: integer | nil, body: term | nil} + end + + @type response :: {:ok, Response.t()} | {:error, __MODULE__.Error.t() | Jason.DecodeError.t()} + + @callback get_and_decode(String.t()) :: response + @callback get_and_decode(String.t(), Atom.t(), Integer.t(), any()) :: response + + @callback get_and_decode!(String.t()) :: Response.t() + @callback get_and_decode!(String.t(), Atom.t(), Integer.t(), any()) :: Response.t() + + @callback post_and_decode(String.t()) :: response() + @callback post_and_decode(String.t(), String.t()) :: response() + @callback post_and_decode!(String.t()) :: Response.t() + @callback post_and_decode!(String.t(), String.t()) :: Response.t() +end diff --git a/backend/lib/backend/util.ex b/backend/lib/backend/util.ex index ccbf0ed..3ed35bc 100644 --- a/backend/lib/backend/util.ex +++ b/backend/lib/backend/util.ex @@ -143,69 +143,12 @@ defmodule Backend.Util do map |> Map.new(fn {k, v} -> {String.to_atom(k), v} end) end - @doc """ - Gets and decodes a HTTP response. - """ - @spec get_and_decode(String.t(), Atom.t(), Integer.t()) :: - {:ok, any()} | {:error, Jason.DecodeError.t() | HTTPoison.Error.t()} - def get_and_decode(url, pool \\ :crawler, timeout \\ 15_000) do - case HTTPoison.get(url, [{"User-Agent", get_config(:user_agent)}], - hackney: [pool: pool], - recv_timeout: timeout, - timeout: timeout - ) do - {:ok, %{status_code: 200, body: body}} -> - Jason.decode(body) - - {:ok, %{status_code: 401}} -> - Jason.decode("[]") - - {:ok, %{status_code: 404}} -> - Jason.decode("[]") - - {:ok, %{body: body}} -> - {:error, %HTTPoison.Error{reason: "Non-200 response. Body: #{body}"}} - - {:error, err} -> - {:error, err} - end - end - - @spec get_and_decode!(String.t()) :: any() - def get_and_decode!(url) do - case get_and_decode(url) do - {:ok, decoded} -> decoded - {:error, error} -> raise error - end - end - - @doc """ - POSTS to a HTTP endpoint and decodes the JSON response. 
- """ - @spec post_and_decode(String.t(), String.t()) :: - {:ok, any()} | {:error, Jason.DecodeError.t() | HTTPoison.Error.t()} - def post_and_decode(url, body \\ "") do - case HTTPoison.post(url, body, [{"User-Agent", get_config(:user_agent)}], - hackney: [pool: :crawler], - recv_timeout: 15_000, - timeout: 15_000 - ) do - {:ok, %{status_code: 200, body: response_body}} -> Jason.decode(response_body) - {:ok, _} -> {:error, %HTTPoison.Error{reason: "Non-200 response"}} - {:error, err} -> {:error, err} - end - end - - @spec post_and_decode!(String.t(), String.t()) :: any() - def post_and_decode!(url, body \\ "") do - case post_and_decode(url, body) do - {:ok, decoded} -> decoded - {:error, error} -> raise error - end - end - @spec is_valid_domain?(String.t()) :: boolean def is_valid_domain?(domain) do Regex.match?(~r/^[\pL\d\.\-_]+\.[a-zA-Z]+$/, domain) end + + def http_client() do + Application.get_env(:backend, :http, Backend.Http) + end end diff --git a/backend/lib/backend_web/controllers/admin_login_controller.ex b/backend/lib/backend_web/controllers/admin_login_controller.ex index 3b781f8..974a300 100644 --- a/backend/lib/backend_web/controllers/admin_login_controller.ex +++ b/backend/lib/backend_web/controllers/admin_login_controller.ex @@ -5,7 +5,7 @@ defmodule BackendWeb.AdminLoginController do alias Backend.Mailer.UserEmail alias Mastodon.Messenger - action_fallback BackendWeb.FallbackController + action_fallback(BackendWeb.FallbackController) @doc """ Given an instance, looks up the login types (email or admin account) and returns them. The user can then @@ -24,7 +24,7 @@ defmodule BackendWeb.AdminLoginController do [error: "It is only possible to administer Mastodon and Pleroma instances."] true -> - case get_and_decode("https://#{cleaned_domain}/api/v1/instance") do + case http_client().get_and_decode("https://#{cleaned_domain}/api/v1/instance") do {:ok, instance_data} -> [instance_data: instance_data, cleaned_domain: cleaned_domain] @@ -40,7 +40,7 @@ defmodule BackendWeb.AdminLoginController do cleaned_domain = clean_domain(domain) {data_state, instance_data} = - get_and_decode("https://#{cleaned_domain}/api/v1/instance", + http_client().get_and_decode("https://#{cleaned_domain}/api/v1/instance", pool: :admin_login, timeout: 20_000 ) diff --git a/backend/mix.exs b/backend/mix.exs index cc02d54..036aa42 100644 --- a/backend/mix.exs +++ b/backend/mix.exs @@ -72,7 +72,8 @@ defmodule Backend.MixProject do {:scrivener_ecto, "~> 2.2"}, {:recase, "~> 0.7"}, {:ex_rated, "~> 2.1"}, - {:html_sanitize_ex, "~> 1.4"} + {:html_sanitize_ex, "~> 1.4"}, + {:mox, "~> 1.0", only: [:test]} ] end diff --git a/backend/mix.lock b/backend/mix.lock index 02b8ea6..bbf6e1b 100644 --- a/backend/mix.lock +++ b/backend/mix.lock @@ -42,6 +42,7 @@ "mime": {:hex, :mime, "2.0.5", "dc34c8efd439abe6ae0343edbb8556f4d63f178594894720607772a041b04b02", [:mix], [], "hexpm", "da0d64a365c45bc9935cc5c8a7fc5e49a0e0f9932a761c55d6c52b142780a05c"}, "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"}, "mochiweb": {:hex, :mochiweb, "2.22.0", "f104d6747c01a330c38613561977e565b788b9170055c5241ac9dd6e4617cba5", [:rebar3], [], "hexpm", "cbbd1fd315d283c576d1c8a13e0738f6dafb63dc840611249608697502a07655"}, + "mox": {:hex, :mox, "1.0.2", "dc2057289ac478b35760ba74165b4b3f402f68803dd5aecd3bfd19c183815d64", [:mix], [], "hexpm", "f9864921b3aaf763c8741b5b8e6f908f44566f1e427b2630e89e9a73b981fef2"}, 
"nebulex": {:hex, :nebulex, "2.4.2", "b3d2d86d57b15896fb8e6d6dd49b4a9dee2eedd6eddfb3b69bfdb616a09c2817", [:mix], [{:decorator, "~> 1.4", [hex: :decorator, repo: "hexpm", optional: true]}, {:shards, "~> 1.0", [hex: :shards, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "c9f888e5770fd47614c95990d0a02c3515216d51dc72e3c830eaf28f5649ba52"}, "parse_trans": {:hex, :parse_trans, "3.3.1", "16328ab840cc09919bd10dab29e431da3af9e9e7e7e6f0089dd5a2d2820011d8", [:rebar3], [], "hexpm", "07cd9577885f56362d414e8c4c4e6bdf10d43a8767abb92d24cbe8b24c54888b"}, "phoenix": {:hex, :phoenix, "1.7.3", "4d8eca2c020c9ed81a28e7a8c60e0a4f6f9f7f6e12eb91dfd01301eac07424c1", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.4", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "6b1bc308758f95ecf3e0d795389440a2ca88a903e0fda1f921c780918c16d640"}, diff --git a/backend/test/backend/crawler/crawlers/nodeinfo_test.exs b/backend/test/backend/crawler/crawlers/nodeinfo_test.exs new file mode 100644 index 0000000..92fe547 --- /dev/null +++ b/backend/test/backend/crawler/crawlers/nodeinfo_test.exs @@ -0,0 +1,138 @@ +defmodule Backend.Crawler.Crawlers.NodeinfoTest do + use Backend.DataCase + + alias Backend.Crawler.Crawlers.Nodeinfo + import Mox + + setup :verify_on_exit! 
diff --git a/backend/test/backend/crawler/crawlers/nodeinfo_test.exs b/backend/test/backend/crawler/crawlers/nodeinfo_test.exs
new file mode 100644
index 0000000..92fe547
--- /dev/null
+++ b/backend/test/backend/crawler/crawlers/nodeinfo_test.exs
@@ -0,0 +1,138 @@
+defmodule Backend.Crawler.Crawlers.NodeinfoTest do
+  use Backend.DataCase
+
+  alias Backend.Crawler.Crawlers.Nodeinfo
+  import Mox
+
+  setup :verify_on_exit!
+
+  describe "crawl/2" do
+    test "handles valid nodeinfo" do
+      expect(HttpMock, :get_and_decode, fn "https://mastodon.social/.well-known/nodeinfo" ->
+        {:ok,
+         %{
+           "links" => [
+             %{
+               "rel" => "http://nodeinfo.diaspora.software/ns/schema/2.0",
+               "href" => "https://mastodon.social/nodeinfo/2.0"
+             }
+           ]
+         }}
+      end)
+
+      expect(HttpMock, :get_and_decode, fn "https://mastodon.social/nodeinfo/2.0" ->
+        {:ok,
+         %{
+           "version" => "2.0",
+           "software" => %{
+             "name" => "Mastodon",
+             "version" => "1.2.3"
+           },
+           "protocols" => ["activitypub"],
+           "services" => %{
+             "inbound" => [],
+             "outbound" => []
+           },
+           "usage" => %{
+             "users" => %{
+               "total" => 100,
+               "activeMonth" => 1,
+               "activeHalfYear" => 2
+             },
+             "localPosts" => 3
+           },
+           "openRegistrations" => true,
+           "metadata" => %{}
+         }}
+      end)
+
+      result = Nodeinfo.crawl("mastodon.social", %{})
+
+      assert result == %{
+               description: nil,
+               user_count: 100,
+               status_count: 3,
+               statuses_seen: 0,
+               instance_type: :mastodon,
+               version: "1.2.3",
+               federation_restrictions: [],
+               interactions: %{},
+               peers: []
+             }
+    end
+
+    test "handles small instances" do
+      expect(HttpMock, :get_and_decode, fn "https://mastodon.social/.well-known/nodeinfo" ->
+        {:ok,
+         %{
+           "links" => [
+             %{
+               "rel" => "http://nodeinfo.diaspora.software/ns/schema/2.0",
+               "href" => "https://mastodon.social/nodeinfo/2.0"
+             }
+           ]
+         }}
+      end)
+
+      expect(HttpMock, :get_and_decode, fn "https://mastodon.social/nodeinfo/2.0" ->
+        {:ok,
+         %{
+           "version" => "2.0",
+           "software" => %{
+             "name" => "Mastodon",
+             "version" => "1.2.3"
+           },
+           "protocols" => ["activitypub"],
+           "services" => %{
+             "inbound" => [],
+             "outbound" => []
+           },
+           "usage" => %{
+             "users" => %{
+               "total" => 1,
+               "activeMonth" => 1,
+               "activeHalfYear" => 1
+             },
+             "localPosts" => 3
+           },
+           "openRegistrations" => true,
+           "metadata" => %{}
+         }}
+      end)
+
+      result = Nodeinfo.crawl("mastodon.social", %{})
+
+      assert result == %{
+               description: nil,
+               user_count: 1,
+               status_count: nil,
+               statuses_seen: 0,
+               instance_type: nil,
+               version: nil,
+               federation_restrictions: [],
+               interactions: %{},
+               peers: []
+             }
+    end
+
+    test "handles missing nodeinfo" do
+      expect(HttpMock, :get_and_decode, fn "https://mastodon.social/.well-known/nodeinfo" ->
+        {:ok, %{}}
+      end)
+
+      result = Nodeinfo.crawl("mastodon.social", %{})
+
+      assert result == %{
+               description: nil,
+               user_count: nil,
+               status_count: nil,
+               statuses_seen: 0,
+               instance_type: nil,
+               version: nil,
+               federation_restrictions: [],
+               interactions: %{},
+               peers: []
+             }
+    end
+  end
+end
diff --git a/backend/test/test_helper.exs b/backend/test/test_helper.exs
index a7d0703..292fc2d 100644
--- a/backend/test/test_helper.exs
+++ b/backend/test/test_helper.exs
@@ -1,2 +1,5 @@
+Mox.defmock(HttpMock, for: Backend.HttpBehaviour)
+Application.put_env(:backend, :http, HttpMock)
+
 ExUnit.start()
 Ecto.Adapters.SQL.Sandbox.mode(Backend.Repo, :manual)