Diffstat (limited to 'test'): 22 files changed, 1713 insertions, 164 deletions
diff --git a/test/fixtures/unindexed_fk.sql b/test/fixtures/unindexed_fk.sql
new file mode 100644
index 000000000..3b71679cf
--- /dev/null
+++ b/test/fixtures/unindexed_fk.sql
@@ -0,0 +1,27 @@
+-- Unindexed FK -- Missing indexes - For CI
+
+WITH y AS (
+SELECT
+pg_catalog.format('%I', c1.relname) AS referencing_tbl,
+pg_catalog.quote_ident(a1.attname) AS referencing_column,
+(SELECT pg_get_expr(indpred, indrelid) FROM pg_catalog.pg_index WHERE indrelid = t.conrelid AND indkey[0] = t.conkey[1] AND indpred IS NOT NULL LIMIT 1) partial_statement
+FROM pg_catalog.pg_constraint t
+JOIN pg_catalog.pg_attribute a1 ON a1.attrelid = t.conrelid AND a1.attnum = t.conkey[1]
+JOIN pg_catalog.pg_class c1 ON c1.oid = t.conrelid
+JOIN pg_catalog.pg_namespace n1 ON n1.oid = c1.relnamespace
+JOIN pg_catalog.pg_class c2 ON c2.oid = t.confrelid
+JOIN pg_catalog.pg_namespace n2 ON n2.oid = c2.relnamespace
+JOIN pg_catalog.pg_attribute a2 ON a2.attrelid = t.confrelid AND a2.attnum = t.confkey[1]
+WHERE t.contype = 'f'
+AND NOT EXISTS (
+SELECT 1
+FROM pg_catalog.pg_index i
+WHERE i.indrelid = t.conrelid
+AND i.indkey[0] = t.conkey[1]
+AND indpred IS NULL
+)
+)
+SELECT referencing_tbl || '.' || referencing_column as "column"
+FROM y
+WHERE (partial_statement IS NULL OR partial_statement <> ('(' || referencing_column || ' IS NOT NULL)'))
+ORDER BY 1;
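Note: the fixture above only detects unindexed foreign keys (Pleroma.SchemaTest later in this diff asserts the query returns no rows). The usual remedy for a column it flags is a plain, non-partial index on the referencing column. A minimal migration sketch follows; the module name and the bookmarks.folder_id column are hypothetical, used purely for illustration and not part of this changeset.

# Hypothetical sketch: if the fixture reported "bookmarks.folder_id",
# an ordinary B-tree index on that column clears the finding.
defmodule Pleroma.Repo.Migrations.AddIndexOnFlaggedForeignKey do
  use Ecto.Migration

  def change do
    # On a busy instance, index(..., concurrently: true) together with
    # @disable_ddl_transaction true avoids holding a long table lock.
    create_if_not_exists(index(:bookmarks, [:folder_id]))
  end
end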
\ No newline at end of file diff --git a/test/mix/tasks/pleroma/database_test.exs b/test/mix/tasks/pleroma/database_test.exs index fbc939171..d773038cb 100644 --- a/test/mix/tasks/pleroma/database_test.exs +++ b/test/mix/tasks/pleroma/database_test.exs @@ -7,6 +7,7 @@ defmodule Mix.Tasks.Pleroma.DatabaseTest do use Oban.Testing, repo: Pleroma.Repo alias Pleroma.Activity + alias Pleroma.Bookmark alias Pleroma.Object alias Pleroma.Repo alias Pleroma.User @@ -45,28 +46,509 @@ defmodule Mix.Tasks.Pleroma.DatabaseTest do end describe "prune_objects" do - test "it prunes old objects from the database" do - insert(:note) + setup do deadline = Pleroma.Config.get([:instance, :remote_post_retention_days]) + 1 - date = + old_insert_date = Timex.now() |> Timex.shift(days: -deadline) |> Timex.to_naive_datetime() |> NaiveDateTime.truncate(:second) - %{id: id} = + %{old_insert_date: old_insert_date} + end + + test "it prunes old objects from the database", %{old_insert_date: old_insert_date} do + insert(:note) + + %{id: note_remote_public_id} = :note |> insert() - |> Ecto.Changeset.change(%{inserted_at: date}) + |> Ecto.Changeset.change(%{updated_at: old_insert_date}) |> Repo.update!() + note_remote_non_public = + %{id: note_remote_non_public_id, data: note_remote_non_public_data} = + :note + |> insert() + + note_remote_non_public + |> Ecto.Changeset.change(%{ + updated_at: old_insert_date, + data: note_remote_non_public_data |> update_in(["to"], fn _ -> [] end) + }) + |> Repo.update!() + + assert length(Repo.all(Object)) == 3 + + Mix.Tasks.Pleroma.Database.run(["prune_objects"]) + + assert length(Repo.all(Object)) == 1 + refute Object.get_by_id(note_remote_public_id) + refute Object.get_by_id(note_remote_non_public_id) + end + + test "it cleans up bookmarks", %{old_insert_date: old_insert_date} do + user = insert(:user) + {:ok, old_object_activity} = CommonAPI.post(user, %{status: "yadayada"}) + + Repo.one(Object) + |> Ecto.Changeset.change(%{updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, new_object_activity} = CommonAPI.post(user, %{status: "yadayada"}) + + {:ok, _} = Bookmark.create(user.id, old_object_activity.id) + {:ok, _} = Bookmark.create(user.id, new_object_activity.id) + assert length(Repo.all(Object)) == 2 + assert length(Repo.all(Bookmark)) == 2 Mix.Tasks.Pleroma.Database.run(["prune_objects"]) assert length(Repo.all(Object)) == 1 - refute Object.get_by_id(id) + assert length(Repo.all(Bookmark)) == 1 + refute Bookmark.get(user.id, old_object_activity.id) + end + + test "with the --keep-non-public option it still keeps non-public posts even if they are not local", + %{old_insert_date: old_insert_date} do + insert(:note) + + %{id: note_remote_id} = + :note + |> insert() + |> Ecto.Changeset.change(%{updated_at: old_insert_date}) + |> Repo.update!() + + note_remote_non_public = + %{data: note_remote_non_public_data} = + :note + |> insert() + + note_remote_non_public + |> Ecto.Changeset.change(%{ + updated_at: old_insert_date, + data: note_remote_non_public_data |> update_in(["to"], fn _ -> [] end) + }) + |> Repo.update!() + + assert length(Repo.all(Object)) == 3 + + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--keep-non-public"]) + + assert length(Repo.all(Object)) == 2 + refute Object.get_by_id(note_remote_id) + end + + test "with the --keep-threads and --keep-non-public option it keeps old threads with non-public replies even if the interaction is not local", + %{old_insert_date: old_insert_date} do + # For non-public we only check Create Activities because only these are 
relevant for threads + # Flags are always non-public, Announces from relays can be non-public... + + remote_user1 = insert(:user, local: false) + remote_user2 = insert(:user, local: false) + + # Old remote non-public reply (should be kept) + {:ok, old_remote_post1_activity} = + CommonAPI.post(remote_user1, %{status: "some thing", local: false}) + + old_remote_post1_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_remote_non_public_reply_activity} = + CommonAPI.post(remote_user2, %{ + status: "some reply", + in_reply_to_status_id: old_remote_post1_activity.id + }) + + old_remote_non_public_reply_activity + |> Ecto.Changeset.change(%{ + local: false, + updated_at: old_insert_date, + data: old_remote_non_public_reply_activity.data |> update_in(["to"], fn _ -> [] end) + }) + |> Repo.update!() + + # Old remote non-public Announce (should be removed) + {:ok, old_remote_post2_activity = %{data: %{"object" => old_remote_post2_id}}} = + CommonAPI.post(remote_user1, %{status: "some thing", local: false}) + + old_remote_post2_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_remote_non_public_repeat_activity} = + CommonAPI.repeat(old_remote_post2_activity.id, remote_user2) + + old_remote_non_public_repeat_activity + |> Ecto.Changeset.change(%{ + local: false, + updated_at: old_insert_date, + data: old_remote_non_public_repeat_activity.data |> update_in(["to"], fn _ -> [] end) + }) + |> Repo.update!() + + assert length(Repo.all(Object)) == 3 + + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--keep-threads", "--keep-non-public"]) + + Repo.all(Pleroma.Activity) + assert length(Repo.all(Object)) == 2 + refute Object.get_by_ap_id(old_remote_post2_id) + end + + test "with the --keep-threads option it still keeps non-old threads even with no local interactions" do + remote_user = insert(:user, local: false) + remote_user2 = insert(:user, local: false) + + {:ok, remote_post_activity} = + CommonAPI.post(remote_user, %{status: "some thing", local: false}) + + {:ok, remote_post_reply_activity} = + CommonAPI.post(remote_user2, %{ + status: "some reply", + in_reply_to_status_id: remote_post_activity.id + }) + + remote_post_activity + |> Ecto.Changeset.change(%{local: false}) + |> Repo.update!() + + remote_post_reply_activity + |> Ecto.Changeset.change(%{local: false}) + |> Repo.update!() + + assert length(Repo.all(Object)) == 2 + + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--keep-threads"]) + + assert length(Repo.all(Object)) == 2 + end + + test "with the --keep-threads option it deletes old threads with no local interaction", %{ + old_insert_date: old_insert_date + } do + remote_user = insert(:user, local: false) + remote_user2 = insert(:user, local: false) + + {:ok, old_remote_post_activity} = + CommonAPI.post(remote_user, %{status: "some thing", local: false}) + + old_remote_post_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_remote_post_reply_activity} = + CommonAPI.post(remote_user2, %{ + status: "some reply", + in_reply_to_status_id: old_remote_post_activity.id + }) + + old_remote_post_reply_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_favourite_activity} = + CommonAPI.favorite(remote_user2, old_remote_post_activity.id) + + old_favourite_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> 
Repo.update!() + + {:ok, old_repeat_activity} = CommonAPI.repeat(old_remote_post_activity.id, remote_user2) + + old_repeat_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + assert length(Repo.all(Object)) == 2 + + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--keep-threads"]) + + assert length(Repo.all(Object)) == 0 + end + + test "with the --keep-threads option it keeps old threads with local interaction", %{ + old_insert_date: old_insert_date + } do + remote_user = insert(:user, local: false) + local_user = insert(:user, local: true) + + # local reply + {:ok, old_remote_post1_activity} = + CommonAPI.post(remote_user, %{status: "some thing", local: false}) + + old_remote_post1_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_local_post2_reply_activity} = + CommonAPI.post(local_user, %{ + status: "some reply", + in_reply_to_status_id: old_remote_post1_activity.id + }) + + old_local_post2_reply_activity + |> Ecto.Changeset.change(%{local: true, updated_at: old_insert_date}) + |> Repo.update!() + + # local Like + {:ok, old_remote_post3_activity} = + CommonAPI.post(remote_user, %{status: "some thing", local: false}) + + old_remote_post3_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_favourite_activity} = CommonAPI.favorite(local_user, old_remote_post3_activity.id) + + old_favourite_activity + |> Ecto.Changeset.change(%{local: true, updated_at: old_insert_date}) + |> Repo.update!() + + # local Announce + {:ok, old_remote_post4_activity} = + CommonAPI.post(remote_user, %{status: "some thing", local: false}) + + old_remote_post4_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_repeat_activity} = CommonAPI.repeat(old_remote_post4_activity.id, local_user) + + old_repeat_activity + |> Ecto.Changeset.change(%{local: true, updated_at: old_insert_date}) + |> Repo.update!() + + assert length(Repo.all(Object)) == 4 + + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--keep-threads"]) + + assert length(Repo.all(Object)) == 4 + end + + test "with the --keep-threads option it keeps old threads with bookmarked posts", %{ + old_insert_date: old_insert_date + } do + remote_user = insert(:user, local: false) + local_user = insert(:user, local: true) + + {:ok, old_remote_post_activity} = + CommonAPI.post(remote_user, %{status: "some thing", local: false}) + + old_remote_post_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + Pleroma.Bookmark.create(local_user.id, old_remote_post_activity.id) + + assert length(Repo.all(Object)) == 1 + + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--keep-threads"]) + + assert length(Repo.all(Object)) == 1 + end + + test "We don't have unexpected tables which may contain objects that are referenced by activities" do + # We can delete orphaned activities. For that we look for the objects + # they reference in the 'objects', 'activities', and 'users' table. + # If someone adds another table with objects (idk, maybe with separate + # relations, or collections or w/e), then we need to make sure we + # add logic for that in the 'prune_objects' task so that we don't + # wrongly delete their corresponding activities. + # So when someone adds (or removes) a table, this test will fail. 
+ # Either the table contains objects which can be referenced from the + # activities table + # => in that case the prune_objects job should be adapted so we don't + # delete activities who still have the referenced object. + # Or it doesn't contain objects which can be referenced from the activities table + # => in that case you can add/remove the table to/from this (sorted) list. + + assert Repo.query!( + "SELECT table_name FROM information_schema.tables WHERE table_schema='public' AND table_type='BASE TABLE';" + ).rows + |> Enum.sort() == [ + ["activities"], + ["announcement_read_relationships"], + ["announcements"], + ["apps"], + ["backups"], + ["bookmark_folders"], + ["bookmarks"], + ["chat_message_references"], + ["chats"], + ["config"], + ["conversation_participation_recipient_ships"], + ["conversation_participations"], + ["conversations"], + ["counter_cache"], + ["data_migration_failed_ids"], + ["data_migrations"], + ["deliveries"], + ["filters"], + ["following_relationships"], + ["hashtags"], + ["hashtags_objects"], + ["instances"], + ["lists"], + ["markers"], + ["mfa_tokens"], + ["moderation_log"], + ["notifications"], + ["oauth_authorizations"], + ["oauth_tokens"], + ["oban_jobs"], + ["oban_peers"], + ["objects"], + ["password_reset_tokens"], + ["push_subscriptions"], + ["registrations"], + ["report_notes"], + ["rich_media_card"], + ["rules"], + ["scheduled_activities"], + ["schema_migrations"], + ["thread_mutes"], + # ["user_follows_hashtag"], # not in pleroma + # ["user_frontend_setting_profiles"], # not in pleroma + ["user_invite_tokens"], + ["user_notes"], + ["user_relationships"], + ["users"] + ] + end + + test "it prunes orphaned activities with the --prune-orphaned-activities" do + # Add a remote activity which references an Object + %Object{} |> Map.merge(%{data: %{"id" => "object_for_activity"}}) |> Repo.insert() + + %Activity{} + |> Map.merge(%{ + local: false, + data: %{"id" => "remote_activity_with_object", "object" => "object_for_activity"} + }) + |> Repo.insert() + + # Add a remote activity which references an activity + %Activity{} + |> Map.merge(%{ + local: false, + data: %{ + "id" => "remote_activity_with_activity", + "object" => "remote_activity_with_object" + } + }) + |> Repo.insert() + + # Add a remote activity which references an Actor + %User{} |> Map.merge(%{ap_id: "actor"}) |> Repo.insert() + + %Activity{} + |> Map.merge(%{ + local: false, + data: %{"id" => "remote_activity_with_actor", "object" => "actor"} + }) + |> Repo.insert() + + # Add a remote activity without existing referenced object, activity or actor + %Activity{} + |> Map.merge(%{ + local: false, + data: %{ + "id" => "remote_activity_without_existing_referenced_object", + "object" => "non_existing" + } + }) + |> Repo.insert() + + # Add a local activity without existing referenced object, activity or actor + %Activity{} + |> Map.merge(%{ + local: true, + data: %{"id" => "local_activity_with_actor", "object" => "non_existing"} + }) + |> Repo.insert() + + # The remote activities without existing reference, + # and only the remote activities without existing reference, are deleted + # if, and only if, we provide the --prune-orphaned-activities option + assert length(Repo.all(Activity)) == 5 + Mix.Tasks.Pleroma.Database.run(["prune_objects"]) + assert length(Repo.all(Activity)) == 5 + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--prune-orphaned-activities"]) + activities = Repo.all(Activity) + + assert "remote_activity_without_existing_referenced_object" not in Enum.map( + activities, + fn a 
-> a.data["id"] end + ) + + assert length(activities) == 4 + end + + test "it prunes orphaned activities with the --prune-orphaned-activities when the objects are referenced from an array" do + %Object{} |> Map.merge(%{data: %{"id" => "existing_object"}}) |> Repo.insert() + %User{} |> Map.merge(%{ap_id: "existing_actor"}) |> Repo.insert() + + # Multiple objects, one object exists (keep) + %Activity{} + |> Map.merge(%{ + local: false, + data: %{ + "id" => "remote_activity_existing_object", + "object" => ["non_ existing_object", "existing_object"] + } + }) + |> Repo.insert() + + # Multiple objects, one actor exists (keep) + %Activity{} + |> Map.merge(%{ + local: false, + data: %{ + "id" => "remote_activity_existing_actor", + "object" => ["non_ existing_object", "existing_actor"] + } + }) + |> Repo.insert() + + # Multiple objects, one activity exists (keep) + %Activity{} + |> Map.merge(%{ + local: false, + data: %{ + "id" => "remote_activity_existing_activity", + "object" => ["non_ existing_object", "remote_activity_existing_actor"] + } + }) + |> Repo.insert() + + # Multiple objects none exist (prune) + %Activity{} + |> Map.merge(%{ + local: false, + data: %{ + "id" => "remote_activity_without_existing_referenced_object", + "object" => ["owo", "whats_this"] + } + }) + |> Repo.insert() + + assert length(Repo.all(Activity)) == 4 + Mix.Tasks.Pleroma.Database.run(["prune_objects"]) + assert length(Repo.all(Activity)) == 4 + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--prune-orphaned-activities"]) + activities = Repo.all(Activity) + assert length(activities) == 3 + + assert "remote_activity_without_existing_referenced_object" not in Enum.map( + activities, + fn a -> a.data["id"] end + ) + + assert length(activities) == 3 end end diff --git a/test/pleroma/notification_test.exs b/test/pleroma/notification_test.exs index ecdb32e32..2c582c708 100644 --- a/test/pleroma/notification_test.exs +++ b/test/pleroma/notification_test.exs @@ -859,22 +859,6 @@ defmodule Pleroma.NotificationTest do assert Enum.empty?(Notification.for_user(user)) end - test "replying to a deleted post without tagging does not generate a notification" do - user = insert(:user) - other_user = insert(:user) - - {:ok, activity} = CommonAPI.post(user, %{status: "test post"}) - {:ok, _deletion_activity} = CommonAPI.delete(activity.id, user) - - {:ok, _reply_activity} = - CommonAPI.post(other_user, %{ - status: "test reply", - in_reply_to_status_id: activity.id - }) - - assert Enum.empty?(Notification.for_user(user)) - end - test "notifications are deleted if a local user is deleted" do user = insert(:user) other_user = insert(:user) diff --git a/test/pleroma/scheduled_activity_test.exs b/test/pleroma/scheduled_activity_test.exs index 4818e8bcf..aaf643cfc 100644 --- a/test/pleroma/scheduled_activity_test.exs +++ b/test/pleroma/scheduled_activity_test.exs @@ -31,8 +31,7 @@ defmodule Pleroma.ScheduledActivityTest do {:ok, sa1} = ScheduledActivity.create(user, attrs) {:ok, sa2} = ScheduledActivity.create(user, attrs) - jobs = - Repo.all(from(j in Oban.Job, where: j.queue == "scheduled_activities", select: j.args)) + jobs = Repo.all(from(j in Oban.Job, where: j.queue == "federator_outgoing", select: j.args)) assert jobs == [%{"activity_id" => sa1.id}, %{"activity_id" => sa2.id}] end diff --git a/test/pleroma/schema_test.exs b/test/pleroma/schema_test.exs new file mode 100644 index 000000000..9bddd2031 --- /dev/null +++ b/test/pleroma/schema_test.exs @@ -0,0 +1,17 @@ +# Pleroma: A lightweight social networking server +# Copyright © 
2017-2022 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.SchemaTest do + use Pleroma.DataCase, async: true + + alias Pleroma.Repo + + test "No unindexed foreign keys" do + query = File.read!("test/fixtures/unindexed_fk.sql") + + {:ok, result} = Repo.query(query) + + assert Enum.empty?(result.rows) + end +end diff --git a/test/pleroma/search/healthcheck_test.exs b/test/pleroma/search/healthcheck_test.exs new file mode 100644 index 000000000..e7649d949 --- /dev/null +++ b/test/pleroma/search/healthcheck_test.exs @@ -0,0 +1,49 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Search.HealthcheckTest do + use Pleroma.DataCase + + import Tesla.Mock + + alias Pleroma.Search.Healthcheck + + @good1 "http://good1.example.com/healthz" + @good2 "http://good2.example.com/health" + @bad "http://bad.example.com/healthy" + + setup do + mock(fn + %{method: :get, url: @good1} -> + %Tesla.Env{ + status: 200, + body: "" + } + + %{method: :get, url: @good2} -> + %Tesla.Env{ + status: 200, + body: "" + } + + %{method: :get, url: @bad} -> + %Tesla.Env{ + status: 503, + body: "" + } + end) + + :ok + end + + test "true for 200 responses" do + assert Healthcheck.check([@good1]) + assert Healthcheck.check([@good1, @good2]) + end + + test "false if any response is not a 200" do + refute Healthcheck.check([@bad]) + refute Healthcheck.check([@good1, @bad]) + end +end diff --git a/test/pleroma/search/qdrant_search_test.exs b/test/pleroma/search/qdrant_search_test.exs new file mode 100644 index 000000000..47a77a391 --- /dev/null +++ b/test/pleroma/search/qdrant_search_test.exs @@ -0,0 +1,199 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Search.QdrantSearchTest do + use Pleroma.DataCase, async: true + use Oban.Testing, repo: Pleroma.Repo + + import Pleroma.Factory + import Mox + + alias Pleroma.Search.QdrantSearch + alias Pleroma.UnstubbedConfigMock, as: Config + alias Pleroma.Web.CommonAPI + alias Pleroma.Workers.SearchIndexingWorker + + describe "Qdrant search" do + test "returns the correct healthcheck endpoints" do + # No openai healthcheck URL + Config + |> expect(:get, 2, fn + [Pleroma.Search.QdrantSearch, key], nil -> + %{qdrant_url: "https://qdrant.url"}[key] + end) + + [health_endpoint] = QdrantSearch.healthcheck_endpoints() + + assert "https://qdrant.url/healthz" == health_endpoint + + # Set openai healthcheck URL + Config + |> expect(:get, 2, fn + [Pleroma.Search.QdrantSearch, key], nil -> + %{qdrant_url: "https://qdrant.url", openai_healthcheck_url: "https://openai.url/health"}[ + key + ] + end) + + [_, health_endpoint] = QdrantSearch.healthcheck_endpoints() + + assert "https://openai.url/health" == health_endpoint + end + + test "searches for a term by encoding it and sending it to qdrant" do + user = insert(:user) + + {:ok, activity} = + CommonAPI.post(user, %{ + status: "guys i just don't wanna leave the swamp", + visibility: "public" + }) + + Config + |> expect(:get, 3, fn + [Pleroma.Search, :module], nil -> + QdrantSearch + + [Pleroma.Search.QdrantSearch, key], nil -> + %{ + openai_model: "a_model", + openai_url: "https://openai.url", + qdrant_url: "https://qdrant.url" + }[key] + end) + + Tesla.Mock.mock(fn + %{url: "https://openai.url/v1/embeddings", method: :post} -> + 
Tesla.Mock.json(%{ + data: [%{embedding: [1, 2, 3]}] + }) + + %{url: "https://qdrant.url/collections/posts/points/search", method: :post, body: body} -> + data = Jason.decode!(body) + refute data["filter"] + + Tesla.Mock.json(%{ + result: [%{"id" => activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!()}] + }) + end) + + results = QdrantSearch.search(nil, "guys i just don't wanna leave the swamp", %{}) + + assert results == [activity] + end + + test "for a given actor, ask for only relevant matches" do + user = insert(:user) + + {:ok, activity} = + CommonAPI.post(user, %{ + status: "guys i just don't wanna leave the swamp", + visibility: "public" + }) + + Config + |> expect(:get, 3, fn + [Pleroma.Search, :module], nil -> + QdrantSearch + + [Pleroma.Search.QdrantSearch, key], nil -> + %{ + openai_model: "a_model", + openai_url: "https://openai.url", + qdrant_url: "https://qdrant.url" + }[key] + end) + + Tesla.Mock.mock(fn + %{url: "https://openai.url/v1/embeddings", method: :post} -> + Tesla.Mock.json(%{ + data: [%{embedding: [1, 2, 3]}] + }) + + %{url: "https://qdrant.url/collections/posts/points/search", method: :post, body: body} -> + data = Jason.decode!(body) + + assert data["filter"] == %{ + "must" => [%{"key" => "actor", "match" => %{"value" => user.ap_id}}] + } + + Tesla.Mock.json(%{ + result: [%{"id" => activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!()}] + }) + end) + + results = + QdrantSearch.search(nil, "guys i just don't wanna leave the swamp", %{author: user}) + + assert results == [activity] + end + + test "indexes a public post on creation, deletes from the index on deletion" do + user = insert(:user) + + Tesla.Mock.mock(fn + %{method: :post, url: "https://openai.url/v1/embeddings"} -> + send(self(), "posted_to_openai") + + Tesla.Mock.json(%{ + data: [%{embedding: [1, 2, 3]}] + }) + + %{method: :put, url: "https://qdrant.url/collections/posts/points", body: body} -> + send(self(), "posted_to_qdrant") + + data = Jason.decode!(body) + %{"points" => [%{"vector" => vector, "payload" => payload}]} = data + + assert vector == [1, 2, 3] + assert payload["actor"] + assert payload["published_at"] + + Tesla.Mock.json("ok") + + %{method: :post, url: "https://qdrant.url/collections/posts/points/delete"} -> + send(self(), "deleted_from_qdrant") + Tesla.Mock.json("ok") + end) + + Config + |> expect(:get, 6, fn + [Pleroma.Search, :module], nil -> + QdrantSearch + + [Pleroma.Search.QdrantSearch, key], nil -> + %{ + openai_model: "a_model", + openai_url: "https://openai.url", + qdrant_url: "https://qdrant.url" + }[key] + end) + + {:ok, activity} = + CommonAPI.post(user, %{ + status: "guys i just don't wanna leave the swamp", + visibility: "public" + }) + + args = %{"op" => "add_to_index", "activity" => activity.id} + + assert_enqueued( + worker: SearchIndexingWorker, + args: args + ) + + assert :ok = perform_job(SearchIndexingWorker, args) + assert_received("posted_to_openai") + assert_received("posted_to_qdrant") + + {:ok, _} = CommonAPI.delete(activity.id, user) + + delete_args = %{"op" => "remove_from_index", "object" => activity.object.id} + assert_enqueued(worker: SearchIndexingWorker, args: delete_args) + assert :ok = perform_job(SearchIndexingWorker, delete_args) + + assert_received("deleted_from_qdrant") + end + end +end diff --git a/test/pleroma/signature_test.exs b/test/pleroma/signature_test.exs index 8edf67a7b..572d7acc3 100644 --- a/test/pleroma/signature_test.exs +++ b/test/pleroma/signature_test.exs @@ -67,6 +67,14 @@ defmodule Pleroma.SignatureTest do end end + 
describe "get_actor_id/1" do + test "it returns actor id" do + ap_id = "https://mastodon.social/users/lambadalambda" + + assert Signature.get_actor_id(make_fake_conn(ap_id)) == {:ok, ap_id} + end + end + describe "sign/2" do test "it returns signature headers" do user = diff --git a/test/pleroma/uploaders/ipfs_test.exs b/test/pleroma/uploaders/ipfs_test.exs new file mode 100644 index 000000000..bdf2933ac --- /dev/null +++ b/test/pleroma/uploaders/ipfs_test.exs @@ -0,0 +1,155 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Uploaders.IPFSTest do + use Pleroma.DataCase + + alias Pleroma.Uploaders.IPFS + alias Tesla.Multipart + + import ExUnit.CaptureLog + import Mock + import Mox + + alias Pleroma.UnstubbedConfigMock, as: Config + + describe "get_file/1" do + setup do + Config + |> expect(:get, fn [Pleroma.Upload, :uploader] -> Pleroma.Uploaders.IPFS end) + |> expect(:get, fn [Pleroma.Upload, :base_url] -> nil end) + |> expect(:get, fn [Pleroma.Uploaders.IPFS, :public_endpoint] -> nil end) + + :ok + end + + test "it returns path to ipfs file with cid as subdomain" do + Config + |> expect(:get, fn [Pleroma.Uploaders.IPFS, :get_gateway_url] -> + "https://{CID}.ipfs.mydomain.com" + end) + + assert IPFS.get_file("testcid") == { + :ok, + {:url, "https://testcid.ipfs.mydomain.com"} + } + end + + test "it returns path to ipfs file with cid as path" do + Config + |> expect(:get, fn [Pleroma.Uploaders.IPFS, :get_gateway_url] -> + "https://ipfs.mydomain.com/ipfs/{CID}" + end) + + assert IPFS.get_file("testcid") == { + :ok, + {:url, "https://ipfs.mydomain.com/ipfs/testcid"} + } + end + end + + describe "put_file/1" do + setup do + Config + |> expect(:get, fn [Pleroma.Uploaders.IPFS, :post_gateway_url] -> + "http://localhost:5001" + end) + + file_upload = %Pleroma.Upload{ + name: "image-tet.jpg", + content_type: "image/jpeg", + path: "test_folder/image-tet.jpg", + tempfile: Path.absname("test/instance_static/add/shortcode.png") + } + + mp = + Multipart.new() + |> Multipart.add_content_type_param("charset=utf-8") + |> Multipart.add_file(file_upload.tempfile) + + [file_upload: file_upload, mp: mp] + end + + test "save file", %{file_upload: file_upload} do + with_mock Pleroma.HTTP, + post: fn "http://localhost:5001/api/v0/add", + _mp, + [], + params: ["cid-version": "1"], + pool: :upload -> + {:ok, + %Tesla.Env{ + status: 200, + body: + "{\"Name\":\"image-tet.jpg\",\"Size\":\"5000\", \"Hash\":\"bafybeicrh7ltzx52yxcwrvxxckfmwhqdgsb6qym6dxqm2a4ymsakeshwoi\"}" + }} + end do + assert IPFS.put_file(file_upload) == + {:ok, {:file, "bafybeicrh7ltzx52yxcwrvxxckfmwhqdgsb6qym6dxqm2a4ymsakeshwoi"}} + end + end + + test "returns error", %{file_upload: file_upload} do + with_mock Pleroma.HTTP, + post: fn "http://localhost:5001/api/v0/add", + _mp, + [], + params: ["cid-version": "1"], + pool: :upload -> + {:error, "IPFS Gateway upload failed"} + end do + assert capture_log(fn -> + assert IPFS.put_file(file_upload) == {:error, "IPFS Gateway upload failed"} + end) =~ "Elixir.Pleroma.Uploaders.IPFS: {:error, \"IPFS Gateway upload failed\"}" + end + end + + test "returns error if JSON decode fails", %{file_upload: file_upload} do + with_mock Pleroma.HTTP, [], + post: fn "http://localhost:5001/api/v0/add", + _mp, + [], + params: ["cid-version": "1"], + pool: :upload -> + {:ok, %Tesla.Env{status: 200, body: "invalid"}} + end do + assert capture_log(fn -> + assert IPFS.put_file(file_upload) == 
{:error, "JSON decode failed"} + end) =~ + "Elixir.Pleroma.Uploaders.IPFS: {:error, %Jason.DecodeError" + end + end + + test "returns error if JSON body doesn't contain Hash key", %{file_upload: file_upload} do + with_mock Pleroma.HTTP, [], + post: fn "http://localhost:5001/api/v0/add", + _mp, + [], + params: ["cid-version": "1"], + pool: :upload -> + {:ok, %Tesla.Env{status: 200, body: "{\"key\": \"value\"}"}} + end do + assert IPFS.put_file(file_upload) == {:error, "JSON doesn't contain Hash key"} + end + end + end + + describe "delete_file/1" do + setup do + Config + |> expect(:get, fn [Pleroma.Uploaders.IPFS, :post_gateway_url] -> + "http://localhost:5001" + end) + + :ok + end + + test_with_mock "deletes file", Pleroma.HTTP, + post: fn "http://localhost:5001/api/v0/files/rm", "", [], params: [arg: "image.jpg"] -> + {:ok, %{status: 204}} + end do + assert :ok = IPFS.delete_file("image.jpg") + end + end +end diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index 524294385..d278125ee 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -291,9 +291,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do body: featured_data, headers: [{"content-type", "application/activity+json"}] } - end) - Tesla.Mock.mock_global(fn %{ method: :get, url: ^object_url @@ -306,7 +304,18 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do end) {:ok, user} = ActivityPub.make_user_from_ap_id(ap_id) - Process.sleep(50) + + assert_enqueued( + worker: Pleroma.Workers.RemoteFetcherWorker, + args: %{ + "op" => "fetch_remote", + "id" => object_url, + "depth" => 1 + } + ) + + # wait for oban + Pleroma.Tests.ObanHelpers.perform_all() assert user.featured_address == featured_url assert Map.has_key?(user.pinned_objects, object_url) @@ -368,9 +377,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do body: featured_data, headers: [{"content-type", "application/activity+json"}] } - end) - Tesla.Mock.mock_global(fn %{ method: :get, url: ^object_url @@ -383,7 +390,18 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do end) {:ok, user} = ActivityPub.make_user_from_ap_id(ap_id) - Process.sleep(50) + + assert_enqueued( + worker: Pleroma.Workers.RemoteFetcherWorker, + args: %{ + "op" => "fetch_remote", + "id" => object_url, + "depth" => 1 + } + ) + + # wait for oban + Pleroma.Tests.ObanHelpers.perform_all() assert user.featured_address == featured_url assert Map.has_key?(user.pinned_objects, object_url) diff --git a/test/pleroma/web/activity_pub/mrf/anti_mention_spam_policy_test.exs b/test/pleroma/web/activity_pub/mrf/anti_mention_spam_policy_test.exs new file mode 100644 index 000000000..63947858c --- /dev/null +++ b/test/pleroma/web/activity_pub/mrf/anti_mention_spam_policy_test.exs @@ -0,0 +1,65 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicyTest do + use Pleroma.DataCase + import Pleroma.Factory + alias Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy + + test "it allows posts without mentions" do + user = insert(:user, local: false) + assert user.note_count == 0 + + message = %{ + "type" => "Create", + "actor" => user.ap_id + } + + {:ok, _message} = AntiMentionSpamPolicy.filter(message) + end + + test "it allows posts from users with followers, posts, and age" do + user = + insert(:user, + 
local: false, + follower_count: 1, + note_count: 1, + inserted_at: ~N[1970-01-01 00:00:00] + ) + + message = %{ + "type" => "Create", + "actor" => user.ap_id + } + + {:ok, _message} = AntiMentionSpamPolicy.filter(message) + end + + test "it allows posts from local users" do + user = insert(:user, local: true) + + message = %{ + "type" => "Create", + "actor" => user.ap_id + } + + {:ok, _message} = AntiMentionSpamPolicy.filter(message) + end + + test "it rejects posts with mentions from users without followers" do + user = insert(:user, local: false, follower_count: 0) + + message = %{ + "type" => "Create", + "actor" => user.ap_id, + "object" => %{ + "to" => ["https://pleroma.soykaf.com/users/1"], + "cc" => ["https://pleroma.soykaf.com/users/1"], + "actor" => user.ap_id + } + } + + {:reject, _message} = AntiMentionSpamPolicy.filter(message) + end +end diff --git a/test/pleroma/web/activity_pub/mrf/nsfw_api_policy_test.exs b/test/pleroma/web/activity_pub/mrf/nsfw_api_policy_test.exs new file mode 100644 index 000000000..0beb9c2cb --- /dev/null +++ b/test/pleroma/web/activity_pub/mrf/nsfw_api_policy_test.exs @@ -0,0 +1,267 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.NsfwApiPolicyTest do + use Pleroma.DataCase + + import ExUnit.CaptureLog + import Pleroma.Factory + + alias Pleroma.Constants + alias Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy + + require Pleroma.Constants + + @policy :mrf_nsfw_api + + @sfw_url "https://kittens.co/kitty.gif" + @nsfw_url "https://b00bies.com/nsfw.jpg" + @timeout_url "http://time.out/i.jpg" + + setup_all do + clear_config(@policy, + url: "http://127.0.0.1:5000/", + threshold: 0.7, + mark_sensitive: true, + unlist: false, + reject: false + ) + end + + setup do + Tesla.Mock.mock(fn + # NSFW URL + %{method: :get, url: "http://127.0.0.1:5000/?url=#{@nsfw_url}"} -> + %Tesla.Env{status: 200, body: ~s({"score":0.99772077798843384,"url":"#{@nsfw_url}"})} + + # SFW URL + %{method: :get, url: "http://127.0.0.1:5000/?url=#{@sfw_url}"} -> + %Tesla.Env{status: 200, body: ~s({"score":0.00011714912398019806,"url":"#{@sfw_url}"})} + + # Timeout URL + %{method: :get, url: "http://127.0.0.1:5000/?url=#{@timeout_url}"} -> + {:error, :timeout} + + # Fallback URL + %{method: :get, url: "http://127.0.0.1:5000/?url=" <> url} -> + body = + ~s({"error_code":500,"error_reason":"[Errno -2] Name or service not known","url":"#{url}"}) + + %Tesla.Env{status: 500, body: body} + end) + + :ok + end + + describe "build_request_url/1" do + test "it works" do + expected = "http://127.0.0.1:5000/?url=https://b00bies.com/nsfw.jpg" + assert NsfwApiPolicy.build_request_url(@nsfw_url) == expected + end + + test "it adds a trailing slash" do + clear_config([@policy, :url], "http://localhost:5000") + + expected = "http://localhost:5000/?url=https://b00bies.com/nsfw.jpg" + assert NsfwApiPolicy.build_request_url(@nsfw_url) == expected + end + + test "it adds a trailing slash preserving the path" do + clear_config([@policy, :url], "http://localhost:5000/nsfw_api") + + expected = "http://localhost:5000/nsfw_api/?url=https://b00bies.com/nsfw.jpg" + assert NsfwApiPolicy.build_request_url(@nsfw_url) == expected + end + end + + describe "parse_url/1" do + test "returns decoded JSON from the API server" do + expected = %{"score" => 0.99772077798843384, "url" => @nsfw_url} + assert NsfwApiPolicy.parse_url(@nsfw_url) == {:ok, expected} + end + + test "warns when 
the API server fails" do + expected = "[NsfwApiPolicy]: The API server failed. Skipping." + assert capture_log(fn -> NsfwApiPolicy.parse_url(@timeout_url) end) =~ expected + end + + test "returns {:error, _} tuple when the API server fails" do + capture_log(fn -> + assert {:error, _} = NsfwApiPolicy.parse_url(@timeout_url) + end) + end + end + + describe "check_url_nsfw/1" do + test "returns {:nsfw, _} tuple" do + expected = {:nsfw, %{url: @nsfw_url, score: 0.99772077798843384, threshold: 0.7}} + assert NsfwApiPolicy.check_url_nsfw(@nsfw_url) == expected + end + + test "returns {:sfw, _} tuple" do + expected = {:sfw, %{url: @sfw_url, score: 0.00011714912398019806, threshold: 0.7}} + assert NsfwApiPolicy.check_url_nsfw(@sfw_url) == expected + end + + test "returns {:sfw, _} on failure" do + expected = {:sfw, %{url: @timeout_url, score: nil, threshold: 0.7}} + + capture_log(fn -> + assert NsfwApiPolicy.check_url_nsfw(@timeout_url) == expected + end) + end + + test "works with map URL" do + expected = {:nsfw, %{url: @nsfw_url, score: 0.99772077798843384, threshold: 0.7}} + assert NsfwApiPolicy.check_url_nsfw(%{"href" => @nsfw_url}) == expected + end + end + + describe "check_attachment_nsfw/1" do + test "returns {:nsfw, _} if any items are NSFW" do + attachment = %{"url" => [%{"href" => @nsfw_url}, @nsfw_url, @sfw_url]} + assert NsfwApiPolicy.check_attachment_nsfw(attachment) == {:nsfw, attachment} + end + + test "returns {:sfw, _} if all items are SFW" do + attachment = %{"url" => [%{"href" => @sfw_url}, @sfw_url, @sfw_url]} + assert NsfwApiPolicy.check_attachment_nsfw(attachment) == {:sfw, attachment} + end + + test "works with binary URL" do + attachment = %{"url" => @nsfw_url} + assert NsfwApiPolicy.check_attachment_nsfw(attachment) == {:nsfw, attachment} + end + end + + describe "check_object_nsfw/1" do + test "returns {:nsfw, _} if any items are NSFW" do + object = %{"attachment" => [%{"url" => [%{"href" => @nsfw_url}, @sfw_url]}]} + assert NsfwApiPolicy.check_object_nsfw(object) == {:nsfw, object} + end + + test "returns {:sfw, _} if all items are SFW" do + object = %{"attachment" => [%{"url" => [%{"href" => @sfw_url}, @sfw_url]}]} + assert NsfwApiPolicy.check_object_nsfw(object) == {:sfw, object} + end + + test "works with embedded object" do + object = %{"object" => %{"attachment" => [%{"url" => [%{"href" => @nsfw_url}, @sfw_url]}]}} + assert NsfwApiPolicy.check_object_nsfw(object) == {:nsfw, object} + end + end + + describe "unlist/1" do + test "unlist addressing" do + user = insert(:user) + + object = %{ + "to" => [Constants.as_public()], + "cc" => [user.follower_address, "https://hello.world/users/alex"], + "actor" => user.ap_id + } + + expected = %{ + "to" => [user.follower_address], + "cc" => [Constants.as_public(), "https://hello.world/users/alex"], + "actor" => user.ap_id + } + + assert NsfwApiPolicy.unlist(object) == expected + end + + test "raise if user isn't found" do + object = %{ + "to" => [Constants.as_public()], + "cc" => [], + "actor" => "https://hello.world/users/alex" + } + + assert_raise(RuntimeError, fn -> + NsfwApiPolicy.unlist(object) + end) + end + end + + describe "mark_sensitive/1" do + test "adds nsfw tag and marks sensitive" do + object = %{"tag" => ["yolo"]} + expected = %{"tag" => ["yolo", "nsfw"], "sensitive" => true} + assert NsfwApiPolicy.mark_sensitive(object) == expected + end + + test "works with embedded object" do + object = %{"object" => %{"tag" => ["yolo"]}} + expected = %{"object" => %{"tag" => ["yolo", "nsfw"], "sensitive" => true}} + assert 
NsfwApiPolicy.mark_sensitive(object) == expected + end + end + + describe "filter/1" do + setup do + user = insert(:user) + + nsfw_object = %{ + "to" => [Constants.as_public()], + "cc" => [user.follower_address], + "actor" => user.ap_id, + "attachment" => [%{"url" => @nsfw_url}] + } + + sfw_object = %{ + "to" => [Constants.as_public()], + "cc" => [user.follower_address], + "actor" => user.ap_id, + "attachment" => [%{"url" => @sfw_url}] + } + + %{user: user, nsfw_object: nsfw_object, sfw_object: sfw_object} + end + + test "passes SFW object through", %{sfw_object: object} do + {:ok, _} = NsfwApiPolicy.filter(object) + end + + test "passes NSFW object through when actions are disabled", %{nsfw_object: object} do + clear_config([@policy, :mark_sensitive], false) + clear_config([@policy, :unlist], false) + clear_config([@policy, :reject], false) + {:ok, _} = NsfwApiPolicy.filter(object) + end + + test "passes NSFW object through when :threshold is 1", %{nsfw_object: object} do + clear_config([@policy, :reject], true) + clear_config([@policy, :threshold], 1) + {:ok, _} = NsfwApiPolicy.filter(object) + end + + test "rejects SFW object through when :threshold is 0", %{sfw_object: object} do + clear_config([@policy, :reject], true) + clear_config([@policy, :threshold], 0) + {:reject, _} = NsfwApiPolicy.filter(object) + end + + test "rejects NSFW when :reject is enabled", %{nsfw_object: object} do + clear_config([@policy, :reject], true) + {:reject, _} = NsfwApiPolicy.filter(object) + end + + test "passes NSFW through when :reject is disabled", %{nsfw_object: object} do + clear_config([@policy, :reject], false) + {:ok, _} = NsfwApiPolicy.filter(object) + end + + test "unlists NSFW when :unlist is enabled", %{user: user, nsfw_object: object} do + clear_config([@policy, :unlist], true) + {:ok, object} = NsfwApiPolicy.filter(object) + assert object["to"] == [user.follower_address] + end + + test "passes NSFW through when :unlist is disabled", %{nsfw_object: object} do + clear_config([@policy, :unlist], false) + {:ok, object} = NsfwApiPolicy.filter(object) + assert object["to"] == [Constants.as_public()] + end + end +end diff --git a/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs b/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs index a615c1d9a..6627fa6db 100644 --- a/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs +++ b/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs @@ -27,19 +27,22 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidatorTest do end test "works with honkerific attachments" do - attachment = %{ + honk = %{ "mediaType" => "", - "name" => "", - "summary" => "298p3RG7j27tfsZ9RQ.jpg", + "summary" => "Select your spirit chonk", + "name" => "298p3RG7j27tfsZ9RQ.jpg", "type" => "Document", "url" => "https://honk.tedunangst.com/d/298p3RG7j27tfsZ9RQ.jpg" } assert {:ok, attachment} = - AttachmentValidator.cast_and_validate(attachment) + honk + |> AttachmentValidator.cast_and_validate() |> Ecto.Changeset.apply_action(:insert) assert attachment.mediaType == "application/octet-stream" + assert attachment.summary == "Select your spirit chonk" + assert attachment.name == "298p3RG7j27tfsZ9RQ.jpg" end test "works with an unknown but valid mime type" do diff --git a/test/pleroma/web/mastodon_api/controllers/scheduled_activity_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/scheduled_activity_controller_test.exs index 632242221..2d6b2aee2 100644 --- 
a/test/pleroma/web/mastodon_api/controllers/scheduled_activity_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/scheduled_activity_controller_test.exs @@ -3,6 +3,7 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.MastodonAPI.ScheduledActivityControllerTest do + use Oban.Testing, repo: Pleroma.Repo use Pleroma.Web.ConnCase, async: true alias Pleroma.Repo @@ -78,7 +79,7 @@ defmodule Pleroma.Web.MastodonAPI.ScheduledActivityControllerTest do } ) - job = Repo.one(from(j in Oban.Job, where: j.queue == "scheduled_activities")) + job = Repo.one(from(j in Oban.Job, where: j.queue == "federator_outgoing")) assert job.args == %{"activity_id" => scheduled_activity.id} assert DateTime.truncate(job.scheduled_at, :second) == to_datetime(scheduled_at) @@ -124,9 +125,11 @@ defmodule Pleroma.Web.MastodonAPI.ScheduledActivityControllerTest do } ) - job = Repo.one(from(j in Oban.Job, where: j.queue == "scheduled_activities")) - - assert job.args == %{"activity_id" => scheduled_activity.id} + assert_enqueued( + worker: Pleroma.Workers.ScheduledActivityWorker, + args: %{"activity_id" => scheduled_activity.id}, + queue: :federator_outgoing + ) res_conn = conn @@ -135,7 +138,11 @@ defmodule Pleroma.Web.MastodonAPI.ScheduledActivityControllerTest do assert %{} = json_response_and_validate_schema(res_conn, 200) refute Repo.get(ScheduledActivity, scheduled_activity.id) - refute Repo.get(Oban.Job, job.id) + + refute_enqueued( + worker: Pleroma.Workers.ScheduledActivityWorker, + args: %{"activity_id" => scheduled_activity.id} + ) res_conn = conn diff --git a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs index 80c1ed099..f34911e5b 100644 --- a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs @@ -235,6 +235,16 @@ defmodule Pleroma.Web.MastodonAPI.StatusControllerTest do assert Activity.get_in_reply_to_activity(activity).id == replied_to.id end + test "replying to a deleted status", %{user: user, conn: conn} do + {:ok, status} = CommonAPI.post(user, %{status: "cofe"}) + {:ok, _deleted_status} = CommonAPI.delete(status.id, user) + + conn + |> put_req_header("content-type", "application/json") + |> post("/api/v1/statuses", %{"status" => "xD", "in_reply_to_id" => status.id}) + |> json_response_and_validate_schema(422) + end + test "replying to a direct message with visibility other than direct", %{ user: user, conn: conn diff --git a/test/pleroma/web/mastodon_api/views/status_view_test.exs b/test/pleroma/web/mastodon_api/views/status_view_test.exs index 1c2d7f7fd..167692dfb 100644 --- a/test/pleroma/web/mastodon_api/views/status_view_test.exs +++ b/test/pleroma/web/mastodon_api/views/status_view_test.exs @@ -591,45 +591,78 @@ defmodule Pleroma.Web.MastodonAPI.StatusViewTest do assert mention.url == recipient.ap_id end - test "attachments" do - object = %{ - "type" => "Image", - "url" => [ - %{ - "mediaType" => "image/png", - "href" => "someurl", - "width" => 200, - "height" => 100 - } - ], - "blurhash" => "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn", - "uuid" => 6 - } + describe "attachments" do + test "Complete Mastodon style" do + object = %{ + "type" => "Image", + "url" => [ + %{ + "mediaType" => "image/png", + "href" => "someurl", + "width" => 200, + "height" => 100 + } + ], + "blurhash" => "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn", + "uuid" => 6 + } - expected = %{ - id: "1638338801", - type: "image", - 
url: "someurl", - remote_url: "someurl", - preview_url: "someurl", - text_url: "someurl", - description: nil, - pleroma: %{mime_type: "image/png"}, - meta: %{original: %{width: 200, height: 100, aspect: 2}}, - blurhash: "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn" - } + expected = %{ + id: "1638338801", + type: "image", + url: "someurl", + remote_url: "someurl", + preview_url: "someurl", + text_url: "someurl", + description: nil, + pleroma: %{mime_type: "image/png"}, + meta: %{original: %{width: 200, height: 100, aspect: 2}}, + blurhash: "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn" + } - api_spec = Pleroma.Web.ApiSpec.spec() + api_spec = Pleroma.Web.ApiSpec.spec() - assert expected == StatusView.render("attachment.json", %{attachment: object}) - assert_schema(expected, "Attachment", api_spec) + assert expected == StatusView.render("attachment.json", %{attachment: object}) + assert_schema(expected, "Attachment", api_spec) - # If theres a "id", use that instead of the generated one - object = Map.put(object, "id", 2) - result = StatusView.render("attachment.json", %{attachment: object}) + # If theres a "id", use that instead of the generated one + object = Map.put(object, "id", 2) + result = StatusView.render("attachment.json", %{attachment: object}) - assert %{id: "2"} = result - assert_schema(result, "Attachment", api_spec) + assert %{id: "2"} = result + assert_schema(result, "Attachment", api_spec) + end + + test "Honkerific" do + object = %{ + "type" => "Image", + "url" => [ + %{ + "mediaType" => "image/png", + "href" => "someurl" + } + ], + "name" => "fool.jpeg", + "summary" => "they have played us for absolute fools." + } + + expected = %{ + blurhash: nil, + description: "they have played us for absolute fools.", + id: "1638338801", + pleroma: %{mime_type: "image/png", name: "fool.jpeg"}, + preview_url: "someurl", + remote_url: "someurl", + text_url: "someurl", + type: "image", + url: "someurl" + } + + api_spec = Pleroma.Web.ApiSpec.spec() + + assert expected == StatusView.render("attachment.json", %{attachment: object}) + assert_schema(expected, "Attachment", api_spec) + end end test "put the url advertised in the Activity in to the url attribute" do diff --git a/test/pleroma/web/plugs/http_security_plug_test.exs b/test/pleroma/web/plugs/http_security_plug_test.exs index c79170382..11a351a41 100644 --- a/test/pleroma/web/plugs/http_security_plug_test.exs +++ b/test/pleroma/web/plugs/http_security_plug_test.exs @@ -3,14 +3,52 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do - use Pleroma.Web.ConnCase + use Pleroma.Web.ConnCase, async: true alias Plug.Conn + import Mox + + setup do + base_config = Pleroma.Config.get([:http_security]) + %{base_config: base_config} + end + + defp mock_config(config, additional \\ %{}) do + Pleroma.StaticStubbedConfigMock + |> stub(:get, fn + [:http_security, key] -> config[key] + key -> additional[key] + end) + end + describe "http security enabled" do - setup do: clear_config([:http_security, :enabled], true) + setup %{base_config: base_config} do + %{base_config: Keyword.put(base_config, :enabled, true)} + end + + test "it does not contain unsafe-eval", %{conn: conn, base_config: base_config} do + mock_config(base_config) + + conn = get(conn, "/api/v1/instance") + [header] = Conn.get_resp_header(conn, "content-security-policy") + refute header =~ ~r/unsafe-eval/ + end + + test "with allow_unsafe_eval set, it does contain it", %{conn: conn, base_config: base_config} do + base_config = + base_config + |> 
Keyword.put(:allow_unsafe_eval, true) + + mock_config(base_config) + + conn = get(conn, "/api/v1/instance") + [header] = Conn.get_resp_header(conn, "content-security-policy") + assert header =~ ~r/unsafe-eval/ + end - test "it sends CSP headers when enabled", %{conn: conn} do + test "it sends CSP headers when enabled", %{conn: conn, base_config: base_config} do + mock_config(base_config) conn = get(conn, "/api/v1/instance") refute Conn.get_resp_header(conn, "x-xss-protection") == [] @@ -22,8 +60,10 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do refute Conn.get_resp_header(conn, "content-security-policy") == [] end - test "it sends STS headers when enabled", %{conn: conn} do - clear_config([:http_security, :sts], true) + test "it sends STS headers when enabled", %{conn: conn, base_config: base_config} do + base_config + |> Keyword.put(:sts, true) + |> mock_config() conn = get(conn, "/api/v1/instance") @@ -31,8 +71,10 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do refute Conn.get_resp_header(conn, "expect-ct") == [] end - test "it does not send STS headers when disabled", %{conn: conn} do - clear_config([:http_security, :sts], false) + test "it does not send STS headers when disabled", %{conn: conn, base_config: base_config} do + base_config + |> Keyword.put(:sts, false) + |> mock_config() conn = get(conn, "/api/v1/instance") @@ -40,19 +82,30 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do assert Conn.get_resp_header(conn, "expect-ct") == [] end - test "referrer-policy header reflects configured value", %{conn: conn} do - resp = get(conn, "/api/v1/instance") + test "referrer-policy header reflects configured value", %{ + conn: conn, + base_config: base_config + } do + mock_config(base_config) + resp = get(conn, "/api/v1/instance") assert Conn.get_resp_header(resp, "referrer-policy") == ["same-origin"] - clear_config([:http_security, :referrer_policy], "no-referrer") + base_config + |> Keyword.put(:referrer_policy, "no-referrer") + |> mock_config resp = get(conn, "/api/v1/instance") assert Conn.get_resp_header(resp, "referrer-policy") == ["no-referrer"] end - test "it sends `report-to` & `report-uri` CSP response headers", %{conn: conn} do + test "it sends `report-to` & `report-uri` CSP response headers", %{ + conn: conn, + base_config: base_config + } do + mock_config(base_config) + conn = get(conn, "/api/v1/instance") [csp] = Conn.get_resp_header(conn, "content-security-policy") @@ -65,7 +118,11 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do "{\"endpoints\":[{\"url\":\"https://endpoint.com\"}],\"group\":\"csp-endpoint\",\"max-age\":10886400}" end - test "default values for img-src and media-src with disabled media proxy", %{conn: conn} do + test "default values for img-src and media-src with disabled media proxy", %{ + conn: conn, + base_config: base_config + } do + mock_config(base_config) conn = get(conn, "/api/v1/instance") [csp] = Conn.get_resp_header(conn, "content-security-policy") @@ -73,60 +130,129 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do assert csp =~ "img-src 'self' data: blob: https:;" end - test "it sets the Service-Worker-Allowed header", %{conn: conn} do - clear_config([:http_security, :enabled], true) - clear_config([:frontends, :primary], %{"name" => "fedi-fe", "ref" => "develop"}) - - clear_config([:frontends, :available], %{ - "fedi-fe" => %{ - "name" => "fedi-fe", - "custom-http-headers" => [{"service-worker-allowed", "/"}] - } - }) - + test "it sets the Service-Worker-Allowed header", %{conn: conn, base_config: base_config} do 
+ base_config + |> Keyword.put(:enabled, true) + + additional_config = + %{} + |> Map.put([:frontends, :primary], %{"name" => "fedi-fe", "ref" => "develop"}) + |> Map.put( + [:frontends, :available], + %{ + "fedi-fe" => %{ + "name" => "fedi-fe", + "custom-http-headers" => [{"service-worker-allowed", "/"}] + } + } + ) + + mock_config(base_config, additional_config) conn = get(conn, "/api/v1/instance") assert Conn.get_resp_header(conn, "service-worker-allowed") == ["/"] end end describe "img-src and media-src" do - setup do - clear_config([:http_security, :enabled], true) - clear_config([:media_proxy, :enabled], true) - clear_config([:media_proxy, :proxy_opts, :redirect_on_failure], false) + setup %{base_config: base_config} do + base_config = + base_config + |> Keyword.put(:enabled, true) + + additional_config = + %{} + |> Map.put([:media_proxy, :enabled], true) + |> Map.put([:media_proxy, :proxy_opts, :redirect_on_failure], false) + |> Map.put([:media_proxy, :whitelist], []) + + %{base_config: base_config, additional_config: additional_config} end - test "media_proxy with base_url", %{conn: conn} do + test "media_proxy with base_url", %{ + conn: conn, + base_config: base_config, + additional_config: additional_config + } do url = "https://example.com" - clear_config([:media_proxy, :base_url], url) + + additional_config = + additional_config + |> Map.put([:media_proxy, :base_url], url) + + mock_config(base_config, additional_config) + assert_media_img_src(conn, url) end - test "upload with base url", %{conn: conn} do + test "upload with base url", %{ + conn: conn, + base_config: base_config, + additional_config: additional_config + } do url = "https://example2.com" - clear_config([Pleroma.Upload, :base_url], url) + + additional_config = + additional_config + |> Map.put([Pleroma.Upload, :base_url], url) + + mock_config(base_config, additional_config) + assert_media_img_src(conn, url) end - test "with S3 public endpoint", %{conn: conn} do + test "with S3 public endpoint", %{ + conn: conn, + base_config: base_config, + additional_config: additional_config + } do url = "https://example3.com" - clear_config([Pleroma.Uploaders.S3, :public_endpoint], url) + + additional_config = + additional_config + |> Map.put([Pleroma.Uploaders.S3, :public_endpoint], url) + + mock_config(base_config, additional_config) assert_media_img_src(conn, url) end - test "with captcha endpoint", %{conn: conn} do - clear_config([Pleroma.Captcha.Mock, :endpoint], "https://captcha.com") + test "with captcha endpoint", %{ + conn: conn, + base_config: base_config, + additional_config: additional_config + } do + additional_config = + additional_config + |> Map.put([Pleroma.Captcha.Mock, :endpoint], "https://captcha.com") + |> Map.put([Pleroma.Captcha, :method], Pleroma.Captcha.Mock) + + mock_config(base_config, additional_config) assert_media_img_src(conn, "https://captcha.com") end - test "with media_proxy whitelist", %{conn: conn} do - clear_config([:media_proxy, :whitelist], ["https://example6.com", "https://example7.com"]) + test "with media_proxy whitelist", %{ + conn: conn, + base_config: base_config, + additional_config: additional_config + } do + additional_config = + additional_config + |> Map.put([:media_proxy, :whitelist], ["https://example6.com", "https://example7.com"]) + + mock_config(base_config, additional_config) assert_media_img_src(conn, "https://example7.com https://example6.com") end # TODO: delete after removing support bare domains for media proxy whitelist - test "with media_proxy bare domains whitelist 
(deprecated)", %{conn: conn} do - clear_config([:media_proxy, :whitelist], ["example4.com", "example5.com"]) + test "with media_proxy bare domains whitelist (deprecated)", %{ + conn: conn, + base_config: base_config, + additional_config: additional_config + } do + additional_config = + additional_config + |> Map.put([:media_proxy, :whitelist], ["example4.com", "example5.com"]) + + mock_config(base_config, additional_config) assert_media_img_src(conn, "example5.com example4.com") end end @@ -138,8 +264,10 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do assert csp =~ "img-src 'self' data: blob: #{url};" end - test "it does not send CSP headers when disabled", %{conn: conn} do - clear_config([:http_security, :enabled], false) + test "it does not send CSP headers when disabled", %{conn: conn, base_config: base_config} do + base_config + |> Keyword.put(:enabled, false) + |> mock_config conn = get(conn, "/api/v1/instance") diff --git a/test/pleroma/web/plugs/http_signature_plug_test.exs b/test/pleroma/web/plugs/http_signature_plug_test.exs index 2d8fba3cd..9d07270bb 100644 --- a/test/pleroma/web/plugs/http_signature_plug_test.exs +++ b/test/pleroma/web/plugs/http_signature_plug_test.exs @@ -3,77 +3,89 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.Plugs.HTTPSignaturePlugTest do - use Pleroma.Web.ConnCase + use Pleroma.Web.ConnCase, async: true + + alias Pleroma.StaticStubbedConfigMock, as: ConfigMock + alias Pleroma.StubbedHTTPSignaturesMock, as: HTTPSignaturesMock alias Pleroma.Web.Plugs.HTTPSignaturePlug - import Plug.Conn + import Mox import Phoenix.Controller, only: [put_format: 2] - import Mock + import Plug.Conn - test "it call HTTPSignatures to check validity if the actor sighed it" do + test "it calls HTTPSignatures to check validity if the actor signed it" do params = %{"actor" => "http://mastodon.example.org/users/admin"} conn = build_conn(:get, "/doesntmattter", params) - with_mock HTTPSignatures, validate_conn: fn _ -> true end do - conn = - conn - |> put_req_header( - "signature", - "keyId=\"http://mastodon.example.org/users/admin#main-key" - ) - |> put_format("activity+json") - |> HTTPSignaturePlug.call(%{}) + HTTPSignaturesMock + |> expect(:validate_conn, fn _ -> true end) - assert conn.assigns.valid_signature == true - assert conn.halted == false - assert called(HTTPSignatures.validate_conn(:_)) - end + conn = + conn + |> put_req_header( + "signature", + "keyId=\"http://mastodon.example.org/users/admin#main-key" + ) + |> put_format("activity+json") + |> HTTPSignaturePlug.call(%{}) + + assert conn.assigns.valid_signature == true + assert conn.halted == false end describe "requires a signature when `authorized_fetch_mode` is enabled" do setup do - clear_config([:activitypub, :authorized_fetch_mode], true) - params = %{"actor" => "http://mastodon.example.org/users/admin"} conn = build_conn(:get, "/doesntmattter", params) |> put_format("activity+json") [conn: conn] end - test "when signature header is present", %{conn: conn} do - with_mock HTTPSignatures, validate_conn: fn _ -> false end do - conn = - conn - |> put_req_header( - "signature", - "keyId=\"http://mastodon.example.org/users/admin#main-key" - ) - |> HTTPSignaturePlug.call(%{}) - - assert conn.assigns.valid_signature == false - assert conn.halted == true - assert conn.status == 401 - assert conn.state == :sent - assert conn.resp_body == "Request not signed" - assert called(HTTPSignatures.validate_conn(:_)) - end - - with_mock HTTPSignatures, validate_conn: fn _ -> true end do - conn = - conn - |> 
put_req_header( - "signature", - "keyId=\"http://mastodon.example.org/users/admin#main-key" - ) - |> HTTPSignaturePlug.call(%{}) - - assert conn.assigns.valid_signature == true - assert conn.halted == false - assert called(HTTPSignatures.validate_conn(:_)) - end + test "when signature header is present", %{conn: orig_conn} do + ConfigMock + |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end) + |> expect(:get, fn [:activitypub, :authorized_fetch_mode_exceptions], [] -> [] end) + + HTTPSignaturesMock + |> expect(:validate_conn, 2, fn _ -> false end) + + conn = + orig_conn + |> put_req_header( + "signature", + "keyId=\"http://mastodon.example.org/users/admin#main-key" + ) + |> HTTPSignaturePlug.call(%{}) + + assert conn.assigns.valid_signature == false + assert conn.halted == true + assert conn.status == 401 + assert conn.state == :sent + assert conn.resp_body == "Request not signed" + + ConfigMock + |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end) + + HTTPSignaturesMock + |> expect(:validate_conn, fn _ -> true end) + + conn = + orig_conn + |> put_req_header( + "signature", + "keyId=\"http://mastodon.example.org/users/admin#main-key" + ) + |> HTTPSignaturePlug.call(%{}) + + assert conn.assigns.valid_signature == true + assert conn.halted == false end test "halts the connection when `signature` header is not present", %{conn: conn} do + ConfigMock + |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end) + |> expect(:get, fn [:activitypub, :authorized_fetch_mode_exceptions], [] -> [] end) + conn = HTTPSignaturePlug.call(conn, %{}) assert conn.assigns[:valid_signature] == nil assert conn.halted == true @@ -81,5 +93,73 @@ defmodule Pleroma.Web.Plugs.HTTPSignaturePlugTest do assert conn.state == :sent assert conn.resp_body == "Request not signed" end + + test "exempts specific IPs from `authorized_fetch_mode_exceptions`", %{conn: conn} do + ConfigMock + |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end) + |> expect(:get, fn [:activitypub, :authorized_fetch_mode_exceptions], [] -> + ["192.168.0.0/24"] + end) + |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end) + + HTTPSignaturesMock + |> expect(:validate_conn, 2, fn _ -> false end) + + conn = + conn + |> Map.put(:remote_ip, {192, 168, 0, 1}) + |> put_req_header( + "signature", + "keyId=\"http://mastodon.example.org/users/admin#main-key" + ) + |> HTTPSignaturePlug.call(%{}) + + assert conn.remote_ip == {192, 168, 0, 1} + assert conn.halted == false + end + end + + test "rejects requests from `rejected_instances` when `authorized_fetch_mode` is enabled" do + ConfigMock + |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end) + |> expect(:get, fn [:instance, :rejected_instances] -> + [{"mastodon.example.org", "no reason"}] + end) + + HTTPSignaturesMock + |> expect(:validate_conn, fn _ -> true end) + + conn = + build_conn(:get, "/doesntmattter", %{"actor" => "http://mastodon.example.org/users/admin"}) + |> put_req_header( + "signature", + "keyId=\"http://mastodon.example.org/users/admin#main-key" + ) + |> put_format("activity+json") + |> HTTPSignaturePlug.call(%{}) + + assert conn.assigns.valid_signature == true + assert conn.halted == true + + ConfigMock + |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end) + |> expect(:get, fn [:instance, :rejected_instances] -> + [{"mastodon.example.org", "no reason"}] + end) + + HTTPSignaturesMock + |> expect(:validate_conn, fn _ -> true 
end)
+
+    conn =
+      build_conn(:get, "/doesntmattter", %{"actor" => "http://allowed.example.org/users/admin"})
+      |> put_req_header(
+        "signature",
+        "keyId=\"http://allowed.example.org/users/admin#main-key"
+      )
+      |> put_format("activity+json")
+      |> HTTPSignaturePlug.call(%{})
+
+    assert conn.assigns.valid_signature == true
+    assert conn.halted == false
+  end
 end
diff --git a/test/pleroma/web/rich_media/parser/ttl/aws_signed_url_test.exs b/test/pleroma/web/rich_media/parser/ttl/aws_signed_url_test.exs
index cd8be8675..cc28aa7f3 100644
--- a/test/pleroma/web/rich_media/parser/ttl/aws_signed_url_test.exs
+++ b/test/pleroma/web/rich_media/parser/ttl/aws_signed_url_test.exs
@@ -10,6 +10,7 @@ defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrlTest do
 
   alias Pleroma.UnstubbedConfigMock, as: ConfigMock
   alias Pleroma.Web.RichMedia.Card
+  alias Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl
 
   setup do
     ConfigMock
@@ -82,6 +83,12 @@ defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrlTest do
     assert DateTime.diff(scheduled_at, timestamp_dt) == valid_till
   end
 
+  test "AWS URL for an image without expiration works" do
+    og_data = %{"image" => "https://amazonaws.com/image.png"}
+
+    assert is_nil(AwsSignedUrl.ttl(og_data, ""))
+  end
+
   defp construct_s3_url(timestamp, valid_till) do
     "https://pleroma.s3.ap-southeast-1.amazonaws.com/sachin%20%281%29%20_a%20-%25%2Aasdasd%20BNN%20bnnn%20.png?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAIBLWWK6RGDQXDLJQ%2F20190716%2Fap-southeast-1%2Fs3%2Faws4_request&X-Amz-Date=#{timestamp}&X-Amz-Expires=#{valid_till}&X-Amz-Signature=04ffd6b98634f4b1bbabc62e0fac4879093cd54a6eed24fe8eb38e8369526bbf&X-Amz-SignedHeaders=host"
   end
diff --git a/test/support/data_case.ex b/test/support/data_case.ex
index 14403f0b8..52d4bef1a 100644
--- a/test/support/data_case.ex
+++ b/test/support/data_case.ex
@@ -116,6 +116,7 @@ defmodule Pleroma.DataCase do
     Mox.stub_with(Pleroma.Web.FederatorMock, Pleroma.Web.Federator)
     Mox.stub_with(Pleroma.ConfigMock, Pleroma.Config)
     Mox.stub_with(Pleroma.StaticStubbedConfigMock, Pleroma.Test.StaticConfig)
+    Mox.stub_with(Pleroma.StubbedHTTPSignaturesMock, Pleroma.Test.HTTPSignaturesProxy)
   end
 
   def ensure_local_uploader(context) do
diff --git a/test/support/http_signatures_proxy.ex b/test/support/http_signatures_proxy.ex
new file mode 100644
index 000000000..4c6b39d19
--- /dev/null
+++ b/test/support/http_signatures_proxy.ex
@@ -0,0 +1,9 @@
+defmodule Pleroma.Test.HTTPSignaturesProxy do
+  @behaviour Pleroma.HTTPSignaturesAPI
+
+  @impl true
+  defdelegate validate_conn(conn), to: HTTPSignatures
+
+  @impl true
+  defdelegate signature_for_conn(conn), to: HTTPSignatures
+end
diff --git a/test/support/mocks.ex b/test/support/mocks.ex
index d906f0e1d..63cbc49ab 100644
--- a/test/support/mocks.ex
+++ b/test/support/mocks.ex
@@ -28,6 +28,7 @@ Mox.defmock(Pleroma.Web.FederatorMock, for: Pleroma.Web.Federator.Publishing)
 Mox.defmock(Pleroma.ConfigMock, for: Pleroma.Config.Getting)
 Mox.defmock(Pleroma.UnstubbedConfigMock, for: Pleroma.Config.Getting)
 Mox.defmock(Pleroma.StaticStubbedConfigMock, for: Pleroma.Config.Getting)
+Mox.defmock(Pleroma.StubbedHTTPSignaturesMock, for: Pleroma.HTTPSignaturesAPI)
 Mox.defmock(Pleroma.LoggerMock, for: Pleroma.Logging)
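
The HTTPSignatures changes above follow the same Mox pattern the suite already uses for config: mocks.ex defines Pleroma.StubbedHTTPSignaturesMock against the Pleroma.HTTPSignaturesAPI behaviour, data_case.ex stubs it with Pleroma.Test.HTTPSignaturesProxy (which delegates straight to the real HTTPSignatures library), and individual async tests override single calls with Mox.expect/3. Below is a minimal sketch of how a test interacts with that wiring; the module name ExampleSignatureTest and the :fake_conn value are illustrative, everything else comes from the files changed in this diff.

defmodule ExampleSignatureTest do
  # Illustrative test module; assumes the support files from this diff
  # (test/support/mocks.ex, data_case.ex, http_signatures_proxy.ex) are compiled.
  use ExUnit.Case, async: true

  import Mox

  # Fail the test if an expectation set here is never called.
  setup :verify_on_exit!

  test "a per-test expectation overrides the delegating default stub" do
    # In the Pleroma suite, calls without an expectation fall through to the
    # stub installed in data_case.ex, i.e. Pleroma.Test.HTTPSignaturesProxy,
    # which defdelegates to the real HTTPSignatures library.
    Pleroma.StubbedHTTPSignaturesMock
    |> expect(:validate_conn, fn _conn -> true end)

    assert Pleroma.StubbedHTTPSignaturesMock.validate_conn(:fake_conn)
  end
end

Because Mox expectations are process-scoped, async tests such as http_signature_plug_test.exs can each set their own :validate_conn behaviour without interfering with one another, which is what allows the file to switch from with_mock to async: true.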
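
The new plug tests stub three runtime options: [:activitypub, :authorized_fetch_mode], [:activitypub, :authorized_fetch_mode_exceptions], and [:instance, :rejected_instances]. Outside of the mocks these would come from application config; the sketch below is inferred only from the values the stubs return in the tests above, so the exact key placement is an assumption rather than something confirmed by this diff.

# Sketch inferred from the stubbed Config lookups in http_signature_plug_test.exs;
# key placement under :pleroma is an assumption, not documentation.
import Config

config :pleroma, :activitypub,
  # Require a valid HTTP signature on incoming ActivityPub fetches...
  authorized_fetch_mode: true,
  # ...except for requests originating from these CIDR ranges.
  authorized_fetch_mode_exceptions: ["192.168.0.0/24"]

config :pleroma, :instance,
  # Signed requests from these instances are halted even when the signature is valid.
  rejected_instances: [{"mastodon.example.org", "no reason"}]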