From 17bfd000d7d44ff13cf7becbfd9ce08b896d66eb Mon Sep 17 00:00:00 2001 From: Sachin Joshi Date: Wed, 22 May 2019 06:39:19 +0200 Subject: Ability to reset avatar, profile banner and backgroud --- .../web/twitter_api/twitter_api_controller.ex | 35 ++++++++++++++++++++++ 1 file changed, 35 insertions(+) (limited to 'lib') diff --git a/lib/pleroma/web/twitter_api/twitter_api_controller.ex b/lib/pleroma/web/twitter_api/twitter_api_controller.ex index 31e86685a..a796e38fd 100644 --- a/lib/pleroma/web/twitter_api/twitter_api_controller.ex +++ b/lib/pleroma/web/twitter_api/twitter_api_controller.ex @@ -456,6 +456,16 @@ defmodule Pleroma.Web.TwitterAPI.Controller do end end + def update_avatar(%{assigns: %{user: user}} = conn, %{"img" => ""}) do + change = Changeset.change(user, %{avatar: nil}) + {:ok, user} = User.update_and_set_cache(change) + CommonAPI.update(user) + + conn + |> put_view(UserView) + |> render("show.json", %{user: user, for: user}) + end + def update_avatar(%{assigns: %{user: user}} = conn, params) do {:ok, object} = ActivityPub.upload(params, type: :avatar) change = Changeset.change(user, %{avatar: object.data}) @@ -467,6 +477,19 @@ defmodule Pleroma.Web.TwitterAPI.Controller do |> render("show.json", %{user: user, for: user}) end + def update_banner(%{assigns: %{user: user}} = conn, %{"banner" => ""}) do + with new_info <- %{"banner" => %{}}, + info_cng <- User.Info.profile_update(user.info, new_info), + changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng), + {:ok, user} <- User.update_and_set_cache(changeset) do + CommonAPI.update(user) + response = %{url: nil} |> Jason.encode!() + + conn + |> json_reply(200, response) + end + end + def update_banner(%{assigns: %{user: user}} = conn, params) do with {:ok, object} <- ActivityPub.upload(%{"img" => params["banner"]}, type: :banner), new_info <- %{"banner" => object.data}, @@ -482,6 +505,18 @@ defmodule Pleroma.Web.TwitterAPI.Controller do end end + def update_background(%{assigns: %{user: user}} = conn, %{"img" => ""}) do + with new_info <- %{"background" => %{}}, + info_cng <- User.Info.profile_update(user.info, new_info), + changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng), + {:ok, _user} <- User.update_and_set_cache(changeset) do + response = %{url: nil} |> Jason.encode!() + + conn + |> json_reply(200, response) + end + end + def update_background(%{assigns: %{user: user}} = conn, params) do with {:ok, object} <- ActivityPub.upload(params, type: :background), new_info <- %{"background" => object.data}, -- cgit v1.2.3 From 1452a96ad6cfd7d250e3f7c10805994cc92016a7 Mon Sep 17 00:00:00 2001 From: Sachin Joshi Date: Mon, 27 May 2019 15:31:01 +0545 Subject: ability to set and reset avatar, profile banner and backgroud in Mastodon API --- .../web/mastodon_api/mastodon_api_controller.ex | 63 ++++++++++++++++++++++ lib/pleroma/web/router.ex | 4 ++ 2 files changed, 67 insertions(+) (limited to 'lib') diff --git a/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex b/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex index 1ec0f30a1..1ff839e9e 100644 --- a/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex +++ b/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex @@ -152,6 +152,69 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIController do end end + def update_avatar(%{assigns: %{user: user}} = conn, %{"img" => ""}) do + change = Changeset.change(user, %{avatar: nil}) + {:ok, user} = User.update_and_set_cache(change) + CommonAPI.update(user) + + 
json(conn, %{url: nil}) + end + + def update_avatar(%{assigns: %{user: user}} = conn, params) do + {:ok, object} = ActivityPub.upload(params, type: :avatar) + change = Changeset.change(user, %{avatar: object.data}) + {:ok, user} = User.update_and_set_cache(change) + CommonAPI.update(user) + %{"url" => [%{"href" => href} | _]} = object.data + + json(conn, %{url: href}) + end + + def update_banner(%{assigns: %{user: user}} = conn, %{"banner" => ""}) do + with new_info <- %{"banner" => %{}}, + info_cng <- User.Info.profile_update(user.info, new_info), + changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng), + {:ok, user} <- User.update_and_set_cache(changeset) do + CommonAPI.update(user) + + json(conn, %{url: nil}) + end + end + + def update_banner(%{assigns: %{user: user}} = conn, params) do + with {:ok, object} <- ActivityPub.upload(%{"img" => params["banner"]}, type: :banner), + new_info <- %{"banner" => object.data}, + info_cng <- User.Info.profile_update(user.info, new_info), + changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng), + {:ok, user} <- User.update_and_set_cache(changeset) do + CommonAPI.update(user) + %{"url" => [%{"href" => href} | _]} = object.data + + json(conn, %{url: href}) + end + end + + def update_background(%{assigns: %{user: user}} = conn, %{"img" => ""}) do + with new_info <- %{"background" => %{}}, + info_cng <- User.Info.profile_update(user.info, new_info), + changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng), + {:ok, _user} <- User.update_and_set_cache(changeset) do + json(conn, %{url: nil}) + end + end + + def update_background(%{assigns: %{user: user}} = conn, params) do + with {:ok, object} <- ActivityPub.upload(params, type: :background), + new_info <- %{"background" => object.data}, + info_cng <- User.Info.profile_update(user.info, new_info), + changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng), + {:ok, _user} <- User.update_and_set_cache(changeset) do + %{"url" => [%{"href" => href} | _]} = object.data + + json(conn, %{url: href}) + end + end + def verify_credentials(%{assigns: %{user: user}} = conn, _) do account = AccountView.render("account.json", %{user: user, for: user}) json(conn, account) diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex index 352268b96..42ef64c4f 100644 --- a/lib/pleroma/web/router.ex +++ b/lib/pleroma/web/router.ex @@ -318,6 +318,10 @@ defmodule Pleroma.Web.Router do patch("/accounts/update_credentials", MastodonAPIController, :update_credentials) + patch("/accounts/update_avatar", MastodonAPIController, :update_avatar) + patch("/accounts/update_banner", MastodonAPIController, :update_banner) + patch("/accounts/update_background", MastodonAPIController, :update_background) + post("/statuses", MastodonAPIController, :post_status) delete("/statuses/:id", MastodonAPIController, :delete_status) -- cgit v1.2.3 From 5b7b1040b38d262b1815276f86036b50847851c7 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov Date: Sat, 29 Jun 2019 20:04:50 +0300 Subject: [#161] Limited replies depth on incoming federation in order to prevent memory leaks on recursive replies fetching. 
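As a rough illustration of the approach this commit takes (a minimal sketch, not the actual implementation; RepliesDepthSketch, maybe_fetch_parent/2 and fetch_parent/2 are hypothetical stand-ins), each hop through an inReplyTo chain threads a :depth counter via the options keyword list and gives up once the counter exceeds a fixed maximum, so recursive reply fetching can no longer grow without bound:

# Illustrative sketch only; mirrors the depth bookkeeping added to
# handle_incoming/2 and fix_in_reply_to/2 in this commit.
defmodule RepliesDepthSketch do
  @max_replies_depth 100

  # True while the current depth is still within the allowed limit.
  def within_limit?(depth), do: (depth || 1) <= @max_replies_depth

  def maybe_fetch_parent(%{"inReplyTo" => parent_id}, options) when is_binary(parent_id) do
    # Every recursion step increments :depth before following the parent.
    options = Keyword.put(options, :depth, (options[:depth] || 0) + 1)

    if within_limit?(options[:depth]) do
      # fetch_parent/2 stands in for the real remote fetch
      fetch_parent(parent_id, options)
    else
      :depth_limit_reached
    end
  end

  def maybe_fetch_parent(_object, _options), do: :noop

  defp fetch_parent(_id, _options), do: :ok
end

The real change applies the same guard in Transmogrifier.fix_in_reply_to/2, fix_type/2 and the OStatus note handler, with the limit exposed as Federator.max_replies_depth/0.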
--- lib/pleroma/object.ex | 24 +++-- lib/pleroma/object/fetcher.ex | 8 +- lib/pleroma/web/activity_pub/transmogrifier.ex | 125 +++++++++++++++-------- lib/pleroma/web/federator/federator.ex | 6 ++ lib/pleroma/web/ostatus/handlers/note_handler.ex | 13 ++- lib/pleroma/web/ostatus/ostatus.ex | 22 ++-- 6 files changed, 124 insertions(+), 74 deletions(-) (limited to 'lib') diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index 4b181ec59..b8647dd26 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -44,20 +44,20 @@ defmodule Pleroma.Object do Repo.one(from(object in Object, where: fragment("(?)->>'id' = ?", object.data, ^ap_id))) end - def normalize(_, fetch_remote \\ true) + def normalize(_, fetch_remote \\ true, options \\ []) # If we pass an Activity to Object.normalize(), we can try to use the preloaded object. # Use this whenever possible, especially when walking graphs in an O(N) loop! - def normalize(%Object{} = object, _), do: object - def normalize(%Activity{object: %Object{} = object}, _), do: object + def normalize(%Object{} = object, _, _), do: object + def normalize(%Activity{object: %Object{} = object}, _, _), do: object # A hack for fake activities - def normalize(%Activity{data: %{"object" => %{"fake" => true} = data}}, _) do + def normalize(%Activity{data: %{"object" => %{"fake" => true} = data}}, _, _) do %Object{id: "pleroma:fake_object_id", data: data} end # Catch and log Object.normalize() calls where the Activity's child object is not # preloaded. - def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote) do + def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote, _) do Logger.debug( "Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object!" ) @@ -67,7 +67,7 @@ defmodule Pleroma.Object do normalize(ap_id, fetch_remote) end - def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote) do + def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote, _) do Logger.debug( "Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object!" ) @@ -78,10 +78,14 @@ defmodule Pleroma.Object do end # Old way, try fetching the object through cache. - def normalize(%{"id" => ap_id}, fetch_remote), do: normalize(ap_id, fetch_remote) - def normalize(ap_id, false) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id) - def normalize(ap_id, true) when is_binary(ap_id), do: Fetcher.fetch_object_from_id!(ap_id) - def normalize(_, _), do: nil + def normalize(%{"id" => ap_id}, fetch_remote, _), do: normalize(ap_id, fetch_remote) + def normalize(ap_id, false, _) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id) + + def normalize(ap_id, true, options) when is_binary(ap_id) do + Fetcher.fetch_object_from_id!(ap_id, options) + end + + def normalize(_, _, _), do: nil # Owned objects can only be mutated by their owner def authorize_mutation(%Object{data: %{"actor" => actor}}, %User{ap_id: ap_id}), diff --git a/lib/pleroma/object/fetcher.ex b/lib/pleroma/object/fetcher.ex index c422490ac..fffbf2bbb 100644 --- a/lib/pleroma/object/fetcher.ex +++ b/lib/pleroma/object/fetcher.ex @@ -22,7 +22,7 @@ defmodule Pleroma.Object.Fetcher do # TODO: # This will create a Create activity, which we need internally at the moment. 
- def fetch_object_from_id(id) do + def fetch_object_from_id(id, options \\ []) do if object = Object.get_cached_by_ap_id(id) do {:ok, object} else @@ -38,7 +38,7 @@ defmodule Pleroma.Object.Fetcher do "object" => data }, :ok <- Containment.contain_origin(id, params), - {:ok, activity} <- Transmogrifier.handle_incoming(params), + {:ok, activity} <- Transmogrifier.handle_incoming(params, options), {:object, _data, %Object{} = object} <- {:object, data, Object.normalize(activity, false)} do {:ok, object} @@ -63,8 +63,8 @@ defmodule Pleroma.Object.Fetcher do end end - def fetch_object_from_id!(id) do - with {:ok, object} <- fetch_object_from_id(id) do + def fetch_object_from_id!(id, options \\ []) do + with {:ok, object} <- fetch_object_from_id(id, options) do object else _e -> diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 3bb8b40b5..d5ced2d1d 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -14,6 +14,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.ActivityPub.Visibility + alias Pleroma.Web.Federator import Ecto.Query @@ -22,20 +23,20 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do @doc """ Modifies an incoming AP object (mastodon format) to our internal format. """ - def fix_object(object) do + def fix_object(object, options \\ []) do object |> fix_actor |> fix_url |> fix_attachments |> fix_context - |> fix_in_reply_to + |> fix_in_reply_to(options) |> fix_emoji |> fix_tag |> fix_content_map |> fix_likes |> fix_addressing |> fix_summary - |> fix_type + |> fix_type(options) end def fix_summary(%{"summary" => nil} = object) do @@ -164,7 +165,9 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do object end - def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object) + def fix_in_reply_to(object, options \\ []) + + def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options) when not is_nil(in_reply_to) do in_reply_to_id = cond do @@ -182,28 +185,34 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do "" end - case get_obj_helper(in_reply_to_id) do - {:ok, replied_object} -> - with %Activity{} = _activity <- - Activity.get_create_by_object_ap_id(replied_object.data["id"]) do - object - |> Map.put("inReplyTo", replied_object.data["id"]) - |> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id) - |> Map.put("conversation", replied_object.data["context"] || object["conversation"]) - |> Map.put("context", replied_object.data["context"] || object["conversation"]) - else - e -> - Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}") + object = Map.put(object, "inReplyToAtomUri", in_reply_to_id) + + if (options[:depth] || 1) <= Federator.max_replies_depth() do + case get_obj_helper(in_reply_to_id, options) do + {:ok, replied_object} -> + with %Activity{} = _activity <- + Activity.get_create_by_object_ap_id(replied_object.data["id"]) do object - end + |> Map.put("inReplyTo", replied_object.data["id"]) + |> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id) + |> Map.put("conversation", replied_object.data["context"] || object["conversation"]) + |> Map.put("context", replied_object.data["context"] || object["conversation"]) + else + e -> + Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}") + object + end - e -> - Logger.error("Couldn't fetch 
\"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}") - object + e -> + Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}") + object + end + else + object end end - def fix_in_reply_to(object), do: object + def fix_in_reply_to(object, _options), do: object def fix_context(object) do context = object["context"] || object["conversation"] || Utils.generate_context_id() @@ -336,8 +345,15 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do def fix_content_map(object), do: object - def fix_type(%{"inReplyTo" => reply_id} = object) when is_binary(reply_id) do - reply = Object.normalize(reply_id) + def fix_type(object, options \\ []) + + def fix_type(%{"inReplyTo" => reply_id} = object, options) when is_binary(reply_id) do + reply = + if (options[:depth] || 1) <= Federator.max_replies_depth() do + Object.normalize(reply_id, true) + else + nil + end if reply && (reply.data["type"] == "Question" and object["name"]) do Map.put(object, "type", "Answer") @@ -346,7 +362,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end end - def fix_type(object), do: object + def fix_type(object, _), do: object defp mastodon_follow_hack(%{"id" => id, "actor" => follower_id}, followed) do with true <- id =~ "follows", @@ -374,9 +390,11 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end end + def handle_incoming(data, options \\ []) + # Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them # with nil ID. - def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data) do + def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data, _options) do with context <- data["context"] || Utils.generate_context_id(), content <- data["content"] || "", %User{} = actor <- User.get_cached_by_ap_id(actor), @@ -409,15 +427,19 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end # disallow objects with bogus IDs - def handle_incoming(%{"id" => nil}), do: :error - def handle_incoming(%{"id" => ""}), do: :error + def handle_incoming(%{"id" => nil}, _options), do: :error + def handle_incoming(%{"id" => ""}, _options), do: :error # length of https:// = 8, should validate better, but good enough for now. 
- def handle_incoming(%{"id" => id}) when not (is_binary(id) and length(id) > 8), do: :error + def handle_incoming(%{"id" => id}, _options) when not (is_binary(id) and length(id) > 8), + do: :error # TODO: validate those with a Ecto scheme # - tags # - emoji - def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = object} = data) + def handle_incoming( + %{"type" => "Create", "object" => %{"type" => objtype} = object} = data, + options + ) when objtype in ["Article", "Note", "Video", "Page", "Question", "Answer"] do actor = Containment.get_actor(data) @@ -427,7 +449,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do with nil <- Activity.get_create_by_object_ap_id(object["id"]), {:ok, %User{} = user} <- User.get_or_fetch_by_ap_id(data["actor"]) do - object = fix_object(data["object"]) + options = Keyword.put(options, :depth, (options[:depth] || 0) + 1) + object = fix_object(data["object"], options) params = %{ to: data["to"], @@ -452,7 +475,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end def handle_incoming( - %{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data + %{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data, + _options ) do with %User{local: true} = followed <- User.get_cached_by_ap_id(followed), {:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower), @@ -503,7 +527,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end def handle_incoming( - %{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => _id} = data + %{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => _id} = data, + _options ) do with actor <- Containment.get_actor(data), {:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor), @@ -524,7 +549,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end def handle_incoming( - %{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => _id} = data + %{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => _id} = data, + _options ) do with actor <- Containment.get_actor(data), {:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor), @@ -548,7 +574,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end def handle_incoming( - %{"type" => "Like", "object" => object_id, "actor" => _actor, "id" => id} = data + %{"type" => "Like", "object" => object_id, "actor" => _actor, "id" => id} = data, + _options ) do with actor <- Containment.get_actor(data), {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor), @@ -561,7 +588,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end def handle_incoming( - %{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data + %{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data, + _options ) do with actor <- Containment.get_actor(data), {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor), @@ -576,7 +604,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do def handle_incoming( %{"type" => "Update", "object" => %{"type" => object_type} = object, "actor" => actor_id} = - data + data, + _options ) when object_type in ["Person", "Application", "Service", "Organization"] do with %User{ap_id: ^actor_id} = actor <- User.get_cached_by_ap_id(object["id"]) do @@ -614,7 +643,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do # an error or a tombstone. This would allow us to verify that a deletion actually took # place. 
def handle_incoming( - %{"type" => "Delete", "object" => object_id, "actor" => _actor, "id" => _id} = data + %{"type" => "Delete", "object" => object_id, "actor" => _actor, "id" => _id} = data, + _options ) do object_id = Utils.get_ap_id(object_id) @@ -635,7 +665,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do "object" => %{"type" => "Announce", "object" => object_id}, "actor" => _actor, "id" => id - } = data + } = data, + _options ) do with actor <- Containment.get_actor(data), {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor), @@ -653,7 +684,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do "object" => %{"type" => "Follow", "object" => followed}, "actor" => follower, "id" => id - } = _data + } = _data, + _options ) do with %User{local: true} = followed <- User.get_cached_by_ap_id(followed), {:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower), @@ -671,7 +703,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do "object" => %{"type" => "Block", "object" => blocked}, "actor" => blocker, "id" => id - } = _data + } = _data, + _options ) do with true <- Pleroma.Config.get([:activitypub, :accept_blocks]), %User{local: true} = blocked <- User.get_cached_by_ap_id(blocked), @@ -685,7 +718,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end def handle_incoming( - %{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data + %{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data, + _options ) do with true <- Pleroma.Config.get([:activitypub, :accept_blocks]), %User{local: true} = blocked = User.get_cached_by_ap_id(blocked), @@ -705,7 +739,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do "object" => %{"type" => "Like", "object" => object_id}, "actor" => _actor, "id" => id - } = data + } = data, + _options ) do with actor <- Containment.get_actor(data), {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor), @@ -717,10 +752,10 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end end - def handle_incoming(_), do: :error + def handle_incoming(_, _), do: :error - def get_obj_helper(id) do - if object = Object.normalize(id), do: {:ok, object}, else: nil + def get_obj_helper(id, options \\ []) do + if object = Object.normalize(id, true, options), do: {:ok, object}, else: nil end def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) when is_binary(in_reply_to) do diff --git a/lib/pleroma/web/federator/federator.ex b/lib/pleroma/web/federator/federator.ex index f4c9fe284..7c13ff323 100644 --- a/lib/pleroma/web/federator/federator.ex +++ b/lib/pleroma/web/federator/federator.ex @@ -22,6 +22,12 @@ defmodule Pleroma.Web.Federator do refresh_subscriptions() end + @max_replies_depth 100 + + @doc "Addresses [memory leaks on recursive replies fetching](https://git.pleroma.social/pleroma/pleroma/issues/161)" + # credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength + def max_replies_depth, do: @max_replies_depth + # Client API def incoming_doc(doc) do diff --git a/lib/pleroma/web/ostatus/handlers/note_handler.ex b/lib/pleroma/web/ostatus/handlers/note_handler.ex index ec6e5cfaf..6f8f3ddcb 100644 --- a/lib/pleroma/web/ostatus/handlers/note_handler.ex +++ b/lib/pleroma/web/ostatus/handlers/note_handler.ex @@ -10,6 +10,7 @@ defmodule Pleroma.Web.OStatus.NoteHandler do alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.CommonAPI + alias Pleroma.Web.Federator alias Pleroma.Web.OStatus alias Pleroma.Web.XML @@ -88,14 +89,15 @@ defmodule 
Pleroma.Web.OStatus.NoteHandler do Map.put(note, "external_url", url) end - def fetch_replied_to_activity(entry, in_reply_to) do + def fetch_replied_to_activity(entry, in_reply_to, options \\ []) do with %Activity{} = activity <- Activity.get_create_by_object_ap_id(in_reply_to) do activity else _e -> - with in_reply_to_href when not is_nil(in_reply_to_href) <- + with true <- (options[:depth] || 1) <= Federator.max_replies_depth(), + in_reply_to_href when not is_nil(in_reply_to_href) <- XML.string_from_xpath("//thr:in-reply-to[1]/@href", entry), - {:ok, [activity | _]} <- OStatus.fetch_activity_from_url(in_reply_to_href) do + {:ok, [activity | _]} <- OStatus.fetch_activity_from_url(in_reply_to_href, options) do activity else _e -> nil @@ -104,7 +106,7 @@ defmodule Pleroma.Web.OStatus.NoteHandler do end # TODO: Clean this up a bit. - def handle_note(entry, doc \\ nil) do + def handle_note(entry, doc \\ nil, options \\ []) do with id <- XML.string_from_xpath("//id", entry), activity when is_nil(activity) <- Activity.get_create_by_object_ap_id_with_object(id), [author] <- :xmerl_xpath.string('//author[1]', doc), @@ -112,7 +114,8 @@ defmodule Pleroma.Web.OStatus.NoteHandler do content_html <- OStatus.get_content(entry), cw <- OStatus.get_cw(entry), in_reply_to <- XML.string_from_xpath("//thr:in-reply-to[1]/@ref", entry), - in_reply_to_activity <- fetch_replied_to_activity(entry, in_reply_to), + options <- Keyword.put(options, :depth, (options[:depth] || 0) + 1), + in_reply_to_activity <- fetch_replied_to_activity(entry, in_reply_to, options), in_reply_to_object <- (in_reply_to_activity && Object.normalize(in_reply_to_activity)) || nil, in_reply_to <- (in_reply_to_object && in_reply_to_object.data["id"]) || in_reply_to, diff --git a/lib/pleroma/web/ostatus/ostatus.ex b/lib/pleroma/web/ostatus/ostatus.ex index 6ed089d84..502410c83 100644 --- a/lib/pleroma/web/ostatus/ostatus.ex +++ b/lib/pleroma/web/ostatus/ostatus.ex @@ -54,7 +54,7 @@ defmodule Pleroma.Web.OStatus do "#{Web.base_url()}/ostatus_subscribe?acct={uri}" end - def handle_incoming(xml_string) do + def handle_incoming(xml_string, options \\ []) do with doc when doc != :error <- parse_document(xml_string) do with {:ok, actor_user} <- find_make_or_update_user(doc), do: Pleroma.Instances.set_reachable(actor_user.ap_id) @@ -91,10 +91,12 @@ defmodule Pleroma.Web.OStatus do _ -> case object_type do 'http://activitystrea.ms/schema/1.0/note' -> - with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity + with {:ok, activity} <- NoteHandler.handle_note(entry, doc, options), + do: activity 'http://activitystrea.ms/schema/1.0/comment' -> - with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity + with {:ok, activity} <- NoteHandler.handle_note(entry, doc, options), + do: activity _ -> Logger.error("Couldn't parse incoming document") @@ -359,7 +361,7 @@ defmodule Pleroma.Web.OStatus do end end - def fetch_activity_from_atom_url(url) do + def fetch_activity_from_atom_url(url, options \\ []) do with true <- String.starts_with?(url, "http"), {:ok, %{body: body, status: code}} when code in 200..299 <- HTTP.get( @@ -367,7 +369,7 @@ defmodule Pleroma.Web.OStatus do [{:Accept, "application/atom+xml"}] ) do Logger.debug("Got document from #{url}, handling...") - handle_incoming(body) + handle_incoming(body, options) else e -> Logger.debug("Couldn't get #{url}: #{inspect(e)}") @@ -375,13 +377,13 @@ defmodule Pleroma.Web.OStatus do end end - def fetch_activity_from_html_url(url) do + def fetch_activity_from_html_url(url, 
options \\ []) do Logger.debug("Trying to fetch #{url}") with true <- String.starts_with?(url, "http"), {:ok, %{body: body}} <- HTTP.get(url, []), {:ok, atom_url} <- get_atom_url(body) do - fetch_activity_from_atom_url(atom_url) + fetch_activity_from_atom_url(atom_url, options) else e -> Logger.debug("Couldn't get #{url}: #{inspect(e)}") @@ -389,11 +391,11 @@ defmodule Pleroma.Web.OStatus do end end - def fetch_activity_from_url(url) do - with {:ok, [_ | _] = activities} <- fetch_activity_from_atom_url(url) do + def fetch_activity_from_url(url, options \\ []) do + with {:ok, [_ | _] = activities} <- fetch_activity_from_atom_url(url, options) do {:ok, activities} else - _e -> fetch_activity_from_html_url(url) + _e -> fetch_activity_from_html_url(url, options) end rescue e -> -- cgit v1.2.3 From 2b9d914089755297f6ac24ffbb81934cf3c70cdd Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov Date: Sun, 30 Jun 2019 15:58:50 +0300 Subject: [#161] Refactoring, documentation. --- lib/pleroma/web/activity_pub/transmogrifier.ex | 6 ++---- lib/pleroma/web/federator/federator.ex | 12 +++++++++--- lib/pleroma/web/ostatus/handlers/note_handler.ex | 2 +- 3 files changed, 12 insertions(+), 8 deletions(-) (limited to 'lib') diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index d5ced2d1d..543d4bb7d 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -187,7 +187,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do object = Map.put(object, "inReplyToAtomUri", in_reply_to_id) - if (options[:depth] || 1) <= Federator.max_replies_depth() do + if Federator.allowed_incoming_reply_depth?(options[:depth]) do case get_obj_helper(in_reply_to_id, options) do {:ok, replied_object} -> with %Activity{} = _activity <- @@ -349,10 +349,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do def fix_type(%{"inReplyTo" => reply_id} = object, options) when is_binary(reply_id) do reply = - if (options[:depth] || 1) <= Federator.max_replies_depth() do + if Federator.allowed_incoming_reply_depth?(options[:depth]) do Object.normalize(reply_id, true) - else - nil end if reply && (reply.data["type"] == "Question" and object["name"]) do diff --git a/lib/pleroma/web/federator/federator.ex b/lib/pleroma/web/federator/federator.ex index 7c13ff323..f4f9e83e0 100644 --- a/lib/pleroma/web/federator/federator.ex +++ b/lib/pleroma/web/federator/federator.ex @@ -22,11 +22,17 @@ defmodule Pleroma.Web.Federator do refresh_subscriptions() end - @max_replies_depth 100 - @doc "Addresses [memory leaks on recursive replies fetching](https://git.pleroma.social/pleroma/pleroma/issues/161)" # credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength - def max_replies_depth, do: @max_replies_depth + def allowed_incoming_reply_depth?(depth) do + max_replies_depth = Pleroma.Config.get([:instance, :federation_incoming_replies_max_depth]) + + if max_replies_depth do + (depth || 1) <= max_replies_depth + else + true + end + end # Client API diff --git a/lib/pleroma/web/ostatus/handlers/note_handler.ex b/lib/pleroma/web/ostatus/handlers/note_handler.ex index 6f8f3ddcb..8e0adad91 100644 --- a/lib/pleroma/web/ostatus/handlers/note_handler.ex +++ b/lib/pleroma/web/ostatus/handlers/note_handler.ex @@ -94,7 +94,7 @@ defmodule Pleroma.Web.OStatus.NoteHandler do activity else _e -> - with true <- (options[:depth] || 1) <= Federator.max_replies_depth(), + with true <- Federator.allowed_incoming_reply_depth?(options[:depth]), 
in_reply_to_href when not is_nil(in_reply_to_href) <- XML.string_from_xpath("//thr:in-reply-to[1]/@href", entry), {:ok, [activity | _]} <- OStatus.fetch_activity_from_url(in_reply_to_href, options) do -- cgit v1.2.3 From 4f42093220393a13033fcfd306acf54c8791e98f Mon Sep 17 00:00:00 2001 From: Egor Kislitsyn Date: Wed, 3 Jul 2019 14:11:04 +0700 Subject: Remove Uploaders.Swift --- lib/pleroma/uploaders/swift/keystone.ex | 51 --------------------------------- lib/pleroma/uploaders/swift/swift.ex | 29 ------------------- lib/pleroma/uploaders/swift/uploader.ex | 19 ------------ 3 files changed, 99 deletions(-) delete mode 100644 lib/pleroma/uploaders/swift/keystone.ex delete mode 100644 lib/pleroma/uploaders/swift/swift.ex delete mode 100644 lib/pleroma/uploaders/swift/uploader.ex (limited to 'lib') diff --git a/lib/pleroma/uploaders/swift/keystone.ex b/lib/pleroma/uploaders/swift/keystone.ex deleted file mode 100644 index dd44c7561..000000000 --- a/lib/pleroma/uploaders/swift/keystone.ex +++ /dev/null @@ -1,51 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2019 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Uploaders.Swift.Keystone do - use HTTPoison.Base - - def process_url(url) do - Enum.join( - [Pleroma.Config.get!([Pleroma.Uploaders.Swift, :auth_url]), url], - "/" - ) - end - - def process_response_body(body) do - body - |> Jason.decode!() - end - - def get_token do - settings = Pleroma.Config.get(Pleroma.Uploaders.Swift) - username = Keyword.fetch!(settings, :username) - password = Keyword.fetch!(settings, :password) - tenant_id = Keyword.fetch!(settings, :tenant_id) - - case post( - "/tokens", - make_auth_body(username, password, tenant_id), - ["Content-Type": "application/json"], - hackney: [:insecure] - ) do - {:ok, %Tesla.Env{status: 200, body: body}} -> - body["access"]["token"]["id"] - - {:ok, %Tesla.Env{status: _}} -> - "" - end - end - - def make_auth_body(username, password, tenant) do - Jason.encode!(%{ - :auth => %{ - :passwordCredentials => %{ - :username => username, - :password => password - }, - :tenantId => tenant - } - }) - end -end diff --git a/lib/pleroma/uploaders/swift/swift.ex b/lib/pleroma/uploaders/swift/swift.ex deleted file mode 100644 index 2b0f2ad04..000000000 --- a/lib/pleroma/uploaders/swift/swift.ex +++ /dev/null @@ -1,29 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2019 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Uploaders.Swift.Client do - use HTTPoison.Base - - def process_url(url) do - Enum.join( - [Pleroma.Config.get!([Pleroma.Uploaders.Swift, :storage_url]), url], - "/" - ) - end - - def upload_file(filename, body, content_type) do - token = Pleroma.Uploaders.Swift.Keystone.get_token() - - case put("#{filename}", body, "X-Auth-Token": token, "Content-Type": content_type) do - {:ok, %Tesla.Env{status: 201}} -> - {:ok, {:file, filename}} - - {:ok, %Tesla.Env{status: 401}} -> - {:error, "Unauthorized, Bad Token"} - - {:error, _} -> - {:error, "Swift Upload Error"} - end - end -end diff --git a/lib/pleroma/uploaders/swift/uploader.ex b/lib/pleroma/uploaders/swift/uploader.ex deleted file mode 100644 index d122b09e7..000000000 --- a/lib/pleroma/uploaders/swift/uploader.ex +++ /dev/null @@ -1,19 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2019 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Uploaders.Swift do - @behaviour Pleroma.Uploaders.Uploader - - def 
get_file(name) do - {:ok, {:url, Path.join([Pleroma.Config.get!([__MODULE__, :object_url]), name])}} - end - - def put_file(upload) do - Pleroma.Uploaders.Swift.Client.upload_file( - upload.path, - File.read!(upload.tmpfile), - upload.content_type - ) - end -end -- cgit v1.2.3 From 6c50fbcd14b56665979199150659635575fd1b25 Mon Sep 17 00:00:00 2001 From: Maxim Filippov Date: Fri, 5 Jul 2019 19:33:53 +0300 Subject: Admin API: Allow querying user by ID --- lib/pleroma/web/admin_api/admin_api_controller.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'lib') diff --git a/lib/pleroma/web/admin_api/admin_api_controller.ex b/lib/pleroma/web/admin_api/admin_api_controller.ex index 498beb56a..0a2482a8c 100644 --- a/lib/pleroma/web/admin_api/admin_api_controller.ex +++ b/lib/pleroma/web/admin_api/admin_api_controller.ex @@ -74,7 +74,7 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do end def user_show(conn, %{"nickname" => nickname}) do - with %User{} = user <- User.get_cached_by_nickname(nickname) do + with %User{} = user <- User.get_cached_by_nickname_or_id(nickname) do conn |> json(AccountView.render("show.json", %{user: user})) else -- cgit v1.2.3 From a7885748c7f37db232a097aa24132ed59229360c Mon Sep 17 00:00:00 2001 From: KokaKiwi Date: Sat, 22 Jun 2019 19:45:21 +0200 Subject: MastoAPI streaming: Replace access_token with Sec-WebSocket-Protocol --- lib/pleroma/web/mastodon_api/websocket_handler.ex | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'lib') diff --git a/lib/pleroma/web/mastodon_api/websocket_handler.ex b/lib/pleroma/web/mastodon_api/websocket_handler.ex index 3299e1721..db6ae23b0 100644 --- a/lib/pleroma/web/mastodon_api/websocket_handler.ex +++ b/lib/pleroma/web/mastodon_api/websocket_handler.ex @@ -29,7 +29,7 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do def init(%{qs: qs} = req, state) do with params <- :cow_qs.parse_qs(qs), - access_token <- List.keyfind(params, "access_token", 0), + access_token <- :cowboy_req.header("sec-websocket-protocol", req, 0), {_, stream} <- List.keyfind(params, "stream", 0), {:ok, user} <- allow_request(stream, access_token), topic when is_binary(topic) <- expand_topic(stream, params) do @@ -89,7 +89,7 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do end # Authenticated streams. 
- defp allow_request(stream, {"access_token", access_token}) when stream in @streams do + defp allow_request(stream, access_token) when stream in @streams do with %Token{user_id: user_id} <- Repo.get_by(Token, token: access_token), user = %User{} <- User.get_cached_by_id(user_id) do {:ok, user} -- cgit v1.2.3 From e174614eb9d9ea465611aac694912bbdbaf2a03c Mon Sep 17 00:00:00 2001 From: "Haelwenn (lanodan) Monnier" Date: Sat, 6 Jul 2019 20:26:08 +0200 Subject: MastoAPI Streaming: Keep compatibility with access_token --- lib/pleroma/web/mastodon_api/websocket_handler.ex | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) (limited to 'lib') diff --git a/lib/pleroma/web/mastodon_api/websocket_handler.ex b/lib/pleroma/web/mastodon_api/websocket_handler.ex index db6ae23b0..dbd3542ea 100644 --- a/lib/pleroma/web/mastodon_api/websocket_handler.ex +++ b/lib/pleroma/web/mastodon_api/websocket_handler.ex @@ -29,9 +29,10 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do def init(%{qs: qs} = req, state) do with params <- :cow_qs.parse_qs(qs), - access_token <- :cowboy_req.header("sec-websocket-protocol", req, 0), + sec_websocket <- :cowboy_req.header("sec-websocket-protocol", req, nil), + access_token <- List.keyfind(params, "access_token", 0), {_, stream} <- List.keyfind(params, "stream", 0), - {:ok, user} <- allow_request(stream, access_token), + {:ok, user} <- allow_request(stream, [access_token, sec_websocket]), topic when is_binary(topic) <- expand_topic(stream, params) do {:cowboy_websocket, req, %{user: user, topic: topic}, %{idle_timeout: @timeout}} else @@ -84,13 +85,21 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do end # Public streams without authentication. - defp allow_request(stream, nil) when stream in @anonymous_streams do + defp allow_request(stream, [nil, nil]) when stream in @anonymous_streams do {:ok, nil} end # Authenticated streams. - defp allow_request(stream, access_token) when stream in @streams do - with %Token{user_id: user_id} <- Repo.get_by(Token, token: access_token), + defp allow_request(stream, [access_token, sec_websocket]) when stream in @streams do + token = + with {"access_token", token} <- access_token do + token + else + _ -> sec_websocket + end + + with true <- is_bitstring(token), + %Token{user_id: user_id} <- Repo.get_by(Token, token: token), user = %User{} <- User.get_cached_by_id(user_id) do {:ok, user} else -- cgit v1.2.3 From f5ad4309747e85192e6034fd362103b0b71869d0 Mon Sep 17 00:00:00 2001 From: Sachin Joshi Date: Sun, 7 Jul 2019 14:13:40 +0545 Subject: make sure the url used by proxy is same as origin url encoding or decoding it breaks some of the signed url --- lib/pleroma/web/media_proxy/media_proxy.ex | 23 +---------------------- 1 file changed, 1 insertion(+), 22 deletions(-) (limited to 'lib') diff --git a/lib/pleroma/web/media_proxy/media_proxy.ex b/lib/pleroma/web/media_proxy/media_proxy.ex index cee6d8481..dd8888a02 100644 --- a/lib/pleroma/web/media_proxy/media_proxy.ex +++ b/lib/pleroma/web/media_proxy/media_proxy.ex @@ -33,20 +33,7 @@ defmodule Pleroma.Web.MediaProxy do def encode_url(url) do secret = Pleroma.Config.get([Pleroma.Web.Endpoint, :secret_key_base]) - - # Must preserve `%2F` for compatibility with S3 - # https://git.pleroma.social/pleroma/pleroma/issues/580 - replacement = get_replacement(url, ":2F:") - - # The URL is url-decoded and encoded again to ensure it is correctly encoded and not twice. 
- base64 = - url - |> String.replace("%2F", replacement) - |> URI.decode() - |> URI.encode() - |> String.replace(replacement, "%2F") - |> Base.url_encode64(@base64_opts) - + base64 = Base.url_encode64(url, @base64_opts) sig = :crypto.hmac(:sha, secret, base64) sig64 = sig |> Base.url_encode64(@base64_opts) @@ -80,12 +67,4 @@ defmodule Pleroma.Web.MediaProxy do |> Enum.filter(fn value -> value end) |> Path.join() end - - defp get_replacement(url, replacement) do - if String.contains?(url, replacement) do - get_replacement(url, replacement <> replacement) - else - replacement - end - end end -- cgit v1.2.3 From 72b88c82bc038c8ecf6eba2012582f495f30ef43 Mon Sep 17 00:00:00 2001 From: rinpatch Date: Mon, 8 Jul 2019 11:47:40 +0300 Subject: Mastodon API: Fix embedded relationships not being rendered inside of statuses --- lib/pleroma/web/mastodon_api/views/status_view.ex | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'lib') diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index 6836d331a..ec582b919 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -104,7 +104,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do id: to_string(activity.id), uri: activity_object.data["id"], url: activity_object.data["id"], - account: AccountView.render("account.json", %{user: user}), + account: AccountView.render("account.json", %{user: user, for: opts[:for]}), in_reply_to_id: nil, in_reply_to_account_id: nil, reblog: reblogged, @@ -221,7 +221,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do id: to_string(activity.id), uri: object.data["id"], url: url, - account: AccountView.render("account.json", %{user: user}), + account: AccountView.render("account.json", %{user: user, for: opts[:for]}), in_reply_to_id: reply_to && to_string(reply_to.id), in_reply_to_account_id: reply_to_user && to_string(reply_to_user.id), reblog: nil, -- cgit v1.2.3 From 7f609ee8f4608b25428f070e54db2346a69fb239 Mon Sep 17 00:00:00 2001 From: rinpatch Date: Mon, 8 Jul 2019 12:16:32 +0300 Subject: OGP/TwitterCard: Add fallbacks in case the attachment key is nonexistent --- lib/pleroma/web/metadata/opengraph.ex | 2 ++ lib/pleroma/web/metadata/twitter_card.ex | 1 + 2 files changed, 3 insertions(+) (limited to 'lib') diff --git a/lib/pleroma/web/metadata/opengraph.ex b/lib/pleroma/web/metadata/opengraph.ex index 357b80a2d..4033ec38f 100644 --- a/lib/pleroma/web/metadata/opengraph.ex +++ b/lib/pleroma/web/metadata/opengraph.ex @@ -121,4 +121,6 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraph do acc ++ rendered_tags end) end + + defp build_attachments(_), do: [] end diff --git a/lib/pleroma/web/metadata/twitter_card.ex b/lib/pleroma/web/metadata/twitter_card.ex index 040b872e7..9baf5ac97 100644 --- a/lib/pleroma/web/metadata/twitter_card.ex +++ b/lib/pleroma/web/metadata/twitter_card.ex @@ -116,6 +116,7 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCard do acc ++ rendered_tags end) end + defp build_attachments(_id, _object), do: [] defp player_url(id) do Pleroma.Web.Router.Helpers.o_status_url(Pleroma.Web.Endpoint, :notice_player, id) -- cgit v1.2.3 From 9e58d3c62471d5a9c0230a9ada19efc2722ff46a Mon Sep 17 00:00:00 2001 From: rinpatch Date: Mon, 8 Jul 2019 12:59:49 +0300 Subject: FallbackRedirector: Do not crash on Metadata rendering errors --- lib/pleroma/web/router.ex | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) (limited to 'lib') diff --git 
a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex index 055289dc5..ff9ed1640 100644 --- a/lib/pleroma/web/router.ex +++ b/lib/pleroma/web/router.ex @@ -724,6 +724,7 @@ end defmodule Fallback.RedirectController do use Pleroma.Web, :controller + require Logger alias Pleroma.User alias Pleroma.Web.Metadata @@ -750,7 +751,20 @@ defmodule Fallback.RedirectController do def redirector_with_meta(conn, params) do {:ok, index_content} = File.read(index_file_path()) - tags = Metadata.build_tags(params) + + tags = + try do + Metadata.build_tags(params) + rescue + e -> + Logger.error( + "Metadata rendering for #{conn.request_path} failed.\n" <> + Exception.format(:error, e, __STACKTRACE__) + ) + + "" + end + response = String.replace(index_content, "", tags) conn -- cgit v1.2.3 From 44b2e1fdad7e7379fa03bc4ba0342e576fb7bf75 Mon Sep 17 00:00:00 2001 From: rinpatch Date: Mon, 8 Jul 2019 14:05:57 +0300 Subject: Formatting --- lib/pleroma/web/metadata/twitter_card.ex | 1 + 1 file changed, 1 insertion(+) (limited to 'lib') diff --git a/lib/pleroma/web/metadata/twitter_card.ex b/lib/pleroma/web/metadata/twitter_card.ex index 9baf5ac97..8dd01e0d5 100644 --- a/lib/pleroma/web/metadata/twitter_card.ex +++ b/lib/pleroma/web/metadata/twitter_card.ex @@ -116,6 +116,7 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCard do acc ++ rendered_tags end) end + defp build_attachments(_id, _object), do: [] defp player_url(id) do -- cgit v1.2.3 From abe2e8881f946aafc2012edd43373c22837387af Mon Sep 17 00:00:00 2001 From: lain Date: Tue, 9 Jul 2019 15:30:51 +0900 Subject: Testing: Don't federate in testing. --- lib/pleroma/web/activity_pub/utils.ex | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) (limited to 'lib') diff --git a/lib/pleroma/web/activity_pub/utils.ex b/lib/pleroma/web/activity_pub/utils.ex index 514266cee..4288ea4c8 100644 --- a/lib/pleroma/web/activity_pub/utils.ex +++ b/lib/pleroma/web/activity_pub/utils.ex @@ -170,14 +170,17 @@ defmodule Pleroma.Web.ActivityPub.Utils do Enqueues an activity for federation if it's local """ def maybe_federate(%Activity{local: true} = activity) do - priority = - case activity.data["type"] do - "Delete" -> 10 - "Create" -> 1 - _ -> 5 - end + if Pleroma.Config.get!([:instance, :federating]) do + priority = + case activity.data["type"] do + "Delete" -> 10 + "Create" -> 1 + _ -> 5 + end + + Pleroma.Web.Federator.publish(activity, priority) + end - Pleroma.Web.Federator.publish(activity, priority) :ok end -- cgit v1.2.3 From 23d4781e73c4a34fcc8d442cf1d3e2863a07ad84 Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov Date: Tue, 9 Jul 2019 08:52:49 +0000 Subject: change for local user search --- lib/pleroma/user/search.ex | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) (limited to 'lib') diff --git a/lib/pleroma/user/search.ex b/lib/pleroma/user/search.ex index 7680c2afd..64eb6d2bc 100644 --- a/lib/pleroma/user/search.ex +++ b/lib/pleroma/user/search.ex @@ -150,7 +150,7 @@ defmodule Pleroma.User.Search do @spec fts_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t() defp fts_search_subquery(query, term) do processed_query = - term + String.trim_trailing(term, "@" <> local_domain()) |> String.replace(~r/\W+/, " ") |> String.trim() |> String.split() @@ -192,6 +192,8 @@ defmodule Pleroma.User.Search do @spec trigram_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t() defp trigram_search_subquery(query, term) do + term = String.trim_trailing(term, "@" <> local_domain()) + from( u in query, select_merge: 
%{ @@ -209,4 +211,6 @@ defmodule Pleroma.User.Search do ) |> User.restrict_deactivated() end + + defp local_domain, do: Pleroma.Config.get([Pleroma.Web.Endpoint, :url, :host]) end -- cgit v1.2.3 From 4e6e5d80428a40f0403560b3c8381ea48cf4373e Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov Date: Tue, 9 Jul 2019 16:54:13 +0000 Subject: reverse proxy tests --- lib/mix/tasks/pleroma/ecto/ecto.ex | 1 + lib/pleroma/reverse_proxy.ex | 382 ----------------------------- lib/pleroma/reverse_proxy/client.ex | 24 ++ lib/pleroma/reverse_proxy/reverse_proxy.ex | 382 +++++++++++++++++++++++++++++ 4 files changed, 407 insertions(+), 382 deletions(-) delete mode 100644 lib/pleroma/reverse_proxy.ex create mode 100644 lib/pleroma/reverse_proxy/client.ex create mode 100644 lib/pleroma/reverse_proxy/reverse_proxy.ex (limited to 'lib') diff --git a/lib/mix/tasks/pleroma/ecto/ecto.ex b/lib/mix/tasks/pleroma/ecto/ecto.ex index 324f57fdd..b66f63376 100644 --- a/lib/mix/tasks/pleroma/ecto/ecto.ex +++ b/lib/mix/tasks/pleroma/ecto/ecto.ex @@ -1,6 +1,7 @@ # Pleroma: A lightweight social networking server # Copyright © 2017-2018 Pleroma Authors # SPDX-License-Identifier: AGPL-3.0-onl + defmodule Mix.Tasks.Pleroma.Ecto do @doc """ Ensures the given repository's migrations path exists on the file system. diff --git a/lib/pleroma/reverse_proxy.ex b/lib/pleroma/reverse_proxy.ex deleted file mode 100644 index de0f6e1bc..000000000 --- a/lib/pleroma/reverse_proxy.ex +++ /dev/null @@ -1,382 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2019 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.ReverseProxy do - alias Pleroma.HTTP - - @keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since) ++ - ~w(if-unmodified-since if-none-match if-range range) - @resp_cache_headers ~w(etag date last-modified cache-control) - @keep_resp_headers @resp_cache_headers ++ - ~w(content-type content-disposition content-encoding content-range) ++ - ~w(accept-ranges vary) - @default_cache_control_header "public, max-age=1209600" - @valid_resp_codes [200, 206, 304] - @max_read_duration :timer.seconds(30) - @max_body_length :infinity - @methods ~w(GET HEAD) - - @moduledoc """ - A reverse proxy. - - Pleroma.ReverseProxy.call(conn, url, options) - - It is not meant to be added into a plug pipeline, but to be called from another plug or controller. - - Supports `#{inspect(@methods)}` HTTP methods, and only allows `#{inspect(@valid_resp_codes)}` status codes. - - Responses are chunked to the client while downloading from the upstream. - - Some request / responses headers are preserved: - - * request: `#{inspect(@keep_req_headers)}` - * response: `#{inspect(@keep_resp_headers)}` - - If no caching headers (`#{inspect(@resp_cache_headers)}`) are returned by upstream, `cache-control` will be - set to `#{inspect(@default_cache_control_header)}`. - - Options: - - * `redirect_on_failure` (default `false`). Redirects the client to the real remote URL if there's any HTTP - errors. Any error during body processing will not be redirected as the response is chunked. This may expose - remote URL, clients IPs, …. - - * `max_body_length` (default `#{inspect(@max_body_length)}`): limits the content length to be approximately the - specified length. It is validated with the `content-length` header and also verified when proxying. - - * `max_read_duration` (default `#{inspect(@max_read_duration)}` ms): the total time the connection is allowed to - read from the remote upstream. 
- - * `inline_content_types`: - * `true` will not alter `content-disposition` (up to the upstream), - * `false` will add `content-disposition: attachment` to any request, - * a list of whitelisted content types - - * `keep_user_agent` will forward the client's user-agent to the upstream. This may be useful if the upstream is - doing content transformation (encoding, …) depending on the request. - - * `req_headers`, `resp_headers` additional headers. - - * `http`: options for [hackney](https://github.com/benoitc/hackney). - - """ - @default_hackney_options [] - - @inline_content_types [ - "image/gif", - "image/jpeg", - "image/jpg", - "image/png", - "image/svg+xml", - "audio/mpeg", - "audio/mp3", - "video/webm", - "video/mp4", - "video/quicktime" - ] - - require Logger - import Plug.Conn - - @type option() :: - {:keep_user_agent, boolean} - | {:max_read_duration, :timer.time() | :infinity} - | {:max_body_length, non_neg_integer() | :infinity} - | {:http, []} - | {:req_headers, [{String.t(), String.t()}]} - | {:resp_headers, [{String.t(), String.t()}]} - | {:inline_content_types, boolean() | [String.t()]} - | {:redirect_on_failure, boolean()} - - @spec call(Plug.Conn.t(), url :: String.t(), [option()]) :: Plug.Conn.t() - def call(_conn, _url, _opts \\ []) - - def call(conn = %{method: method}, url, opts) when method in @methods do - hackney_opts = - @default_hackney_options - |> Keyword.merge(Keyword.get(opts, :http, [])) - |> HTTP.process_request_options() - - req_headers = build_req_headers(conn.req_headers, opts) - - opts = - if filename = Pleroma.Web.MediaProxy.filename(url) do - Keyword.put_new(opts, :attachment_name, filename) - else - opts - end - - with {:ok, code, headers, client} <- request(method, url, req_headers, hackney_opts), - :ok <- header_length_constraint(headers, Keyword.get(opts, :max_body_length)) do - response(conn, client, url, code, headers, opts) - else - {:ok, code, headers} -> - head_response(conn, url, code, headers, opts) - |> halt() - - {:error, {:invalid_http_response, code}} -> - Logger.error("#{__MODULE__}: request to #{inspect(url)} failed with HTTP status #{code}") - - conn - |> error_or_redirect( - url, - code, - "Request failed: " <> Plug.Conn.Status.reason_phrase(code), - opts - ) - |> halt() - - {:error, error} -> - Logger.error("#{__MODULE__}: request to #{inspect(url)} failed: #{inspect(error)}") - - conn - |> error_or_redirect(url, 500, "Request failed", opts) - |> halt() - end - end - - def call(conn, _, _) do - conn - |> send_resp(400, Plug.Conn.Status.reason_phrase(400)) - |> halt() - end - - defp request(method, url, headers, hackney_opts) do - Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}") - method = method |> String.downcase() |> String.to_existing_atom() - - case hackney().request(method, url, headers, "", hackney_opts) do - {:ok, code, headers, client} when code in @valid_resp_codes -> - {:ok, code, downcase_headers(headers), client} - - {:ok, code, headers} when code in @valid_resp_codes -> - {:ok, code, downcase_headers(headers)} - - {:ok, code, _, _} -> - {:error, {:invalid_http_response, code}} - - {:error, error} -> - {:error, error} - end - end - - defp response(conn, client, url, status, headers, opts) do - result = - conn - |> put_resp_headers(build_resp_headers(headers, opts)) - |> send_chunked(status) - |> chunk_reply(client, opts) - - case result do - {:ok, conn} -> - halt(conn) - - {:error, :closed, conn} -> - :hackney.close(client) - halt(conn) - - {:error, error, conn} -> - Logger.warn( - "#{__MODULE__} 
request to #{url} failed while reading/chunking: #{inspect(error)}" - ) - - :hackney.close(client) - halt(conn) - end - end - - defp chunk_reply(conn, client, opts) do - chunk_reply(conn, client, opts, 0, 0) - end - - defp chunk_reply(conn, client, opts, sent_so_far, duration) do - with {:ok, duration} <- - check_read_duration( - duration, - Keyword.get(opts, :max_read_duration, @max_read_duration) - ), - {:ok, data} <- hackney().stream_body(client), - {:ok, duration} <- increase_read_duration(duration), - sent_so_far = sent_so_far + byte_size(data), - :ok <- body_size_constraint(sent_so_far, Keyword.get(opts, :max_body_size)), - {:ok, conn} <- chunk(conn, data) do - chunk_reply(conn, client, opts, sent_so_far, duration) - else - :done -> {:ok, conn} - {:error, error} -> {:error, error, conn} - end - end - - defp head_response(conn, _url, code, headers, opts) do - conn - |> put_resp_headers(build_resp_headers(headers, opts)) - |> send_resp(code, "") - end - - defp error_or_redirect(conn, url, code, body, opts) do - if Keyword.get(opts, :redirect_on_failure, false) do - conn - |> Phoenix.Controller.redirect(external: url) - |> halt() - else - conn - |> send_resp(code, body) - |> halt - end - end - - defp downcase_headers(headers) do - Enum.map(headers, fn {k, v} -> - {String.downcase(k), v} - end) - end - - defp get_content_type(headers) do - {_, content_type} = - List.keyfind(headers, "content-type", 0, {"content-type", "application/octet-stream"}) - - [content_type | _] = String.split(content_type, ";") - content_type - end - - defp put_resp_headers(conn, headers) do - Enum.reduce(headers, conn, fn {k, v}, conn -> - put_resp_header(conn, k, v) - end) - end - - defp build_req_headers(headers, opts) do - headers - |> downcase_headers() - |> Enum.filter(fn {k, _} -> k in @keep_req_headers end) - |> (fn headers -> - headers = headers ++ Keyword.get(opts, :req_headers, []) - - if Keyword.get(opts, :keep_user_agent, false) do - List.keystore( - headers, - "user-agent", - 0, - {"user-agent", Pleroma.Application.user_agent()} - ) - else - headers - end - end).() - end - - defp build_resp_headers(headers, opts) do - headers - |> Enum.filter(fn {k, _} -> k in @keep_resp_headers end) - |> build_resp_cache_headers(opts) - |> build_resp_content_disposition_header(opts) - |> (fn headers -> headers ++ Keyword.get(opts, :resp_headers, []) end).() - end - - defp build_resp_cache_headers(headers, _opts) do - has_cache? = Enum.any?(headers, fn {k, _} -> k in @resp_cache_headers end) - has_cache_control? = List.keymember?(headers, "cache-control", 0) - - cond do - has_cache? && has_cache_control? -> - headers - - has_cache? -> - # There's caching header present but no cache-control -- we need to explicitely override it - # to public as Plug defaults to "max-age=0, private, must-revalidate" - List.keystore(headers, "cache-control", 0, {"cache-control", "public"}) - - true -> - List.keystore( - headers, - "cache-control", - 0, - {"cache-control", @default_cache_control_header} - ) - end - end - - defp build_resp_content_disposition_header(headers, opts) do - opt = Keyword.get(opts, :inline_content_types, @inline_content_types) - - content_type = get_content_type(headers) - - attachment? = - cond do - is_list(opt) && !Enum.member?(opt, content_type) -> true - opt == false -> true - true -> false - end - - if attachment? 
do - name = - try do - {{"content-disposition", content_disposition_string}, _} = - List.keytake(headers, "content-disposition", 0) - - [name | _] = - Regex.run( - ~r/filename="((?:[^"\\]|\\.)*)"/u, - content_disposition_string || "", - capture: :all_but_first - ) - - name - rescue - MatchError -> Keyword.get(opts, :attachment_name, "attachment") - end - - disposition = "attachment; filename=\"#{name}\"" - - List.keystore(headers, "content-disposition", 0, {"content-disposition", disposition}) - else - headers - end - end - - defp header_length_constraint(headers, limit) when is_integer(limit) and limit > 0 do - with {_, size} <- List.keyfind(headers, "content-length", 0), - {size, _} <- Integer.parse(size), - true <- size <= limit do - :ok - else - false -> - {:error, :body_too_large} - - _ -> - :ok - end - end - - defp header_length_constraint(_, _), do: :ok - - defp body_size_constraint(size, limit) when is_integer(limit) and limit > 0 and size >= limit do - {:error, :body_too_large} - end - - defp body_size_constraint(_, _), do: :ok - - defp check_read_duration(duration, max) - when is_integer(duration) and is_integer(max) and max > 0 do - if duration > max do - {:error, :read_duration_exceeded} - else - {:ok, {duration, :erlang.system_time(:millisecond)}} - end - end - - defp check_read_duration(_, _), do: {:ok, :no_duration_limit, :no_duration_limit} - - defp increase_read_duration({previous_duration, started}) - when is_integer(previous_duration) and is_integer(started) do - duration = :erlang.system_time(:millisecond) - started - {:ok, previous_duration + duration} - end - - defp increase_read_duration(_) do - {:ok, :no_duration_limit, :no_duration_limit} - end - - defp hackney, do: Pleroma.Config.get(:hackney, :hackney) -end diff --git a/lib/pleroma/reverse_proxy/client.ex b/lib/pleroma/reverse_proxy/client.ex new file mode 100644 index 000000000..57c2d2cfd --- /dev/null +++ b/lib/pleroma/reverse_proxy/client.ex @@ -0,0 +1,24 @@ +defmodule Pleroma.ReverseProxy.Client do + @callback request(atom(), String.t(), [tuple()], String.t(), list()) :: + {:ok, pos_integer(), [tuple()], reference() | map()} + | {:ok, pos_integer(), [tuple()]} + | {:ok, reference()} + | {:error, term()} + + @callback stream_body(reference() | pid() | map()) :: + {:ok, binary()} | :done | {:error, String.t()} + + @callback close(reference() | pid() | map()) :: :ok + + def request(method, url, headers, "", opts \\ []) do + client().request(method, url, headers, "", opts) + end + + def stream_body(ref), do: client().stream_body(ref) + + def close(ref), do: client().close(ref) + + defp client do + Pleroma.Config.get([Pleroma.ReverseProxy.Client], :hackney) + end +end diff --git a/lib/pleroma/reverse_proxy/reverse_proxy.ex b/lib/pleroma/reverse_proxy/reverse_proxy.ex new file mode 100644 index 000000000..bf31e9cba --- /dev/null +++ b/lib/pleroma/reverse_proxy/reverse_proxy.ex @@ -0,0 +1,382 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.ReverseProxy do + alias Pleroma.HTTP + + @keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since) ++ + ~w(if-unmodified-since if-none-match if-range range) + @resp_cache_headers ~w(etag date last-modified cache-control) + @keep_resp_headers @resp_cache_headers ++ + ~w(content-type content-disposition content-encoding content-range) ++ + ~w(accept-ranges vary) + @default_cache_control_header "public, max-age=1209600" + @valid_resp_codes [200, 
206, 304] + @max_read_duration :timer.seconds(30) + @max_body_length :infinity + @methods ~w(GET HEAD) + + @moduledoc """ + A reverse proxy. + + Pleroma.ReverseProxy.call(conn, url, options) + + It is not meant to be added into a plug pipeline, but to be called from another plug or controller. + + Supports `#{inspect(@methods)}` HTTP methods, and only allows `#{inspect(@valid_resp_codes)}` status codes. + + Responses are chunked to the client while downloading from the upstream. + + Some request / responses headers are preserved: + + * request: `#{inspect(@keep_req_headers)}` + * response: `#{inspect(@keep_resp_headers)}` + + If no caching headers (`#{inspect(@resp_cache_headers)}`) are returned by upstream, `cache-control` will be + set to `#{inspect(@default_cache_control_header)}`. + + Options: + + * `redirect_on_failure` (default `false`). Redirects the client to the real remote URL if there's any HTTP + errors. Any error during body processing will not be redirected as the response is chunked. This may expose + remote URL, clients IPs, …. + + * `max_body_length` (default `#{inspect(@max_body_length)}`): limits the content length to be approximately the + specified length. It is validated with the `content-length` header and also verified when proxying. + + * `max_read_duration` (default `#{inspect(@max_read_duration)}` ms): the total time the connection is allowed to + read from the remote upstream. + + * `inline_content_types`: + * `true` will not alter `content-disposition` (up to the upstream), + * `false` will add `content-disposition: attachment` to any request, + * a list of whitelisted content types + + * `keep_user_agent` will forward the client's user-agent to the upstream. This may be useful if the upstream is + doing content transformation (encoding, …) depending on the request. + + * `req_headers`, `resp_headers` additional headers. + + * `http`: options for [hackney](https://github.com/benoitc/hackney). 
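+
+  For instance, a plug or controller could combine several of the options above
+  (the values here are illustrative only, not defaults):
+
+      Pleroma.ReverseProxy.call(conn, url,
+        max_body_length: 25 * 1_048_576,
+        redirect_on_failure: true,
+        resp_headers: [{"x-proxied-by", "pleroma"}]
+      )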
+ + """ + @default_hackney_options [] + + @inline_content_types [ + "image/gif", + "image/jpeg", + "image/jpg", + "image/png", + "image/svg+xml", + "audio/mpeg", + "audio/mp3", + "video/webm", + "video/mp4", + "video/quicktime" + ] + + require Logger + import Plug.Conn + + @type option() :: + {:keep_user_agent, boolean} + | {:max_read_duration, :timer.time() | :infinity} + | {:max_body_length, non_neg_integer() | :infinity} + | {:http, []} + | {:req_headers, [{String.t(), String.t()}]} + | {:resp_headers, [{String.t(), String.t()}]} + | {:inline_content_types, boolean() | [String.t()]} + | {:redirect_on_failure, boolean()} + + @spec call(Plug.Conn.t(), url :: String.t(), [option()]) :: Plug.Conn.t() + def call(_conn, _url, _opts \\ []) + + def call(conn = %{method: method}, url, opts) when method in @methods do + hackney_opts = + @default_hackney_options + |> Keyword.merge(Keyword.get(opts, :http, [])) + |> HTTP.process_request_options() + + req_headers = build_req_headers(conn.req_headers, opts) + + opts = + if filename = Pleroma.Web.MediaProxy.filename(url) do + Keyword.put_new(opts, :attachment_name, filename) + else + opts + end + + with {:ok, code, headers, client} <- request(method, url, req_headers, hackney_opts), + :ok <- header_length_constraint(headers, Keyword.get(opts, :max_body_length)) do + response(conn, client, url, code, headers, opts) + else + {:ok, code, headers} -> + head_response(conn, url, code, headers, opts) + |> halt() + + {:error, {:invalid_http_response, code}} -> + Logger.error("#{__MODULE__}: request to #{inspect(url)} failed with HTTP status #{code}") + + conn + |> error_or_redirect( + url, + code, + "Request failed: " <> Plug.Conn.Status.reason_phrase(code), + opts + ) + |> halt() + + {:error, error} -> + Logger.error("#{__MODULE__}: request to #{inspect(url)} failed: #{inspect(error)}") + + conn + |> error_or_redirect(url, 500, "Request failed", opts) + |> halt() + end + end + + def call(conn, _, _) do + conn + |> send_resp(400, Plug.Conn.Status.reason_phrase(400)) + |> halt() + end + + defp request(method, url, headers, hackney_opts) do + Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}") + method = method |> String.downcase() |> String.to_existing_atom() + + case client().request(method, url, headers, "", hackney_opts) do + {:ok, code, headers, client} when code in @valid_resp_codes -> + {:ok, code, downcase_headers(headers), client} + + {:ok, code, headers} when code in @valid_resp_codes -> + {:ok, code, downcase_headers(headers)} + + {:ok, code, _, _} -> + {:error, {:invalid_http_response, code}} + + {:error, error} -> + {:error, error} + end + end + + defp response(conn, client, url, status, headers, opts) do + result = + conn + |> put_resp_headers(build_resp_headers(headers, opts)) + |> send_chunked(status) + |> chunk_reply(client, opts) + + case result do + {:ok, conn} -> + halt(conn) + + {:error, :closed, conn} -> + client().close(client) + halt(conn) + + {:error, error, conn} -> + Logger.warn( + "#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}" + ) + + client().close(client) + halt(conn) + end + end + + defp chunk_reply(conn, client, opts) do + chunk_reply(conn, client, opts, 0, 0) + end + + defp chunk_reply(conn, client, opts, sent_so_far, duration) do + with {:ok, duration} <- + check_read_duration( + duration, + Keyword.get(opts, :max_read_duration, @max_read_duration) + ), + {:ok, data} <- client().stream_body(client), + {:ok, duration} <- increase_read_duration(duration), + sent_so_far = 
sent_so_far + byte_size(data), + :ok <- body_size_constraint(sent_so_far, Keyword.get(opts, :max_body_size)), + {:ok, conn} <- chunk(conn, data) do + chunk_reply(conn, client, opts, sent_so_far, duration) + else + :done -> {:ok, conn} + {:error, error} -> {:error, error, conn} + end + end + + defp head_response(conn, _url, code, headers, opts) do + conn + |> put_resp_headers(build_resp_headers(headers, opts)) + |> send_resp(code, "") + end + + defp error_or_redirect(conn, url, code, body, opts) do + if Keyword.get(opts, :redirect_on_failure, false) do + conn + |> Phoenix.Controller.redirect(external: url) + |> halt() + else + conn + |> send_resp(code, body) + |> halt + end + end + + defp downcase_headers(headers) do + Enum.map(headers, fn {k, v} -> + {String.downcase(k), v} + end) + end + + defp get_content_type(headers) do + {_, content_type} = + List.keyfind(headers, "content-type", 0, {"content-type", "application/octet-stream"}) + + [content_type | _] = String.split(content_type, ";") + content_type + end + + defp put_resp_headers(conn, headers) do + Enum.reduce(headers, conn, fn {k, v}, conn -> + put_resp_header(conn, k, v) + end) + end + + defp build_req_headers(headers, opts) do + headers + |> downcase_headers() + |> Enum.filter(fn {k, _} -> k in @keep_req_headers end) + |> (fn headers -> + headers = headers ++ Keyword.get(opts, :req_headers, []) + + if Keyword.get(opts, :keep_user_agent, false) do + List.keystore( + headers, + "user-agent", + 0, + {"user-agent", Pleroma.Application.user_agent()} + ) + else + headers + end + end).() + end + + defp build_resp_headers(headers, opts) do + headers + |> Enum.filter(fn {k, _} -> k in @keep_resp_headers end) + |> build_resp_cache_headers(opts) + |> build_resp_content_disposition_header(opts) + |> (fn headers -> headers ++ Keyword.get(opts, :resp_headers, []) end).() + end + + defp build_resp_cache_headers(headers, _opts) do + has_cache? = Enum.any?(headers, fn {k, _} -> k in @resp_cache_headers end) + has_cache_control? = List.keymember?(headers, "cache-control", 0) + + cond do + has_cache? && has_cache_control? -> + headers + + has_cache? -> + # There's caching header present but no cache-control -- we need to explicitely override it + # to public as Plug defaults to "max-age=0, private, must-revalidate" + List.keystore(headers, "cache-control", 0, {"cache-control", "public"}) + + true -> + List.keystore( + headers, + "cache-control", + 0, + {"cache-control", @default_cache_control_header} + ) + end + end + + defp build_resp_content_disposition_header(headers, opts) do + opt = Keyword.get(opts, :inline_content_types, @inline_content_types) + + content_type = get_content_type(headers) + + attachment? = + cond do + is_list(opt) && !Enum.member?(opt, content_type) -> true + opt == false -> true + true -> false + end + + if attachment? 
do + name = + try do + {{"content-disposition", content_disposition_string}, _} = + List.keytake(headers, "content-disposition", 0) + + [name | _] = + Regex.run( + ~r/filename="((?:[^"\\]|\\.)*)"/u, + content_disposition_string || "", + capture: :all_but_first + ) + + name + rescue + MatchError -> Keyword.get(opts, :attachment_name, "attachment") + end + + disposition = "attachment; filename=\"#{name}\"" + + List.keystore(headers, "content-disposition", 0, {"content-disposition", disposition}) + else + headers + end + end + + defp header_length_constraint(headers, limit) when is_integer(limit) and limit > 0 do + with {_, size} <- List.keyfind(headers, "content-length", 0), + {size, _} <- Integer.parse(size), + true <- size <= limit do + :ok + else + false -> + {:error, :body_too_large} + + _ -> + :ok + end + end + + defp header_length_constraint(_, _), do: :ok + + defp body_size_constraint(size, limit) when is_integer(limit) and limit > 0 and size >= limit do + {:error, :body_too_large} + end + + defp body_size_constraint(_, _), do: :ok + + defp check_read_duration(duration, max) + when is_integer(duration) and is_integer(max) and max > 0 do + if duration > max do + {:error, :read_duration_exceeded} + else + {:ok, {duration, :erlang.system_time(:millisecond)}} + end + end + + defp check_read_duration(_, _), do: {:ok, :no_duration_limit, :no_duration_limit} + + defp increase_read_duration({previous_duration, started}) + when is_integer(previous_duration) and is_integer(started) do + duration = :erlang.system_time(:millisecond) - started + {:ok, previous_duration + duration} + end + + defp increase_read_duration(_) do + {:ok, :no_duration_limit, :no_duration_limit} + end + + defp client, do: Pleroma.ReverseProxy.Client +end -- cgit v1.2.3 From d6b0fce6e944e8a3dd05091ef2388c610362f824 Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov Date: Tue, 9 Jul 2019 17:36:35 +0000 Subject: Fix/1019 correct count remote users --- lib/pleroma/application.ex | 6 ++- lib/pleroma/user.ex | 66 ++++++++++++++++++++++++++++-- lib/pleroma/user/query.ex | 19 ++++++++- lib/pleroma/user/synchronization.ex | 60 +++++++++++++++++++++++++++ lib/pleroma/user/synchronization_worker.ex | 32 +++++++++++++++ 5 files changed, 177 insertions(+), 6 deletions(-) create mode 100644 lib/pleroma/user/synchronization.ex create mode 100644 lib/pleroma/user/synchronization_worker.ex (limited to 'lib') diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex index ba4cf8486..86c348a0d 100644 --- a/lib/pleroma/application.ex +++ b/lib/pleroma/application.ex @@ -151,7 +151,11 @@ defmodule Pleroma.Application do start: {Pleroma.Web.Endpoint, :start_link, []}, type: :supervisor }, - %{id: Pleroma.Gopher.Server, start: {Pleroma.Gopher.Server, :start_link, []}} + %{id: Pleroma.Gopher.Server, start: {Pleroma.Gopher.Server, :start_link, []}}, + %{ + id: Pleroma.User.SynchronizationWorker, + start: {Pleroma.User.SynchronizationWorker, :start_link, []} + } ] # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex index 09f86aaa2..d03810d1a 100644 --- a/lib/pleroma/user.ex +++ b/lib/pleroma/user.ex @@ -107,15 +107,25 @@ defmodule Pleroma.User do def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers" - def user_info(%User{} = user) do + def user_info(%User{} = user, args \\ %{}) do + following_count = + if args[:following_count], do: args[:following_count], else: following_count(user) + + 
follower_count = + if args[:follower_count], do: args[:follower_count], else: user.info.follower_count + %{ - following_count: following_count(user), note_count: user.info.note_count, - follower_count: user.info.follower_count, locked: user.info.locked, confirmation_pending: user.info.confirmation_pending, default_scope: user.info.default_scope } + |> Map.put(:following_count, following_count) + |> Map.put(:follower_count, follower_count) + end + + def set_info_cache(user, args) do + Cachex.put(:user_cache, "user_info:#{user.id}", user_info(user, args)) end def restrict_deactivated(query) do @@ -1000,6 +1010,56 @@ defmodule Pleroma.User do ) end + @spec sync_follow_counter() :: :ok + def sync_follow_counter, + do: PleromaJobQueue.enqueue(:background, __MODULE__, [:sync_follow_counters]) + + @spec perform(:sync_follow_counters) :: :ok + def perform(:sync_follow_counters) do + {:ok, _pid} = Agent.start_link(fn -> %{} end, name: :domain_errors) + config = Pleroma.Config.get([:instance, :external_user_synchronization]) + + :ok = sync_follow_counters(config) + Agent.stop(:domain_errors) + end + + @spec sync_follow_counters(keyword()) :: :ok + def sync_follow_counters(opts \\ []) do + users = external_users(opts) + + if length(users) > 0 do + errors = Agent.get(:domain_errors, fn state -> state end) + {last, updated_errors} = User.Synchronization.call(users, errors, opts) + Agent.update(:domain_errors, fn _state -> updated_errors end) + sync_follow_counters(max_id: last.id, limit: opts[:limit]) + else + :ok + end + end + + @spec external_users(keyword()) :: [User.t()] + def external_users(opts \\ []) do + query = + User.Query.build(%{ + external: true, + active: true, + order_by: :id, + select: [:id, :ap_id, :info] + }) + + query = + if opts[:max_id], + do: where(query, [u], u.id > ^opts[:max_id]), + else: query + + query = + if opts[:limit], + do: limit(query, ^opts[:limit]), + else: query + + Repo.all(query) + end + def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers), do: PleromaJobQueue.enqueue(:background, __MODULE__, [ diff --git a/lib/pleroma/user/query.ex b/lib/pleroma/user/query.ex index ace9c05f2..f9bcc9e19 100644 --- a/lib/pleroma/user/query.ex +++ b/lib/pleroma/user/query.ex @@ -7,7 +7,7 @@ defmodule Pleroma.User.Query do User query builder module. Builds query from new query or another user query. ## Example: - query = Pleroma.User.Query(%{nickname: "nickname"}) + query = Pleroma.User.Query.build(%{nickname: "nickname"}) another_query = Pleroma.User.Query.build(query, %{email: "email@example.com"}) Pleroma.Repo.all(query) Pleroma.Repo.all(another_query) @@ -47,7 +47,10 @@ defmodule Pleroma.User.Query do friends: User.t(), recipients_from_activity: [String.t()], nickname: [String.t()], - ap_id: [String.t()] + ap_id: [String.t()], + order_by: term(), + select: term(), + limit: pos_integer() } | %{} @@ -141,6 +144,18 @@ defmodule Pleroma.User.Query do where(query, [u], u.ap_id in ^to or fragment("? 
&& ?", u.following, ^to)) end + defp compose_query({:order_by, key}, query) do + order_by(query, [u], field(u, ^key)) + end + + defp compose_query({:select, keys}, query) do + select(query, [u], ^keys) + end + + defp compose_query({:limit, limit}, query) do + limit(query, ^limit) + end + defp compose_query(_unsupported_param, query), do: query defp prepare_tag_criteria(tag, query) do diff --git a/lib/pleroma/user/synchronization.ex b/lib/pleroma/user/synchronization.ex new file mode 100644 index 000000000..93660e08c --- /dev/null +++ b/lib/pleroma/user/synchronization.ex @@ -0,0 +1,60 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2018 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.User.Synchronization do + alias Pleroma.HTTP + alias Pleroma.User + + @spec call([User.t()], map(), keyword()) :: {User.t(), map()} + def call(users, errors, opts \\ []) do + do_call(users, errors, opts) + end + + defp do_call([user | []], errors, opts) do + updated = fetch_counters(user, errors, opts) + {user, updated} + end + + defp do_call([user | others], errors, opts) do + updated = fetch_counters(user, errors, opts) + do_call(others, updated, opts) + end + + defp fetch_counters(user, errors, opts) do + %{host: host} = URI.parse(user.ap_id) + + info = %{} + {following, errors} = fetch_counter(user.ap_id <> "/following", host, errors, opts) + info = if following, do: Map.put(info, :following_count, following), else: info + + {followers, errors} = fetch_counter(user.ap_id <> "/followers", host, errors, opts) + info = if followers, do: Map.put(info, :follower_count, followers), else: info + + User.set_info_cache(user, info) + errors + end + + defp available_domain?(domain, errors, opts) do + max_retries = Keyword.get(opts, :max_retries, 3) + not (Map.has_key?(errors, domain) && errors[domain] >= max_retries) + end + + defp fetch_counter(url, host, errors, opts) do + with true <- available_domain?(host, errors, opts), + {:ok, %{body: body, status: code}} when code in 200..299 <- + HTTP.get( + url, + [{:Accept, "application/activity+json"}] + ), + {:ok, data} <- Jason.decode(body) do + {data["totalItems"], errors} + else + false -> + {nil, errors} + + _ -> + {nil, Map.update(errors, host, 1, &(&1 + 1))} + end + end +end diff --git a/lib/pleroma/user/synchronization_worker.ex b/lib/pleroma/user/synchronization_worker.ex new file mode 100644 index 000000000..ba9cc3556 --- /dev/null +++ b/lib/pleroma/user/synchronization_worker.ex @@ -0,0 +1,32 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2018 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-onl + +defmodule Pleroma.User.SynchronizationWorker do + use GenServer + + def start_link do + config = Pleroma.Config.get([:instance, :external_user_synchronization]) + + if config[:enabled] do + GenServer.start_link(__MODULE__, interval: config[:interval]) + else + :ignore + end + end + + def init(opts) do + schedule_next(opts) + {:ok, opts} + end + + def handle_info(:sync_follow_counters, opts) do + Pleroma.User.sync_follow_counter() + schedule_next(opts) + {:noreply, opts} + end + + defp schedule_next(opts) do + Process.send_after(self(), :sync_follow_counters, opts[:interval]) + end +end -- cgit v1.2.3