79 files changed, 2259 insertions, 365 deletions
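Most of the new runtime configuration in this changeset lands in `config/config.exs` (see the hunk further down). A minimal sketch of the new keys, using the defaults shipped in that hunk — `sleep_interval_ms` is not set there but is documented below to default to `0`:

```elixir
# Force-enable/disable the hashtags-table-backed timeline, or leave it to the
# HashtagsTableMigrator (`:auto` is switched to `:enabled` once the migration completes).
config :pleroma, :features, improved_hashtag_timeline: :auto

# Background migration tuning: tolerated failure ratio (0.0..1.0) and an optional
# per-chunk sleep to reduce load while the migration runs.
config :pleroma, :populate_hashtags_table,
  fault_rate_allowance: 0.01,
  sleep_interval_ms: 0
```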
diff --git a/CHANGELOG.md b/CHANGELOG.md index 50484aaef..43f2bb638 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ## Unreleased (Patch) +### Fixed + +- Try to save exported ConfigDB settings (migrate_from_db) in the system temp directory if default location is not writable. + ## [2.3.0] - 2020-03-01 ### Security @@ -24,6 +28,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - **Breaking**: Changed `mix pleroma.user toggle_confirmed` to `mix pleroma.user confirm` - **Breaking**: Changed `mix pleroma.user toggle_activated` to `mix pleroma.user activate/deactivate` +- **Breaking:** NSFW hashtag is no longer added on sensitive posts - Polls now always return a `voters_count`, even if they are single-choice. - Admin Emails: The ap id is used as the user link in emails now. - Improved registration workflow for email confirmation and account approval modes. @@ -50,6 +55,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - Pleroma API: Reroute `/api/pleroma/*` to `/api/v1/pleroma/*` </details> +- Improved hashtag timeline performance (requires a background migration). ### Added @@ -69,6 +75,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - Ability to define custom HTTP headers per each frontend - MRF (`NoEmptyPolicy`): New MRF Policy which will deny empty statuses or statuses of only mentions from being created by local users - New users will receive a simple email confirming their registration if no other emails will be dispatched. (e.g., Welcome, Confirmation, or Approval Required) +- MRF (`FollowBotPolicy`): New MRF Policy which makes a designated local Bot account attempt to follow all users in public Notes received by your instance. Users who require approving follower requests or have #nobot in their profile are excluded. 
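For the two MRF changes mentioned in the changelog entries above, a minimal config sketch follows. `HashtagPolicy` is always active and replaces the old implicit `#nsfw` tagging, while `FollowBotPolicy` must be added to the policy list explicitly; the `"followbot"` nickname is only illustrative and must match an existing local account with `actor_type: "Service"`.

```elixir
# Hashtag-based rejection / unlisting / sensitivity marking (always enabled;
# set the lists to [] to effectively disable it). Defaults from config/config.exs:
config :pleroma, :mrf_hashtag,
  sensitive: ["nsfw"],
  reject: [],
  federated_timeline_removal: []

# Opt-in follow bot: the policy only runs if it is listed under :mrf, :policies
# and the nickname points at a local "Service" account.
config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.FollowBotPolicy]

config :pleroma, :mrf_follow_bot, follower_nickname: "followbot"
```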
<details> <summary>API Changes</summary> @@ -505,7 +512,6 @@ switched to a new configuration mechanism, however it was not officially removed - Static-FE: Fix remote posts not being sanitized ### Fixed -======= - Rate limiter crashes when there is no explicitly specified ip in the config - 500 errors when no `Accept` header is present if Static-FE is enabled - Instance panel not being updated immediately due to wrong `Cache-Control` headers diff --git a/config/config.exs b/config/config.exs index 66aee3264..4381068ac 100644 --- a/config/config.exs +++ b/config/config.exs @@ -391,6 +391,11 @@ config :pleroma, :mrf_keyword, federated_timeline_removal: [], replace: [] +config :pleroma, :mrf_hashtag, + sensitive: ["nsfw"], + reject: [], + federated_timeline_removal: [] + config :pleroma, :mrf_subchain, match_actor: %{} config :pleroma, :mrf_activity_expiration, days: 365 @@ -404,6 +409,8 @@ config :pleroma, :mrf_object_age, threshold: 604_800, actions: [:delist, :strip_followers] +config :pleroma, :mrf_follow_bot, follower_nickname: nil + config :pleroma, :rich_media, enabled: true, ignore_hosts: [], @@ -654,6 +661,10 @@ config :pleroma, :oauth2, config :pleroma, :database, rum_enabled: false +config :pleroma, :features, improved_hashtag_timeline: :auto + +config :pleroma, :populate_hashtags_table, fault_rate_allowance: 0.01 + config :pleroma, :env, Mix.env() config :http_signatures, diff --git a/config/description.exs b/config/description.exs index d9b15e684..bb1f43305 100644 --- a/config/description.exs +++ b/config/description.exs @@ -461,6 +461,42 @@ config :pleroma, :config_description, [ }, %{ group: :pleroma, + key: :features, + type: :group, + description: "Customizable features", + children: [ + %{ + key: :improved_hashtag_timeline, + type: {:dropdown, :atom}, + description: + "Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).", + suggestions: [:auto, :enabled, :disabled] + } + ] + }, + %{ + group: :pleroma, + key: :populate_hashtags_table, + type: :group, + description: "`populate_hashtags_table` background migration settings", + children: [ + %{ + key: :fault_rate_allowance, + type: :float, + description: + "Max accepted rate of objects that failed in the migration. Any value from 0.0 which tolerates no errors to 1.0 which will enable the feature even if hashtags transfer failed for all records.", + suggestions: [0.01] + }, + %{ + key: :sleep_interval_ms, + type: :integer, + description: + "Sleep interval between each chunk of processed records in order to decrease the load on the system (defaults to 0 and should be keep default on most instances)." 
+ } + ] + }, + %{ + group: :pleroma, key: :instance, type: :group, description: "Instance-related settings", @@ -2908,6 +2944,23 @@ config :pleroma, :config_description, [ }, %{ group: :pleroma, + key: :mrf_follow_bot, + tab: :mrf, + related_policy: "Pleroma.Web.ActivityPub.MRF.FollowBotPolicy", + label: "MRF FollowBot Policy", + type: :group, + description: "Automatically follows newly discovered accounts.", + children: [ + %{ + key: :follower_nickname, + type: :string, + description: "The name of the bot account to use for following newly discovered users.", + suggestions: ["followbot"] + } + ] + }, + %{ + group: :pleroma, key: :modules, type: :group, description: "Custom Runtime Modules", diff --git a/docs/administration/CLI_tasks/config.md b/docs/administration/CLI_tasks/config.md index 000ed4d98..fc9f3cbd5 100644 --- a/docs/administration/CLI_tasks/config.md +++ b/docs/administration/CLI_tasks/config.md @@ -32,16 +32,20 @@ config :pleroma, configurable_from_database: false ``` -To delete transferred settings from database optional flag `-d` can be used. `<env>` is `prod` by default. +Options: + +- `<path>` - where to save migrated config. E.g. `--path=/tmp`. If file saved into non standart folder, you must manually copy file into directory where Pleroma can read it. For OTP install path will be `PLEROMA_CONFIG_PATH` or `/etc/pleroma`. For installation from source - `config` directory in the pleroma folder. +- `<env>` - environment, for which is migrated config. By default is `prod`. +- To delete transferred settings from database optional flag `-d` can be used === "OTP" ```sh - ./bin/pleroma_ctl config migrate_from_db [--env=<env>] [-d] + ./bin/pleroma_ctl config migrate_from_db [--env=<env>] [-d] [--path=<path>] ``` === "From Source" ```sh - mix pleroma.config migrate_from_db [--env=<env>] [-d] + mix pleroma.config migrate_from_db [--env=<env>] [-d] [--path=<path>] ``` ## Dump all of the config settings defined in the database diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index 028c5e91d..069421722 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -65,6 +65,13 @@ To add configuration to your config file, you can copy it from the base config. * `show_reactions`: Let favourites and emoji reactions be viewed through the API (default: `true`). * `password_reset_token_validity`: The time after which reset tokens aren't accepted anymore, in seconds (default: one day). +## :database +* `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes). + +## Background migrations +* `populate_hashtags_table/sleep_interval_ms`: Sleep interval between each chunk of processed records in order to decrease the load on the system (defaults to 0 and should be keep default on most instances). +* `populate_hashtags_table/fault_rate_allowance`: Max rate of failed objects to actually processed objects in order to enable the feature (any value from 0.0 which tolerates no errors to 1.0 which will enable the feature even if hashtags transfer failed for all records). + ## Welcome * `direct_message`: - welcome message sent as a direct message. * `enabled`: Enables the send a direct message to a newly registered user. 
Defaults to `false`. @@ -117,6 +124,7 @@ To add configuration to your config file, you can copy it from the base config. * `Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy`: Rejects or delists posts based on their age when received. (See [`:mrf_object_age`](#mrf_object_age)). * `Pleroma.Web.ActivityPub.MRF.ActivityExpirationPolicy`: Sets a default expiration on all posts made by users of the local instance. Requires `Pleroma.Workers.PurgeExpiredActivity` to be enabled for processing the scheduled delections. * `Pleroma.Web.ActivityPub.MRF.ForceBotUnlistedPolicy`: Makes all bot posts to disappear from public timelines. + * `Pleroma.Web.ActivityPub.MRF.FollowBotPolicy`: Automatically follows newly discovered users from the specified bot account. Local accounts, locked accounts, and users with "#nobot" in their bio are respected and excluded from being followed. * `transparency`: Make the content of your Message Rewrite Facility settings public (via nodeinfo). * `transparency_exclusions`: Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value. @@ -203,6 +211,21 @@ config :pleroma, :mrf_user_allowlist, %{ * `days`: Default global expiration time for all local Create activities (in days) +#### :mrf_hashtag + +* `sensitive`: List of hashtags to mark activities as sensitive (default: `nsfw`) +* `federated_timeline_removal`: List of hashtags to remove activities from the federated timeline (aka TWNK) +* `reject`: List of hashtags to reject activities from + +Notes: +- The hashtags in the configuration do not have a leading `#`. +- This MRF Policy is always enabled, if you want to disable it you have to set empty lists + +#### :mrf_follow_bot + +* `follower_nickname`: The name of the bot account to use for following newly discovered users. Using `followbot` or similar is strongly suggested. + + ### :activitypub * `unfollow_blocked`: Whether blocks result in people getting unfollowed * `outgoing_blocks`: Whether to federate blocks to other instances diff --git a/lib/mix/tasks/pleroma/config.ex b/lib/mix/tasks/pleroma/config.ex index 1962154b9..22502a522 100644 --- a/lib/mix/tasks/pleroma/config.ex +++ b/lib/mix/tasks/pleroma/config.ex @@ -27,7 +27,7 @@ defmodule Mix.Tasks.Pleroma.Config do {opts, _} = OptionParser.parse!(options, - strict: [env: :string, delete: :boolean], + strict: [env: :string, delete: :boolean, path: :string], aliases: [d: :delete] ) @@ -259,18 +259,43 @@ defmodule Mix.Tasks.Pleroma.Config do defp migrate_from_db(opts) do env = opts[:env] || Pleroma.Config.get(:env) + filename = "#{env}.exported_from_db.secret.exs" + config_path = - if Pleroma.Config.get(:release) do - :config_path - |> Pleroma.Config.get() - |> Path.dirname() - else - "config" + cond do + opts[:path] -> + opts[:path] + + Pleroma.Config.get(:release) -> + :config_path + |> Pleroma.Config.get() + |> Path.dirname() + + true -> + "config" end - |> Path.join("#{env}.exported_from_db.secret.exs") + |> Path.join(filename) - file = File.open!(config_path, [:write, :utf8]) + with {:ok, file} <- File.open(config_path, [:write, :utf8]) do + write_config(file, config_path, opts) + shell_info("Database configuration settings have been exported to #{config_path}") + else + _ -> + shell_error("Impossible to save settings to this directory #{Path.dirname(config_path)}") + tmp_config_path = Path.join(System.tmp_dir!(), filename) + file = File.open!(tmp_config_path) + + shell_info( + "Saving database configuration settings to #{tmp_config_path}. 
Copy it to the #{ + Path.dirname(config_path) + } manually." + ) + write_config(file, tmp_config_path, opts) + end + end + + defp write_config(file, path, opts) do IO.write(file, config_header()) ConfigDB @@ -278,11 +303,7 @@ defmodule Mix.Tasks.Pleroma.Config do |> Enum.each(&write_and_delete(&1, file, opts[:delete])) :ok = File.close(file) - System.cmd("mix", ["format", config_path]) - - shell_info( - "Database configuration settings have been exported to config/#{env}.exported_from_db.secret.exs" - ) + System.cmd("mix", ["format", path]) end if Code.ensure_loaded?(Config.Reader) do diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex index 2403ed581..e7f4b67a4 100644 --- a/lib/mix/tasks/pleroma/database.ex +++ b/lib/mix/tasks/pleroma/database.ex @@ -8,10 +8,13 @@ defmodule Mix.Tasks.Pleroma.Database do alias Pleroma.Object alias Pleroma.Repo alias Pleroma.User + require Logger require Pleroma.Constants + import Ecto.Query import Mix.Pleroma + use Mix.Task @shortdoc "A collection of database related tasks" @@ -214,4 +217,32 @@ defmodule Mix.Tasks.Pleroma.Database do shell_info('Done.') end end + + # Rolls back a specific migration (leaving subsequent migrations applied). + # WARNING: imposes a risk of unrecoverable data loss — proceed at your own responsibility. + # Based on https://stackoverflow.com/a/53825840 + def run(["rollback", version]) do + prompt = "SEVERE WARNING: this operation may result in unrecoverable data loss. Continue?" + + if shell_prompt(prompt, "n") in ~w(Yn Y y) do + {_, result, _} = + Ecto.Migrator.with_repo(Pleroma.Repo, fn repo -> + version = String.to_integer(version) + re = ~r/^#{version}_.*\.exs/ + path = Ecto.Migrator.migrations_path(repo) + + with {_, "" <> file} <- {:find, Enum.find(File.ls!(path), &String.match?(&1, re))}, + {_, [{mod, _} | _]} <- {:compile, Code.compile_file(Path.join(path, file))}, + {_, :ok} <- {:rollback, Ecto.Migrator.down(repo, version, mod)} do + {:ok, "Reversed migration: #{file}"} + else + {:find, _} -> {:error, "No migration found with version prefix: #{version}"} + {:compile, e} -> {:error, "Problem compiling migration module: #{inspect(e)}"} + {:rollback, e} -> {:error, "Problem reversing migration: #{inspect(e)}"} + end + end) + + shell_info(inspect(result)) + end + end end diff --git a/lib/pleroma/activity.ex b/lib/pleroma/activity.ex index 6542e684e..d59403884 100644 --- a/lib/pleroma/activity.ex +++ b/lib/pleroma/activity.ex @@ -113,6 +113,7 @@ defmodule Pleroma.Activity do from([a] in query, left_join: b in Bookmark, on: b.user_id == ^user.id and b.activity_id == a.id, + as: :bookmark, preload: [bookmark: b] ) end @@ -123,6 +124,7 @@ defmodule Pleroma.Activity do from([a] in query, left_join: r in ReportNote, on: a.id == r.activity_id, + as: :report_note, preload: [report_notes: r] ) end diff --git a/lib/pleroma/activity/ir/topics.ex b/lib/pleroma/activity/ir/topics.ex index d94395fc1..7a603a615 100644 --- a/lib/pleroma/activity/ir/topics.ex +++ b/lib/pleroma/activity/ir/topics.ex @@ -48,14 +48,12 @@ defmodule Pleroma.Activity.Ir.Topics do tags end - defp hashtags_to_topics(%{data: %{"tag" => tags}}) do - tags - |> Enum.filter(&is_bitstring(&1)) - |> Enum.map(fn tag -> "hashtag:" <> tag end) + defp hashtags_to_topics(object) do + object + |> Object.hashtags() + |> Enum.map(fn hashtag -> "hashtag:" <> hashtag end) end - defp hashtags_to_topics(_), do: [] - defp remote_topics(%{local: true}), do: [] defp remote_topics(%{actor: actor}) when is_binary(actor), diff --git 
a/lib/pleroma/application.ex b/lib/pleroma/application.ex index c853a2bb4..06d399b2e 100644 --- a/lib/pleroma/application.ex +++ b/lib/pleroma/application.ex @@ -103,9 +103,7 @@ defmodule Pleroma.Application do task_children(@mix_env) ++ dont_run_in_test(@mix_env) ++ chat_child(chat_enabled?()) ++ - [ - Pleroma.Gopher.Server - ] + [Pleroma.Gopher.Server] # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html # for other strategies and supported options @@ -230,6 +228,12 @@ defmodule Pleroma.Application do keys: :duplicate, partitions: System.schedulers_online() ]} + ] ++ background_migrators() + end + + defp background_migrators do + [ + Pleroma.Migrators.HashtagsTableMigrator ] end diff --git a/lib/pleroma/config.ex b/lib/pleroma/config.ex index 2e15a3719..54e332595 100644 --- a/lib/pleroma/config.ex +++ b/lib/pleroma/config.ex @@ -99,4 +99,8 @@ defmodule Pleroma.Config do def oauth_consumer_strategies, do: get([:auth, :oauth_consumer_strategies], []) def oauth_consumer_enabled?, do: oauth_consumer_strategies() != [] + + def feature_enabled?(feature_name) do + get([:features, feature_name]) not in [nil, false, :disabled, :auto] + end end diff --git a/lib/pleroma/data_migration.ex b/lib/pleroma/data_migration.ex new file mode 100644 index 000000000..1377af16e --- /dev/null +++ b/lib/pleroma/data_migration.ex @@ -0,0 +1,45 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.DataMigration do + use Ecto.Schema + + alias Pleroma.DataMigration + alias Pleroma.DataMigration.State + alias Pleroma.Repo + + import Ecto.Changeset + import Ecto.Query + + schema "data_migrations" do + field(:name, :string) + field(:state, State, default: :pending) + field(:feature_lock, :boolean, default: false) + field(:params, :map, default: %{}) + field(:data, :map, default: %{}) + + timestamps() + end + + def changeset(data_migration, params \\ %{}) do + data_migration + |> cast(params, [:name, :state, :feature_lock, :params, :data]) + |> validate_required([:name]) + |> unique_constraint(:name) + end + + def update_one_by_id(id, params \\ %{}) do + with {1, _} <- + from(dm in DataMigration, where: dm.id == ^id) + |> Repo.update_all(set: params) do + :ok + end + end + + def get_by_name(name) do + Repo.get_by(DataMigration, name: name) + end + + def populate_hashtags_table, do: get_by_name("populate_hashtags_table") +end diff --git a/lib/pleroma/delivery.ex b/lib/pleroma/delivery.ex index e8d536767..511d5cf58 100644 --- a/lib/pleroma/delivery.ex +++ b/lib/pleroma/delivery.ex @@ -9,7 +9,6 @@ defmodule Pleroma.Delivery do alias Pleroma.Object alias Pleroma.Repo alias Pleroma.User - alias Pleroma.User import Ecto.Changeset import Ecto.Query diff --git a/lib/pleroma/ecto_enums.ex b/lib/pleroma/ecto_enums.ex index f198cccb7..2a9addabc 100644 --- a/lib/pleroma/ecto_enums.ex +++ b/lib/pleroma/ecto_enums.ex @@ -17,3 +17,11 @@ defenum(Pleroma.FollowingRelationship.State, follow_accept: 2, follow_reject: 3 ) + +defenum(Pleroma.DataMigration.State, + pending: 1, + running: 2, + complete: 3, + failed: 4, + manual: 5 +) diff --git a/lib/pleroma/hashtag.ex b/lib/pleroma/hashtag.ex new file mode 100644 index 000000000..53e2e9c89 --- /dev/null +++ b/lib/pleroma/hashtag.ex @@ -0,0 +1,106 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Hashtag do + use 
Ecto.Schema + + import Ecto.Changeset + import Ecto.Query + + alias Ecto.Multi + alias Pleroma.Hashtag + alias Pleroma.Object + alias Pleroma.Repo + + schema "hashtags" do + field(:name, :string) + + many_to_many(:objects, Object, join_through: "hashtags_objects", on_replace: :delete) + + timestamps() + end + + def normalize_name(name) do + name + |> String.downcase() + |> String.trim() + end + + def get_or_create_by_name(name) do + changeset = changeset(%Hashtag{}, %{name: name}) + + Repo.insert( + changeset, + on_conflict: [set: [name: get_field(changeset, :name)]], + conflict_target: :name, + returning: true + ) + end + + def get_or_create_by_names(names) when is_list(names) do + names = Enum.map(names, &normalize_name/1) + timestamp = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second) + + structs = + Enum.map(names, fn name -> + %Hashtag{} + |> changeset(%{name: name}) + |> Map.get(:changes) + |> Map.merge(%{inserted_at: timestamp, updated_at: timestamp}) + end) + + try do + with {:ok, %{query_op: hashtags}} <- + Multi.new() + |> Multi.insert_all(:insert_all_op, Hashtag, structs, + on_conflict: :nothing, + conflict_target: :name + ) + |> Multi.run(:query_op, fn _repo, _changes -> + {:ok, Repo.all(from(ht in Hashtag, where: ht.name in ^names))} + end) + |> Repo.transaction() do + {:ok, hashtags} + else + {:error, _name, value, _changes_so_far} -> {:error, value} + end + rescue + e -> {:error, e} + end + end + + def changeset(%Hashtag{} = struct, params) do + struct + |> cast(params, [:name]) + |> update_change(:name, &normalize_name/1) + |> validate_required([:name]) + |> unique_constraint(:name) + end + + def unlink(%Object{id: object_id}) do + with {_, hashtag_ids} <- + from(hto in "hashtags_objects", + where: hto.object_id == ^object_id, + select: hto.hashtag_id + ) + |> Repo.delete_all(), + {:ok, unreferenced_count} <- delete_unreferenced(hashtag_ids) do + {:ok, length(hashtag_ids), unreferenced_count} + end + end + + @delete_unreferenced_query """ + DELETE FROM hashtags WHERE id IN + (SELECT hashtags.id FROM hashtags + LEFT OUTER JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id + WHERE hashtags_objects.hashtag_id IS NULL AND hashtags.id = ANY($1)); + """ + + def delete_unreferenced(ids) do + with {:ok, %{num_rows: deleted_count}} <- Repo.query(@delete_unreferenced_query, [ids]) do + {:ok, deleted_count} + end + end +end diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex new file mode 100644 index 000000000..b84058e11 --- /dev/null +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -0,0 +1,208 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Migrators.HashtagsTableMigrator do + defmodule State do + use Pleroma.Migrators.Support.BaseMigratorState + + @impl Pleroma.Migrators.Support.BaseMigratorState + defdelegate data_migration(), to: Pleroma.DataMigration, as: :populate_hashtags_table + end + + use Pleroma.Migrators.Support.BaseMigrator + + alias Pleroma.Hashtag + alias Pleroma.Migrators.Support.BaseMigrator + alias Pleroma.Object + + @impl BaseMigrator + def feature_config_path, do: [:features, :improved_hashtag_timeline] + + @impl BaseMigrator + def fault_rate_allowance, do: Config.get([:populate_hashtags_table, :fault_rate_allowance], 0) + + @impl BaseMigrator + def perform do + data_migration_id = data_migration_id() + max_processed_id = 
get_stat(:max_processed_id, 0) + + Logger.info("Transferring embedded hashtags to `hashtags` (from oid: #{max_processed_id})...") + + query() + |> where([object], object.id > ^max_processed_id) + |> Repo.chunk_stream(100, :batches, timeout: :infinity) + |> Stream.each(fn objects -> + object_ids = Enum.map(objects, & &1.id) + + results = Enum.map(objects, &transfer_object_hashtags(&1)) + + failed_ids = + results + |> Enum.filter(&(elem(&1, 0) == :error)) + |> Enum.map(&elem(&1, 1)) + + # Count of objects with hashtags: `{:noop, id}` is returned for objects having other AS2 tags + chunk_affected_count = + results + |> Enum.filter(&(elem(&1, 0) == :ok)) + |> length() + + for failed_id <- failed_ids do + _ = + Repo.query( + "INSERT INTO data_migration_failed_ids(data_migration_id, record_id) " <> + "VALUES ($1, $2) ON CONFLICT DO NOTHING;", + [data_migration_id, failed_id] + ) + end + + _ = + Repo.query( + "DELETE FROM data_migration_failed_ids " <> + "WHERE data_migration_id = $1 AND record_id = ANY($2)", + [data_migration_id, object_ids -- failed_ids] + ) + + max_object_id = Enum.at(object_ids, -1) + + put_stat(:max_processed_id, max_object_id) + increment_stat(:iteration_processed_count, length(object_ids)) + increment_stat(:processed_count, length(object_ids)) + increment_stat(:failed_count, length(failed_ids)) + increment_stat(:affected_count, chunk_affected_count) + put_stat(:records_per_second, records_per_second()) + persist_state() + + # A quick and dirty approach to controlling the load this background migration imposes + sleep_interval = Config.get([:populate_hashtags_table, :sleep_interval_ms], 0) + Process.sleep(sleep_interval) + end) + |> Stream.run() + end + + @impl BaseMigrator + def query do + # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out) + # Note: not checking activity type, expecting remove_non_create_objects_hashtags/_ to clean up + from( + object in Object, + where: + fragment("(?)->'tag' IS NOT NULL AND (?)->'tag' != '[]'::jsonb", object.data, object.data), + select: %{ + id: object.id, + tag: fragment("(?)->'tag'", object.data) + } + ) + |> join(:left, [o], hashtags_objects in fragment("SELECT object_id FROM hashtags_objects"), + on: hashtags_objects.object_id == o.id + ) + |> where([_o, hashtags_objects], is_nil(hashtags_objects.object_id)) + end + + @spec transfer_object_hashtags(Map.t()) :: {:noop | :ok | :error, integer()} + defp transfer_object_hashtags(object) do + embedded_tags = if Map.has_key?(object, :tag), do: object.tag, else: object.data["tag"] + hashtags = Object.object_data_hashtags(%{"tag" => embedded_tags}) + + if Enum.any?(hashtags) do + transfer_object_hashtags(object, hashtags) + else + {:noop, object.id} + end + end + + defp transfer_object_hashtags(object, hashtags) do + Repo.transaction(fn -> + with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do + maps = Enum.map(hashtag_records, &%{hashtag_id: &1.id, object_id: object.id}) + base_error = "ERROR when inserting hashtags_objects for object with id #{object.id}" + + try do + with {rows_count, _} when is_integer(rows_count) <- + Repo.insert_all("hashtags_objects", maps, on_conflict: :nothing) do + object.id + else + e -> + Logger.error("#{base_error}: #{inspect(e)}") + Repo.rollback(object.id) + end + rescue + e -> + Logger.error("#{base_error}: #{inspect(e)}") + Repo.rollback(object.id) + end + else + e -> + error = "ERROR: could not create hashtags for object #{object.id}: #{inspect(e)}" + Logger.error(error) + 
Repo.rollback(object.id) + end + end) + end + + @impl BaseMigrator + def retry_failed do + data_migration_id = data_migration_id() + + failed_objects_query() + |> Repo.chunk_stream(100, :one) + |> Stream.each(fn object -> + with {res, _} when res != :error <- transfer_object_hashtags(object) do + _ = + Repo.query( + "DELETE FROM data_migration_failed_ids " <> + "WHERE data_migration_id = $1 AND record_id = $2", + [data_migration_id, object.id] + ) + end + end) + |> Stream.run() + + put_stat(:failed_count, failures_count()) + persist_state() + + force_continue() + end + + defp failed_objects_query do + from(o in Object) + |> join(:inner, [o], dmf in fragment("SELECT * FROM data_migration_failed_ids"), + on: dmf.record_id == o.id + ) + |> where([_o, dmf], dmf.data_migration_id == ^data_migration_id()) + |> order_by([o], asc: o.id) + end + + @doc """ + Service func to delete `hashtags_objects` for legacy objects not associated with Create activity. + Also deletes unreferenced `hashtags` records (might occur after deletion of `hashtags_objects`). + """ + def delete_non_create_activities_hashtags do + hashtags_objects_cleanup_query = """ + DELETE FROM hashtags_objects WHERE object_id IN + (SELECT DISTINCT objects.id FROM objects + JOIN hashtags_objects ON hashtags_objects.object_id = objects.id LEFT JOIN activities + ON COALESCE(activities.data->'object'->>'id', activities.data->>'object') = + (objects.data->>'id') + AND activities.data->>'type' = 'Create' + WHERE activities.id IS NULL); + """ + + hashtags_cleanup_query = """ + DELETE FROM hashtags WHERE id IN + (SELECT hashtags.id FROM hashtags + LEFT OUTER JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id + WHERE hashtags_objects.hashtag_id IS NULL); + """ + + {:ok, %{num_rows: hashtags_objects_count}} = + Repo.query(hashtags_objects_cleanup_query, [], timeout: :infinity) + + {:ok, %{num_rows: hashtags_count}} = + Repo.query(hashtags_cleanup_query, [], timeout: :infinity) + + {:ok, hashtags_objects_count, hashtags_count} + end +end diff --git a/lib/pleroma/migrators/support/base_migrator.ex b/lib/pleroma/migrators/support/base_migrator.ex new file mode 100644 index 000000000..1f8a5402b --- /dev/null +++ b/lib/pleroma/migrators/support/base_migrator.ex @@ -0,0 +1,210 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Migrators.Support.BaseMigrator do + @moduledoc """ + Base background migrator functionality. 
+ """ + + @callback perform() :: any() + @callback retry_failed() :: any() + @callback feature_config_path() :: list(atom()) + @callback query() :: Ecto.Query.t() + @callback fault_rate_allowance() :: integer() | float() + + defmacro __using__(_opts) do + quote do + use GenServer + + require Logger + + import Ecto.Query + + alias __MODULE__.State + alias Pleroma.Config + alias Pleroma.Repo + + @behaviour Pleroma.Migrators.Support.BaseMigrator + + defdelegate data_migration(), to: State + defdelegate data_migration_id(), to: State + defdelegate state(), to: State + defdelegate persist_state(), to: State, as: :persist_to_db + defdelegate get_stat(key, value \\ nil), to: State, as: :get_data_key + defdelegate put_stat(key, value), to: State, as: :put_data_key + defdelegate increment_stat(key, increment), to: State, as: :increment_data_key + + @reg_name {:global, __MODULE__} + + def whereis, do: GenServer.whereis(@reg_name) + + def start_link(_) do + case whereis() do + nil -> + GenServer.start_link(__MODULE__, nil, name: @reg_name) + + pid -> + {:ok, pid} + end + end + + @impl true + def init(_) do + {:ok, nil, {:continue, :init_state}} + end + + @impl true + def handle_continue(:init_state, _state) do + {:ok, _} = State.start_link(nil) + + data_migration = data_migration() + manual_migrations = Config.get([:instance, :manual_data_migrations], []) + + cond do + Config.get(:env) == :test -> + update_status(:noop) + + is_nil(data_migration) -> + message = "Data migration does not exist." + update_status(:failed, message) + Logger.error("#{__MODULE__}: #{message}") + + data_migration.state == :manual or data_migration.name in manual_migrations -> + message = "Data migration is in manual execution or manual fix mode." + update_status(:manual, message) + Logger.warn("#{__MODULE__}: #{message}") + + data_migration.state == :complete -> + on_complete(data_migration) + + true -> + send(self(), :perform) + end + + {:noreply, nil} + end + + @impl true + def handle_info(:perform, state) do + State.reinit() + + update_status(:running) + put_stat(:iteration_processed_count, 0) + put_stat(:started_at, NaiveDateTime.utc_now()) + + perform() + + fault_rate = fault_rate() + put_stat(:fault_rate, fault_rate) + fault_rate_allowance = fault_rate_allowance() + + cond do + fault_rate == 0 -> + set_complete() + + is_float(fault_rate) and fault_rate <= fault_rate_allowance -> + message = """ + Done with fault rate of #{fault_rate} which doesn't exceed #{fault_rate_allowance}. + Putting data migration to manual fix mode. Try running `#{__MODULE__}.retry_failed/0`. + """ + + Logger.warn("#{__MODULE__}: #{message}") + update_status(:manual, message) + on_complete(data_migration()) + + true -> + message = "Too many failures. Try running `#{__MODULE__}.retry_failed/0`." + Logger.error("#{__MODULE__}: #{message}") + update_status(:failed, message) + end + + persist_state() + {:noreply, state} + end + + defp on_complete(data_migration) do + if data_migration.feature_lock || feature_state() == :disabled do + Logger.warn( + "#{__MODULE__}: migration complete but feature is locked; consider enabling." 
+ ) + + :noop + else + Config.put(feature_config_path(), :enabled) + :ok + end + end + + @doc "Approximate count for current iteration (including processed records count)" + def count(force \\ false, timeout \\ :infinity) do + stored_count = get_stat(:count) + + if stored_count && !force do + stored_count + else + processed_count = get_stat(:processed_count, 0) + max_processed_id = get_stat(:max_processed_id, 0) + query = where(query(), [entity], entity.id > ^max_processed_id) + + count = Repo.aggregate(query, :count, :id, timeout: timeout) + processed_count + put_stat(:count, count) + persist_state() + + count + end + end + + def failures_count do + with {:ok, %{rows: [[count]]}} <- + Repo.query( + "SELECT COUNT(record_id) FROM data_migration_failed_ids WHERE data_migration_id = $1;", + [data_migration_id()] + ) do + count + end + end + + def feature_state, do: Config.get(feature_config_path()) + + def force_continue do + send(whereis(), :perform) + end + + def force_restart do + :ok = State.reset() + force_continue() + end + + def set_complete do + update_status(:complete) + persist_state() + on_complete(data_migration()) + end + + defp update_status(status, message \\ nil) do + put_stat(:state, status) + put_stat(:message, message) + end + + defp fault_rate do + with failures_count when is_integer(failures_count) <- failures_count() do + failures_count / Enum.max([get_stat(:affected_count, 0), 1]) + else + _ -> :error + end + end + + defp records_per_second do + get_stat(:iteration_processed_count, 0) / Enum.max([running_time(), 1]) + end + + defp running_time do + NaiveDateTime.diff( + NaiveDateTime.utc_now(), + get_stat(:started_at, NaiveDateTime.utc_now()) + ) + end + end + end +end diff --git a/lib/pleroma/migrators/support/base_migrator_state.ex b/lib/pleroma/migrators/support/base_migrator_state.ex new file mode 100644 index 000000000..b698587f2 --- /dev/null +++ b/lib/pleroma/migrators/support/base_migrator_state.ex @@ -0,0 +1,117 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Migrators.Support.BaseMigratorState do + @moduledoc """ + Base background migrator state functionality. 
+ """ + + @callback data_migration() :: Pleroma.DataMigration.t() + + defmacro __using__(_opts) do + quote do + use Agent + + alias Pleroma.DataMigration + + @behaviour Pleroma.Migrators.Support.BaseMigratorState + @reg_name {:global, __MODULE__} + + def start_link(_) do + Agent.start_link(fn -> load_state_from_db() end, name: @reg_name) + end + + def data_migration, do: raise("data_migration/0 is not implemented") + defoverridable data_migration: 0 + + defp load_state_from_db do + data_migration = data_migration() + + data = + if data_migration do + Map.new(data_migration.data, fn {k, v} -> {String.to_atom(k), v} end) + else + %{} + end + + %{ + data_migration_id: data_migration && data_migration.id, + data: data + } + end + + def persist_to_db do + %{data_migration_id: data_migration_id, data: data} = state() + + if data_migration_id do + DataMigration.update_one_by_id(data_migration_id, data: data) + else + {:error, :nil_data_migration_id} + end + end + + def reset do + %{data_migration_id: data_migration_id} = state() + + with false <- is_nil(data_migration_id), + :ok <- + DataMigration.update_one_by_id(data_migration_id, + state: :pending, + data: %{} + ) do + reinit() + else + true -> {:error, :nil_data_migration_id} + e -> e + end + end + + def reinit do + Agent.update(@reg_name, fn _state -> load_state_from_db() end) + end + + def state do + Agent.get(@reg_name, & &1) + end + + def get_data_key(key, default \\ nil) do + get_in(state(), [:data, key]) || default + end + + def put_data_key(key, value) do + _ = persist_non_data_change(key, value) + + Agent.update(@reg_name, fn state -> + put_in(state, [:data, key], value) + end) + end + + def increment_data_key(key, increment \\ 1) do + Agent.update(@reg_name, fn state -> + initial_value = get_in(state, [:data, key]) || 0 + updated_value = initial_value + increment + put_in(state, [:data, key], updated_value) + end) + end + + defp persist_non_data_change(:state, value) do + with true <- get_data_key(:state) != value, + true <- value in Pleroma.DataMigration.State.__valid_values__(), + %{data_migration_id: data_migration_id} when not is_nil(data_migration_id) <- + state() do + DataMigration.update_one_by_id(data_migration_id, state: value) + else + false -> :ok + _ -> {:error, :nil_data_migration_id} + end + end + + defp persist_non_data_change(_, _) do + nil + end + + def data_migration_id, do: Map.get(state(), :data_migration_id) + end + end +end diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index aaf123840..3ba749d1a 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -10,6 +10,7 @@ defmodule Pleroma.Object do alias Pleroma.Activity alias Pleroma.Config + alias Pleroma.Hashtag alias Pleroma.Object alias Pleroma.Object.Fetcher alias Pleroma.ObjectTombstone @@ -28,6 +29,8 @@ defmodule Pleroma.Object do schema "objects" do field(:data, :map) + many_to_many(:hashtags, Hashtag, join_through: "hashtags_objects", on_replace: :delete) + timestamps() end @@ -49,7 +52,8 @@ defmodule Pleroma.Object do end def create(data) do - Object.change(%Object{}, %{data: data}) + %Object{} + |> Object.change(%{data: data}) |> Repo.insert() end @@ -58,8 +62,41 @@ defmodule Pleroma.Object do |> cast(params, [:data]) |> validate_required([:data]) |> unique_constraint(:ap_id, name: :objects_unique_apid_index) + # Expecting `maybe_handle_hashtags_change/1` to run last: + |> maybe_handle_hashtags_change(struct) + end + + # Note: not checking activity type (assuming non-legacy objects are associated with Create act.) 
+ defp maybe_handle_hashtags_change(changeset, struct) do + with %Ecto.Changeset{valid?: true} <- changeset, + data_hashtags_change = get_change(changeset, :data), + {_, true} <- {:changed, hashtags_changed?(struct, data_hashtags_change)}, + {:ok, hashtag_records} <- + data_hashtags_change + |> object_data_hashtags() + |> Hashtag.get_or_create_by_names() do + put_assoc(changeset, :hashtags, hashtag_records) + else + %{valid?: false} -> + changeset + + {:changed, false} -> + changeset + + {:error, _} -> + validate_change(changeset, :data, fn _, _ -> + [data: "error referencing hashtags"] + end) + end + end + + defp hashtags_changed?(%Object{} = struct, %{"tag" => _} = data) do + Enum.sort(embedded_hashtags(struct)) != + Enum.sort(object_data_hashtags(data)) end + defp hashtags_changed?(_, _), do: false + def get_by_id(nil), do: nil def get_by_id(id), do: Repo.get(Object, id) @@ -187,9 +224,13 @@ defmodule Pleroma.Object do def swap_object_with_tombstone(object) do tombstone = make_tombstone(object) - object - |> Object.change(%{data: tombstone}) - |> Repo.update() + with {:ok, object} <- + object + |> Object.change(%{data: tombstone}) + |> Repo.update() do + Hashtag.unlink(object) + {:ok, object} + end end def delete(%Object{data: %{"id" => id}} = object) do @@ -349,4 +390,39 @@ defmodule Pleroma.Object do def self_replies(object, opts \\ []), do: replies(object, Keyword.put(opts, :self_only, true)) + + def tags(%Object{data: %{"tag" => tags}}) when is_list(tags), do: tags + + def tags(_), do: [] + + def hashtags(%Object{} = object) do + # Note: always using embedded hashtags regardless whether they are migrated to hashtags table + # (embedded hashtags stay in sync anyways, and we avoid extra joins and preload hassle) + embedded_hashtags(object) + end + + def embedded_hashtags(%Object{data: data}) do + object_data_hashtags(data) + end + + def embedded_hashtags(_), do: [] + + def object_data_hashtags(%{"tag" => tags}) when is_list(tags) do + tags + |> Enum.filter(fn + %{"type" => "Hashtag"} = data -> Map.has_key?(data, "name") + plain_text when is_bitstring(plain_text) -> true + _ -> false + end) + |> Enum.map(fn + %{"name" => "#" <> hashtag} -> String.downcase(hashtag) + %{"name" => hashtag} -> String.downcase(hashtag) + hashtag when is_bitstring(hashtag) -> String.downcase(hashtag) + end) + |> Enum.uniq() + # Note: "" elements (plain text) might occur in `data.tag` for incoming objects + |> Enum.filter(&(&1 not in [nil, ""])) + end + + def object_data_hashtags(_), do: [] end diff --git a/lib/pleroma/pagination.ex b/lib/pleroma/pagination.ex index 0d24e1010..33e45a0eb 100644 --- a/lib/pleroma/pagination.ex +++ b/lib/pleroma/pagination.ex @@ -93,6 +93,7 @@ defmodule Pleroma.Pagination do max_id: :string, offset: :integer, limit: :integer, + skip_extra_order: :boolean, skip_order: :boolean } @@ -114,6 +115,8 @@ defmodule Pleroma.Pagination do defp restrict(query, :order, %{skip_order: true}, _), do: query + defp restrict(%{order_bys: [_ | _]} = query, :order, %{skip_extra_order: true}, _), do: query + defp restrict(query, :order, %{min_id: _}, table_binding) do order_by( query, diff --git a/lib/pleroma/repo.ex b/lib/pleroma/repo.ex index 4556352d0..b8ea06e33 100644 --- a/lib/pleroma/repo.ex +++ b/lib/pleroma/repo.ex @@ -8,6 +8,8 @@ defmodule Pleroma.Repo do adapter: Ecto.Adapters.Postgres, migration_timestamps: [type: :naive_datetime_usec] + use Ecto.Explain + import Ecto.Query require Logger @@ -63,8 +65,8 @@ defmodule Pleroma.Repo do iex> 
Pleroma.Repo.chunk_stream(Pleroma.Activity.Queries.by_actor(ap_id), 500, :batches) """ @spec chunk_stream(Ecto.Query.t(), integer(), atom()) :: Enumerable.t() - def chunk_stream(query, chunk_size, returns_as \\ :one) do - # We don't actually need start and end funcitons of resource streaming, + def chunk_stream(query, chunk_size, returns_as \\ :one, query_options \\ []) do + # We don't actually need start and end functions of resource streaming, # but it seems to be the only way to not fetch records one-by-one and # have individual records be the elements of the stream, instead of # lists of records @@ -76,7 +78,7 @@ defmodule Pleroma.Repo do |> order_by(asc: :id) |> where([r], r.id > ^last_id) |> limit(^chunk_size) - |> all() + |> all(query_options) |> case do [] -> {:halt, last_id} diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 5b45e2ca1..efbf92c70 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -10,6 +10,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do alias Pleroma.Conversation alias Pleroma.Conversation.Participation alias Pleroma.Filter + alias Pleroma.Hashtag alias Pleroma.Maps alias Pleroma.Notification alias Pleroma.Object @@ -465,6 +466,23 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do |> Repo.one() end + defp fetch_paginated_optimized(query, opts, pagination) do + # Note: tag-filtering funcs may apply "ORDER BY objects.id DESC", + # and extra sorting on "activities.id DESC NULLS LAST" would worse the query plan + opts = Map.put(opts, :skip_extra_order, true) + + Pagination.fetch_paginated(query, opts, pagination) + end + + def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do + list_memberships = Pleroma.List.memberships(opts[:user]) + + fetch_activities_query(recipients ++ list_memberships, opts) + |> fetch_paginated_optimized(opts, pagination) + |> Enum.reverse() + |> maybe_update_cc(list_memberships, opts[:user]) + end + @spec fetch_public_or_unlisted_activities(map(), Pagination.type()) :: [Activity.t()] def fetch_public_or_unlisted_activities(opts \\ %{}, pagination \\ :keyset) do opts = Map.delete(opts, :user) @@ -472,7 +490,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do [Constants.as_public()] |> fetch_activities_query(opts) |> restrict_unlisted(opts) - |> Pagination.fetch_paginated(opts, pagination) + |> fetch_paginated_optimized(opts, pagination) end @spec fetch_public_activities(map(), Pagination.type()) :: [Activity.t()] @@ -693,51 +711,143 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do defp restrict_since(query, _), do: query - defp restrict_tag_reject(_query, %{tag_reject: _tag_reject, skip_preload: true}) do - raise "Can't use the child object without preloading!" 
+ defp restrict_embedded_tag_all(_query, %{tag_all: _tag_all, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_embedded_tag_all(query, %{tag_all: [_ | _] = tag_all}) do + from( + [_activity, object] in query, + where: fragment("(?)->'tag' \\?& (?)", object.data, ^tag_all) + ) + end + + defp restrict_embedded_tag_all(query, %{tag_all: tag}) when is_binary(tag) do + restrict_embedded_tag_any(query, %{tag: tag}) + end + + defp restrict_embedded_tag_all(query, _), do: query + + defp restrict_embedded_tag_any(_query, %{tag: _tag, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_embedded_tag_any(query, %{tag: [_ | _] = tag_any}) do + from( + [_activity, object] in query, + where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag_any) + ) end - defp restrict_tag_reject(query, %{tag_reject: [_ | _] = tag_reject}) do + defp restrict_embedded_tag_any(query, %{tag: tag}) when is_binary(tag) do + restrict_embedded_tag_any(query, %{tag: [tag]}) + end + + defp restrict_embedded_tag_any(query, _), do: query + + defp restrict_embedded_tag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_embedded_tag_reject_any(query, %{tag_reject: [_ | _] = tag_reject}) do from( [_activity, object] in query, where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject) ) end - defp restrict_tag_reject(query, _), do: query + defp restrict_embedded_tag_reject_any(query, %{tag_reject: tag_reject}) + when is_binary(tag_reject) do + restrict_embedded_tag_reject_any(query, %{tag_reject: [tag_reject]}) + end + + defp restrict_embedded_tag_reject_any(query, _), do: query - defp restrict_tag_all(_query, %{tag_all: _tag_all, skip_preload: true}) do - raise "Can't use the child object without preloading!" + defp object_ids_query_for_tags(tags) do + from(hto in "hashtags_objects") + |> join(:inner, [hto], ht in Pleroma.Hashtag, on: hto.hashtag_id == ht.id) + |> where([hto, ht], ht.name in ^tags) + |> select([hto], hto.object_id) + |> distinct([hto], true) end - defp restrict_tag_all(query, %{tag_all: [_ | _] = tag_all}) do + defp restrict_hashtag_all(_query, %{tag_all: _tag, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_hashtag_all(query, %{tag_all: [single_tag]}) do + restrict_hashtag_any(query, %{tag: single_tag}) + end + + defp restrict_hashtag_all(query, %{tag_all: [_ | _] = tags}) do from( [_activity, object] in query, - where: fragment("(?)->'tag' \\?& (?)", object.data, ^tag_all) + where: + fragment( + """ + (SELECT array_agg(hashtags.name) FROM hashtags JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) + AND hashtags_objects.object_id = ?) @> ? + """, + ^tags, + object.id, + ^tags + ) ) end - defp restrict_tag_all(query, _), do: query + defp restrict_hashtag_all(query, %{tag_all: tag}) when is_binary(tag) do + restrict_hashtag_all(query, %{tag_all: [tag]}) + end - defp restrict_tag(_query, %{tag: _tag, skip_preload: true}) do - raise "Can't use the child object without preloading!" 
+ defp restrict_hashtag_all(query, _), do: query + + defp restrict_hashtag_any(_query, %{tag: _tag, skip_preload: true}) do + raise_on_missing_preload() end - defp restrict_tag(query, %{tag: tag}) when is_list(tag) do + defp restrict_hashtag_any(query, %{tag: [_ | _] = tags}) do + hashtag_ids = + from(ht in Hashtag, where: ht.name in ^tags, select: ht.id) + |> Repo.all() + + # Note: NO extra ordering should be done on "activities.id desc nulls last" for optimal plan from( [_activity, object] in query, - where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag) + join: hto in "hashtags_objects", + on: hto.object_id == object.id, + where: hto.hashtag_id in ^hashtag_ids, + distinct: [desc: object.id], + order_by: [desc: object.id] ) end - defp restrict_tag(query, %{tag: tag}) when is_binary(tag) do + defp restrict_hashtag_any(query, %{tag: tag}) when is_binary(tag) do + restrict_hashtag_any(query, %{tag: [tag]}) + end + + defp restrict_hashtag_any(query, _), do: query + + defp restrict_hashtag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_hashtag_reject_any(query, %{tag_reject: [_ | _] = tags_reject}) do from( [_activity, object] in query, - where: fragment("(?)->'tag' \\? (?)", object.data, ^tag) + where: object.id not in subquery(object_ids_query_for_tags(tags_reject)) ) end - defp restrict_tag(query, _), do: query + defp restrict_hashtag_reject_any(query, %{tag_reject: tag_reject}) when is_binary(tag_reject) do + restrict_hashtag_reject_any(query, %{tag_reject: [tag_reject]}) + end + + defp restrict_hashtag_reject_any(query, _), do: query + + defp raise_on_missing_preload do + raise "Can't use the child object without preloading!" + end defp restrict_recipients(query, [], _user), do: query @@ -1098,6 +1208,26 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do defp maybe_order(query, _), do: query + defp normalize_fetch_activities_query_opts(opts) do + Enum.reduce([:tag, :tag_all, :tag_reject], opts, fn key, opts -> + case opts[key] do + value when is_bitstring(value) -> + Map.put(opts, key, Hashtag.normalize_name(value)) + + value when is_list(value) -> + normalized_value = + value + |> Enum.map(&Hashtag.normalize_name/1) + |> Enum.uniq() + + Map.put(opts, key, normalized_value) + + _ -> + opts + end + end) + end + defp fetch_activities_query_ap_ids_ops(opts) do source_user = opts[:muting_user] ap_id_relationships = if source_user, do: [:mute, :reblog_mute], else: [] @@ -1121,6 +1251,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do end def fetch_activities_query(recipients, opts \\ %{}) do + opts = normalize_fetch_activities_query_opts(opts) + {restrict_blocked_opts, restrict_muted_opts, restrict_muted_reblogs_opts} = fetch_activities_query_ap_ids_ops(opts) @@ -1128,50 +1260,51 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do skip_thread_containment: Config.get([:instance, :skip_thread_containment]) } - Activity - |> maybe_preload_objects(opts) - |> maybe_preload_bookmarks(opts) - |> maybe_preload_report_notes(opts) - |> maybe_set_thread_muted_field(opts) - |> maybe_order(opts) - |> restrict_recipients(recipients, opts[:user]) - |> restrict_replies(opts) - |> restrict_tag(opts) - |> restrict_tag_reject(opts) - |> restrict_tag_all(opts) - |> restrict_since(opts) - |> restrict_local(opts) - |> restrict_remote(opts) - |> restrict_actor(opts) - |> restrict_type(opts) - |> restrict_state(opts) - |> restrict_favorited_by(opts) - |> restrict_blocked(restrict_blocked_opts) - |> restrict_muted(restrict_muted_opts) 
- |> restrict_filtered(opts) - |> restrict_media(opts) - |> restrict_visibility(opts) - |> restrict_thread_visibility(opts, config) - |> restrict_reblogs(opts) - |> restrict_pinned(opts) - |> restrict_muted_reblogs(restrict_muted_reblogs_opts) - |> restrict_instance(opts) - |> restrict_announce_object_actor(opts) - |> restrict_filtered(opts) - |> Activity.restrict_deactivated_users() - |> exclude_poll_votes(opts) - |> exclude_chat_messages(opts) - |> exclude_invisible_actors(opts) - |> exclude_visibility(opts) - end - - def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do - list_memberships = Pleroma.List.memberships(opts[:user]) - - fetch_activities_query(recipients ++ list_memberships, opts) - |> Pagination.fetch_paginated(opts, pagination) - |> Enum.reverse() - |> maybe_update_cc(list_memberships, opts[:user]) + query = + Activity + |> maybe_preload_objects(opts) + |> maybe_preload_bookmarks(opts) + |> maybe_preload_report_notes(opts) + |> maybe_set_thread_muted_field(opts) + |> maybe_order(opts) + |> restrict_recipients(recipients, opts[:user]) + |> restrict_replies(opts) + |> restrict_since(opts) + |> restrict_local(opts) + |> restrict_remote(opts) + |> restrict_actor(opts) + |> restrict_type(opts) + |> restrict_state(opts) + |> restrict_favorited_by(opts) + |> restrict_blocked(restrict_blocked_opts) + |> restrict_muted(restrict_muted_opts) + |> restrict_filtered(opts) + |> restrict_media(opts) + |> restrict_visibility(opts) + |> restrict_thread_visibility(opts, config) + |> restrict_reblogs(opts) + |> restrict_pinned(opts) + |> restrict_muted_reblogs(restrict_muted_reblogs_opts) + |> restrict_instance(opts) + |> restrict_announce_object_actor(opts) + |> restrict_filtered(opts) + |> Activity.restrict_deactivated_users() + |> exclude_poll_votes(opts) + |> exclude_chat_messages(opts) + |> exclude_invisible_actors(opts) + |> exclude_visibility(opts) + + if Config.feature_enabled?(:improved_hashtag_timeline) do + query + |> restrict_hashtag_any(opts) + |> restrict_hashtag_all(opts) + |> restrict_hashtag_reject_any(opts) + else + query + |> restrict_embedded_tag_any(opts) + |> restrict_embedded_tag_all(opts) + |> restrict_embedded_tag_reject_any(opts) + end end @doc """ @@ -1250,21 +1383,17 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do defp get_actor_url(_url), do: nil - defp object_to_user_data(data) do - avatar = - data["icon"]["url"] && - %{ - "type" => "Image", - "url" => [%{"href" => data["icon"]["url"]}] - } + defp normalize_image(%{"url" => url}) do + %{ + "type" => "Image", + "url" => [%{"href" => url}] + } + end - banner = - data["image"]["url"] && - %{ - "type" => "Image", - "url" => [%{"href" => data["image"]["url"]}] - } + defp normalize_image(urls) when is_list(urls), do: urls |> List.first() |> normalize_image() + defp normalize_image(_), do: nil + defp object_to_user_data(data) do fields = data |> Map.get("attachment", []) @@ -1308,13 +1437,13 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do ap_id: data["id"], uri: get_actor_url(data["url"]), ap_enabled: true, - banner: banner, + banner: normalize_image(data["image"]), fields: fields, emoji: emojis, is_locked: is_locked, is_discoverable: is_discoverable, invisible: invisible, - avatar: avatar, + avatar: normalize_image(data["icon"]), name: data["name"], follower_address: data["followers"], following_address: data["following"], diff --git a/lib/pleroma/web/activity_pub/mrf.ex b/lib/pleroma/web/activity_pub/mrf.ex index ef5a09a93..f2fec3ff6 100644 --- a/lib/pleroma/web/activity_pub/mrf.ex +++ 
b/lib/pleroma/web/activity_pub/mrf.ex @@ -92,7 +92,9 @@ defmodule Pleroma.Web.ActivityPub.MRF do end def get_policies do - Pleroma.Config.get([:mrf, :policies], []) |> get_policies() + Pleroma.Config.get([:mrf, :policies], []) + |> get_policies() + |> Enum.concat([Pleroma.Web.ActivityPub.MRF.HashtagPolicy]) end defp get_policies(policy) when is_atom(policy), do: [policy] diff --git a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex new file mode 100644 index 000000000..7307c9c14 --- /dev/null +++ b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex @@ -0,0 +1,59 @@ +defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicy do + @behaviour Pleroma.Web.ActivityPub.MRF + alias Pleroma.Config + alias Pleroma.User + alias Pleroma.Web.CommonAPI + + require Logger + + @impl true + def filter(message) do + with follower_nickname <- Config.get([:mrf_follow_bot, :follower_nickname]), + %User{actor_type: "Service"} = follower <- + User.get_cached_by_nickname(follower_nickname), + %{"type" => "Create", "object" => %{"type" => "Note"}} <- message do + try_follow(follower, message) + else + nil -> + Logger.warn( + "#{__MODULE__} skipped because of missing `:mrf_follow_bot, :follower_nickname` configuration, the :follower_nickname + account does not exist, or the account is not correctly configured as a bot." + ) + + {:ok, message} + + _ -> + {:ok, message} + end + end + + defp try_follow(follower, message) do + to = Map.get(message, "to", []) + cc = Map.get(message, "cc", []) + actor = [message["actor"]] + + Enum.concat([to, cc, actor]) + |> List.flatten() + |> Enum.uniq() + |> User.get_all_by_ap_id() + |> Enum.each(fn user -> + with false <- user.local, + false <- User.following?(follower, user), + false <- User.locked?(user), + false <- (user.bio || "") |> String.downcase() |> String.contains?("nobot") do + Logger.debug( + "#{__MODULE__}: Follow request from #{follower.nickname} to #{user.nickname}" + ) + + CommonAPI.follow(follower, user) + end + end) + + {:ok, message} + end + + @impl true + def describe do + {:ok, %{}} + end +end diff --git a/lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex b/lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex new file mode 100644 index 000000000..def0c437c --- /dev/null +++ b/lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex @@ -0,0 +1,116 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.HashtagPolicy do + require Pleroma.Constants + + alias Pleroma.Config + alias Pleroma.Object + + @moduledoc """ + Reject, TWKN-remove or Set-Sensitive messsages with specific hashtags (without the leading #) + + Note: This MRF Policy is always enabled, if you want to disable it you have to set empty lists. 
+ """ + + @behaviour Pleroma.Web.ActivityPub.MRF + + defp check_reject(message, hashtags) do + if Enum.any?(Config.get([:mrf_hashtag, :reject]), fn match -> match in hashtags end) do + {:reject, "[HashtagPolicy] Matches with rejected keyword"} + else + {:ok, message} + end + end + + defp check_ftl_removal(%{"to" => to} = message, hashtags) do + if Pleroma.Constants.as_public() in to and + Enum.any?(Config.get([:mrf_hashtag, :federated_timeline_removal]), fn match -> + match in hashtags + end) do + to = List.delete(to, Pleroma.Constants.as_public()) + cc = [Pleroma.Constants.as_public() | message["cc"] || []] + + message = + message + |> Map.put("to", to) + |> Map.put("cc", cc) + |> Kernel.put_in(["object", "to"], to) + |> Kernel.put_in(["object", "cc"], cc) + + {:ok, message} + else + {:ok, message} + end + end + + defp check_ftl_removal(message, _hashtags), do: {:ok, message} + + defp check_sensitive(message, hashtags) do + if Enum.any?(Config.get([:mrf_hashtag, :sensitive]), fn match -> match in hashtags end) do + {:ok, Kernel.put_in(message, ["object", "sensitive"], true)} + else + {:ok, message} + end + end + + @impl true + def filter(%{"type" => "Create", "object" => object} = message) do + hashtags = Object.hashtags(%Object{data: object}) + + if hashtags != [] do + with {:ok, message} <- check_reject(message, hashtags), + {:ok, message} <- check_ftl_removal(message, hashtags), + {:ok, message} <- check_sensitive(message, hashtags) do + {:ok, message} + end + else + {:ok, message} + end + end + + @impl true + def filter(message), do: {:ok, message} + + @impl true + def describe do + mrf_hashtag = + Config.get(:mrf_hashtag) + |> Enum.into(%{}) + + {:ok, %{mrf_hashtag: mrf_hashtag}} + end + + @impl true + def config_description do + %{ + key: :mrf_hashtag, + related_policy: "Pleroma.Web.ActivityPub.MRF.HashtagPolicy", + label: "MRF Hashtag", + description: @moduledoc, + children: [ + %{ + key: :reject, + type: {:list, :string}, + description: "A list of hashtags which result in message being rejected.", + suggestions: ["foo"] + }, + %{ + key: :federated_timeline_removal, + type: {:list, :string}, + description: + "A list of hashtags which result in message being removed from federated timelines (a.k.a unlisted).", + suggestions: ["foo"] + }, + %{ + key: :sensitive, + type: {:list, :string}, + description: + "A list of hashtags which result in message being set as sensitive (a.k.a NSFW/R-18)", + suggestions: ["nsfw", "r18"] + } + ] + } + end +end diff --git a/lib/pleroma/web/activity_pub/mrf/simple_policy.ex b/lib/pleroma/web/activity_pub/mrf/simple_policy.ex index bb3838d2c..62024c58c 100644 --- a/lib/pleroma/web/activity_pub/mrf/simple_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/simple_policy.ex @@ -64,20 +64,16 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicy do %{host: actor_host} = _actor_info, %{ "type" => "Create", - "object" => child_object + "object" => %{} = _child_object } = object - ) - when is_map(child_object) do + ) do media_nsfw = Config.get([:mrf_simple, :media_nsfw]) |> MRF.subdomains_regex() object = if MRF.subdomain_match?(media_nsfw, actor_host) do - tags = (child_object["tag"] || []) ++ ["nsfw"] - child_object = Map.put(child_object, "tag", tags) - child_object = Map.put(child_object, "sensitive", true) - Map.put(object, "object", child_object) + Kernel.put_in(object, ["object", "sensitive"], true) else object end diff --git a/lib/pleroma/web/activity_pub/mrf/tag_policy.ex b/lib/pleroma/web/activity_pub/mrf/tag_policy.ex index 5739cee63..528093ac0 100644 
--- a/lib/pleroma/web/activity_pub/mrf/tag_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/tag_policy.ex @@ -28,20 +28,11 @@ defmodule Pleroma.Web.ActivityPub.MRF.TagPolicy do "mrf_tag:media-force-nsfw", %{ "type" => "Create", - "object" => %{"attachment" => child_attachment} = object + "object" => %{"attachment" => child_attachment} } = message ) when length(child_attachment) > 0 do - tags = (object["tag"] || []) ++ ["nsfw"] - - object = - object - |> Map.put("tag", tags) - |> Map.put("sensitive", true) - - message = Map.put(message, "object", object) - - {:ok, message} + {:ok, Kernel.put_in(message, ["object", "sensitive"], true)} end defp process_tag( diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 4d9a5617e..8c7d6a747 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -32,18 +32,17 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do """ def fix_object(object, options \\ []) do object - |> strip_internal_fields - |> fix_actor - |> fix_url - |> fix_attachments - |> fix_context + |> strip_internal_fields() + |> fix_actor() + |> fix_url() + |> fix_attachments() + |> fix_context() |> fix_in_reply_to(options) - |> fix_emoji - |> fix_tag - |> set_sensitive - |> fix_content_map - |> fix_addressing - |> fix_summary + |> fix_emoji() + |> fix_tag() + |> fix_content_map() + |> fix_addressing() + |> fix_summary() |> fix_type(options) end @@ -315,10 +314,9 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do tags = tag |> Enum.filter(fn data -> data["type"] == "Hashtag" and data["name"] end) - |> Enum.map(fn %{"name" => name} -> - name - |> String.slice(1..-1) - |> String.downcase() + |> Enum.map(fn + %{"name" => "#" <> hashtag} -> String.downcase(hashtag) + %{"name" => hashtag} -> String.downcase(hashtag) end) Map.put(object, "tag", tag ++ tags) @@ -742,7 +740,6 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do # Prepares the object of an outgoing create activity. 
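# Editor's note: a minimal standalone sketch (not code from this changeset) of
# the tag-name handling introduced in fix_tag/1 above. A leading "#" is now
# stripped via binary pattern matching before downcasing, while bare names are
# only downcased; the previous String.slice(1..-1) approach would also have
# dropped the first character of a name that arrived without a leading "#".
downcase_hashtag = fn
  "#" <> name -> String.downcase(name)
  name -> String.downcase(name)
end

# downcase_hashtag.("#Moo") == "moo"
# downcase_hashtag.("Moo")  == "moo"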
def prepare_object(object) do object - |> set_sensitive |> add_hashtags |> add_mention_tags |> add_emoji_tags @@ -933,15 +930,6 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do Map.put(object, "conversation", object["context"]) end - def set_sensitive(%{"sensitive" => _} = object) do - object - end - - def set_sensitive(object) do - tags = object["tag"] || [] - Map.put(object, "sensitive", "nsfw" in tags) - end - def set_type(%{"type" => "Answer"} = object) do Map.put(object, "type", "Note") end diff --git a/lib/pleroma/web/api_spec/cast_and_validate.ex b/lib/pleroma/web/api_spec/cast_and_validate.ex index a3da856ff..d23a7dcb6 100644 --- a/lib/pleroma/web/api_spec/cast_and_validate.ex +++ b/lib/pleroma/web/api_spec/cast_and_validate.ex @@ -15,6 +15,7 @@ defmodule Pleroma.Web.ApiSpec.CastAndValidate do @behaviour Plug + alias OpenApiSpex.Plug.PutApiSpec alias Plug.Conn @impl Plug @@ -25,12 +26,10 @@ defmodule Pleroma.Web.ApiSpec.CastAndValidate do end @impl Plug - def call(%{private: %{open_api_spex: private_data}} = conn, %{ - operation_id: operation_id, - render_error: render_error - }) do - spec = private_data.spec - operation = private_data.operation_lookup[operation_id] + + def call(conn, %{operation_id: operation_id, render_error: render_error}) do + {spec, operation_lookup} = PutApiSpec.get_spec_and_operation_lookup(conn) + operation = operation_lookup[operation_id] content_type = case Conn.get_req_header(conn, "content-type") do @@ -43,8 +42,7 @@ defmodule Pleroma.Web.ApiSpec.CastAndValidate do "application/json" end - private_data = Map.put(private_data, :operation_id, operation_id) - conn = Conn.put_private(conn, :open_api_spex, private_data) + conn = Conn.put_private(conn, :operation_id, operation_id) case cast_and_validate(spec, operation, conn, content_type, strict?()) do {:ok, conn} -> @@ -64,25 +62,22 @@ defmodule Pleroma.Web.ApiSpec.CastAndValidate do private: %{ phoenix_controller: controller, phoenix_action: action, - open_api_spex: private_data + open_api_spex: %{spec_module: spec_module} } } = conn, opts ) do + {spec, operation_lookup} = PutApiSpec.get_spec_and_operation_lookup(conn) + operation = - case private_data.operation_lookup[{controller, action}] do + case operation_lookup[{controller, action}] do nil -> operation_id = controller.open_api_operation(action).operationId - operation = private_data.operation_lookup[operation_id] + operation = operation_lookup[operation_id] - operation_lookup = - private_data.operation_lookup - |> Map.put({controller, action}, operation) + operation_lookup = Map.put(operation_lookup, {controller, action}, operation) - OpenApiSpex.Plug.Cache.adapter().put( - private_data.spec_module, - {private_data.spec, operation_lookup} - ) + OpenApiSpex.Plug.Cache.adapter().put(spec_module, {spec, operation_lookup}) operation diff --git a/lib/pleroma/web/api_spec/operations/account_operation.ex b/lib/pleroma/web/api_spec/operations/account_operation.ex index 54e5ebc76..08d68893a 100644 --- a/lib/pleroma/web/api_spec/operations/account_operation.ex +++ b/lib/pleroma/web/api_spec/operations/account_operation.ex @@ -482,7 +482,7 @@ defmodule Pleroma.Web.ApiSpec.AccountOperation do access_token: %Schema{type: :string}, refresh_token: %Schema{type: :string}, scope: %Schema{type: :string}, - created_at: %Schema{type: :integer, format: :"date-time"}, + created_at: %Schema{type: :string, format: :"date-time"}, me: %Schema{type: :string}, expires_in: %Schema{type: :integer}, # diff --git a/lib/pleroma/web/api_spec/operations/status_operation.ex 
b/lib/pleroma/web/api_spec/operations/status_operation.ex index 40edc747d..4bdb8e281 100644 --- a/lib/pleroma/web/api_spec/operations/status_operation.ex +++ b/lib/pleroma/web/api_spec/operations/status_operation.ex @@ -59,7 +59,7 @@ defmodule Pleroma.Web.ApiSpec.StatusOperation do Operation.response( "Status. When `scheduled_at` is present, ScheduledStatus is returned instead", "application/json", - %Schema{oneOf: [Status, ScheduledStatus]} + %Schema{anyOf: [Status, ScheduledStatus]} ), 422 => Operation.response("Bad Request / MRF Rejection", "application/json", ApiError) } diff --git a/lib/pleroma/web/api_spec/schemas/boolean_like.ex b/lib/pleroma/web/api_spec/schemas/boolean_like.ex index eb001c5bb..778158f66 100644 --- a/lib/pleroma/web/api_spec/schemas/boolean_like.ex +++ b/lib/pleroma/web/api_spec/schemas/boolean_like.ex @@ -3,6 +3,7 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.ApiSpec.Schemas.BooleanLike do + alias OpenApiSpex.Cast alias OpenApiSpex.Schema require OpenApiSpex @@ -27,10 +28,13 @@ defmodule Pleroma.Web.ApiSpec.Schemas.BooleanLike do %Schema{type: :boolean}, %Schema{type: :string}, %Schema{type: :integer} - ] + ], + "x-validate": __MODULE__ }) - def after_cast(value, _schmea) do - {:ok, Pleroma.Web.ControllerHelper.truthy_param?(value)} + def cast(%Cast{value: value} = context) do + context + |> Map.put(:value, Pleroma.Web.ControllerHelper.truthy_param?(value)) + |> Cast.ok() end end diff --git a/lib/pleroma/web/common_api/activity_draft.ex b/lib/pleroma/web/common_api/activity_draft.ex index 73f1b0931..80a9fa7bb 100644 --- a/lib/pleroma/web/common_api/activity_draft.ex +++ b/lib/pleroma/web/common_api/activity_draft.ex @@ -5,6 +5,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do alias Pleroma.Activity alias Pleroma.Conversation.Participation + alias Pleroma.Object alias Pleroma.Web.CommonAPI alias Pleroma.Web.CommonAPI.Utils @@ -179,13 +180,39 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do end defp sensitive(draft) do - sensitive = draft.params[:sensitive] || Enum.member?(draft.tags, {"#nsfw", "nsfw"}) + sensitive = draft.params[:sensitive] %__MODULE__{draft | sensitive: sensitive} end defp object(draft) do emoji = Map.merge(Pleroma.Emoji.Formatter.get_emoji_map(draft.full_payload), draft.emoji) + # Sometimes people create posts with subject containing emoji, + # since subjects are usually copied this will result in a broken + # subject when someone replies from an instance that does not have + # the emoji or has it under different shortcode. This is an attempt + # to mitigate this by copying emoji from inReplyTo if they are present + # in the subject. 
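# Editor's note: a worked, self-contained illustration of the filtering step
# implemented just below, using the shortcodes from the emoji-in-summary.json
# fixture later in this diff. Only emoji whose shortcode actually occurs in the
# reply's summary are carried over; variable names here are illustrative.
reply_summary = ":joker_smile: "

parent_tag = [
  %{
    "type" => "Emoji",
    "name" => ":joker_disapprove:",
    "icon" => %{"url" => "https://patch.cx/emoji/custom/joker_disapprove.png"}
  },
  %{
    "type" => "Emoji",
    "name" => ":joker_smile:",
    "icon" => %{"url" => "https://patch.cx/emoji/custom/joker_smile.png"}
  }
]

carried_over =
  Enum.reduce(parent_tag, %{}, fn
    %{"type" => "Emoji", "name" => name, "icon" => %{"url" => url}}, acc ->
      if String.contains?(reply_summary, name) do
        Map.put(acc, name, url)
      else
        acc
      end

    _, acc ->
      acc
  end)

# carried_over == %{":joker_smile:" => "https://patch.cx/emoji/custom/joker_smile.png"}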
+ summary_emoji = + with %Activity{} <- draft.in_reply_to, + %Object{data: %{"tag" => [_ | _] = tag}} <- Object.normalize(draft.in_reply_to) do + Enum.reduce(tag, %{}, fn + %{"type" => "Emoji", "name" => name, "icon" => %{"url" => url}}, acc -> + if String.contains?(draft.summary, name) do + Map.put(acc, name, url) + else + acc + end + + _, acc -> + acc + end) + else + _ -> %{} + end + + emoji = Map.merge(emoji, summary_emoji) + object = Utils.make_note_data(draft) |> Map.put("emoji", emoji) diff --git a/lib/pleroma/web/common_api/utils.ex b/lib/pleroma/web/common_api/utils.ex index 9587dfa25..4e6a3feb0 100644 --- a/lib/pleroma/web/common_api/utils.ex +++ b/lib/pleroma/web/common_api/utils.ex @@ -217,7 +217,6 @@ defmodule Pleroma.Web.CommonAPI.Utils do draft.status |> format_input(content_type, options) |> maybe_add_attachments(draft.attachments, attachment_links) - |> maybe_add_nsfw_tag(draft.params) end defp get_content_type(content_type) do @@ -228,13 +227,6 @@ defmodule Pleroma.Web.CommonAPI.Utils do end end - defp maybe_add_nsfw_tag({text, mentions, tags}, %{"sensitive" => sensitive}) - when sensitive in [true, "True", "true", "1"] do - {text, mentions, [{"#nsfw", "nsfw"} | tags]} - end - - defp maybe_add_nsfw_tag(data, _), do: data - def make_context(_, %Participation{} = participation) do Repo.preload(participation, :conversation).conversation.ap_id end diff --git a/lib/pleroma/web/feed/feed_view.ex b/lib/pleroma/web/feed/feed_view.ex index df97d2f46..66940f311 100644 --- a/lib/pleroma/web/feed/feed_view.ex +++ b/lib/pleroma/web/feed/feed_view.ex @@ -32,6 +32,7 @@ defmodule Pleroma.Web.Feed.FeedView do %{ activity: activity, + object: object, data: Map.get(object, :data), actor: actor } diff --git a/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex b/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex index 267d0f03b..c7a5267d4 100644 --- a/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex @@ -5,7 +5,7 @@ defmodule Pleroma.Web.MastodonAPI.InstanceController do use Pleroma.Web, :controller - plug(OpenApiSpex.Plug.CastAndValidate) + plug(Pleroma.Web.ApiSpec.CastAndValidate) plug( :skip_plug, diff --git a/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex b/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex index cef299aa4..c611958be 100644 --- a/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex @@ -133,34 +133,25 @@ defmodule Pleroma.Web.MastodonAPI.TimelineController do end defp hashtag_fetching(params, user, local_only) do - tags = + # Note: not sanitizing tag options at this stage (may be mix-cased, have duplicates etc.) 
+ tags_any = [params[:tag], params[:any]] |> List.flatten() - |> Enum.uniq() - |> Enum.reject(&is_nil/1) - |> Enum.map(&String.downcase/1) - - tag_all = - params - |> Map.get(:all, []) - |> Enum.map(&String.downcase/1) - - tag_reject = - params - |> Map.get(:none, []) - |> Enum.map(&String.downcase/1) - - _activities = - params - |> Map.put(:type, "Create") - |> Map.put(:local_only, local_only) - |> Map.put(:blocking_user, user) - |> Map.put(:muting_user, user) - |> Map.put(:user, user) - |> Map.put(:tag, tags) - |> Map.put(:tag_all, tag_all) - |> Map.put(:tag_reject, tag_reject) - |> ActivityPub.fetch_public_activities() + |> Enum.filter(& &1) + + tag_all = Map.get(params, :all, []) + tag_reject = Map.get(params, :none, []) + + params + |> Map.put(:type, "Create") + |> Map.put(:local_only, local_only) + |> Map.put(:blocking_user, user) + |> Map.put(:muting_user, user) + |> Map.put(:user, user) + |> Map.put(:tag, tags_any) + |> Map.put(:tag_all, tag_all) + |> Map.put(:tag_reject, tag_reject) + |> ActivityPub.fetch_public_activities() end # GET /api/v1/timelines/tag/:tag diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index f3f54e03d..3753588f2 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -198,8 +198,10 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do like_count = object.data["like_count"] || 0 announcement_count = object.data["announcement_count"] || 0 - tags = object.data["tag"] || [] - sensitive = object.data["sensitive"] || Enum.member?(tags, "nsfw") + hashtags = Object.hashtags(object) + sensitive = object.data["sensitive"] || Enum.member?(hashtags, "nsfw") + + tags = Object.tags(object) tag_mentions = tags @@ -379,12 +381,15 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do page_url = page_url_data |> to_string - image_url = + image_url_data = if is_binary(rich_media["image"]) do - URI.merge(page_url_data, URI.parse(rich_media["image"])) - |> to_string + URI.parse(rich_media["image"]) + else + nil end + image_url = build_image_url(image_url_data, page_url_data) + %{ type: "link", provider_name: page_url_data.host, @@ -540,4 +545,23 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do do: %{name: name, website: url} defp build_application(_), do: nil + + # Workaround for Elixir issue #10771 + # Avoid applying URI.merge unless necessary + # TODO: revert to always attempting URI.merge(image_url_data, page_url_data) + # when Elixir 1.12 is the minimum supported version + @spec build_image_url(struct() | nil, struct()) :: String.t() | nil + defp build_image_url( + %URI{scheme: image_scheme, host: image_host} = image_url_data, + %URI{} = _page_url_data + ) + when not is_nil(image_scheme) and not is_nil(image_host) do + image_url_data |> to_string + end + + defp build_image_url(%URI{} = image_url_data, %URI{} = page_url_data) do + URI.merge(page_url_data, image_url_data) |> to_string + end + + defp build_image_url(_, _), do: nil end diff --git a/lib/pleroma/web/media_proxy.ex b/lib/pleroma/web/media_proxy.ex index 27f337138..d0d4bb4b3 100644 --- a/lib/pleroma/web/media_proxy.ex +++ b/lib/pleroma/web/media_proxy.ex @@ -121,6 +121,11 @@ defmodule Pleroma.Web.MediaProxy do end end + def decode_url(encoded) do + [_, "proxy", sig, base64 | _] = URI.parse(encoded).path |> String.split("/") + decode_url(sig, base64) + end + defp signed_url(url) do :crypto.hmac(:sha, Config.get([Web.Endpoint, :secret_key_base]), url) end diff --git 
a/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex b/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex index 315657e9c..fc5d16771 100644 --- a/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex +++ b/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex @@ -10,7 +10,7 @@ defmodule Pleroma.Web.PleromaAPI.BackupController do action_fallback(Pleroma.Web.MastodonAPI.FallbackController) plug(OAuthScopesPlug, %{scopes: ["read:accounts"]} when action in [:index, :create]) - plug(OpenApiSpex.Plug.CastAndValidate, render_error: Pleroma.Web.ApiSpec.RenderError) + plug(Pleroma.Web.ApiSpec.CastAndValidate) defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.PleromaBackupOperation diff --git a/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex b/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex index 4adc685fe..dcd54b1af 100644 --- a/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex +++ b/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex @@ -38,7 +38,7 @@ defmodule Pleroma.Web.PleromaAPI.ChatController do %{scopes: ["read:chats"]} when action in [:messages, :index, :index2, :show] ) - plug(OpenApiSpex.Plug.CastAndValidate, render_error: Pleroma.Web.ApiSpec.RenderError) + plug(Pleroma.Web.ApiSpec.CastAndValidate) defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.ChatOperation diff --git a/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex b/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex index 6d9a11fb6..078d470d9 100644 --- a/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex +++ b/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex @@ -15,7 +15,7 @@ defmodule Pleroma.Web.PleromaAPI.UserImportController do plug(OAuthScopesPlug, %{scopes: ["follow", "write:blocks"]} when action == :blocks) plug(OAuthScopesPlug, %{scopes: ["follow", "write:mutes"]} when action == :mutes) - plug(OpenApiSpex.Plug.CastAndValidate) + plug(Pleroma.Web.ApiSpec.CastAndValidate) defdelegate open_api_operation(action), to: ApiSpec.UserImportOperation def follow(%{body_params: %{list: %Plug.Upload{path: path}}} = conn, _) do diff --git a/lib/pleroma/web/templates/feed/feed/_activity.atom.eex b/lib/pleroma/web/templates/feed/feed/_activity.atom.eex index 3fd150c4e..6688830ba 100644 --- a/lib/pleroma/web/templates/feed/feed/_activity.atom.eex +++ b/lib/pleroma/web/templates/feed/feed/_activity.atom.eex @@ -22,7 +22,7 @@ <link type="text/html" href='<%= @data["external_url"] %>' rel="alternate"/> <% end %> - <%= for tag <- @data["tag"] || [] do %> + <%= for tag <- Pleroma.Object.hashtags(@object) do %> <category term="<%= tag %>"></category> <% end %> diff --git a/lib/pleroma/web/templates/feed/feed/_activity.rss.eex b/lib/pleroma/web/templates/feed/feed/_activity.rss.eex index 947bbb099..592b9dcdc 100644 --- a/lib/pleroma/web/templates/feed/feed/_activity.rss.eex +++ b/lib/pleroma/web/templates/feed/feed/_activity.rss.eex @@ -22,7 +22,7 @@ <link rel="ostatus:conversation"><%= activity_context(@activity) %></link> - <%= for tag <- @data["tag"] || [] do %> + <%= for tag <- Pleroma.Object.hashtags(@object) do %> <category term="<%= tag %>"></category> <% end %> diff --git a/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex b/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex index cf5874a91..c2de28fe4 100644 --- a/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex +++ b/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex @@ -41,7 +41,7 @@ <% 
end %> <% end %> - <%= for tag <- @data["tag"] || [] do %> + <%= for tag <- Pleroma.Object.hashtags(@object) do %> <category term="<%= tag %>"></category> <% end %> diff --git a/lib/pleroma/web/web_finger.ex b/lib/pleroma/web/web_finger.ex index 15002b29f..21b10e654 100644 --- a/lib/pleroma/web/web_finger.ex +++ b/lib/pleroma/web/web_finger.ex @@ -94,52 +94,56 @@ defmodule Pleroma.Web.WebFinger do |> XmlBuilder.to_doc() end - defp webfinger_from_xml(doc) do - subject = XML.string_from_xpath("//Subject", doc) - - subscribe_address = - ~s{//Link[@rel="http://ostatus.org/schema/1.0/subscribe"]/@template} - |> XML.string_from_xpath(doc) - - ap_id = - ~s{//Link[@rel="self" and @type="application/activity+json"]/@href} - |> XML.string_from_xpath(doc) - - data = %{ - "subject" => subject, - "subscribe_address" => subscribe_address, - "ap_id" => ap_id - } + defp webfinger_from_xml(body) do + with {:ok, doc} <- XML.parse_document(body) do + subject = XML.string_from_xpath("//Subject", doc) + + subscribe_address = + ~s{//Link[@rel="http://ostatus.org/schema/1.0/subscribe"]/@template} + |> XML.string_from_xpath(doc) + + ap_id = + ~s{//Link[@rel="self" and @type="application/activity+json"]/@href} + |> XML.string_from_xpath(doc) + + data = %{ + "subject" => subject, + "subscribe_address" => subscribe_address, + "ap_id" => ap_id + } - {:ok, data} + {:ok, data} + end end - defp webfinger_from_json(doc) do - data = - Enum.reduce(doc["links"], %{"subject" => doc["subject"]}, fn link, data -> - case {link["type"], link["rel"]} do - {"application/activity+json", "self"} -> - Map.put(data, "ap_id", link["href"]) + defp webfinger_from_json(body) do + with {:ok, doc} <- Jason.decode(body) do + data = + Enum.reduce(doc["links"], %{"subject" => doc["subject"]}, fn link, data -> + case {link["type"], link["rel"]} do + {"application/activity+json", "self"} -> + Map.put(data, "ap_id", link["href"]) - {"application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"", "self"} -> - Map.put(data, "ap_id", link["href"]) + {"application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"", "self"} -> + Map.put(data, "ap_id", link["href"]) - {nil, "http://ostatus.org/schema/1.0/subscribe"} -> - Map.put(data, "subscribe_address", link["template"]) + {nil, "http://ostatus.org/schema/1.0/subscribe"} -> + Map.put(data, "subscribe_address", link["template"]) - _ -> - Logger.debug("Unhandled type: #{inspect(link["type"])}") - data - end - end) + _ -> + Logger.debug("Unhandled type: #{inspect(link["type"])}") + data + end + end) - {:ok, data} + {:ok, data} + end end def get_template_from_xml(body) do xpath = "//Link[@rel='lrdd']/@template" - with doc when doc != :error <- XML.parse_document(body), + with {:ok, doc} <- XML.parse_document(body), template when template != nil <- XML.string_from_xpath(xpath, doc) do {:ok, template} end @@ -192,15 +196,23 @@ defmodule Pleroma.Web.WebFinger do address, [{"accept", "application/xrd+xml,application/jrd+json"}] ), - {:ok, %{status: status, body: body}} when status in 200..299 <- response do - doc = XML.parse_document(body) - - if doc != :error do - webfinger_from_xml(doc) - else - with {:ok, doc} <- Jason.decode(body) do - webfinger_from_json(doc) - end + {:ok, %{status: status, body: body, headers: headers}} when status in 200..299 <- + response do + case List.keyfind(headers, "content-type", 0) do + {_, content_type} -> + case Plug.Conn.Utils.media_type(content_type) do + {:ok, "application", subtype, _} when subtype in ~w(xrd+xml xml) -> + webfinger_from_xml(body) + + 
{:ok, "application", subtype, _} when subtype in ~w(jrd+json json) -> + webfinger_from_json(body) + + _ -> + {:error, {:content_type, content_type}} + end + + _ -> + {:error, {:content_type, nil}} end else e -> diff --git a/lib/pleroma/web/xml.ex b/lib/pleroma/web/xml.ex index 2b34611ac..0ab6e9d32 100644 --- a/lib/pleroma/web/xml.ex +++ b/lib/pleroma/web/xml.ex @@ -31,7 +31,7 @@ defmodule Pleroma.Web.XML do |> :binary.bin_to_list() |> :xmerl_scan.string(quiet: true) - doc + {:ok, doc} rescue _e -> Logger.debug("Couldn't parse XML: #{inspect(text)}") @@ -121,6 +121,7 @@ defmodule Pleroma.Mixfile do {:phoenix_pubsub, "~> 2.0"}, {:phoenix_ecto, "~> 4.0"}, {:ecto_enum, "~> 1.4"}, + {:ecto_explain, "~> 0.1.2"}, {:ecto_sql, "~> 3.4.4"}, {:postgrex, ">= 0.15.5"}, {:oban, "~> 2.3.4"}, @@ -195,9 +196,7 @@ defmodule Pleroma.Mixfile do {:majic, git: "https://git.pleroma.social/pleroma/elixir-libraries/majic.git", ref: "289cda1b6d0d70ccb2ba508a2b0bd24638db2880"}, - {:open_api_spex, - git: "https://git.pleroma.social/pleroma/elixir-libraries/open_api_spex.git", - ref: "f296ac0924ba3cf79c7a588c4c252889df4c2edd"}, + {:open_api_spex, "~> 3.10"}, ## dev & test {:ex_doc, "~> 0.22", only: :dev, runtime: false}, @@ -31,6 +31,7 @@ "earmark_parser": {:hex, :earmark_parser, "1.4.10", "6603d7a603b9c18d3d20db69921527f82ef09990885ed7525003c7fe7dc86c56", [:mix], [], "hexpm", "8e2d5370b732385db2c9b22215c3f59c84ac7dda7ed7e544d7c459496ae519c0"}, "ecto": {:hex, :ecto, "3.4.6", "08f7afad3257d6eb8613309af31037e16c36808dfda5a3cd0cb4e9738db030e4", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6f13a9e2a62e75c2dcfc7207bfc65645ab387af8360db4c89fee8b5a4bf3f70b"}, "ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"}, + "ecto_explain": {:hex, :ecto_explain, "0.1.2", "a9d504cbd4adc809911f796d5ef7ebb17a576a6d32286c3d464c015bd39d5541", [:mix], [], "hexpm", "1d0e7798ae30ecf4ce34e912e5354a0c1c832b7ebceba39298270b9a9f316330"}, "ecto_sql": {:hex, :ecto_sql, "3.4.5", "30161f81b167d561a9a2df4329c10ae05ff36eca7ccc84628f2c8b9fa1e43323", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.4.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.3.0 or ~> 0.4.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.0", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "31990c6a3579b36a3c0841d34a94c275e727de8b84f58509da5f1b2032c98ac2"}, "eimp": {:hex, :eimp, "1.0.14", "fc297f0c7e2700457a95a60c7010a5f1dcb768a083b6d53f49cd94ab95a28f22", [:rebar3], [{:p1_utils, "1.0.18", [hex: :p1_utils, repo: "hexpm", optional: false]}], "hexpm", "501133f3112079b92d9e22da8b88bf4f0e13d4d67ae9c15c42c30bd25ceb83b6"}, "elixir_make": {:hex, :elixir_make, "0.6.2", 
"7dffacd77dec4c37b39af867cedaabb0b59f6a871f89722c25b28fcd4bd70530", [:mix], [], "hexpm", "03e49eadda22526a7e5279d53321d1cced6552f344ba4e03e619063de75348d9"}, @@ -51,7 +52,7 @@ "gen_smtp": {:hex, :gen_smtp, "0.15.0", "9f51960c17769b26833b50df0b96123605a8024738b62db747fece14eb2fbfcc", [:rebar3], [], "hexpm", "29bd14a88030980849c7ed2447b8db6d6c9278a28b11a44cafe41b791205440f"}, "gen_stage": {:hex, :gen_stage, "0.14.3", "d0c66f1c87faa301c1a85a809a3ee9097a4264b2edf7644bf5c123237ef732bf", [:mix], [], "hexpm"}, "gen_state_machine": {:hex, :gen_state_machine, "2.0.5", "9ac15ec6e66acac994cc442dcc2c6f9796cf380ec4b08267223014be1c728a95", [:mix], [], "hexpm"}, - "gettext": {:hex, :gettext, "0.18.0", "406d6b9e0e3278162c2ae1de0a60270452c553536772167e2d701f028116f870", [:mix], [], "hexpm", "c3f850be6367ebe1a08616c2158affe4a23231c70391050bf359d5f92f66a571"}, + "gettext": {:hex, :gettext, "0.18.2", "7df3ea191bb56c0309c00a783334b288d08a879f53a7014341284635850a6e55", [:mix], [], "hexpm", "f9f537b13d4fdd30f3039d33cb80144c3aa1f8d9698e47d7bcbcc8df93b1f5c5"}, "gun": {:git, "https://github.com/ninenines/gun.git", "921c47146b2d9567eac7e9a4d2ccc60fffd4f327", [ref: "921c47146b2d9567eac7e9a4d2ccc60fffd4f327"]}, "hackney": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/hackney.git", "7d7119f0651515d6d7669c78393fd90950a3ec6e", [ref: "7d7119f0651515d6d7669c78393fd90950a3ec6e"]}, "html_entities": {:hex, :html_entities, "0.5.1", "1c9715058b42c35a2ab65edc5b36d0ea66dd083767bef6e3edb57870ef556549", [:mix], [], "hexpm", "30efab070904eb897ff05cd52fa61c1025d7f8ef3a9ca250bc4e6513d16c32de"}, @@ -82,7 +83,7 @@ "nimble_pool": {:hex, :nimble_pool, "0.1.0", "ffa9d5be27eee2b00b0c634eb649aa27f97b39186fec3c493716c2a33e784ec6", [:mix], [], "hexpm", "343a1eaa620ddcf3430a83f39f2af499fe2370390d4f785cd475b4df5acaf3f9"}, "nodex": {:git, "https://git.pleroma.social/pleroma/nodex", "cb6730f943cfc6aad674c92161be23a8411f15d1", [ref: "cb6730f943cfc6aad674c92161be23a8411f15d1"]}, "oban": {:hex, :oban, "2.3.4", "ec7509b9af2524d55f529cb7aee93d36131ae0bf0f37706f65d2fe707f4d9fd8", [:mix], [{:ecto_sql, ">= 3.4.3", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.14", [hex: :postgrex, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c70ca0434758fd1805422ea4446af5e910ddc697c0c861549c8f0eb0cfbd2fdf"}, - "open_api_spex": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/open_api_spex.git", "f296ac0924ba3cf79c7a588c4c252889df4c2edd", [ref: "f296ac0924ba3cf79c7a588c4c252889df4c2edd"]}, + "open_api_spex": {:hex, :open_api_spex, "3.10.0", "94e9521ad525b3fcf6dc77da7c45f87fdac24756d4de588cb0816b413e7c1844", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.1", [hex: :poison, repo: "hexpm", optional: true]}], "hexpm", "2dbb2bde3d2b821f06936e8dfaf3284331186556291946d84eeba3750ac28765"}, "p1_utils": {:hex, :p1_utils, "1.0.18", "3fe224de5b2e190d730a3c5da9d6e8540c96484cf4b4692921d1e28f0c32b01c", [:rebar3], [], "hexpm", "1fc8773a71a15553b179c986b22fbeead19b28fe486c332d4929700ffeb71f88"}, "parse_trans": {:git, "https://github.com/uwiger/parse_trans.git", "76abb347c3c1d00fb0ccf9e4b43e22b3d2288484", [tag: "3.3.0"]}, "pbkdf2_elixir": {:hex, :pbkdf2_elixir, "1.2.1", "9cbe354b58121075bd20eb83076900a3832324b7dd171a6895fab57b6bb2752c", [:mix], [{:comeonin, "~> 5.3", [hex: :comeonin, repo: 
"hexpm", optional: false]}], "hexpm", "d3b40a4a4630f0b442f19eca891fcfeeee4c40871936fed2f68e1c4faa30481f"}, @@ -116,9 +117,9 @@ "syslog": {:hex, :syslog, "1.1.0", "6419a232bea84f07b56dc575225007ffe34d9fdc91abe6f1b2f254fd71d8efc2", [:rebar3], [], "hexpm", "4c6a41373c7e20587be33ef841d3de6f3beba08519809329ecc4d27b15b659e1"}, "telemetry": {:hex, :telemetry, "0.4.2", "2808c992455e08d6177322f14d3bdb6b625fbcfd233a73505870d8738a2f4599", [:rebar3], [], "hexpm", "2d1419bd9dda6a206d7b5852179511722e2b18812310d304620c7bd92a13fcef"}, "tesla": {:hex, :tesla, "1.4.0", "1081bef0124b8bdec1c3d330bbe91956648fb008cf0d3950a369cda466a31a87", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.3", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, "~> 1.3", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "~> 4.4.0", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "bf1374a5569f5fca8e641363b63f7347d680d91388880979a33bc12a6eb3e0aa"}, - "timex": {:hex, :timex, "3.6.2", "845cdeb6119e2fef10751c0b247b6c59d86d78554c83f78db612e3290f819bc2", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 0.1.8 or ~> 0.5 or ~> 1.0.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "26030b46199d02a590be61c2394b37ea25a3664c02fafbeca0b24c972025d47a"}, + "timex": {:hex, :timex, "3.7.3", "df8a2ea814749d700d6878ab9eacac9fdb498ecee2f507cb0002ec172bc24d0f", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "8691c1d86ca3a7bc14a156e2199dc8927be95d1a8f0e3b69e4bb2d6262c53ac6"}, "trailing_format_plug": {:hex, :trailing_format_plug, "0.0.7", "64b877f912cf7273bed03379936df39894149e35137ac9509117e59866e10e45", [:mix], [{:plug, "> 0.12.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bd4fde4c15f3e993a999e019d64347489b91b7a9096af68b2bdadd192afa693f"}, - "tzdata": {:hex, :tzdata, "1.0.4", "a3baa4709ea8dba552dca165af6ae97c624a2d6ac14bd265165eaa8e8af94af6", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "b02637db3df1fd66dd2d3c4f194a81633d0e4b44308d36c1b2fdfd1e4e6f169b"}, + "tzdata": {:hex, :tzdata, "1.0.5", "69f1ee029a49afa04ad77801febaf69385f3d3e3d1e4b56b9469025677b89a28", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "55519aa2a99e5d2095c1e61cc74c9be69688f8ab75c27da724eb8279ff402a5a"}, "ueberauth": {:hex, :ueberauth, "0.6.3", "d42ace28b870e8072cf30e32e385579c57b9cc96ec74fa1f30f30da9c14f3cc0", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "afc293d8a1140d6591b53e3eaf415ca92842cb1d32fad3c450c6f045f7f91b60"}, "unicode_util_compat": {:git, "https://github.com/benoitc/unicode_util_compat.git", 
"38d7bc105f51159e8ea3279c40121db9db1e652f", [tag: "0.3.1"]}, "unsafe": {:hex, :unsafe, "1.0.1", "a27e1874f72ee49312e0a9ec2e0b27924214a05e3ddac90e91727bc76f8613d8", [:mix], [], "hexpm", "6c7729a2d214806450d29766abc2afaa7a2cbecf415be64f36a6691afebb50e5"}, diff --git a/priv/repo/migrations/20190711042021_create_safe_jsonb_set.exs b/priv/repo/migrations/20190711042021_create_safe_jsonb_set.exs index 43d616705..bfac09f9e 100644 --- a/priv/repo/migrations/20190711042021_create_safe_jsonb_set.exs +++ b/priv/repo/migrations/20190711042021_create_safe_jsonb_set.exs @@ -9,7 +9,7 @@ defmodule Pleroma.Repo.Migrations.CreateSafeJsonbSet do begin result := jsonb_set(target, path, coalesce(new_value, 'null'::jsonb), create_missing); if result is NULL then - raise 'jsonb_set tried to wipe the object, please report this incindent to Pleroma bug tracker. https://git.pleroma.social/pleroma/pleroma/issues/new'; + raise 'jsonb_set tried to wipe the object, please report this incident to Pleroma bug tracker. https://git.pleroma.social/pleroma/pleroma/issues/new'; return target; else return result; diff --git a/priv/repo/migrations/20201221202251_create_hashtags.exs b/priv/repo/migrations/20201221202251_create_hashtags.exs new file mode 100644 index 000000000..8d2e9ae66 --- /dev/null +++ b/priv/repo/migrations/20201221202251_create_hashtags.exs @@ -0,0 +1,13 @@ +defmodule Pleroma.Repo.Migrations.CreateHashtags do + use Ecto.Migration + + def change do + create_if_not_exists table(:hashtags) do + add(:name, :citext, null: false) + + timestamps() + end + + create_if_not_exists(unique_index(:hashtags, [:name])) + end +end diff --git a/priv/repo/migrations/20201221202252_remove_data_from_hashtags.exs b/priv/repo/migrations/20201221202252_remove_data_from_hashtags.exs new file mode 100644 index 000000000..0442c3b87 --- /dev/null +++ b/priv/repo/migrations/20201221202252_remove_data_from_hashtags.exs @@ -0,0 +1,15 @@ +defmodule Pleroma.Repo.Migrations.RemoveDataFromHashtags do + use Ecto.Migration + + def up do + alter table(:hashtags) do + remove_if_exists(:data, :map) + end + end + + def down do + alter table(:hashtags) do + add_if_not_exists(:data, :map, default: %{}) + end + end +end diff --git a/priv/repo/migrations/20201221203824_create_hashtags_objects.exs b/priv/repo/migrations/20201221203824_create_hashtags_objects.exs new file mode 100644 index 000000000..581f32b3c --- /dev/null +++ b/priv/repo/migrations/20201221203824_create_hashtags_objects.exs @@ -0,0 +1,13 @@ +defmodule Pleroma.Repo.Migrations.CreateHashtagsObjects do + use Ecto.Migration + + def change do + create_if_not_exists table(:hashtags_objects, primary_key: false) do + add(:hashtag_id, references(:hashtags), null: false, primary_key: true) + add(:object_id, references(:objects), null: false, primary_key: true) + end + + # Note: PK index: "hashtags_objects_pkey" PRIMARY KEY, btree (hashtag_id, object_id) + create_if_not_exists(index(:hashtags_objects, [:object_id])) + end +end diff --git a/priv/repo/migrations/20210105195018_create_data_migrations.exs b/priv/repo/migrations/20210105195018_create_data_migrations.exs new file mode 100644 index 000000000..5f2e8d96c --- /dev/null +++ b/priv/repo/migrations/20210105195018_create_data_migrations.exs @@ -0,0 +1,17 @@ +defmodule Pleroma.Repo.Migrations.CreateDataMigrations do + use Ecto.Migration + + def change do + create_if_not_exists table(:data_migrations) do + add(:name, :string, null: false) + add(:state, :integer, default: 1) + add(:feature_lock, :boolean, default: false) + add(:params, :map, 
default: %{}) + add(:data, :map, default: %{}) + + timestamps() + end + + create_if_not_exists(unique_index(:data_migrations, [:name])) + end +end diff --git a/priv/repo/migrations/20210106183301_data_migration_create_populate_hashtags_table.exs b/priv/repo/migrations/20210106183301_data_migration_create_populate_hashtags_table.exs new file mode 100644 index 000000000..cf3cf26a0 --- /dev/null +++ b/priv/repo/migrations/20210106183301_data_migration_create_populate_hashtags_table.exs @@ -0,0 +1,16 @@ +defmodule Pleroma.Repo.Migrations.DataMigrationCreatePopulateHashtagsTable do + use Ecto.Migration + + def up do + dt = NaiveDateTime.utc_now() + + execute( + "INSERT INTO data_migrations(name, inserted_at, updated_at) " <> + "VALUES ('populate_hashtags_table', '#{dt}', '#{dt}') ON CONFLICT DO NOTHING;" + ) + end + + def down do + execute("DELETE FROM data_migrations WHERE name = 'populate_hashtags_table';") + end +end diff --git a/priv/repo/migrations/20210111172254_create_data_migration_failed_ids.exs b/priv/repo/migrations/20210111172254_create_data_migration_failed_ids.exs new file mode 100644 index 000000000..18afa74ac --- /dev/null +++ b/priv/repo/migrations/20210111172254_create_data_migration_failed_ids.exs @@ -0,0 +1,14 @@ +defmodule Pleroma.Repo.Migrations.CreateDataMigrationFailedIds do + use Ecto.Migration + + def change do + create_if_not_exists table(:data_migration_failed_ids, primary_key: false) do + add(:data_migration_id, references(:data_migrations), null: false, primary_key: true) + add(:record_id, :bigint, null: false, primary_key: true) + end + + create_if_not_exists( + unique_index(:data_migration_failed_ids, [:data_migration_id, :record_id]) + ) + end +end diff --git a/priv/repo/migrations/20210222183840_remove_hashtags_objects_duplicate_index.exs b/priv/repo/migrations/20210222183840_remove_hashtags_objects_duplicate_index.exs new file mode 100644 index 000000000..6c4a2dfdc --- /dev/null +++ b/priv/repo/migrations/20210222183840_remove_hashtags_objects_duplicate_index.exs @@ -0,0 +1,11 @@ +defmodule Pleroma.Repo.Migrations.RemoveHashtagsObjectsDuplicateIndex do + use Ecto.Migration + + @moduledoc "Removes `hashtags_objects_hashtag_id_object_id_index` index (duplicate of PK index)." 
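# Editor's note (assumption, not part of this changeset): the hashtags and
# hashtags_objects tables created by the migrations above are presumably what
# the restrict_hashtag_* clauses in ActivityPub.fetch_activities_query/2 join
# against. A standalone sketch of such a join; the module name is hypothetical
# and the query shape an educated guess, shown only for orientation.
defmodule EditorSketch.HashtagQuery do
  import Ecto.Query

  # Objects carrying at least one of the given (already-downcased) hashtag
  # names, resolved through the hashtags_objects join table instead of the
  # object-embedded "tag" JSON field.
  def with_any_hashtag(names) when is_list(names) do
    from(o in Pleroma.Object,
      join: ho in "hashtags_objects",
      on: ho.object_id == o.id,
      join: h in Pleroma.Hashtag,
      on: h.id == ho.hashtag_id,
      where: h.name in ^names,
      distinct: o.id
    )
  end
end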
+ + def up do + drop_if_exists(unique_index(:hashtags_objects, [:hashtag_id, :object_id])) + end + + def down, do: nil +end diff --git a/priv/repo/migrations/20210222184616_change_hashtags_name_to_text.exs b/priv/repo/migrations/20210222184616_change_hashtags_name_to_text.exs new file mode 100644 index 000000000..8940b6ca3 --- /dev/null +++ b/priv/repo/migrations/20210222184616_change_hashtags_name_to_text.exs @@ -0,0 +1,15 @@ +defmodule Pleroma.Repo.Migrations.ChangeHashtagsNameToText do + use Ecto.Migration + + def up do + alter table(:hashtags) do + modify(:name, :text) + end + end + + def down do + alter table(:hashtags) do + modify(:name, :citext) + end + end +end diff --git a/test/fixtures/bridgy/actor.json b/test/fixtures/bridgy/actor.json new file mode 100644 index 000000000..5b2d8982b --- /dev/null +++ b/test/fixtures/bridgy/actor.json @@ -0,0 +1,80 @@ +{ + "id": "https://fed.brid.gy/jk.nipponalba.scot", + "url": "https://fed.brid.gy/r/https://jk.nipponalba.scot", + "urls": [ + { + "value": "https://jk.nipponalba.scot" + }, + { + "value": "https://social.nipponalba.scot/jk" + }, + { + "value": "https://px.nipponalba.scot/jk" + } + ], + "@context": "https://www.w3.org/ns/activitystreams", + "type": "Person", + "name": "J K 🇯🇵🏴", + "image": [ + { + "url": "https://jk.nipponalba.scot/images/profile.jpg", + "type": "Image", + "name": "profile picture" + } + ], + "tag": [ + { + "type": "Tag", + "name": "Craft Beer" + }, + { + "type": "Tag", + "name": "Single Malt Whisky" + }, + { + "type": "Tag", + "name": "Homebrewing" + }, + { + "type": "Tag", + "name": "Scottish Politics" + }, + { + "type": "Tag", + "name": "Scottish History" + }, + { + "type": "Tag", + "name": "Japanese History" + }, + { + "type": "Tag", + "name": "Tech" + }, + { + "type": "Tag", + "name": "Veganism" + }, + { + "type": "Tag", + "name": "Cooking" + } + ], + "icon": [ + { + "url": "https://jk.nipponalba.scot/images/profile.jpg", + "type": "Image", + "name": "profile picture" + } + ], + "preferredUsername": "jk.nipponalba.scot", + "summary": "", + "publicKey": { + "id": "jk.nipponalba.scot", + "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDdarxwzxnNbJ2hneWOYHkYJowk\npyigQtxlUd0VjgSQHwxU9kWqfbrHBVADyTtcqi/4dAzQd3UnCI1TPNnn4LPZY9PW\noiWd3Zl1/EfLFxO7LU9GS7fcSLQkyj5JNhSlN3I8QPudZbybrgRDVZYooDe1D+52\n5KLGqC2ajrIVOiDRTQIDAQAB\n-----END PUBLIC KEY-----" + }, + "inbox": "https://fed.brid.gy/jk.nipponalba.scot/inbox", + "outbox": "https://fed.brid.gy/jk.nipponalba.scot/outbox", + "following": "https://fed.brid.gy/jk.nipponalba.scot/following", + "followers": "https://fed.brid.gy/jk.nipponalba.scot/followers" +} diff --git a/test/fixtures/tesla_mock/emoji-in-summary.json b/test/fixtures/tesla_mock/emoji-in-summary.json new file mode 100644 index 000000000..f77c6e2e8 --- /dev/null +++ b/test/fixtures/tesla_mock/emoji-in-summary.json @@ -0,0 +1,49 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://patch.cx/schemas/litepub-0.1.jsonld", + { + "@language": "und" + } + ], + "actor": "https://patch.cx/users/rin", + "attachment": [], + "attributedTo": "https://patch.cx/users/rin", + "cc": [ + "https://patch.cx/users/rin/followers" + ], + "content": ":joker_disapprove: <br><br>just grabbing a test fixture, nevermind me", + "context": "https://patch.cx/contexts/2c3ce4b4-18b1-4b1a-8965-3932027b5326", + "conversation": "https://patch.cx/contexts/2c3ce4b4-18b1-4b1a-8965-3932027b5326", + "id": "https://patch.cx/objects/a399c28e-c821-4820-bc3e-4afeb044c16f", + "published": 
"2021-03-22T16:54:46.461939Z", + "sensitive": null, + "source": ":joker_disapprove: \r\n\r\njust grabbing a test fixture, nevermind me", + "summary": ":joker_smile: ", + "tag": [ + { + "icon": { + "type": "Image", + "url": "https://patch.cx/emoji/custom/joker_disapprove.png" + }, + "id": "https://patch.cx/emoji/custom/joker_disapprove.png", + "name": ":joker_disapprove:", + "type": "Emoji", + "updated": "1970-01-01T00:00:00Z" + }, + { + "icon": { + "type": "Image", + "url": "https://patch.cx/emoji/custom/joker_smile.png" + }, + "id": "https://patch.cx/emoji/custom/joker_smile.png", + "name": ":joker_smile:", + "type": "Emoji", + "updated": "1970-01-01T00:00:00Z" + } + ], + "to": [ + "https://www.w3.org/ns/activitystreams#Public" + ], + "type": "Note" +} diff --git a/test/fixtures/tesla_mock/xn--q9jyb4c_host_meta b/test/fixtures/tesla_mock/xn--q9jyb4c_host_meta deleted file mode 100644 index 45d260e55..000000000 --- a/test/fixtures/tesla_mock/xn--q9jyb4c_host_meta +++ /dev/null @@ -1,4 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0"> - <Link rel="lrdd" template="https://zetsubou.xn--q9jyb4c/.well-known/webfinger?resource={uri}" type="application/xrd+xml" /> -</XRD> diff --git a/test/mix/tasks/pleroma/config_test.exs b/test/mix/tasks/pleroma/config_test.exs index 21f8f2286..3ed1e94b8 100644 --- a/test/mix/tasks/pleroma/config_test.exs +++ b/test/mix/tasks/pleroma/config_test.exs @@ -200,6 +200,44 @@ defmodule Mix.Tasks.Pleroma.ConfigTest do end end + describe "migrate_from_db/1" do + setup do: clear_config(:configurable_from_database, true) + + setup do + insert_config_record(:pleroma, :setting_first, key: "value", key2: ["Activity"]) + insert_config_record(:pleroma, :setting_second, key: "value2", key2: [Repo]) + insert_config_record(:quack, :level, :info) + + path = "test/instance_static" + file_path = Path.join(path, "temp.exported_from_db.secret.exs") + + on_exit(fn -> File.rm!(file_path) end) + + [file_path: file_path] + end + + test "with path parameter", %{file_path: file_path} do + MixTask.run(["migrate_from_db", "--env", "temp", "--path", Path.dirname(file_path)]) + + file = File.read!(file_path) + assert file =~ "config :pleroma, :setting_first," + assert file =~ "config :pleroma, :setting_second," + assert file =~ "config :quack, :level, :info" + end + + test "release", %{file_path: file_path} do + clear_config(:release, true) + clear_config(:config_path, file_path) + + MixTask.run(["migrate_from_db", "--env", "temp"]) + + file = File.read!(file_path) + assert file =~ "config :pleroma, :setting_first," + assert file =~ "config :pleroma, :setting_second," + assert file =~ "config :quack, :level, :info" + end + end + describe "operations on database config" do setup do: clear_config(:configurable_from_database, true) diff --git a/test/pleroma/activity/ir/topics_test.exs b/test/pleroma/activity/ir/topics_test.exs index 6b848e04d..9c8e5d932 100644 --- a/test/pleroma/activity/ir/topics_test.exs +++ b/test/pleroma/activity/ir/topics_test.exs @@ -11,6 +11,8 @@ defmodule Pleroma.Activity.Ir.TopicsTest do require Pleroma.Constants + import Mock + describe "poll answer" do test "produce no topics" do activity = %Activity{object: %Object{data: %{"type" => "Answer"}}} @@ -77,14 +79,13 @@ defmodule Pleroma.Activity.Ir.TopicsTest do refute Enum.member?(topics, "public:local:media") end - test "converts tags to hash tags", %{activity: %{object: %{data: data} = object} = activity} do - tagged_data = Map.put(data, "tag", ["foo", "bar"]) - 
activity = %{activity | object: %{object | data: tagged_data}} - - topics = Topics.get_activity_topics(activity) + test "converts tags to hash tags", %{activity: activity} do + with_mock(Object, [:passthrough], hashtags: fn _ -> ["foo", "bar"] end) do + topics = Topics.get_activity_topics(activity) - assert Enum.member?(topics, "hashtag:foo") - assert Enum.member?(topics, "hashtag:bar") + assert Enum.member?(topics, "hashtag:foo") + assert Enum.member?(topics, "hashtag:bar") + end end test "only converts strings to hash tags", %{ diff --git a/test/pleroma/hashtag_test.exs b/test/pleroma/hashtag_test.exs new file mode 100644 index 000000000..0264dea0b --- /dev/null +++ b/test/pleroma/hashtag_test.exs @@ -0,0 +1,17 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.HashtagTest do + use Pleroma.DataCase + + alias Pleroma.Hashtag + + describe "changeset validations" do + test "ensure non-blank :name" do + changeset = Hashtag.changeset(%Hashtag{}, %{name: ""}) + + assert {:name, {"can't be blank", [validation: :required]}} in changeset.errors + end + end +end diff --git a/test/pleroma/object_test.exs b/test/pleroma/object_test.exs index db7678d5d..8320660a5 100644 --- a/test/pleroma/object_test.exs +++ b/test/pleroma/object_test.exs @@ -5,10 +5,13 @@ defmodule Pleroma.ObjectTest do use Pleroma.DataCase use Oban.Testing, repo: Pleroma.Repo + import ExUnit.CaptureLog import Pleroma.Factory import Tesla.Mock + alias Pleroma.Activity + alias Pleroma.Hashtag alias Pleroma.Object alias Pleroma.Repo alias Pleroma.Tests.ObanHelpers @@ -417,4 +420,28 @@ defmodule Pleroma.ObjectTest do assert updated_object.data["like_count"] == 1 end end + + describe ":hashtags association" do + test "Hashtag records are created with Object record and updated on its change" do + user = insert(:user) + + {:ok, %{object: object}} = + CommonAPI.post(user, %{status: "some text #hashtag1 #hashtag2 ..."}) + + assert [%Hashtag{name: "hashtag1"}, %Hashtag{name: "hashtag2"}] = + Enum.sort_by(object.hashtags, & &1.name) + + {:ok, object} = Object.update_data(object, %{"tag" => []}) + + assert [] = object.hashtags + + object = Object.get_by_id(object.id) |> Repo.preload(:hashtags) + assert [] = object.hashtags + + {:ok, object} = Object.update_data(object, %{"tag" => ["abc", "def"]}) + + assert [%Hashtag{name: "abc"}, %Hashtag{name: "def"}] = + Enum.sort_by(object.hashtags, & &1.name) + end + end end diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index f4023856c..c7fa452f7 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -208,37 +208,96 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do assert user.name == "Bernie2020 group" assert user.actor_type == "Group" end + + test "works for bridgy actors" do + user_id = "https://fed.brid.gy/jk.nipponalba.scot" + + Tesla.Mock.mock(fn + %{method: :get, url: ^user_id} -> + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/bridgy/actor.json"), + headers: [{"content-type", "application/activity+json"}] + } + end) + + {:ok, user} = ActivityPub.make_user_from_ap_id(user_id) + + assert user.actor_type == "Person" + + assert user.avatar == %{ + "type" => "Image", + "url" => [%{"href" => "https://jk.nipponalba.scot/images/profile.jpg"}] + } + + assert user.banner == %{ + "type" => "Image", + "url" => 
[%{"href" => "https://jk.nipponalba.scot/images/profile.jpg"}] + } + end end test "it fetches the appropriate tag-restricted posts" do user = insert(:user) - {:ok, status_one} = CommonAPI.post(user, %{status: ". #test"}) + {:ok, status_one} = CommonAPI.post(user, %{status: ". #TEST"}) {:ok, status_two} = CommonAPI.post(user, %{status: ". #essais"}) - {:ok, status_three} = CommonAPI.post(user, %{status: ". #test #reject"}) + {:ok, status_three} = CommonAPI.post(user, %{status: ". #test #Reject"}) - fetch_one = ActivityPub.fetch_activities([], %{type: "Create", tag: "test"}) + {:ok, status_four} = CommonAPI.post(user, %{status: ". #Any1 #any2"}) + {:ok, status_five} = CommonAPI.post(user, %{status: ". #Any2 #any1"}) - fetch_two = ActivityPub.fetch_activities([], %{type: "Create", tag: ["test", "essais"]}) + for hashtag_timeline_strategy <- [:enabled, :disabled] do + clear_config([:features, :improved_hashtag_timeline], hashtag_timeline_strategy) - fetch_three = - ActivityPub.fetch_activities([], %{ - type: "Create", - tag: ["test", "essais"], - tag_reject: ["reject"] - }) + fetch_one = ActivityPub.fetch_activities([], %{type: "Create", tag: "test"}) - fetch_four = - ActivityPub.fetch_activities([], %{ - type: "Create", - tag: ["test"], - tag_all: ["test", "reject"] - }) + fetch_two = ActivityPub.fetch_activities([], %{type: "Create", tag: ["TEST", "essais"]}) + + fetch_three = + ActivityPub.fetch_activities([], %{ + type: "Create", + tag: ["test", "Essais"], + tag_reject: ["reject"] + }) - assert fetch_one == [status_one, status_three] - assert fetch_two == [status_one, status_two, status_three] - assert fetch_three == [status_one, status_two] - assert fetch_four == [status_three] + fetch_four = + ActivityPub.fetch_activities([], %{ + type: "Create", + tag: ["test"], + tag_all: ["test", "REJECT"] + }) + + # Testing that deduplication (if needed) is done on DB (not Ecto) level; :limit is important + fetch_five = + ActivityPub.fetch_activities([], %{ + type: "Create", + tag: ["ANY1", "any2"], + limit: 2 + }) + + fetch_six = + ActivityPub.fetch_activities([], %{ + type: "Create", + tag: ["any1", "Any2"], + tag_all: [], + tag_reject: [] + }) + + # Regression test: passing empty lists as filter options shouldn't affect the results + assert fetch_five == fetch_six + + [fetch_one, fetch_two, fetch_three, fetch_four, fetch_five] = + Enum.map([fetch_one, fetch_two, fetch_three, fetch_four, fetch_five], fn statuses -> + Enum.map(statuses, fn s -> Repo.preload(s, object: :hashtags) end) + end) + + assert fetch_one == [status_one, status_three] + assert fetch_two == [status_one, status_two, status_three] + assert fetch_three == [status_one, status_two] + assert fetch_four == [status_three] + assert fetch_five == [status_four, status_five] + end end describe "insertion" do diff --git a/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs b/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs new file mode 100644 index 000000000..a61562558 --- /dev/null +++ b/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs @@ -0,0 +1,126 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicyTest do + use Pleroma.DataCase, async: true + + alias Pleroma.User + alias Pleroma.Web.ActivityPub.MRF.FollowBotPolicy + + import Pleroma.Factory + + describe "FollowBotPolicy" do + test "follows remote users" do + bot = insert(:user, 
actor_type: "Service") + remote_user = insert(:user, local: false) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [remote_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "Test post", + "type" => "Note", + "attributedTo" => remote_user.ap_id, + "inReplyTo" => nil + }, + "actor" => remote_user.ap_id + } + + refute User.following?(bot, remote_user) + + assert User.get_follow_requests(remote_user) |> length == 0 + + FollowBotPolicy.filter(message) + + assert User.get_follow_requests(remote_user) |> length == 1 + end + + test "does not follow users with #nobot in bio" do + bot = insert(:user, actor_type: "Service") + remote_user = insert(:user, %{local: false, bio: "go away bots! #nobot"}) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [remote_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "I don't like follow bots", + "type" => "Note", + "attributedTo" => remote_user.ap_id, + "inReplyTo" => nil + }, + "actor" => remote_user.ap_id + } + + refute User.following?(bot, remote_user) + + assert User.get_follow_requests(remote_user) |> length == 0 + + FollowBotPolicy.filter(message) + + assert User.get_follow_requests(remote_user) |> length == 0 + end + + test "does not follow local users" do + bot = insert(:user, actor_type: "Service") + local_user = insert(:user, local: true) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [local_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "Hi I'm a local user", + "type" => "Note", + "attributedTo" => local_user.ap_id, + "inReplyTo" => nil + }, + "actor" => local_user.ap_id + } + + refute User.following?(bot, local_user) + + assert User.get_follow_requests(local_user) |> length == 0 + + FollowBotPolicy.filter(message) + + assert User.get_follow_requests(local_user) |> length == 0 + end + + test "does not follow users requiring follower approval" do + bot = insert(:user, actor_type: "Service") + remote_user = insert(:user, %{local: false, is_locked: true}) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [remote_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "I don't like randos following me", + "type" => "Note", + "attributedTo" => remote_user.ap_id, + "inReplyTo" => nil + }, + "actor" => remote_user.ap_id + } + + refute User.following?(bot, remote_user) + + assert User.get_follow_requests(remote_user) |> length == 0 + + FollowBotPolicy.filter(message) + + assert User.get_follow_requests(remote_user) |> length == 0 + end + end +end diff --git a/test/pleroma/web/activity_pub/mrf/hashtag_policy_test.exs b/test/pleroma/web/activity_pub/mrf/hashtag_policy_test.exs new file mode 100644 index 000000000..13415bb79 --- /dev/null +++ b/test/pleroma/web/activity_pub/mrf/hashtag_policy_test.exs @@ -0,0 +1,31 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors 
<https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.HashtagPolicyTest do + use Oban.Testing, repo: Pleroma.Repo + use Pleroma.DataCase + + alias Pleroma.Web.ActivityPub.Transmogrifier + alias Pleroma.Web.CommonAPI + + import Pleroma.Factory + + test "it sets the sensitive property with relevant hashtags" do + user = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{status: "#nsfw hey"}) + {:ok, modified} = Transmogrifier.prepare_outgoing(activity.data) + + assert modified["object"]["sensitive"] + end + + test "it doesn't sets the sensitive property with irrelevant hashtags" do + user = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{status: "#cofe hey"}) + {:ok, modified} = Transmogrifier.prepare_outgoing(activity.data) + + refute modified["object"]["sensitive"] + end +end diff --git a/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs b/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs index f48e5b39b..5c0aff26e 100644 --- a/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs +++ b/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs @@ -75,10 +75,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do local_message = build_local_message() assert SimplePolicy.filter(media_message) == - {:ok, - media_message - |> put_in(["object", "tag"], ["foo", "nsfw"]) - |> put_in(["object", "sensitive"], true)} + {:ok, put_in(media_message, ["object", "sensitive"], true)} assert SimplePolicy.filter(local_message) == {:ok, local_message} end @@ -89,10 +86,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do local_message = build_local_message() assert SimplePolicy.filter(media_message) == - {:ok, - media_message - |> put_in(["object", "tag"], ["foo", "nsfw"]) - |> put_in(["object", "sensitive"], true)} + {:ok, put_in(media_message, ["object", "sensitive"], true)} assert SimplePolicy.filter(local_message) == {:ok, local_message} end diff --git a/test/pleroma/web/activity_pub/mrf/tag_policy_test.exs b/test/pleroma/web/activity_pub/mrf/tag_policy_test.exs index 66e98b7ee..faaadff79 100644 --- a/test/pleroma/web/activity_pub/mrf/tag_policy_test.exs +++ b/test/pleroma/web/activity_pub/mrf/tag_policy_test.exs @@ -114,7 +114,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.TagPolicyTest do except_message = %{ "actor" => actor.ap_id, "type" => "Create", - "object" => %{"tag" => ["test", "nsfw"], "attachment" => ["file1"], "sensitive" => true} + "object" => %{"tag" => ["test"], "attachment" => ["file1"], "sensitive" => true} } assert TagPolicy.filter(message) == {:ok, except_message} diff --git a/test/pleroma/web/activity_pub/mrf_test.exs b/test/pleroma/web/activity_pub/mrf_test.exs index 7c1eef7e0..61d308b97 100644 --- a/test/pleroma/web/activity_pub/mrf_test.exs +++ b/test/pleroma/web/activity_pub/mrf_test.exs @@ -68,7 +68,12 @@ defmodule Pleroma.Web.ActivityPub.MRFTest do clear_config([:mrf, :policies], [Pleroma.Web.ActivityPub.MRF.NoOpPolicy]) expected = %{ - mrf_policies: ["NoOpPolicy"], + mrf_policies: ["NoOpPolicy", "HashtagPolicy"], + mrf_hashtag: %{ + federated_timeline_removal: [], + reject: [], + sensitive: ["nsfw"] + }, exclusions: false } @@ -79,8 +84,13 @@ defmodule Pleroma.Web.ActivityPub.MRFTest do clear_config([:mrf, :policies], [MRFModuleMock]) expected = %{ - mrf_policies: ["MRFModuleMock"], + mrf_policies: ["MRFModuleMock", "HashtagPolicy"], mrf_module_mock: "some config data", + mrf_hashtag: %{ + federated_timeline_removal: [], + reject: [], + sensitive: ["nsfw"] + }, exclusions: 
diff --git a/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs b/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs
index 31586abc9..deb956410 100644
--- a/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs
+++ b/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs
@@ -39,7 +39,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier.NoteHandlingTest do
       {:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(data)
       object = Object.normalize(data["object"], fetch: false)
 
-      assert "test" in object.data["tag"]
+      assert "test" in Object.tags(object)
+      assert Object.hashtags(object) == ["test"]
     end
 
     test "it cleans up incoming notices which are not really DMs" do
@@ -220,7 +221,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier.NoteHandlingTest do
       {:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(data)
       object = Object.normalize(data["object"], fetch: false)
 
-      assert Enum.at(object.data["tag"], 2) == "moo"
+      assert Enum.at(Object.tags(object), 2) == "moo"
+      assert Object.hashtags(object) == ["moo"]
     end
 
     test "it works for incoming notices with contentMap" do
diff --git a/test/pleroma/web/activity_pub/transmogrifier_test.exs b/test/pleroma/web/activity_pub/transmogrifier_test.exs
index 211e535a5..4c3fcb44a 100644
--- a/test/pleroma/web/activity_pub/transmogrifier_test.exs
+++ b/test/pleroma/web/activity_pub/transmogrifier_test.exs
@@ -153,15 +153,6 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do
     end
   end
 
-  test "it adds the sensitive property" do
-    user = insert(:user)
-
-    {:ok, activity} = CommonAPI.post(user, %{status: "#nsfw hey"})
-    {:ok, modified} = Transmogrifier.prepare_outgoing(activity.data)
-
-    assert modified["object"]["sensitive"]
-  end
-
   test "it adds the json-ld context and the conversation property" do
     user = insert(:user)
diff --git a/test/pleroma/web/common_api_test.exs b/test/pleroma/web/common_api_test.exs
index adfe58def..6619f8fc8 100644
--- a/test/pleroma/web/common_api_test.exs
+++ b/test/pleroma/web/common_api_test.exs
@@ -25,6 +25,11 @@ defmodule Pleroma.Web.CommonAPITest do
 
   require Pleroma.Constants
 
+  setup_all do
+    Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
+    :ok
+  end
+
   setup do: clear_config([:instance, :safe_dm_mentions])
   setup do: clear_config([:instance, :limit])
   setup do: clear_config([:instance, :max_pinned_statuses])
@@ -493,7 +498,7 @@ defmodule Pleroma.Web.CommonAPITest do
 
       object = Object.normalize(activity, fetch: false)
 
-      assert object.data["tag"] == ["2hu"]
+      assert Object.tags(object) == ["2hu"]
     end
 
     test "it adds emoji in the object" do
@@ -517,6 +522,27 @@ defmodule Pleroma.Web.CommonAPITest do
       assert url == "#{Pleroma.Web.base_url()}/emoji/blank.png"
     end
 
+    test "it copies emoji from the subject of the parent post" do
+      %Object{} =
+        object =
+        Object.normalize("https://patch.cx/objects/a399c28e-c821-4820-bc3e-4afeb044c16f",
+          fetch: true
+        )
+
+      activity = Activity.get_create_by_object_ap_id(object.data["id"])
+      user = insert(:user)
+
+      {:ok, reply_activity} =
+        CommonAPI.post(user, %{
+          in_reply_to_id: activity.id,
+          status: ":joker_disapprove:",
+          spoiler_text: ":joker_smile:"
+        })
+
+      assert Object.normalize(reply_activity).data["emoji"][":joker_smile:"]
+      refute Object.normalize(reply_activity).data["emoji"][":joker_disapprove:"]
+    end
+
     test "deactivated users can't post" do
       user = insert(:user, is_active: false)
       assert {:error, _} = CommonAPI.post(user, %{status: "ye"})
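Several assertions above move from reading `object.data["tag"]` directly to the `Object.tags/1` and `Object.hashtags/1` helpers; judging by the tests, `tags/1` returns the tag names recorded on the object while `hashtags/1` returns only the hashtags, which may now live in the hashtags table rather than solely in the embedded data. A test-style sketch follows; the status text is illustrative and `user` is assumed to come from `insert(:user)`.

    # Sketch mirroring the assertions above: both helpers report the hashtag by name.
    {:ok, activity} = Pleroma.Web.CommonAPI.post(user, %{status: "hello #elixir"})
    object = Pleroma.Object.normalize(activity, fetch: false)

    assert "elixir" in Pleroma.Object.tags(object)
    assert Pleroma.Object.hashtags(object) == ["elixir"]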
diff --git a/test/pleroma/web/mastodon_api/views/status_view_test.exs b/test/pleroma/web/mastodon_api/views/status_view_test.exs
index 2de3afc4f..4172cc294 100644
--- a/test/pleroma/web/mastodon_api/views/status_view_test.exs
+++ b/test/pleroma/web/mastodon_api/views/status_view_test.exs
@@ -262,8 +262,8 @@ defmodule Pleroma.Web.MastodonAPI.StatusViewTest do
       mentions: [],
       tags: [
         %{
-          name: "#{object_data["tag"]}",
-          url: "http://localhost:4001/tag/#{object_data["tag"]}"
+          name: "#{hd(object_data["tag"])}",
+          url: "http://localhost:4001/tag/#{hd(object_data["tag"])}"
         }
       ],
       application: nil,
diff --git a/test/pleroma/web/media_proxy_test.exs b/test/pleroma/web/media_proxy_test.exs
index 7411d0a7a..b5ee6328d 100644
--- a/test/pleroma/web/media_proxy_test.exs
+++ b/test/pleroma/web/media_proxy_test.exs
@@ -11,8 +11,7 @@ defmodule Pleroma.Web.MediaProxyTest do
   alias Pleroma.Web.MediaProxy
 
   defp decode_result(encoded) do
-    [_, "proxy", sig, base64 | _] = URI.parse(encoded).path |> String.split("/")
-    {:ok, decoded} = MediaProxy.decode_url(sig, base64)
+    {:ok, decoded} = MediaProxy.decode_url(encoded)
     decoded
   end
diff --git a/test/pleroma/web/web_finger_test.exs b/test/pleroma/web/web_finger_test.exs
index 84477d5a1..2d7b4a40b 100644
--- a/test/pleroma/web/web_finger_test.exs
+++ b/test/pleroma/web/web_finger_test.exs
@@ -45,6 +45,26 @@ defmodule Pleroma.Web.WebFingerTest do
       assert {:error, _} = WebFinger.finger("pleroma.social")
     end
 
+    test "returns error when there is no content-type header" do
+      Tesla.Mock.mock(fn
+        %{url: "http://social.heldscal.la/.well-known/host-meta"} ->
+          {:ok,
+           %Tesla.Env{
+             status: 200,
+             body: File.read!("test/fixtures/tesla_mock/social.heldscal.la_host_meta")
+           }}
+
+        %{
+          url:
+            "https://social.heldscal.la/.well-known/webfinger?resource=acct:invalid_content@social.heldscal.la"
+        } ->
+          {:ok, %Tesla.Env{status: 200, body: ""}}
+      end)
+
+      user = "invalid_content@social.heldscal.la"
+      assert {:error, {:content_type, nil}} = WebFinger.finger(user)
+    end
+
     test "returns error when fails parse xml or json" do
       user = "invalid_content@social.heldscal.la"
       assert {:error, %Jason.DecodeError{}} = WebFinger.finger(user)
@@ -113,5 +133,52 @@ defmodule Pleroma.Web.WebFingerTest do
       ap_id = "https://" <> to_string(:idna.encode("zetsubou.みんな")) <> "/users/lain"
       {:ok, _data} = WebFinger.finger(ap_id)
     end
+
+    test "respects json content-type" do
+      Tesla.Mock.mock(fn
+        %{
+          url:
+            "https://mastodon.social/.well-known/webfinger?resource=acct:emelie@mastodon.social"
+        } ->
+          {:ok,
+           %Tesla.Env{
+             status: 200,
+             body: File.read!("test/fixtures/tesla_mock/webfinger_emelie.json"),
+             headers: [{"content-type", "application/jrd+json"}]
+           }}
+
+        %{url: "http://mastodon.social/.well-known/host-meta"} ->
+          {:ok,
+           %Tesla.Env{
+             status: 200,
+             body: File.read!("test/fixtures/tesla_mock/mastodon.social_host_meta")
+           }}
+      end)
+
+      {:ok, _data} = WebFinger.finger("emelie@mastodon.social")
+    end
+
+    test "respects xml content-type" do
+      Tesla.Mock.mock(fn
+        %{
+          url: "https://pawoo.net/.well-known/webfinger?resource=acct:pekorino@pawoo.net"
+        } ->
+          {:ok,
+           %Tesla.Env{
+             status: 200,
+             body: File.read!("test/fixtures/tesla_mock/https___pawoo.net_users_pekorino.xml"),
+             headers: [{"content-type", "application/xrd+xml"}]
+           }}
+
+        %{url: "http://pawoo.net/.well-known/host-meta"} ->
+          {:ok,
+           %Tesla.Env{
+             status: 200,
+             body: File.read!("test/fixtures/tesla_mock/pawoo.net_host_meta")
+           }}
+      end)
+
+      {:ok, _data} = WebFinger.finger("pekorino@pawoo.net")
+    end
   end
 end
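The WebFinger tests above establish that a response is only parsed when it advertises a JSON (`application/jrd+json`) or XML (`application/xrd+xml`) content-type, and that a missing header now yields `{:error, {:content_type, nil}}`; this is also why the HttpRequestMock fixtures below gain explicit headers. A minimal mock sketch follows; the domain, account, and body are illustrative, and as in the tests above a matching host-meta mock may also be needed before calling `WebFinger.finger/1`.

    Tesla.Mock.mock(fn
      %{url: "https://example.com/.well-known/webfinger?resource=acct:alice@example.com"} ->
        {:ok,
         %Tesla.Env{
           status: 200,
           # Omitting this header would now make WebFinger.finger/1 return
           # {:error, {:content_type, nil}} instead of parsing the body.
           headers: [{"content-type", "application/jrd+json"}],
           body: ~s({"subject": "acct:alice@example.com", "links": []})
         }}
    end)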
diff --git a/test/support/conn_case.ex b/test/support/conn_case.ex
index 953aa010a..deee98599 100644
--- a/test/support/conn_case.ex
+++ b/test/support/conn_case.ex
@@ -67,13 +67,11 @@ defmodule Pleroma.Web.ConnCase do
     end
 
     defp json_response_and_validate_schema(
-           %{
-             private: %{
-               open_api_spex: %{operation_id: op_id, operation_lookup: lookup, spec: spec}
-             }
-           } = conn,
+           %{private: %{operation_id: op_id}} = conn,
            status
          ) do
+      {spec, lookup} = OpenApiSpex.Plug.PutApiSpec.get_spec_and_operation_lookup(conn)
+
       content_type =
         conn
         |> Plug.Conn.get_resp_header("content-type")
diff --git a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex
index 1328d6225..eb692fab5 100644
--- a/test/support/http_request_mock.ex
+++ b/test/support/http_request_mock.ex
@@ -122,7 +122,7 @@ defmodule HttpRequestMock do
      %Tesla.Env{
        status: 200,
        body: File.read!("test/fixtures/tesla_mock/mike@osada.macgirvin.com.json"),
-       headers: activitypub_object_headers()
+       headers: [{"content-type", "application/jrd+json"}]
      }}
   end
@@ -187,7 +187,8 @@ defmodule HttpRequestMock do
     {:ok,
      %Tesla.Env{
        status: 200,
-       body: File.read!("test/fixtures/tesla_mock/lain_squeet.me_webfinger.xml")
+       body: File.read!("test/fixtures/tesla_mock/lain_squeet.me_webfinger.xml"),
+       headers: [{"content-type", "application/xrd+xml"}]
      }}
   end
@@ -526,22 +527,6 @@ defmodule HttpRequestMock do
      }}
   end
 
-  def get("http://zetsubou.xn--q9jyb4c/.well-known/host-meta", _, _, _) do
-    {:ok,
-     %Tesla.Env{
-       status: 200,
-       body: File.read!("test/fixtures/tesla_mock/xn--q9jyb4c_host_meta")
-     }}
-  end
-
-  def get("https://zetsubou.xn--q9jyb4c/.well-known/host-meta", _, _, _) do
-    {:ok,
-     %Tesla.Env{
-       status: 200,
-       body: File.read!("test/fixtures/tesla_mock/xn--q9jyb4c_host_meta")
-     }}
-  end
-
   def get("http://pleroma.soykaf.com/.well-known/host-meta", _, _, _) do
     {:ok,
      %Tesla.Env{
@@ -786,7 +771,8 @@ defmodule HttpRequestMock do
     {:ok,
      %Tesla.Env{
        status: 200,
-       body: File.read!("test/fixtures/tesla_mock/shp@social.heldscal.la.xml")
+       body: File.read!("test/fixtures/tesla_mock/shp@social.heldscal.la.xml"),
+       headers: [{"content-type", "application/xrd+xml"}]
      }}
   end
@@ -796,7 +782,7 @@ defmodule HttpRequestMock do
         _,
         [{"accept", "application/xrd+xml,application/jrd+json"}]
       ) do
-    {:ok, %Tesla.Env{status: 200, body: ""}}
+    {:ok, %Tesla.Env{status: 200, body: "", headers: [{"content-type", "application/jrd+json"}]}}
   end
 
   def get("http://framatube.org/.well-known/host-meta", _, _, _) do
@@ -816,7 +802,7 @@ defmodule HttpRequestMock do
     {:ok,
      %Tesla.Env{
        status: 200,
-       headers: [{"content-type", "application/json"}],
+       headers: [{"content-type", "application/jrd+json"}],
        body: File.read!("test/fixtures/tesla_mock/framasoft@framatube.org.json")
      }}
   end
@@ -876,7 +862,7 @@ defmodule HttpRequestMock do
     {:ok,
      %Tesla.Env{
        status: 200,
-       headers: [{"content-type", "application/json"}],
+       headers: [{"content-type", "application/jrd+json"}],
        body: File.read!("test/fixtures/tesla_mock/kaniini@gerzilla.de.json")
      }}
   end
@@ -1074,7 +1060,8 @@ defmodule HttpRequestMock do
     {:ok,
      %Tesla.Env{
        status: 200,
-       body: File.read!("test/fixtures/lain.xml")
+       body: File.read!("test/fixtures/lain.xml"),
+       headers: [{"content-type", "application/xrd+xml"}]
      }}
   end
@@ -1087,7 +1074,16 @@ defmodule HttpRequestMock do
     {:ok,
      %Tesla.Env{
        status: 200,
-       body: File.read!("test/fixtures/lain.xml")
+       body: File.read!("test/fixtures/lain.xml"),
+       headers: [{"content-type", "application/xrd+xml"}]
+     }}
+  end
+
+  def get("http://zetsubou.xn--q9jyb4c/.well-known/host-meta", _, _, _) do
+    {:ok,
+     %Tesla.Env{
+       status: 200,
+       body: File.read!("test/fixtures/host-meta-zetsubou.xn--q9jyb4c.xml")
File.read!("test/fixtures/host-meta-zetsubou.xn--q9jyb4c.xml") }} end @@ -1153,7 +1149,8 @@ defmodule HttpRequestMock do {:ok, %Tesla.Env{ status: 200, - body: File.read!("test/fixtures/tesla_mock/kpherox@mstdn.jp.xml") + body: File.read!("test/fixtures/tesla_mock/kpherox@mstdn.jp.xml"), + headers: [{"content-type", "application/xrd+xml"}] }} end @@ -1281,6 +1278,15 @@ defmodule HttpRequestMock do }} end + def get("https://patch.cx/objects/a399c28e-c821-4820-bc3e-4afeb044c16f", _, _, _) do + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/tesla_mock/emoji-in-summary.json"), + headers: activitypub_object_headers() + }} + end + def get(url, query, body, headers) do {:error, "Mock response not implemented for GET #{inspect(url)}, #{query}, #{inspect(body)}, #{ |