-rw-r--r--  changelog.d/backups-refactor.change                                  |   1
-rw-r--r--  config/config.exs                                                    |   4
-rw-r--r--  config/description.exs                                               |  15
-rw-r--r--  config/test.exs                                                      |   2
-rw-r--r--  docs/configuration/cheatsheet.md                                     |   1
-rw-r--r--  lib/pleroma/ecto_enums.ex                                            |   8
-rw-r--r--  lib/pleroma/emails/user_email.ex                                     |  35
-rw-r--r--  lib/pleroma/user/backup.ex                                           | 360
-rw-r--r--  lib/pleroma/web/admin_api/controllers/admin_api_controller.ex       |   5
-rw-r--r--  lib/pleroma/web/api_spec/operations/pleroma_backup_operation.ex     |  10
-rw-r--r--  lib/pleroma/web/pleroma_api/controllers/backup_controller.ex        |   2
-rw-r--r--  lib/pleroma/web/pleroma_api/views/backup_view.ex                     |  10
-rw-r--r--  lib/pleroma/workers/backup_worker.ex                                 |  50
-rw-r--r--  priv/repo/migrations/20240622175346_backup_refactor.exs              |  19
-rw-r--r--  test/pleroma/user/backup_async_test.exs                              |  49
-rw-r--r--  test/pleroma/user/backup_test.exs                                    | 171
-rw-r--r--  test/pleroma/web/admin_api/controllers/admin_api_controller_test.exs |   8
-rw-r--r--  test/pleroma/web/pleroma_api/controllers/backup_controller_test.exs  |   4
-rw-r--r--  test/pleroma/web/pleroma_api/views/backup_view_test.exs              |  35
-rw-r--r--  test/support/mocks.ex                                                |   2
20 files changed, 297 insertions(+), 494 deletions(-)
diff --git a/changelog.d/backups-refactor.change b/changelog.d/backups-refactor.change
new file mode 100644
index 000000000..47fc74138
--- /dev/null
+++ b/changelog.d/backups-refactor.change
@@ -0,0 +1 @@
+Refactor the user backups code and improve test coverage
diff --git a/config/config.exs b/config/config.exs
index 30626d4a1..2f0d9d7d2 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -910,8 +910,8 @@ config :pleroma, Pleroma.User.Backup,
purge_after_days: 30,
limit_days: 7,
dir: nil,
- process_wait_time: 30_000,
- process_chunk_size: 100
+ process_chunk_size: 100,
+ timeout: :timer.minutes(30)
config :pleroma, ConcurrentLimiter, [
{Pleroma.Search, [max_running: 30, max_waiting: 50]}
diff --git a/config/description.exs b/config/description.exs
index 10a6e9cdf..2809e9130 100644
--- a/config/description.exs
+++ b/config/description.exs
@@ -3356,19 +3356,18 @@ config :pleroma, :config_description, [
suggestions: [7]
},
%{
- key: :process_wait_time,
- type: :integer,
- label: "Process Wait Time",
- description:
- "The amount of time to wait for backup to report progress, in milliseconds. If no progress is received from the backup job for that much time, terminate it and deem it failed.",
- suggestions: [30_000]
- },
- %{
key: :process_chunk_size,
type: :integer,
label: "Process Chunk Size",
description: "The number of activities to fetch in the backup job for each chunk.",
suggestions: [100]
+ },
+ %{
+ key: :timeout,
+ type: :integer,
+ label: "Timeout",
+ description: "The amount of time to wait for backup to complete in seconds.",
+ suggestions: [1_800]
}
]
},
diff --git a/config/test.exs b/config/test.exs
index 8a5694054..6fe84478a 100644
--- a/config/test.exs
+++ b/config/test.exs
@@ -188,6 +188,8 @@ config :pleroma, Pleroma.Web.RichMedia.Backfill,
config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, enable: false
+config :pleroma, Pleroma.User.Backup, tempdir: "test/tmp"
+
if File.exists?("./config/test.secret.exs") do
import_config "test.secret.exs"
else
diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md
index ab0d1c78d..0b4e53b6f 100644
--- a/docs/configuration/cheatsheet.md
+++ b/docs/configuration/cheatsheet.md
@@ -1171,6 +1171,7 @@ Control favicons for instances.
3. the directory named by the TMP environment variable
4. C:\TMP on Windows or /tmp on Unix-like operating systems
5. as a last resort, the current working directory
+* `:timeout` an integer representing seconds
## Frontend management
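[Note: for orientation, a minimal sketch of the resulting Pleroma.User.Backup configuration, mirroring the defaults in the config/config.exs hunk above; the test environment additionally sets :tempdir, as shown in the config/test.exs hunk.]

    # Sketch only: defaults after this change, taken from config/config.exs.
    config :pleroma, Pleroma.User.Backup,
      purge_after_days: 30,
      limit_days: 7,
      dir: nil,
      process_chunk_size: 100,
      timeout: :timer.minutes(30)
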
diff --git a/lib/pleroma/ecto_enums.ex b/lib/pleroma/ecto_enums.ex
index b346b39d6..a4890b489 100644
--- a/lib/pleroma/ecto_enums.ex
+++ b/lib/pleroma/ecto_enums.ex
@@ -27,11 +27,3 @@ defenum(Pleroma.DataMigration.State,
failed: 4,
manual: 5
)
-
-defenum(Pleroma.User.Backup.State,
- pending: 1,
- running: 2,
- complete: 3,
- failed: 4,
- invalid: 5
-)
diff --git a/lib/pleroma/emails/user_email.ex b/lib/pleroma/emails/user_email.ex
index 95b963764..10d89d2f3 100644
--- a/lib/pleroma/emails/user_email.ex
+++ b/lib/pleroma/emails/user_email.ex
@@ -345,37 +345,22 @@ defmodule Pleroma.Emails.UserEmail do
Router.Helpers.subscription_url(Endpoint, :unsubscribe, token)
end
- def backup_is_ready_email(backup, admin_user_id \\ nil) do
+ def backup_is_ready_email(backup) do
%{user: user} = Pleroma.Repo.preload(backup, :user)
Gettext.with_locale_or_default user.language do
download_url = Pleroma.Web.PleromaAPI.BackupView.download_url(backup)
html_body =
- if is_nil(admin_user_id) do
- Gettext.dpgettext(
- "static_pages",
- "account archive email body - self-requested",
- """
- <p>You requested a full backup of your Pleroma account. It's ready for download:</p>
- <p><a href="%{download_url}">%{download_url}</a></p>
- """,
- download_url: download_url
- )
- else
- admin = Pleroma.Repo.get(User, admin_user_id)
-
- Gettext.dpgettext(
- "static_pages",
- "account archive email body - admin requested",
- """
- <p>Admin @%{admin_nickname} requested a full backup of your Pleroma account. It's ready for download:</p>
- <p><a href="%{download_url}">%{download_url}</a></p>
- """,
- admin_nickname: admin.nickname,
- download_url: download_url
- )
- end
+ Gettext.dpgettext(
+ "static_pages",
+ "account archive email body",
+ """
+ <p>A full backup of your Pleroma account was requested. It's ready for download:</p>
+ <p><a href="%{download_url}">%{download_url}</a></p>
+ """,
+ download_url: download_url
+ )
new()
|> to(recipient(user))
diff --git a/lib/pleroma/user/backup.ex b/lib/pleroma/user/backup.ex
index 1821de667..7feaa22bf 100644
--- a/lib/pleroma/user/backup.ex
+++ b/lib/pleroma/user/backup.ex
@@ -14,9 +14,10 @@ defmodule Pleroma.User.Backup do
alias Pleroma.Activity
alias Pleroma.Bookmark
+ alias Pleroma.Config
alias Pleroma.Repo
+ alias Pleroma.Uploaders.Uploader
alias Pleroma.User
- alias Pleroma.User.Backup.State
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.ActivityPub.UserView
@@ -29,71 +30,111 @@ defmodule Pleroma.User.Backup do
field(:file_name, :string)
field(:file_size, :integer, default: 0)
field(:processed, :boolean, default: false)
- field(:state, State, default: :invalid)
- field(:processed_number, :integer, default: 0)
+ field(:tempdir, :string)
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
timestamps()
end
- @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
+ @doc """
+ Schedules a job to backup a user if the number of backup requests has not exceeded the limit.
- def create(user, admin_id \\ nil) do
- with :ok <- validate_limit(user, admin_id),
- {:ok, backup} <- user |> new() |> Repo.insert() do
- BackupWorker.process(backup, admin_id)
+ Admins can directly call new/1 and schedule_backup/1 to bypass the limit.
+ """
+ @spec user(User.t()) :: {:ok, t()} | {:error, any()}
+ def user(user) do
+ days = Config.get([__MODULE__, :limit_days])
+
+ with true <- permitted?(user),
+ %__MODULE__{} = backup <- new(user),
+ {:ok, inserted_backup} <- Repo.insert(backup),
+ {:ok, %Oban.Job{}} <- schedule_backup(inserted_backup) do
+ {:ok, inserted_backup}
+ else
+ false ->
+ {:error,
+ dngettext(
+ "errors",
+ "Last export was less than a day ago",
+ "Last export was less than %{days} days ago",
+ days,
+ days: days
+ )}
+
+ e ->
+ {:error, e}
end
end
+ @doc "Generates a %Backup{} for a user with a random file name"
+ @spec new(User.t()) :: t()
def new(user) do
rand_str = :crypto.strong_rand_bytes(32) |> Base.url_encode64(padding: false)
datetime = Calendar.NaiveDateTime.Format.iso8601_basic(NaiveDateTime.utc_now())
name = "archive-#{user.nickname}-#{datetime}-#{rand_str}.zip"
%__MODULE__{
- user_id: user.id,
content_type: "application/zip",
file_name: name,
- state: :pending
+ tempdir: tempdir(),
+ user: user
}
end
- def delete(backup) do
- uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
+ @doc "Schedules the execution of the provided backup"
+ @spec schedule_backup(t()) :: {:ok, Oban.Job.t()} | {:error, any()}
+ def schedule_backup(backup) do
+ with false <- is_nil(backup.id) do
+ %{"op" => "process", "backup_id" => backup.id}
+ |> BackupWorker.new()
+ |> Oban.insert()
+ else
+ true ->
+ {:error, "Backup is missing id. Please insert it into the Repo first."}
+
+ e ->
+ {:error, e}
+ end
+ end
+
+ @doc "Deletes the backup archive file and removes the database record"
+ @spec delete_archive(t()) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
+ def delete_archive(backup) do
+ uploader = Config.get([Pleroma.Upload, :uploader])
with :ok <- uploader.delete_file(Path.join("backups", backup.file_name)) do
Repo.delete(backup)
end
end
- defp validate_limit(_user, admin_id) when is_binary(admin_id), do: :ok
-
- defp validate_limit(user, nil) do
- case get_last(user.id) do
- %__MODULE__{inserted_at: inserted_at} ->
- days = Pleroma.Config.get([__MODULE__, :limit_days])
- diff = Timex.diff(NaiveDateTime.utc_now(), inserted_at, :days)
-
- if diff > days do
- :ok
- else
- {:error,
- dngettext(
- "errors",
- "Last export was less than a day ago",
- "Last export was less than %{days} days ago",
- days,
- days: days
- )}
- end
+ @doc "Schedules a job to delete the backup archive"
+ @spec schedule_delete(t()) :: {:ok, Oban.Job.t()} | {:error, any()}
+ def schedule_delete(backup) do
+ days = Config.get([__MODULE__, :purge_after_days])
+ time = 60 * 60 * 24 * days
+ scheduled_at = Calendar.NaiveDateTime.add!(backup.inserted_at, time)
- nil ->
- :ok
+ %{"op" => "delete", "backup_id" => backup.id}
+ |> BackupWorker.new(scheduled_at: scheduled_at)
+ |> Oban.insert()
+ end
+
+ defp permitted?(user) do
+ with {_, %__MODULE__{inserted_at: inserted_at}} <- {:last, get_last(user)},
+ days = Config.get([__MODULE__, :limit_days]),
+ diff = Timex.diff(NaiveDateTime.utc_now(), inserted_at, :days),
+ {_, true} <- {:diff, diff > days} do
+ true
+ else
+ {:last, nil} -> true
+ {:diff, false} -> false
end
end
- def get_last(user_id) do
+ @doc "Returns last backup for the provided user"
+ @spec get_last(User.t()) :: t()
+ def get_last(%User{id: user_id}) do
__MODULE__
|> where(user_id: ^user_id)
|> order_by(desc: :id)
@@ -101,6 +142,8 @@ defmodule Pleroma.User.Backup do
|> Repo.one()
end
+ @doc "Lists all existing backups for a user"
+ @spec list(User.t()) :: [Ecto.Schema.t() | term()]
def list(%User{id: user_id}) do
__MODULE__
|> where(user_id: ^user_id)
@@ -108,92 +151,35 @@ defmodule Pleroma.User.Backup do
|> Repo.all()
end
- def remove_outdated(%__MODULE__{id: latest_id, user_id: user_id}) do
- __MODULE__
- |> where(user_id: ^user_id)
- |> where([b], b.id != ^latest_id)
- |> Repo.all()
- |> Enum.each(&BackupWorker.delete/1)
+  @doc "Schedules deletion of all but the most recent backup"
+ @spec remove_outdated(User.t()) :: :ok
+ def remove_outdated(user) do
+ with %__MODULE__{} = latest_backup <- get_last(user) do
+ __MODULE__
+ |> where(user_id: ^user.id)
+ |> where([b], b.id != ^latest_backup.id)
+ |> Repo.all()
+ |> Enum.each(&schedule_delete/1)
+ else
+ _ -> :ok
+ end
end
- def get(id), do: Repo.get(__MODULE__, id)
-
- defp set_state(backup, state, processed_number \\ nil) do
- struct =
- %{state: state}
- |> Pleroma.Maps.put_if_present(:processed_number, processed_number)
+ def get_by_id(id), do: Repo.get(__MODULE__, id)
+ @doc "Generates changeset for %Pleroma.User.Backup{}"
+ @spec changeset(%__MODULE__{}, map()) :: %Ecto.Changeset{}
+ def changeset(backup \\ %__MODULE__{}, attrs) do
backup
- |> cast(struct, [:state, :processed_number])
- |> Repo.update()
- end
-
- def process(
- %__MODULE__{} = backup,
- processor_module \\ __MODULE__.Processor
- ) do
- set_state(backup, :running, 0)
-
- current_pid = self()
-
- task =
- Task.Supervisor.async_nolink(
- Pleroma.TaskSupervisor,
- processor_module,
- :do_process,
- [backup, current_pid]
- )
-
- wait_backup(backup, backup.processed_number, task)
+ |> cast(attrs, [:content_type, :file_name, :file_size, :processed, :tempdir])
end
- defp wait_backup(backup, current_processed, task) do
- wait_time = @config_impl.get([__MODULE__, :process_wait_time])
-
- receive do
- {:progress, new_processed} ->
- total_processed = current_processed + new_processed
-
- set_state(backup, :running, total_processed)
- wait_backup(backup, total_processed, task)
-
- {:DOWN, _ref, _proc, _pid, reason} ->
- backup = get(backup.id)
-
- if reason != :normal do
- Logger.error("Backup #{backup.id} process ended abnormally: #{inspect(reason)}")
-
- {:ok, backup} = set_state(backup, :failed)
-
- cleanup(backup)
-
- {:error,
- %{
- backup: backup,
- reason: :exit,
- details: reason
- }}
- else
- {:ok, backup}
- end
- after
- wait_time ->
- Logger.error(
- "Backup #{backup.id} timed out after no response for #{wait_time}ms, terminating"
- )
-
- Task.Supervisor.terminate_child(Pleroma.TaskSupervisor, task.pid)
-
- {:ok, backup} = set_state(backup, :failed)
-
- cleanup(backup)
-
- {:error,
- %{
- backup: backup,
- reason: :timeout
- }}
- end
+ @doc "Updates the backup record"
+ @spec update_record(%__MODULE__{}, map()) :: {:ok, %__MODULE__{}} | {:error, %Ecto.Changeset{}}
+ def update_record(%__MODULE__{} = backup, attrs) do
+ backup
+ |> changeset(attrs)
+ |> Repo.update()
end
@files [
@@ -204,53 +190,68 @@ defmodule Pleroma.User.Backup do
~c"followers.json",
~c"following.json"
]
- @spec export(Pleroma.User.Backup.t(), pid()) :: {:ok, String.t()} | :error
- def export(%__MODULE__{} = backup, caller_pid) do
+
+ @spec run(t()) :: {:ok, t()} | {:error, :failed}
+ def run(%__MODULE__{} = backup) do
backup = Repo.preload(backup, :user)
- dir = backup_tempdir(backup)
-
- with :ok <- File.mkdir(dir),
- :ok <- actor(dir, backup.user, caller_pid),
- :ok <- statuses(dir, backup.user, caller_pid),
- :ok <- likes(dir, backup.user, caller_pid),
- :ok <- bookmarks(dir, backup.user, caller_pid),
- :ok <- followers(dir, backup.user, caller_pid),
- :ok <- following(dir, backup.user, caller_pid),
- {:ok, zip_path} <- :zip.create(backup.file_name, @files, cwd: dir),
- {:ok, _} <- File.rm_rf(dir) do
- {:ok, zip_path}
+ tempfile = Path.join([backup.tempdir, backup.file_name])
+
+ with {_, :ok} <- {:mkdir, File.mkdir_p(backup.tempdir)},
+ {_, :ok} <- {:actor, actor(backup.tempdir, backup.user)},
+ {_, :ok} <- {:statuses, statuses(backup.tempdir, backup.user)},
+ {_, :ok} <- {:likes, likes(backup.tempdir, backup.user)},
+ {_, :ok} <- {:bookmarks, bookmarks(backup.tempdir, backup.user)},
+ {_, :ok} <- {:followers, followers(backup.tempdir, backup.user)},
+ {_, :ok} <- {:following, following(backup.tempdir, backup.user)},
+ {_, {:ok, _zip_path}} <-
+ {:zip, :zip.create(to_charlist(tempfile), @files, cwd: to_charlist(backup.tempdir))},
+ {_, {:ok, %File.Stat{size: zip_size}}} <- {:filestat, File.stat(tempfile)},
+ {:ok, updated_backup} <- update_record(backup, %{file_size: zip_size}) do
+ {:ok, updated_backup}
else
- _ -> :error
+ _ ->
+ File.rm_rf(backup.tempdir)
+ {:error, :failed}
end
end
- def dir(name) do
- dir = Pleroma.Config.get([__MODULE__, :dir]) || System.tmp_dir!()
- Path.join(dir, name)
+ defp tempdir do
+ rand = :crypto.strong_rand_bytes(8) |> Base.url_encode64(padding: false)
+ subdir = "backup-#{rand}"
+
+ case Config.get([__MODULE__, :tempdir]) do
+ nil ->
+ Path.join([System.tmp_dir!(), subdir])
+
+ path ->
+ Path.join([path, subdir])
+ end
end
- def upload(%__MODULE__{} = backup, zip_path) do
- uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
+ @doc "Uploads the completed backup and marks it as processed"
+ @spec upload(t()) :: {:ok, t()}
+ def upload(%__MODULE__{tempdir: tempdir} = backup) when is_binary(tempdir) do
+ uploader = Config.get([Pleroma.Upload, :uploader])
upload = %Pleroma.Upload{
name: backup.file_name,
- tempfile: zip_path,
+ tempfile: Path.join([tempdir, backup.file_name]),
content_type: backup.content_type,
path: Path.join("backups", backup.file_name)
}
- with {:ok, _} <- Pleroma.Uploaders.Uploader.put_file(uploader, upload),
- :ok <- File.rm(zip_path) do
- {:ok, upload}
+ with {:ok, _} <- Uploader.put_file(uploader, upload),
+ {:ok, uploaded_backup} <- update_record(backup, %{processed: true}),
+ {:ok, _} <- File.rm_rf(tempdir) do
+ {:ok, uploaded_backup}
end
end
- defp actor(dir, user, caller_pid) do
+ defp actor(dir, user) do
with {:ok, json} <-
UserView.render("user.json", %{user: user})
|> Map.merge(%{"likes" => "likes.json", "bookmarks" => "bookmarks.json"})
|> Jason.encode() do
- send(caller_pid, {:progress, 1})
File.write(Path.join(dir, "actor.json"), json)
end
end
@@ -269,22 +270,10 @@ defmodule Pleroma.User.Backup do
)
end
- defp should_report?(num, chunk_size), do: rem(num, chunk_size) == 0
-
- defp backup_tempdir(backup) do
- name = String.trim_trailing(backup.file_name, ".zip")
- dir(name)
- end
-
- defp cleanup(backup) do
- dir = backup_tempdir(backup)
- File.rm_rf(dir)
- end
-
- defp write(query, dir, name, fun, caller_pid) do
+ defp write(query, dir, name, fun) do
path = Path.join(dir, "#{name}.json")
- chunk_size = Pleroma.Config.get([__MODULE__, :process_chunk_size])
+ chunk_size = Config.get([__MODULE__, :process_chunk_size])
with {:ok, file} <- File.open(path, [:write, :utf8]),
:ok <- write_header(file, name) do
@@ -300,10 +289,6 @@ defmodule Pleroma.User.Backup do
end),
{:ok, str} <- Jason.encode(data),
:ok <- IO.write(file, str <> ",\n") do
- if should_report?(acc + 1, chunk_size) do
- send(caller_pid, {:progress, chunk_size})
- end
-
acc + 1
else
{:error, e} ->
@@ -318,31 +303,29 @@ defmodule Pleroma.User.Backup do
end
end)
- send(caller_pid, {:progress, rem(total, chunk_size)})
-
with :ok <- :file.pwrite(file, {:eof, -2}, "\n],\n \"totalItems\": #{total}}") do
File.close(file)
end
end
end
- defp bookmarks(dir, %{id: user_id} = _user, caller_pid) do
+ defp bookmarks(dir, %{id: user_id} = _user) do
Bookmark
|> where(user_id: ^user_id)
|> join(:inner, [b], activity in assoc(b, :activity))
|> select([b, a], %{id: b.id, object: fragment("(?)->>'object'", a.data)})
- |> write(dir, "bookmarks", fn a -> {:ok, a.object} end, caller_pid)
+ |> write(dir, "bookmarks", fn a -> {:ok, a.object} end)
end
- defp likes(dir, user, caller_pid) do
+ defp likes(dir, user) do
user.ap_id
|> Activity.Queries.by_actor()
|> Activity.Queries.by_type("Like")
|> select([like], %{id: like.id, object: fragment("(?)->>'object'", like.data)})
- |> write(dir, "likes", fn a -> {:ok, a.object} end, caller_pid)
+ |> write(dir, "likes", fn a -> {:ok, a.object} end)
end
- defp statuses(dir, user, caller_pid) do
+ defp statuses(dir, user) do
opts =
%{}
|> Map.put(:type, ["Create", "Announce"])
@@ -362,52 +345,17 @@ defmodule Pleroma.User.Backup do
with {:ok, activity} <- Transmogrifier.prepare_outgoing(a.data) do
{:ok, Map.delete(activity, "@context")}
end
- end,
- caller_pid
+ end
)
end
- defp followers(dir, user, caller_pid) do
+ defp followers(dir, user) do
User.get_followers_query(user)
- |> write(dir, "followers", fn a -> {:ok, a.ap_id} end, caller_pid)
+ |> write(dir, "followers", fn a -> {:ok, a.ap_id} end)
end
- defp following(dir, user, caller_pid) do
+ defp following(dir, user) do
User.get_friends_query(user)
- |> write(dir, "following", fn a -> {:ok, a.ap_id} end, caller_pid)
- end
-end
-
-defmodule Pleroma.User.Backup.ProcessorAPI do
- @callback do_process(%Pleroma.User.Backup{}, pid()) ::
- {:ok, %Pleroma.User.Backup{}} | {:error, any()}
-end
-
-defmodule Pleroma.User.Backup.Processor do
- @behaviour Pleroma.User.Backup.ProcessorAPI
-
- alias Pleroma.Repo
- alias Pleroma.User.Backup
-
- import Ecto.Changeset
-
- @impl true
- def do_process(backup, current_pid) do
- with {:ok, zip_file} <- Backup.export(backup, current_pid),
- {:ok, %{size: size}} <- File.stat(zip_file),
- {:ok, _upload} <- Backup.upload(backup, zip_file) do
- backup
- |> cast(
- %{
- file_size: size,
- processed: true,
- state: :complete
- },
- [:file_size, :processed, :state]
- )
- |> Repo.update()
- else
- e -> {:error, e}
- end
+ |> write(dir, "following", fn a -> {:ok, a.ap_id} end)
end
end
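[Note: taken together, the module above replaces Backup.create/2 with a composable API (user/1, new/1, schedule_backup/1, run/1, upload/1, schedule_delete/1). A minimal usage sketch based on the functions defined in this diff, assuming `user` is a loaded %Pleroma.User{}.]

    # Self-service request: permitted?/1 enforces the :limit_days rate limit,
    # then the record is inserted and a BackupWorker "process" job is enqueued.
    {:ok, backup} = Pleroma.User.Backup.user(user)

    # Admin-initiated request: bypasses the rate limit by calling new/1 and
    # schedule_backup/1 directly, mirroring the admin controller change below.
    with %Pleroma.User.Backup{} = backup <- Pleroma.User.Backup.new(user),
         {:ok, inserted} <- Pleroma.Repo.insert(backup),
         {:ok, %Oban.Job{}} <- Pleroma.User.Backup.schedule_backup(inserted) do
      {:ok, inserted}
    end
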
diff --git a/lib/pleroma/web/admin_api/controllers/admin_api_controller.ex b/lib/pleroma/web/admin_api/controllers/admin_api_controller.ex
index 1894000ff..0f22dd538 100644
--- a/lib/pleroma/web/admin_api/controllers/admin_api_controller.ex
+++ b/lib/pleroma/web/admin_api/controllers/admin_api_controller.ex
@@ -13,6 +13,7 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
alias Pleroma.ModerationLog
alias Pleroma.Stats
alias Pleroma.User
+ alias Pleroma.User.Backup
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.AdminAPI
alias Pleroma.Web.AdminAPI.AccountView
@@ -429,7 +430,9 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
def create_backup(%{assigns: %{user: admin}} = conn, %{"nickname" => nickname}) do
with %User{} = user <- User.get_by_nickname(nickname),
- {:ok, _} <- Pleroma.User.Backup.create(user, admin.id) do
+ %Backup{} = backup <- Backup.new(user),
+ {:ok, inserted_backup} <- Pleroma.Repo.insert(backup),
+ {:ok, %Oban.Job{}} <- Backup.schedule_backup(inserted_backup) do
ModerationLog.insert_log(%{actor: admin, subject: user, action: "create_backup"})
json(conn, "")
diff --git a/lib/pleroma/web/api_spec/operations/pleroma_backup_operation.ex b/lib/pleroma/web/api_spec/operations/pleroma_backup_operation.ex
index 400f3825d..86f709515 100644
--- a/lib/pleroma/web/api_spec/operations/pleroma_backup_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/pleroma_backup_operation.ex
@@ -65,12 +65,7 @@ defmodule Pleroma.Web.ApiSpec.PleromaBackupOperation do
file_name: %Schema{type: :string},
file_size: %Schema{type: :integer},
processed: %Schema{type: :boolean, description: "whether this backup has succeeded"},
- state: %Schema{
- type: :string,
- description: "the state of the backup",
- enum: ["pending", "running", "complete", "failed"]
- },
- processed_number: %Schema{type: :integer, description: "the number of records processed"}
+ tempdir: %Schema{type: :string}
},
example: %{
"content_type" => "application/zip",
@@ -79,8 +74,7 @@ defmodule Pleroma.Web.ApiSpec.PleromaBackupOperation do
"file_size" => 4105,
"inserted_at" => "2020-09-08T16:42:07.000Z",
"processed" => true,
- "state" => "complete",
- "processed_number" => 20
+ "tempdir" => "/tmp/PZIMw40vmpM"
}
}
end
diff --git a/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex b/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex
index b9daed22b..0115ec645 100644
--- a/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex
+++ b/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex
@@ -20,7 +20,7 @@ defmodule Pleroma.Web.PleromaAPI.BackupController do
end
def create(%{assigns: %{user: user}} = conn, _params) do
- with {:ok, _} <- Backup.create(user) do
+ with {:ok, _} <- Backup.user(user) do
backups = Backup.list(user)
render(conn, "index.json", backups: backups)
end
diff --git a/lib/pleroma/web/pleroma_api/views/backup_view.ex b/lib/pleroma/web/pleroma_api/views/backup_view.ex
index 20403aeee..d778590f0 100644
--- a/lib/pleroma/web/pleroma_api/views/backup_view.ex
+++ b/lib/pleroma/web/pleroma_api/views/backup_view.ex
@@ -9,22 +9,12 @@ defmodule Pleroma.Web.PleromaAPI.BackupView do
alias Pleroma.Web.CommonAPI.Utils
def render("show.json", %{backup: %Backup{} = backup}) do
- # To deal with records before the migration
- state =
- if backup.state == :invalid do
- if backup.processed, do: :complete, else: :failed
- else
- backup.state
- end
-
%{
id: backup.id,
content_type: backup.content_type,
url: download_url(backup),
file_size: backup.file_size,
processed: backup.processed,
- state: to_string(state),
- processed_number: backup.processed_number,
inserted_at: Utils.to_masto_date(backup.inserted_at)
}
end
diff --git a/lib/pleroma/workers/backup_worker.ex b/lib/pleroma/workers/backup_worker.ex
index 54ac31a3c..d1b6fcdad 100644
--- a/lib/pleroma/workers/backup_worker.ex
+++ b/lib/pleroma/workers/backup_worker.ex
@@ -6,64 +6,46 @@ defmodule Pleroma.Workers.BackupWorker do
use Oban.Worker, queue: :slow, max_attempts: 1
alias Oban.Job
+ alias Pleroma.Config.Getting, as: Config
alias Pleroma.User.Backup
- def process(backup, admin_user_id \\ nil) do
- %{"op" => "process", "backup_id" => backup.id, "admin_user_id" => admin_user_id}
- |> new()
- |> Oban.insert()
- end
-
- def schedule_deletion(backup) do
- days = Pleroma.Config.get([Backup, :purge_after_days])
- time = 60 * 60 * 24 * days
- scheduled_at = Calendar.NaiveDateTime.add!(backup.inserted_at, time)
-
- %{"op" => "delete", "backup_id" => backup.id}
- |> new(scheduled_at: scheduled_at)
- |> Oban.insert()
- end
-
- def delete(backup) do
- %{"op" => "delete", "backup_id" => backup.id}
- |> new()
- |> Oban.insert()
- end
-
@impl Oban.Worker
def perform(%Job{
- args: %{"op" => "process", "backup_id" => backup_id, "admin_user_id" => admin_user_id}
+ args: %{"op" => "process", "backup_id" => backup_id}
}) do
- with {:ok, %Backup{} = backup} <-
- backup_id |> Backup.get() |> Backup.process(),
- {:ok, _job} <- schedule_deletion(backup),
- :ok <- Backup.remove_outdated(backup),
- :ok <- maybe_deliver_email(backup, admin_user_id) do
- {:ok, backup}
+ with {_, %Backup{} = backup} <- {:get, Backup.get_by_id(backup_id)},
+ {_, {:ok, updated_backup}} <- {:run, Backup.run(backup)},
+ {_, {:ok, uploaded_backup}} <- {:upload, Backup.upload(updated_backup)},
+ {_, {:ok, _job}} <- {:delete, Backup.schedule_delete(uploaded_backup)},
+ {_, :ok} <- {:outdated, Backup.remove_outdated(uploaded_backup.user)},
+ {_, :ok} <- {:email, maybe_deliver_email(uploaded_backup)} do
+ {:ok, uploaded_backup}
+ else
+ e -> {:error, e}
end
end
def perform(%Job{args: %{"op" => "delete", "backup_id" => backup_id}}) do
- case Backup.get(backup_id) do
- %Backup{} = backup -> Backup.delete(backup)
+ case Backup.get_by_id(backup_id) do
+ %Backup{} = backup -> Backup.delete_archive(backup)
nil -> :ok
end
end
@impl Oban.Worker
- def timeout(_job), do: :infinity
+ def timeout(_job), do: Config.get([Backup, :timeout], :timer.minutes(30))
defp has_email?(user) do
not is_nil(user.email) and user.email != ""
end
- defp maybe_deliver_email(backup, admin_user_id) do
+ defp maybe_deliver_email(backup) do
has_mailer = Pleroma.Config.get([Pleroma.Emails.Mailer, :enabled])
backup = backup |> Pleroma.Repo.preload(:user)
if has_email?(backup.user) and has_mailer do
backup
- |> Pleroma.Emails.UserEmail.backup_is_ready_email(admin_user_id)
+ |> Pleroma.Emails.UserEmail.backup_is_ready_email()
|> Pleroma.Emails.Mailer.deliver()
:ok
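[Note: with per-backup progress tracking gone, a long-running export is now bounded only by the worker's timeout/1 callback above. A sketch of raising that limit in a site config, assuming the same [Pleroma.User.Backup, :timeout] key it reads; Elixir's :timer helpers return milliseconds.]

    # Hypothetical site-level override; BackupWorker.timeout/1 falls back to
    # :timer.minutes(30) when this key is unset.
    config :pleroma, Pleroma.User.Backup,
      timeout: :timer.hours(2)
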
diff --git a/priv/repo/migrations/20240622175346_backup_refactor.exs b/priv/repo/migrations/20240622175346_backup_refactor.exs
new file mode 100644
index 000000000..5dfc55789
--- /dev/null
+++ b/priv/repo/migrations/20240622175346_backup_refactor.exs
@@ -0,0 +1,19 @@
+defmodule Pleroma.Repo.Migrations.BackupRefactor do
+ use Ecto.Migration
+
+ def up do
+ alter table("backups") do
+ remove(:state)
+ remove(:processed_number)
+ add(:tempdir, :string)
+ end
+ end
+
+ def down do
+ alter table("backups") do
+ add(:state, :integer, default: 5)
+ add(:processed_number, :integer, default: 0)
+ remove(:tempdir)
+ end
+ end
+end
diff --git a/test/pleroma/user/backup_async_test.exs b/test/pleroma/user/backup_async_test.exs
deleted file mode 100644
index b0e9413be..000000000
--- a/test/pleroma/user/backup_async_test.exs
+++ /dev/null
@@ -1,49 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.User.BackupAsyncTest do
- use Pleroma.DataCase, async: true
-
- import Pleroma.Factory
- import Mox
-
- alias Pleroma.UnstubbedConfigMock, as: ConfigMock
- alias Pleroma.User.Backup
- alias Pleroma.User.Backup.ProcessorMock
-
- setup do
- user = insert(:user, %{nickname: "cofe", name: "Cofe", ap_id: "http://cofe.io/users/cofe"})
-
- {:ok, backup} = user |> Backup.new() |> Repo.insert()
- %{backup: backup}
- end
-
- test "it handles unrecoverable exceptions", %{backup: backup} do
- ProcessorMock
- |> expect(:do_process, fn _, _ ->
- raise "mock exception"
- end)
-
- ConfigMock
- |> stub_with(Pleroma.Config)
-
- {:error, %{backup: backup, reason: :exit}} = Backup.process(backup, ProcessorMock)
-
- assert backup.state == :failed
- end
-
- test "it handles timeouts", %{backup: backup} do
- ProcessorMock
- |> expect(:do_process, fn _, _ ->
- Process.sleep(:timer.seconds(4))
- end)
-
- ConfigMock
- |> expect(:get, fn [Pleroma.User.Backup, :process_wait_time] -> :timer.seconds(2) end)
-
- {:error, %{backup: backup, reason: :timeout}} = Backup.process(backup, ProcessorMock)
-
- assert backup.state == :failed
- end
-end
diff --git a/test/pleroma/user/backup_test.exs b/test/pleroma/user/backup_test.exs
index d82d1719b..24fe09f7e 100644
--- a/test/pleroma/user/backup_test.exs
+++ b/test/pleroma/user/backup_test.exs
@@ -6,7 +6,6 @@ defmodule Pleroma.User.BackupTest do
use Oban.Testing, repo: Pleroma.Repo
use Pleroma.DataCase
- import Mock
import Pleroma.Factory
import Swoosh.TestAssertions
import Mox
@@ -16,7 +15,6 @@ defmodule Pleroma.User.BackupTest do
alias Pleroma.UnstubbedConfigMock, as: ConfigMock
alias Pleroma.Uploaders.S3.ExAwsMock
alias Pleroma.User.Backup
- alias Pleroma.User.Backup.ProcessorMock
alias Pleroma.Web.CommonAPI
alias Pleroma.Workers.BackupWorker
@@ -28,79 +26,56 @@ defmodule Pleroma.User.BackupTest do
ConfigMock
|> stub_with(Pleroma.Config)
- ProcessorMock
- |> stub_with(Pleroma.User.Backup.Processor)
-
:ok
end
test "it does not requrie enabled email" do
clear_config([Pleroma.Emails.Mailer, :enabled], false)
user = insert(:user)
- assert {:ok, _} = Backup.create(user)
+ assert {:ok, _} = Backup.user(user)
end
test "it does not require user's email" do
user = insert(:user, %{email: nil})
- assert {:ok, _} = Backup.create(user)
+ assert {:ok, _} = Backup.user(user)
end
test "it creates a backup record and an Oban job" do
- %{id: user_id} = user = insert(:user)
- assert {:ok, %Oban.Job{args: args}} = Backup.create(user)
+ user = insert(:user)
+ assert {:ok, %Backup{} = backup} = Backup.user(user)
+ assert {:ok, %Oban.Job{args: args}} = Backup.schedule_backup(backup)
assert_enqueued(worker: BackupWorker, args: args)
- backup = Backup.get(args["backup_id"])
- assert %Backup{user_id: ^user_id, processed: false, file_size: 0, state: :pending} = backup
+ backup = Backup.get_by_id(args["backup_id"])
+ assert %Backup{processed: false, file_size: 0} = backup
end
test "it return an error if the export limit is over" do
- %{id: user_id} = user = insert(:user)
+ user = insert(:user)
limit_days = Pleroma.Config.get([Backup, :limit_days])
- assert {:ok, %Oban.Job{args: args}} = Backup.create(user)
- backup = Backup.get(args["backup_id"])
- assert %Backup{user_id: ^user_id, processed: false, file_size: 0} = backup
+ {:ok, first_backup} = Backup.user(user)
+ {:ok, _run_backup} = Backup.run(first_backup)
- assert Backup.create(user) == {:error, "Last export was less than #{limit_days} days ago"}
+ assert Backup.user(user) == {:error, "Last export was less than #{limit_days} days ago"}
end
test "it process a backup record" do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
%{id: user_id} = user = insert(:user)
- assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
- assert {:ok, backup} = perform_job(BackupWorker, args)
- assert backup.file_size > 0
- assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id, state: :complete} = backup
-
- delete_job_args = %{"op" => "delete", "backup_id" => backup_id}
-
- assert_enqueued(worker: BackupWorker, args: delete_job_args)
- assert {:ok, backup} = perform_job(BackupWorker, delete_job_args)
- refute Backup.get(backup_id)
-
- email = Pleroma.Emails.UserEmail.backup_is_ready_email(backup)
+ assert {:ok, %Backup{id: backup_id}} = Backup.user(user)
- assert_email_sent(
- to: {user.name, user.email},
- html_body: email.html_body
- )
- end
+ oban_args = %{"op" => "process", "backup_id" => backup_id}
- test "it updates states of the backup" do
- clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
- %{id: user_id} = user = insert(:user)
-
- assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
- assert {:ok, backup} = perform_job(BackupWorker, args)
+ assert {:ok, backup} = perform_job(BackupWorker, oban_args)
assert backup.file_size > 0
- assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id, state: :complete} = backup
+ assert match?(%Backup{id: ^backup_id, processed: true, user_id: ^user_id}, backup)
delete_job_args = %{"op" => "delete", "backup_id" => backup_id}
assert_enqueued(worker: BackupWorker, args: delete_job_args)
assert {:ok, backup} = perform_job(BackupWorker, delete_job_args)
- refute Backup.get(backup_id)
+ refute Backup.get_by_id(backup_id)
email = Pleroma.Emails.UserEmail.backup_is_ready_email(backup)
@@ -114,10 +89,15 @@ defmodule Pleroma.User.BackupTest do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
%{id: user_id} = user = insert(:user, %{email: nil})
- assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
- assert {:ok, backup} = perform_job(BackupWorker, args)
- assert backup.file_size > 0
- assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id} = backup
+ assert {:ok, %Backup{} = backup} = Backup.user(user)
+
+ expected_args = %{"op" => "process", "backup_id" => backup.id}
+
+ assert_enqueued(worker: BackupWorker, args: %{"backup_id" => backup.id})
+ assert {:ok, completed_backup} = perform_job(BackupWorker, expected_args)
+ assert completed_backup.file_size > 0
+ assert completed_backup.processed
+ assert completed_backup.user_id == user_id
assert_no_email_sent()
end
@@ -127,10 +107,13 @@ defmodule Pleroma.User.BackupTest do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
%{id: user_id} = user = insert(:user)
- assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
- assert {:ok, backup} = perform_job(BackupWorker, args)
+ assert {:ok, %Backup{id: backup_id}} = Backup.user(user)
+
+ oban_args = %{"op" => "process", "backup_id" => backup_id}
+
+ assert {:ok, backup} = perform_job(BackupWorker, oban_args)
assert backup.file_size > 0
- assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id} = backup
+ assert match?(%Backup{id: ^backup_id, processed: true, user_id: ^user_id}, backup)
assert_no_email_sent()
end
@@ -139,10 +122,15 @@ defmodule Pleroma.User.BackupTest do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
%{id: user_id} = user = insert(:user, %{email: ""})
- assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
- assert {:ok, backup} = perform_job(BackupWorker, args)
+ assert {:ok, %Backup{id: backup_id} = backup} = Backup.user(user)
+
+ expected_args = %{"op" => "process", "backup_id" => backup.id}
+
+ assert_enqueued(worker: BackupWorker, args: expected_args)
+
+ assert {:ok, backup} = perform_job(BackupWorker, expected_args)
assert backup.file_size > 0
- assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id} = backup
+ assert match?(%Backup{id: ^backup_id, processed: true, user_id: ^user_id}, backup)
assert_no_email_sent()
end
@@ -152,16 +140,13 @@ defmodule Pleroma.User.BackupTest do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
user = insert(:user)
- assert {:ok, job1} = Backup.create(user)
-
- assert {:ok, %Backup{}} = ObanHelpers.perform(job1)
- assert {:ok, job2} = Backup.create(user)
- assert Pleroma.Repo.aggregate(Backup, :count) == 2
- assert {:ok, backup2} = ObanHelpers.perform(job2)
+ assert {:ok, %{id: backup_one_id}} = Backup.user(user)
+ assert {:ok, %{id: _backup_two_id}} = Backup.user(user)
+ # Run the backups
ObanHelpers.perform_all()
- assert [^backup2] = Pleroma.Repo.all(Backup)
+ assert_enqueued(worker: BackupWorker, args: %{"op" => "delete", "backup_id" => backup_one_id})
end
test "it creates a zip archive with user data" do
@@ -185,9 +170,12 @@ defmodule Pleroma.User.BackupTest do
CommonAPI.follow(other_user, user)
- assert {:ok, backup} = user |> Backup.new() |> Repo.insert()
- assert {:ok, path} = Backup.export(backup, self())
- assert {:ok, zipfile} = :zip.zip_open(String.to_charlist(path), [:memory])
+ assert {:ok, backup} = Backup.user(user)
+ assert {:ok, run_backup} = Backup.run(backup)
+
+ tempfile = Path.join([run_backup.tempdir, run_backup.file_name])
+
+ assert {:ok, zipfile} = :zip.zip_open(String.to_charlist(tempfile), [:memory])
assert {:ok, {~c"actor.json", json}} = :zip.zip_get(~c"actor.json", zipfile)
assert %{
@@ -275,10 +263,10 @@ defmodule Pleroma.User.BackupTest do
} = Jason.decode!(json)
:zip.zip_close(zipfile)
- File.rm!(path)
+ File.rm_rf!(run_backup.tempdir)
end
- test "it counts the correct number processed" do
+ test "correct number processed" do
user = insert(:user, %{nickname: "cofe", name: "Cofe", ap_id: "http://cofe.io/users/cofe"})
Enum.map(1..120, fn i ->
@@ -288,43 +276,21 @@ defmodule Pleroma.User.BackupTest do
end)
assert {:ok, backup} = user |> Backup.new() |> Repo.insert()
- {:ok, backup} = Backup.process(backup)
+ {:ok, backup} = Backup.run(backup)
- assert backup.processed_number == 1 + 120 + 120 + 120
+ zip_path = Path.join([backup.tempdir, backup.file_name])
- Backup.delete(backup)
- end
+ assert {:ok, zipfile} = :zip.zip_open(String.to_charlist(zip_path), [:memory])
- test "it handles errors" do
- user = insert(:user, %{nickname: "cofe", name: "Cofe", ap_id: "http://cofe.io/users/cofe"})
+ backup_parts = [~c"likes.json", ~c"bookmarks.json", ~c"outbox.json"]
- Enum.map(1..120, fn i ->
- {:ok, _status} = CommonAPI.post(user, %{status: "status #{i}"})
+ Enum.each(backup_parts, fn part ->
+ assert {:ok, {_part, part_json}} = :zip.zip_get(part, zipfile)
+ {:ok, decoded_part} = Jason.decode(part_json)
+ assert decoded_part["totalItems"] == 120
end)
- assert {:ok, backup} = user |> Backup.new() |> Repo.insert()
-
- with_mock Pleroma.Web.ActivityPub.Transmogrifier,
- [:passthrough],
- prepare_outgoing: fn data ->
- object =
- data["object"]
- |> Pleroma.Object.normalize(fetch: false)
- |> Map.get(:data)
-
- data = data |> Map.put("object", object)
-
- if String.contains?(data["object"]["content"], "119"),
- do: raise(%Postgrex.Error{}),
- else: {:ok, data}
- end do
- {:ok, backup} = Backup.process(backup)
- assert backup.processed
- assert backup.state == :complete
- assert backup.processed_number == 1 + 119
-
- Backup.delete(backup)
- end
+ Backup.delete_archive(backup)
end
describe "it uploads and deletes a backup archive" do
@@ -343,12 +309,11 @@ defmodule Pleroma.User.BackupTest do
Bookmark.create(user.id, status3.id)
assert {:ok, backup} = user |> Backup.new() |> Repo.insert()
- assert {:ok, path} = Backup.export(backup, self())
- [path: path, backup: backup]
+ [backup: backup]
end
- test "S3", %{path: path, backup: backup} do
+ test "S3", %{backup: backup} do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.S3)
clear_config([Pleroma.Uploaders.S3, :streaming_enabled], false)
@@ -358,15 +323,17 @@ defmodule Pleroma.User.BackupTest do
%{http_method: :delete} -> {:ok, %{status_code: 204}}
end)
- assert {:ok, %Pleroma.Upload{}} = Backup.upload(backup, path)
- assert {:ok, _backup} = Backup.delete(backup)
+ assert {:ok, backup} = Backup.run(backup)
+ assert {:ok, %Backup{processed: true}} = Backup.upload(backup)
+ assert {:ok, _backup} = Backup.delete_archive(backup)
end
- test "Local", %{path: path, backup: backup} do
+ test "Local", %{backup: backup} do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
- assert {:ok, %Pleroma.Upload{}} = Backup.upload(backup, path)
- assert {:ok, _backup} = Backup.delete(backup)
+ assert {:ok, backup} = Backup.run(backup)
+ assert {:ok, %Backup{processed: true}} = Backup.upload(backup)
+ assert {:ok, _backup} = Backup.delete_archive(backup)
end
end
end
diff --git a/test/pleroma/web/admin_api/controllers/admin_api_controller_test.exs b/test/pleroma/web/admin_api/controllers/admin_api_controller_test.exs
index a7ee8359d..6614d1409 100644
--- a/test/pleroma/web/admin_api/controllers/admin_api_controller_test.exs
+++ b/test/pleroma/web/admin_api/controllers/admin_api_controller_test.exs
@@ -1096,9 +1096,13 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIControllerTest do
ObanHelpers.perform_all()
- email = Pleroma.Emails.UserEmail.backup_is_ready_email(backup, admin.id)
+ email = Pleroma.Emails.UserEmail.backup_is_ready_email(backup)
+
+ assert String.contains?(
+ email.html_body,
+ "A full backup of your Pleroma account was requested"
+ )
- assert String.contains?(email.html_body, "Admin @#{admin.nickname} requested a full backup")
assert_email_sent(to: {user.name, user.email}, html_body: email.html_body)
log_message = "@#{admin_nickname} requested account backup for @#{user_nickname}"
diff --git a/test/pleroma/web/pleroma_api/controllers/backup_controller_test.exs b/test/pleroma/web/pleroma_api/controllers/backup_controller_test.exs
index 21e619fa4..1e056adb5 100644
--- a/test/pleroma/web/pleroma_api/controllers/backup_controller_test.exs
+++ b/test/pleroma/web/pleroma_api/controllers/backup_controller_test.exs
@@ -20,9 +20,7 @@ defmodule Pleroma.Web.PleromaAPI.BackupControllerTest do
end
test "GET /api/v1/pleroma/backups", %{user: user, conn: conn} do
- assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id}}} = Backup.create(user)
-
- backup = Backup.get(backup_id)
+ assert {:ok, %Backup{} = backup} = Backup.user(user)
response =
conn
diff --git a/test/pleroma/web/pleroma_api/views/backup_view_test.exs b/test/pleroma/web/pleroma_api/views/backup_view_test.exs
index b125b8872..303547f3b 100644
--- a/test/pleroma/web/pleroma_api/views/backup_view_test.exs
+++ b/test/pleroma/web/pleroma_api/views/backup_view_test.exs
@@ -27,42 +27,11 @@ defmodule Pleroma.Web.PleromaAPI.BackupViewTest do
assert result.id == backup.id
end
- test "it renders the state and processed_number" do
+ test "it renders the processed state" do
user = insert(:user)
backup = Backup.new(user)
result = BackupView.render("show.json", backup: backup)
- assert result.state == to_string(backup.state)
- assert result.processed_number == backup.processed_number
- end
-
- test "it renders failed state with legacy records" do
- backup = %Backup{
- id: 0,
- content_type: "application/zip",
- file_name: "dummy",
- file_size: 1,
- state: :invalid,
- processed: true,
- processed_number: 1,
- inserted_at: NaiveDateTime.utc_now()
- }
-
- result = BackupView.render("show.json", backup: backup)
- assert result.state == "complete"
-
- backup = %Backup{
- id: 0,
- content_type: "application/zip",
- file_name: "dummy",
- file_size: 1,
- state: :invalid,
- processed: false,
- processed_number: 1,
- inserted_at: NaiveDateTime.utc_now()
- }
-
- result = BackupView.render("show.json", backup: backup)
- assert result.state == "failed"
+ refute result.processed
end
end
diff --git a/test/support/mocks.ex b/test/support/mocks.ex
index 63cbc49ab..d84958e15 100644
--- a/test/support/mocks.ex
+++ b/test/support/mocks.ex
@@ -32,6 +32,4 @@ Mox.defmock(Pleroma.StubbedHTTPSignaturesMock, for: Pleroma.HTTPSignaturesAPI)
Mox.defmock(Pleroma.LoggerMock, for: Pleroma.Logging)
-Mox.defmock(Pleroma.User.Backup.ProcessorMock, for: Pleroma.User.Backup.ProcessorAPI)
-
Mox.defmock(Pleroma.Uploaders.S3.ExAwsMock, for: Pleroma.Uploaders.S3.ExAwsAPI)