From 8f285a787f7af0436de18ac140712801a69eff05 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Fri, 21 Jun 2024 13:24:47 -0400 Subject: [PATCH 1/6] Refactor backups to be fully controlled by Oban --- changelog.d/backups-refactor.change | 1 + config/test.exs | 2 + lib/pleroma/ecto_enums.ex | 8 - lib/pleroma/emails/user_email.ex | 35 +- lib/pleroma/user/backup.ex | 366 ++++++++---------- .../controllers/admin_api_controller.ex | 5 +- .../operations/pleroma_backup_operation.ex | 10 +- .../controllers/backup_controller.ex | 2 +- .../web/pleroma_api/views/backup_view.ex | 10 - lib/pleroma/workers/backup_worker.ex | 47 +-- .../20240622175346_backup_refactor.exs | 19 + 11 files changed, 209 insertions(+), 296 deletions(-) create mode 100644 changelog.d/backups-refactor.change create mode 100644 priv/repo/migrations/20240622175346_backup_refactor.exs diff --git a/changelog.d/backups-refactor.change b/changelog.d/backups-refactor.change new file mode 100644 index 000000000..47fc74138 --- /dev/null +++ b/changelog.d/backups-refactor.change @@ -0,0 +1 @@ +Refactor the user backups code and improve test coverage diff --git a/config/test.exs b/config/test.exs index 410b9bbab..2a007a22d 100644 --- a/config/test.exs +++ b/config/test.exs @@ -187,6 +187,8 @@ config :pleroma, Pleroma.Web.RichMedia.Backfill, stream_out: Pleroma.Web.ActivityPub.ActivityPubMock +config :pleroma, Pleroma.User.Backup, tempdir: "test/tmp" + if File.exists?("./config/test.secret.exs") do import_config "test.secret.exs" else diff --git a/lib/pleroma/ecto_enums.ex b/lib/pleroma/ecto_enums.ex index b346b39d6..a4890b489 100644 --- a/lib/pleroma/ecto_enums.ex +++ b/lib/pleroma/ecto_enums.ex @@ -27,11 +27,3 @@ failed: 4, manual: 5 ) - -defenum(Pleroma.User.Backup.State, - pending: 1, - running: 2, - complete: 3, - failed: 4, - invalid: 5 -) diff --git a/lib/pleroma/emails/user_email.ex b/lib/pleroma/emails/user_email.ex index 95b963764..10d89d2f3 100644 --- a/lib/pleroma/emails/user_email.ex +++ 
b/lib/pleroma/emails/user_email.ex @@ -345,37 +345,22 @@ def unsubscribe_url(user, notifications_type) do Router.Helpers.subscription_url(Endpoint, :unsubscribe, token) end - def backup_is_ready_email(backup, admin_user_id \\ nil) do + def backup_is_ready_email(backup) do %{user: user} = Pleroma.Repo.preload(backup, :user) Gettext.with_locale_or_default user.language do download_url = Pleroma.Web.PleromaAPI.BackupView.download_url(backup) html_body = - if is_nil(admin_user_id) do - Gettext.dpgettext( - "static_pages", - "account archive email body - self-requested", - """ -

You requested a full backup of your Pleroma account. It's ready for download:

-

%{download_url}

- """, - download_url: download_url - ) - else - admin = Pleroma.Repo.get(User, admin_user_id) - - Gettext.dpgettext( - "static_pages", - "account archive email body - admin requested", - """ -

Admin @%{admin_nickname} requested a full backup of your Pleroma account. It's ready for download:

-

%{download_url}

- """, - admin_nickname: admin.nickname, - download_url: download_url - ) - end + Gettext.dpgettext( + "static_pages", + "account archive email body", + """ +

A full backup of your Pleroma account was requested. It's ready for download:

+

%{download_url}

+ """, + download_url: download_url + ) new() |> to(recipient(user)) diff --git a/lib/pleroma/user/backup.ex b/lib/pleroma/user/backup.ex index 1821de667..2568089a4 100644 --- a/lib/pleroma/user/backup.ex +++ b/lib/pleroma/user/backup.ex @@ -14,9 +14,10 @@ defmodule Pleroma.User.Backup do alias Pleroma.Activity alias Pleroma.Bookmark + alias Pleroma.Config alias Pleroma.Repo + alias Pleroma.Uploaders.Uploader alias Pleroma.User - alias Pleroma.User.Backup.State alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.ActivityPub.UserView @@ -29,71 +30,111 @@ defmodule Pleroma.User.Backup do field(:file_name, :string) field(:file_size, :integer, default: 0) field(:processed, :boolean, default: false) - field(:state, State, default: :invalid) - field(:processed_number, :integer, default: 0) + field(:tempdir, :string) belongs_to(:user, User, type: FlakeId.Ecto.CompatType) timestamps() end - @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config) + @doc """ + Schedules a job to backup a user if the number of backup requests has not exceeded the limit. - def create(user, admin_id \\ nil) do - with :ok <- validate_limit(user, admin_id), - {:ok, backup} <- user |> new() |> Repo.insert() do - BackupWorker.process(backup, admin_id) + Admins can directly call new/1 and schedule_backup/1 to bypass the limit. 
+ """ + @spec user(User.t()) :: {:ok, t()} | {:error, any()} + def user(user) do + days = Config.get([__MODULE__, :limit_days]) + + with true <- permitted?(user), + %__MODULE__{} = backup <- new(user), + {:ok, inserted_backup} <- Repo.insert(backup), + {:ok, %Oban.Job{}} <- schedule_backup(inserted_backup) do + {:ok, inserted_backup} + else + false -> + {:error, + dngettext( + "errors", + "Last export was less than a day ago", + "Last export was less than %{days} days ago", + days, + days: days + )} + + e -> + {:error, e} end end + @doc "Generates a %Backup{} for a user with a random file name" + @spec new(User.t()) :: t() def new(user) do rand_str = :crypto.strong_rand_bytes(32) |> Base.url_encode64(padding: false) datetime = Calendar.NaiveDateTime.Format.iso8601_basic(NaiveDateTime.utc_now()) name = "archive-#{user.nickname}-#{datetime}-#{rand_str}.zip" %__MODULE__{ - user_id: user.id, content_type: "application/zip", file_name: name, - state: :pending + tempdir: tempdir(), + user: user } end - def delete(backup) do - uploader = Pleroma.Config.get([Pleroma.Upload, :uploader]) + @doc "Schedules the execution of the provided backup" + @spec schedule_backup(t()) :: {:ok, Oban.Job.t()} | {:error, any()} + def schedule_backup(backup) do + with false <- is_nil(backup.id) do + %{"op" => "process", "backup_id" => backup.id} + |> BackupWorker.new() + |> Oban.insert() + else + true -> + {:error, "Backup is missing id. 
Please insert it into the Repo first."} + + e -> + {:error, e} + end + end + + @doc "Deletes the backup archive file and removes the database record" + @spec delete_archive(t()) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()} + def delete_archive(backup) do + uploader = Config.get([Pleroma.Upload, :uploader]) with :ok <- uploader.delete_file(Path.join("backups", backup.file_name)) do Repo.delete(backup) end end - defp validate_limit(_user, admin_id) when is_binary(admin_id), do: :ok + @doc "Schedules a job to delete the backup archive" + @spec schedule_delete(t()) :: {:ok, Oban.Job.t()} | {:error, any()} + def schedule_delete(backup) do + days = Config.get([__MODULE__, :purge_after_days]) + time = 60 * 60 * 24 * days + scheduled_at = Calendar.NaiveDateTime.add!(backup.inserted_at, time) - defp validate_limit(user, nil) do - case get_last(user.id) do - %__MODULE__{inserted_at: inserted_at} -> - days = Pleroma.Config.get([__MODULE__, :limit_days]) - diff = Timex.diff(NaiveDateTime.utc_now(), inserted_at, :days) + %{"op" => "delete", "backup_id" => backup.id} + |> BackupWorker.new(scheduled_at: scheduled_at) + |> Oban.insert() + end - if diff > days do - :ok - else - {:error, - dngettext( - "errors", - "Last export was less than a day ago", - "Last export was less than %{days} days ago", - days, - days: days - )} - end - - nil -> - :ok + defp permitted?(user) do + with {_, %__MODULE__{inserted_at: inserted_at}} <- {:last, get_last(user)}, + days = Config.get([__MODULE__, :limit_days]), + diff = Timex.diff(NaiveDateTime.utc_now(), inserted_at, :days), + {_, true} <- {:diff, diff > days} do + true + else + {:last, nil} -> true + {:diff, false} -> false end end - def get_last(user_id) do + @doc "Returns last backup for the provided user" + @spec get_last(User.t()) :: t() + def get_last(%User{id: user_id}) do __MODULE__ |> where(user_id: ^user_id) |> order_by(desc: :id) @@ -101,6 +142,8 @@ def get_last(user_id) do |> Repo.one() end + @doc "Lists all existing 
backups for a user" + @spec list(User.t()) :: [Ecto.Schema.t() | term()] def list(%User{id: user_id}) do __MODULE__ |> where(user_id: ^user_id) @@ -108,94 +151,37 @@ def list(%User{id: user_id}) do |> Repo.all() end - def remove_outdated(%__MODULE__{id: latest_id, user_id: user_id}) do - __MODULE__ - |> where(user_id: ^user_id) - |> where([b], b.id != ^latest_id) - |> Repo.all() - |> Enum.each(&BackupWorker.delete/1) - end - - def get(id), do: Repo.get(__MODULE__, id) - - defp set_state(backup, state, processed_number \\ nil) do - struct = - %{state: state} - |> Pleroma.Maps.put_if_present(:processed_number, processed_number) - - backup - |> cast(struct, [:state, :processed_number]) - |> Repo.update() - end - - def process( - %__MODULE__{} = backup, - processor_module \\ __MODULE__.Processor - ) do - set_state(backup, :running, 0) - - current_pid = self() - - task = - Task.Supervisor.async_nolink( - Pleroma.TaskSupervisor, - processor_module, - :do_process, - [backup, current_pid] - ) - - wait_backup(backup, backup.processed_number, task) - end - - defp wait_backup(backup, current_processed, task) do - wait_time = @config_impl.get([__MODULE__, :process_wait_time]) - - receive do - {:progress, new_processed} -> - total_processed = current_processed + new_processed - - set_state(backup, :running, total_processed) - wait_backup(backup, total_processed, task) - - {:DOWN, _ref, _proc, _pid, reason} -> - backup = get(backup.id) - - if reason != :normal do - Logger.error("Backup #{backup.id} process ended abnormally: #{inspect(reason)}") - - {:ok, backup} = set_state(backup, :failed) - - cleanup(backup) - - {:error, - %{ - backup: backup, - reason: :exit, - details: reason - }} - else - {:ok, backup} - end - after - wait_time -> - Logger.error( - "Backup #{backup.id} timed out after no response for #{wait_time}ms, terminating" - ) - - Task.Supervisor.terminate_child(Pleroma.TaskSupervisor, task.pid) - - {:ok, backup} = set_state(backup, :failed) - - cleanup(backup) - - 
{:error, - %{ - backup: backup, - reason: :timeout - }} + @doc "Schedules deletion of all but the the most recent backup" + @spec remove_outdated(User.t()) :: :ok + def remove_outdated(user) do + with %__MODULE__{} = latest_backup <- get_last(user) do + __MODULE__ + |> where(user_id: ^user.id) + |> where([b], b.id != ^latest_backup.id) + |> Repo.all() + |> Enum.each(&schedule_delete/1) + else + _ -> :ok end end + def get_by_id(id), do: Repo.get(__MODULE__, id) + + @doc "Generates changeset for %Pleroma.User.Backup{}" + @spec changeset(%__MODULE__{}, map()) :: %Ecto.Changeset{} + def changeset(backup \\ %__MODULE__{}, attrs) do + backup + |> cast(attrs, [:content_type, :file_name, :file_size, :processed, :tempdir]) + end + + @doc "Updates the backup record" + @spec update_record(%__MODULE__{}, map()) :: {:ok, %__MODULE__{}} | {:error, %Ecto.Changeset{}} + def update_record(%__MODULE__{} = backup, attrs) do + backup + |> changeset(attrs) + |> Repo.update() + end + @files [ ~c"actor.json", ~c"outbox.json", @@ -204,53 +190,66 @@ defp wait_backup(backup, current_processed, task) do ~c"followers.json", ~c"following.json" ] - @spec export(Pleroma.User.Backup.t(), pid()) :: {:ok, String.t()} | :error - def export(%__MODULE__{} = backup, caller_pid) do - backup = Repo.preload(backup, :user) - dir = backup_tempdir(backup) - with :ok <- File.mkdir(dir), - :ok <- actor(dir, backup.user, caller_pid), - :ok <- statuses(dir, backup.user, caller_pid), - :ok <- likes(dir, backup.user, caller_pid), - :ok <- bookmarks(dir, backup.user, caller_pid), - :ok <- followers(dir, backup.user, caller_pid), - :ok <- following(dir, backup.user, caller_pid), - {:ok, zip_path} <- :zip.create(backup.file_name, @files, cwd: dir), - {:ok, _} <- File.rm_rf(dir) do - {:ok, zip_path} + @spec run(t()) :: {:ok, t()} | {:error, :failed} + def run(%__MODULE__{} = backup) do + backup = Repo.preload(backup, :user) + tempfile = Path.join([backup.tempdir, backup.file_name]) + + with {_, :ok} <- {:mkdir, 
File.mkdir_p(backup.tempdir)}, + {_, :ok} <- {:actor, actor(backup.tempdir, backup.user)}, + {_, :ok} <- {:statuses, statuses(backup.tempdir, backup.user)}, + {_, :ok} <- {:likes, likes(backup.tempdir, backup.user)}, + {_, :ok} <- {:bookmarks, bookmarks(backup.tempdir, backup.user)}, + {_, :ok} <- {:followers, followers(backup.tempdir, backup.user)}, + {_, :ok} <- {:following, following(backup.tempdir, backup.user)}, + {_, {:ok, _zip_path}} <- + {:zip, :zip.create(to_charlist(tempfile), @files, cwd: to_charlist(backup.tempdir))}, + {_, {:ok, %File.Stat{size: zip_size}}} <- {:filestat, File.stat(tempfile)}, + {:ok, updated_backup} <- update_record(backup, %{file_size: zip_size}) do + {:ok, updated_backup} else - _ -> :error + _ -> + File.rm_rf(backup.tempdir) + {:error, :failed} end end - def dir(name) do - dir = Pleroma.Config.get([__MODULE__, :dir]) || System.tmp_dir!() - Path.join(dir, name) + defp tempdir do + case Config.get([__MODULE__, :tempdir]) do + nil -> + System.tmp_dir!() + + path -> + rand = :crypto.strong_rand_bytes(8) |> Base.url_encode64(padding: false) + Path.join([path, rand]) + end end - def upload(%__MODULE__{} = backup, zip_path) do - uploader = Pleroma.Config.get([Pleroma.Upload, :uploader]) + @doc "Uploads the completed backup and marks it as processed" + @spec upload(t()) :: {:ok, t()} + def upload(%__MODULE__{tempdir: tempdir} = backup) when is_binary(tempdir) do + uploader = Config.get([Pleroma.Upload, :uploader]) upload = %Pleroma.Upload{ name: backup.file_name, - tempfile: zip_path, + tempfile: Path.join([tempdir, backup.file_name]), content_type: backup.content_type, path: Path.join("backups", backup.file_name) } - with {:ok, _} <- Pleroma.Uploaders.Uploader.put_file(uploader, upload), - :ok <- File.rm(zip_path) do - {:ok, upload} + with {:ok, _} <- Uploader.put_file(uploader, upload), + {:ok, uploaded_backup} <- update_record(backup, %{processed: true}), + {:ok, _} <- File.rm_rf(tempdir) do + {:ok, uploaded_backup} end end - defp 
actor(dir, user, caller_pid) do + defp actor(dir, user) do with {:ok, json} <- UserView.render("user.json", %{user: user}) |> Map.merge(%{"likes" => "likes.json", "bookmarks" => "bookmarks.json"}) |> Jason.encode() do - send(caller_pid, {:progress, 1}) File.write(Path.join(dir, "actor.json"), json) end end @@ -269,22 +268,10 @@ defp write_header(file, name) do ) end - defp should_report?(num, chunk_size), do: rem(num, chunk_size) == 0 - - defp backup_tempdir(backup) do - name = String.trim_trailing(backup.file_name, ".zip") - dir(name) - end - - defp cleanup(backup) do - dir = backup_tempdir(backup) - File.rm_rf(dir) - end - - defp write(query, dir, name, fun, caller_pid) do + defp write(query, dir, name, fun) do path = Path.join(dir, "#{name}.json") - chunk_size = Pleroma.Config.get([__MODULE__, :process_chunk_size]) + chunk_size = Config.get([__MODULE__, :process_chunk_size]) with {:ok, file} <- File.open(path, [:write, :utf8]), :ok <- write_header(file, name) do @@ -300,10 +287,6 @@ defp write(query, dir, name, fun, caller_pid) do end), {:ok, str} <- Jason.encode(data), :ok <- IO.write(file, str <> ",\n") do - if should_report?(acc + 1, chunk_size) do - send(caller_pid, {:progress, chunk_size}) - end - acc + 1 else {:error, e} -> @@ -318,31 +301,29 @@ defp write(query, dir, name, fun, caller_pid) do end end) - send(caller_pid, {:progress, rem(total, chunk_size)}) - with :ok <- :file.pwrite(file, {:eof, -2}, "\n],\n \"totalItems\": #{total}}") do File.close(file) end end end - defp bookmarks(dir, %{id: user_id} = _user, caller_pid) do + defp bookmarks(dir, %{id: user_id} = _user) do Bookmark |> where(user_id: ^user_id) |> join(:inner, [b], activity in assoc(b, :activity)) |> select([b, a], %{id: b.id, object: fragment("(?)->>'object'", a.data)}) - |> write(dir, "bookmarks", fn a -> {:ok, a.object} end, caller_pid) + |> write(dir, "bookmarks", fn a -> {:ok, a.object} end) end - defp likes(dir, user, caller_pid) do + defp likes(dir, user) do user.ap_id |> 
Activity.Queries.by_actor() |> Activity.Queries.by_type("Like") |> select([like], %{id: like.id, object: fragment("(?)->>'object'", like.data)}) - |> write(dir, "likes", fn a -> {:ok, a.object} end, caller_pid) + |> write(dir, "likes", fn a -> {:ok, a.object} end) end - defp statuses(dir, user, caller_pid) do + defp statuses(dir, user) do opts = %{} |> Map.put(:type, ["Create", "Announce"]) @@ -362,52 +343,17 @@ defp statuses(dir, user, caller_pid) do with {:ok, activity} <- Transmogrifier.prepare_outgoing(a.data) do {:ok, Map.delete(activity, "@context")} end - end, - caller_pid + end ) end - defp followers(dir, user, caller_pid) do + defp followers(dir, user) do User.get_followers_query(user) - |> write(dir, "followers", fn a -> {:ok, a.ap_id} end, caller_pid) + |> write(dir, "followers", fn a -> {:ok, a.ap_id} end) end - defp following(dir, user, caller_pid) do + defp following(dir, user) do User.get_friends_query(user) - |> write(dir, "following", fn a -> {:ok, a.ap_id} end, caller_pid) - end -end - -defmodule Pleroma.User.Backup.ProcessorAPI do - @callback do_process(%Pleroma.User.Backup{}, pid()) :: - {:ok, %Pleroma.User.Backup{}} | {:error, any()} -end - -defmodule Pleroma.User.Backup.Processor do - @behaviour Pleroma.User.Backup.ProcessorAPI - - alias Pleroma.Repo - alias Pleroma.User.Backup - - import Ecto.Changeset - - @impl true - def do_process(backup, current_pid) do - with {:ok, zip_file} <- Backup.export(backup, current_pid), - {:ok, %{size: size}} <- File.stat(zip_file), - {:ok, _upload} <- Backup.upload(backup, zip_file) do - backup - |> cast( - %{ - file_size: size, - processed: true, - state: :complete - }, - [:file_size, :processed, :state] - ) - |> Repo.update() - else - e -> {:error, e} - end + |> write(dir, "following", fn a -> {:ok, a.ap_id} end) end end diff --git a/lib/pleroma/web/admin_api/controllers/admin_api_controller.ex b/lib/pleroma/web/admin_api/controllers/admin_api_controller.ex index 1894000ff..0f22dd538 100644 --- 
a/lib/pleroma/web/admin_api/controllers/admin_api_controller.ex +++ b/lib/pleroma/web/admin_api/controllers/admin_api_controller.ex @@ -13,6 +13,7 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do alias Pleroma.ModerationLog alias Pleroma.Stats alias Pleroma.User + alias Pleroma.User.Backup alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.AdminAPI alias Pleroma.Web.AdminAPI.AccountView @@ -429,7 +430,9 @@ def stats(conn, params) do def create_backup(%{assigns: %{user: admin}} = conn, %{"nickname" => nickname}) do with %User{} = user <- User.get_by_nickname(nickname), - {:ok, _} <- Pleroma.User.Backup.create(user, admin.id) do + %Backup{} = backup <- Backup.new(user), + {:ok, inserted_backup} <- Pleroma.Repo.insert(backup), + {:ok, %Oban.Job{}} <- Backup.schedule_backup(inserted_backup) do ModerationLog.insert_log(%{actor: admin, subject: user, action: "create_backup"}) json(conn, "") diff --git a/lib/pleroma/web/api_spec/operations/pleroma_backup_operation.ex b/lib/pleroma/web/api_spec/operations/pleroma_backup_operation.ex index 400f3825d..86f709515 100644 --- a/lib/pleroma/web/api_spec/operations/pleroma_backup_operation.ex +++ b/lib/pleroma/web/api_spec/operations/pleroma_backup_operation.ex @@ -65,12 +65,7 @@ defp backup do file_name: %Schema{type: :string}, file_size: %Schema{type: :integer}, processed: %Schema{type: :boolean, description: "whether this backup has succeeded"}, - state: %Schema{ - type: :string, - description: "the state of the backup", - enum: ["pending", "running", "complete", "failed"] - }, - processed_number: %Schema{type: :integer, description: "the number of records processed"} + tempdir: %Schema{type: :string} }, example: %{ "content_type" => "application/zip", @@ -79,8 +74,7 @@ defp backup do "file_size" => 4105, "inserted_at" => "2020-09-08T16:42:07.000Z", "processed" => true, - "state" => "complete", - "processed_number" => 20 + "tempdir" => "/tmp/PZIMw40vmpM" } } end diff --git 
a/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex b/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex index b9daed22b..0115ec645 100644 --- a/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex +++ b/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex @@ -20,7 +20,7 @@ def index(%{assigns: %{user: user}} = conn, _params) do end def create(%{assigns: %{user: user}} = conn, _params) do - with {:ok, _} <- Backup.create(user) do + with {:ok, _} <- Backup.user(user) do backups = Backup.list(user) render(conn, "index.json", backups: backups) end diff --git a/lib/pleroma/web/pleroma_api/views/backup_view.ex b/lib/pleroma/web/pleroma_api/views/backup_view.ex index 20403aeee..d778590f0 100644 --- a/lib/pleroma/web/pleroma_api/views/backup_view.ex +++ b/lib/pleroma/web/pleroma_api/views/backup_view.ex @@ -9,22 +9,12 @@ defmodule Pleroma.Web.PleromaAPI.BackupView do alias Pleroma.Web.CommonAPI.Utils def render("show.json", %{backup: %Backup{} = backup}) do - # To deal with records before the migration - state = - if backup.state == :invalid do - if backup.processed, do: :complete, else: :failed - else - backup.state - end - %{ id: backup.id, content_type: backup.content_type, url: download_url(backup), file_size: backup.file_size, processed: backup.processed, - state: to_string(state), - processed_number: backup.processed_number, inserted_at: Utils.to_masto_date(backup.inserted_at) } end diff --git a/lib/pleroma/workers/backup_worker.ex b/lib/pleroma/workers/backup_worker.ex index 54ac31a3c..41f404e69 100644 --- a/lib/pleroma/workers/backup_worker.ex +++ b/lib/pleroma/workers/backup_worker.ex @@ -8,44 +8,25 @@ defmodule Pleroma.Workers.BackupWorker do alias Oban.Job alias Pleroma.User.Backup - def process(backup, admin_user_id \\ nil) do - %{"op" => "process", "backup_id" => backup.id, "admin_user_id" => admin_user_id} - |> new() - |> Oban.insert() - end - - def schedule_deletion(backup) do - days = Pleroma.Config.get([Backup, 
:purge_after_days]) - time = 60 * 60 * 24 * days - scheduled_at = Calendar.NaiveDateTime.add!(backup.inserted_at, time) - - %{"op" => "delete", "backup_id" => backup.id} - |> new(scheduled_at: scheduled_at) - |> Oban.insert() - end - - def delete(backup) do - %{"op" => "delete", "backup_id" => backup.id} - |> new() - |> Oban.insert() - end - @impl Oban.Worker def perform(%Job{ - args: %{"op" => "process", "backup_id" => backup_id, "admin_user_id" => admin_user_id} + args: %{"op" => "process", "backup_id" => backup_id} }) do - with {:ok, %Backup{} = backup} <- - backup_id |> Backup.get() |> Backup.process(), - {:ok, _job} <- schedule_deletion(backup), - :ok <- Backup.remove_outdated(backup), - :ok <- maybe_deliver_email(backup, admin_user_id) do - {:ok, backup} + with {_, %Backup{} = backup} <- {:get, Backup.get_by_id(backup_id)}, + {_, {:ok, updated_backup}} <- {:run, Backup.run(backup)}, + {_, {:ok, uploaded_backup}} <- {:upload, Backup.upload(updated_backup)}, + {_, {:ok, _job}} <- {:delete, Backup.schedule_delete(uploaded_backup)}, + {_, :ok} <- {:outdated, Backup.remove_outdated(uploaded_backup.user)}, + {_, :ok} <- {:email, maybe_deliver_email(uploaded_backup)} do + {:ok, uploaded_backup} + else + e -> {:error, e} end end def perform(%Job{args: %{"op" => "delete", "backup_id" => backup_id}}) do - case Backup.get(backup_id) do - %Backup{} = backup -> Backup.delete(backup) + case Backup.get_by_id(backup_id) do + %Backup{} = backup -> Backup.delete_archive(backup) nil -> :ok end end @@ -57,13 +38,13 @@ defp has_email?(user) do not is_nil(user.email) and user.email != "" end - defp maybe_deliver_email(backup, admin_user_id) do + defp maybe_deliver_email(backup) do has_mailer = Pleroma.Config.get([Pleroma.Emails.Mailer, :enabled]) backup = backup |> Pleroma.Repo.preload(:user) if has_email?(backup.user) and has_mailer do backup - |> Pleroma.Emails.UserEmail.backup_is_ready_email(admin_user_id) + |> Pleroma.Emails.UserEmail.backup_is_ready_email() |> 
Pleroma.Emails.Mailer.deliver() :ok diff --git a/priv/repo/migrations/20240622175346_backup_refactor.exs b/priv/repo/migrations/20240622175346_backup_refactor.exs new file mode 100644 index 000000000..5dfc55789 --- /dev/null +++ b/priv/repo/migrations/20240622175346_backup_refactor.exs @@ -0,0 +1,19 @@ +defmodule Pleroma.Repo.Migrations.BackupRefactor do + use Ecto.Migration + + def up do + alter table("backups") do + remove(:state) + remove(:processed_number) + add(:tempdir, :string) + end + end + + def down do + alter table("backups") do + add(:state, :integer, default: 5) + add(:processed_number, :integer, default: 0) + remove(:tempdir) + end + end +end From e5a738d465fa3cb16a5737b03561523059ed727c Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 24 Jun 2024 10:10:53 -0400 Subject: [PATCH 2/6] Refactor tests for Backups --- test/pleroma/user/backup_async_test.exs | 49 ----- test/pleroma/user/backup_test.exs | 175 +++++++----------- .../controllers/admin_api_controller_test.exs | 8 +- .../controllers/backup_controller_test.exs | 4 +- .../pleroma_api/views/backup_view_test.exs | 35 +--- test/support/mocks.ex | 2 - 6 files changed, 80 insertions(+), 193 deletions(-) delete mode 100644 test/pleroma/user/backup_async_test.exs diff --git a/test/pleroma/user/backup_async_test.exs b/test/pleroma/user/backup_async_test.exs deleted file mode 100644 index b0e9413be..000000000 --- a/test/pleroma/user/backup_async_test.exs +++ /dev/null @@ -1,49 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2023 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.User.BackupAsyncTest do - use Pleroma.DataCase, async: true - - import Pleroma.Factory - import Mox - - alias Pleroma.UnstubbedConfigMock, as: ConfigMock - alias Pleroma.User.Backup - alias Pleroma.User.Backup.ProcessorMock - - setup do - user = insert(:user, %{nickname: "cofe", name: "Cofe", ap_id: "http://cofe.io/users/cofe"}) - - {:ok, backup} = user |> Backup.new() 
|> Repo.insert() - %{backup: backup} - end - - test "it handles unrecoverable exceptions", %{backup: backup} do - ProcessorMock - |> expect(:do_process, fn _, _ -> - raise "mock exception" - end) - - ConfigMock - |> stub_with(Pleroma.Config) - - {:error, %{backup: backup, reason: :exit}} = Backup.process(backup, ProcessorMock) - - assert backup.state == :failed - end - - test "it handles timeouts", %{backup: backup} do - ProcessorMock - |> expect(:do_process, fn _, _ -> - Process.sleep(:timer.seconds(4)) - end) - - ConfigMock - |> expect(:get, fn [Pleroma.User.Backup, :process_wait_time] -> :timer.seconds(2) end) - - {:error, %{backup: backup, reason: :timeout}} = Backup.process(backup, ProcessorMock) - - assert backup.state == :failed - end -end diff --git a/test/pleroma/user/backup_test.exs b/test/pleroma/user/backup_test.exs index d82d1719b..24fe09f7e 100644 --- a/test/pleroma/user/backup_test.exs +++ b/test/pleroma/user/backup_test.exs @@ -6,7 +6,6 @@ defmodule Pleroma.User.BackupTest do use Oban.Testing, repo: Pleroma.Repo use Pleroma.DataCase - import Mock import Pleroma.Factory import Swoosh.TestAssertions import Mox @@ -16,7 +15,6 @@ defmodule Pleroma.User.BackupTest do alias Pleroma.UnstubbedConfigMock, as: ConfigMock alias Pleroma.Uploaders.S3.ExAwsMock alias Pleroma.User.Backup - alias Pleroma.User.Backup.ProcessorMock alias Pleroma.Web.CommonAPI alias Pleroma.Workers.BackupWorker @@ -28,79 +26,56 @@ defmodule Pleroma.User.BackupTest do ConfigMock |> stub_with(Pleroma.Config) - ProcessorMock - |> stub_with(Pleroma.User.Backup.Processor) - :ok end test "it does not requrie enabled email" do clear_config([Pleroma.Emails.Mailer, :enabled], false) user = insert(:user) - assert {:ok, _} = Backup.create(user) + assert {:ok, _} = Backup.user(user) end test "it does not require user's email" do user = insert(:user, %{email: nil}) - assert {:ok, _} = Backup.create(user) + assert {:ok, _} = Backup.user(user) end test "it creates a backup record and an Oban job" do 
- %{id: user_id} = user = insert(:user) - assert {:ok, %Oban.Job{args: args}} = Backup.create(user) + user = insert(:user) + assert {:ok, %Backup{} = backup} = Backup.user(user) + assert {:ok, %Oban.Job{args: args}} = Backup.schedule_backup(backup) assert_enqueued(worker: BackupWorker, args: args) - backup = Backup.get(args["backup_id"]) - assert %Backup{user_id: ^user_id, processed: false, file_size: 0, state: :pending} = backup + backup = Backup.get_by_id(args["backup_id"]) + assert %Backup{processed: false, file_size: 0} = backup end test "it return an error if the export limit is over" do - %{id: user_id} = user = insert(:user) + user = insert(:user) limit_days = Pleroma.Config.get([Backup, :limit_days]) - assert {:ok, %Oban.Job{args: args}} = Backup.create(user) - backup = Backup.get(args["backup_id"]) - assert %Backup{user_id: ^user_id, processed: false, file_size: 0} = backup + {:ok, first_backup} = Backup.user(user) + {:ok, _run_backup} = Backup.run(first_backup) - assert Backup.create(user) == {:error, "Last export was less than #{limit_days} days ago"} + assert Backup.user(user) == {:error, "Last export was less than #{limit_days} days ago"} end test "it process a backup record" do clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local) %{id: user_id} = user = insert(:user) - assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user) - assert {:ok, backup} = perform_job(BackupWorker, args) + assert {:ok, %Backup{id: backup_id}} = Backup.user(user) + + oban_args = %{"op" => "process", "backup_id" => backup_id} + + assert {:ok, backup} = perform_job(BackupWorker, oban_args) assert backup.file_size > 0 - assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id, state: :complete} = backup + assert match?(%Backup{id: ^backup_id, processed: true, user_id: ^user_id}, backup) delete_job_args = %{"op" => "delete", "backup_id" => backup_id} assert_enqueued(worker: BackupWorker, args: delete_job_args) assert {:ok, 
backup} = perform_job(BackupWorker, delete_job_args) - refute Backup.get(backup_id) - - email = Pleroma.Emails.UserEmail.backup_is_ready_email(backup) - - assert_email_sent( - to: {user.name, user.email}, - html_body: email.html_body - ) - end - - test "it updates states of the backup" do - clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local) - %{id: user_id} = user = insert(:user) - - assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user) - assert {:ok, backup} = perform_job(BackupWorker, args) - assert backup.file_size > 0 - assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id, state: :complete} = backup - - delete_job_args = %{"op" => "delete", "backup_id" => backup_id} - - assert_enqueued(worker: BackupWorker, args: delete_job_args) - assert {:ok, backup} = perform_job(BackupWorker, delete_job_args) - refute Backup.get(backup_id) + refute Backup.get_by_id(backup_id) email = Pleroma.Emails.UserEmail.backup_is_ready_email(backup) @@ -114,10 +89,15 @@ test "it does not send an email if the user does not have an email" do clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local) %{id: user_id} = user = insert(:user, %{email: nil}) - assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user) - assert {:ok, backup} = perform_job(BackupWorker, args) - assert backup.file_size > 0 - assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id} = backup + assert {:ok, %Backup{} = backup} = Backup.user(user) + + expected_args = %{"op" => "process", "backup_id" => backup.id} + + assert_enqueued(worker: BackupWorker, args: %{"backup_id" => backup.id}) + assert {:ok, completed_backup} = perform_job(BackupWorker, expected_args) + assert completed_backup.file_size > 0 + assert completed_backup.processed + assert completed_backup.user_id == user_id assert_no_email_sent() end @@ -127,10 +107,13 @@ test "it does not send an email if mailer is not on" do 
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local) %{id: user_id} = user = insert(:user) - assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user) - assert {:ok, backup} = perform_job(BackupWorker, args) + assert {:ok, %Backup{id: backup_id}} = Backup.user(user) + + oban_args = %{"op" => "process", "backup_id" => backup_id} + + assert {:ok, backup} = perform_job(BackupWorker, oban_args) assert backup.file_size > 0 - assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id} = backup + assert match?(%Backup{id: ^backup_id, processed: true, user_id: ^user_id}, backup) assert_no_email_sent() end @@ -139,10 +122,15 @@ test "it does not send an email if the user has an empty email" do clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local) %{id: user_id} = user = insert(:user, %{email: ""}) - assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user) - assert {:ok, backup} = perform_job(BackupWorker, args) + assert {:ok, %Backup{id: backup_id} = backup} = Backup.user(user) + + expected_args = %{"op" => "process", "backup_id" => backup.id} + + assert_enqueued(worker: BackupWorker, args: expected_args) + + assert {:ok, backup} = perform_job(BackupWorker, expected_args) assert backup.file_size > 0 - assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id} = backup + assert match?(%Backup{id: ^backup_id, processed: true, user_id: ^user_id}, backup) assert_no_email_sent() end @@ -152,16 +140,13 @@ test "it removes outdated backups after creating a fresh one" do clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local) user = insert(:user) - assert {:ok, job1} = Backup.create(user) - - assert {:ok, %Backup{}} = ObanHelpers.perform(job1) - assert {:ok, job2} = Backup.create(user) - assert Pleroma.Repo.aggregate(Backup, :count) == 2 - assert {:ok, backup2} = ObanHelpers.perform(job2) + assert {:ok, %{id: backup_one_id}} = Backup.user(user) + assert {:ok, %{id: 
_backup_two_id}} = Backup.user(user) + # Run the backups ObanHelpers.perform_all() - assert [^backup2] = Pleroma.Repo.all(Backup) + assert_enqueued(worker: BackupWorker, args: %{"op" => "delete", "backup_id" => backup_one_id}) end test "it creates a zip archive with user data" do @@ -185,9 +170,12 @@ test "it creates a zip archive with user data" do CommonAPI.follow(other_user, user) - assert {:ok, backup} = user |> Backup.new() |> Repo.insert() - assert {:ok, path} = Backup.export(backup, self()) - assert {:ok, zipfile} = :zip.zip_open(String.to_charlist(path), [:memory]) + assert {:ok, backup} = Backup.user(user) + assert {:ok, run_backup} = Backup.run(backup) + + tempfile = Path.join([run_backup.tempdir, run_backup.file_name]) + + assert {:ok, zipfile} = :zip.zip_open(String.to_charlist(tempfile), [:memory]) assert {:ok, {~c"actor.json", json}} = :zip.zip_get(~c"actor.json", zipfile) assert %{ @@ -275,10 +263,10 @@ test "it creates a zip archive with user data" do } = Jason.decode!(json) :zip.zip_close(zipfile) - File.rm!(path) + File.rm_rf!(run_backup.tempdir) end - test "it counts the correct number processed" do + test "correct number processed" do user = insert(:user, %{nickname: "cofe", name: "Cofe", ap_id: "http://cofe.io/users/cofe"}) Enum.map(1..120, fn i -> @@ -288,43 +276,21 @@ test "it counts the correct number processed" do end) assert {:ok, backup} = user |> Backup.new() |> Repo.insert() - {:ok, backup} = Backup.process(backup) + {:ok, backup} = Backup.run(backup) - assert backup.processed_number == 1 + 120 + 120 + 120 + zip_path = Path.join([backup.tempdir, backup.file_name]) - Backup.delete(backup) - end + assert {:ok, zipfile} = :zip.zip_open(String.to_charlist(zip_path), [:memory]) - test "it handles errors" do - user = insert(:user, %{nickname: "cofe", name: "Cofe", ap_id: "http://cofe.io/users/cofe"}) + backup_parts = [~c"likes.json", ~c"bookmarks.json", ~c"outbox.json"] - Enum.map(1..120, fn i -> - {:ok, _status} = CommonAPI.post(user, 
%{status: "status #{i}"}) + Enum.each(backup_parts, fn part -> + assert {:ok, {_part, part_json}} = :zip.zip_get(part, zipfile) + {:ok, decoded_part} = Jason.decode(part_json) + assert decoded_part["totalItems"] == 120 end) - assert {:ok, backup} = user |> Backup.new() |> Repo.insert() - - with_mock Pleroma.Web.ActivityPub.Transmogrifier, - [:passthrough], - prepare_outgoing: fn data -> - object = - data["object"] - |> Pleroma.Object.normalize(fetch: false) - |> Map.get(:data) - - data = data |> Map.put("object", object) - - if String.contains?(data["object"]["content"], "119"), - do: raise(%Postgrex.Error{}), - else: {:ok, data} - end do - {:ok, backup} = Backup.process(backup) - assert backup.processed - assert backup.state == :complete - assert backup.processed_number == 1 + 119 - - Backup.delete(backup) - end + Backup.delete_archive(backup) end describe "it uploads and deletes a backup archive" do @@ -343,12 +309,11 @@ test "it handles errors" do Bookmark.create(user.id, status3.id) assert {:ok, backup} = user |> Backup.new() |> Repo.insert() - assert {:ok, path} = Backup.export(backup, self()) - [path: path, backup: backup] + [backup: backup] end - test "S3", %{path: path, backup: backup} do + test "S3", %{backup: backup} do clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.S3) clear_config([Pleroma.Uploaders.S3, :streaming_enabled], false) @@ -358,15 +323,17 @@ test "S3", %{path: path, backup: backup} do %{http_method: :delete} -> {:ok, %{status_code: 204}} end) - assert {:ok, %Pleroma.Upload{}} = Backup.upload(backup, path) - assert {:ok, _backup} = Backup.delete(backup) + assert {:ok, backup} = Backup.run(backup) + assert {:ok, %Backup{processed: true}} = Backup.upload(backup) + assert {:ok, _backup} = Backup.delete_archive(backup) end - test "Local", %{path: path, backup: backup} do + test "Local", %{backup: backup} do clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local) - assert {:ok, %Pleroma.Upload{}} = Backup.upload(backup, 
path) - assert {:ok, _backup} = Backup.delete(backup) + assert {:ok, backup} = Backup.run(backup) + assert {:ok, %Backup{processed: true}} = Backup.upload(backup) + assert {:ok, _backup} = Backup.delete_archive(backup) end end end diff --git a/test/pleroma/web/admin_api/controllers/admin_api_controller_test.exs b/test/pleroma/web/admin_api/controllers/admin_api_controller_test.exs index a7ee8359d..6614d1409 100644 --- a/test/pleroma/web/admin_api/controllers/admin_api_controller_test.exs +++ b/test/pleroma/web/admin_api/controllers/admin_api_controller_test.exs @@ -1096,9 +1096,13 @@ test "it creates a backup", %{conn: conn} do ObanHelpers.perform_all() - email = Pleroma.Emails.UserEmail.backup_is_ready_email(backup, admin.id) + email = Pleroma.Emails.UserEmail.backup_is_ready_email(backup) + + assert String.contains?( + email.html_body, + "A full backup of your Pleroma account was requested" + ) - assert String.contains?(email.html_body, "Admin @#{admin.nickname} requested a full backup") assert_email_sent(to: {user.name, user.email}, html_body: email.html_body) log_message = "@#{admin_nickname} requested account backup for @#{user_nickname}" diff --git a/test/pleroma/web/pleroma_api/controllers/backup_controller_test.exs b/test/pleroma/web/pleroma_api/controllers/backup_controller_test.exs index 21e619fa4..1e056adb5 100644 --- a/test/pleroma/web/pleroma_api/controllers/backup_controller_test.exs +++ b/test/pleroma/web/pleroma_api/controllers/backup_controller_test.exs @@ -20,9 +20,7 @@ defmodule Pleroma.Web.PleromaAPI.BackupControllerTest do end test "GET /api/v1/pleroma/backups", %{user: user, conn: conn} do - assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id}}} = Backup.create(user) - - backup = Backup.get(backup_id) + assert {:ok, %Backup{} = backup} = Backup.user(user) response = conn diff --git a/test/pleroma/web/pleroma_api/views/backup_view_test.exs b/test/pleroma/web/pleroma_api/views/backup_view_test.exs index b125b8872..303547f3b 100644 --- 
a/test/pleroma/web/pleroma_api/views/backup_view_test.exs +++ b/test/pleroma/web/pleroma_api/views/backup_view_test.exs @@ -27,42 +27,11 @@ test "it renders the ID" do assert result.id == backup.id end - test "it renders the state and processed_number" do + test "it renders the processed state" do user = insert(:user) backup = Backup.new(user) result = BackupView.render("show.json", backup: backup) - assert result.state == to_string(backup.state) - assert result.processed_number == backup.processed_number - end - - test "it renders failed state with legacy records" do - backup = %Backup{ - id: 0, - content_type: "application/zip", - file_name: "dummy", - file_size: 1, - state: :invalid, - processed: true, - processed_number: 1, - inserted_at: NaiveDateTime.utc_now() - } - - result = BackupView.render("show.json", backup: backup) - assert result.state == "complete" - - backup = %Backup{ - id: 0, - content_type: "application/zip", - file_name: "dummy", - file_size: 1, - state: :invalid, - processed: false, - processed_number: 1, - inserted_at: NaiveDateTime.utc_now() - } - - result = BackupView.render("show.json", backup: backup) - assert result.state == "failed" + refute result.processed end end diff --git a/test/support/mocks.ex b/test/support/mocks.ex index 63cbc49ab..d84958e15 100644 --- a/test/support/mocks.ex +++ b/test/support/mocks.ex @@ -32,6 +32,4 @@ Mox.defmock(Pleroma.LoggerMock, for: Pleroma.Logging) -Mox.defmock(Pleroma.User.Backup.ProcessorMock, for: Pleroma.User.Backup.ProcessorAPI) - Mox.defmock(Pleroma.Uploaders.S3.ExAwsMock, for: Pleroma.Uploaders.S3.ExAwsAPI) From ece063586b5e2c2e8c4eb6e92e50b3dcc09d2836 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 24 Jun 2024 16:36:43 -0400 Subject: [PATCH 3/6] Limit backup jobs to 5 minutes --- lib/pleroma/workers/backup_worker.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/workers/backup_worker.ex b/lib/pleroma/workers/backup_worker.ex index 41f404e69..fb366de4a 100644 
--- a/lib/pleroma/workers/backup_worker.ex +++ b/lib/pleroma/workers/backup_worker.ex @@ -32,7 +32,7 @@ def perform(%Job{args: %{"op" => "delete", "backup_id" => backup_id}}) do end @impl Oban.Worker - def timeout(_job), do: :infinity + def timeout(_job), do: :timer.minutes(5) defp has_email?(user) do not is_nil(user.email) and user.email != "" From 3f60d7bf644fc38e8e5d9f525a1675526ade0037 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 24 Jun 2024 22:52:21 -0400 Subject: [PATCH 4/6] Better random tempdir format --- lib/pleroma/user/backup.ex | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/pleroma/user/backup.ex b/lib/pleroma/user/backup.ex index 2568089a4..7feaa22bf 100644 --- a/lib/pleroma/user/backup.ex +++ b/lib/pleroma/user/backup.ex @@ -216,13 +216,15 @@ def run(%__MODULE__{} = backup) do end defp tempdir do + rand = :crypto.strong_rand_bytes(8) |> Base.url_encode64(padding: false) + subdir = "backup-#{rand}" + case Config.get([__MODULE__, :tempdir]) do nil -> - System.tmp_dir!() + Path.join([System.tmp_dir!(), subdir]) path -> - rand = :crypto.strong_rand_bytes(8) |> Base.url_encode64(padding: false) - Path.join([path, rand]) + Path.join([path, subdir]) end end From e5cbbaf3f0385492580b27a624b936c1d74757a1 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 24 Jun 2024 22:58:38 -0400 Subject: [PATCH 5/6] Extend the backup job time limit to 30 minutes --- lib/pleroma/workers/backup_worker.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/workers/backup_worker.ex b/lib/pleroma/workers/backup_worker.ex index fb366de4a..1791c558d 100644 --- a/lib/pleroma/workers/backup_worker.ex +++ b/lib/pleroma/workers/backup_worker.ex @@ -32,7 +32,7 @@ def perform(%Job{args: %{"op" => "delete", "backup_id" => backup_id}}) do end @impl Oban.Worker - def timeout(_job), do: :timer.minutes(5) + def timeout(_job), do: :timer.minutes(30) defp has_email?(user) do not is_nil(user.email) and user.email != "" From 
187897874bddc27ef23606730e79522e3583ec43 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 22 Jul 2024 14:00:54 -0400 Subject: [PATCH 6/6] Make backup timeout configurable --- config/config.exs | 4 ++-- config/description.exs | 15 +++++++-------- docs/configuration/cheatsheet.md | 1 + lib/pleroma/workers/backup_worker.ex | 3 ++- 4 files changed, 12 insertions(+), 11 deletions(-) diff --git a/config/config.exs b/config/config.exs index 044f951f6..3058160ea 100644 --- a/config/config.exs +++ b/config/config.exs @@ -908,8 +908,8 @@ purge_after_days: 30, limit_days: 7, dir: nil, - process_wait_time: 30_000, - process_chunk_size: 100 + process_chunk_size: 100, + timeout: :timer.minutes(30) config :pleroma, ConcurrentLimiter, [ {Pleroma.Search, [max_running: 30, max_waiting: 50]} diff --git a/config/description.exs b/config/description.exs index b7d86dc63..179eea3f7 100644 --- a/config/description.exs +++ b/config/description.exs @@ -3355,20 +3355,19 @@ description: "Limit user to export not more often than once per N days", suggestions: [7] }, - %{ - key: :process_wait_time, - type: :integer, - label: "Process Wait Time", - description: - "The amount of time to wait for backup to report progress, in milliseconds. If no progress is received from the backup job for that much time, terminate it and deem it failed.", - suggestions: [30_000] - }, %{ key: :process_chunk_size, type: :integer, label: "Process Chunk Size", description: "The number of activities to fetch in the backup job for each chunk.", suggestions: [100] + }, + %{ + key: :timeout, + type: :integer, + label: "Timeout", + description: "The amount of time to wait for the backup to complete, in milliseconds (the value is returned directly from Oban.Worker.timeout/1).", + suggestions: [1_800_000] + } ] }, diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index 9c5659988..5689d3be5 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -1171,6 +1171,7 @@ Control favicons for instances. 3. 
the directory named by the TMP environment variable 4. C:\TMP on Windows or /tmp on Unix-like operating systems 5. as a last resort, the current working directory +* `:timeout` an integer representing milliseconds ## Frontend management diff --git a/lib/pleroma/workers/backup_worker.ex b/lib/pleroma/workers/backup_worker.ex index 1791c558d..d1b6fcdad 100644 --- a/lib/pleroma/workers/backup_worker.ex +++ b/lib/pleroma/workers/backup_worker.ex @@ -6,6 +6,7 @@ defmodule Pleroma.Workers.BackupWorker do use Oban.Worker, queue: :slow, max_attempts: 1 alias Oban.Job + alias Pleroma.Config.Getting, as: Config alias Pleroma.User.Backup @impl Oban.Worker @@ -32,7 +33,7 @@ def perform(%Job{args: %{"op" => "delete", "backup_id" => backup_id}}) do end @impl Oban.Worker - def timeout(_job), do: :timer.minutes(30) + def timeout(_job), do: Config.get([Backup, :timeout], :timer.minutes(30)) defp has_email?(user) do not is_nil(user.email) and user.email != ""