Merge branch 'oban/backup' into 'develop'

Backups Refactoring

See merge request pleroma/pleroma!4158
feld 2024-07-29 16:07:35 +00:00
commit c9042763b0
20 changed files with 303 additions and 500 deletions


@@ -0,0 +1 @@
+Refactor the user backups code and improve test coverage


@@ -910,8 +910,8 @@
   purge_after_days: 30,
   limit_days: 7,
   dir: nil,
-  process_wait_time: 30_000,
-  process_chunk_size: 100
+  process_chunk_size: 100,
+  timeout: :timer.minutes(30)

config :pleroma, ConcurrentLimiter, [
  {Pleroma.Search, [max_running: 30, max_waiting: 50]}
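For orientation: this refactor replaces the old progress-watchdog knob (`process_wait_time`) with a single overall job timeout. A minimal sketch of overriding the new options in a site config (file name and values illustrative; `:timer.minutes/1` returns milliseconds, which is what the worker's Oban `timeout/1` callback further down consumes):

```elixir
# e.g. config/prod.secret.exs (hypothetical)
import Config

config :pleroma, Pleroma.User.Backup,
  purge_after_days: 30,
  limit_days: 7,
  process_chunk_size: 100,
  # read by Pleroma.Workers.BackupWorker.timeout/1
  timeout: :timer.minutes(30)
```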


@@ -3355,20 +3355,19 @@
         description: "Limit user to export not more often than once per N days",
         suggestions: [7]
       },
-      %{
-        key: :process_wait_time,
-        type: :integer,
-        label: "Process Wait Time",
-        description:
-          "The amount of time to wait for backup to report progress, in milliseconds. If no progress is received from the backup job for that much time, terminate it and deem it failed.",
-        suggestions: [30_000]
-      },
       %{
         key: :process_chunk_size,
         type: :integer,
         label: "Process Chunk Size",
         description: "The number of activities to fetch in the backup job for each chunk.",
         suggestions: [100]
+      },
+      %{
+        key: :timeout,
+        type: :integer,
+        label: "Timeout",
+        description: "The amount of time to wait for backup to complete in seconds.",
+        suggestions: [1_800]
       }
     ]
   },


@@ -188,6 +188,8 @@
 config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, enable: false

+config :pleroma, Pleroma.User.Backup, tempdir: "test/tmp"
+
 if File.exists?("./config/test.secret.exs") do
   import_config "test.secret.exs"
 else


@@ -1171,6 +1171,7 @@ Control favicons for instances.
 3. the directory named by the TMP environment variable
 4. C:\TMP on Windows or /tmp on Unix-like operating systems
 5. as a last resort, the current working directory
+* `:timeout` an integer representing seconds

 ## Frontend management


@@ -27,11 +27,3 @@
   failed: 4,
   manual: 5
 )
-
-defenum(Pleroma.User.Backup.State,
-  pending: 1,
-  running: 2,
-  complete: 3,
-  failed: 4,
-  invalid: 5
-)


@@ -345,37 +345,22 @@ def unsubscribe_url(user, notifications_type) do
     Router.Helpers.subscription_url(Endpoint, :unsubscribe, token)
   end

-  def backup_is_ready_email(backup, admin_user_id \\ nil) do
+  def backup_is_ready_email(backup) do
     %{user: user} = Pleroma.Repo.preload(backup, :user)

     Gettext.with_locale_or_default user.language do
       download_url = Pleroma.Web.PleromaAPI.BackupView.download_url(backup)

       html_body =
-        if is_nil(admin_user_id) do
-          Gettext.dpgettext(
-            "static_pages",
-            "account archive email body - self-requested",
-            """
-            <p>You requested a full backup of your Pleroma account. It's ready for download:</p>
-            <p><a href="%{download_url}">%{download_url}</a></p>
-            """,
-            download_url: download_url
-          )
-        else
-          admin = Pleroma.Repo.get(User, admin_user_id)
-
-          Gettext.dpgettext(
-            "static_pages",
-            "account archive email body - admin requested",
-            """
-            <p>Admin @%{admin_nickname} requested a full backup of your Pleroma account. It's ready for download:</p>
-            <p><a href="%{download_url}">%{download_url}</a></p>
-            """,
-            admin_nickname: admin.nickname,
-            download_url: download_url
-          )
-        end
+        Gettext.dpgettext(
+          "static_pages",
+          "account archive email body",
+          """
+          <p>A full backup of your Pleroma account was requested. It's ready for download:</p>
+          <p><a href="%{download_url}">%{download_url}</a></p>
+          """,
+          download_url: download_url
+        )

       new()
       |> to(recipient(user))


@@ -14,9 +14,10 @@ defmodule Pleroma.User.Backup do
   alias Pleroma.Activity
   alias Pleroma.Bookmark
+  alias Pleroma.Config
   alias Pleroma.Repo
+  alias Pleroma.Uploaders.Uploader
   alias Pleroma.User
-  alias Pleroma.User.Backup.State
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.Transmogrifier
   alias Pleroma.Web.ActivityPub.UserView
@@ -29,55 +30,29 @@ defmodule Pleroma.User.Backup do
     field(:file_name, :string)
     field(:file_size, :integer, default: 0)
     field(:processed, :boolean, default: false)
-    field(:state, State, default: :invalid)
-    field(:processed_number, :integer, default: 0)
+    field(:tempdir, :string)

     belongs_to(:user, User, type: FlakeId.Ecto.CompatType)

     timestamps()
   end

-  @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
-
-  def create(user, admin_id \\ nil) do
-    with :ok <- validate_limit(user, admin_id),
-         {:ok, backup} <- user |> new() |> Repo.insert() do
-      BackupWorker.process(backup, admin_id)
-    end
-  end
-
-  def new(user) do
-    rand_str = :crypto.strong_rand_bytes(32) |> Base.url_encode64(padding: false)
-    datetime = Calendar.NaiveDateTime.Format.iso8601_basic(NaiveDateTime.utc_now())
-    name = "archive-#{user.nickname}-#{datetime}-#{rand_str}.zip"
-
-    %__MODULE__{
-      user_id: user.id,
-      content_type: "application/zip",
-      file_name: name,
-      state: :pending
-    }
-  end
-
-  def delete(backup) do
-    uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
-
-    with :ok <- uploader.delete_file(Path.join("backups", backup.file_name)) do
-      Repo.delete(backup)
-    end
-  end
-
-  defp validate_limit(_user, admin_id) when is_binary(admin_id), do: :ok
-
-  defp validate_limit(user, nil) do
-    case get_last(user.id) do
-      %__MODULE__{inserted_at: inserted_at} ->
-        days = Pleroma.Config.get([__MODULE__, :limit_days])
-        diff = Timex.diff(NaiveDateTime.utc_now(), inserted_at, :days)
-
-        if diff > days do
-          :ok
-        else
+  @doc """
+  Schedules a job to backup a user if the number of backup requests has not exceeded the limit.
+  Admins can directly call new/1 and schedule_backup/1 to bypass the limit.
+  """
+  @spec user(User.t()) :: {:ok, t()} | {:error, any()}
+  def user(user) do
+    days = Config.get([__MODULE__, :limit_days])
+
+    with true <- permitted?(user),
+         %__MODULE__{} = backup <- new(user),
+         {:ok, inserted_backup} <- Repo.insert(backup),
+         {:ok, %Oban.Job{}} <- schedule_backup(inserted_backup) do
+      {:ok, inserted_backup}
+    else
+      false ->
         {:error,
          dngettext(
            "errors",
@@ -86,14 +61,80 @@ defp validate_limit(user, nil) do
            days,
            days: days
          )}
-        end

-      nil ->
-        :ok
+      e ->
+        {:error, e}
     end
   end

-  def get_last(user_id) do
+  @doc "Generates a %Backup{} for a user with a random file name"
+  @spec new(User.t()) :: t()
+  def new(user) do
+    rand_str = :crypto.strong_rand_bytes(32) |> Base.url_encode64(padding: false)
+    datetime = Calendar.NaiveDateTime.Format.iso8601_basic(NaiveDateTime.utc_now())
+    name = "archive-#{user.nickname}-#{datetime}-#{rand_str}.zip"
+
+    %__MODULE__{
+      content_type: "application/zip",
+      file_name: name,
+      tempdir: tempdir(),
+      user: user
+    }
+  end
+
+  @doc "Schedules the execution of the provided backup"
+  @spec schedule_backup(t()) :: {:ok, Oban.Job.t()} | {:error, any()}
+  def schedule_backup(backup) do
+    with false <- is_nil(backup.id) do
+      %{"op" => "process", "backup_id" => backup.id}
+      |> BackupWorker.new()
+      |> Oban.insert()
+    else
+      true ->
+        {:error, "Backup is missing id. Please insert it into the Repo first."}
+
+      e ->
+        {:error, e}
+    end
+  end
+
+  @doc "Deletes the backup archive file and removes the database record"
+  @spec delete_archive(t()) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
+  def delete_archive(backup) do
+    uploader = Config.get([Pleroma.Upload, :uploader])
+
+    with :ok <- uploader.delete_file(Path.join("backups", backup.file_name)) do
+      Repo.delete(backup)
+    end
+  end
+
+  @doc "Schedules a job to delete the backup archive"
+  @spec schedule_delete(t()) :: {:ok, Oban.Job.t()} | {:error, any()}
+  def schedule_delete(backup) do
+    days = Config.get([__MODULE__, :purge_after_days])
+    time = 60 * 60 * 24 * days
+    scheduled_at = Calendar.NaiveDateTime.add!(backup.inserted_at, time)
+
+    %{"op" => "delete", "backup_id" => backup.id}
+    |> BackupWorker.new(scheduled_at: scheduled_at)
+    |> Oban.insert()
+  end
+
+  defp permitted?(user) do
+    with {_, %__MODULE__{inserted_at: inserted_at}} <- {:last, get_last(user)},
+         days = Config.get([__MODULE__, :limit_days]),
+         diff = Timex.diff(NaiveDateTime.utc_now(), inserted_at, :days),
+         {_, true} <- {:diff, diff > days} do
+      true
+    else
+      {:last, nil} -> true
+      {:diff, false} -> false
+    end
+  end
+
+  @doc "Returns last backup for the provided user"
+  @spec get_last(User.t()) :: t()
+  def get_last(%User{id: user_id}) do
     __MODULE__
     |> where(user_id: ^user_id)
     |> order_by(desc: :id)
@@ -101,6 +142,8 @@ def get_last(user_id) do
     |> Repo.one()
   end

+  @doc "Lists all existing backups for a user"
+  @spec list(User.t()) :: [Ecto.Schema.t() | term()]
   def list(%User{id: user_id}) do
     __MODULE__
     |> where(user_id: ^user_id)
@@ -108,92 +151,35 @@ def list(%User{id: user_id}) do
     |> Repo.all()
   end

-  def remove_outdated(%__MODULE__{id: latest_id, user_id: user_id}) do
-    __MODULE__
-    |> where(user_id: ^user_id)
-    |> where([b], b.id != ^latest_id)
-    |> Repo.all()
-    |> Enum.each(&BackupWorker.delete/1)
-  end
-
-  def get(id), do: Repo.get(__MODULE__, id)
-
-  defp set_state(backup, state, processed_number \\ nil) do
-    struct =
-      %{state: state}
-      |> Pleroma.Maps.put_if_present(:processed_number, processed_number)
-
-    backup
-    |> cast(struct, [:state, :processed_number])
-    |> Repo.update()
-  end
-
-  def process(
-        %__MODULE__{} = backup,
-        processor_module \\ __MODULE__.Processor
-      ) do
-    set_state(backup, :running, 0)
-
-    current_pid = self()
-
-    task =
-      Task.Supervisor.async_nolink(
-        Pleroma.TaskSupervisor,
-        processor_module,
-        :do_process,
-        [backup, current_pid]
-      )
-
-    wait_backup(backup, backup.processed_number, task)
-  end
-
-  defp wait_backup(backup, current_processed, task) do
-    wait_time = @config_impl.get([__MODULE__, :process_wait_time])
-
-    receive do
-      {:progress, new_processed} ->
-        total_processed = current_processed + new_processed
-
-        set_state(backup, :running, total_processed)
-        wait_backup(backup, total_processed, task)
-
-      {:DOWN, _ref, _proc, _pid, reason} ->
-        backup = get(backup.id)
-
-        if reason != :normal do
-          Logger.error("Backup #{backup.id} process ended abnormally: #{inspect(reason)}")
-
-          {:ok, backup} = set_state(backup, :failed)
-
-          cleanup(backup)
-
-          {:error,
-           %{
-             backup: backup,
-             reason: :exit,
-             details: reason
-           }}
-        else
-          {:ok, backup}
-        end
-    after
-      wait_time ->
-        Logger.error(
-          "Backup #{backup.id} timed out after no response for #{wait_time}ms, terminating"
-        )
-
-        Task.Supervisor.terminate_child(Pleroma.TaskSupervisor, task.pid)
-
-        {:ok, backup} = set_state(backup, :failed)
-
-        cleanup(backup)
-
-        {:error,
-         %{
-           backup: backup,
-           reason: :timeout
-         }}
-    end
-  end
+  @doc "Schedules deletion of all but the most recent backup"
+  @spec remove_outdated(User.t()) :: :ok
+  def remove_outdated(user) do
+    with %__MODULE__{} = latest_backup <- get_last(user) do
+      __MODULE__
+      |> where(user_id: ^user.id)
+      |> where([b], b.id != ^latest_backup.id)
+      |> Repo.all()
+      |> Enum.each(&schedule_delete/1)
+    else
+      _ -> :ok
+    end
+  end
+
+  def get_by_id(id), do: Repo.get(__MODULE__, id)
+
+  @doc "Generates changeset for %Pleroma.User.Backup{}"
+  @spec changeset(%__MODULE__{}, map()) :: %Ecto.Changeset{}
+  def changeset(backup \\ %__MODULE__{}, attrs) do
+    backup
+    |> cast(attrs, [:content_type, :file_name, :file_size, :processed, :tempdir])
+  end
+
+  @doc "Updates the backup record"
+  @spec update_record(%__MODULE__{}, map()) :: {:ok, %__MODULE__{}} | {:error, %Ecto.Changeset{}}
+  def update_record(%__MODULE__{} = backup, attrs) do
+    backup
+    |> changeset(attrs)
+    |> Repo.update()
+  end
@@ -204,53 +190,68 @@ defp wait_backup(backup, current_processed, task) do
     ~c"followers.json",
     ~c"following.json"
   ]

-  @spec export(Pleroma.User.Backup.t(), pid()) :: {:ok, String.t()} | :error
-  def export(%__MODULE__{} = backup, caller_pid) do
+  @spec run(t()) :: {:ok, t()} | {:error, :failed}
+  def run(%__MODULE__{} = backup) do
     backup = Repo.preload(backup, :user)
-    dir = backup_tempdir(backup)
-
-    with :ok <- File.mkdir(dir),
-         :ok <- actor(dir, backup.user, caller_pid),
-         :ok <- statuses(dir, backup.user, caller_pid),
-         :ok <- likes(dir, backup.user, caller_pid),
-         :ok <- bookmarks(dir, backup.user, caller_pid),
-         :ok <- followers(dir, backup.user, caller_pid),
-         :ok <- following(dir, backup.user, caller_pid),
-         {:ok, zip_path} <- :zip.create(backup.file_name, @files, cwd: dir),
-         {:ok, _} <- File.rm_rf(dir) do
-      {:ok, zip_path}
+    tempfile = Path.join([backup.tempdir, backup.file_name])
+
+    with {_, :ok} <- {:mkdir, File.mkdir_p(backup.tempdir)},
+         {_, :ok} <- {:actor, actor(backup.tempdir, backup.user)},
+         {_, :ok} <- {:statuses, statuses(backup.tempdir, backup.user)},
+         {_, :ok} <- {:likes, likes(backup.tempdir, backup.user)},
+         {_, :ok} <- {:bookmarks, bookmarks(backup.tempdir, backup.user)},
+         {_, :ok} <- {:followers, followers(backup.tempdir, backup.user)},
+         {_, :ok} <- {:following, following(backup.tempdir, backup.user)},
+         {_, {:ok, _zip_path}} <-
+           {:zip, :zip.create(to_charlist(tempfile), @files, cwd: to_charlist(backup.tempdir))},
+         {_, {:ok, %File.Stat{size: zip_size}}} <- {:filestat, File.stat(tempfile)},
+         {:ok, updated_backup} <- update_record(backup, %{file_size: zip_size}) do
+      {:ok, updated_backup}
     else
-      _ -> :error
+      _ ->
+        File.rm_rf(backup.tempdir)
+        {:error, :failed}
     end
   end

-  def dir(name) do
-    dir = Pleroma.Config.get([__MODULE__, :dir]) || System.tmp_dir!()
-    Path.join(dir, name)
+  defp tempdir do
+    rand = :crypto.strong_rand_bytes(8) |> Base.url_encode64(padding: false)
+    subdir = "backup-#{rand}"
+
+    case Config.get([__MODULE__, :tempdir]) do
+      nil ->
+        Path.join([System.tmp_dir!(), subdir])
+
+      path ->
+        Path.join([path, subdir])
+    end
   end

-  def upload(%__MODULE__{} = backup, zip_path) do
-    uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
+  @doc "Uploads the completed backup and marks it as processed"
+  @spec upload(t()) :: {:ok, t()}
+  def upload(%__MODULE__{tempdir: tempdir} = backup) when is_binary(tempdir) do
+    uploader = Config.get([Pleroma.Upload, :uploader])

     upload = %Pleroma.Upload{
       name: backup.file_name,
-      tempfile: zip_path,
+      tempfile: Path.join([tempdir, backup.file_name]),
       content_type: backup.content_type,
       path: Path.join("backups", backup.file_name)
     }

-    with {:ok, _} <- Pleroma.Uploaders.Uploader.put_file(uploader, upload),
-         :ok <- File.rm(zip_path) do
-      {:ok, upload}
+    with {:ok, _} <- Uploader.put_file(uploader, upload),
+         {:ok, uploaded_backup} <- update_record(backup, %{processed: true}),
+         {:ok, _} <- File.rm_rf(tempdir) do
+      {:ok, uploaded_backup}
     end
   end

-  defp actor(dir, user, caller_pid) do
+  defp actor(dir, user) do
     with {:ok, json} <-
            UserView.render("user.json", %{user: user})
            |> Map.merge(%{"likes" => "likes.json", "bookmarks" => "bookmarks.json"})
            |> Jason.encode() do
-      send(caller_pid, {:progress, 1})
       File.write(Path.join(dir, "actor.json"), json)
     end
   end
@@ -269,22 +270,10 @@ defp write_header(file, name) do
     )
   end

-  defp should_report?(num, chunk_size), do: rem(num, chunk_size) == 0
-
-  defp backup_tempdir(backup) do
-    name = String.trim_trailing(backup.file_name, ".zip")
-    dir(name)
-  end
-
-  defp cleanup(backup) do
-    dir = backup_tempdir(backup)
-    File.rm_rf(dir)
-  end
-
-  defp write(query, dir, name, fun, caller_pid) do
+  defp write(query, dir, name, fun) do
     path = Path.join(dir, "#{name}.json")
-    chunk_size = Pleroma.Config.get([__MODULE__, :process_chunk_size])
+    chunk_size = Config.get([__MODULE__, :process_chunk_size])

     with {:ok, file} <- File.open(path, [:write, :utf8]),
          :ok <- write_header(file, name) do
@@ -300,10 +289,6 @@ defp write(query, dir, name, fun, caller_pid) do
             end),
           {:ok, str} <- Jason.encode(data),
           :ok <- IO.write(file, str <> ",\n") do
-        if should_report?(acc + 1, chunk_size) do
-          send(caller_pid, {:progress, chunk_size})
-        end
-
        acc + 1
      else
        {:error, e} ->
@@ -318,31 +303,29 @@ defp write(query, dir, name, fun, caller_pid) do
         end
       end)

-    send(caller_pid, {:progress, rem(total, chunk_size)})
-
     with :ok <- :file.pwrite(file, {:eof, -2}, "\n],\n \"totalItems\": #{total}}") do
       File.close(file)
     end
    end
  end

-  defp bookmarks(dir, %{id: user_id} = _user, caller_pid) do
+  defp bookmarks(dir, %{id: user_id} = _user) do
     Bookmark
     |> where(user_id: ^user_id)
     |> join(:inner, [b], activity in assoc(b, :activity))
     |> select([b, a], %{id: b.id, object: fragment("(?)->>'object'", a.data)})
-    |> write(dir, "bookmarks", fn a -> {:ok, a.object} end, caller_pid)
+    |> write(dir, "bookmarks", fn a -> {:ok, a.object} end)
   end

-  defp likes(dir, user, caller_pid) do
+  defp likes(dir, user) do
     user.ap_id
     |> Activity.Queries.by_actor()
     |> Activity.Queries.by_type("Like")
     |> select([like], %{id: like.id, object: fragment("(?)->>'object'", like.data)})
-    |> write(dir, "likes", fn a -> {:ok, a.object} end, caller_pid)
+    |> write(dir, "likes", fn a -> {:ok, a.object} end)
   end

-  defp statuses(dir, user, caller_pid) do
+  defp statuses(dir, user) do
     opts =
       %{}
       |> Map.put(:type, ["Create", "Announce"])
@@ -362,52 +345,17 @@ defp statuses(dir, user, caller_pid) do
         with {:ok, activity} <- Transmogrifier.prepare_outgoing(a.data) do
           {:ok, Map.delete(activity, "@context")}
         end
-      end,
-      caller_pid
+      end
     )
   end

-  defp followers(dir, user, caller_pid) do
+  defp followers(dir, user) do
     User.get_followers_query(user)
-    |> write(dir, "followers", fn a -> {:ok, a.ap_id} end, caller_pid)
+    |> write(dir, "followers", fn a -> {:ok, a.ap_id} end)
   end

-  defp following(dir, user, caller_pid) do
+  defp following(dir, user) do
     User.get_friends_query(user)
-    |> write(dir, "following", fn a -> {:ok, a.ap_id} end, caller_pid)
+    |> write(dir, "following", fn a -> {:ok, a.ap_id} end)
   end
 end
-
-defmodule Pleroma.User.Backup.ProcessorAPI do
-  @callback do_process(%Pleroma.User.Backup{}, pid()) ::
-              {:ok, %Pleroma.User.Backup{}} | {:error, any()}
-end
-
-defmodule Pleroma.User.Backup.Processor do
-  @behaviour Pleroma.User.Backup.ProcessorAPI
-
-  alias Pleroma.Repo
-  alias Pleroma.User.Backup
-
-  import Ecto.Changeset
-
-  @impl true
-  def do_process(backup, current_pid) do
-    with {:ok, zip_file} <- Backup.export(backup, current_pid),
-         {:ok, %{size: size}} <- File.stat(zip_file),
-         {:ok, _upload} <- Backup.upload(backup, zip_file) do
-      backup
-      |> cast(
-        %{
-          file_size: size,
-          processed: true,
-          state: :complete
-        },
-        [:file_size, :processed, :state]
-      )
-      |> Repo.update()
-    else
-      e -> {:error, e}
-    end
-  end
-end
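Taken together, the module now exposes a small, explicit pipeline instead of the old `create/2` plus Task-supervised `process/2`. A sketch of the two entry points, assuming a loaded `%User{}` named `user` (error handling elided; the admin path mirrors what `AdminAPIController` does below):

```elixir
alias Pleroma.User.Backup

# Rate-limited path, as used by the PleromaAPI controller:
{:ok, backup} = Backup.user(user)

# Admin path that bypasses the limit:
with %Backup{} = draft <- Backup.new(user),
     {:ok, inserted} <- Pleroma.Repo.insert(draft),
     {:ok, %Oban.Job{}} <- Backup.schedule_backup(inserted) do
  {:ok, inserted}
end
```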


@@ -13,6 +13,7 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
   alias Pleroma.ModerationLog
   alias Pleroma.Stats
   alias Pleroma.User
+  alias Pleroma.User.Backup
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.AdminAPI
   alias Pleroma.Web.AdminAPI.AccountView

@@ -429,7 +430,9 @@ def stats(conn, params) do
   def create_backup(%{assigns: %{user: admin}} = conn, %{"nickname" => nickname}) do
     with %User{} = user <- User.get_by_nickname(nickname),
-         {:ok, _} <- Pleroma.User.Backup.create(user, admin.id) do
+         %Backup{} = backup <- Backup.new(user),
+         {:ok, inserted_backup} <- Pleroma.Repo.insert(backup),
+         {:ok, %Oban.Job{}} <- Backup.schedule_backup(inserted_backup) do
       ModerationLog.insert_log(%{actor: admin, subject: user, action: "create_backup"})
       json(conn, "")


@@ -65,12 +65,7 @@ defp backup do
         file_name: %Schema{type: :string},
         file_size: %Schema{type: :integer},
         processed: %Schema{type: :boolean, description: "whether this backup has succeeded"},
-        state: %Schema{
-          type: :string,
-          description: "the state of the backup",
-          enum: ["pending", "running", "complete", "failed"]
-        },
-        processed_number: %Schema{type: :integer, description: "the number of records processed"}
+        tempdir: %Schema{type: :string}
       },
       example: %{
         "content_type" => "application/zip",

@@ -79,8 +74,7 @@ defp backup do
         "file_size" => 4105,
         "inserted_at" => "2020-09-08T16:42:07.000Z",
         "processed" => true,
-        "state" => "complete",
-        "processed_number" => 20
+        "tempdir" => "/tmp/PZIMw40vmpM"
       }
     }
   end


@@ -20,7 +20,7 @@ def index(%{assigns: %{user: user}} = conn, _params) do
   end

   def create(%{assigns: %{user: user}} = conn, _params) do
-    with {:ok, _} <- Backup.create(user) do
+    with {:ok, _} <- Backup.user(user) do
       backups = Backup.list(user)
       render(conn, "index.json", backups: backups)
     end


@@ -9,22 +9,12 @@ defmodule Pleroma.Web.PleromaAPI.BackupView do
   alias Pleroma.Web.CommonAPI.Utils

   def render("show.json", %{backup: %Backup{} = backup}) do
-    # To deal with records before the migration
-    state =
-      if backup.state == :invalid do
-        if backup.processed, do: :complete, else: :failed
-      else
-        backup.state
-      end
-
     %{
       id: backup.id,
       content_type: backup.content_type,
       url: download_url(backup),
       file_size: backup.file_size,
       processed: backup.processed,
-      state: to_string(state),
-      processed_number: backup.processed_number,
       inserted_at: Utils.to_masto_date(backup.inserted_at)
     }
   end
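With `state` and `processed_number` gone, the rendered map is flat. A sketch of the new `show.json` output, reusing values from the API-spec example above (the `id` and URL are illustrative):

```elixir
%{
  id: 1,
  content_type: "application/zip",
  url: "https://example.com/media/backups/archive-cofe-20200908T164207.zip",
  file_size: 4105,
  processed: true,
  inserted_at: "2020-09-08T16:42:07.000Z"
}
```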


@@ -6,64 +6,46 @@ defmodule Pleroma.Workers.BackupWorker do
   use Oban.Worker, queue: :slow, max_attempts: 1

   alias Oban.Job
+  alias Pleroma.Config.Getting, as: Config
   alias Pleroma.User.Backup

-  def process(backup, admin_user_id \\ nil) do
-    %{"op" => "process", "backup_id" => backup.id, "admin_user_id" => admin_user_id}
-    |> new()
-    |> Oban.insert()
-  end
-
-  def schedule_deletion(backup) do
-    days = Pleroma.Config.get([Backup, :purge_after_days])
-    time = 60 * 60 * 24 * days
-    scheduled_at = Calendar.NaiveDateTime.add!(backup.inserted_at, time)
-
-    %{"op" => "delete", "backup_id" => backup.id}
-    |> new(scheduled_at: scheduled_at)
-    |> Oban.insert()
-  end
-
-  def delete(backup) do
-    %{"op" => "delete", "backup_id" => backup.id}
-    |> new()
-    |> Oban.insert()
-  end
-
   @impl Oban.Worker
   def perform(%Job{
-        args: %{"op" => "process", "backup_id" => backup_id, "admin_user_id" => admin_user_id}
+        args: %{"op" => "process", "backup_id" => backup_id}
       }) do
-    with {:ok, %Backup{} = backup} <-
-           backup_id |> Backup.get() |> Backup.process(),
-         {:ok, _job} <- schedule_deletion(backup),
-         :ok <- Backup.remove_outdated(backup),
-         :ok <- maybe_deliver_email(backup, admin_user_id) do
-      {:ok, backup}
+    with {_, %Backup{} = backup} <- {:get, Backup.get_by_id(backup_id)},
+         {_, {:ok, updated_backup}} <- {:run, Backup.run(backup)},
+         {_, {:ok, uploaded_backup}} <- {:upload, Backup.upload(updated_backup)},
+         {_, {:ok, _job}} <- {:delete, Backup.schedule_delete(uploaded_backup)},
+         {_, :ok} <- {:outdated, Backup.remove_outdated(uploaded_backup.user)},
+         {_, :ok} <- {:email, maybe_deliver_email(uploaded_backup)} do
+      {:ok, uploaded_backup}
+    else
+      e -> {:error, e}
     end
   end

   def perform(%Job{args: %{"op" => "delete", "backup_id" => backup_id}}) do
-    case Backup.get(backup_id) do
-      %Backup{} = backup -> Backup.delete(backup)
+    case Backup.get_by_id(backup_id) do
+      %Backup{} = backup -> Backup.delete_archive(backup)
       nil -> :ok
     end
   end

   @impl Oban.Worker
-  def timeout(_job), do: :infinity
+  def timeout(_job), do: Config.get([Backup, :timeout], :timer.minutes(30))

   defp has_email?(user) do
     not is_nil(user.email) and user.email != ""
   end

-  defp maybe_deliver_email(backup, admin_user_id) do
+  defp maybe_deliver_email(backup) do
     has_mailer = Pleroma.Config.get([Pleroma.Emails.Mailer, :enabled])
     backup = backup |> Pleroma.Repo.preload(:user)

     if has_email?(backup.user) and has_mailer do
       backup
-      |> Pleroma.Emails.UserEmail.backup_is_ready_email(admin_user_id)
+      |> Pleroma.Emails.UserEmail.backup_is_ready_email()
       |> Pleroma.Emails.Mailer.deliver()

       :ok
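Since the whole export now runs inside a single Oban job, it can be driven end to end from a test with `Oban.Testing.perform_job/2`, mirroring the updated tests below. A sketch (inside a module that already does `use Oban.Testing, repo: Pleroma.Repo` and has a `user` fixture):

```elixir
{:ok, backup} = Pleroma.User.Backup.user(user)

oban_args = %{"op" => "process", "backup_id" => backup.id}
assert {:ok, done} = perform_job(Pleroma.Workers.BackupWorker, oban_args)

assert done.processed
assert done.file_size > 0
```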


@@ -0,0 +1,19 @@
+defmodule Pleroma.Repo.Migrations.BackupRefactor do
+  use Ecto.Migration
+
+  def up do
+    alter table("backups") do
+      remove(:state)
+      remove(:processed_number)
+      add(:tempdir, :string)
+    end
+  end
+
+  def down do
+    alter table("backups") do
+      add(:state, :integer, default: 5)
+      add(:processed_number, :integer, default: 0)
+      remove(:tempdir)
+    end
+  end
+end


@@ -1,49 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.User.BackupAsyncTest do
-  use Pleroma.DataCase, async: true
-
-  import Pleroma.Factory
-  import Mox
-
-  alias Pleroma.UnstubbedConfigMock, as: ConfigMock
-  alias Pleroma.User.Backup
-  alias Pleroma.User.Backup.ProcessorMock
-
-  setup do
-    user = insert(:user, %{nickname: "cofe", name: "Cofe", ap_id: "http://cofe.io/users/cofe"})
-
-    {:ok, backup} = user |> Backup.new() |> Repo.insert()
-    %{backup: backup}
-  end
-
-  test "it handles unrecoverable exceptions", %{backup: backup} do
-    ProcessorMock
-    |> expect(:do_process, fn _, _ ->
-      raise "mock exception"
-    end)
-
-    ConfigMock
-    |> stub_with(Pleroma.Config)
-
-    {:error, %{backup: backup, reason: :exit}} = Backup.process(backup, ProcessorMock)
-
-    assert backup.state == :failed
-  end
-
-  test "it handles timeouts", %{backup: backup} do
-    ProcessorMock
-    |> expect(:do_process, fn _, _ ->
-      Process.sleep(:timer.seconds(4))
-    end)
-
-    ConfigMock
-    |> expect(:get, fn [Pleroma.User.Backup, :process_wait_time] -> :timer.seconds(2) end)
-
-    {:error, %{backup: backup, reason: :timeout}} = Backup.process(backup, ProcessorMock)
-
-    assert backup.state == :failed
-  end
-end


@@ -6,7 +6,6 @@ defmodule Pleroma.User.BackupTest do
   use Oban.Testing, repo: Pleroma.Repo
   use Pleroma.DataCase

-  import Mock
   import Pleroma.Factory
   import Swoosh.TestAssertions
   import Mox
@@ -16,7 +15,6 @@ defmodule Pleroma.User.BackupTest do
   alias Pleroma.UnstubbedConfigMock, as: ConfigMock
   alias Pleroma.Uploaders.S3.ExAwsMock
   alias Pleroma.User.Backup
-  alias Pleroma.User.Backup.ProcessorMock
   alias Pleroma.Web.CommonAPI
   alias Pleroma.Workers.BackupWorker
@@ -28,79 +26,56 @@ defmodule Pleroma.User.BackupTest do
     ConfigMock
     |> stub_with(Pleroma.Config)

-    ProcessorMock
-    |> stub_with(Pleroma.User.Backup.Processor)
-
     :ok
   end

   test "it does not require enabled email" do
     clear_config([Pleroma.Emails.Mailer, :enabled], false)
     user = insert(:user)
-    assert {:ok, _} = Backup.create(user)
+    assert {:ok, _} = Backup.user(user)
   end

   test "it does not require user's email" do
     user = insert(:user, %{email: nil})
-    assert {:ok, _} = Backup.create(user)
+    assert {:ok, _} = Backup.user(user)
   end

   test "it creates a backup record and an Oban job" do
-    %{id: user_id} = user = insert(:user)
+    user = insert(:user)

-    assert {:ok, %Oban.Job{args: args}} = Backup.create(user)
+    assert {:ok, %Backup{} = backup} = Backup.user(user)
+    assert {:ok, %Oban.Job{args: args}} = Backup.schedule_backup(backup)
     assert_enqueued(worker: BackupWorker, args: args)

-    backup = Backup.get(args["backup_id"])
-    assert %Backup{user_id: ^user_id, processed: false, file_size: 0, state: :pending} = backup
+    backup = Backup.get_by_id(args["backup_id"])
+    assert %Backup{processed: false, file_size: 0} = backup
   end

   test "it returns an error if the export limit is over" do
-    %{id: user_id} = user = insert(:user)
+    user = insert(:user)
     limit_days = Pleroma.Config.get([Backup, :limit_days])

-    assert {:ok, %Oban.Job{args: args}} = Backup.create(user)
-    backup = Backup.get(args["backup_id"])
-    assert %Backup{user_id: ^user_id, processed: false, file_size: 0} = backup
+    {:ok, first_backup} = Backup.user(user)
+    {:ok, _run_backup} = Backup.run(first_backup)

-    assert Backup.create(user) == {:error, "Last export was less than #{limit_days} days ago"}
+    assert Backup.user(user) == {:error, "Last export was less than #{limit_days} days ago"}
   end

   test "it processes a backup record" do
     clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
     %{id: user_id} = user = insert(:user)

-    assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
-    assert {:ok, backup} = perform_job(BackupWorker, args)
+    assert {:ok, %Backup{id: backup_id}} = Backup.user(user)
+
+    oban_args = %{"op" => "process", "backup_id" => backup_id}
+    assert {:ok, backup} = perform_job(BackupWorker, oban_args)
+
     assert backup.file_size > 0
-    assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id, state: :complete} = backup
+    assert match?(%Backup{id: ^backup_id, processed: true, user_id: ^user_id}, backup)

     delete_job_args = %{"op" => "delete", "backup_id" => backup_id}

     assert_enqueued(worker: BackupWorker, args: delete_job_args)
     assert {:ok, backup} = perform_job(BackupWorker, delete_job_args)
-    refute Backup.get(backup_id)
-
-    email = Pleroma.Emails.UserEmail.backup_is_ready_email(backup)
-
-    assert_email_sent(
-      to: {user.name, user.email},
-      html_body: email.html_body
-    )
-  end
-
-  test "it updates states of the backup" do
-    clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
-    %{id: user_id} = user = insert(:user)
-
-    assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
-    assert {:ok, backup} = perform_job(BackupWorker, args)
-    assert backup.file_size > 0
-    assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id, state: :complete} = backup
-
-    delete_job_args = %{"op" => "delete", "backup_id" => backup_id}
-
-    assert_enqueued(worker: BackupWorker, args: delete_job_args)
-    assert {:ok, backup} = perform_job(BackupWorker, delete_job_args)
-    refute Backup.get(backup_id)
+    refute Backup.get_by_id(backup_id)

     email = Pleroma.Emails.UserEmail.backup_is_ready_email(backup)
@@ -114,10 +89,15 @@ test "it does not send an email if the user does not have an email" do
     clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
     %{id: user_id} = user = insert(:user, %{email: nil})

-    assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
-    assert {:ok, backup} = perform_job(BackupWorker, args)
-    assert backup.file_size > 0
-    assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id} = backup
+    assert {:ok, %Backup{} = backup} = Backup.user(user)
+
+    expected_args = %{"op" => "process", "backup_id" => backup.id}
+
+    assert_enqueued(worker: BackupWorker, args: %{"backup_id" => backup.id})
+    assert {:ok, completed_backup} = perform_job(BackupWorker, expected_args)
+
+    assert completed_backup.file_size > 0
+    assert completed_backup.processed
+    assert completed_backup.user_id == user_id

     assert_no_email_sent()
   end
@@ -127,10 +107,13 @@ test "it does not send an email if mailer is not on" do
     clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
     %{id: user_id} = user = insert(:user)

-    assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
-    assert {:ok, backup} = perform_job(BackupWorker, args)
+    assert {:ok, %Backup{id: backup_id}} = Backup.user(user)
+
+    oban_args = %{"op" => "process", "backup_id" => backup_id}
+
+    assert {:ok, backup} = perform_job(BackupWorker, oban_args)
+
     assert backup.file_size > 0
-    assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id} = backup
+    assert match?(%Backup{id: ^backup_id, processed: true, user_id: ^user_id}, backup)

     assert_no_email_sent()
   end
@@ -139,10 +122,15 @@ test "it does not send an email if the user has an empty email" do
     clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
     %{id: user_id} = user = insert(:user, %{email: ""})

-    assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
-    assert {:ok, backup} = perform_job(BackupWorker, args)
+    assert {:ok, %Backup{id: backup_id} = backup} = Backup.user(user)
+
+    expected_args = %{"op" => "process", "backup_id" => backup.id}
+
+    assert_enqueued(worker: BackupWorker, args: expected_args)
+    assert {:ok, backup} = perform_job(BackupWorker, expected_args)
+
     assert backup.file_size > 0
-    assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id} = backup
+    assert match?(%Backup{id: ^backup_id, processed: true, user_id: ^user_id}, backup)

     assert_no_email_sent()
   end
@@ -152,16 +140,13 @@ test "it removes outdated backups after creating a fresh one" do
     clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
     user = insert(:user)

-    assert {:ok, job1} = Backup.create(user)
-    assert {:ok, %Backup{}} = ObanHelpers.perform(job1)
-    assert {:ok, job2} = Backup.create(user)
-
-    assert Pleroma.Repo.aggregate(Backup, :count) == 2
-
-    assert {:ok, backup2} = ObanHelpers.perform(job2)
+    assert {:ok, %{id: backup_one_id}} = Backup.user(user)
+    assert {:ok, %{id: _backup_two_id}} = Backup.user(user)

+    # Run the backups
     ObanHelpers.perform_all()

-    assert [^backup2] = Pleroma.Repo.all(Backup)
+    assert_enqueued(worker: BackupWorker, args: %{"op" => "delete", "backup_id" => backup_one_id})
   end

   test "it creates a zip archive with user data" do
@@ -185,9 +170,12 @@ test "it creates a zip archive with user data" do
     CommonAPI.follow(other_user, user)

-    assert {:ok, backup} = user |> Backup.new() |> Repo.insert()
-    assert {:ok, path} = Backup.export(backup, self())
-    assert {:ok, zipfile} = :zip.zip_open(String.to_charlist(path), [:memory])
+    assert {:ok, backup} = Backup.user(user)
+    assert {:ok, run_backup} = Backup.run(backup)
+
+    tempfile = Path.join([run_backup.tempdir, run_backup.file_name])
+
+    assert {:ok, zipfile} = :zip.zip_open(String.to_charlist(tempfile), [:memory])
     assert {:ok, {~c"actor.json", json}} = :zip.zip_get(~c"actor.json", zipfile)

     assert %{
@@ -275,10 +263,10 @@ test "it creates a zip archive with user data" do
            } = Jason.decode!(json)

     :zip.zip_close(zipfile)
-    File.rm!(path)
+    File.rm_rf!(run_backup.tempdir)
   end

-  test "it counts the correct number processed" do
+  test "correct number processed" do
     user = insert(:user, %{nickname: "cofe", name: "Cofe", ap_id: "http://cofe.io/users/cofe"})

     Enum.map(1..120, fn i ->
@@ -288,43 +276,21 @@ test "it counts the correct number processed" do
     end)

     assert {:ok, backup} = user |> Backup.new() |> Repo.insert()
-    {:ok, backup} = Backup.process(backup)
-    assert backup.processed_number == 1 + 120 + 120 + 120
-
-    Backup.delete(backup)
-  end
-
-  test "it handles errors" do
-    user = insert(:user, %{nickname: "cofe", name: "Cofe", ap_id: "http://cofe.io/users/cofe"})
-
-    Enum.map(1..120, fn i ->
-      {:ok, _status} = CommonAPI.post(user, %{status: "status #{i}"})
-    end)
-
-    assert {:ok, backup} = user |> Backup.new() |> Repo.insert()
-
-    with_mock Pleroma.Web.ActivityPub.Transmogrifier,
-      [:passthrough],
-      prepare_outgoing: fn data ->
-        object =
-          data["object"]
-          |> Pleroma.Object.normalize(fetch: false)
-          |> Map.get(:data)
-
-        data = data |> Map.put("object", object)
-
-        if String.contains?(data["object"]["content"], "119"),
-          do: raise(%Postgrex.Error{}),
-          else: {:ok, data}
-      end do
-      {:ok, backup} = Backup.process(backup)
-      assert backup.processed
-      assert backup.state == :complete
-      assert backup.processed_number == 1 + 119
-
-      Backup.delete(backup)
-    end
+    {:ok, backup} = Backup.run(backup)
+
+    zip_path = Path.join([backup.tempdir, backup.file_name])
+    assert {:ok, zipfile} = :zip.zip_open(String.to_charlist(zip_path), [:memory])
+
+    backup_parts = [~c"likes.json", ~c"bookmarks.json", ~c"outbox.json"]
+
+    Enum.each(backup_parts, fn part ->
+      assert {:ok, {_part, part_json}} = :zip.zip_get(part, zipfile)
+      {:ok, decoded_part} = Jason.decode(part_json)
+      assert decoded_part["totalItems"] == 120
+    end)
+
+    Backup.delete_archive(backup)
   end
  describe "it uploads and deletes a backup archive" do

@@ -343,12 +309,11 @@ test "it handles errors" do
       Bookmark.create(user.id, status3.id)

       assert {:ok, backup} = user |> Backup.new() |> Repo.insert()
-      assert {:ok, path} = Backup.export(backup, self())

-      [path: path, backup: backup]
+      [backup: backup]
     end

-    test "S3", %{path: path, backup: backup} do
+    test "S3", %{backup: backup} do
      clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.S3)
      clear_config([Pleroma.Uploaders.S3, :streaming_enabled], false)
@@ -358,15 +323,17 @@ test "S3", %{path: path, backup: backup} do
         %{http_method: :delete} -> {:ok, %{status_code: 204}}
       end)

-      assert {:ok, %Pleroma.Upload{}} = Backup.upload(backup, path)
-      assert {:ok, _backup} = Backup.delete(backup)
+      assert {:ok, backup} = Backup.run(backup)
+      assert {:ok, %Backup{processed: true}} = Backup.upload(backup)
+      assert {:ok, _backup} = Backup.delete_archive(backup)
     end

-    test "Local", %{path: path, backup: backup} do
+    test "Local", %{backup: backup} do
       clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)

-      assert {:ok, %Pleroma.Upload{}} = Backup.upload(backup, path)
-      assert {:ok, _backup} = Backup.delete(backup)
+      assert {:ok, backup} = Backup.run(backup)
+      assert {:ok, %Backup{processed: true}} = Backup.upload(backup)
+      assert {:ok, _backup} = Backup.delete_archive(backup)
     end
   end
 end


@@ -1096,9 +1096,13 @@ test "it creates a backup", %{conn: conn} do
       ObanHelpers.perform_all()

-      email = Pleroma.Emails.UserEmail.backup_is_ready_email(backup, admin.id)
+      email = Pleroma.Emails.UserEmail.backup_is_ready_email(backup)

-      assert String.contains?(email.html_body, "Admin @#{admin.nickname} requested a full backup")
+      assert String.contains?(
+               email.html_body,
+               "A full backup of your Pleroma account was requested"
+             )

       assert_email_sent(to: {user.name, user.email}, html_body: email.html_body)

       log_message = "@#{admin_nickname} requested account backup for @#{user_nickname}"


@@ -20,9 +20,7 @@ defmodule Pleroma.Web.PleromaAPI.BackupControllerTest do
   end

   test "GET /api/v1/pleroma/backups", %{user: user, conn: conn} do
-    assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id}}} = Backup.create(user)
-
-    backup = Backup.get(backup_id)
+    assert {:ok, %Backup{} = backup} = Backup.user(user)

     response =
       conn

@@ -27,42 +27,11 @@ test "it renders the ID" do
     assert result.id == backup.id
   end

-  test "it renders the state and processed_number" do
+  test "it renders the processed state" do
     user = insert(:user)
     backup = Backup.new(user)
     result = BackupView.render("show.json", backup: backup)
-    assert result.state == to_string(backup.state)
-    assert result.processed_number == backup.processed_number
-  end
-
-  test "it renders failed state with legacy records" do
-    backup = %Backup{
-      id: 0,
-      content_type: "application/zip",
-      file_name: "dummy",
-      file_size: 1,
-      state: :invalid,
-      processed: true,
-      processed_number: 1,
-      inserted_at: NaiveDateTime.utc_now()
-    }
-
-    result = BackupView.render("show.json", backup: backup)
-    assert result.state == "complete"
-
-    backup = %Backup{
-      id: 0,
-      content_type: "application/zip",
-      file_name: "dummy",
-      file_size: 1,
-      state: :invalid,
-      processed: false,
-      processed_number: 1,
-      inserted_at: NaiveDateTime.utc_now()
-    }
-
-    result = BackupView.render("show.json", backup: backup)
-    assert result.state == "failed"
+    refute result.processed
   end
 end


@@ -32,6 +32,4 @@
 Mox.defmock(Pleroma.LoggerMock, for: Pleroma.Logging)

-Mox.defmock(Pleroma.User.Backup.ProcessorMock, for: Pleroma.User.Backup.ProcessorAPI)
-
 Mox.defmock(Pleroma.Uploaders.S3.ExAwsMock, for: Pleroma.Uploaders.S3.ExAwsAPI)